from datetime import timedelta

from django.apps import apps
from django.db import IntegrityError, connection
from django.db.migrations.executor import MigrationExecutor
from django.test import TestCase
from django.utils.dateparse import parse_datetime
from django.utils.timezone import now

# we're just importing this so that the ZMQ monkey-patching happens before any chain stuff
import chain.core.test_core
from chain.core.chaintestcase import ChainTestCase
from chain.core.resources import influx_client
from chain.influx_client import InfluxClient, HTTP_STATUS_SUCCESSFUL_WRITE
from chain.localsettings import INFLUX_MEASUREMENT

# this test pattern came directly from the Caktus Group recipe
# "Writing Unit Tests for Django Migrations" (original URL lost in
# transcription -- TODO restore the link)

class TestMigrations(ChainTestCase):
    # for some reason this is returning "chain.core", which throws an error when
    # we actuall use it because the migration graph wants just "core"
    # @property
    # def app(self):
    #     return apps.get_containing_app_config(type(self).__module__).name
    app = 'core'

    migrate_from = None
    migrate_to = None

    def setUp(self):
        assert self.migrate_from and self.migrate_to, \
            "TestCase '{}' must define migrate_from and migrate_to properties".format(type(self).__name__)
        self.migrate_from = [(, self.migrate_from)]
        self.migrate_to = [(, self.migrate_to)]
        executor = MigrationExecutor(connection)
        old_apps = executor.loader.project_state(self.migrate_from).apps

        # Reverse to the original migration

        super(TestMigrations, self).setUp()

        # Run the migration to test
        executor = MigrationExecutor(connection)
        executor.loader.build_graph()  # reload.

        self.apps = executor.loader.project_state(self.migrate_to).apps

    def setUpBeforeMigration(self, apps):

class AddInfluxConvenienceTags(TestMigrations):
    # disable writing of influx data by default
    write_scalar_data = False

    migrate_from = '0002_auto_20191017_1403'
    migrate_to = '0003_add_influx_convenience_tags'

    # post data in the old format, so we can test that the upgrade happened
    # successfully
    def old_post_data(self, site_id, device_id, sensor_id, value, timestamp=None):
        timestamp = InfluxClient.convert_timestamp(timestamp)
        data = '{0},sensor_id={1},site_id={2},device_id={3} value={4}'.format(INFLUX_MEASUREMENT,
        if timestamp:
            data += ' ' + str(timestamp)
        response ='write', data)
        if response.status_code != HTTP_STATUS_SUCCESSFUL_WRITE:
            raise IntegrityError('Error storing data')
        return response

    def setUpBeforeMigration(self, apps):
        # add the data in the old format (without the metric tag)
        for data in self.scalar_data:
                               data['timestamp'])'query', '''
                SELECT max("value"), min("value"), mean("value"), count("value"), sum("value")
                INTO "{0}" FROM "{1}" GROUP BY "sensor_id", time(1h), *
            '''.format(INFLUX_MEASUREMENT + '_1h', INFLUX_MEASUREMENT), True)'query', '''
                SELECT max("max"), min("min"), sum("sum")/sum("count") as "mean", sum("count") as "count", sum("sum")
                INTO "{0}" FROM "{1}" GROUP BY "sensor_id", time(1d), *
            '''.format(INFLUX_MEASUREMENT + '_1d', INFLUX_MEASUREMENT + '_1h'), True)'query', '''
                SELECT max("max"), min("min"), sum("sum")/sum("count") as "mean", sum("count") as "count", sum("sum")
                INTO "{0}" FROM "{1}" GROUP BY "sensor_id", time(1w), *
            '''.format(INFLUX_MEASUREMENT + '_1w', INFLUX_MEASUREMENT + '_1d'), True)

    # now we confirm that the metric tag is present in the migrated data
    def test_tags_migrated(self):
        for sensor in self.sensors:
            queried = sorted(influx_client.get_sensor_data({'sensor_id':}),
                             key=lambda d: d["time"])
            expected = sorted(filter(lambda d: d["sensor"].id ==, self.scalar_data),
                              key=lambda d: d["timestamp"])
            for qd, ed in zip(queried, expected):
                self.assertIn("metric", qd.keys())
                self.assertEqual(qd["value"], ed["value"])
                # it seems that somewhere in the round-trip we loose a little accuracy
                self.assertLess(abs(parse_datetime(qd["time"]) - ed["timestamp"]), timedelta(milliseconds=100))
            for agg in ["1h", "1d", "1w"]:
                queried = influx_client.get_sensor_data({'sensor_id':, 'aggtime': agg})
                for qd in queried:
                    self.assertIn("metric", qd.keys())
                    self.assertEqual(qd["metric"], ed["sensor"]
                    values = map(lambda d: d["value"], expected)
                    self.assertEqual(qd["count"], len(values))
                    self.assertEqual(qd["sum"], sum(values))
                    self.assertEqual(qd["mean"], sum(values)/len(values))
                    self.assertEqual(qd["min"], min(values))
                    self.assertEqual(qd["max"], max(values))
        # check that old data is removed
        for agg in ["", "_1h", "_1d", "_1w"]:
            query = "SELECT * FROM {}{} WHERE metric = ''".format(
                INFLUX_MEASUREMENT, agg)
            db_data = influx_client.get_values(influx_client.get(query, True))
            self.assertEqual(len(db_data), 0)