# -*- coding: utf-8 -*-
from elasticsearch import NotFoundError as ElasticsearchNotFoundError
from elasticsearch import RequestError as ElasticsearchRequestError
from elasticsearch import TransportError as ElasticsearchTransportError
from flask_restful import abort
from werkzeug.exceptions import NotFound

from distribution import app
from distribution.models import get_model
from distribution.utils.paginator import Paginator

from .backends import current_backend


class BaseManager(object):
    """Base persistence manager mapping model documents onto an
    Elasticsearch index.

    Subclasses must set :attr:`index` to the name of an index known to the
    backend; the model class is injected at construction time.  The search
    helpers translate low-level Elasticsearch failures into HTTP responses
    (400 via ``abort``, 404 via ``NotFound``) so they can be called directly
    from Flask-RESTful resources.
    """

    _model_class = None
    # Hard limits read from the Flask app config at class-creation time.
    MAX_ROWS = app.config['MAX_ROWS_LIMIT']
    MAX_DAYS = app.config['MAX_DAYS_LIMIT']
    # Should mirror the index.max_result_window Elasticsearch setting
    # (Elasticsearch's own default is 10000).
    MAX_RESULT_WINDOW = app.config.get('MAX_RESULT_WINDOW', 10000)
    paginator_class = Paginator
    backend = current_backend
    index = None

    def __init__(self, model_class, *args, **kwargs):
        """Bind the manager to *model_class*, validating its configuration.

        :param model_class: model type used to cast raw hits into objects.
        :raises ValueError: if no model class is given, if ``index`` was not
            set on the subclass, or if the index is unknown to the backend.
        """
        if not model_class:
            raise ValueError('You need to specify a model class for the manager')
        if self.index is None:
            raise ValueError('You need to specify an index for the manager')
        elif self.index not in self.backend._get_indices_names():
            raise ValueError(
                'The index stated for the manager: ({}) is not defined in the backend'.format(
                    self.index))

        self._model_class = model_class

    @property
    def model_class(self):
        """Model class this manager instantiates for every hit."""
        return self._model_class

    @property
    def doc_type(self):
        """Elasticsearch document type: the lower-cased model class name."""
        return self.model_class.__name__.lower()

    def save(self, document):
        """Index a single *document* under its own key (create or replace)."""
        self.backend.client.index(
            self.index,
            self.doc_type,
            document.data, document.key)

    def bulk_save(self, documents, errors=None, domain=None):
        """Index *documents* in a single bulk request.

        ``errors`` and ``domain`` are accepted for interface compatibility
        but are currently unused.
        """
        actions = [
            {
                '_op_type': 'index',
                '_index': self.index,
                '_type': self.doc_type,
                '_id': document.key,
                '_source': document.data,
            }
            for document in documents
        ]

        app.logger.debug('bulk_save: Saving {} documents in a bulk operation'.format(
            len(actions)))
        self.backend.bulk(actions)

    def disable_many(self, keys):
        """Soft-delete the documents identified by *keys*.

        Each document gets ``meta.is_deleted`` set to ``True`` through a
        bulk partial update.

        :return: a ``(errors_id, error_messages)`` tuple with the ids and
            error payloads of the updates that failed (both empty on full
            success).
        """
        actions = [{'_op_type': 'update',
                    '_index': self.index,
                    '_type': self.doc_type,
                    '_id': key,
                    'doc': {'meta': {'is_deleted': True}}}
                   for key in keys]

        app.logger.debug('disable_many: Disabling {} elements for query {}'.format(
            len(actions), actions))
        _, errors = self.backend.bulk(actions)
        errors_id = [error['update']['_id'] for error in errors]
        error_messages = [error['update']['error'] for error in errors]

        return errors_id, error_messages

    def delete(self, keys):
        """Hard-delete the documents identified by *keys* in one bulk call."""
        actions = [{'_op_type': 'delete',
                    '_index': self.index,
                    '_type': self.doc_type,
                    '_id': key}
                   for key in keys]
        app.logger.debug('delete: Deleting {} elements for query {}'.format(
            len(actions), actions))
        self.backend.bulk(actions)

    def get(self, key):
        """Fetch one document by *key* and cast it to a model instance.

        :raises werkzeug.exceptions.NotFound: if the key does not exist.
        """
        try:
            data = self.backend.client.get(self.index, key)
            return self.cast_to_model(data)
        except ElasticsearchNotFoundError as e:
            raise NotFound(e)

    def get_by_field(self, field, value, is_deleted=False):
        """Return the first document whose *field* phrase-matches *value*.

        :param is_deleted: preferred ``meta.is_deleted`` state of the match.
            NOTE(review): this is only a ``should`` clause, so it boosts
            rather than filters — confirm that is intentional.
        :raises werkzeug.exceptions.NotFound: if nothing matches.

        Aborts with HTTP 400 when Elasticsearch rejects the request.
        """
        query = {
            "query": {
                "bool": {
                    "must": {
                        "match": {
                            "{}".format(field):
                            {
                                "query": "{}".format(value),
                                "type": "phrase"
                            }
                        }
                    },
                    "should": [{
                        "match": {
                            "meta.is_deleted": "{}".format(str(is_deleted).lower())
                        }
                    }]
                }
            }
        }
        try:
            result = self.backend.client.search(self.index, '', query, size=1)
        except (ElasticsearchRequestError, ElasticsearchTransportError) as e:
            app.logger.exception(e)
            abort(400, message='Invalid Request')
        hits = result['hits']['hits']
        if not hits:
            raise NotFound()
        return self.cast_to_model(hits[0])

    def mget(self, keys):  # pragma: no cover
        """Fetch several documents at once, silently skipping missing keys.

        :return: a list of model instances (empty when *keys* is empty).
        """
        if not keys:
            return []

        result = self.backend.client.mget({'ids': list(keys)}, self.index)

        # Documents that do not exist come back with found=False; skip them.
        return [self.cast_to_model(doc)
                for doc in result['docs'] if doc['found']]

    def build_search_query(self, query=None, *args, **kwargs):
        """Hook for subclasses to enrich/transform the raw *query* dict.

        The base implementation returns *query* untouched.
        """
        return query

    def _search(self, *args, **kwargs):
        """Run a search built by :meth:`build_search_query`.

        :return: a ``(query, raw_result)`` tuple.

        Aborts with HTTP 400 when Elasticsearch rejects the request.
        """
        query = self.build_search_query(*args, **kwargs)
        index = self.index

        try:
            result = self.backend.client.search(index, '', query)
            return query, result
        except (ElasticsearchRequestError, ElasticsearchTransportError) as e:
            app.logger.exception(e)
            abort(400, message='Invalid Request')

    def count(self, *args, **kwargs):  # pragma: no cover
        """Return the raw count response for the query built from the args.

        Aborts with HTTP 400 when Elasticsearch rejects the request.
        """
        query = self.build_search_query(*args, **kwargs)
        index = self.index

        try:
            result = self.backend.client.count(index, '', query)
            return result
        except (ElasticsearchRequestError, ElasticsearchTransportError) as e:
            app.logger.exception(e)
            abort(400, message='Invalid Request')

    def search_by_field(self, field, value, default_operator='AND'):
        """Search documents where *field* contains the exact phrase *value*.

        :raises werkzeug.exceptions.NotFound: if nothing matches.

        Aborts with HTTP 400 when Elasticsearch rejects the request.
        """
        # We need to escape the special characteres in the uri because otherwise elasticsearch
        # takes it as a regexp. We wrap the value in " " and escape those to tell explicitaly to ES
        # that is not a regular expression, for more details see:
        # https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query.html#_regular_expressions
        # Backslashes and embedded double quotes would otherwise break out of
        # the quoted phrase and corrupt (or inject into) the query_string.
        escaped_value = '{}'.format(value).replace('\\', '\\\\').replace('"', '\\"')
        query = {
            "query": {
                "query_string": {
                    "default_operator": default_operator,
                    "query": "{}:\"{}\"".format(field, escaped_value)
                }
            }
        }
        try:
            result = self.backend.client.search(self.index, '', query)
        except (ElasticsearchRequestError, ElasticsearchTransportError) as e:
            app.logger.exception(e)
            abort(400, message='Invalid Request')
        hits = result['hits']['hits']
        if not hits:
            raise NotFound()
        return self.cast_to_models(hits)

    def cast_to_model(self, data):
        """Instantiate the model registered for the hit's ``_type``.

        Validation is skipped because the data already lived in the index.
        """
        return get_model(data['_type'])(data['_source'], validate=False)

    def cast_to_models(self, hits):
        """Cast a list of raw hits to model instances."""
        return [self.cast_to_model(hit) for hit in hits]

    def search(self, *args, **kwargs):
        """Run a search and return the hits as model instances."""
        query, result = self._search(*args, **kwargs)
        hits = result['hits']['hits']
        app.logger.debug(u'search: Found {} elements for query: {}'
                         .format(len(hits), query))
        return self.cast_to_models(hits)

    def extended_search(self, *args, **kwargs):
        """
        Special search method that extends the regular search results
        with other information. To do so, the returned data type is a
        dictionary. Currently, only a paginator element is added.

        :param args: positional arguments to be forwarded to _search.
        :param kwargs: keyword arguments to be forwarded to _search.
        :return: a dictionary containing "items" and "paginator" keys.

        """
        query, result = self._search(*args, **kwargs)

        total_count = result['hits']['total']
        app.logger.debug(u'search: Found {} elements for query: {}'
                         .format(total_count, query))

        data = {'items': self.cast_to_models(result['hits']['hits'])}
        if self.paginator_class:
            max_rows = self.MAX_ROWS
            first_item_index = query.get('from', 0) if query else 0
            page_size = query.get('size', max_rows) if query else max_rows
            # Floor division: the page number must stay an int (plain `/`
            # yields a float under Python 3).  A size of 0 (count-only or
            # aggregation-only query) has no pages — default to page 1
            # instead of raising ZeroDivisionError.
            page_start = first_item_index // page_size + 1 if page_size else 1

            data['paginator'] = self.paginator_class(page_start, page_size,
                                                     total_count,
                                                     self.MAX_RESULT_WINDOW)

        return data

    def aggregated_search(self, *args, **kwargs):
        """Run a search expected to carry aggregations and return them.

        :raises TypeError: when no ``group_by`` keyword argument is given.
        :return: the raw ``aggregations`` dict (empty if none came back).
        """
        if 'group_by' not in kwargs:
            raise TypeError('aggregated_search was called without a "group_by" '
                            'keyword argument')

        query, result = self._search(*args, **kwargs)
        aggs = result.get('aggregations', {})

        app.logger.debug(u'aggregated_search: Query {} had {} aggregation(s): '
                         u'{}.'.format(query, len(aggs), ', '.join(aggs)))

        return aggs
