#!/usr/bin/python
# -*- coding: utf8 -*-
"""
Скрипт для проставления тегов выложенности на тикеты в стартреке.
"""
import argparse
import itertools
import logging
import os
import re
import subprocess
import sys
import tempfile
import shutil
import datetime
import time
import multiprocessing

from project_specific import ProjectSpecificSettings

sys.path.insert(0, '/opt/direct-py/startrek-python-client-sni-fix')
from startrek_client import Startrek
logging.getLogger('startrek_client').setLevel(logging.CRITICAL)

# must come after the startrek import, because requests is imported there as well
import requests
import direct_juggler.juggler as dj

# NOTE(review): the relative path below looks like a typo of the absolute path
# on the next line; it only has an effect if the cwd happens to contain
# 'opt/release_changelog' — confirm and remove if unintended.
sys.path.insert(0, 'opt/release_changelog')
sys.path.insert(0, '/opt/release_changelog')
import changelog_utils
from changelog_utils import APPS_CONFIG as apps_conf


# Script name; used as the Startrek user agent and in comment signatures.
SCRIPT_NAME = os.path.basename(__file__)
# OAuth token for the Startrek API, read once at import time.
with open("/etc/direct-tokens/startrek") as fh:
    STARTREK_TOKEN = fh.read().strip()
# Tag placed on a release ticket once every ticket of that release is tagged.
TAG = 'deployed_tag_on_all_tickets'
SIGN_COMMENT = ProjectSpecificSettings().get_sign_comment(SCRIPT_NAME)
JUGGLER_SERVICE_NAME = 'dt-deploy-notifier.working'
LOG_PATH = '/var/log/yandex/dt-deploy-notifier'
# All applications participating in deploy notifications; apps that opted out
# via the 'releases-for-deploy-notification' ignore-feature are excluded.
ALL_APPS = sorted([
    app for app in apps_conf.keys()
    if 'releases-for-deploy-notification' not in apps_conf[app].get('ignore-features', [])
])

# Frontend projects are handled via arc branches rather than the svn-based
# changelog_utils flow (see get_revisions_with_tickets_for_frontend_release).
FRONTEND_PROJECT_NAMES = ['dna', 'uac']
FRONTEND_PROJECT_ARCADIA_PATH_BY_APP_NAME = {
    'dna': 'arcadia/adv/frontend/services/dna',
    'uac': 'arcadia/adv/frontend/services/uac',
}
FRONTEND_PROJECT_RELEASE_BRANCH_PREFIX_BY_APP_NAME = {
    'dna': 'releases/direct/dna/',
    'uac': 'releases/direct/frontend/services/uac/v',
}
FRONTEND_RELEASE_VERSION_REGEX_BY_APP_NAME = {
    'dna': r'([0-9\.]+)', # the branch number includes both major and minor versions
    'uac': r'([0-9]+)', # the branch number includes only the major version (built in new CI)
}

def parse_options():
    """
    Parse and validate command line options.

    Normalizes defaults: with -r the app list and releases_number are derived
    from the given release ticket; without -a all known apps are processed.
    """
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawTextHelpFormatter, description=__doc__
    )
    add = parser.add_argument

    add('-a', dest='apps', action='append', choices=ALL_APPS,
        help="название приложения, для которого посчитать выложенность (можно указать несколько, по умолчанию все)")
    add('-n', dest="releases_number", type=int, default=3,
        help="сколько последних релизов проверяем (по умолчанию 3)")
    add('-q', '--quiet', dest='quiet', action='store_true',
        help="выводить лог в файл (по умолчанию, лог выводится на stdout)")
    add('-r', '--release', dest='release', type=str,
        help="посчитать выложенность для конкретного релиза")
    add('--timeout', dest='timeout', type=int,
        help="таймаут в минутах, за который должно быть все посчитано (по умолчанию, без таймаута)")
    add('-j', '--juggler', dest='juggler', action='store_true',
        help="отправлять событие в juggler, успешно завершил работу скрипт или нет (по умолчанию, не отправлять)")
    add('-m', '--send-metrics', dest='send_metrics', action='store_true',
        help=(
            "отправлять статистику по тикетам в metrics.log " +
            "(метрики отправляются роботом по крону, для ручного запуска лучше не указывать)"
        ))

    opts = parser.parse_args()

    if opts.releases_number <= 0:
        parser.error("параметр -n должен быть больше 0")

    # an explicit release implies exactly one app and one release to check
    if opts.release:
        opts.apps = [check_release_is_deployed(opts.release)]
        opts.releases_number = 1

    # default to every known application
    if not opts.apps:
        opts.apps = ALL_APPS

    return opts


def retry(cmd, tries_num=5, delay=1):
    """
    Call ``cmd`` repeatedly until it succeeds or attempts run out.

    :param cmd: zero-argument callable to invoke
    :param tries_num: maximum number of attempts
    :param delay: seconds to sleep between failed attempts
    :return: True if some attempt finished without raising, False otherwise
    """
    for attempt in range(tries_num):
        try:
            cmd()
            return True
        except Exception:
            # deliberately best-effort: callers inspect the boolean result;
            # narrowed from a bare except so KeyboardInterrupt/SystemExit pass through
            if attempt + 1 != tries_num:
                time.sleep(delay)
    return False


def get_revisions_with_tickets_for_frontend_release(releases, app):
    """
    Collect (revision, ticket) pairs from the arc log for projects that are
    not built with ya make (frontend projects released from arc branches).

    :param releases: release tickets, oldest first; releases[0] only serves
        as the base of the first diff range
    :param app: application name, expected in FRONTEND_PROJECT_NAMES
    :return: list with one entry per release in releases[1:]; each entry is a
        list of (revision, ticket_key) tuples, empty when the release already
        carries TAG
    :raises Exception: when the app is missing from one of the FRONTEND_* maps
    """
    ARCADIA_MOUNT_DIR = 'arcadia'
    ARCADIA_STORE_DIR = 'store'

    arcadia_mount_frontend_project_dir = FRONTEND_PROJECT_ARCADIA_PATH_BY_APP_NAME.get(app)
    if arcadia_mount_frontend_project_dir is None:
        # was logging.exception("app", app, ...): extra positional args are
        # %-format args and there is no active exception here
        logging.error("app %s not in FRONTEND_PROJECT_ARCADIA_PATH_BY_APP_NAME", app)
        raise Exception('Missing frontend project arcadia path for app {}'.format(app))

    ARCADIA_OBJECT_STORE_DIR = '/var/www/arc-object-store'

    release_version_regex = FRONTEND_RELEASE_VERSION_REGEX_BY_APP_NAME.get(app)
    if release_version_regex is None:
        logging.error("app %s not in FRONTEND_RELEASE_VERSION_REGEX_BY_APP_NAME", app)
        raise Exception('Missing release version regex for app {}'.format(app))

    # branch version extracted from each release ticket summary
    release_versions = [re.search(release_version_regex, release.summary).group(1) for release in releases]

    tmp_dir = None
    revs_with_tickets = []
    try:
        # mount arcadia only if at least one release still needs processing
        if any(TAG not in release.tags for release in releases[1:]):
            tmp_dir = tempfile.mkdtemp(dir='/tmp/temp-ttl/ttl_1d')

            os.chdir(tmp_dir)
            os.mkdir(ARCADIA_MOUNT_DIR)
            os.mkdir(ARCADIA_STORE_DIR)

            logging.info(
                'Mounting arcadia at {}/{}'.format(tmp_dir, ARCADIA_MOUNT_DIR))
            subprocess.check_call(['arc', '--update'])
            subprocess.check_call([
                'arc', 'mount', '--vfs-version', '2', '--allow-other',
                '--mount', ARCADIA_MOUNT_DIR, '--store', ARCADIA_STORE_DIR,
                '--object-store', ARCADIA_OBJECT_STORE_DIR,
            ], env={'ARC_ALLOW_WRITE_TO_ALL': '1'})

            os.chdir(arcadia_mount_frontend_project_dir)

        for i in xrange(1, len(releases)):
            # already fully processed on a previous run
            if TAG in releases[i].tags:
                revs_with_tickets.append([])
                continue

            release_branch_prefix = FRONTEND_PROJECT_RELEASE_BRANCH_PREFIX_BY_APP_NAME.get(app)
            if release_branch_prefix is None:
                logging.error("app %s not in FRONTEND_PROJECT_RELEASE_BRANCH_PREFIX_BY_APP_NAME", app)
                raise Exception('Missing frontend project release branch prefix for app {}'.format(app))

            # all commits between the previous release branch and this one
            releases_range = '{branch_prefix}{from_release}..{branch_prefix}{to_release}'.format(
                branch_prefix=release_branch_prefix,
                from_release=release_versions[i - 1],
                to_release=release_versions[i]
            )

            log = subprocess.check_output(['arc', 'log', '--oneline', releases_range, '.'])

            revs_with_tickets.append(re.findall(r"^(\w+).+(DIRECT-[0-9]+)", log, re.I | re.U | re.M))
    finally:
        if tmp_dir:
            os.chdir(tmp_dir)
            try:
                logging.info(
                    'Unmounting arcadia at {}/{}'.format(tmp_dir, ARCADIA_MOUNT_DIR))
                # NOTE(review): subprocess.call never raises CalledProcessError,
                # so a failed unmount is effectively ignored here — confirm intent
                subprocess.call(['arc', 'unmount', ARCADIA_MOUNT_DIR])
            except subprocess.CalledProcessError:
                logging.info('Arcadia not mounted at {}/{}'.format(
                    tmp_dir, ARCADIA_MOUNT_DIR))

            logging.info('Removing {} working copy from {}'.format(app, tmp_dir))
            shutil.rmtree(tmp_dir)

    return revs_with_tickets


def get_revisions_with_tickets_arcadia(app, releases, working_dir):
    """
    Collect (revision, ticket) pairs for applications released from Arcadia.

    For each release in releases[1:] that does not yet carry TAG, the svn log
    between it and the previous release (including app dependencies) is
    scanned; for the newest release the hotfix log of its branch is scanned
    as well.
    """
    # (base, head) version pair parsed from summaries like "... 1.123.4-1";
    # head is None when there is no hotfix component
    release_versions = [re.search(r'1\.(\d+)(?:\.(\d+))?-1', release.summary).groups() for release in releases]

    dependencies = []
    # dependencies are only needed if at least one release still lacks TAG
    if any(TAG not in release.tags for release in releases[1:]):
        logging.info("making dependencies for %s" % app)
        dependencies = changelog_utils.get_app_dependencies(app, working_dir)
        logging.info("found %d dependencies for %s" % (len(dependencies), app))

    revs_with_tickets = []
    last_index = len(releases) - 1
    for i, release in enumerate(releases):
        if i == 0:
            # the oldest release is only the base of the first diff range
            continue

        base_ver, head_ver = release_versions[i]
        prev_base_ver = release_versions[i - 1][0]

        found = []

        if TAG not in release.tags:
            logging.info("making svn log for %s" % release.key)
            svn_log = changelog_utils.get_svnlog_with_deps(
                app, prev_base_ver, base_ver, dependencies=dependencies
            )
            found.extend(changelog_utils.get_revisions_with_tickets(svn_log))
            logging.info("svn log for %s is done" % release.key)

        # hotfixes are checked only for the most recent closed release
        if i == last_index and head_ver:
            logging.info("making hotfix svn log for %s" % release.key)
            hotfix_log = changelog_utils.get_svnlog_from_branch(app, base_ver, base_ver, head_ver)
            found.extend(changelog_utils.get_revisions_with_tickets(hotfix_log))
            logging.info("hotfix svn log for %s is done" % release.key)

        revs_with_tickets.append(found)

    return revs_with_tickets


def get_recent_releases(startrek_client, app, releases_number, selected_release):
    """
    Fetch the last ``releases_number`` closed releases of an application,
    plus one extra that serves as the diff base, ordered oldest first.

    :param startrek_client: Startrek API client
    :param app: application name, a key of apps_conf
    :param releases_number: how many recent releases to inspect
    :param selected_release: optional release key; when given, only releases
        with keys up to and including it are considered
    :return: list of release issues, oldest first
    """
    query = 'Queue: DIRECT Type: Release Components: "%s" %s Status: Closed "Sort by": key desc' % (
        apps_conf[app]['tracker-component'],
        "" if not selected_release else "Key: <=%s" % selected_release
    )
    found = startrek_client.issues.find(query, per_page=releases_number + 1)
    # find() may return a list or a lazy paginated iterable; islice handles
    # both uniformly and caps the result at releases_number + 1 items
    releases = list(itertools.islice(found, releases_number + 1))

    # the query sorts newest first; callers expect oldest first
    releases.reverse()
    return releases


def tag_tickets(startrek_client, ticket2revs, app):
    """
    Leave a "deployed to production" comment and tag on each ticket.

    :param startrek_client: Startrek API client
    :param ticket2revs: mapping of ticket key -> list of revisions mentioning it
    :param app: application name, a key of apps_conf
    :return: tuple of
        - number of tickets tagged during this call
        - True if every ticket now carries the deployed tag (so the release
          can be marked as fully processed)
    """
    deployed_tag = apps_conf[app]['tracker-deployed-tag']
    all_tickets_with_tag = True
    tagged_tickets = 0

    for ticket, revs in ticket2revs.items():
        try:
            issue = startrek_client.issues[ticket]
        except Exception:
            # previously a bare "except: continue": the failure was invisible
            # and all_tickets_with_tag stayed True, so the release could be
            # marked done without this ticket ever being tagged; now we log
            # and keep the release open for the next run
            logging.warning("can't fetch ticket %s, will retry next run", ticket)
            all_tickets_with_tag = False
            continue
        if deployed_tag in issue.tags:
            continue

        comment = u"Автоматический мониторинг: выехало в продакшен проекта %s с коммитом %s" % (
            app, ', '.join(revs)
        )
        # best effort: a lost comment is tolerable, a lost tag is not
        retry(lambda: startrek_client.issues[ticket].comments.create(
            text=comment + u"\n" + SIGN_COMMENT
        ))

        tag_added = retry(lambda: startrek_client.issues[ticket].update(
            tags=startrek_client.issues[ticket].tags + [deployed_tag]
        ))
        tagged_tickets += int(tag_added)
        all_tickets_with_tag &= tag_added

    return tagged_tickets, all_tickets_with_tag


def send_metrics(release, app, tagged_tickets, all_tickets_tagged):
    """
    Push the number of tagged tickets for a release into metrics.log.

    Delivery is best effort: a failed POST is logged and swallowed.
    """
    logging.info("sending metrics for release %s" % release.key)

    payload = {
        'metrics': [{
            'name': 'deployed_tags.%s.cnt' % app,
            'value': tagged_tickets,
            'context': {'release': release.key, 'app': app, 'all_tickets_tagged': all_tickets_tagged}
        }]
    }

    def post_metrics():
        requests.post(
            'http://intapi.direct.yandex.ru/metrics/add',
            json=payload,
            headers={'Content-Type': 'application/json', 'Accept': 'application/json'},
            timeout=2,
        )

    if not retry(post_metrics, tries_num=2):
        logging.error("failed to send metrics for release %s" % release.key)


def process_app(args):
    """
    Do the whole job for a single application (runs in a pool worker).

    :param args: tuple (app, opts, working_dir) — Pool.map passes a single
        argument, so the parameters are packed into one tuple
    :return: None on success, str(exception) on failure, so that one broken
        app does not interrupt processing of the others
    """
    try:
        app, opts, working_dir = args

        set_logger('%s.log' % app, opts.quiet)
        logging.info('start %s' % app)

        startrek_client = Startrek(token=STARTREK_TOKEN, useragent=SCRIPT_NAME)

        # releases[0] is only the diff base, hence releases[1:] below
        releases = get_recent_releases(startrek_client, app, opts.releases_number, opts.release)
        logging.info("found releases: %s" % ", ".join(release.key for release in releases[1:]))

        logging.info("start making revisions with correspondent tickets for %s" % app)
        if app in FRONTEND_PROJECT_NAMES:
            revs_with_tickets = get_revisions_with_tickets_for_frontend_release(releases, app)
        else:
            revs_with_tickets = get_revisions_with_tickets_arcadia(app, releases, working_dir)
        logging.info("revisions with correspondent tickets for %s are done" % app)

        for revs_tickets, release in zip(revs_with_tickets, releases[1:]):
            if not revs_tickets:
                continue

            # group revisions by the ticket they mention
            ticket2revs = {}
            for rev, ticket in revs_tickets:
                ticket2revs.setdefault(ticket, []).append(rev)

            logging.info("start tagging %d tickets for release %s" % (len(ticket2revs), release.key))
            tagged_tickets, all_tickets_with_tag = tag_tickets(startrek_client, ticket2revs, app)
            logging.info("tagging for release %s is done, tagged %d new tickets" % (release.key, tagged_tickets))

            if opts.send_metrics:
                send_metrics(release, app, tagged_tickets, all_tickets_with_tag)

            if all_tickets_with_tag:
                if TAG not in release.tags:
                    logging.info("all tickets are tagged, place tag '%s' on release %s" % (TAG, release.key))
                    retry(lambda: startrek_client.issues[release.key].update(
                        tags=startrek_client.issues[release.key].tags + [TAG]
                    ))
            else:
                # logging.warn is a deprecated alias of warning
                logging.warning("some tickets are missed in release %s, will try again next time" % release.key)

        logging.info('finished %s' % app)
        return None

    except Exception as e:
        # top-level boundary for the worker process: log and report upwards
        logging.exception("failed, unexpected exception")
        return str(e)


def set_logger(filename, quiet=False):
    """
    (Re)configure the root logger for the current process.

    :param filename: label embedded into every log line; when quiet is set it
        also forms the log file name
    :param quiet: when True, write to a dated file under LOG_PATH instead of
        stdout
    """
    # drop handlers left by a previous call so basicConfig takes effect again
    while logging.root.handlers:
        logging.root.removeHandler(logging.root.handlers[0])

    config = {
        'format': u'%(levelname)-8s [%(asctime)s] ' + filename + ': %(message)s',
        'level': logging.INFO,
    }
    if quiet:
        if not os.path.isdir(LOG_PATH):
            os.makedirs(LOG_PATH)
        today = datetime.datetime.now().strftime("%Y%m%d")
        config['filename'] = "%s/%s.%s" % (LOG_PATH, filename, today)

    logging.basicConfig(**config)


def check_release_is_deployed(ticket):
    """
    Validate that ``ticket`` is a closed release and resolve its application.

    Exits the process with an error message when the ticket does not exist,
    is not a release, is not closed, or its components match no known app.

    :param ticket: Startrek issue key, e.g. "DIRECT-12345"
    :return: application name, a key of apps_conf
    """
    startrek_client = Startrek(token=STARTREK_TOKEN, useragent=SCRIPT_NAME)
    try:
        issue = startrek_client.issues[ticket]
    except Exception:
        # narrowed from a bare except: KeyboardInterrupt/SystemExit pass through
        sys.exit("can't find ticket '%s'" % ticket)

    if issue.type.key != 'release':
        sys.exit("'%s' is type of '%s' must be a 'release'" % (ticket, issue.type.key))
    if issue.status.key != 'closed':
        sys.exit("'%s' is in '%s' status but must be closed" % (ticket, issue.status.key))

    # reverse mapping: tracker component name -> app name
    component2app = {
        apps_conf[app]['tracker-component']: app
        for app in apps_conf if 'tracker-component' in apps_conf[app]
    }

    # the first known component wins
    for component in issue.components:
        if component.name in component2app:
            return component2app[component.name]

    sys.exit("can't recognize app of release '%s'" % ticket)


def run_main_work(opts):
    """
    Process every selected app in parallel, one pool worker per app.

    A shared arc working copy is created when at least one non-frontend app
    is selected and is always removed afterwards.

    :param opts: parsed command line options (see parse_options)
    :raises: re-raises pool errors, including multiprocessing.TimeoutError
    """
    set_logger('main.log', opts.quiet)
    logging.info("start")

    working_dir = None
    try:
        vcs = 'arc'
        if any(app not in FRONTEND_PROJECT_NAMES for app in opts.apps):
            # one shared working copy for all non-frontend apps
            logging.info('creating working copy using arc')
            working_dir = changelog_utils.checkout_working_copy('', vcs)
            logging.info('working copy is created in %s' % working_dir)

        processes_pool = multiprocessing.Pool(processes=len(opts.apps))
        results = processes_pool.map_async(process_app, [(app, opts, working_dir) for app in opts.apps])

        try:
            # opts.timeout is in minutes; falsy values pass through unchanged
            # (None means wait indefinitely), same as the original expression
            exceptions = results.get(timeout=opts.timeout and opts.timeout * 60)
            processes_pool.close()

        except multiprocessing.TimeoutError:
            logging.error("timeout for workers exceeded")
            processes_pool.terminate()
            raise
        except Exception:
            logging.exception("unexpected exception")
            processes_pool.terminate()
            raise
        finally:
            processes_pool.join()

        # each entry is None on success or str(exception) on failure
        if any(exceptions):
            logging.error("some apps are failed")
            for error, app in zip(exceptions, opts.apps):
                if error:
                    logging.error("exception during proccessing '%s': %s, %s" % (app, type(error), error))
        else:
            logging.info("success for all apps")
    finally:
        if working_dir:
            logging.info("trying to remove working copy %s" % working_dir)
            changelog_utils.remove_working_copy(working_dir, vcs)
            logging.info("working copy successfully removed")

    logging.info("finish")


def main():
    """Entry point: run the job and optionally report the outcome to juggler."""
    opts = parse_options()

    try:
        run_main_work(opts)
        if opts.juggler:
            dj.queue_events([dict(service=JUGGLER_SERVICE_NAME, status='OK', description='OK')])
    except Exception as e:
        # without -j the caller should see the traceback as-is
        if not opts.juggler:
            raise
        dj.queue_events([dict(
            service=JUGGLER_SERVICE_NAME, status='CRIT', description='exception %s %s' % (type(e), e)
        )])


# Script entry point.
if __name__ == "__main__":
    main()
