#!/usr/bin/env python
# -*- coding:utf-8 -*-
'''
Воркер, который загружает файлы на s3.mds.yandex.net без перезаписи.
Смотрит на каталог /var/lib/dt-s3-uploader-worker, каждый файл в каталоге - задание.
Формат файла с заданием: <полный путь до каталога который нужно загрузить> <путь в s3 куда будет загружен каталог>
'''
import os
import json
import time
import logging
import sys

from boto.s3.connection import S3Connection
from boto.s3.key import Key

def run():
    """Main worker loop: poll the task directory and upload each task to S3.

    A task is a plain file in /var/lib/dt-s3-uploader-worker whose content is
    '<local source directory> <destination S3 path>'.  While a task is being
    processed it is renamed to '<task>.processing'; on failure it is renamed
    to '<task>.failed' and the error text is appended to that file.  Sleeps
    10 seconds between polls when the directory is empty.  Never returns.
    """
    logging.basicConfig(
        format=u'%(levelname)-8s [%(asctime)s] %(message)s',
        level=logging.INFO,
        filename='/var/log/yandex/dt-s3-uploader-worker/dt-s3-uploader-worker.log',
    )
    my_dir = '/var/lib/dt-s3-uploader-worker'

    logging.info('dt-s3-uploader-worker is started')

    while True:
        # Pick up plain task files only; skip already-failed tasks and
        # leftover '.processing' files (e.g. from a previous crashed run),
        # which would otherwise be re-queued as new tasks.
        to_s3_upload = [
            task for task in os.listdir(my_dir)
            if os.path.isfile(os.path.join(my_dir, task))
            and not task.endswith('.failed')
            and not task.endswith('.processing')
        ]
        for task in to_s3_upload:
            file_task = os.path.join(my_dir, task)
            file_task_processing = file_task + '.processing'
            file_task_failed = file_task + '.failed'

            # Rename first so the task cannot be picked up twice.
            os.rename(file_task, file_task_processing)
            logging.info('%s --> %s', file_task, file_task_processing)

            try:
                # Close the handle deterministically; split on any run of
                # whitespace (maxsplit=1) so extra spaces or a trailing
                # newline do not break the unpack like split(' ') did.
                with open(file_task_processing) as f:
                    src, dst = f.read().split(None, 1)
                src = src.strip()
                dst = dst.strip()

                if not os.path.isdir(src):
                    raise ValueError('src: %s не является каталогом!' % (src))

                upload_dir(src, dst)
            except Exception as e:
                # Best-effort worker: any failure marks the task '.failed'
                # with the error appended, and the loop moves on.
                error = str(e)
                os.rename(file_task_processing, file_task_failed)
                logging.info('%s --> %s', file_task_processing, file_task_failed)
                with open(file_task_failed, "a") as myfile:
                    myfile.write('\n%s\n' % (error))
                logging.error(error)
            finally:
                # On success the '.processing' file is deleted; on failure it
                # was already renamed away, so the isfile() check guards that.
                if os.path.isfile(file_task_processing):
                    os.remove(file_task_processing)

        if not to_s3_upload:
            time.sleep(10)


def upload_dir(src, dst):
    """Recursively upload every file under *src* to the 'direct-dna' bucket
    on s3.mds.yandex.net without overwriting existing keys.

    The S3 key for each file is *dst* joined with the part of the file's
    absolute path after '/dna/build/' — so *src* is expected to live under
    a '/dna/build/' directory; a path without that marker raises ValueError
    at the unpack below.

    :param src: local directory to upload (walked with os.walk)
    :param dst: S3 key prefix the uploaded files are placed under
    """
    # Credentials are stored one per file; strip the trailing newline so a
    # raw read() does not hand boto a malformed key.
    with open('/etc/direct-tokens/s3_robot-twilight_access_key_id') as f:
        aws_access_key_id = f.read().strip()
    with open('/etc/direct-tokens/s3_robot-twilight_secret_access_key') as f:
        aws_secret_access_key = f.read().strip()

    s3 = S3Connection(
        host='s3.mds.yandex.net',
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
    )

    bucket = s3.get_bucket('direct-dna')

    for root, _dirs, files in os.walk(src):
        for fname in files:  # renamed from 'file' to avoid shadowing the builtin
            abs_path = os.path.join(root, fname)
            # Everything after '/dna/build/' becomes the key suffix.
            _, s3_key = abs_path.split('/dna/build/', 1)
            s3_key = os.path.join(dst, s3_key)
            logging.info('Local file %s --> to S3 file %s', abs_path, s3_key)
            # No-overwrite policy: skip keys that are already in the bucket.
            if is_file_exists(bucket, s3_key):
                logging.info('S3 file %s already exists!' % (s3_key))
                continue
            k = Key(bucket)
            k.key = s3_key
            k.set_contents_from_filename(abs_path)

def is_file_exists(bucket, file_key):
    """Return True if *file_key* already exists in *bucket*, else False.

    boto's Bucket.get_key() returns a Key object or None; convert that to a
    real boolean so the function delivers what its name promises (callers
    that only test truthiness are unaffected).
    """
    return bucket.get_key(file_key) is not None

if __name__ == '__main__':
    # Entry point: start the polling loop (blocks forever).
    run()

