import cloudpickle as pickle
import datetime
import json
from airflow.configuration import conf

class DateTimeEncoder(json.JSONEncoder):
    """JSON encoder with support for the :mod:`datetime` types.

    ``datetime``/``date``/``time`` values are emitted as ISO-8601 strings;
    ``timedelta`` values are emitted as a time-of-day string. Everything
    else defers to :class:`json.JSONEncoder`, which raises ``TypeError``.
    """

    def default(self, obj):
        """Return a JSON-serializable representation of *obj*.

        Raises:
            TypeError: if *obj* is not one of the supported datetime
                types (via the base-class fallback).
        """
        if isinstance(obj, (datetime.datetime, datetime.date, datetime.time)):
            return obj.isoformat()
        if isinstance(obj, datetime.timedelta):
            # Represent the delta as the time-of-day reached by adding it
            # to datetime.min.  NOTE: this wraps silently for deltas of
            # 24 hours or more (preserved original behavior).
            return (datetime.datetime.min + obj).time().isoformat()
        # Bug fix: the original fell through and implicitly returned None,
        # so unsupported objects were encoded as JSON null instead of
        # raising TypeError as the JSONEncoder contract requires.
        return super().default(obj)

# Monkey-patch json's module-level default encoder so that any json.dumps()
# call made without an explicit ``cls=``/``default=`` picks up the
# datetime-aware encoder above.
# NOTE(review): ``json._default_encoder`` is a private CPython attribute --
# confirm this hack still works on the targeted Python versions.
json._default_encoder = DateTimeEncoder()

# ``{serialized_dag}`` is a render-time template placeholder: this file is
# formatted with the cloudpickle-serialized DAG bytes before being executed.
# Pre-rendering, this line parses as a set literal, so the module is only
# runnable after substitution.
encoded = {serialized_dag}
# SECURITY: pickle.loads executes arbitrary code embedded in the payload --
# the rendered bytes must come from a trusted source only.
dag = pickle.loads(encoded)
# Route every task of the deserialized DAG to the Celery queue configured
# under [celery] default_queue.
for task in dag.tasks:
    task.queue = conf.get('celery', 'default_queue')
