import json
import threading
import time
import traceback

from base.math_utils.math_impl.mainbody_context_impl import CommonMainbodyContext
from base.math_utils.math_impl.news_cheak_impl import SupervisedCommonNewsCheak
from base.model.search_word_model import SearchWebdriverModel
from base.model.smart_deep_model import SmartDeepWebdriverModel
from base.utils import mongo_helper, config
from base.utils.log import logger
from base.utils.redis_helper import handle_redis_27

# News channels (Chinese financial media outlets) to crawl; each gets its
# own worker thread — see xiange_run().
SOURCE_LIST = ["新浪财经", "中国经营网", "中国经济网", "北京商报", "21财经", "四川新闻网", "格隆汇", "智通财经", "野马财经",
               "一点财经", "GPLP犀牛财经", "面包财经", "雷帝网"]
# Keywords searched on every channel by the search workers.
SEARCH_WORD = ["联易融"]


def single_serve_deep(source):
    """Endlessly deep-crawl one news channel, pausing 5 minutes between rounds.

    Each round looks up the seed row for *source* in the Mongo seed table,
    then hands its URL to a fresh SmartDeepWebdriverModel. Failures are
    logged and the next round starts after the sleep; the loop never exits
    on its own (intended to run inside a worker thread).

    Args:
        source: channel name, used as the Mongo seed lookup key.
    """
    while True:
        try:
            logger.info(f"当前被采集渠道{source},开始采集时间{time.time()}")
            # 'seed_filter' instead of 'filter' to avoid shadowing the builtin.
            seed_filter = {
                'name': source,
            }
            row = mongo_helper.query_one(table=config.DB_NEWS_SEED, filter=seed_filter)
            if row is None:
                # No seed configured for this channel — skip this round
                # instead of crashing on row['url']. (finally still sleeps.)
                logger.warning(f"未找到渠道种子 source={source}")
                continue
            url = row['url']
            logger.info(f"deep_model实例化 source={source}, url={url}")
            model_deep = SmartDeepWebdriverModel()
            model_deep.deal(url=url, source=source)
        except Exception:
            # Narrowed from bare `except:` so KeyboardInterrupt/SystemExit can
            # still stop the worker; logger.exception records the traceback
            # through the project logger instead of raw stderr.
            logger.exception(f"deep 采集失败 source={source}")
        finally:
            time.sleep(300)


def single_serve_search(source):
    """Endlessly run keyword searches against one news channel.

    For every word in SEARCH_WORD, builds a SearchWebdriverModel wired with
    the supervised news check and mainbody-context processors, runs it, and
    sleeps 5 minutes before the next word. Loops forever; failures are
    logged and do not stop the loop (intended to run inside a worker thread).

    Args:
        source: channel name passed to the model as `name`.
    """
    while True:
        for word in SEARCH_WORD:
            try:
                logger.info(f"search_model实例化 word={word}, source={source}")
                model_search = SearchWebdriverModel(
                    [SupervisedCommonNewsCheak(), CommonMainbodyContext()])
                model_search.deal(search_word=word, name=source)
            except Exception:
                # Narrowed from bare `except:` so KeyboardInterrupt/SystemExit
                # can still stop the worker; use the project logger instead of
                # traceback.print_exc() for consistency with the rest of the file.
                logger.exception(f"search 采集失败 word={word}, source={source}")
            finally:
                time.sleep(300)


def xiange_run():
    """Spawn one search worker thread per configured source.

    The deep-crawl worker is currently disabled (kept commented out below).
    Threads are non-daemon, so they keep the process alive after this
    function returns.
    """
    for source in SOURCE_LIST:
        # Log through the project logger instead of a bare print() for
        # consistency with the rest of the file.
        logger.info(f"启动采集线程 source={source}")
        # threading.Thread(target=single_serve_deep, args=(source,)).start()
        threading.Thread(target=single_serve_search, args=(source,)).start()


if __name__ == '__main__':
    # Entry point: start all worker threads and return; the non-daemon
    # workers then keep the process running indefinitely.
    xiange_run()
