@ -1,4 +1,4 @@
from code_list import code_list_pickel
from data. code_list_all import code_list_up , code_list_down
from TSLfm import TSLfm
from DDBfm import DDBfm
import pandas as pd
@ -9,13 +9,6 @@ import sys
# Environment tag embedded into the log file name below.
# NOTE(review): the literal appears as ' prd ' here, but this file shows
# whitespace-mangling throughout (spaces injected inside quotes) — the real
# value is presumably 'prd'; confirm against the original source.
running_which_env = ' prd '
# Project root: the parent directory of the directory containing this script.
ROOT_DIR = abspath ( join ( dirname ( abspath ( __file__ ) ) , " .. " ) )
# Drop loguru's default sink, then install two sinks:
#   1) stderr at WARNING level and above,
#   2) a per-run file under <ROOT_DIR>/logs named with a timestamp and the
#      environment tag, at INFO level, rotated at 10 MB and zip-compressed.
logger . remove ( )
logger . add ( sys . stderr , level = " WARNING " )
logger . add ( ROOT_DIR + " /logs/ { time:YYYYMMDD-HHmmss} " + f " _ { running_which_env } .log " ,
rotation = " 10 MB " , compression = " zip " , level = " INFO " )
def run_add_1day_code_init_minKline ( date , code_list ) :
"""
too slow . deprecated .
@ -29,7 +22,7 @@ def run_add_1day_code_init_minKline(date, code_list):
else :
code_list_filtered . append ( code )
if len ( code_list_filtered ) == 0 :
return 0
return
with TSLfm ( ) as tsl :
df = tsl . process_result_data_type (
@ -81,7 +74,7 @@ def run_pool_add_by_datelist_codeinit(typ, date_list, code_list, if_check=1):
if not df . empty :
df_list . append ( df )
if not df_list :
return 0
return
df_all = pd . concat ( df_list )
ddb2 = DDBfm ( running_which_env , pool = True )
@ -147,6 +140,7 @@ def run():
def run_pool_dates_by_code_init_n_group ( typ = ' mink ' , code_gp_amt = 10 , date_gp_amt = 10 , start_date = ' 20220101 ' , end_date = ' 20221031 ' , if_check = 1 , code_dict_by = ' init ' ) :
logger . info ( " Running run_pool_dates_by_group " )
code_list_pickel = code_list_down + code_list_up
all_code_dict_by_init = { }
for c in code_list_pickel :
@ -189,6 +183,8 @@ def run_pool_dates_by_code_init_n_group(typ='mink', code_gp_amt=10, date_gp_amt=
logger . info (
f " Getting { code_init } (no. { ind } / { num_of_code_group } of date_group { group_no } / { date_gp_amt } ) " )
code_list = all_code_dict [ code_init ]
logger . info ( date_list )
logger . info ( code_list )
if typ == ' mink ' :
# logger.info('Running mink')
logger . info ( date_list )
@ -204,6 +200,13 @@ def run_pool_dates_by_code_init_n_group(typ='mink', code_gp_amt=10, date_gp_amt=
if __name__ == ' __main__ ' :
ROOT_DIR = abspath ( join ( dirname ( abspath ( __file__ ) ) , " .. " ) )
logger . remove ( )
logger . add ( sys . stderr , level = " WARNING " )
# logger.add(ROOT_DIR+"/logs/{time:YYYYMMDD-HHmmss}"+f"_{running_which_env}.log",
# rotation="10 MB", compression="zip", level="INFO")
import time
# run_create_hft_db() # including two tables
@ -223,6 +226,10 @@ if __name__ == '__main__':
split_code_into_howmany_groups_no = 5
split_date_into_howmany_groups = 20
logger . add ( ROOT_DIR + " /logs/ { time:YYYYMMDD-HHmmss} " + f " _ { running_which_env } _ { typ } _ { st_d } _ { en_d } _ { if_check } _ { split_code_into_howmany_groups_no } _ { split_date_into_howmany_groups } .log " ,
rotation = " 10 MB " , compression = " zip " , level = " INFO " )
logger . warning (
f " Going to run * { typ } * from { st_d } to { en_d } with if_check dupliactes= { if_check } in * { running_which_env } *, plz check if this info is correct. \n \n \n \n " )
run_pool_dates_by_code_init_n_group ( typ = typ , code_gp_amt = split_code_into_howmany_groups_no ,