From 94314df941a7c1880efd259877a18c147f651402 Mon Sep 17 00:00:00 2001
From: Guofu Li
Date: Wed, 24 Aug 2022 17:57:19 +0800
Subject: [PATCH] bug fixes
---
src/loader/DDBHFTLoader.py | 18 +++++++++---------
1 file changed, 9 insertions(+), 9 deletions(-)
diff --git a/src/loader/DDBHFTLoader.py b/src/loader/DDBHFTLoader.py
index 5bcb6d4..fe420b8 100644
--- a/src/loader/DDBHFTLoader.py
+++ b/src/loader/DDBHFTLoader.py
@@ -27,7 +27,7 @@ from .DDBLoader import DDBLoader
class DDBHFTLoader(DDBLoader):
"""
0. Read the calendar data from sql-server and build the member variable df_calendar; df_calendar can be cached locally as a pickle
- |- `def make_calendar_df(self) -> df_calendar`
+ |- `def make_calendar_df(cls) -> df_calendar`
1. Create the database in ddb; the partitioning scheme is derived from the calendar data
|- `def create_ddb_database(self, df_calendar) -> void`
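A minimal sketch (not from this repository) of the local pickle cache that the docstring describes for `make_calendar_df`; the cache file name, the connection object, and the SQL query are illustrative assumptions.

```python
# Illustrative only: cache the calendar DataFrame locally as a pickle,
# falling back to sql-server when no cache file exists.
import os
import pandas as pd

def make_calendar_df(conn, cache_path="calendar.pkl") -> pd.DataFrame:
    if os.path.exists(cache_path):
        return pd.read_pickle(cache_path)                           # reuse the local cache
    df_calendar = pd.read_sql("SELECT * FROM calendar", conn)       # hypothetical query
    df_calendar.to_pickle(cache_path)                               # save for the next run
    return df_calendar
```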
@@ -416,7 +416,7 @@ class DDBHFTLoader(DDBLoader):
with tqdm(total=num_rows, leave=False) as sub_pbar:
for _ in pool.imap_unordered(
functools.partial(
- dump_stock_daily_to_ddb,
+ DDBHFTLoader.dump_stock_daily_to_ddb,
type_name = type_name,
stock_id = stock_id
),
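The bare name `dump_stock_daily_to_ddb` does not resolve inside the method body (class attributes are not in lexical scope), which is presumably the bug here; qualifying it with `DDBHFTLoader` also keeps the target picklable for the worker processes. A hedged sketch of the same pattern, with illustrative class and function names:

```python
# Sketch (illustrative names): a staticmethod referenced through its class
# both resolves inside another method and pickles by qualified name, which
# multiprocessing workers require.
import functools
import multiprocessing

class Loader:
    @staticmethod
    def dump_one(row, type_name, stock_id):
        return (stock_id, type_name, row)      # stand-in for the real insert logic

    def dump_all(self, rows, type_name, stock_id):
        worker = functools.partial(
            Loader.dump_one,                   # class-qualified, not the bare name
            type_name=type_name,
            stock_id=stock_id,
        )
        with multiprocessing.Pool(2) as pool:
            return list(pool.imap_unordered(worker, rows))

if __name__ == "__main__":
    print(Loader().dump_all([1, 2, 3], "Tick", "000001.SZ"))
```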
@@ -434,7 +434,7 @@ class DDBHFTLoader(DDBLoader):
Function used for multi-process insertion into ddb
"""
blob = gzip.decompress(blob)
- dataArray = eval(f"ProtoBuffEntitys.{type_name}Message_pb2.{type_name}Array()")
+ dataArray = eval(f"{type_name}Message_pb2.{type_name}Array()")
dataArray.ParseFromString(blob)
data_dict_list = [
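The `+` line above drops the `ProtoBuffEntitys.` prefix, presumably because the `*_pb2` modules are imported directly into this file's namespace. A hedged alternative sketch that resolves the message class without `eval`; the module/class naming pattern is taken from that line, everything else is assumed:

```python
# Sketch only: resolve "<Type>Message_pb2.<Type>Array" dynamically with
# importlib/getattr instead of eval, then parse the gzipped blob.
import gzip
import importlib

def parse_blob(blob: bytes, type_name: str):
    module = importlib.import_module(f"{type_name}Message_pb2")   # e.g. TickMessage_pb2
    data_array = getattr(module, f"{type_name}Array")()           # e.g. TickArray()
    data_array.ParseFromString(gzip.decompress(blob))
    return data_array
```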
@@ -452,10 +452,10 @@ class DDBHFTLoader(DDBLoader):
df = pd.DataFrame(data_dict_list)
#df['code'] = make_symbol(df['code'])
df['code'] = stock_id
- df['m_nDate'] = self.make_date(df['m_nDate'])
- df['m_nTime'] = df['m_nDate'] + self.make_time(df['m_nTime'])
+ df['m_nDate'] = DDBLoader.make_date(df['m_nDate'])
+ df['m_nTime'] = df['m_nDate'] + DDBLoader.make_time(df['m_nTime'])
for field_name in array_type_list:
- df[field_name] = self.make_nparray(df[field_name])
+ df[field_name] = DDBLoader.make_nparray(df[field_name])
#print(f"Did create ddb table for dataframe of shape {df.shape}")
# self.make_table_skeleton(type_name, df.shape[0])
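For orientation, an illustrative sketch of what the three `DDBLoader` staticmethods called above plausibly do with these columns; the integer encodings (yyyymmdd dates, milliseconds since midnight) and the implementations are assumptions, the real helpers live in `DDBLoader`.

```python
# Assumed behaviour only -- not the code in DDBLoader.
import numpy as np
import pandas as pd

def make_date(col: pd.Series) -> pd.Series:
    # e.g. 20220824 -> Timestamp("2022-08-24")   (yyyymmdd assumption)
    return pd.to_datetime(col.astype(int).astype(str), format="%Y%m%d")

def make_time(col: pd.Series) -> pd.Series:
    # e.g. 34200000 -> Timedelta("09:30:00")     (milliseconds assumption)
    return pd.to_timedelta(col, unit="ms")

def make_nparray(col: pd.Series) -> pd.Series:
    # wrap protobuf repeated fields as numpy arrays so the column has a
    # uniform element type before uploading to DolphinDB
    return col.apply(np.array)
```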
@@ -468,7 +468,7 @@ class DDBHFTLoader(DDBLoader):
Function used for multi-process insertion into ddb
"""
df_table_name = type_name
- df = make_stock_daily_df(row[2], type_name, stock_id)
+ df = DDBHFTLoader.make_stock_daily_df(row[2], type_name, stock_id)
ddb_sess = ddb.session(DDBLoader.ddb_config['host'], 8848)
ddb_sess.login(DDBLoader.ddb_config['username'], DDBLoader.ddb_config['password'])
@@ -477,8 +477,8 @@ class DDBHFTLoader(DDBLoader):
# Because 'CHUNK[xxx] does not exist.' occasionally occurred when loading Tick data, try substituting the `append!` function here
ddb_sess.run("append!(loadTable('{dbPath}', `{partitioned_table_name}), {df_table_name})".format(
#ddb_sess.run("tableInsert(loadTable('{dbPath}', `{partitioned_table_name}), {df_table_name})".format(
- dbPath = DDBLoader.ddb_path,
- partitioned_table_name = type_name + DDBLoader.ddb_partition_table_suffix,
+ dbPath = DDBHFTLoader.ddb_path,
+ partitioned_table_name = type_name + DDBHFTLoader.ddb_partition_table_suffix,
df_table_name = df_table_name
))
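Not shown in this hunk: for `{df_table_name}` to be visible to `append!` on the server, the pandas DataFrame has to be uploaded into the session under that name first. A hedged sketch of that surrounding flow; the helper name and parameters are illustrative, and the `append!` script mirrors the lines above:

```python
# Sketch of the upload + append! flow; parameter names are illustrative.
import dolphindb as ddb

def append_to_partition(sess: ddb.session, df, type_name: str,
                        db_path: str, partition_suffix: str) -> None:
    df_table_name = type_name
    sess.upload({df_table_name: df})    # expose the DataFrame as a server-side variable
    # the patch switched from tableInsert to append! after occasional
    # 'CHUNK[xxx] does not exist.' errors (see the comment above)
    sess.run("append!(loadTable('{dbPath}', `{tbl}), {df})".format(
        dbPath=db_path,
        tbl=type_name + partition_suffix,
        df=df_table_name,
    ))
```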