Guofu Li 2 years ago
commit a479f1c26c

@@ -0,0 +1,513 @@
import importlib
import gzip
import pickle
import functools
from pprint import pprint
from pathlib import Path
from tqdm import tqdm
#from tqdm.contrib.concurrent import process_map
from multiprocessing import Pool
import numpy as np
import pandas as pd
import dolphindb as ddb
import dolphindb.settings as keys
import sqlalchemy as sa
import ProtoBuffEntitys
def make_stock_daily_df(blob, type_name, stock_id):
blob = gzip.decompress(blob)
dataArray = eval(f"ProtoBuffEntitys.{type_name}Message_pb2.{type_name}Array()")
dataArray.ParseFromString(blob)
data_dict_list = [
{field.name : val for field, val in entry.ListFields()}
for entry in dataArray.dataArray
]
array_type_list = [
field.name
for field, val in dataArray.dataArray[0].ListFields()
if isinstance(field.default_value, list)
]
#pprint(array_type_list)
df = pd.DataFrame(data_dict_list)
#df['code'] = make_symbol(df['code'])
df['code'] = stock_id
df['m_nDate'] = make_date(df['m_nDate'])
df['m_nTime'] = df['m_nDate'] + make_time(df['m_nTime'])
for field_name in array_type_list:
df[field_name] = make_nparray(df[field_name])
#print(f"Did create ddb table for dataframe of shape {df.shape}")
# self.make_table_skeleton(type_name, df.shape[0])
return df
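# Layout of the returned DataFrame (a sketch, nothing below enforces it): one row per protobuf
# entry, `code` set to the stock id, `m_nDate` as a Timestamp, `m_nTime` as date + intraday offset,
# and every repeated protobuf field converted to np.array so it can be uploaded as an array column.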
def dump_stock_daily_to_ddb(row, type_name, stock_id):
df_table_name = type_name
df = make_stock_daily_df(row[2], type_name, stock_id)
ddb_sess = ddb.session(DDBLoader.ddb_config['host'], 8848)
ddb_sess.login(DDBLoader.ddb_config['username'], DDBLoader.ddb_config['password'])
ddb_sess.upload({df_table_name : df})
ddb_sess.run("tableInsert(loadTable('{dbPath}', `{partitioned_table_name}), {df_table_name})".format(
dbPath = DDBLoader.ddb_path,
partitioned_table_name = type_name + DDBLoader.ddb_partition_table_suffix,
df_table_name = df_table_name
))
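# Note: dump_stock_daily_to_ddb runs inside multiprocessing worker processes (see
# DDBLoader.dump_hft_to_ddb), so it opens its own short-lived DolphinDB session instead of
# reusing the parent's session, which cannot safely be shared across processes.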
def make_symbol(series):
return series.astype('int32').astype('str')\
.apply(str.zfill, args=(6,))\
.apply(lambda code : \
code + '.SH' if code[0] == '6' \
else code + '.SZ')
def make_date(series):
return pd.to_datetime(
series.astype(str), format='%Y%m%d')
def make_nparray(series):
return series.apply(lambda x : np.array(x))
def make_time(series):
# m_nTime is assumed to be encoded as HHMMSSmmm; convert every component to milliseconds.
s_hr = series // 10000000 * 3600000
s_min = series % 10000000 // 100000 * 60000
s_sec = series % 100000 // 1000 * 1000
s_ms = series % 1000
return pd.to_timedelta(s_hr + s_min + s_sec + s_ms, unit='ms')
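# Rough sanity check of the helpers above (hypothetical values, assuming m_nDate is encoded
# as YYYYMMDD and m_nTime as HHMMSSmmm):
#   make_date(pd.Series([20220104]))  -> 2022-01-04
#   make_time(pd.Series([93015123]))  -> 0 days 09:30:15.123000
#   make_symbol(pd.Series([600000]))  -> '600000.SH' (codes starting with '6' get .SH, others .SZ)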
class DDBLoader(object):
"""
0. Read the calendar data from SQL Server and build the member variable df_calendar; df_calendar can be cached locally as a pickle.
|- `def make_calendar_df(self) -> df_calendar`
1. Create the database in DolphinDB (the partition scheme is derived from the calendar data).
|- `def create_ddb_database(self, df_calendar) -> void`
|- `def load_ddb_database(self) -> void`
2. Create the calendar table in the DolphinDB database.
|- `def create_ddb_calendar(self, df_calendar) -> void`
3. Create the partitioned (DFS) table structures in DolphinDB.
|- `create_ddb_partition_table(self, hft_type_name)`
|- `_make_table_skeleton(self, hft_type_name, capacity) -> memory_table_name`
4. Transcribe the high-frequency data from SQL Server into DolphinDB.
|- `dump_hft_to_ddb(self, type_name, stock_id, trade_date=None)`
"""
hft_type_list = ['KLine', 'Order', 'Tick', 'TickQueue', 'Transe']
protobuff_name_dict = {
name : f"{name}Message_pb2" for name in hft_type_list
}
protobuff_module_dict = {
type_name : importlib.import_module(f".{module_name}", package='ProtoBuffEntitys')
for type_name, module_name in protobuff_name_dict.items()
}
protobuff_desc_dict = {
type_name : eval(f"ProtoBuffEntitys.{module_name}.{type_name}Array.{type_name}Data.DESCRIPTOR")
for type_name, module_name in protobuff_name_dict.items()
}
mssql_name_dict = {
type_name : (
f"{type_name}" if type_name != 'TickQueue' \
else f"TickQue"
) for type_name in hft_type_list
}
# The database path and the database handle name do not have to be identical.
ddb_path = "dfs://hft_stock_ts"
ddb_dbname = "db_stock_ts"
ddb_memory_table_suffix = "Memory"
ddb_partition_table_suffix = "Partitioned"
# The calendar table does not need partitioning, so a separate database would have to be created for it.
# That database could even be a plain CSV; the difference between the two is not yet clear.
#ddb_calendar_path = "dfs://daily_calendar"
#ddb_calendar_dbname = "db_calendar"
ddb_calendar_table_name = "Calendar"
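# Maps column names, and protobuf FieldDescriptor.type codes, to DolphinDB column types.
# In google.protobuf.descriptor the numeric codes below mean: 1 = TYPE_DOUBLE, 3 = TYPE_INT64,
# 5 = TYPE_INT32, 13 = TYPE_UINT32.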
col_type_mapping = {
'code' : 'SYMBOL',
'm_nDate' : 'DATE',
'm_nTime' : 'TIME',
1 : 'FLOAT',
3 : 'INT',
5 : 'INT',
13 : 'INT',
}
mssql_config = {
'host' : '192.168.1.7',
'username' : 'sa',
'password' : 'passw0rd!'
}
ddb_config = {
'host' : '192.168.1.7',
'username' : 'admin',
'password' : '123456'
}
num_workers = 8
default_table_capacity = 10000
ddb_dump_journal_fname = 'ddb_dump_journal.csv'
def __init__(self):
self.mssql_engine = sa.create_engine(
"mssql+pyodbc://{username}:{password}@{host}/master?driver=ODBC+Driver+18+for+SQL+Server".format(**self.mssql_config),
connect_args = {
"TrustServerCertificate": "yes"
}, echo=False
)
self.ddb_sess = ddb.session(self.ddb_config['host'], 8848)
self.ddb_sess.login(self.ddb_config['username'], self.ddb_config['password'])
def init_ddb_database(self, df_calendar):
"""
1. Create the DolphinDB database
2. Create the calendar table
3. Create the partitioned tables
"""
# It is more convenient to have df_calendar passed in from the outside.
#df_calendar = self.make_calendar_df()
self.create_ddb_database(df_calendar)
self.create_ddb_calendar(df_calendar)
for hft_type_name in self.hft_type_list:
self.create_ddb_partition_table(hft_type_name)
def init_ddb_table_data(self, df_calendar, num_workers=None):
"""
Loop over every stock and transcribe its data into the partitioned tables.
"""
stock_list = df_calendar['code'].unique().astype('str')
# A Pool object must not be created repeatedly, so create it once at the outermost level and pass it into the per-stock calls.
with Pool(self.num_workers if num_workers is None else num_workers) as pool:
for hft_type_name in self.hft_type_list:
print('Will work on hft type:', hft_type_name)
with tqdm(stock_list) as pbar:
for stock_id in pbar:
pbar.set_description(f"Working on stock {stock_id}")
self.dump_hft_to_ddb(hft_type_name, stock_id, pbar=pbar, pool=pool)
def _get_stock_date_list(self, cache=False):
"""
Deprecated: This function is replaced by `create_ddb_calendar()`.
"""
if cache:
with open('tmp.pkl', 'rb') as fin:
stock_list, date_list = pickle.load(fin)
else:
with self.mssql_engine.connect() as conn:
# Query the KLine table mainly because it is the smallest one.
stat = "select distinct S_INFO_WINDCODE, TRADE_DT from Level2BytesKline.dbo.KLine"
rs = conn.execute(stat)
stock_date_list = [(stock_name, date) for stock_name, date in rs.fetchall()]
stock_list, date_list = zip(*stock_date_list)
# cache
#with open('tmp.pkl', 'wb') as fout:
# pickle.dump((stock_list, date_list), fout)
return pd.Series(stock_list, dtype='str').unique(), \
pd.Series(date_list, dtype='datetime64[D]').unique()
def create_ddb_database(self, pd_calendar):
# Build `stock_list` and `date_list` from `pd_calendar`.
stock_list = pd_calendar['code'].unique().astype('str')
date_list = pd_calendar['m_nDate'].unique().astype('datetime64[D]')
# All of the stock high-frequency data can be kept in one database, as different tables.
# The partition strategy is bound to the database, so every table in the same database must use the same partition strategy.
# For the stock high-frequency data we use a COMPO partition whose two sub-databases partition on the m_nDate and code fields
# (a VALUE partition on the date and, in the script below, a HASH partition on the stock code).
if self.ddb_sess.existsDatabase(self.ddb_path):
print('Will drop database:', self.ddb_path)
self.ddb_sess.dropDatabase(self.ddb_path)
# To create a COMPO-partitioned database, the two single-level sub-databases must be created first.
# Here the sub-databases partition first by date and then by stock.
# Please note that when creating a DFS database with COMPO domain,
# the parameter dbPath for each partition level must be either an empty string or unspecified.
db_date = self.ddb_sess.database('db_date', partitionType=keys.VALUE, partitions=date_list, dbPath='')
# It seems more convenient to use a DolphinDB script statement directly here;
# 5 is the HASH partition type, hashing the SYMBOL stock code into 50 buckets.
self.ddb_sess.run("""
db_stock = database("", 5, [SYMBOL, 50])
""")
#self.ddb_sess.run("""
# db_stock = database("", 1, symbol({partitions}))
#""".format(
# partitions = '`' + '`'.join(stock_list)
#))
self.ddb_sess.run("""
{dbName} = database(
directory = '{dbPath}',
partitionType = COMPO,
partitionScheme = [db_date, db_stock],
engine = "TSDB")
""".format(
dbName = self.ddb_dbname,
dbPath = self.ddb_path
))
self._load_ddb_dump_journal(recreate=True)
def load_ddb_database(self):
db_date = self.ddb_sess.database('db_date', dbPath='')
db_stock = self.ddb_sess.database('db_stock', dbPath='')
self.ddb_sess.run("{dbName} = database(directory='{dbPath}')".format(
dbName = self.ddb_dbname,
dbPath = self.ddb_path
))
self._load_ddb_dump_journal()
def _load_ddb_dump_journal(self, recreate=False):
if recreate or not Path(self.ddb_dump_journal_fname).exists():
print('Will create new dump journal.')
self.dump_journal_writer = open(self.ddb_dump_journal_fname, 'w')
self.dump_journal_writer.write("type_name,stock_id,status\n")
self.dump_journal_writer.flush()
else:
print('Will load previous dump journal.')
self.dump_journal_writer = open(self.ddb_dump_journal_fname, 'a')
self.dump_journal_df = pd.read_csv(self.ddb_dump_journal_fname)
self.dump_journal_df.set_index(['type_name', 'stock_id', 'status'], inplace=True)
# dump_journal_df is only loaded once at creation time and never written to afterwards, so its index can be sorted here.
self.dump_journal_df.sort_index(inplace=True)
print('Did load the dump journal, shape', self.dump_journal_df.shape)
#pprint(self.dump_journal_df.head())
def create_ddb_calendar(self, df_calendar):
mem_table = self.ddb_calendar_table_name + self.ddb_memory_table_suffix
per_table = self.ddb_calendar_table_name
# 1. Create a temporary in-memory table.
# The number of calendar rows is roughly (number of stocks) * (number of trading days).
self.ddb_sess.run("""
{table_name} = table({capacity}:0, {col_names}, [{col_types}]);
""".format(
table_name = mem_table,
capacity = 5000 * 1000,
col_names = '`code`m_nDate',
col_types = "SYMBOL, DATE"
))
print('Did create the memory table')
# 2. Insert all (code, date) rows into the memory table.
appender = ddb.tableAppender(tableName=mem_table, ddbSession=self.ddb_sess)
num = appender.append(df_calendar)
print('Did append calendar data into ddb memory table, return code', num)
# 3. Before creating the persistent table, a database object would normally be created from its path.
# However, it appears that a single database can hold both partitioned and non-partitioned tables,
# so no new database is created here for now.
# Because the original database was created with the TSDB engine, sortColumns must be specified in createTable.
#self.ddb_sess.run("""
# {db_name} =
#""")
# 4. Create a persistent table directly from the memory table.
if self.ddb_sess.existsTable(self.ddb_path, per_table):
self.ddb_sess.dropTable(self.ddb_path, per_table)
self.ddb_sess.run("""
tableInsert(createTable(
dbHandle={ddb_dbname},
table={mem_table},
tableName=`{per_table},
sortColumns=`code`m_nDate,
compressMethods={{"m_nDate":"delta"}}
), {mem_table})
""".format(
ddb_dbname = self.ddb_dbname,
mem_table = mem_table,
per_table = per_table
))
print('Did create the persistent table with the memory table')
def make_calendar_df(self):
print('Will create calendar dataframe from SQL Server')
# Query the KLine table mainly because it is the smallest one.
with self.mssql_engine.connect() as conn:
stat = "select distinct S_INFO_WINDCODE, TRADE_DT from Level2BytesKline.dbo.KLine"
rs = conn.execute(stat)
stock_date_list = [(stock_name, date) for stock_name, date in rs.fetchall()]
df_calendar = pd.DataFrame(stock_date_list, columns=['code', 'm_nDate'])
df_calendar['m_nDate'] = make_date(df_calendar['m_nDate'])
print('Did make the DataFrame for calendar')
return df_calendar
def _make_table_skeleton(self, hft_type_name, table_capacity=default_table_capacity):
def _make_tbl_config(field_list):
"""
Build the standard DolphinDB column-name list and column-type list from the Descriptor.fields of a ProtoBuffEntity object.
"""
col_name_list, col_type_list = [], []
for desc in field_list:
col_name_list.append(desc.name)
# Columns with special handling; currently only the `code, `m_nDate and `m_nTime fields.
if desc.name in self.col_type_mapping:
col_type_list.append(self.col_type_mapping[desc.name])
# Otherwise, map the ProtoBuffEntity type number to the DolphinDB column type.
# If the default value is a list, the DolphinDB type gets a trailing '[]' to mark it as an array column.
# The ProtoBuffEntity type number only describes scalar types; arrays are detected via `default_value`.
else:
col_type = self.col_type_mapping[desc.type]
if isinstance(desc.default_value, list):
col_type += '[]'
col_type_list.append(col_type)
return col_name_list, col_type_list
desc_obj = self.protobuff_desc_dict[hft_type_name]
col_name_list, col_type_list = _make_tbl_config(desc_obj.fields)
table_name = hft_type_name + self.ddb_memory_table_suffix
print('-' * 80)
print('Will create table structure:', table_name)
self.ddb_sess.run("""
{table_name} = table({capacity}:0, {col_names}, [{col_types}]);
""".format(
table_name = table_name,
capacity = table_capacity,
col_names = '`' + '`'.join(col_name_list),
col_types = ', '.join([f"'{type_name}'" for type_name in col_type_list])
))
res = self.ddb_sess.run(f"schema({table_name}).colDefs")
pprint(res)
print('-' * 80)
return table_name
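# For illustration only (hypothetical, not executed here): with the KLine message from this
# commit, _make_table_skeleton('KLine', 10) would run roughly
#   KLineMemory = table(10:0, `code`m_nDate`m_nTime`m_nOpen`m_nHigh`m_nLow`m_nClose`m_iVolume`m_iTurover`m_nMatchItems,
#                       ['SYMBOL', 'DATE', 'TIME', 'FLOAT', 'FLOAT', 'FLOAT', 'FLOAT', 'INT', 'INT', 'INT']);
# and return the memory table name 'KLineMemory'.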
def create_ddb_partition_table(self, hft_type_name):
memory_table_name = self._make_table_skeleton(hft_type_name, 10)
partition_table_name = hft_type_name + self.ddb_partition_table_suffix
print('-' * 80)
print('Will create partitioned table:', partition_table_name)
self.ddb_sess.run("""
{ddb_dbname}.createPartitionedTable(
table = {memory_table_name},
tableName = `{partition_table_name},
partitionColumns = `m_nDate`code,
sortColumns = `code`m_nDate`m_nTime,
compressMethods = {{m_nDate:"delta", m_nTime:"delta"}}
)
""".format(
ddb_dbname = self.ddb_dbname,
memory_table_name = memory_table_name,
partition_table_name = partition_table_name
))
res = self.ddb_sess.run(f"schema(loadTable('{self.ddb_path}', '{partition_table_name}')).colDefs")
pprint(res)
print('-' * 80)
def dump_hft_to_ddb(self, type_name, stock_id, trade_date=None, pbar=None, pool=None):
if (type_name, stock_id, 'OK') in self.dump_journal_df.index:
message = f"Will skip ({type_name}, {stock_id}) as it appears in the dump journal."
if pbar is None:
print(message)
else:
pbar.set_description(message)
return
self.dump_journal_writer.write(f"{type_name},{stock_id},START\n")
self.dump_journal_writer.flush()
# After some experimentation, batching the query per stock gives acceptable performance.
# The MSSQL index is on (S_INFO_WINDCODE, TRADE_DT).
with self.mssql_engine.connect() as conn:
stat = """
select * from [Level2Bytes{mssql_type_name}].dbo.[{mssql_type_name}]
where S_INFO_WINDCODE='{stock_id}'
""".format(
mssql_type_name = self.mssql_name_dict[type_name],
stock_id = stock_id
)
row_list = list(conn.execute(stat).fetchall())
num_rows = len(row_list)
if pbar:
#pbar.set_description(f"Did get the result set for stock {stock_id} from mssql")
pbar.set_description(f"Will work in parallel on the dump job for {stock_id} ({num_rows} rows)")
else:
print(f"Did get the result set for stock {stock_id} from mssql")
# Each row holds all the high-frequency records of one stock for one trading day.
# Use multiple processes to speed this up.
#with Pool(self.num_workers if num_workers is None else num_workers) as pool:
if pool is None:
print("Will create a new Pool object, but this is not encouraged for large batch work.")
pool = Pool(self.num_workers)
with tqdm(total=num_rows, leave=False) as sub_pbar:
for _ in pool.imap_unordered(
functools.partial(
dump_stock_daily_to_ddb,
type_name = type_name,
stock_id = stock_id
),
row_list
):
sub_pbar.update()
self.dump_journal_writer.write(f"{type_name},{stock_id},OK\n")
self.dump_journal_writer.flush()
def main():
loader = DDBLoader()
df_calendar = loader.make_calendar_df()
loader.init_ddb_database(df_calendar)
print('Did finish init_ddb_database')
#loader.load_ddb_database()
#print('Did load ddb database')
loader.init_ddb_table_data(df_calendar)
print('Did finish init_table_data')
if __name__ == '__main__':
main()

@@ -0,0 +1,122 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: HFDataTableMessage.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='HFDataTableMessage.proto',
package='',
syntax='proto2',
serialized_options=None,
serialized_pb=b'\n\x18HFDataTableMessage.proto\"\x86\x01\n\x10HFDataTableArray\x12\x34\n\tdataArray\x18\x01 \x03(\x0b\x32!.HFDataTableArray.HFDataTableData\x1a<\n\x0fHFDataTableData\x12\x0c\n\x04\x63ode\x18\x01 \x02(\t\x12\x0c\n\x04\x64\x61te\x18\x02 \x02(\x05\x12\r\n\x05\x42ytes\x18\x03 \x02(\x0c'
)
_HFDATATABLEARRAY_HFDATATABLEDATA = _descriptor.Descriptor(
name='HFDataTableData',
full_name='HFDataTableArray.HFDataTableData',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='code', full_name='HFDataTableArray.HFDataTableData.code', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='date', full_name='HFDataTableArray.HFDataTableData.date', index=1,
number=2, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='Bytes', full_name='HFDataTableArray.HFDataTableData.Bytes', index=2,
number=3, type=12, cpp_type=9, label=2,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=103,
serialized_end=163,
)
_HFDATATABLEARRAY = _descriptor.Descriptor(
name='HFDataTableArray',
full_name='HFDataTableArray',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='dataArray', full_name='HFDataTableArray.dataArray', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_HFDATATABLEARRAY_HFDATATABLEDATA, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=29,
serialized_end=163,
)
_HFDATATABLEARRAY_HFDATATABLEDATA.containing_type = _HFDATATABLEARRAY
_HFDATATABLEARRAY.fields_by_name['dataArray'].message_type = _HFDATATABLEARRAY_HFDATATABLEDATA
DESCRIPTOR.message_types_by_name['HFDataTableArray'] = _HFDATATABLEARRAY
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
HFDataTableArray = _reflection.GeneratedProtocolMessageType('HFDataTableArray', (_message.Message,), {
'HFDataTableData' : _reflection.GeneratedProtocolMessageType('HFDataTableData', (_message.Message,), {
'DESCRIPTOR' : _HFDATATABLEARRAY_HFDATATABLEDATA,
'__module__' : 'HFDataTableMessage_pb2'
# @@protoc_insertion_point(class_scope:HFDataTableArray.HFDataTableData)
})
,
'DESCRIPTOR' : _HFDATATABLEARRAY,
'__module__' : 'HFDataTableMessage_pb2'
# @@protoc_insertion_point(class_scope:HFDataTableArray)
})
_sym_db.RegisterMessage(HFDataTableArray)
_sym_db.RegisterMessage(HFDataTableArray.HFDataTableData)
# @@protoc_insertion_point(module_scope)

@@ -0,0 +1,178 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: IndexFutureKLineMessage.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='IndexFutureKLineMessage.proto',
package='',
syntax='proto2',
serialized_options=None,
serialized_pb=b'\n\x1dIndexFutureKLineMessage.proto\"\xc0\x02\n\x15IndexFutureKLineArray\x12>\n\tdataArray\x18\x01 \x03(\x0b\x32+.IndexFutureKLineArray.IndexFutureKLineData\x1a\xe6\x01\n\x14IndexFutureKLineData\x12\x15\n\rm_strWindCode\x18\x01 \x02(\t\x12\x0f\n\x07m_nDate\x18\x02 \x02(\x05\x12\x0f\n\x07m_nTime\x18\x03 \x02(\x05\x12\x0f\n\x07m_nOpen\x18\x04 \x02(\x01\x12\x0f\n\x07m_nHigh\x18\x05 \x02(\x01\x12\x0e\n\x06m_nLow\x18\x06 \x02(\x01\x12\x10\n\x08m_nClose\x18\x07 \x02(\x01\x12\x11\n\tm_iVolume\x18\x08 \x02(\x03\x12\x12\n\nm_iTurover\x18\t \x02(\x03\x12\x15\n\rm_nMatchItems\x18\n \x02(\x05\x12\x13\n\x0bm_nInterest\x18\x0b \x02(\x05'
)
_INDEXFUTUREKLINEARRAY_INDEXFUTUREKLINEDATA = _descriptor.Descriptor(
name='IndexFutureKLineData',
full_name='IndexFutureKLineArray.IndexFutureKLineData',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='m_strWindCode', full_name='IndexFutureKLineArray.IndexFutureKLineData.m_strWindCode', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nDate', full_name='IndexFutureKLineArray.IndexFutureKLineData.m_nDate', index=1,
number=2, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nTime', full_name='IndexFutureKLineArray.IndexFutureKLineData.m_nTime', index=2,
number=3, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nOpen', full_name='IndexFutureKLineArray.IndexFutureKLineData.m_nOpen', index=3,
number=4, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nHigh', full_name='IndexFutureKLineArray.IndexFutureKLineData.m_nHigh', index=4,
number=5, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nLow', full_name='IndexFutureKLineArray.IndexFutureKLineData.m_nLow', index=5,
number=6, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nClose', full_name='IndexFutureKLineArray.IndexFutureKLineData.m_nClose', index=6,
number=7, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_iVolume', full_name='IndexFutureKLineArray.IndexFutureKLineData.m_iVolume', index=7,
number=8, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_iTurover', full_name='IndexFutureKLineArray.IndexFutureKLineData.m_iTurover', index=8,
number=9, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nMatchItems', full_name='IndexFutureKLineArray.IndexFutureKLineData.m_nMatchItems', index=9,
number=10, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nInterest', full_name='IndexFutureKLineArray.IndexFutureKLineData.m_nInterest', index=10,
number=11, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=124,
serialized_end=354,
)
_INDEXFUTUREKLINEARRAY = _descriptor.Descriptor(
name='IndexFutureKLineArray',
full_name='IndexFutureKLineArray',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='dataArray', full_name='IndexFutureKLineArray.dataArray', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_INDEXFUTUREKLINEARRAY_INDEXFUTUREKLINEDATA, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=34,
serialized_end=354,
)
_INDEXFUTUREKLINEARRAY_INDEXFUTUREKLINEDATA.containing_type = _INDEXFUTUREKLINEARRAY
_INDEXFUTUREKLINEARRAY.fields_by_name['dataArray'].message_type = _INDEXFUTUREKLINEARRAY_INDEXFUTUREKLINEDATA
DESCRIPTOR.message_types_by_name['IndexFutureKLineArray'] = _INDEXFUTUREKLINEARRAY
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
IndexFutureKLineArray = _reflection.GeneratedProtocolMessageType('IndexFutureKLineArray', (_message.Message,), {
'IndexFutureKLineData' : _reflection.GeneratedProtocolMessageType('IndexFutureKLineData', (_message.Message,), {
'DESCRIPTOR' : _INDEXFUTUREKLINEARRAY_INDEXFUTUREKLINEDATA,
'__module__' : 'IndexFutureKLineMessage_pb2'
# @@protoc_insertion_point(class_scope:IndexFutureKLineArray.IndexFutureKLineData)
})
,
'DESCRIPTOR' : _INDEXFUTUREKLINEARRAY,
'__module__' : 'IndexFutureKLineMessage_pb2'
# @@protoc_insertion_point(class_scope:IndexFutureKLineArray)
})
_sym_db.RegisterMessage(IndexFutureKLineArray)
_sym_db.RegisterMessage(IndexFutureKLineArray.IndexFutureKLineData)
# @@protoc_insertion_point(module_scope)

@@ -0,0 +1,255 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: IndexFutureL1TickMessage.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='IndexFutureL1TickMessage.proto',
package='',
syntax='proto2',
serialized_options=None,
serialized_pb=b'\n\x1eIndexFutureL1TickMessage.proto\"\xb5\x04\n\x16IndexFutureL1TickArray\x12@\n\tdataArray\x18\x01 \x03(\x0b\x32-.IndexFutureL1TickArray.IndexFutureL1TickData\x1a\xd8\x03\n\x15IndexFutureL1TickData\x12\x15\n\rm_strWindCode\x18\x01 \x02(\t\x12\x0f\n\x07m_nDate\x18\x02 \x02(\x05\x12\x0f\n\x07m_nTime\x18\x03 \x02(\x05\x12\x10\n\x08m_nPrice\x18\x04 \x02(\x01\x12\x11\n\tm_iVolume\x18\x05 \x02(\x03\x12\x12\n\nm_iTurover\x18\x06 \x02(\x03\x12\x15\n\rm_nMatchItems\x18\x07 \x02(\x05\x12\x15\n\rm_chTradeFlag\x18\x08 \x02(\r\x12\x12\n\nm_chBSFlag\x18\t \x02(\r\x12\x14\n\x0cm_iAccVolume\x18\n \x02(\x03\x12\x15\n\rm_iAccTurover\x18\x0b \x02(\x03\x12\x0f\n\x07m_nHigh\x18\x0c \x02(\x01\x12\x0e\n\x06m_nLow\x18\r \x02(\x01\x12\x0f\n\x07m_nOpen\x18\x0e \x02(\x01\x12\x13\n\x0bm_nPreClose\x18\x0f \x02(\x01\x12\x13\n\x0bm_nAskPrice\x18\x10 \x02(\x01\x12\x14\n\x0cm_nAskVolume\x18\x11 \x02(\x05\x12\x13\n\x0bm_nBidPrice\x18\x12 \x02(\x01\x12\x14\n\x0cm_nBidVolume\x18\x13 \x02(\x05\x12\x13\n\x0bm_nPosition\x18\x14 \x02(\x05\x12\x14\n\x0cm_nPreSettle\x18\x15 \x02(\x01\x12\x16\n\x0em_nPrePosition\x18\x16 \x02(\x05'
)
_INDEXFUTUREL1TICKARRAY_INDEXFUTUREL1TICKDATA = _descriptor.Descriptor(
name='IndexFutureL1TickData',
full_name='IndexFutureL1TickArray.IndexFutureL1TickData',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='m_strWindCode', full_name='IndexFutureL1TickArray.IndexFutureL1TickData.m_strWindCode', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nDate', full_name='IndexFutureL1TickArray.IndexFutureL1TickData.m_nDate', index=1,
number=2, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nTime', full_name='IndexFutureL1TickArray.IndexFutureL1TickData.m_nTime', index=2,
number=3, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nPrice', full_name='IndexFutureL1TickArray.IndexFutureL1TickData.m_nPrice', index=3,
number=4, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_iVolume', full_name='IndexFutureL1TickArray.IndexFutureL1TickData.m_iVolume', index=4,
number=5, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_iTurover', full_name='IndexFutureL1TickArray.IndexFutureL1TickData.m_iTurover', index=5,
number=6, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nMatchItems', full_name='IndexFutureL1TickArray.IndexFutureL1TickData.m_nMatchItems', index=6,
number=7, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_chTradeFlag', full_name='IndexFutureL1TickArray.IndexFutureL1TickData.m_chTradeFlag', index=7,
number=8, type=13, cpp_type=3, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_chBSFlag', full_name='IndexFutureL1TickArray.IndexFutureL1TickData.m_chBSFlag', index=8,
number=9, type=13, cpp_type=3, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_iAccVolume', full_name='IndexFutureL1TickArray.IndexFutureL1TickData.m_iAccVolume', index=9,
number=10, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_iAccTurover', full_name='IndexFutureL1TickArray.IndexFutureL1TickData.m_iAccTurover', index=10,
number=11, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nHigh', full_name='IndexFutureL1TickArray.IndexFutureL1TickData.m_nHigh', index=11,
number=12, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nLow', full_name='IndexFutureL1TickArray.IndexFutureL1TickData.m_nLow', index=12,
number=13, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nOpen', full_name='IndexFutureL1TickArray.IndexFutureL1TickData.m_nOpen', index=13,
number=14, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nPreClose', full_name='IndexFutureL1TickArray.IndexFutureL1TickData.m_nPreClose', index=14,
number=15, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nAskPrice', full_name='IndexFutureL1TickArray.IndexFutureL1TickData.m_nAskPrice', index=15,
number=16, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nAskVolume', full_name='IndexFutureL1TickArray.IndexFutureL1TickData.m_nAskVolume', index=16,
number=17, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nBidPrice', full_name='IndexFutureL1TickArray.IndexFutureL1TickData.m_nBidPrice', index=17,
number=18, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nBidVolume', full_name='IndexFutureL1TickArray.IndexFutureL1TickData.m_nBidVolume', index=18,
number=19, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nPosition', full_name='IndexFutureL1TickArray.IndexFutureL1TickData.m_nPosition', index=19,
number=20, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nPreSettle', full_name='IndexFutureL1TickArray.IndexFutureL1TickData.m_nPreSettle', index=20,
number=21, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nPrePosition', full_name='IndexFutureL1TickArray.IndexFutureL1TickData.m_nPrePosition', index=21,
number=22, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=128,
serialized_end=600,
)
_INDEXFUTUREL1TICKARRAY = _descriptor.Descriptor(
name='IndexFutureL1TickArray',
full_name='IndexFutureL1TickArray',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='dataArray', full_name='IndexFutureL1TickArray.dataArray', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_INDEXFUTUREL1TICKARRAY_INDEXFUTUREL1TICKDATA, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=35,
serialized_end=600,
)
_INDEXFUTUREL1TICKARRAY_INDEXFUTUREL1TICKDATA.containing_type = _INDEXFUTUREL1TICKARRAY
_INDEXFUTUREL1TICKARRAY.fields_by_name['dataArray'].message_type = _INDEXFUTUREL1TICKARRAY_INDEXFUTUREL1TICKDATA
DESCRIPTOR.message_types_by_name['IndexFutureL1TickArray'] = _INDEXFUTUREL1TICKARRAY
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
IndexFutureL1TickArray = _reflection.GeneratedProtocolMessageType('IndexFutureL1TickArray', (_message.Message,), {
'IndexFutureL1TickData' : _reflection.GeneratedProtocolMessageType('IndexFutureL1TickData', (_message.Message,), {
'DESCRIPTOR' : _INDEXFUTUREL1TICKARRAY_INDEXFUTUREL1TICKDATA,
'__module__' : 'IndexFutureL1TickMessage_pb2'
# @@protoc_insertion_point(class_scope:IndexFutureL1TickArray.IndexFutureL1TickData)
})
,
'DESCRIPTOR' : _INDEXFUTUREL1TICKARRAY,
'__module__' : 'IndexFutureL1TickMessage_pb2'
# @@protoc_insertion_point(class_scope:IndexFutureL1TickArray)
})
_sym_db.RegisterMessage(IndexFutureL1TickArray)
_sym_db.RegisterMessage(IndexFutureL1TickArray.IndexFutureL1TickData)
# @@protoc_insertion_point(module_scope)

@@ -0,0 +1,164 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: IndexKLineMessage.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='IndexKLineMessage.proto',
package='',
syntax='proto2',
serialized_options=None,
serialized_pb=b'\n\x17IndexKLineMessage.proto\"\xf3\x01\n\x0fIndexKLineArray\x12\x32\n\tdataArray\x18\x01 \x03(\x0b\x32\x1f.IndexKLineArray.IndexKLineData\x1a\xab\x01\n\x0eIndexKLineData\x12\x0c\n\x04\x63ode\x18\x01 \x02(\x05\x12\x0f\n\x07m_nDate\x18\x02 \x02(\x05\x12\x0f\n\x07m_nTime\x18\x03 \x02(\x05\x12\x0f\n\x07m_nOpen\x18\x04 \x02(\x01\x12\x0f\n\x07m_nHigh\x18\x05 \x02(\x01\x12\x0e\n\x06m_nLow\x18\x06 \x02(\x01\x12\x10\n\x08m_nClose\x18\x07 \x02(\x01\x12\x11\n\tm_iVolume\x18\x08 \x02(\x03\x12\x12\n\nm_iTurover\x18\t \x02(\x03'
)
_INDEXKLINEARRAY_INDEXKLINEDATA = _descriptor.Descriptor(
name='IndexKLineData',
full_name='IndexKLineArray.IndexKLineData',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='code', full_name='IndexKLineArray.IndexKLineData.code', index=0,
number=1, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nDate', full_name='IndexKLineArray.IndexKLineData.m_nDate', index=1,
number=2, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nTime', full_name='IndexKLineArray.IndexKLineData.m_nTime', index=2,
number=3, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nOpen', full_name='IndexKLineArray.IndexKLineData.m_nOpen', index=3,
number=4, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nHigh', full_name='IndexKLineArray.IndexKLineData.m_nHigh', index=4,
number=5, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nLow', full_name='IndexKLineArray.IndexKLineData.m_nLow', index=5,
number=6, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nClose', full_name='IndexKLineArray.IndexKLineData.m_nClose', index=6,
number=7, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_iVolume', full_name='IndexKLineArray.IndexKLineData.m_iVolume', index=7,
number=8, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_iTurover', full_name='IndexKLineArray.IndexKLineData.m_iTurover', index=8,
number=9, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=100,
serialized_end=271,
)
_INDEXKLINEARRAY = _descriptor.Descriptor(
name='IndexKLineArray',
full_name='IndexKLineArray',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='dataArray', full_name='IndexKLineArray.dataArray', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_INDEXKLINEARRAY_INDEXKLINEDATA, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=28,
serialized_end=271,
)
_INDEXKLINEARRAY_INDEXKLINEDATA.containing_type = _INDEXKLINEARRAY
_INDEXKLINEARRAY.fields_by_name['dataArray'].message_type = _INDEXKLINEARRAY_INDEXKLINEDATA
DESCRIPTOR.message_types_by_name['IndexKLineArray'] = _INDEXKLINEARRAY
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
IndexKLineArray = _reflection.GeneratedProtocolMessageType('IndexKLineArray', (_message.Message,), {
'IndexKLineData' : _reflection.GeneratedProtocolMessageType('IndexKLineData', (_message.Message,), {
'DESCRIPTOR' : _INDEXKLINEARRAY_INDEXKLINEDATA,
'__module__' : 'IndexKLineMessage_pb2'
# @@protoc_insertion_point(class_scope:IndexKLineArray.IndexKLineData)
})
,
'DESCRIPTOR' : _INDEXKLINEARRAY,
'__module__' : 'IndexKLineMessage_pb2'
# @@protoc_insertion_point(class_scope:IndexKLineArray)
})
_sym_db.RegisterMessage(IndexKLineArray)
_sym_db.RegisterMessage(IndexKLineArray.IndexKLineData)
# @@protoc_insertion_point(module_scope)

@@ -0,0 +1,185 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: IndexTickMessage.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='IndexTickMessage.proto',
package='',
syntax='proto2',
serialized_options=None,
serialized_pb=b'\n\x16IndexTickMessage.proto\"\xb1\x02\n\x0eIndexTickArray\x12\x30\n\tdataArray\x18\x01 \x03(\x0b\x32\x1d.IndexTickArray.IndexTickData\x1a\xec\x01\n\rIndexTickData\x12\x0c\n\x04\x63ode\x18\x01 \x02(\x05\x12\x0f\n\x07m_nDate\x18\x02 \x02(\x05\x12\x0f\n\x07m_nTime\x18\x03 \x02(\x05\x12\x10\n\x08m_nPrice\x18\x04 \x02(\x01\x12\x11\n\tm_iVolume\x18\x05 \x02(\x03\x12\x12\n\nm_iTurover\x18\x06 \x02(\x03\x12\x14\n\x0cm_iAccVolume\x18\x07 \x02(\x03\x12\x15\n\rm_iAccTurover\x18\x08 \x02(\x03\x12\x0f\n\x07m_nHigh\x18\t \x02(\x01\x12\x0e\n\x06m_nLow\x18\n \x02(\x01\x12\x0f\n\x07m_nOpen\x18\x0b \x02(\x01\x12\x13\n\x0bm_nPreClose\x18\x0c \x02(\x01'
)
_INDEXTICKARRAY_INDEXTICKDATA = _descriptor.Descriptor(
name='IndexTickData',
full_name='IndexTickArray.IndexTickData',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='code', full_name='IndexTickArray.IndexTickData.code', index=0,
number=1, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nDate', full_name='IndexTickArray.IndexTickData.m_nDate', index=1,
number=2, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nTime', full_name='IndexTickArray.IndexTickData.m_nTime', index=2,
number=3, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nPrice', full_name='IndexTickArray.IndexTickData.m_nPrice', index=3,
number=4, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_iVolume', full_name='IndexTickArray.IndexTickData.m_iVolume', index=4,
number=5, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_iTurover', full_name='IndexTickArray.IndexTickData.m_iTurover', index=5,
number=6, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_iAccVolume', full_name='IndexTickArray.IndexTickData.m_iAccVolume', index=6,
number=7, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_iAccTurover', full_name='IndexTickArray.IndexTickData.m_iAccTurover', index=7,
number=8, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nHigh', full_name='IndexTickArray.IndexTickData.m_nHigh', index=8,
number=9, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nLow', full_name='IndexTickArray.IndexTickData.m_nLow', index=9,
number=10, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nOpen', full_name='IndexTickArray.IndexTickData.m_nOpen', index=10,
number=11, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nPreClose', full_name='IndexTickArray.IndexTickData.m_nPreClose', index=11,
number=12, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=96,
serialized_end=332,
)
_INDEXTICKARRAY = _descriptor.Descriptor(
name='IndexTickArray',
full_name='IndexTickArray',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='dataArray', full_name='IndexTickArray.dataArray', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_INDEXTICKARRAY_INDEXTICKDATA, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=27,
serialized_end=332,
)
_INDEXTICKARRAY_INDEXTICKDATA.containing_type = _INDEXTICKARRAY
_INDEXTICKARRAY.fields_by_name['dataArray'].message_type = _INDEXTICKARRAY_INDEXTICKDATA
DESCRIPTOR.message_types_by_name['IndexTickArray'] = _INDEXTICKARRAY
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
IndexTickArray = _reflection.GeneratedProtocolMessageType('IndexTickArray', (_message.Message,), {
'IndexTickData' : _reflection.GeneratedProtocolMessageType('IndexTickData', (_message.Message,), {
'DESCRIPTOR' : _INDEXTICKARRAY_INDEXTICKDATA,
'__module__' : 'IndexTickMessage_pb2'
# @@protoc_insertion_point(class_scope:IndexTickArray.IndexTickData)
})
,
'DESCRIPTOR' : _INDEXTICKARRAY,
'__module__' : 'IndexTickMessage_pb2'
# @@protoc_insertion_point(class_scope:IndexTickArray)
})
_sym_db.RegisterMessage(IndexTickArray)
_sym_db.RegisterMessage(IndexTickArray.IndexTickData)
# @@protoc_insertion_point(module_scope)

@@ -0,0 +1,171 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: KLineMessage.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='KLineMessage.proto',
package='',
syntax='proto2',
serialized_options=None,
serialized_pb=b'\n\x12KLineMessage.proto\"\xf6\x01\n\nKLineArray\x12(\n\tdataArray\x18\x01 \x03(\x0b\x32\x15.KLineArray.KLineData\x1a\xbd\x01\n\tKLineData\x12\x0c\n\x04\x63ode\x18\x01 \x02(\x05\x12\x0f\n\x07m_nDate\x18\x02 \x02(\x05\x12\x0f\n\x07m_nTime\x18\x03 \x02(\x05\x12\x0f\n\x07m_nOpen\x18\x04 \x02(\x01\x12\x0f\n\x07m_nHigh\x18\x05 \x02(\x01\x12\x0e\n\x06m_nLow\x18\x06 \x02(\x01\x12\x10\n\x08m_nClose\x18\x07 \x02(\x01\x12\x11\n\tm_iVolume\x18\x08 \x02(\x03\x12\x12\n\nm_iTurover\x18\t \x02(\x03\x12\x15\n\rm_nMatchItems\x18\n \x02(\x05'
)
_KLINEARRAY_KLINEDATA = _descriptor.Descriptor(
name='KLineData',
full_name='KLineArray.KLineData',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='code', full_name='KLineArray.KLineData.code', index=0,
number=1, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nDate', full_name='KLineArray.KLineData.m_nDate', index=1,
number=2, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nTime', full_name='KLineArray.KLineData.m_nTime', index=2,
number=3, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nOpen', full_name='KLineArray.KLineData.m_nOpen', index=3,
number=4, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nHigh', full_name='KLineArray.KLineData.m_nHigh', index=4,
number=5, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nLow', full_name='KLineArray.KLineData.m_nLow', index=5,
number=6, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nClose', full_name='KLineArray.KLineData.m_nClose', index=6,
number=7, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_iVolume', full_name='KLineArray.KLineData.m_iVolume', index=7,
number=8, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_iTurover', full_name='KLineArray.KLineData.m_iTurover', index=8,
number=9, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nMatchItems', full_name='KLineArray.KLineData.m_nMatchItems', index=9,
number=10, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=80,
serialized_end=269,
)
_KLINEARRAY = _descriptor.Descriptor(
name='KLineArray',
full_name='KLineArray',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='dataArray', full_name='KLineArray.dataArray', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_KLINEARRAY_KLINEDATA, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=23,
serialized_end=269,
)
_KLINEARRAY_KLINEDATA.containing_type = _KLINEARRAY
_KLINEARRAY.fields_by_name['dataArray'].message_type = _KLINEARRAY_KLINEDATA
DESCRIPTOR.message_types_by_name['KLineArray'] = _KLINEARRAY
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
KLineArray = _reflection.GeneratedProtocolMessageType('KLineArray', (_message.Message,), {
'KLineData' : _reflection.GeneratedProtocolMessageType('KLineData', (_message.Message,), {
'DESCRIPTOR' : _KLINEARRAY_KLINEDATA,
'__module__' : 'KLineMessage_pb2'
# @@protoc_insertion_point(class_scope:KLineArray.KLineData)
})
,
'DESCRIPTOR' : _KLINEARRAY,
'__module__' : 'KLineMessage_pb2'
# @@protoc_insertion_point(class_scope:KLineArray)
})
_sym_db.RegisterMessage(KLineArray)
_sym_db.RegisterMessage(KLineArray.KLineData)
# @@protoc_insertion_point(module_scope)

@@ -0,0 +1,164 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: OrderMessage.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='OrderMessage.proto',
package='',
syntax='proto2',
serialized_options=None,
serialized_pb=b'\n\x12OrderMessage.proto\"\xf8\x01\n\nOrderArray\x12(\n\tdataArray\x18\x01 \x03(\x0b\x32\x15.OrderArray.OrderData\x1a\xbf\x01\n\tOrderData\x12\x0c\n\x04\x63ode\x18\x01 \x02(\x05\x12\x0f\n\x07m_nDate\x18\x02 \x02(\x05\x12\x0f\n\x07m_nTime\x18\x03 \x02(\x05\x12\x10\n\x08m_nIndex\x18\x04 \x02(\x05\x12\x10\n\x08m_nOrder\x18\x05 \x02(\x05\x12\x15\n\rm_chOrderKind\x18\x06 \x02(\r\x12\x18\n\x10m_chFunctionCode\x18\x07 \x02(\r\x12\x15\n\rm_nTradePrice\x18\x08 \x02(\x01\x12\x16\n\x0em_nTradeVolume\x18\t \x02(\x05'
)
_ORDERARRAY_ORDERDATA = _descriptor.Descriptor(
name='OrderData',
full_name='OrderArray.OrderData',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='code', full_name='OrderArray.OrderData.code', index=0,
number=1, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nDate', full_name='OrderArray.OrderData.m_nDate', index=1,
number=2, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nTime', full_name='OrderArray.OrderData.m_nTime', index=2,
number=3, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nIndex', full_name='OrderArray.OrderData.m_nIndex', index=3,
number=4, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nOrder', full_name='OrderArray.OrderData.m_nOrder', index=4,
number=5, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_chOrderKind', full_name='OrderArray.OrderData.m_chOrderKind', index=5,
number=6, type=13, cpp_type=3, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_chFunctionCode', full_name='OrderArray.OrderData.m_chFunctionCode', index=6,
number=7, type=13, cpp_type=3, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nTradePrice', full_name='OrderArray.OrderData.m_nTradePrice', index=7,
number=8, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nTradeVolume', full_name='OrderArray.OrderData.m_nTradeVolume', index=8,
number=9, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=80,
serialized_end=271,
)
_ORDERARRAY = _descriptor.Descriptor(
name='OrderArray',
full_name='OrderArray',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='dataArray', full_name='OrderArray.dataArray', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_ORDERARRAY_ORDERDATA, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=23,
serialized_end=271,
)
_ORDERARRAY_ORDERDATA.containing_type = _ORDERARRAY
_ORDERARRAY.fields_by_name['dataArray'].message_type = _ORDERARRAY_ORDERDATA
DESCRIPTOR.message_types_by_name['OrderArray'] = _ORDERARRAY
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
OrderArray = _reflection.GeneratedProtocolMessageType('OrderArray', (_message.Message,), {
'OrderData' : _reflection.GeneratedProtocolMessageType('OrderData', (_message.Message,), {
'DESCRIPTOR' : _ORDERARRAY_ORDERDATA,
'__module__' : 'OrderMessage_pb2'
# @@protoc_insertion_point(class_scope:OrderArray.OrderData)
})
,
'DESCRIPTOR' : _ORDERARRAY,
'__module__' : 'OrderMessage_pb2'
# @@protoc_insertion_point(class_scope:OrderArray)
})
_sym_db.RegisterMessage(OrderArray)
_sym_db.RegisterMessage(OrderArray.OrderData)
# @@protoc_insertion_point(module_scope)

@ -0,0 +1,262 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: TickMessage.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='TickMessage.proto',
package='',
syntax='proto2',
serialized_options=None,
serialized_pb=b'\n\x11TickMessage.proto\"\x99\x04\n\tTickArray\x12&\n\tdataArray\x18\x01 \x03(\x0b\x32\x13.TickArray.TickData\x1a\xe3\x03\n\x08TickData\x12\x0c\n\x04\x63ode\x18\x01 \x02(\x05\x12\x0f\n\x07m_nDate\x18\x02 \x02(\x05\x12\x0f\n\x07m_nTime\x18\x03 \x02(\x05\x12\x10\n\x08m_nPrice\x18\x04 \x02(\x01\x12\x11\n\tm_iVolume\x18\x05 \x02(\x03\x12\x12\n\nm_iTurover\x18\x06 \x02(\x03\x12\x15\n\rm_nMatchItems\x18\x07 \x02(\x05\x12\x15\n\rm_chTradeFlag\x18\x08 \x02(\r\x12\x12\n\nm_chBSFlag\x18\t \x02(\r\x12\x14\n\x0cm_iAccVolume\x18\n \x02(\x03\x12\x15\n\rm_iAccTurover\x18\x0b \x02(\x03\x12\x0f\n\x07m_nHigh\x18\x0c \x02(\x01\x12\x0e\n\x06m_nLow\x18\r \x02(\x01\x12\x0f\n\x07m_nOpen\x18\x0e \x02(\x01\x12\x13\n\x0bm_nPreClose\x18\x0f \x02(\x01\x12\x13\n\x0bm_nAskPrice\x18\x10 \x03(\x01\x12\x14\n\x0cm_nAskVolume\x18\x11 \x03(\x05\x12\x13\n\x0bm_nBidPrice\x18\x12 \x03(\x01\x12\x14\n\x0cm_nBidVolume\x18\x13 \x03(\x05\x12\x15\n\rm_nAskAvPrice\x18\x14 \x02(\x01\x12\x15\n\rm_nBidAvPrice\x18\x15 \x02(\x01\x12\x19\n\x11m_iTotalAskVolume\x18\x16 \x02(\x03\x12\x19\n\x11m_iTotalBidVolume\x18\x17 \x02(\x03'
)
_TICKARRAY_TICKDATA = _descriptor.Descriptor(
name='TickData',
full_name='TickArray.TickData',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='code', full_name='TickArray.TickData.code', index=0,
number=1, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nDate', full_name='TickArray.TickData.m_nDate', index=1,
number=2, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nTime', full_name='TickArray.TickData.m_nTime', index=2,
number=3, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nPrice', full_name='TickArray.TickData.m_nPrice', index=3,
number=4, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_iVolume', full_name='TickArray.TickData.m_iVolume', index=4,
number=5, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_iTurover', full_name='TickArray.TickData.m_iTurover', index=5,
number=6, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nMatchItems', full_name='TickArray.TickData.m_nMatchItems', index=6,
number=7, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_chTradeFlag', full_name='TickArray.TickData.m_chTradeFlag', index=7,
number=8, type=13, cpp_type=3, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_chBSFlag', full_name='TickArray.TickData.m_chBSFlag', index=8,
number=9, type=13, cpp_type=3, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_iAccVolume', full_name='TickArray.TickData.m_iAccVolume', index=9,
number=10, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_iAccTurover', full_name='TickArray.TickData.m_iAccTurover', index=10,
number=11, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nHigh', full_name='TickArray.TickData.m_nHigh', index=11,
number=12, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nLow', full_name='TickArray.TickData.m_nLow', index=12,
number=13, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nOpen', full_name='TickArray.TickData.m_nOpen', index=13,
number=14, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nPreClose', full_name='TickArray.TickData.m_nPreClose', index=14,
number=15, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nAskPrice', full_name='TickArray.TickData.m_nAskPrice', index=15,
number=16, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nAskVolume', full_name='TickArray.TickData.m_nAskVolume', index=16,
number=17, type=5, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nBidPrice', full_name='TickArray.TickData.m_nBidPrice', index=17,
number=18, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nBidVolume', full_name='TickArray.TickData.m_nBidVolume', index=18,
number=19, type=5, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nAskAvPrice', full_name='TickArray.TickData.m_nAskAvPrice', index=19,
number=20, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nBidAvPrice', full_name='TickArray.TickData.m_nBidAvPrice', index=20,
number=21, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_iTotalAskVolume', full_name='TickArray.TickData.m_iTotalAskVolume', index=21,
number=22, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_iTotalBidVolume', full_name='TickArray.TickData.m_iTotalBidVolume', index=22,
number=23, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=76,
serialized_end=559,
)
_TICKARRAY = _descriptor.Descriptor(
name='TickArray',
full_name='TickArray',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='dataArray', full_name='TickArray.dataArray', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_TICKARRAY_TICKDATA, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=22,
serialized_end=559,
)
_TICKARRAY_TICKDATA.containing_type = _TICKARRAY
_TICKARRAY.fields_by_name['dataArray'].message_type = _TICKARRAY_TICKDATA
DESCRIPTOR.message_types_by_name['TickArray'] = _TICKARRAY
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
TickArray = _reflection.GeneratedProtocolMessageType('TickArray', (_message.Message,), {
'TickData' : _reflection.GeneratedProtocolMessageType('TickData', (_message.Message,), {
'DESCRIPTOR' : _TICKARRAY_TICKDATA,
'__module__' : 'TickMessage_pb2'
# @@protoc_insertion_point(class_scope:TickArray.TickData)
})
,
'DESCRIPTOR' : _TICKARRAY,
'__module__' : 'TickMessage_pb2'
# @@protoc_insertion_point(class_scope:TickArray)
})
_sym_db.RegisterMessage(TickArray)
_sym_db.RegisterMessage(TickArray.TickData)
# @@protoc_insertion_point(module_scope)

@ -0,0 +1,157 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: TickQueueMessage.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='TickQueueMessage.proto',
package='',
syntax='proto2',
serialized_options=None,
serialized_pb=b'\n\x16TickQueueMessage.proto\"\xe7\x01\n\x0eTickQueueArray\x12\x30\n\tdataArray\x18\x01 \x03(\x0b\x32\x1d.TickQueueArray.TickQueueData\x1a\xa2\x01\n\rTickQueueData\x12\x0c\n\x04\x63ode\x18\x01 \x02(\x05\x12\x0f\n\x07m_nDate\x18\x02 \x02(\x05\x12\x0f\n\x07m_nTime\x18\x03 \x02(\x05\x12\x0f\n\x07m_nSide\x18\x04 \x02(\r\x12\x10\n\x08m_nPrice\x18\x05 \x02(\x01\x12\x15\n\rm_nOrderItems\x18\x06 \x02(\x05\x12\x12\n\nm_nABItems\x18\x07 \x02(\x05\x12\x13\n\x0bm_nABVolume\x18\x08 \x03(\x05'
)
_TICKQUEUEARRAY_TICKQUEUEDATA = _descriptor.Descriptor(
name='TickQueueData',
full_name='TickQueueArray.TickQueueData',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='code', full_name='TickQueueArray.TickQueueData.code', index=0,
number=1, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nDate', full_name='TickQueueArray.TickQueueData.m_nDate', index=1,
number=2, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nTime', full_name='TickQueueArray.TickQueueData.m_nTime', index=2,
number=3, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nSide', full_name='TickQueueArray.TickQueueData.m_nSide', index=3,
number=4, type=13, cpp_type=3, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nPrice', full_name='TickQueueArray.TickQueueData.m_nPrice', index=4,
number=5, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nOrderItems', full_name='TickQueueArray.TickQueueData.m_nOrderItems', index=5,
number=6, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nABItems', full_name='TickQueueArray.TickQueueData.m_nABItems', index=6,
number=7, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nABVolume', full_name='TickQueueArray.TickQueueData.m_nABVolume', index=7,
number=8, type=5, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=96,
serialized_end=258,
)
_TICKQUEUEARRAY = _descriptor.Descriptor(
name='TickQueueArray',
full_name='TickQueueArray',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='dataArray', full_name='TickQueueArray.dataArray', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_TICKQUEUEARRAY_TICKQUEUEDATA, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=27,
serialized_end=258,
)
_TICKQUEUEARRAY_TICKQUEUEDATA.containing_type = _TICKQUEUEARRAY
_TICKQUEUEARRAY.fields_by_name['dataArray'].message_type = _TICKQUEUEARRAY_TICKQUEUEDATA
DESCRIPTOR.message_types_by_name['TickQueueArray'] = _TICKQUEUEARRAY
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
TickQueueArray = _reflection.GeneratedProtocolMessageType('TickQueueArray', (_message.Message,), {
'TickQueueData' : _reflection.GeneratedProtocolMessageType('TickQueueData', (_message.Message,), {
'DESCRIPTOR' : _TICKQUEUEARRAY_TICKQUEUEDATA,
'__module__' : 'TickQueueMessage_pb2'
# @@protoc_insertion_point(class_scope:TickQueueArray.TickQueueData)
})
,
'DESCRIPTOR' : _TICKQUEUEARRAY,
'__module__' : 'TickQueueMessage_pb2'
# @@protoc_insertion_point(class_scope:TickQueueArray)
})
_sym_db.RegisterMessage(TickQueueArray)
_sym_db.RegisterMessage(TickQueueArray.TickQueueData)
# @@protoc_insertion_point(module_scope)

@ -0,0 +1,178 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: TranseMessage.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='TranseMessage.proto',
package='',
syntax='proto2',
serialized_options=None,
serialized_pb=b'\n\x13TranseMessage.proto\"\xa8\x02\n\x0bTranseArray\x12*\n\tdataArray\x18\x01 \x03(\x0b\x32\x17.TranseArray.TranseData\x1a\xec\x01\n\nTranseData\x12\x0c\n\x04\x63ode\x18\x01 \x02(\x05\x12\x0f\n\x07m_nDate\x18\x02 \x02(\x05\x12\x0f\n\x07m_nTime\x18\x03 \x02(\x05\x12\x10\n\x08m_nIndex\x18\x04 \x02(\x05\x12\x18\n\x10m_chFunctionCode\x18\x05 \x02(\r\x12\x15\n\rm_chOrderKind\x18\x06 \x02(\r\x12\x12\n\nm_chBSFlag\x18\x07 \x02(\r\x12\x15\n\rm_nTradePrice\x18\x08 \x02(\x01\x12\x16\n\x0em_nTradeVolume\x18\t \x02(\x05\x12\x13\n\x0bm_nAskOrder\x18\n \x02(\x05\x12\x13\n\x0bm_nBidOrder\x18\x0b \x02(\x05'
)
_TRANSEARRAY_TRANSEDATA = _descriptor.Descriptor(
name='TranseData',
full_name='TranseArray.TranseData',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='code', full_name='TranseArray.TranseData.code', index=0,
number=1, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nDate', full_name='TranseArray.TranseData.m_nDate', index=1,
number=2, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nTime', full_name='TranseArray.TranseData.m_nTime', index=2,
number=3, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nIndex', full_name='TranseArray.TranseData.m_nIndex', index=3,
number=4, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_chFunctionCode', full_name='TranseArray.TranseData.m_chFunctionCode', index=4,
number=5, type=13, cpp_type=3, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_chOrderKind', full_name='TranseArray.TranseData.m_chOrderKind', index=5,
number=6, type=13, cpp_type=3, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_chBSFlag', full_name='TranseArray.TranseData.m_chBSFlag', index=6,
number=7, type=13, cpp_type=3, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nTradePrice', full_name='TranseArray.TranseData.m_nTradePrice', index=7,
number=8, type=1, cpp_type=5, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nTradeVolume', full_name='TranseArray.TranseData.m_nTradeVolume', index=8,
number=9, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nAskOrder', full_name='TranseArray.TranseData.m_nAskOrder', index=9,
number=10, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='m_nBidOrder', full_name='TranseArray.TranseData.m_nBidOrder', index=10,
number=11, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=84,
serialized_end=320,
)
_TRANSEARRAY = _descriptor.Descriptor(
name='TranseArray',
full_name='TranseArray',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='dataArray', full_name='TranseArray.dataArray', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_TRANSEARRAY_TRANSEDATA, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=24,
serialized_end=320,
)
_TRANSEARRAY_TRANSEDATA.containing_type = _TRANSEARRAY
_TRANSEARRAY.fields_by_name['dataArray'].message_type = _TRANSEARRAY_TRANSEDATA
DESCRIPTOR.message_types_by_name['TranseArray'] = _TRANSEARRAY
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
TranseArray = _reflection.GeneratedProtocolMessageType('TranseArray', (_message.Message,), {
'TranseData' : _reflection.GeneratedProtocolMessageType('TranseData', (_message.Message,), {
'DESCRIPTOR' : _TRANSEARRAY_TRANSEDATA,
'__module__' : 'TranseMessage_pb2'
# @@protoc_insertion_point(class_scope:TranseArray.TranseData)
})
,
'DESCRIPTOR' : _TRANSEARRAY,
'__module__' : 'TranseMessage_pb2'
# @@protoc_insertion_point(class_scope:TranseArray)
})
_sym_db.RegisterMessage(TranseArray)
_sym_db.RegisterMessage(TranseArray.TranseData)
# @@protoc_insertion_point(module_scope)

@ -0,0 +1,108 @@
import pymssql
import gzip
from ProtoBuffEntitys import TickMessage_pb2
from ProtoBuffEntitys import OrderMessage_pb2
from ProtoBuffEntitys import TranseMessage_pb2
from ProtoBuffEntitys import TickQueueMessage_pb2
from ProtoBuffEntitys import KLineMessage_pb2
def ReadTick(ip,user,pw,dbName,code,date):
conn = pymssql.connect(host=ip,user=user,password=pw,database=dbName)
cursor = conn.cursor()
sql = '''SELECT [S_INFO_WINDCODE]
,[TRADE_DT]
,[Bytes]
FROM [{0}].[dbo].[Tick]
WHERE [TRADE_DT]='{1}' AND [S_INFO_WINDCODE]='{2}' '''.format(dbName, date,code)
cursor.execute(sql)
rs = cursor.fetchall()
if len(rs) == 0:
return []
f_all=rs[0][2]
f_all=gzip.decompress(f_all)
dataArray=TickMessage_pb2.TickArray()
dataArray.ParseFromString(f_all)
return dataArray.dataArray
array=ReadTick('10.10.11.61','yuhaomiao','yhm9591','Level2BytesTick','000001.SZ',20200911)
print('''Tick Code:{0}'''.format(array[0].code))
def ReadTickQue(ip,user,pw,dbName,code,date):
conn = pymssql.connect(host=ip,user=user,password=pw,database=dbName)
cursor = conn.cursor()
sql = '''SELECT [S_INFO_WINDCODE]
,[TRADE_DT]
,[Bytes]
FROM [{0}].[dbo].[TickQue]
WHERE [TRADE_DT]='{1}' AND [S_INFO_WINDCODE]='{2}' '''.format(dbName, date,code)
cursor.execute(sql)
rs = cursor.fetchall()
if len(rs) == 0:
return []
f_all=rs[0][2]
f_all=gzip.decompress(f_all)
dataArray=TickQueueMessage_pb2.TickQueueArray()
dataArray.ParseFromString(f_all)
return dataArray.dataArray
array=ReadTickQue('10.10.11.61','yuhaomiao','yhm9591','Level2BytesTickQue','000001.SZ',20200911)
print('''TickQue Code:{0}'''.format(array[0].code))
def ReadTranse(ip,user,pw,dbName,code,date):
conn = pymssql.connect(host=ip,user=user,password=pw,database=dbName)
cursor = conn.cursor()
sql = '''SELECT [S_INFO_WINDCODE]
,[TRADE_DT]
,[Bytes]
FROM [{0}].[dbo].[Transe]
WHERE [TRADE_DT]='{1}' AND [S_INFO_WINDCODE]='{2}' '''.format(dbName, date,code)
cursor.execute(sql)
rs = cursor.fetchall()
if len(rs) == 0:
return []
f_all=rs[0][2]
f_all=gzip.decompress(f_all)
dataArray=TranseMessage_pb2.TranseArray()
dataArray.ParseFromString(f_all)
return dataArray.dataArray
array=ReadTranse('10.10.11.61','yuhaomiao','yhm9591','Level2BytesTranse','000001.SZ',20200911)
print('''Transe Code:{0}'''.format(array[0].code))
def ReadOrder(ip,user,pw,dbName,code,date):
conn = pymssql.connect(host=ip,user=user,password=pw,database=dbName)
cursor = conn.cursor()
sql = '''SELECT [S_INFO_WINDCODE]
,[TRADE_DT]
,[Bytes]
FROM [{0}].[dbo].[Order]
WHERE [TRADE_DT]='{1}' AND [S_INFO_WINDCODE]='{2}' '''.format(dbName, date,code)
cursor.execute(sql)
rs = cursor.fetchall()
if len(rs) == 0:
return []
f_all=rs[0][2]
f_all=gzip.decompress(f_all)
dataArray=OrderMessage_pb2.OrderArray()
dataArray.ParseFromString(f_all)
return dataArray.dataArray
array=ReadOrder('10.10.11.61','yuhaomiao','yhm9591','Level2BytesOrder','000001.SZ',20200911)
print('''Order Code:{0}'''.format(array[0].code))
def ReadKLine(ip,user,pw,dbName,code,date):
conn = pymssql.connect(host=ip,user=user,password=pw,database=dbName)
cursor = conn.cursor()
sql = '''SELECT [S_INFO_WINDCODE]
,[TRADE_DT]
,[Bytes]
FROM [{0}].[dbo].[KLine]
WHERE [TRADE_DT]='{1}' AND [S_INFO_WINDCODE]='{2}' '''.format(dbName, date,code)
cursor.execute(sql)
rs = cursor.fetchall()
if len(rs) == 0:
return []
f_all=rs[0][2]
f_all=gzip.decompress(f_all)
dataArray=KLineMessage_pb2.KLineArray()
dataArray.ParseFromString(f_all)
return dataArray.dataArray
array=ReadKLine('10.10.11.61','yuhaomiao','yhm9591','Level2BytesKLine','000001.SZ',20200911)
print('''KLine Code:{0}'''.format(array[0].code))

@ -0,0 +1,3 @@
An example program that reads the byte stream from the database in Python and decodes it back into structured data is provided in ReadTickFromDB.py.
The functions depend on three external libraries: pymssql, gzip, and google.protobuf (the latter can be installed with pip install protobuf); all three must be available before running the program.
After a successful read, each instrument is parsed into an array-of-structs data structure, which can then be processed further.
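For reference, here is a minimal sketch of that decode flow, using the Tick table as an example; the host, user, password, stock code and trade date below are placeholders, not real values:

import gzip
import pymssql
from ProtoBuffEntitys import TickMessage_pb2

# Placeholder connection parameters -- substitute your own.
conn = pymssql.connect(host='<ip>', user='<user>', password='<pw>', database='Level2BytesTick')
cursor = conn.cursor()
cursor.execute(
    "SELECT [S_INFO_WINDCODE], [TRADE_DT], [Bytes] FROM [Level2BytesTick].[dbo].[Tick] "
    "WHERE [TRADE_DT]='20200911' AND [S_INFO_WINDCODE]='000001.SZ'")
rows = cursor.fetchall()
if rows:
    blob = gzip.decompress(rows[0][2])   # the [Bytes] column is gzip-compressed
    data = TickMessage_pb2.TickArray()   # container message generated from TickMessage.proto
    data.ParseFromString(blob)           # decode back into a list of TickData entries
    print(len(data.dataArray), data.dataArray[0].code)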

@ -0,0 +1,12 @@
n=1000000
date=take(2006.01.01..2006.01.31, n);
x=rand(10.0, n);
t=table(date, x);
login("admin","123456")
db=database("dfs://valuedb", VALUE, 2006.01.01..2006.01.31)
pt = db.createPartitionedTable(t, `pt, `date);
pt.append!(t);
pt=loadTable("dfs://valuedb","pt")
select top 100 * from pt

@ -0,0 +1 @@
db=database("dfs://L2_ORDER", VALUE, 2019.01.01..2022.01.31)

@ -0,0 +1,13 @@
symbol = take(`AAPL, 6) join take(`FB, 5)
time = 2019.02.27T09:45:01.000000000 + [146, 278, 412, 445, 496, 789, 212, 556, 598, 712, 989]
price=173.27 173.26 173.24 173.25 173.26 173.27 161.51 161.50 161.49 161.50 161.51
quotes=table(symbol, time, price)
print(quotes)
weights = dict(`AAPL`FB, 0.6 0.4)
print(weights)
ETF = select symbol, time, price*weights[symbol] as price from quotes;
print(ETF)
select rowSum(ffill(last(price))) from ETF pivot by time, symbol;

@ -0,0 +1,10 @@
login('admin', '123456')
tt = table(100:0, `code`m_nDate`m_nTime`m_nSide`m_nPrice`m_nOrderItems`m_nABItems`m_nABVolume, [SYMBOL, DATE, TIME, INT, FLOAT, INT, INT, INT[]])
share t as gt
share tt as gtt
dropStreamTable(gt)

@ -0,0 +1,5 @@
login("admin","123456")
pt=loadTable("dfs://valuedb","pt")
getTabletsMeta("/valuedb/%", `pt, true);
select top 100 * from pt

File diff suppressed because one or more lines are too long

@ -0,0 +1,439 @@
import importlib
import gzip
import pickle
from pprint import pprint
from tqdm import tqdm
from tqdm.contrib.concurrent import process_map
import numpy as np
import pandas as pd
import dolphindb as ddb
import dolphindb.settings as keys
import sqlalchemy as sa
import ProtoBuffEntitys
def make_symbol(series):
return series.astype('int32').astype('str')\
.apply(str.zfill, args=(6,))\
.apply(lambda code : \
code + '.SH' if code[:2] == '60' \
else code + '.SZ')
def make_date(series):
return pd.to_datetime(
series.astype(str), format='%Y%m%d')
def make_nparray(series):
return series.apply(lambda x : np.array(x))
def make_time(series):
# m_nTime is encoded as HHMMSSmmm; convert each component to milliseconds
s_hr = series // 10000000 * 3600000
s_min = series % 10000000 // 100000 * 60000
s_sec = series % 100000 // 1000 * 1000
s_ms = series % 1000
return pd.to_timedelta(s_hr + s_min + s_sec + s_ms, unit='ms')
class DDBLoader(object):
"""
0. Read the calendar data from SQL Server and build the member variable df_calendar; df_calendar can be pickled locally as a cache.
|- `def make_calendar_df(self) -> df_calendar`
1. Create the DolphinDB database; the partition scheme is derived from the calendar data.
|- `def create_ddb_database(self, df_calendar) -> void`
|- `def load_ddb_database(self) -> void`
2. Create the calendar table in the DolphinDB database.
|- `def create_ddb_calendar(self, df_calendar) -> void`
3. Create the distributed (partitioned) table structures in DolphinDB.
|- `create_ddb_partition_table(self, hft_type_name)`
|- `_make_table_skeleton(self, hft_type_name, capacity) -> memory_table_name`
4. Transcribe the high-frequency data from SQL Server into the DolphinDB database.
|- `dump_hft_to_ddb(self, type_name, stock_id, trade_date=None)`
"""
hft_type_list = ['KLine', 'Order', 'Tick', 'TickQueue', 'Transe']
protobuff_name_dict = {
name : f"{name}Message_pb2" for name in hft_type_list
}
protobuff_module_dict = {
type_name : importlib.import_module(f".{module_name}", package='ProtoBuffEntitys')
for type_name, module_name in protobuff_name_dict.items()
}
protobuff_desc_dict = {
type_name : eval(f"ProtoBuffEntitys.{module_name}.{type_name}Array.{type_name}Data.DESCRIPTOR")
for type_name, module_name in protobuff_name_dict.items()
}
mssql_name_dict = {
type_name : (
f"{type_name}" if type_name != 'TickQueue' \
else f"TickQue"
) for type_name in hft_type_list
}
# The database path and the database (handle) name do not have to match
ddb_path = "dfs://hft_ts_stock"
ddb_dbname = "db_ts_stock"
ddb_memory_table_suffix = "Memory"
ddb_partition_table_suffix = "Partitioned"
# The calendar table does not need partitioning, so a separate database would be required.
# That database could also just be a plain CSV file; the difference between the two is not clear yet.
#ddb_calendar_path = "dfs://daily_calendar"
#ddb_calendar_dbname = "db_calendar"
ddb_calendar_table_name = "Calendar"
col_type_mapping = {
'code' : 'SYMBOL',
'm_nDate' : 'DATE',
'm_nTime' : 'TIME',
1 : 'FLOAT',
3 : 'INT',
5 : 'INT',
13 : 'INT',
}
mssql_config = {
'host' : '192.168.1.7',
'username' : 'sa',
'password' : 'passw0rd!'
}
ddb_config = {
'host' : '192.168.1.167',
'username' : 'admin',
'password' : '123456'
}
default_table_capacity = 10000
def __init__(self):
self.mssql_engine = sa.create_engine(
"mssql+pyodbc://{username}:{password}@{host}/master?driver=ODBC+Driver+18+for+SQL+Server".format(**self.mssql_config),
connect_args = {
"TrustServerCertificate": "yes"
}, echo=False
)
self.ddb_sess = ddb.session(self.ddb_config['host'], 8848)
self.ddb_sess.login(self.ddb_config['username'], self.ddb_config['password'])
def init_ddb_database(self, df_calendar):
"""
1. Create the DolphinDB database
2. Create the calendar table
3. Create the partitioned data tables
"""
# It is more convenient to have df_calendar supplied by the caller
#df_calendar = self.make_calendar_df()
self.create_ddb_database(df_calendar)
self.create_ddb_calendar(df_calendar)
for hft_type_name in self.hft_type_list:
self.create_ddb_partition_table(hft_type_name)
def init_ddb_table_data(self, df_calendar):
"""
Loop over each stock and transcribe its data into the partitioned tables
"""
stock_list = df_calendar['code'].unique().astype('str')
for hft_type_name in self.hft_type_list:
print('Will work on hft type:', hft_type_name)
with tqdm(stock_list) as pbar:
for stock_id in pbar:
pbar.set_description(f"Working on stock {stock_id}")
self.dump_hft_to_ddb(hft_type_name, stock_id, pbar=pbar)
def _get_stock_date_list(self, cache=False):
"""
Deprecated: This function is replaced by `create_ddb_calendar()`.
"""
if cache:
with open('tmp.pkl', 'rb') as fin:
stock_list, date_list = pickle.load(fin)
else:
with self.mssql_engine.connect() as conn:
# Query from the KLine table, mainly because it is the smallest one
stat = "select distinct S_INFO_WINDCODE, TRADE_DT from Level2BytesKline.dbo.KLine"
rs = conn.execute(stat)
stock_date_list = [(stock_name, date) for stock_name, date in rs.fetchall()]
stock_list, date_list = zip(*stock_date_list)
# cache
#with open('tmp.pkl', 'wb') as fout:
# pickle.dump((stock_list, date_list), fout)
return pd.Series(stock_list, dtype='str').unique(), \
pd.Series(date_list, dtype='datetime64[D]').unique()
def create_ddb_database(self, pd_calendar):
# Build `stock_list` and `date_list` from `pd_calendar`
stock_list = pd_calendar['code'].unique().astype('str')
date_list = pd_calendar['m_nDate'].unique().astype('datetime64[D]')
# All high-frequency stock data can be kept in one database as separate tables.
# The partition scheme is bound to the database, so every table in the same database must use the same scheme.
# For high-frequency stock data we use a COMPO partition whose two sub-databases are both VALUE-partitioned, on code and m_nDate respectively.
if self.ddb_sess.existsDatabase(self.ddb_path):
print('Will drop database:', self.ddb_path)
self.ddb_sess.dropDatabase(self.ddb_path)
# To create a COMPO-partitioned database, two simply partitioned sub-databases must be created first.
# Here the sub-databases partition by date first and then by stock.
# Please note that when creating a DFS database with COMPO domain,
# the parameter dbPath for each partition level must be either an empty string or unspecified.
db_date = self.ddb_sess.database('db_date', partitionType=keys.VALUE, partitions=date_list, dbPath='')
# Using a DolphinDB script statement directly seems more convenient here
#db_stock = self.ddb_sess.database('db_stock', partitionType=keys.VALUE, partitions=stock_list, dbPath='')
self.ddb_sess.run("""
db_stock = database("", 1, symbol({partitions}))
""".format(
partitions = '`' + '`'.join(stock_list)
))
self.ddb_sess.run("""
{dbName} = database(
directory = '{dbPath}',
partitionType = COMPO,
partitionScheme = [db_date, db_stock],
engine = "TSDB")
""".format(
dbName = self.ddb_dbname,
dbPath = self.ddb_path
))
def load_ddb_database(self):
db_date = self.ddb_sess.database('db_date', dbPath='')
db_stock = self.ddb_sess.database('db_stock', dbPath='')
self.ddb_sess.run("{dbName} = database(directory='{dbPath}')".format(
dbName = self.ddb_dbname,
dbPath = self.ddb_path
))
def create_ddb_calendar(self, df_calendar):
mem_table = self.ddb_calendar_table_name + self.ddb_memory_table_suffix
per_table = self.ddb_calendar_table_name
# 1. Create a temporary in-memory table
# The calendar has roughly (number of stocks) * (number of trading days) rows
self.ddb_sess.run("""
{table_name} = table({capacity}:0, {col_names}, [{col_types}]);
""".format(
table_name = mem_table,
capacity = 5000 * 1000,
col_names = '`code`m_nDate',
col_types = "SYMBOL, DATE"
))
print('Did create the memory table')
# 2. Insert all (code, date) records into the in-memory table
appender = ddb.tableAppender(tableName=mem_table, ddbSession=self.ddb_sess)
num = appender.append(df_calendar)
print('Did append calendar data into ddb memory table, return code', num)
# 3. Before creating the persistent table, a database object normally has to be created from a path first.
# However, it seems that a single database can hold both partitioned and non-partitioned tables,
# so no new database is created here for now.
# Because the original database uses the TSDB engine, sortColumns must be specified when calling createTable.
#self.ddb_sess.run("""
# {db_name} =
#""")
# 4. Create the persistent table directly from the in-memory table
if self.ddb_sess.existsTable(self.ddb_path, per_table):
self.ddb_sess.dropTable(self.ddb_path, per_table)
self.ddb_sess.run("""
tableInsert(createTable(
dbHandle={ddb_dbname},
table={mem_table},
tableName=`{per_table},
sortColumns=`code`m_nDate,
compressMethods={{"m_nDate":"delta"}}
), {mem_table})
""".format(
ddb_dbname = self.ddb_dbname,
mem_table = mem_table,
per_table = per_table
))
print('Did create the persistent table with the memory table')
def make_calendar_df(self):
# Query from the KLine table, mainly because it is the smallest one
with self.mssql_engine.connect() as conn:
stat = "select distinct S_INFO_WINDCODE, TRADE_DT from Level2BytesKline.dbo.KLine"
rs = conn.execute(stat)
stock_date_list = [(stock_name, date) for stock_name, date in rs.fetchall()]
df_calendar = pd.DataFrame(stock_date_list, columns=['code', 'm_nDate'])
df_calendar['m_nDate'] = make_date(df_calendar['m_nDate'])
print('Did make the DataFrame for calendar')
return df_calendar
def _make_table_skeleton(self, hft_type_name, table_capacity=default_table_capacity):
def _make_tbl_config(field_list):
"""
Build the standard DolphinDB column-name list and column-type list from the Descriptor.fields of a ProtoBuffEntity object
"""
col_name_list, col_type_list = [], []
for desc in field_list:
col_name_list.append(desc.name)
# Columns with a special type mapping; currently only the `code`, `m_nDate` and `m_nTime` fields
if desc.name in self.col_type_mapping:
col_type_list.append(self.col_type_mapping[desc.name])
# Map the ProtoBuffEntity type number to the corresponding DolphinDB type.
# If the default value is a list, the DolphinDB type additionally gets an array marker ("[]").
# The ProtoBuffEntity type number only covers scalar types; arrays are detected via `default_value`.
else:
col_type = self.col_type_mapping[desc.type]
if isinstance(desc.default_value, list):
col_type += '[]'
col_type_list.append(col_type)
return col_name_list, col_type_list
desc_obj = self.protobuff_desc_dict[hft_type_name]
col_name_list, col_type_list = _make_tbl_config(desc_obj.fields)
table_name = hft_type_name + self.ddb_memory_table_suffix
print('-' * 80)
print('Will create table structure:', table_name)
self.ddb_sess.run("""
{table_name} = table({capacity}:0, {col_names}, [{col_types}]);
""".format(
table_name = table_name,
capacity = table_capacity,
col_names = '`' + '`'.join(col_name_list),
col_types = ', '.join([f"'{type_name}'" for type_name in col_type_list])
))
res = self.ddb_sess.run(f"schema({table_name}).colDefs")
pprint(res)
print('-' * 80)
return table_name
def create_ddb_partition_table(self, hft_type_name):
memory_table_name = self._make_table_skeleton(hft_type_name, 10)
partition_table_name = hft_type_name + self.ddb_partition_table_suffix
print('-' * 80)
print('Will create partitioned table:', partition_table_name)
self.ddb_sess.run("""
db_ts_stock.createPartitionedTable(
table = {memory_table_name},
tableName = `{partition_table_name},
partitionColumns = `m_nDate`code,
sortColumns = `code`m_nDate`m_nTime,
compressMethods = {{m_nDate:"delta", m_nTime:"delta"}}
)
""".format(
memory_table_name = memory_table_name,
partition_table_name = partition_table_name
))
res = self.ddb_sess.run(f"schema(loadTable('{self.ddb_path}', '{partition_table_name}')).colDefs")
pprint(res)
print('-' * 80)
def _make_stock_daily_df(self, blob, type_name):
blob = gzip.decompress(blob)
dataArray = eval(f"ProtoBuffEntitys.{type_name}Message_pb2.{type_name}Array()")
dataArray.ParseFromString(blob)
data_dict_list = [
{field.name : val for field, val in entry.ListFields()}
for entry in dataArray.dataArray
]
array_type_list = [
field.name
for field, val in dataArray.dataArray[0].ListFields()
if isinstance(field.default_value, list)
]
#pprint(array_type_list)
df = pd.DataFrame(data_dict_list)
df['code'] = make_symbol(df['code'])
df['m_nDate'] = make_date(df['m_nDate'])
df['m_nTime'] = df['m_nDate'] + make_time(df['m_nTime'])
for field_name in array_type_list:
df[field_name] = make_nparray(df[field_name])
#print(f"Did create ddb table for dataframe of shape {df.shape}")
# self.make_table_skeleton(type_name, df.shape[0])
self.ddb_sess.upload({type_name : df})
return type_name
def dump_hft_to_ddb(self, type_name, stock_id, trade_date=None, pbar=None, num_workers=4):
def _dump_stock_daily(row):
df_table_name = self._make_stock_daily_df(row[2], type_name)
self.ddb_sess.run("tableInsert(loadTable('{dbPath}', `{partitioned_table_name}), {df_table_name})".format(
dbPath = self.ddb_path,
partitioned_table_name = type_name + self.ddb_partition_table_suffix,
df_table_name = df_table_name
))
# Experiments show that batching the query per stock still gives acceptable efficiency.
# The MSSQL index is on (S_INFO_WINDCODE, TRADE_DT).
with self.mssql_engine.connect() as conn:
stat = """
select * from [Level2Bytes{mssql_type_name}].dbo.[{mssql_type_name}]
where S_INFO_WINDCODE='{stock_id}'
""".format(
mssql_type_name = self.mssql_name_dict[type_name],
stock_id = stock_id
)
rs = conn.execute(stat)
if pbar:
pbar.set_description(f"Did get the result set for stock {stock_id} from mssql")
else:
print(f"Did get the result set for stock {stock_id} from mssql")
# Each row holds all high-frequency records of one stock for one trading day.
# Use multiple processes to speed things up.
#with tqdm(rs.fetchall(), leave=False) as pbar:
# for row in pbar:
# pbar.set_description(f"Working on {row[0]} {row[1]}")
process_map(_dump_stock_daily, rs.fetchall(), max_workers=num_workers)
def main():
loader = DDBLoader()
df_calendar = loader.make_calendar_df()
loader.init_ddb_database(df_calendar)
print('Did finish init_ddb_database')
loader.init_ddb_table_data(df_calendar)
print('Did finish init_table_data')
if __name__ == '__main__':
main()

@ -0,0 +1,829 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
" import sqlalchemy as sa\n",
" engine = sa.create_engine(\n",
" 'mssql+pyodbc://sa:passw0rd!@192.168.1.7/master?driver=ODBC+Driver+18+for+SQL+Server',\n",
" connect_args = {\n",
" \"TrustServerCertificate\": \"yes\"\n",
" }, echo=False)"
]
},
{
"cell_type": "code",
"execution_count": 39,
"metadata": {},
"outputs": [],
"source": [
"with engine.connect() as conn:\n",
" stat = \"select distinct S_INFO_WINDCODE, TRADE_DT from Level2BytesKLine.dbo.KLine\"\n",
" rs = conn.execute(stat)\n",
" stock_date_list = [(stock_name, date) for stock_name, date in rs.fetchall()]"
]
},
{
"cell_type": "code",
"execution_count": 40,
"metadata": {},
"outputs": [],
"source": [
"stock_list, trade_list = zip(*stock_date_list)"
]
},
{
"cell_type": "code",
"execution_count": 45,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"4843"
]
},
"execution_count": 45,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"len(set(stock_list))"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [
{
"ename": "NameError",
"evalue": "name 'blob' is not defined",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)",
"\u001b[1;32m/home/guofu/Workspaces/dolphin-dev/mssql.ipynb Cell 3\u001b[0m in \u001b[0;36m<cell line: 4>\u001b[0;34m()\u001b[0m\n\u001b[1;32m <a href='vscode-notebook-cell://ssh-remote%2B7b22686f73744e616d65223a2247756f66752d5043227d/home/guofu/Workspaces/dolphin-dev/mssql.ipynb#W2sdnNjb2RlLXJlbW90ZQ%3D%3D?line=0'>1</a>\u001b[0m \u001b[39mimport\u001b[39;00m \u001b[39mgzip\u001b[39;00m\n\u001b[1;32m <a href='vscode-notebook-cell://ssh-remote%2B7b22686f73744e616d65223a2247756f66752d5043227d/home/guofu/Workspaces/dolphin-dev/mssql.ipynb#W2sdnNjb2RlLXJlbW90ZQ%3D%3D?line=1'>2</a>\u001b[0m \u001b[39mfrom\u001b[39;00m \u001b[39mProtoBuffEntitys\u001b[39;00m \u001b[39mimport\u001b[39;00m TranseMessage_pb2\n\u001b[0;32m----> <a href='vscode-notebook-cell://ssh-remote%2B7b22686f73744e616d65223a2247756f66752d5043227d/home/guofu/Workspaces/dolphin-dev/mssql.ipynb#W2sdnNjb2RlLXJlbW90ZQ%3D%3D?line=3'>4</a>\u001b[0m f_all \u001b[39m=\u001b[39m gzip\u001b[39m.\u001b[39mdecompress(blob)\n\u001b[1;32m <a href='vscode-notebook-cell://ssh-remote%2B7b22686f73744e616d65223a2247756f66752d5043227d/home/guofu/Workspaces/dolphin-dev/mssql.ipynb#W2sdnNjb2RlLXJlbW90ZQ%3D%3D?line=4'>5</a>\u001b[0m dataArray \u001b[39m=\u001b[39m TranseMessage_pb2\u001b[39m.\u001b[39mTranseArray()\n\u001b[1;32m <a href='vscode-notebook-cell://ssh-remote%2B7b22686f73744e616d65223a2247756f66752d5043227d/home/guofu/Workspaces/dolphin-dev/mssql.ipynb#W2sdnNjb2RlLXJlbW90ZQ%3D%3D?line=5'>6</a>\u001b[0m dataArray\u001b[39m.\u001b[39mParseFromString(f_all)\n",
"\u001b[0;31mNameError\u001b[0m: name 'blob' is not defined"
]
}
],
"source": [
"import gzip\n",
"from ProtoBuffEntitys import TranseMessage_pb2\n",
"\n",
"f_all = gzip.decompress(blob)\n",
"dataArray = TranseMessage_pb2.TranseArray()\n",
"dataArray.ParseFromString(f_all)\n",
"\n",
"print(dataArray.dataArray)"
]
},
{
"cell_type": "code",
"execution_count": 29,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"code 600843\n",
"m_nDate 20130104\n",
"m_nTime 92506510\n",
"m_nIndex 0\n",
"m_chFunctionCode 0\n",
"m_chOrderKind 0\n",
"m_chBSFlag 83\n",
"m_nTradePrice 6.8\n",
"m_nTradeVolume 200\n",
"m_nAskOrder 0\n",
"m_nBidOrder 0\n"
]
}
],
"source": [
"for item in dataArray.dataArray:\n",
" fields = item.ListFields()\n",
" for desc, val in fields:\n",
" print(desc.name, val)\n",
" break\n",
" "
]
},
{
"cell_type": "code",
"execution_count": 62,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"<class 'sqlalchemy.engine.row.LegacyRow'>\n"
]
}
],
"source": [
"with engine.connect() as conn:\n",
" stat = \"select top 1 * from Level2BytesTickQue.dbo.TickQue\"\n",
" rs = conn.execute(stat)\n",
"\n",
" for row in rs.fetchall():\n",
" print(type(row))\n",
" blob = row[2]\n"
]
},
{
"cell_type": "code",
"execution_count": 63,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1499694"
]
},
"execution_count": 63,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"import gzip\n",
"from ProtoBuffEntitys import TickQueueMessage_pb2\n",
"\n",
"f_all = gzip.decompress(blob)\n",
"dataArray = TickQueueMessage_pb2.TickQueueArray()\n",
"dataArray.ParseFromString(f_all)"
]
},
{
"cell_type": "code",
"execution_count": 80,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]\n",
"<class 'google.protobuf.pyext._message.RepeatedScalarContainer'>\n",
"[0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n",
" 0 0 0 0 0 0 0 0 0 0 0 0 0]\n"
]
}
],
"source": [
"import numpy as np\n",
"print(dataArray.dataArray[0].m_nABVolume)\n",
"print(type(dataArray.dataArray[0].m_nABVolume))\n",
"print(np.array(dataArray.dataArray[0].m_nABVolume))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import urllib\n",
"\n",
"server = 'serverName\\instanceName,port' # to specify an alternate port\n",
"database = 'mydb' \n",
"username = 'myusername' \n",
"password = 'mypassword'\n",
"\n",
"params = urllib.parse.quote_plus(\"'DRIVER={ODBC Driver 17 for SQL Server};SERVER='+server+';DATABASE='+database+';UID='+username+';PWD='+ password\")\n",
"\n",
"engine = sa.ceate_engine(\"mssql+pyodbc:///?odbc_connect=%s\" % params)"
]
},
{
"cell_type": "code",
"execution_count": 25,
"metadata": {},
"outputs": [],
"source": [
"from ProtoBuffEntitys import KLineMessage_pb2, OrderMessage_pb2, TickMessage_pb2, TickQueueMessage_pb2, TranseMessage_pb2"
]
},
{
"cell_type": "code",
"execution_count": 33,
"metadata": {},
"outputs": [],
"source": [
"HFT_TYPE_LIST = [\n",
" 'KLine', 'Order', 'Tick', 'TickQueue', 'Transe'\n",
"]\n",
"PROTOBUFF_NAME_LIST = [f\"{name}Message_pb2\" for name in PROTOBUFF_NAME_LIST]\n",
"\n",
"import importlib\n",
"\n",
"PROTOBUFF_MODULE_LIST = [importlib.import_module(f\".{name}\", package='ProtoBuffEntitys') for name in PROTOBUFF_MODULE_LIST]"
]
},
{
"cell_type": "code",
"execution_count": 34,
"metadata": {},
"outputs": [
{
"ename": "SyntaxError",
"evalue": "invalid syntax (99226286.py, line 1)",
"output_type": "error",
"traceback": [
"\u001b[0;36m Input \u001b[0;32mIn [35]\u001b[0;36m\u001b[0m\n\u001b[0;31m from ProtoBuffEntitys import KLineMessage_pb2.KLineArray.KLineData.DESCRIPTOR\u001b[0m\n\u001b[0m ^\u001b[0m\n\u001b[0;31mSyntaxError\u001b[0m\u001b[0;31m:\u001b[0m invalid syntax\n"
]
}
],
"source": [
"import ProtoBuffEntitys.KLineMessage_pb2.KLineArray.KLineData.DESCRIPTOR "
]
},
{
"cell_type": "code",
"execution_count": 22,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"'code'\n",
"'m_nDate'\n",
"'m_nTime'\n",
"'m_nOpen'\n",
"'m_nHigh'\n",
"'m_nLow'\n",
"'m_nClose'\n",
"'m_iVolume'\n",
"'m_iTurover'\n",
"'m_nMatchItems'\n"
]
}
],
"source": [
"from pprint import pprint\n",
"\n",
"for field in KLineMessage_pb2.KLineArray.KLineData.DESCRIPTOR.fields:\n",
" pprint(field.name)\n"
]
},
{
"cell_type": "code",
"execution_count": 46,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"'\\n db_ts_stock.createPartitionedTable(\\n table = t, \\n tableName = abd, \\n partitionColumns = `code`m_nDate, \\n sortColumns = `code`m_nDate`m_nTime,\\n compressMethods = {m_nDate:\"delta\", m_nTime:\"delta\"}\\n )\\n'"
]
},
"execution_count": 46,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"\"\"\"\n",
" db_ts_stock.createPartitionedTable(\n",
" table = t, \n",
" tableName = {hft_type_name}, \n",
" partitionColumns = `code`m_nDate, \n",
" sortColumns = `code`m_nDate`m_nTime,\n",
" compressMethods = {{m_nDate:\"delta\", m_nTime:\"delta\"}}\n",
" )\n",
"\"\"\".format(\n",
" hft_type_name = \"abd\"\n",
")"
]
},
{
"cell_type": "code",
"execution_count": 61,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"('600519.SH', '20210531', b'\\x1f\\x8b\\x08\\x00\\x00\\x00\\x00\\x00\\x04\\x00\\x8c\\xddy\\\\^Yz\\x1f\\xf8tuUw\\xed%$\\xb4K\\x15\\x12\\x97+\\xd7V}\\x9cr\\xb7\\xa7\\xd3\\xd3^\\xd2\\xe5`\\xbb\\xd3\\xe9\\xe9\\xd8\\ ... (1454851 characters truncated) ... 17\\xb2\\x87\\xe4\\xce\\x94T\\xe8\\xe2O\\xc8\\xe6\\x10\\x1c=\\r\\xd9+\\x02\\xcc\\x9c!\\xc56\\xdb\\xef\\xb3uf\\xe5,d\\x9f\\xc8\\xd1\\xbc$\\xe7~)\\xe4\\xff\\x075\\xea@$i\\x1c\\x1a\\x00')\n"
]
}
],
"source": [
"with engine.connect() as conn:\n",
" stat = \"select * from Level2BytesOrder.dbo.[Order] where S_INFO_WINDCODE='600519.SH'\"\n",
" rs = conn.execute(stat)\n",
" for row in rs.fetchall():\n",
" print(row)\n",
" break"
]
},
{
"cell_type": "code",
"execution_count": 53,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"(4708,)\n"
]
}
],
"source": [
"with engine.connect() as conn:\n",
" stat = \"select count(*) from Level2BytesKLine.dbo.KLine where TRADE_DT='20220608'\"\n",
" rs = conn.execute(stat)\n",
" for row in rs.fetchall():\n",
" print(row)"
]
},
{
"cell_type": "code",
"execution_count": 58,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"('NonClusteredIndex-Order', 'nonclustered located on PRIMARY', 'S_INFO_WINDCODE, TRADE_DT')\n"
]
}
],
"source": [
"engine = sa.create_engine(\n",
" 'mssql+pyodbc://sa:passw0rd!@192.168.1.7/Level2BytesOrder?driver=ODBC+Driver+18+for+SQL+Server',\n",
" connect_args = {\n",
" \"TrustServerCertificate\": \"yes\"\n",
" }, echo=False)\n",
" \n",
"with engine.connect() as conn:\n",
" stat = \"EXEC sp_helpindex 'Level2BytesOrder.dbo.[Order]'\"\n",
" rs = conn.execute(stat)\n",
" for row in rs.fetchall():\n",
" print(row)"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"('master', 1, datetime.datetime(2003, 4, 8, 9, 13, 36, 390000))\n",
"('tempdb', 2, datetime.datetime(2022, 8, 3, 4, 47, 56, 987000))\n",
"('model', 3, datetime.datetime(2003, 4, 8, 9, 13, 36, 390000))\n",
"('msdb', 4, datetime.datetime(2022, 5, 29, 16, 33, 42, 60000))\n",
"('test', 5, datetime.datetime(2022, 8, 3, 4, 52, 46, 450000))\n",
"('Level1BytesIndexFutureKLine', 6, datetime.datetime(2022, 8, 3, 5, 2, 17, 660000))\n",
"('Level2BytesConvBondKLine', 7, datetime.datetime(2022, 8, 3, 5, 2, 30, 837000))\n",
"('Level2BytesConvBondOrder', 8, datetime.datetime(2022, 8, 3, 5, 2, 39, 987000))\n",
"('Level2BytesConvBondTick', 9, datetime.datetime(2022, 8, 3, 5, 2, 54, 587000))\n",
"('Level2BytesConvBondTickQue', 10, datetime.datetime(2022, 8, 3, 5, 3, 58, 270000))\n",
"('Level2BytesConvBondTranse', 11, datetime.datetime(2022, 8, 3, 5, 4, 14, 500000))\n",
"('Level2BytesETFKLine', 12, datetime.datetime(2022, 8, 3, 5, 4, 27, 270000))\n",
"('Level2BytesETFOrder', 13, datetime.datetime(2022, 8, 3, 5, 4, 43, 457000))\n",
"('Level2BytesTick', 14, datetime.datetime(2022, 8, 3, 8, 51, 40, 633000))\n",
"('Level2BytesTickQue', 15, datetime.datetime(2022, 8, 3, 8, 51, 58, 650000))\n",
"('Level2BytesTranse', 16, datetime.datetime(2022, 8, 3, 8, 52, 14, 103000))\n",
"('Level2BytesOrder', 17, datetime.datetime(2022, 8, 3, 8, 52, 27, 740000))\n",
"('Level2BytesKLine', 18, datetime.datetime(2022, 8, 3, 8, 52, 44, 610000))\n",
"('Level2BytesIndexTick', 19, datetime.datetime(2022, 8, 3, 9, 22, 36, 850000))\n",
"('Level2BytesIndexKLine', 20, datetime.datetime(2022, 8, 3, 9, 22, 57, 527000))\n",
"('Level2BytesETFTranse', 21, datetime.datetime(2022, 8, 3, 9, 23, 53, 713000))\n",
"('Level2BytesETFTickQue', 22, datetime.datetime(2022, 8, 3, 9, 24, 9, 87000))\n",
"('Level2BytesETFTick', 23, datetime.datetime(2022, 8, 3, 9, 24, 26, 267000))\n"
]
}
],
"source": [
"stat = \"\"\"SELECT name, database_id, create_date FROM sys.databases; \n",
"\"\"\"\n",
"\n",
"with engine.connect() as conn:\n",
" rs = conn.execute(stat)\n",
" for row in rs.fetchall():\n",
" print(row)"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [],
"source": [
"a = [1,2,3]\n",
"b = {x : (x + 1 if x != 3 else x + 2) for x in a}"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"import pandas as pd"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"\n",
"with engine.connect() as conn:\n",
" stat = \"select distinct S_INFO_WINDCODE, TRADE_DT from Level2BytesKline.dbo.KLine\"\n",
" rs = conn.execute(stat)\n",
" stock_date_list = [(stock_name, date) for stock_name, date in rs.fetchall()]\n",
"\n"
]
},
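{
"cell_type": "markdown",
"metadata": {},
"source": [
"The distinct codes and trade dates can be split out of these pairs and cached locally; a sketch below, reusing the `tmp.pkl` file name that a later cell loads from. The exact contents of the real cache may differ."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import pickle as pkl\n",
"\n",
"# Sorted, de-duplicated code and date lists derived from the (code, date) pairs.\n",
"stock_list = sorted({code for code, _ in stock_date_list})\n",
"date_list = sorted({date for _, date in stock_date_list})\n",
"\n",
"with open('tmp.pkl', 'wb') as fout:\n",
"    pkl.dump((stock_list, date_list), fout)"
]
},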
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>code</th>\n",
" <th>m_nDate</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>000001.SZ</td>\n",
" <td>20130104</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>000001.SZ</td>\n",
" <td>20130107</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2</th>\n",
" <td>000001.SZ</td>\n",
" <td>20130108</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3</th>\n",
" <td>000001.SZ</td>\n",
" <td>20130109</td>\n",
" </tr>\n",
" <tr>\n",
" <th>4</th>\n",
" <td>000001.SZ</td>\n",
" <td>20130110</td>\n",
" </tr>\n",
" <tr>\n",
" <th>...</th>\n",
" <td>...</td>\n",
" <td>...</td>\n",
" </tr>\n",
" <tr>\n",
" <th>7379201</th>\n",
" <td>689009.SH</td>\n",
" <td>20220704</td>\n",
" </tr>\n",
" <tr>\n",
" <th>7379202</th>\n",
" <td>689009.SH</td>\n",
" <td>20220705</td>\n",
" </tr>\n",
" <tr>\n",
" <th>7379203</th>\n",
" <td>689009.SH</td>\n",
" <td>20220706</td>\n",
" </tr>\n",
" <tr>\n",
" <th>7379204</th>\n",
" <td>689009.SH</td>\n",
" <td>20220707</td>\n",
" </tr>\n",
" <tr>\n",
" <th>7379205</th>\n",
" <td>689009.SH</td>\n",
" <td>20220708</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"<p>7379206 rows × 2 columns</p>\n",
"</div>"
],
"text/plain": [
" code m_nDate\n",
"0 000001.SZ 20130104\n",
"1 000001.SZ 20130107\n",
"2 000001.SZ 20130108\n",
"3 000001.SZ 20130109\n",
"4 000001.SZ 20130110\n",
"... ... ...\n",
"7379201 689009.SH 20220704\n",
"7379202 689009.SH 20220705\n",
"7379203 689009.SH 20220706\n",
"7379204 689009.SH 20220707\n",
"7379205 689009.SH 20220708\n",
"\n",
"[7379206 rows x 2 columns]"
]
},
"execution_count": 7,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"pd.DataFrame(stock_date_list, columns=['code', 'm_nDate'])"
]
},
{
"cell_type": "code",
"execution_count": 51,
"metadata": {},
"outputs": [],
"source": [
"import pandas as pd\n",
"\n",
"df = pd.read_csv('ddb_dump_journal.csv')"
]
},
{
"cell_type": "code",
"execution_count": 52,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>type_name</th>\n",
" <th>stock_id</th>\n",
" <th>status</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
"Empty DataFrame\n",
"Columns: [type_name, stock_id, status]\n",
"Index: []"
]
},
"execution_count": 52,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"df"
]
},
{
"cell_type": "code",
"execution_count": 53,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"/tmp/ipykernel_259767/4140820348.py:1: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\n",
" df = df.append({'type_name':'KLine', 'stock_id':'000001.SZ', 'status':'OK'},ignore_index=True)\n"
]
}
],
"source": [
"df = df.append({'type_name':'KLine', 'stock_id':'000001.SZ', 'status':'OK'},ignore_index=True)"
]
},
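{
"cell_type": "markdown",
"metadata": {},
"source": [
"The FutureWarning above points at `pandas.concat`; the same journal entry can be appended without the deprecated API as follows."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Equivalent to the deprecated df.append call above.\n",
"new_row = pd.DataFrame([{'type_name': 'KLine', 'stock_id': '000001.SZ', 'status': 'OK'}])\n",
"df = pd.concat([df, new_row], ignore_index=True)"
]
},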
{
"cell_type": "code",
"execution_count": 54,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th></th>\n",
" <th></th>\n",
" </tr>\n",
" <tr>\n",
" <th>type_name</th>\n",
" <th>stock_id</th>\n",
" <th>status</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>KLine</th>\n",
" <th>000001.SZ</th>\n",
" <th>OK</th>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
"Empty DataFrame\n",
"Columns: []\n",
"Index: [(KLine, 000001.SZ, OK)]"
]
},
"execution_count": 54,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"df.set_index(['type_name', 'stock_id', 'status'], inplace=True)\n",
"df"
]
},
{
"cell_type": "code",
"execution_count": 56,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"Series([], Name: (KLine, 000001.SZ, OK), dtype: float64)"
]
},
"execution_count": 56,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"df.loc[('KLine', '000001.SZ', 'OK')]"
]
},
{
"cell_type": "code",
"execution_count": 57,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"True"
]
},
"execution_count": 57,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"('KLine', '000001.SZ', 'OK') in df.index"
]
},
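{
"cell_type": "markdown",
"metadata": {},
"source": [
"This membership test is how the dump journal could be used to skip work that already finished; a rough sketch, where `dump_one` is a hypothetical worker function."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Hypothetical use of the journal index: skip pairs already marked OK.\n",
"def need_dump(journal_df, type_name, stock_id):\n",
"    return (type_name, stock_id, 'OK') not in journal_df.index\n",
"\n",
"for type_name, stock_id in [('KLine', '000001.SZ'), ('KLine', '600519.SH')]:\n",
"    if need_dump(df, type_name, stock_id):\n",
"        print(f'would dump {type_name} / {stock_id}')\n",
"        # dump_one(type_name, stock_id)  # hypothetical worker\n",
"    else:\n",
"        print(f'skip {type_name} / {stock_id}: already OK')"
]
},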
{
"cell_type": "code",
"execution_count": 60,
"metadata": {},
"outputs": [],
"source": [
"import pickle as pkl\n",
"with open('tmp.pkl', 'rb') as fin:\n",
" stock_list, date_list = pkl.load(fin)"
]
},
{
"cell_type": "code",
"execution_count": 61,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"('20130104',\n",
" '20130107',\n",
" '20130108',\n",
" '20130109',\n",
" '20130110',\n",
" '20130111',\n",
" '20130114',\n",
" '20130115',\n",
" '20130116',\n",
" '20130117')"
]
},
"execution_count": 61,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"date_list[:10]"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.13"
},
"vscode": {
"interpreter": {
"hash": "5a0c795ff324b912f12ad95b94c9d776ccc7a75bdf6a126a4f44b3067472979e"
}
}
},
"nbformat": 4,
"nbformat_minor": 4
}

@ -0,0 +1,94 @@
import importlib
import gzip
import pickle
from pprint import pprint
from tqdm import tqdm
import numpy as np
import pandas as pd
import dolphindb as ddb
import dolphindb.settings as keys
import sqlalchemy as sa
import ProtoBuffEntitys
# NOTE: DDBLoader and the column helpers (make_symbol, make_date, make_time,
# make_nparray) are defined in the loader module committed alongside this
# script and are assumed to be importable here.


def main():
    from ProtoBuffEntitys import TickQueueMessage_pb2

    engine = sa.create_engine(
        "mssql+pyodbc://{username}:{password}@{host}/master?driver=ODBC+Driver+18+for+SQL+Server".format(**DDBLoader.mssql_config),
        connect_args = {
            "TrustServerCertificate": "yes"
        }, echo=False
    )

    s = ddb.session(DDBLoader.ddb_config['host'], 8848)
    s.login(DDBLoader.ddb_config['username'], DDBLoader.ddb_config['password'])

    with engine.connect() as conn:
        # Fetch one gzip-compressed protobuf blob from SQL Server.
        stat = "select top 1 * from Level2BytesTickQue.dbo.TickQue"
        rs = conn.execute(stat)
        for row in rs.fetchall():
            print(type(row))
            blob = gzip.decompress(row[2])
            dataArray = TickQueueMessage_pb2.TickQueueArray()
            dataArray.ParseFromString(blob)

            data_dict_list = [
                {field.name : val for field, val in entry.ListFields()}
                for entry in dataArray.dataArray
            ]
            df = pd.DataFrame(data_dict_list)
            df['code'] = make_symbol(df['code'])
            df['m_nDate'] = make_date(df['m_nDate'])
            df['m_nTime'] = df['m_nDate'] + make_time(df['m_nTime'])
            df['m_nABVolume'] = make_nparray(df['m_nABVolume'])
            pprint(df[['code', 'm_nDate', 'm_nTime']].head())

            # Upload the DataFrame, convert the code column to SYMBOL on the
            # server side, then append it to the (pre-existing) table `gtt`.
            s.upload({'tb' : df})
            res = s.run("typestr(tb)")
            pprint(res)

            stat = """
                syms = symbol(exec code from tb)
                replaceColumn!(tb, `code, syms)
            """
            res = s.run(stat)
            pprint(res)
            pprint(s.run("schema(tb).colDefs"))

            res = s.run("append!(gtt, tb)")
            s.undef("tb", "VAR")
            print(res)
            break


def main2():
    loader = DDBLoader()
    #pprint(loader.mssql_name_dict)

    stock_list, date_list = loader.get_stock_date_list(True)
    print('Did get stock list and date list from mssql')
    #pprint(date_list[:5])
    #pprint(stock_list[:5])

    loader.create_ddb_database(stock_list, date_list)
    print('Did create ddb database')

    for type_name in loader.hft_type_list:
        loader.create_ddb_partition_table(type_name)
        print(f"Did create ddb table for {type_name}")
        loader.do_load_from_mssql(type_name, stock_list[0], date_list[0])
if __name__ == '__main__':
    # main3 is not defined in this file; run the loader workflow instead.
    main2()

@ -0,0 +1,47 @@
/**
stockMarketReplay.txt
Script to replay stock market data
DolphinDB Inc.
DolphinDB server version: 2.00.6 2022.05.31
Storage engine: TSDB
Last modification time: 2022.07.07
*/
//login account
login("admin", "123456")
//create stream table: messageStream
def createStreamTableFunc(){
    colName = `msgTime`msgType`msgBody
    colType = [TIMESTAMP, SYMBOL, BLOB]
    messageTemp = streamTable(1000000:0, colName, colType)
    enableTableShareAndPersistence(table=messageTemp, tableName="messageStream", asynWrite=true, compress=true, cacheSize=1000000, retentionMinutes=1440, flushMode=0, preCache=10000)
    messageTemp = NULL
}
createStreamTableFunc()
go
//replay history data
def replayStockMarketData(){
    timeRS = cutPoints(09:15:00.000..15:00:00.000, 100)
    orderDS = replayDS(sqlObj=<select * from loadTable("dfs://order", "order") where Date = 2020.12.31>, dateColumn=`Date, timeColumn=`Time, timeRepartitionSchema=timeRS)
    tradeDS = replayDS(sqlObj=<select * from loadTable("dfs://trade", "trade") where Date = 2020.12.31>, dateColumn=`Date, timeColumn=`Time, timeRepartitionSchema=timeRS)
    snapshotDS = replayDS(sqlObj=<select * from loadTable("dfs://snapshot", "snapshot") where Date = 2020.12.31>, dateColumn=`Date, timeColumn=`Time, timeRepartitionSchema=timeRS)
    inputDict = dict(["order", "trade", "snapshot"], [orderDS, tradeDS, snapshotDS])
    submitJob("replay", "replay stock market", replay, inputDict, messageStream, `Date, `Time, , , 3)
}
replayStockMarketData()
getRecentJobs()
////load text and replay memory table
//def loadTextAndReplay(){
// orderTable= select * from loadText("/yourDataPath/replayData/order.csv") order by Time
// tradeTable = select * from loadText("/yourDataPath/replayData/trade.csv") order by Time
// snapshotTable = select * from loadText("/yourDataPath/replayData/snapshot.csv") order by Time
// inputDict = dict(["order", "trade", "snapshot"], [orderTable, tradeTable, snapshotTable])
//
// submitJob("replay", "replay memory table", replay, inputDict, messageStream, `Date, `Time, , , 1)
//}
//loadTextAndReplay()