Skip to content

Commit

Permalink
v3.3.0
Browse files Browse the repository at this point in the history
  • Loading branch information
chen-001 committed Sep 26, 2022
1 parent 44ed5af commit 70cc8c8
Show file tree
Hide file tree
Showing 4 changed files with 22 additions and 5 deletions.
4 changes: 2 additions & 2 deletions pure_ocean_breeze/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,8 @@
一个量化多因子研究的框架,包含数据、回测、因子加工等方面的功能
"""

__updated__ = "2022-09-26 14:23:15"
__version__ = "3.2.9"
__updated__ = "2022-09-26 17:35:44"
__version__ = "3.3.0"
__author__ = "chenzongwei"
__author_email__ = "[email protected]"
__url__ = "https://github.com/chen-001/pure_ocean_breeze"
Expand Down
17 changes: 15 additions & 2 deletions pure_ocean_breeze/data/write_data.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
__updated__ = "2022-09-18 01:53:55"
__updated__ = "2022-09-26 17:36:12"

try:
import rqdatac
Expand Down Expand Up @@ -284,7 +284,7 @@ def database_update_minute_data_to_questdb(kind: str) -> None:
ts.num = ts.num.astype(int).astype(str)
# 数据写入数据库
qdb = Questdb()
qdb.write_via_csv(df, f"minute_data_{kind}")
qdb.write_via_csv(ts, f"minute_data_{kind}")
# 获取剩余使用额
user2 = round(rqdatac.user.get_quota()["bytes_used"] / 1024 / 1024, 2)
user12 = round(user2 - user1, 2)
Expand Down Expand Up @@ -533,6 +533,14 @@ def download_calendar(startdate, enddate):
return df0


def drop_duplicates_index(new):
    """Drop rows whose index value repeats, keeping the first occurrence.

    The index is materialized as a column, that column is renamed to
    ``"date"``, duplicate dates are removed (first wins), and the column
    is restored as the index — so the returned frame's index is always
    named ``"date"``, regardless of the original index name.
    """
    frame = new.reset_index()
    # reset_index places the old index as the first column; force its name
    first_col = frame.columns[0]
    frame = frame.rename(columns={first_col: "date"})
    # keep only the first row for each date value
    deduped = frame.loc[~frame["date"].duplicated(keep="first")]
    return deduped.set_index("date")


def database_update_daily_files() -> None:
"""更新数据库中的日频数据
Expand Down Expand Up @@ -611,6 +619,7 @@ def to_mat(df, row, name, ind="date", col="code"):
"date"
)
new = pd.concat([old, df]).drop_duplicates()
new = drop_duplicates_index(new)
new = new[sorted(list(new.columns))]
new.reset_index().to_feather(homeplace.daily_data_file + name + ".feather")
logger.success(name + "已更新")
Expand Down Expand Up @@ -651,6 +660,7 @@ def to_mat(df, row, name, ind="date", col="code"):
part2_new = part2_new.drop_duplicates()
part2_new = part2_new[closes.columns]
part2_new = part2_new[sorted(list(part2_new.columns))]
part2_new = drop_duplicates_index(part2_new)
part2_new.reset_index().to_feather(homeplace.daily_data_file + "trs.feather")
logger.success("换手率更新完成")

Expand All @@ -665,6 +675,7 @@ def to_mat(df, row, name, ind="date", col="code"):
).set_index("date")
part3_new = pd.concat([part3_old, part3]).drop_duplicates()
part3_new = part3_new[closes.columns]
part3_new = drop_duplicates_index(part3_new)
part3_new = part3_new[sorted(list(part3_new.columns))]
part3_new.reset_index().to_feather(homeplace.daily_data_file + "sharenums.feather")
logger.success("流通股数更新完成")
Expand Down Expand Up @@ -706,6 +717,7 @@ def single(df):
part4_0 = part4_0.T
part4_0 = part4_0[closes.columns]
part4_0 = part4_0.drop_duplicates()
part4_0 = drop_duplicates_index(part4_0)
part4_0 = part4_0[sorted(list(part4_0.columns))]
part4_0.reset_index().to_feather(homeplace.daily_data_file + "sts.feather")
logger.success("st更新完了")
Expand All @@ -723,6 +735,7 @@ def single(df):
part5 = part5[part5.index.isin(list(part2_new.columns))]
part5 = part5.T
part5 = part5[closes.columns]
part5 = drop_duplicates_index(part5)
part5 = part5[sorted(list(part5.columns))]
part5.reset_index().to_feather(homeplace.daily_data_file + "ages.feather")
logger.success("上市天数更新完了")
Expand Down
2 changes: 1 addition & 1 deletion pure_ocean_breeze/labor/process.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
__updated__ = "2022-09-26 14:18:10"
__updated__ = "2022-09-26 17:21:48"

import warnings

Expand Down
4 changes: 4 additions & 0 deletions 更新日志/version3.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,9 @@
## 更新日志🗓 — v3

* v3.3.0 — 2022.09.26
> 1. 修复了单独更新questdb内分钟数据的函数database_update_minute_data_to_questdb中的bug
> 1. 新增了依据index去重的函数drop_duplicates_index
> 1. 修复了更新日频数据可能重复的潜在bug
* v3.2.9 — 2022.09.26
> 1. 给pure_helper增加说明
> 1. 修复了以mysql分钟数据更新因子值的类pure_fall的出现重复数据的潜在bug
Expand Down

0 comments on commit 70cc8c8

Please sign in to comment.