fix typo, staticmethod etc. (microsoft#1402)
* config.py: fix typo; static method

* fix typo in qlib/utils/paral

* 1) limit numpy version as numba support for 1.24+ has not been released; 2) no need to use custom numba version for pytest.

* remove useless argument

Co-authored-by: you-n-g <you-n-g@users.noreply.github.com>
qianyun210603 and you-n-g committed Mar 23, 2023
1 parent ba5d6e0 commit 962eb97
Showing 3 changed files with 9 additions and 8 deletions.
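The commit message also mentions capping the numpy version because numba had not yet released support for numpy 1.24+; that change is not among the three files shown in this diff. As a hedged illustration, a dependency cap of that kind usually looks something like the following setup.py fragment (placeholder package name; the exact file and specifier used by qlib may differ):

# Illustrative setup.py fragment only; not qlib's actual packaging metadata.
from setuptools import setup

setup(
    name="example-package",  # placeholder, not qlib's real distribution name
    install_requires=[
        "numpy<1.24",  # numba support for numpy 1.24+ had not been released yet
        "numba",
    ],
)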
qlib/config.py: 5 changes (3 additions, 2 deletions)
@@ -75,7 +75,8 @@ def update(self, *args, **kwargs):
def set_conf_from_C(self, config_c):
self.update(**config_c.__dict__["_config"])

- def register_from_C(self, config, skip_register=True):
+ @staticmethod
+ def register_from_C(config, skip_register=True):
from .utils import set_log_with_config # pylint: disable=C0415

if C.registered and skip_register:
@@ -202,7 +203,7 @@ def register_from_C(self, config, skip_register=True):
"task_url": "mongodb://localhost:27017/",
"task_db_name": "default_task_db",
},
- # Shift minute for highfreq minite data, used in backtest
+ # Shift minute for highfreq minute data, used in backtest
# if min_data_shift == 0, use default market time [9:30, 11:29, 1:00, 2:59]
# if min_data_shift != 0, use shifted market time [9:30, 11:29, 1:00, 2:59] - shift*minute
"min_data_shift": 0,
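In the first hunk, register_from_C becomes a @staticmethod: the visible body reads the module-level C rather than self, so no instance is needed. A minimal sketch of the pattern with a hypothetical stand-in class (not qlib's actual Config):

# Hypothetical stand-in class to show why a self-free method can be static.
class Config:
    registered = False

    @staticmethod
    def register_from_C(config, skip_register=True):
        # Only class-level/global state is touched, so no `self` parameter is needed.
        if Config.registered and skip_register:
            return
        Config.registered = True
        print(f"registered with {config}")

# Callable on the class itself or on an instance; the behaviour is identical.
Config.register_from_C({"logging_level": "INFO"})
Config().register_from_C({"logging_level": "INFO"}, skip_register=False)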
qlib/contrib/report/data/ana.py: 4 changes (2 additions, 2 deletions)
@@ -139,8 +139,8 @@ def plot_single(self, col, ax):

class FeaSkewTurt(NumFeaAnalyser):
def calc_stat_values(self):
self._skew = datetime_groupby_apply(self._dataset, "skew", skip_group=True)
self._kurt = datetime_groupby_apply(self._dataset, pd.DataFrame.kurt, skip_group=True)
self._skew = datetime_groupby_apply(self._dataset, "skew")
self._kurt = datetime_groupby_apply(self._dataset, pd.DataFrame.kurt)

def plot_single(self, col, ax):
self._skew[col].plot(ax=ax, label="skew")
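The skip_group keyword disappears from these two calls because the same argument is removed from datetime_groupby_apply in the next file. Stripped of the parallel helper and monthly resampling, the two statistics amount to ordinary pandas group-bys; a rough, self-contained sketch with made-up data (not the actual analyser code):

import numpy as np
import pandas as pd

# Hypothetical feature frame indexed by (datetime, instrument), as in qlib datasets.
idx = pd.MultiIndex.from_product(
    [pd.date_range("2023-01-02", periods=3), ["SH600000", "SH600519", "SZ000001", "SZ300750"]],
    names=["datetime", "instrument"],
)
df = pd.DataFrame(np.random.randn(len(idx), 2), index=idx, columns=["f0", "f1"])

# Roughly what FeaSkewTurt.calc_stat_values computes, minus resampling and parallelism.
skew = df.groupby(level="datetime").skew()
kurt = df.groupby(level="datetime").apply(pd.DataFrame.kurt)
print(skew, kurt, sep="\n")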
qlib/utils/paral.py: 8 changes (4 additions, 4 deletions)
@@ -24,7 +24,7 @@ def __init__(self, *args, **kwargs):


def datetime_groupby_apply(
df, apply_func: Union[Callable, Text], axis=0, level="datetime", resample_rule="M", n_jobs=-1, skip_group=False
df, apply_func: Union[Callable, Text], axis=0, level="datetime", resample_rule="M", n_jobs=-1
):
"""datetime_groupby_apply
This function will apply the `apply_func` on the datetime level index.
@@ -116,7 +116,7 @@ def wrapper(self, *args, **kwargs):
# The code are for implementing following workflow
# - Construct complex data structure nested with delayed joblib tasks
# - For example, {"job": [<delayed_joblib_task>, {"1": <delayed_joblib_task>}]}
- # - executing all the tasks and replace all the <deplayed_joblib_task> with its return value
+ # - executing all the tasks and replace all the <delayed_joblib_task> with its return value

# This will make it easier to convert some existing code to a parallel one
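The comment block above describes the workflow these helpers implement: build an arbitrarily nested structure whose leaves are delayed joblib tasks, run all the tasks, then splice each result back into its original position. A toy re-implementation of that idea (not qlib's actual complex_parallel, whose details are not shown here):

from joblib import Parallel, delayed

def slow_square(x):
    return x * x

# Nested structure whose leaves are delayed joblib tasks, as in the comment above.
jobs = {"job": [delayed(slow_square)(2), {"1": delayed(slow_square)(3)}]}

def is_delayed(obj):
    # delayed(f)(*args, **kwargs) evaluates to a (function, args, kwargs) triple.
    return isinstance(obj, tuple) and len(obj) == 3 and callable(obj[0])

def collect(obj, acc):
    # Gather every delayed leaf in traversal order.
    if is_delayed(obj):
        acc.append(obj)
    elif isinstance(obj, dict):
        for v in obj.values():
            collect(v, acc)
    elif isinstance(obj, list):
        for v in obj:
            collect(v, acc)

def splice(obj, results):
    # Rebuild the structure, replacing each delayed leaf with its computed result.
    if is_delayed(obj):
        return results.pop(0)
    if isinstance(obj, dict):
        return {k: splice(v, results) for k, v in obj.items()}
    if isinstance(obj, list):
        return [splice(v, results) for v in obj]
    return obj

tasks = []
collect(jobs, tasks)
done = list(Parallel(n_jobs=2)(tasks))
print(splice(jobs, done))  # {'job': [4, {'1': 9}]}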

@@ -160,7 +160,7 @@ class DelayedDict(DelayedTask):
It is designed for following feature:
Converting following existing code to parallel
- constructing a dict
- - key can be get instantly
+ - key can be gotten instantly
- computation of values tasks a lot of time.
- AND ALL the values are calculated in a SINGLE function
"""
@@ -280,7 +280,7 @@ def complex_parallel(paral: Parallel, complex_iter):

class call_in_subproc:
"""
- When we repeating run functions, it is hard to avoid memory leakage.
+ When we repeatedly run functions, it is hard to avoid memory leakage.
So we run it in the subprocess to ensure it is OK.
NOTE: Because local object can't be pickled. So we can't implement it via closure.
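The final hunk's docstring gives the motivation for call_in_subproc: repeatedly running a function in-process makes memory leaks hard to avoid, so the call is pushed into a throwaway subprocess. A minimal sketch of that general pattern with the standard library (not qlib's actual implementation; the same pickling restriction mentioned in the docstring applies):

import concurrent.futures

def run_in_subprocess(func, *args, **kwargs):
    # Hypothetical helper: run `func` in a fresh process so any memory it leaks
    # is reclaimed when the process exits.
    with concurrent.futures.ProcessPoolExecutor(max_workers=1) as pool:
        return pool.submit(func, *args, **kwargs).result()

def leaky_job(n):
    # Pretend this function grows caches or globals that are never freed.
    return sum(range(n))

if __name__ == "__main__":
    for _ in range(3):
        print(run_in_subprocess(leaky_job, 10**6))  # a brand-new process each call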
