@@ -147,7 +147,7 @@ def _build_model(self, data):
             mu=mu_backtest,
             sd=sigma_backtest,
             shape=self.num_backtests,
-            observed=data.perf_sharpe_ratio_is)
+            observed=data.sharpe_ratio)
 
         return model
 
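For orientation, these keyword arguments feed the observed in-sample Sharpes into the model's likelihood; only the column name changes. A minimal PyMC3 sketch of the same pattern, with invented priors standing in for the real `mu_backtest`/`sigma_backtest` (the distribution choices are assumptions for illustration, not this library's actual model):

```python
import pandas as pd
import pymc3 as pm

# Toy stand-in for the real backtest table (values invented).
data = pd.DataFrame({'sharpe_ratio': [0.92, 1.13, -0.01]})

with pm.Model() as model:
    # Hypothetical per-backtest priors; the real model derives these differently.
    mu_backtest = pm.Normal('mu_backtest', mu=0., sd=1., shape=len(data))
    sigma_backtest = pm.HalfNormal('sigma_backtest', sd=1., shape=len(data))
    # The renamed column supplies the observed Sharpe ratios.
    pm.Normal('sharpe',
              mu=mu_backtest,
              sd=sigma_backtest,
              shape=len(data),
              observed=data.sharpe_ratio)
```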
@@ -181,7 +181,7 @@ def fit_authors(data,
         backtests), indexed by user, algorithm and code ID.
         Note that currently, backtests are deduplicated based on code id.
         ::
-            meta_user_id meta_algorithm_id meta_code_id perf_sharpe_ratio_is
+            meta_user_id meta_algorithm_id meta_code_id sharpe_ratio
         0   abcdef123456 ghijkl789123      abcdef000000  0.919407
         1   abcdef123456 ghijkl789123      abcdef000001  1.129353
         2   abcdef123456 ghijkl789123      abcdef000002  -0.005934
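The docstring's example table translates directly into a DataFrame with the renamed column; a small construction for reference (same IDs and values as the example above):

```python
import pandas as pd

# Input frame matching the documented schema: one row per backtest.
data = pd.DataFrame({
    'meta_user_id':      ['abcdef123456'] * 3,
    'meta_algorithm_id': ['ghijkl789123'] * 3,
    'meta_code_id':      ['abcdef000000', 'abcdef000001', 'abcdef000002'],
    'sharpe_ratio':      [0.919407, 1.129353, -0.005934],
})
```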
@@ -267,16 +267,16 @@ def _check_data(data):
     if data.meta_code_id.nunique() != data.shape[0]:
         warnings.warn('Data set contains duplicate backtests.')
 
-    if (data.groupby('meta_algorithm_id')['perf_sharpe_ratio_is']
+    if (data.groupby('meta_algorithm_id')['sharpe_ratio']
             .count() < 5).any():
         warnings.warn('Data set contains algorithms with fewer than 5 '
                       'backtests.')
 
     if (data.groupby('meta_user_id')['meta_algorithm_id'].nunique() < 5).any():
         warnings.warn('Data set contains users with fewer than 5 algorithms.')
 
-    if ((data.perf_sharpe_ratio_is > 20)
-        | (data.perf_sharpe_ratio_is < -20)).any():
+    if ((data.sharpe_ratio > 20)
+        | (data.sharpe_ratio < -20)).any():
         raise ValueError('Data set contains unrealistic Sharpes: greater than '
                          '20 in magnitude.')
 
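Callers that still pass the old perf_sharpe_ratio_is column will need to rename it before calling fit_authors; a one-line adaptation (hypothetical caller-side code, not part of this change):

```python
# Rename the legacy column to the name the checks and model now expect.
data = data.rename(columns={'perf_sharpe_ratio_is': 'sharpe_ratio'})
```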