# zipline/tests/test_security_list.py
from datetime import timedelta

import pandas as pd
from nose_parameterized import parameterized

from zipline.algorithm import TradingAlgorithm
from zipline.errors import TradingControlViolation
from zipline.testing import (
    add_security_data,
    security_list_copy,
)
from zipline.testing.fixtures import (
    WithMakeAlgo,
    ZiplineTestCase,
)
from zipline.utils import factory
from zipline.utils.security_list import (
    SecurityListSet,
    load_from_directory,
)

# Knowledge-dated leveraged ETF data, loaded once at import time from the
# on-disk security list directory.
LEVERAGED_ETFS = load_from_directory('leveraged_etf_list')
class RestrictedAlgoWithCheck(TradingAlgorithm):
    """Algo that restricts leveraged ETFs and consults the restricted list
    before placing its single order.
    """

    def initialize(self, symbol):
        self.rl = SecurityListSet(self.get_datetime, self.asset_finder)
        self.set_asset_restrictions(self.rl.restrict_leveraged_etfs)
        self.order_count = 0
        self.sid = self.symbol(symbol)

    def handle_data(self, data):
        # Place at most one order over the whole run.
        if self.order_count:
            return
        restricted = self.rl.leveraged_etf_list.current_securities(
            self.get_datetime(),
        )
        # Only order when the asset is not currently restricted.
        if self.sid not in restricted:
            self.order(self.sid, 100)
            self.order_count += 1
class RestrictedAlgoWithoutCheck(TradingAlgorithm):
    """Algo that restricts leveraged ETFs via ``set_asset_restrictions`` but
    orders unconditionally, relying on the trading control to reject
    restricted orders.
    """

    def initialize(self, symbol):
        self.rl = SecurityListSet(self.get_datetime, self.asset_finder)
        self.set_asset_restrictions(self.rl.restrict_leveraged_etfs)
        self.sid = self.symbol(symbol)
        self.order_count = 0

    def handle_data(self, data):
        # Deliberately skip any restricted-list check before ordering.
        self.order(self.sid, 100)
        self.order_count += 1
class RestrictedAlgoWithoutCheckSetDoNotOrderList(TradingAlgorithm):
    """Same as ``RestrictedAlgoWithoutCheck`` but wires up the restriction
    through ``set_do_not_order_list`` instead of ``set_asset_restrictions``.
    """

    def initialize(self, symbol):
        self.rl = SecurityListSet(self.get_datetime, self.asset_finder)
        self.set_do_not_order_list(self.rl.leveraged_etf_list)
        self.sid = self.symbol(symbol)
        self.order_count = 0

    def handle_data(self, data):
        # Order without checking; the do-not-order control should fire.
        self.order(self.sid, 100)
        self.order_count += 1
class IterateRLAlgo(TradingAlgorithm):
    """Algo that scans the current leveraged ETF restricted list each bar
    and records whether its own asset ever appears there.
    """

    def initialize(self, symbol):
        self.rl = SecurityListSet(self.get_datetime, self.asset_finder)
        self.set_asset_restrictions(self.rl.restrict_leveraged_etfs)
        self.order_count = 0
        self.sid = self.symbol(symbol)
        self.found = False

    def handle_data(self, data):
        current = self.rl.leveraged_etf_list.current_securities(
            self.get_datetime(),
        )
        # Sticky flag: once set, it is never cleared on later bars.
        if any(stock == self.sid for stock in current):
            self.found = True
class SecurityListTestCase(WithMakeAlgo, ZiplineTestCase):
    """Tests for the leveraged ETF restricted list and the trading controls
    (``set_asset_restrictions`` / ``set_do_not_order_list``) built on it.

    Fixes relative to the previous revision: timezone strings normalized to
    'UTC' (behaviorally identical in pandas, previously mixed-case), and the
    repeated symbol->sid lookup boilerplate extracted to a private helper.
    """
    # XXX: This suite uses way more than it probably needs.
    START_DATE = pd.Timestamp('2002-01-03', tz='UTC')
    assert START_DATE == sorted(list(LEVERAGED_ETFS.keys()))[0], \
        "START_DATE should match start of LEVERAGED_ETF data."
    END_DATE = pd.Timestamp('2015-02-17', tz='UTC')

    # Knowledge date at which extra security data (added in tests below) is
    # known, and the last trading day before the first knowledge date.
    extra_knowledge_date = pd.Timestamp('2015-01-27', tz='UTC')
    trading_day_before_first_kd = pd.Timestamp('2015-01-23', tz='UTC')

    SIM_PARAMS_END = pd.Timestamp('2002-01-08', tz='UTC')

    SIM_PARAMS_DATA_FREQUENCY = 'daily'
    DATA_PORTAL_USE_MINUTE_DATA = False

    ASSET_FINDER_EQUITY_SIDS = (1, 2, 3, 4, 5)
    ASSET_FINDER_EQUITY_SYMBOLS = ('AAPL', 'GOOG', 'BZQ', 'URTY', 'JFT')

    def _sids_for_symbols(self, symbols):
        """Resolve *symbols* to sids as of ``extra_knowledge_date``."""
        return [
            self.asset_finder.lookup_symbol(
                symbol,
                as_of_date=self.extra_knowledge_date,
            ).sid
            for symbol in symbols
        ]

    def test_iterate_over_restricted_list(self):
        # The algo iterates the restricted list itself and flags whether
        # its asset was present.
        algo = self.make_algo(
            algo_class=IterateRLAlgo,
            symbol='BZQ',
        )
        algo.run()
        self.assertTrue(algo.found)

    def test_security_list(self):
        # Set the knowledge date to the first day of the
        # leveraged etf knowledge date.
        def get_datetime():
            return self.START_DATE

        rl = SecurityListSet(get_datetime, self.asset_finder)

        # A sample from the leveraged ETF list should be restricted...
        for sid in self._sids_for_symbols(["BZQ", "URTY", "JFT"]):
            self.assertIn(
                sid, rl.leveraged_etf_list.current_securities(get_datetime()))

        # ...while a sample of allowed stocks should not be.
        for sid in self._sids_for_symbols(["AAPL", "GOOG"]):
            self.assertNotIn(
                sid, rl.leveraged_etf_list.current_securities(get_datetime()))

    def test_security_add(self):
        def get_datetime():
            return pd.Timestamp("2015-01-27", tz='UTC')

        with security_list_copy():
            # After adding AAPL and GOOG, both the new entries and the
            # original leveraged ETF entries must be restricted.
            add_security_data(['AAPL', 'GOOG'], [])
            rl = SecurityListSet(get_datetime, self.asset_finder)
            for sid in self._sids_for_symbols(
                    ["AAPL", "GOOG", "BZQ", "URTY"]):
                self.assertIn(
                    sid,
                    rl.leveraged_etf_list.current_securities(get_datetime())
                )

    def test_security_add_delete(self):
        with security_list_copy():
            def get_datetime():
                return pd.Timestamp("2015-01-27", tz='UTC')

            # NOTE(review): these assertions compare symbol *strings* against
            # a collection of sids, so they hold trivially; consider
            # asserting on looked-up sids instead. Kept as-is to preserve
            # existing behavior.
            rl = SecurityListSet(get_datetime, self.asset_finder)
            self.assertNotIn(
                "BZQ",
                rl.leveraged_etf_list.current_securities(get_datetime())
            )
            self.assertNotIn(
                "URTY",
                rl.leveraged_etf_list.current_securities(get_datetime())
            )

    def test_algo_without_rl_violation_via_check(self):
        # Checking the list before ordering avoids any violation.
        self.run_algorithm(algo_class=RestrictedAlgoWithCheck, symbol="BZQ")

    def test_algo_without_rl_violation(self):
        # Ordering an unrestricted asset without checking is fine.
        self.run_algorithm(
            algo_class=RestrictedAlgoWithoutCheck, symbol="AAPL",
        )

    @parameterized.expand([
        ('using_set_do_not_order_list',
         RestrictedAlgoWithoutCheckSetDoNotOrderList),
        ('using_set_restrictions', RestrictedAlgoWithoutCheck),
    ])
    def test_algo_with_rl_violation(self, name, algo_class):
        # Ordering a restricted asset must raise before any order is placed.
        algo = self.make_algo(algo_class=algo_class, symbol='BZQ')
        with self.assertRaises(TradingControlViolation) as ctx:
            algo.run()
        self.check_algo_exception(algo, ctx, 0)

        # repeat with a symbol from a different lookup date
        algo = self.make_algo(
            algo_class=RestrictedAlgoWithoutCheck, symbol='JFT',
        )
        with self.assertRaises(TradingControlViolation) as ctx:
            algo.run()
        self.check_algo_exception(algo, ctx, 0)

    def test_algo_with_rl_violation_after_knowledge_date(self):
        start = self.START_DATE + timedelta(days=7)
        end = start + self.trading_calendar.day * 4
        algo = self.make_algo(
            algo_class=RestrictedAlgoWithoutCheck,
            symbol='BZQ',
            sim_params=self.make_simparams(
                start_session=start,
                end_session=end,
            )
        )
        with self.assertRaises(TradingControlViolation) as ctx:
            algo.run()
        self.check_algo_exception(algo, ctx, 0)

    def test_algo_with_rl_violation_cumulative(self):
        """
        Add a new restriction, run a test long after both
        knowledge dates, make sure stock from original restriction
        set is still disallowed.
        """
        sim_params = factory.create_simulation_parameters(
            start=self.START_DATE + timedelta(days=7),
            num_days=4
        )
        with security_list_copy():
            add_security_data(['AAPL'], [])
            algo = self.make_algo(
                algo_class=RestrictedAlgoWithoutCheck,
                symbol='BZQ',
                sim_params=sim_params,
            )
            with self.assertRaises(TradingControlViolation) as ctx:
                algo.run()
            self.check_algo_exception(algo, ctx, 0)

    def test_algo_without_rl_violation_after_delete(self):
        sim_params = factory.create_simulation_parameters(
            start=self.extra_knowledge_date,
            num_days=4,
        )
        with security_list_copy():
            # add a delete statement removing bzq
            # write a new delete statement file to disk
            add_security_data([], ['BZQ'])
            algo = self.make_algo(
                algo_class=RestrictedAlgoWithoutCheck,
                symbol='BZQ',
                sim_params=sim_params,
            )
            # Ordering BZQ after its deletion must no longer violate.
            algo.run()

    def test_algo_with_rl_violation_after_add(self):
        sim_params = factory.create_simulation_parameters(
            start=self.trading_day_before_first_kd,
            num_days=4,
        )
        with security_list_copy():
            add_security_data(['AAPL'], [])
            algo = self.make_algo(
                algo_class=RestrictedAlgoWithoutCheck,
                symbol='AAPL',
                sim_params=sim_params,
            )
            with self.assertRaises(TradingControlViolation) as ctx:
                algo.run()
            # Two orders are placed before the knowledge date takes effect.
            self.check_algo_exception(algo, ctx, 2)

    def check_algo_exception(self, algo, ctx, expected_order_count):
        """Assert *algo* raised a RestrictedListOrder violation after exactly
        *expected_order_count* successful orders.
        """
        self.assertEqual(algo.order_count, expected_order_count)
        exc = ctx.exception
        self.assertEqual(TradingControlViolation, type(exc))
        exc_msg = str(ctx.exception)
        self.assertTrue("RestrictedListOrder" in exc_msg)