# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def winning_writer():
# NOTE(review): leading indentation appears stripped in this fragment; the
# lines below were presumably nested under the def/with — confirm against VCS
# before relying on the exact ordering of the write and the event wait.
# Will attempt to write version 2 as well (racing the other writer thread).
with ArcticTransaction(library, 'FOO', 'user', 'log') as cwb:
# Write the next version of 'FOO' inside the audited transaction.
cwb.write('FOO', ts2, metadata={'foo': 'bar'})
# Synchronisation point with the concurrent losing_writer thread;
# e2 is presumably an Event shared by the enclosing test — verify.
e2.wait()
def losing_writer():
# NOTE(review): leading indentation appears stripped in this fragment; the
# nesting of the pytest.raises / with / wait lines cannot be recovered from
# this view — confirm against VCS.
# Will attempt to write version 2, should find that version 2 is already
# there, and the transaction aborts with ConcurrentModificationException.
with pytest.raises(ConcurrentModificationException):
with ArcticTransaction(library, 'FOO', 'user', 'log') as cwb:
cwb.write('FOO', ts1_append, metadata={'foo': 'bar'})
# Synchronisation point with the winning_writer thread; e1 is presumably
# an Event shared by the enclosing test — verify.
e1.wait()
def test_ArticTransaction_no_audit():
    """With audit=False an ArcticTransaction performs the write but never
    records an audit entry on the underlying VersionStore."""
    vs = Mock(spec=VersionStore)
    base_frame = pd.DataFrame(index=[1, 2], data={'a': [1.0, 2.0]})

    # Stub out the store: read yields version 1, a write produces version 2.
    vs.read.return_value = VersionedItem(symbol=sentinel.symbol, library=sentinel.library,
                                         version=1, metadata=None, data=base_frame)
    vs.write.return_value = VersionedItem(symbol=sentinel.symbol, library=sentinel.library,
                                          version=2, metadata=None, data=None)
    vs.list_versions.return_value = [{'version': 2}, {'version': 1}]

    with ArcticTransaction(vs, sentinel.symbol, sentinel.user, sentinel.log, audit=False) as cwb:
        cwb.write(sentinel.symbol, pd.DataFrame(index=[3, 4], data={'a': [1.0, 2.0]}), metadata=sentinel.meta)

    # Exactly one write happened, and the audit hook was never invoked.
    assert vs.write.call_count == 1
    assert vs._write_audit.call_count == 0
# NOTE(review): this fragment starts mid-function — the enclosing `def`, the
# setup of `df`, `f`, and `tickstore_lib`, and the original indentation are
# not visible from here.
assert_array_equal(df['b'].values, np.array([3., 5., 7., 9.]))
# The last mongo query issued (captured via f.call_args_list) hit 2 documents.
assert mongo_count(tickstore_lib._collection, filter=f.call_args_list[-1][0][0]) == 2
# Narrow the range to 0103-0104: only the middle values remain.
df = tickstore_lib.read('SYM', date_range=DateRange(20130103, 20130104), columns=None)
assert_array_equal(df['b'].values, np.array([5., 7.]))
assert mongo_count(tickstore_lib._collection, filter=f.call_args_list[-1][0][0]) == 2
df = tickstore_lib.read('SYM', date_range=DateRange(20130103, 20130105), columns=None)
assert_array_equal(df['b'].values, np.array([5., 7., 9.]))
assert mongo_count(tickstore_lib._collection, filter=f.call_args_list[-1][0][0]) == 2
df = tickstore_lib.read('SYM', date_range=DateRange(20130104, 20130105), columns=None)
assert_array_equal(df['b'].values, np.array([7., 9.]))
# A later start date narrows the mongo query down to a single document.
assert mongo_count(tickstore_lib._collection, filter=f.call_args_list[-1][0][0]) == 1
# Test the different open-closed interval behaviours on the same range.
df = tickstore_lib.read('SYM', date_range=DateRange(20130104, 20130105, CLOSED_CLOSED), columns=None)
assert_array_equal(df['b'].values, np.array([7., 9.]))
df = tickstore_lib.read('SYM', date_range=DateRange(20130104, 20130105, CLOSED_OPEN), columns=None)
assert_array_equal(df['b'].values, np.array([7.]))
df = tickstore_lib.read('SYM', date_range=DateRange(20130104, 20130105, OPEN_CLOSED), columns=None)
assert_array_equal(df['b'].values, np.array([9.]))
# OPEN_OPEN excludes both endpoints, leaving nothing in this two-day range.
df = tickstore_lib.read('SYM', date_range=DateRange(20130104, 20130105, OPEN_OPEN), columns=None)
assert_array_equal(df['b'].values, np.array([]))
def test_daterange_fails_with_timezone_start(library):
    """Reading with a timezone-aware DateRange start must raise ValueError."""
    csv_text = StringIO("""2015-08-10 00:00:00,200005,1.0
2015-08-11 00:00:00,200016,3.0""")
    frame = read_csv(csv_text, parse_dates=[0],
                     names=['date', 'security_id', 'value']).set_index(['date', 'security_id'])
    library.write('MYARR', frame)
    # A tz-aware start date is rejected by the read path.
    with pytest.raises(ValueError):
        library.read('MYARR', date_range=DateRange(start=dt(2015, 1, 1, tzinfo=mktz())))
def test_raise_exception_if_date_range_does_not_contain_end_date():
    """_get_library_metadata must reject a DateRange that lacks an end date."""
    store = TopLevelTickStore(Mock())
    open_ended = DateRange(start=dt(2011, 1, 1), end=None)
    with pytest.raises(Exception) as excinfo:
        store._get_library_metadata(open_ended)
    # The error message names the offending range.
    assert "The date range {0} must contain a start and end date".format(open_ended) in str(excinfo.value)
from datetime import datetime as dt
import operator
import pytest
import itertools
import six
from arctic.date import DateRange, string_to_daterange, CLOSED_CLOSED, CLOSED_OPEN, OPEN_CLOSED, OPEN_OPEN
# Table of DateRange bounding scenarios, keyed by a human-readable name.
# Each value looks like:
#   (DateRange, expected start, expected end,
#    expected unbounded flag, expected start-bound flag, expected end-bound flag)
# NOTE(review): field meanings are inferred from the constructors below —
# confirm against the test that consumes this table.
test_ranges_for_bounding = {
"unbounded": (DateRange(),
None, None, True, None, None),
"unbounded_right": (DateRange('20110101'),
dt(2011, 1, 1), None, True, True, None),
"unbounded_left": (DateRange(None, '20111231'),
None, dt(2011, 12, 31), True, None, True),
"closed_by_default": (DateRange('20110101', '20111231'),
dt(2011, 1, 1), dt(2011, 12, 31), False, True, True),
"closed_explicitly": (DateRange('20110101', '20111231', CLOSED_CLOSED),
dt(2011, 1, 1), dt(2011, 12, 31), False, True, True),
"closed_open": (DateRange('20110101', '20111231', CLOSED_OPEN),
dt(2011, 1, 1), dt(2011, 12, 31), False, True, False),
"open_closed": (DateRange('20110101', '20111231', OPEN_CLOSED),
dt(2011, 1, 1), dt(2011, 12, 31), False, False, True),
"open_open": (DateRange('20110101', '20111231', OPEN_OPEN),
dt(2011, 1, 1), dt(2011, 12, 31), False, False, False),
}
# Fix the iteration order for parametrised tests by sorting on the value tuple.
# NOTE(review): tuple comparison here starts with the DateRange element, so
# this relies on DateRange defining ordering operators — confirm.
test_ranges_for_bounding = sorted(six.iteritems(test_ranges_for_bounding), key=operator.itemgetter(1))
def eq_nan(*args):
def test_pandas_datetime_index_store_series(chunkstore_lib):
    """A daily-chunked Series round-trips unchanged through the chunkstore."""
    dates = [dt(2016, 1, 1), dt(2016, 1, 2), dt(2016, 1, 3)]
    series = Series(data=[1, 2, 3],
                    index=Index(data=dates, name='date'),
                    name='data')
    chunkstore_lib.write('chunkstore_test', series, chunk_size='D')
    # Read back the full date range and compare against what was written.
    result = chunkstore_lib.read('chunkstore_test', chunk_range=DateRange(dt(2016, 1, 1), dt(2016, 1, 3)))
    assert_series_equal(result, series)
def test_daterange_closedclosed():
    """to_pandas_closed_closed nudges OPEN_OPEN bounds inward by 1ms each side,
    producing an equivalent CLOSED_CLOSED range."""
    tz = mktz('Europe/London')
    open_range = DateRange(dt(2013, 1, 1, tzinfo=tz),
                           dt(2014, 2, 1, tzinfo=tz), OPEN_OPEN)
    # Expected: start shifted forward 1ms, end shifted back 1ms, both closed.
    expected = DateRange(dt(2013, 1, 1, 0, 0, 0, 1000, tzinfo=tz),
                         dt(2014, 1, 31, 23, 59, 59, 999000, tzinfo=tz),
                         CLOSED_CLOSED)
    assert to_pandas_closed_closed(open_range) == expected
)
# NOTE(review): this fragment starts mid-function — `c`, `df` and the
# enclosing `def` are defined above this view; indentation appears stripped.
# Unbounded start, bounded end (labelled OPEN - CLOSED by the author).
assert_frame_equal(c.filter(df, DateRange(None, dt(2016, 1, 3))), df)
# Bounded start, unbounded end (CLOSED - OPEN): keeps everything.
assert_frame_equal(c.filter(df, DateRange(dt(2016, 1, 1), None)), df)
# Fully unbounded range (OPEN - OPEN): keeps everything.
assert_frame_equal(c.filter(df, DateRange(None, None)), df)
# Bounded start far before the data range: still keeps everything.
assert_frame_equal(c.filter(df, DateRange(dt(2000, 1, 1), None)), df)
# Start after all the data: filter yields an empty frame.
assert(c.filter(df, DateRange(dt(2020, 1, 2), None)).empty)
# End after all the data: keeps everything.
assert_frame_equal(c.filter(df, DateRange(None, dt(2020, 1, 1))), df)
# Fully-bounded range entirely after the data: empty result.
assert(c.filter(df, DateRange(dt(2017, 1, 1), dt(2018, 1, 1))).empty)