import os

from sqlalchemy import create_engine, func
from sqlalchemy.orm import scoped_session, sessionmaker


def init():
    """
    Sets up persistence for this module.
    :return:
    """
    # default to an in-memory SQLite database unless TECHNICAL_HISTORY_DB is set
    engine = create_engine(os.environ.get("TECHNICAL_HISTORY_DB", 'sqlite://'))
    session = scoped_session(sessionmaker(bind=engine, autoflush=True, autocommit=True))
    OHLCV.session = session
    OHLCV.query = session.query_property()
    # create all tables declared on the shared declarative base
    _DECL_BASE.metadata.create_all(engine)
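init() binds a session to an OHLCV model and creates tables on a shared declarative base (_DECL_BASE), neither of which appears in this snippet. A plausible sketch of those definitions, inferred only from the columns the code below constructs:

from sqlalchemy import Column, Float, String, BigInteger
from sqlalchemy.ext.declarative import declarative_base

_DECL_BASE = declarative_base()


class OHLCV(_DECL_BASE):
    """One candle per exchange/pair/interval/timestamp (inferred schema)."""
    __tablename__ = 'ohlcv'

    id = Column(String, primary_key=True)  # "<exchange>-<interval>-<pair>:<timestamp>"
    exchange = Column(String)
    pair = Column(String)
    interval = Column(String)
    open = Column(Float)
    high = Column(Float)
    low = Column(Float)
    close = Column(Float)
    volume = Column(Float)
    timestamp = Column(BigInteger)          # candle open time in milliseconds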
# NOTE: the enclosing function definition is missing from this snippet; the
# name and signature below are inferred from the parameters used in the body.
def get_historical_ohlcv(stake, asset, interval, from_date, till_date, ccxt_api, force=False):
    # newest candle already stored for this exchange/pair/interval
    latest_time = OHLCV.session.query(func.max(OHLCV.timestamp)).filter(
        OHLCV.exchange == ccxt_api.name,
        OHLCV.pair == "{}/{}".format(asset.upper(), stake.upper()),
        OHLCV.interval == interval
    ).one()[0]
    if force:
        print("forcing database refresh and downloading all data!")
        latest_time = None
    # add additional data on top
    if latest_time is None:
        # store data for all
        for row in historical_data(stake, asset, interval, from_date, ccxt_api):
            o = OHLCV(
                id="{}-{}-{}/{}:{}".format(ccxt_api.name, interval, asset.upper(), stake.upper(), row[0]),
                exchange=ccxt_api.name,
                pair="{}/{}".format(asset.upper(), stake.upper()),
                interval=interval,
                open=row[1],
                close=row[4],
                high=row[2],
                low=row[3],
                volume=row[5],
                timestamp=row[0]
            )
            OHLCV.session.merge(o)
    else:
        # calculate the difference in days and download and merge the data files
pair="{}/{}".format(asset.upper(), stake.upper()),
interval=interval,
open=row[1],
close=row[4],
high=row[2],
low=row[3],
volume=row[5],
timestamp=row[0]
)
OHLCV.session.merge(o)
else:
# calculate the difference in days and download and merge the data files
        for row in historical_data(stake, asset, interval, latest_time, ccxt_api):
            o = OHLCV(
                id="{}-{}-{}/{}:{}".format(ccxt_api.name, interval, asset.upper(), stake.upper(), row[0]),
                exchange=ccxt_api.name,
                pair="{}/{}".format(asset.upper(), stake.upper()),
                interval=interval,
                open=row[1],
                close=row[4],
                high=row[2],
                low=row[3],
                volume=row[5],
                timestamp=row[0]
            )
            OHLCV.session.merge(o)
    # return all data to user
    result = []
    # filter by exchange, currency and other pairs
    for row in OHLCV.session.query(OHLCV).filter(
        OHLCV.exchange == ccxt_api.name,
        OHLCV.pair == "{}/{}".format(asset.upper(), stake.upper()),
        OHLCV.interval == interval,
        OHLCV.timestamp >= from_date * 1000,
        OHLCV.timestamp <= till_date * 1000,
    ).all():
        result.append([row.timestamp, row.open, row.high, row.low, row.close, row.volume])
    return result
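A minimal usage sketch. It assumes the inferred function name above, that from_date/till_date are Unix timestamps in seconds (the body multiplies them by 1000 to match millisecond candle timestamps), and that ccxt is installed; the pair and interval values are illustrative only:

import time

import ccxt

init()  # create tables and bind the scoped session

ccxt_api = ccxt.binance()
till_date = int(time.time())              # now, in seconds (assumed unit)
from_date = till_date - 7 * 24 * 60 * 60  # one week back

candles = get_historical_ohlcv("USDT", "BTC", "1h", from_date, till_date, ccxt_api)
for timestamp, open_, high, low, close, volume in candles[:3]:
    print(timestamp, open_, high, low, close, volume)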