"""
logzero.logfile(..) should be able to use a custom loglevel
"""
logzero.reset_default_logger()
temp = tempfile.NamedTemporaryFile()
try:
# Set logfile with custom loglevel
logzero.logfile(temp.name, loglevel=logging.WARN)
logzero.logger.info("info1")
logzero.logger.warn("warn1")
# If setting a loglevel with logzero.loglevel(..) it will not overwrite
# the custom loglevel of the file handler
logzero.loglevel(logging.INFO)
logzero.logger.info("info2")
logzero.logger.warn("warn2")
with open(temp.name) as f:
content = f.read()
assert "] info1" not in content
assert "] warn1" in content
assert "] info2" not in content
assert "] warn2" in content
finally:
temp.close()
import logging
import tempfile

import logzero


def test_api_logfile_custom_loglevel():
    """
    logzero.logfile(..) should be able to use a custom loglevel
    """
    logzero.reset_default_logger()
    temp = tempfile.NamedTemporaryFile()
    try:
        # Set logfile with custom loglevel
        logzero.logfile(temp.name, loglevel=logging.WARN)
        logzero.logger.info("info1")
        logzero.logger.warn("warn1")

        # Setting a loglevel with logzero.loglevel(..) must not overwrite
        # the custom loglevel of the file handler
        logzero.loglevel(logging.INFO)
        logzero.logger.info("info2")
        logzero.logger.warn("warn2")

        with open(temp.name) as f:
            content = f.read()
            assert "] info1" not in content
            assert "] warn1" in content
            assert "] info2" not in content
            assert "] warn2" in content
    finally:
        temp.close()

def test_api_loglevel(capsys):
    """
    Should reconfigure the internal logger loglevel
    """
    logzero.reset_default_logger()
    temp = tempfile.NamedTemporaryFile()
    try:
        logzero.logfile(temp.name)
        logzero.logger.info("info1")
        logzero.loglevel(logging.WARN)
        logzero.logger.info("info2")
        logzero.logger.warn("warn1")

        with open(temp.name) as f:
            content = f.read()
            assert "] info1" in content
            assert "] info2" not in content
            assert "] warn1" in content
    finally:
        temp.close()

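# The two tests above pin down how logzero.logfile(.., loglevel=...) interacts
# with logzero.loglevel(..): a loglevel passed to logfile() sticks to the file
# handler and is not overwritten by a later logzero.loglevel() call. A minimal
# non-test sketch of that behaviour (the log file path is illustrative only):

import logging
import logzero

logzero.logfile("/tmp/app.log", loglevel=logging.WARNING)  # file handler logs WARNING and above
logzero.loglevel(logging.INFO)                             # default stderr handler logs INFO and above

logzero.logger.info("goes to stderr only")
logzero.logger.warning("goes to stderr and /tmp/app.log")
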
base_dir = Path(__file__).absolute().parent.parent.parent.parent

SECRET_KEY = os.environ.get('SECRET_KEY')

_hostname = os.environ.get('HOOVER_HOSTNAME')
if _hostname:
    HOOVER_BASE_URL = 'https://' + _hostname
    ALLOWED_HOSTS = [_hostname]


def bool_env(value):
    return (value or '').lower() in ['on', 'true']

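# Illustration only (not part of the settings module): bool_env treats exactly
# "on" and "true", case-insensitively, as truthy; anything else, including
# None, the empty string and "1", is falsy.
#
#     bool_env('True')  # -> True
#     bool_env('ON')    # -> True
#     bool_env('1')     # -> False
#     bool_env(None)    # -> False
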
DEBUG = bool_env(os.environ.get('DEBUG'))
if DEBUG:
    log.warn('DEBUG mode on')

INSTALLED_APPS += (
    'hoover.contrib.ratelimit',
)
HOOVER_RATELIMIT_USER = (30, 60)

if bool_env(os.environ.get('HOOVER_TWOFACTOR_ENABLED')):
    INSTALLED_APPS += (
        'hoover.contrib.twofactor',
        'django_otp',
        'django_otp.plugins.otp_totp',
    )
    MIDDLEWARE_CLASSES += (
        'django_otp.middleware.OTPMiddleware',
    )

    # request data
    logger.info('requesting data from iex...')
    data = (web.DataReader(symbols, 'iex-tops')
            .assign(lastSaleTime=lambda df: pd.to_datetime(df.lastSaleTime, unit='ms'))
            .assign(lastUpdated=lambda df: pd.to_datetime(df.lastUpdated, unit='ms'))
            .pipe(split_timestamp, timestamp=now)
            .dropna())

    # Force float conversion for the following columns. When they are populated
    # the values are floats, but when they are not, the value is 0 and pandas
    # infers int64; the resulting schema change breaks reads of the stored data.
    to_float = ['askPrice', 'bidPrice', 'lastSalePrice', 'marketPercent']
    data.loc[:, to_float] = data.loc[:, to_float].astype(float)

    if data.empty:
        logger.warn('data df is empty!')

    # ==========================================================================
    # store data
    logger.info('storing data to interim intraday_store')
    outfp = PurePath(data_dir / 'interim' / 'intraday_store').as_posix()
    write_to_parquet(data, outfp, logger=logger)
else:
    logger.warn('system outside of market hours, no data queried')
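
# write_to_parquet is a project-specific helper whose implementation is not part
# of this snippet. The sketch below is only an assumption of what such a helper
# could look like with pandas (pyarrow or fastparquet installed): append the new
# frame to an existing parquet file so the store keeps a single, stable schema,
# which is the dtype problem described in the comments above. The file name and
# layout are illustrative.

from pathlib import Path

import pandas as pd


def write_to_parquet(df: pd.DataFrame, path: str, logger=None) -> None:
    """Hypothetical helper: append `df` to a parquet file under `path`."""
    target = Path(path) / 'data.parquet'       # illustrative file name
    target.parent.mkdir(parents=True, exist_ok=True)
    if target.exists():
        existing = pd.read_parquet(target)     # requires a parquet engine
        df = pd.concat([existing, df], ignore_index=True)
    df.to_parquet(target)
    if logger is not None:
        logger.info('wrote %d rows to %s', len(df), target)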