# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
import pandas as pd
import sqlalchemy
from sqlalchemy.exc import ResourceClosedError
from tohu import *
import argparse
import datetime
from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor
import structlog
import json
# Configure structlog: %-style positional-arg interpolation, ISO timestamps,
# stack-info rendering, exception formatting, then JSON output via json.dumps.
_processors = [
    structlog.stdlib.PositionalArgumentsFormatter(),
    structlog.processors.TimeStamper(fmt="iso"),
    structlog.processors.StackInfoRenderer(),
    structlog.processors.format_exc_info,
    structlog.processors.JSONRenderer(serializer=json.dumps),
]
structlog.configure(processors=_processors)
# Module-level logger bound to this module's name.
logger = structlog.get_logger(__name__)
# Command-line interface for the synthetic CDR generator.
parser = argparse.ArgumentParser(description="Flowminder Synthetic CDR Generator\n")

# Integer-valued size options, declared as (flag, default, help) triples.
for _flag, _default, _help in [
    ("--n-subscribers", 4000, "Number of subscribers to generate."),
    ("--n-cells", 1000, "Number of cells to generate."),
]:
    parser.add_argument(_flag, type=int, default=_default, help=_help)
# Register the per-day call-volume option.
# Fix: the original call was missing its closing parenthesis (the next line of
# the file began a new import block), which made the module unparseable.
parser.add_argument(
    "--n-calls", type=int, default=200_000, help="Number of calls to generate per day."
)
import argparse
import datetime
from concurrent.futures.thread import ThreadPoolExecutor
from contextlib import contextmanager
from multiprocessing import cpu_count
import sqlalchemy as sqlalchemy
from sqlalchemy.exc import ResourceClosedError
import structlog
import json
# Configure structlog: stdlib positional-arg interpolation, ISO timestamps,
# stack-info rendering, exception formatting, then JSON output via json.dumps.
# NOTE(review): an identical configure() call appears earlier in this file;
# a later configure() simply replaces the earlier configuration.
structlog.configure(
processors=[
# Interpolate %-style positional args passed to stdlib-style log calls.
structlog.stdlib.PositionalArgumentsFormatter(),
# Add an ISO-8601 "timestamp" key to every event dict.
structlog.processors.TimeStamper(fmt="iso"),
structlog.processors.StackInfoRenderer(),
structlog.processors.format_exc_info,
# Render the final event dict as a JSON line.
structlog.processors.JSONRenderer(serializer=json.dumps),
]
)
# Module-level logger bound to this module's name.
logger = structlog.get_logger(__name__)
# Command-line interface for the synthetic CDR generator.
parser = argparse.ArgumentParser(description="Flowminder Synthetic CDR Generator\n")

# Integer-valued generator-size options, declared as (flag, default, help) triples.
for _flag, _default, _help in [
    ("--n-subscribers", 4000, "Number of subscribers to generate."),
    ("--n-tacs", 4000, "Number of phone models to generate."),
]:
    parser.add_argument(_flag, type=int, default=_default, help=_help)
# Register the site-count option.
# Fix: the original call was missing its closing parenthesis (the next line of
# the file began an unrelated test method), which made the module unparseable.
parser.add_argument(
    "--n-sites", type=int, default=1000, help="Number of sites to generate."
)
# Verifies that passing a bare exception instance via ``exc_info`` is accepted
# on Python 3: format_exc_info should expand it to a (type, value, traceback)
# triple before handing it to the (stubbed) formatter.
def test_exception_on_py3(self, monkeypatch):
"""
Passing exceptions as exc_info is valid on Python 3.
"""
# Stub out the internal formatter so the test can observe the raw
# exc_info triple instead of a rendered traceback string.
monkeypatch.setattr(
structlog.processors,
"_format_exception",
lambda exc_info: exc_info,
)
try:
raise ValueError("test")
except ValueError as e:
# The exception instance is normalised to (type, value, traceback).
d = format_exc_info(None, None, {"exc_info": e})
assert {"exception": (ValueError, e, e.__traceback__)} == d
else:
# Defensive: the raise above always fires; fail loudly if it ever doesn't.
pytest.fail("Exception not raised.")
import os
import sys
import structlog
from .sbds_json import dumps
# Structlog setup: filter by stdlib level, enrich with logger name/level and a
# UTC ISO timestamp, then render for the console.  The JSON renderer is kept
# (disabled) below for easy switching.
_pipeline = [
    structlog.stdlib.filter_by_level,
    structlog.stdlib.add_logger_name,
    structlog.stdlib.add_log_level,
    structlog.stdlib.PositionalArgumentsFormatter(),
    structlog.processors.TimeStamper(fmt="iso", utc=True),
    structlog.processors.StackInfoRenderer(),
    structlog.processors.format_exc_info,
    structlog.processors.UnicodeDecoder(),
    # structlog.processors.JSONRenderer(serializer=dumps)
    structlog.dev.ConsoleRenderer(),
]
structlog.configure(
    processors=_pipeline,
    context_class=dict,
    logger_factory=structlog.stdlib.LoggerFactory(),
    wrapper_class=structlog.stdlib.BoundLogger,
    cache_logger_on_first_use=True,
)
# Route stdlib logging to stdout as bare messages; the level comes from the
# LOG_LEVEL environment variable (default INFO).
# Fix: the original call was missing its closing parenthesis (the next line of
# the file began an unrelated fragment), which made the module unparseable.
logging.basicConfig(
    format="%(message)s",
    stream=sys.stdout,
    level=os.environ.get('LOG_LEVEL', 'INFO'),
)
# Shared UTC timestamper, used by both the foreign-record pre-chain and the
# native structlog pipeline so timestamps are produced consistently.
_sl_processor_timestamper = structlog.processors.TimeStamper(utc=True)
# Processors applied to records that originate from plain stdlib logging.
# NOTE(review): _sl_processor_add_source_context and
# _sl_processor_add_process_context are defined elsewhere in this file.
_sl_foreign_pre_chain = [
structlog.stdlib.add_log_level,
_sl_processor_timestamper,
_sl_processor_add_source_context,
_sl_processor_add_process_context,
]
# Pipeline for events logged through structlog itself; the final
# wrap_for_formatter step defers rendering to the stdlib ProcessorFormatter.
_sl_processors = [
structlog.stdlib.add_log_level,
structlog.stdlib.PositionalArgumentsFormatter(),
_sl_processor_timestamper,
_sl_processor_add_source_context,
_sl_processor_add_process_context,
structlog.processors.StackInfoRenderer(),
structlog.processors.format_exc_info,
structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
]
structlog.configure(
processors=_sl_processors,
context_class=dict,
logger_factory=structlog.stdlib.LoggerFactory(),
wrapper_class=structlog.stdlib.BoundLogger,
cache_logger_on_first_use=True,
)
def init_logging(logfile: Optional[str],
console_level: str,
console_formatter: str = "console-plain",
# AWS credentials are intentionally blank here; presumably they are supplied
# via environment variables or a settings override at deploy time — TODO confirm.
AWS_ACCESS_KEY_ID = ""
AWS_SECRET_ACCESS_KEY = ""
# Target S3 bucket name used for PDF storage.
AWS_S3_BUCKET_NAME = "pdfs.contratospr.com"
# Structlog configuration that renders events as key=value lines through the
# stdlib logging machinery, with a thread-local context dict.
structlog.configure(
processors=[
structlog.stdlib.filter_by_level,
structlog.stdlib.add_logger_name,
structlog.stdlib.add_log_level,
structlog.stdlib.PositionalArgumentsFormatter(),
structlog.processors.TimeStamper(fmt="iso"),
structlog.processors.StackInfoRenderer(),
structlog.processors.format_exc_info,
# Decode any bytes values so the renderer always sees text.
structlog.processors.UnicodeDecoder(),
structlog.processors.KeyValueRenderer(),
],
# NOTE(review): structlog.threadlocal is deprecated in recent structlog
# releases in favour of structlog.contextvars — confirm the pinned version.
context_class=structlog.threadlocal.wrap_dict(dict),
logger_factory=structlog.stdlib.LoggerFactory(),
wrapper_class=structlog.stdlib.BoundLogger,
cache_logger_on_first_use=True,
)
'class': 'logging.StreamHandler',
'stream': sys.stderr,
},
},
}
logging.config.dictConfig(logging_config)
# Structlog pipeline: merge contextvars-bound context into each event, filter
# by stdlib level, enrich with name/level/timestamp, and render as ordered
# key=value text.
_procs = [
    contextvars.merge_contextvars,
    structlog.stdlib.filter_by_level,
    structlog.stdlib.add_logger_name,
    structlog.stdlib.add_log_level,
    structlog.stdlib.PositionalArgumentsFormatter(),
    structlog.processors.TimeStamper(fmt='iso'),
    structlog.processors.StackInfoRenderer(),
    structlog.processors.format_exc_info,
    structlog.processors.UnicodeDecoder(),
    structlog.processors.KeyValueRenderer(
        key_order=['timestamp', 'logger', 'level', 'event']
    ),
]
structlog.configure(
    processors=_procs,
    context_class=dict,
    logger_factory=structlog.stdlib.LoggerFactory(),
    wrapper_class=structlog.stdlib.BoundLogger,
    cache_logger_on_first_use=True,
)
def override_sanic_loggers():
# Override Sanic loggers with structlog loggers. Unfortunately
# List of context manager instances that will be called in each
# forked child process. Useful to do things like close file handles
# or reinitialize crypto libraries.
'CHILD_CONTEXT_MANAGERS': [],
}
if config:
self.config.update(config)
if setup_structlog:
structlog.configure(
processors=[
structlog.stdlib.add_log_level,
structlog.stdlib.filter_by_level,
structlog.processors.TimeStamper(fmt='iso', utc=True),
structlog.processors.StackInfoRenderer(),
structlog.processors.format_exc_info,
structlog.processors.JSONRenderer()
],
context_class=dict,
logger_factory=structlog.stdlib.LoggerFactory(),
wrapper_class=structlog.stdlib.BoundLogger,
cache_logger_on_first_use=True,
)
self.log = structlog.get_logger(
self.config['LOGGER_NAME'],
).bind()
if setup_structlog:
self.log.setLevel(logging.DEBUG)
logging.basicConfig(format='%(message)s')
handlers = log_config.get('handlers', None)
if isinstance(handlers, dict):
for _, defs in handlers.iteritems():
if isinstance(defs, dict):
if defs.get('class', '').endswith('FluentHandler'):
defs['host'] = fluentd_host
defs['port'] = fluentd_port
# Configure standard logging
logging.config.dictConfig(log_config)
logging.root.level -= 10 * verbosity_adjust
# Processor pipeline ending in a Fluent renderer.
# NOTE(review): add_exc_info_flag_for_exception, add_instance_id,
# FluentRenderer, PlainRenderedOrderedDict and BoundLogger are defined
# elsewhere in this file; this span appears to be a function tail (it ends
# with a `return`) whose def line is outside the visible region.
processors = [
add_exc_info_flag_for_exception,
structlog.processors.StackInfoRenderer(),
structlog.processors.format_exc_info,
add_instance_id,
FluentRenderer(),
]
structlog.configure(logger_factory=structlog.stdlib.LoggerFactory(),
context_class=PlainRenderedOrderedDict,
wrapper_class=BoundLogger,
processors=processors)
# Mark first line of log
log = structlog.get_logger()
log.info("first-line")
return log
from pythonjsonlogger.jsonlogger import JsonFormatter
# pylint: disable=c-extension-no-member
import rapidjson
from jussi.typedefs import WebApp
# pylint: disable=no-member
# Structlog pipeline rendering one JSON line per event via rapidjson; the
# timestamper and the colourised console renderer are intentionally left
# disabled (kept below as comments for easy switching).
_json_processors = [
    structlog.stdlib.filter_by_level,
    structlog.stdlib.add_logger_name,
    structlog.stdlib.add_log_level,
    structlog.stdlib.PositionalArgumentsFormatter(),
    # structlog.processors.TimeStamper(fmt="iso",utc=True),
    structlog.processors.StackInfoRenderer(),
    structlog.processors.format_exc_info,
    structlog.processors.UnicodeDecoder(),
    # structlog.dev.ConsoleRenderer(colors=True)
    structlog.processors.JSONRenderer(serializer=rapidjson.dumps),
]
structlog.configure(
    processors=_json_processors,
    context_class=dict,
    logger_factory=structlog.stdlib.LoggerFactory(),
    wrapper_class=structlog.stdlib.BoundLogger,
    cache_logger_on_first_use=True,
)
# pylint: enable=no-member
# Timestamp format for log records.
# NOTE(review): the lowercase '%s' is the non-portable glibc "seconds since
# epoch" directive — '%f' (microseconds) may have been intended; confirm.
LOG_DATETIME_FORMAT = r'%Y-%m-%dT%H:%M:%S.%s%Z'
# Pin the process timezone to UTC so %Z and time conversions are deterministic.
os.environ['TZ'] = 'UTC'
time.tzset()
# JsonFormatter.converter = time.gmtime