# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def test_setup_logger_defaults():
    """setup_logger() should restore the synse_server logger to INFO,
    even when the level was previously forced down to DEBUG."""
    synse_logger = structlog.get_logger('synse_server')
    synse_logger.setLevel(logging.DEBUG)
    log.setup_logger()
    assert synse_logger.getEffectiveLevel() == logging.INFO
import logging
import os
import sys
from collections import OrderedDict
from contextlib import contextmanager
from textwrap import dedent
import structlog
from .config import _config_init, get_config
from .types import Region
from .utils import mkdir_p
# Verbosity level selected on the command line (e.g. `stbt run --verbose`);
# None until configured elsewhere — TODO confirm where it is set, not visible here.
_debug_level = None
# Structured logger for user-facing debug output.
_logger = structlog.get_logger("stbt")
# Separate, more verbose logger for stbt developers ("trace" output).
_trace_logger = structlog.get_logger("stbt.trace")
def debug(msg, *args):
    """Emit *msg* on the "stbt" debug logger.

    Shown on stderr when `stbt run --verbose` was given.
    """
    emit = _logger.debug
    emit(msg, *args)
def ddebug(msg, *args):
    """Emit *msg* on the "stbt.trace" logger.

    Extra-verbose debug intended for stbt developers, not end users.
    """
    emit = _trace_logger.debug
    emit(msg, *args)
def warn(msg, *args):
    """Emit *msg* at warning level on the "stbt" logger."""
    emit = _logger.warning
    emit(msg, *args)
import pandas as pd
import sqlalchemy
from sqlalchemy.exc import ResourceClosedError
from tohu import *
import argparse
import datetime
from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor
import structlog
import json
# Configure structlog to render every event as an ISO-timestamped JSON record,
# with positional-argument formatting and exception/stack info included.
_json_processors = [
    structlog.stdlib.PositionalArgumentsFormatter(),
    structlog.processors.TimeStamper(fmt="iso"),
    structlog.processors.StackInfoRenderer(),
    structlog.processors.format_exc_info,
    structlog.processors.JSONRenderer(serializer=json.dumps),
]
structlog.configure(processors=_json_processors)
# Module-level structured logger named after this module.
logger = structlog.get_logger(__name__)
# Command-line interface for the synthetic CDR generator.
parser = argparse.ArgumentParser(description="Flowminder Synthetic CDR Generator\n")
# All sizing options are plain integers with sensible defaults.
for _flag, _default, _help in (
    ("--n-subscribers", 4000, "Number of subscribers to generate."),
    ("--n-cells", 1000, "Number of cells to generate."),
):
    parser.add_argument(_flag, type=int, default=_default, help=_help)
parser.add_argument(
"--n-calls", type=int, default=200_000, help="Number of calls to generate per day."
import argparse
import datetime
from concurrent.futures.thread import ThreadPoolExecutor
from contextlib import contextmanager
from multiprocessing import cpu_count
import sqlalchemy as sqlalchemy
from sqlalchemy.exc import ResourceClosedError
import structlog
import json
# Render all log events as ISO-timestamped JSON, including positional-argument
# formatting plus stack/exception information.
_cdr_processors = [
    structlog.stdlib.PositionalArgumentsFormatter(),
    structlog.processors.TimeStamper(fmt="iso"),
    structlog.processors.StackInfoRenderer(),
    structlog.processors.format_exc_info,
    structlog.processors.JSONRenderer(serializer=json.dumps),
]
structlog.configure(processors=_cdr_processors)
# Module-level structured logger named after this module.
logger = structlog.get_logger(__name__)
# Command-line interface for the synthetic CDR generator.
parser = argparse.ArgumentParser(description="Flowminder Synthetic CDR Generator\n")
# Integer sizing options, registered data-driven to keep them uniform.
for _flag, _default, _help in (
    ("--n-subscribers", 4000, "Number of subscribers to generate."),
    ("--n-tacs", 4000, "Number of phone models to generate."),
):
    parser.add_argument(_flag, type=int, default=_default, help=_help)
parser.add_argument(
"--n-sites", type=int, default=1000, help="Number of sites to generate."
def test_exception_on_py3(self, monkeypatch):
    """
    Passing exceptions as exc_info is valid on Python 3.
    """
    # Stub out the formatter so the processor returns the raw exc_info.
    monkeypatch.setattr(
        structlog.processors, "_format_exception", lambda exc_info: exc_info
    )
    try:
        raise ValueError("test")
    except ValueError as err:
        result = format_exc_info(None, None, {"exc_info": err})
        assert result == {"exception": (ValueError, err, err.__traceback__)}
    else:
        pytest.fail("Exception not raised.")
import logging.config
import logging
import structlog
# Root stdlib logger: everything from DEBUG up, with level and thread name.
logging.basicConfig(
    level=logging.DEBUG,
    format='%(levelname)s[%(threadName)s] %(message)s',
)
# Silence chatty HTTP/AWS client libraries.
for _noisy in ('urllib3', 'botocore'):
    logging.getLogger(_noisy).setLevel(logging.CRITICAL)

# structlog rides on top of stdlib logging: thread-local context, level
# filtering, logger name/level annotation, then handing off to stdlib.
_structlog_processors = [
    structlog.stdlib.filter_by_level,
    structlog.stdlib.add_logger_name,
    structlog.stdlib.add_log_level,
    structlog.stdlib.PositionalArgumentsFormatter(),
    structlog.processors.TimeStamper(fmt="iso"),
    structlog.processors.StackInfoRenderer(),
    structlog.processors.format_exc_info,
    structlog.processors.UnicodeDecoder(),
    structlog.stdlib.render_to_log_kwargs,
]
structlog.configure_once(
    context_class=structlog.threadlocal.wrap_dict(dict),
    logger_factory=structlog.stdlib.LoggerFactory(),
    wrapper_class=structlog.stdlib.BoundLogger,
    processors=_structlog_processors,
)
level_map = {
'CRITICAL': 50,
'ERROR': 40,
'WARNING': 30,
def test_foreign_pre_chain(self, configure_for_pf, capsys):
    """
    If foreign_pre_chain is an iterable, it's used to pre-process
    non-structlog log entries.
    """
    configure_logging((add_log_level,))
    configure(
        processors=[ProcessorFormatter.wrap_for_formatter],
        logger_factory=LoggerFactory(),
        wrapper_class=BoundLogger,
    )
    # A plain stdlib log record should pick up the level annotation.
    logging.getLogger().warning("foo")
    expected = (
        "",
        "[warning ] foo [in test_foreign_pre_chain]\n",
    )
    assert capsys.readouterr() == expected
stdlib_logger.setLevel(logging.DEBUG)
# structlog configuration:
# https://www.structlog.org/en/stable/standard-library.html#rendering-using-logging-based-formatters
# Level filtering and name/level annotation happen in structlog; the final
# render hands the event off to stdlib logging as keyword arguments.
_stdlib_chain = [
    structlog.stdlib.filter_by_level,
    structlog.stdlib.add_logger_name,
    structlog.stdlib.add_log_level,
    structlog.stdlib.PositionalArgumentsFormatter(),
    structlog.processors.StackInfoRenderer(),
    structlog.processors.format_exc_info,
    structlog.stdlib.render_to_log_kwargs,
]
structlog.configure(
    processors=_stdlib_chain,
    context_class=dict,
    logger_factory=structlog.stdlib.LoggerFactory(),
    wrapper_class=structlog.stdlib.BoundLogger,
    cache_logger_on_first_use=True,
)
def test_foreign_pre_chain_add_logger_name(self, configure_for_pf, capsys):
    """
    foreign_pre_chain works with add_logger_name processor.
    """
    configure_logging((add_logger_name,))
    configure(
        processors=[ProcessorFormatter.wrap_for_formatter],
        logger_factory=LoggerFactory(),
        wrapper_class=BoundLogger,
    )
    logging.getLogger("sample-name").warning("foo")
    expected = (
        "",
        "foo [sample-name] [in test_foreign_pre_chain_add_logger_name]\n",
    )
    assert capsys.readouterr() == expected
def __new_oyente_analyzer(args="-ce", storage_dir="/tmp", timeout_sec=60):
    """Build an Analyzer wrapping the Oyente wrapper script.

    Args:
        args: Extra command-line arguments for Oyente (default "-ce",
            matching the previously hard-coded value).
        storage_dir: Directory where the analyzer stores its output.
        timeout_sec: Per-run timeout in seconds.

    Returns:
        An Analyzer driving the "oyente" Wrapper.
    """
    logger = getLogger("test")
    oyente_wrapper = Wrapper(
        wrappers_dir="{0}/analyzers/wrappers".format(project_root()),
        analyzer_name="oyente",
        # Bug fix: was hard-coded to "-ce", silently ignoring the `args`
        # parameter; now the caller-supplied value is honored.
        args=args,
        storage_dir=storage_dir,
        timeout_sec=timeout_sec,
        logger=logger,
    )
    return Analyzer(oyente_wrapper, logger)