import logging

import structlog

stdlib_logger = logging.getLogger()  # root stdlib logger (assumed; the excerpt starts mid-setup)
stdlib_logger.setLevel(logging.DEBUG)

# structlog configuration:
# https://www.structlog.org/en/stable/standard-library.html#rendering-using-logging-based-formatters
structlog.configure(
    processors=[
        structlog.stdlib.filter_by_level,
        structlog.stdlib.add_logger_name,
        structlog.stdlib.add_log_level,
        structlog.stdlib.PositionalArgumentsFormatter(),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
        structlog.stdlib.render_to_log_kwargs,
    ],
    context_class=dict,
    logger_factory=structlog.stdlib.LoggerFactory(),
    wrapper_class=structlog.stdlib.BoundLogger,
    cache_logger_on_first_use=True,
)
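# Usage sketch (handler, format string and logger names below are illustrative,
# not part of the original snippet): render_to_log_kwargs hands each event to
# the standard library, so a plain logging handler/formatter decides the final
# line; remaining key-value pairs are attached to the LogRecord via `extra`.
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(asctime)s %(name)s %(levelname)s %(message)s"))
stdlib_logger.addHandler(handler)

log = structlog.get_logger("demo")
log.info("user_logged_in", user_id=42)  # user_id ends up as an attribute on the record
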
# pytest test method (excerpt from a test class; `configure_for_pf` and `capsys`
# are fixtures, and configure_logging/add_logger_name/etc. are imported in the
# original module).
def test_foreign_pre_chain_add_logger_name(self, configure_for_pf, capsys):
    """
    foreign_pre_chain works with the add_logger_name processor.
    """
    configure_logging((add_logger_name,))
    configure(
        processors=[ProcessorFormatter.wrap_for_formatter],
        logger_factory=LoggerFactory(),
        wrapper_class=BoundLogger,
    )

    logging.getLogger("sample-name").warning("foo")

    assert (
        "",
        "foo [sample-name] [in test_foreign_pre_chain_add_logger_name]\n",
    ) == capsys.readouterr()

# structlog config
structlog.configure(
    processors=[
        structlog.stdlib.filter_by_level,
        structlog.stdlib.add_logger_name,
        structlog.stdlib.add_log_level,
        structlog.stdlib.PositionalArgumentsFormatter(),
        structlog.processors.TimeStamper(fmt="iso"),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
        structlog.processors.UnicodeDecoder(),
        structlog.processors.JSONRenderer(),
    ],
    context_class=dict,
    logger_factory=structlog.stdlib.LoggerFactory(),
    wrapper_class=structlog.stdlib.BoundLogger,
    cache_logger_on_first_use=True,
)
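# Usage sketch (illustrative, not from the original project): with the JSON
# configuration above, each call emits one JSON object per line. filter_by_level
# defers to the stdlib logger's level, so the stdlib side still needs a basic setup.
import logging

import structlog

logging.basicConfig(format="%(message)s", level=logging.INFO)

log = structlog.get_logger("billing").bind(amount=10)
log.info("payment_received")
# -> one JSON line roughly like
#    {"amount": 10, "event": "payment_received", "logger": "billing", "level": "info", "timestamp": "..."}
#    (key order and timestamp vary)
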
structlog.configure(
    processors=[
        structlog.stdlib.filter_by_level,
        structlog.processors.TimeStamper(fmt="iso"),
        structlog.stdlib.add_logger_name,
        structlog.stdlib.add_log_level,
        structlog.stdlib.PositionalArgumentsFormatter(),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
        structlog.processors.UnicodeDecoder(),
        structlog.processors.ExceptionPrettyPrinter(),
        structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
    ],
    context_class=structlog.threadlocal.wrap_dict(dict),
    logger_factory=structlog.stdlib.LoggerFactory(),
    wrapper_class=structlog.stdlib.BoundLogger,
    cache_logger_on_first_use=True,
)
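# Companion sketch (assumed handler/renderer choices, not from the original
# project): wrap_for_formatter only prepares the event; a ProcessorFormatter
# attached to a stdlib handler does the actual rendering, and foreign_pre_chain
# is applied to records that did not originate from structlog.
import logging

import structlog

formatter = structlog.stdlib.ProcessorFormatter(
    processor=structlog.dev.ConsoleRenderer(),
    foreign_pre_chain=[
        structlog.stdlib.add_log_level,
        structlog.processors.TimeStamper(fmt="iso"),
    ],
)
handler = logging.StreamHandler()
handler.setFormatter(formatter)
root_logger = logging.getLogger()
root_logger.addHandler(handler)
root_logger.setLevel(logging.INFO)
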
import logging
import os
import sys

import structlog

structlog.configure(
    processors=[
        structlog.stdlib.filter_by_level,
        structlog.stdlib.add_logger_name,
        structlog.stdlib.add_log_level,
        structlog.stdlib.PositionalArgumentsFormatter(),
        structlog.processors.TimeStamper(fmt="iso", utc=True),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
        structlog.processors.UnicodeDecoder(),
        # structlog.processors.JSONRenderer(serializer=dumps),
        structlog.dev.ConsoleRenderer(),
    ],
    context_class=dict,
    logger_factory=structlog.stdlib.LoggerFactory(),
    wrapper_class=structlog.stdlib.BoundLogger,
    cache_logger_on_first_use=True,
)

logging.basicConfig(
    format="%(message)s",
    stream=sys.stdout,
    level=os.environ.get('LOG_LEVEL', 'INFO'),
)
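# Usage sketch (not from the original project): the LOG_LEVEL environment
# variable selects the stdlib level by name (basicConfig accepts the string
# directly), so e.g. LOG_LEVEL=DEBUG enables the debug line below.
log = structlog.get_logger("app")
log.debug("cache_miss", key="user:42")   # shown only when LOG_LEVEL=DEBUG
log.info("startup_complete", workers=4)  # pretty-printed by ConsoleRenderer above
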
import structlog


def session_to_dict(session):
    # Summarise a (SQLAlchemy-style) session for logging.
    return dict(
        session_info=session.info,
        session_dirty_count=len(session.dirty),
    )


shared_processors = [
    structlog.stdlib.add_logger_name,
    structlog.stdlib.PositionalArgumentsFormatter(),
    structlog.processors.TimeStamper(fmt="%Y-%m-%d %H:%M:%S.%f"),
    structlog.processors.StackInfoRenderer(),
    structlog.processors.format_exc_info,
]

# `log_json` is a project-level flag choosing JSON vs. console output.
if log_json:
    processors = shared_processors + [structlog.processors.JSONRenderer()]
else:
    processors = shared_processors + [structlog.dev.ConsoleRenderer()]

structlog.configure(
    processors=processors,
    context_class=dict,
    logger_factory=structlog.stdlib.LoggerFactory(),
    wrapper_class=structlog.stdlib.BoundLogger,
    cache_logger_on_first_use=True,
)
MIDDLEWARE += [
    "django_structlog.middlewares.RequestMiddleware",
    "django_structlog.middlewares.CeleryMiddleware",
]
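# Companion sketch (hypothetical Django view, not from the original settings):
# with RequestMiddleware installed, ordinary structlog calls inside views are
# annotated by the middleware with request metadata such as a request_id.
import structlog
from django.http import JsonResponse

logger = structlog.get_logger(__name__)


def healthcheck(request):
    logger.info("healthcheck_called", path=request.path)
    return JsonResponse({"status": "ok"})
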
"foreign_pre_chain": processors,
},
"debug": {
"()": structlog.stdlib.ProcessorFormatter,
"processor": _chain(structlog.processors.JSONRenderer(), redact),
"foreign_pre_chain": processors,
},
},
"handlers": handlers,
"loggers": {"": {"handlers": handlers.keys(), "propagate": True}},
}
)
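# Minimal self-contained sketch of the kind of dictConfig the fragment above
# belongs to; the formatter and handler names here are illustrative, not taken
# from the original project.
import logging.config

import structlog

logging.config.dictConfig(
    {
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "console": {
                "()": structlog.stdlib.ProcessorFormatter,
                "processor": structlog.dev.ConsoleRenderer(),
            },
        },
        "handlers": {
            "default": {"class": "logging.StreamHandler", "formatter": "console"},
        },
        "loggers": {"": {"handlers": ["default"], "level": "INFO", "propagate": True}},
    }
)
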
structlog.configure(
    processors=processors + [structlog.stdlib.ProcessorFormatter.wrap_for_formatter],
    wrapper_class=structlog.stdlib.BoundLogger,
    logger_factory=structlog.stdlib.LoggerFactory(),
    cache_logger_on_first_use=cache_logger_on_first_use,
)

# Set the root logger's level to DEBUG so that all messages are intercepted;
# they are then filtered by the `RaidenFilter`.
structlog.get_logger("").setLevel(logger_level_config.get("", DEFAULT_LOG_LEVEL))
for package in _first_party_packages:
    structlog.get_logger(package).setLevel("DEBUG")

# Roll over any RotatingFileHandler on startup so logs are also split per session.
root = logging.getLogger()
for handler in root.handlers:
    if isinstance(handler, logging.handlers.RotatingFileHandler):
        handler.flush()
        if os.stat(handler.baseFilename).st_size > 0:
            handler.doRollover()
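# Companion sketch (file name and sizes are assumptions): the rollover loop
# above only finds something to rotate if a RotatingFileHandler was attached
# beforehand, e.g. on the root logger.
import logging
import logging.handlers

file_handler = logging.handlers.RotatingFileHandler(
    "app.log",  # illustrative path
    maxBytes=10 * 1024 * 1024,
    backupCount=5,
)
logging.getLogger().addHandler(file_handler)
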
# Excerpt: fix_logger_name, format_request, ensure_event and `renderers` are
# project-specific processors/renderers defined elsewhere in the original module.
from structlog import configure, processors, stdlib

structlog_processors = [
    stdlib.filter_by_level,
    stdlib.add_logger_name,
    stdlib.add_log_level,
    fix_logger_name,
    format_request,
    ensure_event,
    stdlib.PositionalArgumentsFormatter(),
    processors.TimeStamper(fmt="ISO", key='@timestamp'),
    processors.StackInfoRenderer(),
    processors.format_exc_info,
] + renderers

configure(
    processors=structlog_processors,
    context_class=dict,
    logger_factory=stdlib.LoggerFactory(),
    wrapper_class=stdlib.BoundLogger,
    cache_logger_on_first_use=True,
)
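# Sketch of the processor protocol the custom entries above rely on: a
# processor is any callable taking (logger, method_name, event_dict) and
# returning the event_dict for the next processor. The function below is a
# hypothetical stand-in, not the project's actual ensure_event.
def ensure_event_example(logger, method_name, event_dict):
    # Guarantee an "event" key exists before rendering.
    event_dict.setdefault("event", "")
    return event_dict
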
# Builds the `loggers` mapping for a logging config: the project's `feedhq`
# logger is routed to the 'raw' handler (note that `structlog` here is just a
# dict name shadowing the module in this settings file), silenced loggers go
# to the 'null' handler, and the remaining loggers discovered via logging_tree
# get a config built by the project's `root()` helper.
structlog = {'handlers': ['raw'],
             'level': level,
             'propagate': False}
null = {'handlers': ['null'],
        'propagate': False}
loggers = {l: root(level_overrides.get(l, level))
           for l, _, _ in logging_tree.tree()[2]}
loggers['feedhq'] = structlog
for nulled_logger in silenced_loggers:
    loggers[nulled_logger] = null
import logging

import structlog


def configure(level: str, *processors):
    """
    Configure `structlog` globally.

    .. _structlog Configuration:
       http://www.structlog.org/en/stable/configuration.html
    """
    logging.basicConfig(format='%(message)s', level=level)
    logging.getLogger('asyncio').disabled = True
    structlog.configure(
        processors=processors,
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    )
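# Usage sketch for the helper above (the processor choice is illustrative):
configure(
    "INFO",
    structlog.processors.TimeStamper(fmt="iso"),
    structlog.processors.JSONRenderer(),
)
structlog.get_logger("worker").info("job_done", job_id=7)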