def test_formatter_unsets_exc_info(self, configure_for_pf, capsys, keep):
"""
Stack traces don't get printed outside of the JSON document when
keep_exc_info is set to False, but are preserved if it is set to True.
"""
configure_logging(None)
logger = logging.getLogger()
def format_exc_info_fake(logger, name, event_dict):
event_dict = collections.OrderedDict(event_dict)
del event_dict["exc_info"]
event_dict["exception"] = "Exception!"
return event_dict
formatter = ProcessorFormatter(
processor=JSONRenderer(),
keep_stack_info=keep,
keep_exc_info=keep,
foreign_pre_chain=[format_exc_info_fake],
)
logger.handlers[0].setFormatter(formatter)
try:
raise RuntimeError("oh noo")
except Exception:
logging.getLogger().exception("seen worse")
out, err = capsys.readouterr()
assert "" == out
# https://docs.djangoproject.com/en/dev/ref/settings/#email-port
EMAIL_PORT = 1025
# Your stuff...
# ------------------------------------------------------------------------------
LOGGING = {
"version": 1,
"disable_existing_loggers": True,
"formatters": {
"plain": {
"()": structlog.stdlib.ProcessorFormatter,
"processor": structlog.dev.ConsoleRenderer(colors=False),
},
"colored": {
"()": structlog.stdlib.ProcessorFormatter,
"processor": structlog.dev.ConsoleRenderer(colors=True),
},
},
"filters": {},
"handlers": {
"structured_stream": {"class": "logging.StreamHandler", "formatter": "colored"},
"structured_file": {
"class": "logging.handlers.WatchedFileHandler",
"filename": "test.log",
"formatter": "plain",
},
},
"loggers": {"": {"handlers": ["structured_stream"], "level": "INFO"}},
}
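# Companion configuration (a sketch, not part of the settings above): for
# structlog's own loggers to be rendered by the handlers declared in LOGGING,
# the processor chain has to end with ProcessorFormatter.wrap_for_formatter.
import structlog

structlog.configure(
    processors=[
        structlog.stdlib.add_log_level,
        structlog.processors.TimeStamper(fmt="iso"),
        structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
    ],
    logger_factory=structlog.stdlib.LoggerFactory(),
    wrapper_class=structlog.stdlib.BoundLogger,
    cache_logger_on_first_use=True,
)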
logging.config.dictConfig(
    {
        "version": 1,
        "formatters": {
            # The key of this first formatter isn't visible in the original
            # snippet; "plain" is assumed here.
            "plain": {
                "()": structlog.stdlib.ProcessorFormatter,
"processor": _chain(structlog.dev.ConsoleRenderer(colors=False), redact),
"foreign_pre_chain": processors,
},
"json": {
"()": structlog.stdlib.ProcessorFormatter,
"processor": _chain(structlog.processors.JSONRenderer(), redact),
"foreign_pre_chain": processors,
},
"colorized": {
"()": structlog.stdlib.ProcessorFormatter,
"processor": _chain(structlog.dev.ConsoleRenderer(colors=True), redact),
"foreign_pre_chain": processors,
},
"debug": {
"()": structlog.stdlib.ProcessorFormatter,
"processor": _chain(structlog.processors.JSONRenderer(), redact),
"foreign_pre_chain": processors,
},
},
"handlers": handlers,
"loggers": {"": {"handlers": handlers.keys(), "propagate": True}},
}
)
structlog.configure(
processors=processors + [structlog.stdlib.ProcessorFormatter.wrap_for_formatter],
wrapper_class=structlog.stdlib.BoundLogger,
logger_factory=structlog.stdlib.LoggerFactory(),
cache_logger_on_first_use=cache_logger_on_first_use,
)
# set logging level of the root logger to DEBUG, to be able to intercept
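# A possible continuation of the comment above (assumed, not shown in the
# original snippet): set the root logger to DEBUG so records from third-party
# libraries are intercepted by the structlog-formatted handlers as well.
import logging

logging.getLogger().setLevel(logging.DEBUG)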
from node_launcher.constants import NODE_LAUNCHER_DATA_PATH, OPERATING_SYSTEM
timestamper = structlog.processors.TimeStamper(fmt='%Y-%m-%d %H:%M:%S')
pre_chain = [
# Add the log level and a timestamp to the event_dict if the log entry
# is not from structlog.
structlog.stdlib.add_log_level,
timestamper,
]
logging.config.dictConfig({
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'plain': {
'()': structlog.stdlib.ProcessorFormatter,
'processor': structlog.dev.ConsoleRenderer(colors=False),
'foreign_pre_chain': pre_chain,
},
'colored': {
'()': structlog.stdlib.ProcessorFormatter,
'processor': structlog.dev.ConsoleRenderer(colors=True),
'foreign_pre_chain': pre_chain,
},
},
'handlers': {
'default': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'colored',
},
'file': {
return self.log(AIMETRICS, msg, *args, **kw)
structlog.stdlib._FixedFindCallerLogger.aimetrics = ( # pylint: disable=protected-access
aimetrics
)
structlog.stdlib.BoundLogger.aimetrics = aimetrics
structlog.configure(
processors=[
structlog.stdlib.add_log_level,
structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
],
logger_factory=structlog.stdlib.LoggerFactory(),
wrapper_class=structlog.stdlib.BoundLogger,
cache_logger_on_first_use=True,
)
formatter = structlog.stdlib.ProcessorFormatter(
processor=structlog.processors.JSONRenderer(indent=2, sort_keys=True)
)
handler = logging.StreamHandler()
handler.setFormatter(formatter)
logger = logging.getLogger(name)
logger.addHandler(handler)
logger.setLevel(level)
return structlog.wrap_logger(logger)
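# The snippet above attaches an `aimetrics` method whose definition is cut
# off; a minimal sketch of the missing pieces (the level number and name are
# assumed, not taken from the original project):
import logging

AIMETRICS = 25  # assumed value, between INFO (20) and WARNING (30)
logging.addLevelName(AIMETRICS, "AIMETRICS")

def aimetrics(self, msg, *args, **kw):
    # Mirrors the `return self.log(AIMETRICS, msg, *args, **kw)` line above.
    return self.log(AIMETRICS, msg, *args, **kw)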
add_thread_info,
structlog.stdlib.PositionalArgumentsFormatter(),
timestamper,
structlog.processors.StackInfoRenderer(),
structlog.processors.format_exc_info,
structlog.processors.UnicodeDecoder(),
structlog.stdlib.render_to_log_kwargs,
#eventrenamer
],
context_class=dict,
logger_factory=structlog.stdlib.LoggerFactory(),
wrapper_class=structlog.stdlib.BoundLogger,
cache_logger_on_first_use=True,
)
formatter = structlog.stdlib.ProcessorFormatter(
processor=structlog.processors.JSONRenderer(),
foreign_pre_chain=shared_processors,
)
if log_dir:
log_dir = Path(log_dir)
log_dir.mkdir(parents=True, exist_ok=True)
log_file = log_dir / ("%s.json.log" % log_name)
handler = RotatingFileHandler(log_file, maxBytes=10 * 1024 * 1024, backupCount=5)
else:
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(formatter)
root_logger = logging.getLogger()
root_logger.addHandler(handler)
root_logger.setLevel(log_level)
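# Usage sketch (assumed caller code, not from the snippet above): with this
# setup, events from structlog loggers and records from plain stdlib loggers
# are both rendered by the JSON ProcessorFormatter on the chosen handler.
log = structlog.get_logger("app")
log.info("job finished")
logging.getLogger("app").warning("plain stdlib record, same JSON output")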
loggingConfig = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'aws': {
# No time b.c. CloudWatch logs times
'format': u"[%(levelname)-8s] %(message)s "
u"{%(module)s.%(funcName)s():%(lineno)s %(pathname)s}",
'datefmt': "%Y-%m-%d %H:%M:%S"
},
"colored": {
'format': '{Time: %(asctime)s, '
'Level: [%(levelname)s], '
'function: %(module)s.%(funcName)s():%(lineno)s, '
'message: %(message)s}',
"()": structlog.stdlib.ProcessorFormatter,
"processor": structlog.dev.ConsoleRenderer(colors=True),
'datefmt': '%Y-%m-%d %H:%M:%S',
},
},
'handlers': loggingHandlersConfig,
'root': {
'level': 'INFO',
'propagate': True,
'handlers': loggingHandlers
}
}
dictConfig(loggingConfig)
app = Flask(__name__)
# HTTP security header middleware for Flask
talisman = Talisman(app)
talisman.force_https = False
from ._base import *
import structlog
DEBUG = True
WEBSITE_URL = "http://127.0.0.1:8000" # without trailing slash
MEDIA_URL = f"{WEBSITE_URL}/media/"
LOGGING = {
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"json_formatter": {
"()": structlog.stdlib.ProcessorFormatter,
"processor": structlog.processors.JSONRenderer(),
},
"plain_console": {
"()": structlog.stdlib.ProcessorFormatter,
"processor": structlog.dev.ConsoleRenderer(),
},
"key_value": {
"()": structlog.stdlib.ProcessorFormatter,
"processor": structlog.processors.KeyValueRenderer(key_order=['timestamp', 'level', 'event', 'logger']),
},
},
"handlers": {
"console": {
"class": "logging.StreamHandler",
"formatter": "plain_console",
},
key_order = ['message', 'event', 'level']
timestamper = structlog.processors.TimeStamper(fmt='ISO')
processors = [
event_enum_to_str,
ProcessStructuredErrors(),
structlog.stdlib.add_logger_name,
structlog.stdlib.add_log_level,
rename_level_to_severity,
timestamper,
structlog.processors.StackInfoRenderer(),
structlog.processors.format_exc_info,
add_func_name,
add_message,
order_keys(key_order),
structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
]
if for_humans:
renderer = structlog.dev.ConsoleRenderer() # <===
else:
# Make it so that 0 ⇒ None
indent = json_indent or None
renderer = structlog.processors.JSONRenderer(
indent=indent,
serializer=serialize.dumps
)
foreign_pre_chain = [
# Add the log level and a timestamp to the event_dict if the log entry
# is not from structlog.
structlog.processors.StackInfoRenderer(),
]
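# A plausible continuation (assumed, not shown in the snippet): hand the
# selected renderer and the pre-chain to ProcessorFormatter so that records
# from non-structlog loggers get the same treatment.
formatter = structlog.stdlib.ProcessorFormatter(
    processor=renderer,
    foreign_pre_chain=foreign_pre_chain,
)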
structlog.configure(
processors=[
structlog.stdlib.filter_by_level,
structlog.processors.TimeStamper(fmt="iso"),
structlog.stdlib.add_logger_name,
structlog.stdlib.add_log_level,
structlog.stdlib.PositionalArgumentsFormatter(),
structlog.processors.StackInfoRenderer(),
structlog.processors.format_exc_info,
structlog.processors.UnicodeDecoder(),
structlog.processors.ExceptionPrettyPrinter(),
structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
],
context_class=structlog.threadlocal.wrap_dict(dict),
logger_factory=structlog.stdlib.LoggerFactory(),
wrapper_class=structlog.stdlib.BoundLogger,
cache_logger_on_first_use=True,
)
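# Companion handler setup (a sketch, not from the snippet above):
# wrap_for_formatter only packs the event dict for the stdlib machinery; a
# handler whose formatter is a ProcessorFormatter is still needed to render it.
import logging
import sys

import structlog

handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(
    structlog.stdlib.ProcessorFormatter(
        processor=structlog.dev.ConsoleRenderer(),
    )
)
logging.getLogger().addHandler(handler)
logging.getLogger().setLevel(logging.INFO)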
MIDDLEWARE += [
"django_structlog.middlewares.RequestMiddleware",
"django_structlog.middlewares.CeleryMiddleware",
]