# Set werkzeug logging level
werkzeug_logger = logging.getLogger('werkzeug')
werkzeug_logger.setLevel(level=levels[EQ_WERKZEUG_LOG_LEVEL])
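# The same pattern works for any chatty third-party logger; a minimal,
# self-contained illustration (the 'urllib3' name is only an example and is not
# taken from the snippet above):
import logging

logging.getLogger('urllib3').setLevel(logging.WARNING)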
def parse_exception(_, __, event_dict):
    # In developer mode, leave the rendered traceback untouched for readability.
    if EQ_DEVELOPER_LOGGING:
        return event_dict
    exception = event_dict.get('exception')
    if exception:
        # Normalise quotes and split the traceback into lines so it serialises
        # as a JSON array rather than one embedded multi-line string.
        event_dict['exception'] = exception.replace("\"", "'").split("\n")
    return event_dict
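# Illustrative before/after (not from the source), assuming EQ_DEVELOPER_LOGGING
# is falsy so the branch above runs:
#   {'exception': 'Traceback (most recent call last):\nValueError: "boom"'}
# becomes
#   {'exception': ['Traceback (most recent call last):', "ValueError: 'boom'"]}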
# Set up structlog rendering: console output in developer mode, JSON otherwise
renderer_processor = ConsoleRenderer() if EQ_DEVELOPER_LOGGING else JSONRenderer()
processors = [add_log_level, TimeStamper(key='created', fmt='iso'), add_service, format_exc_info, parse_exception, renderer_processor]
configure(context_class=wrap_dict(dict), logger_factory=LoggerFactory(), processors=processors, cache_logger_on_first_use=True)
# Fully silence the given logger as well as the application's own logger.
log = logging.getLogger(logger)
log.setLevel(logging.ERROR)
log.disabled = True
self.app.logger.disabled = True
logging.basicConfig(
level=self.log_level, stream=sys.stdout, format="%(message)s"
)
chain = [
filter_by_level,
add_log_level,
add_logger_name,
TimeStamper(fmt="iso"),
StackInfoRenderer(),
format_exc_info,
JSONRenderer(indent=1, sort_keys=True),
]
logger = logging.getLogger(__name__)
if self.testing:
    chain = []
    logger = structlog.ReturnLogger()
log = structlog.wrap_logger(
logger,
processors=chain,
context_class=dict,
wrapper_class=structlog.stdlib.BoundLogger,
# cache_logger_on_first_use=True,
)
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(log_formatter)
root_logger = logging.getLogger()
root_logger.addHandler(handler)
root_logger.setLevel(loglevel)
structlog.configure(
processors=[
structlog.stdlib.filter_by_level,
structlog.stdlib.add_logger_name,
structlog.stdlib.add_log_level,
structlog.stdlib.PositionalArgumentsFormatter(),
structlog.processors.StackInfoRenderer(),
structlog.processors.format_exc_info,
structlog.processors.UnicodeDecoder(),
structlog.stdlib.render_to_log_kwargs,
],
context_class=dict,
logger_factory=structlog.stdlib.LoggerFactory(),
wrapper_class=structlog.stdlib.BoundLogger,
cache_logger_on_first_use=True
)
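# Hedged sketch (assumption, not from the source): render_to_log_kwargs hands the
# final event dict to the underlying stdlib logger, so output still depends on an
# ordinary logging handler being configured, for example:
import logging
import sys
import structlog

logging.basicConfig(stream=sys.stdout, level=logging.INFO, format="%(message)s")
structlog.get_logger("example").info("user_logged_in", user_id=42)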
def default_structlog_conf(**overrides):
    '''Generate a default configuration for structlog.'''
    conf = {
        "logger_factory": structlog.stdlib.LoggerFactory(),
        "wrapper_class": structlog.stdlib.BoundLogger,
        "cache_logger_on_first_use": True,
        "processors": [
            structlog.stdlib.add_logger_name,
            structlog.stdlib.add_log_level,
            structlog.stdlib.PositionalArgumentsFormatter(),
            structlog.processors.StackInfoRenderer(),
            structlog.processors.format_exc_info,
            structlog.processors.UnicodeDecoder(),
            structlog.processors.TimeStamper(fmt='iso', utc=True),
            structlog.processors.JSONRenderer(),
        ],
    }
    conf.update(**overrides)
    return conf
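# Hypothetical usage (not part of the source): the helper returns plain keyword
# arguments for structlog.configure, so callers can splat it in and override
# individual entries as needed.
import logging
import structlog

logging.basicConfig(level=logging.INFO, format="%(message)s")  # stdlib sink for LoggerFactory()
structlog.configure(**default_structlog_conf(cache_logger_on_first_use=False))
structlog.get_logger(__name__).info("structlog configured")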
def getLogger(name):
    return wrap_logger(
        logging.getLogger(name),
        processors=[
            filter_by_level,
            add_logger_name,
            add_caller_info,
            # local_var_info,
            unorder_dict,
            TimeStamper(fmt="ISO", utc=False),
            format_exc_info,
            PositionalArgumentsFormatter(),
            alternate_dev_formatter()
        ],
        wrapper_class=BoundLogger,
    )
# Configure standard logging
logging.config.dictConfig(log_config)
# Each unit of verbosity lowers the root level by one standard step (10),
# e.g. INFO (20) -> DEBUG (10) for verbosity_adjust == 1.
logging.root.level -= 10 * verbosity_adjust
# Add TRACE log level (lower than DEBUG:10)
TRACE_LOGLVL = 5
logging.addLevelName(TRACE_LOGLVL, "TRACE")
def trace_loglevel(self, message, *args, **kws):
    if self.isEnabledFor(TRACE_LOGLVL):
        self._log(TRACE_LOGLVL, message, args, **kws)
logging.Logger.trace = trace_loglevel
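# Illustrative use of the patched-in level (assumption, not from the source):
# after addLevelName and the Logger.trace monkey-patch above, any stdlib logger
# can emit below-DEBUG messages once its effective level allows them.
import logging

trace_log = logging.getLogger("example.trace")
trace_log.addHandler(logging.StreamHandler())
trace_log.setLevel(TRACE_LOGLVL)
trace_log.trace("cache probe missed for key %s", "user:42")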
processors = [
add_exc_info_flag_for_exception,
structlog.processors.StackInfoRenderer(),
structlog.processors.format_exc_info,
add_instance_id,
StructuredLogRenderer(),
]
structlog.configure(logger_factory=structlog.stdlib.LoggerFactory(),
context_class=PlainRenderedOrderedDict,
wrapper_class=BoundLogger,
processors=processors)
# Mark first line of log
log = structlog.get_logger()
log.info("first-line")
return log
"django_structlog": {
"handlers": ["console", "flat_line_file", "json_file"],
"level": "INFO",
},
}
}
structlog.configure(
processors=[
structlog.stdlib.filter_by_level,
structlog.processors.TimeStamper(fmt="iso"),
structlog.stdlib.add_logger_name,
structlog.stdlib.add_log_level,
structlog.stdlib.PositionalArgumentsFormatter(),
structlog.processors.StackInfoRenderer(),
structlog.processors.format_exc_info,
structlog.processors.UnicodeDecoder(),
structlog.processors.ExceptionPrettyPrinter(),
structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
],
context_class=structlog.threadlocal.wrap_dict(dict),
logger_factory=structlog.stdlib.LoggerFactory(),
wrapper_class=structlog.stdlib.BoundLogger,
cache_logger_on_first_use=True,
)
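# Assumed companion setup (not from the source): wrap_for_formatter defers final
# rendering to a structlog.stdlib.ProcessorFormatter attached to a standard
# handler, which is what actually turns the event dict into text.
import logging
import sys
import structlog

formatter = structlog.stdlib.ProcessorFormatter(
    processor=structlog.dev.ConsoleRenderer(),
    foreign_pre_chain=[structlog.stdlib.add_log_level],
)
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(formatter)
root_logger = logging.getLogger()
root_logger.addHandler(handler)
root_logger.setLevel(logging.INFO)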
if for_humans:
    renderer = structlog.dev.ConsoleRenderer()  # <===
else:
    # Make it so that 0 ⇒ None
    indent = json_indent or None
    renderer = structlog.processors.JSONRenderer(
        indent=indent,
        serializer=serialize.dumps
    )
foreign_pre_chain = [
# Add the log level and a timestamp to the event_dict if the log entry
# is not from structlog.
structlog.processors.StackInfoRenderer(),
structlog.processors.format_exc_info,
structlog.stdlib.add_log_level,
structlog.stdlib.add_logger_name,
foreign_event_to_message,
rename_level_to_severity,
timestamper,
]
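# Hedged continuation (assumption, not from the source): a pre-chain like this is
# typically handed to ProcessorFormatter together with the renderer chosen above,
# so records emitted via plain `logging` get the same treatment.
import structlog

formatter = structlog.stdlib.ProcessorFormatter(
    processor=renderer,
    foreign_pre_chain=foreign_pre_chain,
)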
if level_name == 'DEBUG':
    root_logger_level = 'DEBUG'
else:
    root_logger_level = 'ERROR'
logging_config = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
# Store traceback in execution history for failed tasks. This can
# increase Redis storage requirements and therefore can be disabled
# if that is a concern.
'STORE_TRACEBACKS': True,
}
if config:
    self.config.update(config)
if setup_structlog:
    structlog.configure(
        processors=[
            structlog.stdlib.add_log_level,
            structlog.stdlib.filter_by_level,
            structlog.processors.TimeStamper(fmt='iso', utc=True),
            structlog.processors.StackInfoRenderer(),
            structlog.processors.format_exc_info,
            structlog.processors.JSONRenderer(),
        ],
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    )
self.log = structlog.get_logger(self.config['LOGGER_NAME']).bind()
if setup_structlog:
    self.log.setLevel(logging.DEBUG)
    logging.basicConfig(format='%(message)s')
self.connection = connection or redis.Redis(decode_responses=True)
self.scripts = RedisScripts(self.connection)
def getLogger(name):
    return wrap_logger(
        logging.getLogger(name),
        processors=[
            PositionalArgumentsFormatter(),
            filter_by_level,
            add_logger_name,
            add_caller_info,
            # local_var_info,
            unorder_dict,
            TimeStamper(fmt="ISO", utc=False),
            format_exc_info,
            alternate_dev_formatter()
        ],
        wrapper_class=BoundLogger,
    )
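# Hypothetical usage (not from the source): the wrapper behaves like
# logging.getLogger but returns a structlog BoundLogger with the processor chain
# above already attached, so per-request context can be bound at the call site.
request_log = getLogger(__name__).bind(request_id="abc123")
request_log.info("request started", path="/health")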