# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def test_foreign_pre_chain(self, configure_for_pf, capsys):
    """
    If foreign_pre_chain is an iterable, it's used to pre-process
    non-structlog log entries.
    """
    configure_logging((add_log_level,))
    configure(
        processors=[ProcessorFormatter.wrap_for_formatter],
        logger_factory=LoggerFactory(),
        wrapper_class=BoundLogger,
    )

    # Emit through plain stdlib logging; the pre-chain must still format it.
    logging.getLogger().warning("foo")

    # readouterr() yields (stdout, stderr): nothing on stdout, the
    # formatted entry on stderr.
    captured = capsys.readouterr()
    assert captured == (
        "",
        "[warning ] foo [in test_foreign_pre_chain]\n",
    )
},
# Two plain stream handlers: 'default' writes to stdout, 'error' to stderr.
# NOTE(review): the opening of this logging-config dict (its assignment and
# earlier sections such as formatters) lies before the visible lines.
'handlers': {
'default': {
'class': 'logging.StreamHandler',
'stream': sys.stdout,
},
'error': {
'class': 'logging.StreamHandler',
'stream': sys.stderr,
},
},
}
# Install the config assembled above via the stdlib dictConfig API.
logging.config.dictConfig(logging_config)
# stdlib-integrated structlog pipeline: merge contextvars first so
# context-bound values reach every event, filter early, enrich with
# logger name/level/timestamp, then render key=value pairs.
# NOTE(review): this configure() call is truncated — its closing
# parenthesis and any remaining keyword arguments lie past the visible
# lines.
structlog.configure(
processors=[
contextvars.merge_contextvars,
structlog.stdlib.filter_by_level,
structlog.stdlib.add_logger_name,
structlog.stdlib.add_log_level,
structlog.stdlib.PositionalArgumentsFormatter(),
structlog.processors.TimeStamper(fmt='iso'),
structlog.processors.StackInfoRenderer(),
structlog.processors.format_exc_info,
structlog.processors.UnicodeDecoder(),
# Fixed key order keeps rendered lines uniformly scannable.
structlog.processors.KeyValueRenderer(
key_order=['timestamp', 'logger', 'level', 'event']
),
],
context_class=dict,
logger_factory=structlog.stdlib.LoggerFactory(),
class Testing(Common):
    """Settings overrides applied while the test suite runs."""

    # MD5 is fast and insecure — acceptable only for test fixtures.
    PASSWORD_HASHERS = ["django.contrib.auth.hashers.MD5PasswordHasher"]

    # Execute Celery tasks synchronously in-process, propagate their
    # exceptions, and skip result storage entirely.
    CELERY_TASK_ALWAYS_EAGER = True
    CELERY_TASK_EAGER_PROPAGATES = True
    CELERY_TASK_IGNORE_RESULT = True

    # Deliberately throwaway secret for the test environment.
    SECRET_KEY = "dont-tell-eve"

    # Empty AWS credentials — presumably so tests never reach real S3;
    # TODO(review): confirm the bucket name is safe to keep here.
    AWS_ACCESS_KEY_ID = ""
    AWS_SECRET_ACCESS_KEY = ""
    AWS_S3_BUCKET_NAME = "pdfs.contratospr.com"
# Shared processor pipeline: stdlib filtering and enrichment first,
# key=value rendering last.
_STRUCTLOG_PROCESSORS = [
    structlog.stdlib.filter_by_level,
    structlog.stdlib.add_logger_name,
    structlog.stdlib.add_log_level,
    structlog.stdlib.PositionalArgumentsFormatter(),
    structlog.processors.TimeStamper(fmt="iso"),
    structlog.processors.StackInfoRenderer(),
    structlog.processors.format_exc_info,
    structlog.processors.UnicodeDecoder(),
    structlog.processors.KeyValueRenderer(),
]

structlog.configure(
    processors=_STRUCTLOG_PROCESSORS,
    # Thread-local context so bound values don't leak across threads.
    context_class=structlog.threadlocal.wrap_dict(dict),
    logger_factory=structlog.stdlib.LoggerFactory(),
    wrapper_class=structlog.stdlib.BoundLogger,
    cache_logger_on_first_use=True,
)
"handlers": {
"default": {
"level": os.environ.get('LOG_LEVEL','DEBUG'),
"class": "logging.StreamHandler",
"formatter": "colored",
},
},
"loggers": {
"": {
"handlers": ["default"],
"level": os.environ.get('LOG_LEVEL','DEBUG'),
"propagate": True,
},
}
})
# Hand events to the stdlib ProcessorFormatter so structlog entries and
# plain logging records share one rendering path.
_pipeline = [
    structlog.stdlib.add_log_level,
    structlog.stdlib.PositionalArgumentsFormatter(),
    structlog.processors.TimeStamper(fmt='iso'),
    structlog.processors.StackInfoRenderer(),
    structlog.processors.format_exc_info,
    # Must come last: wraps the event dict for the stdlib formatter.
    structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
]

structlog.configure(
    processors=_pipeline,
    context_class=dict,
    logger_factory=structlog.stdlib.LoggerFactory(),
    wrapper_class=structlog.stdlib.BoundLogger,
    cache_logger_on_first_use=True,
)
Lambda function to check the authorizer cookie.
This function verifies that the cookie is valid. It returns either a 400, or a
302 redirect to the given `return_to` URL.
"""
import os
from http import cookies
from urllib.parse import urlsplit, urlunsplit, urlencode
import jwt
import structlog
import attr
from utils import canonicalize_headers, get_jwt_secret, get_config
# Minimal structlog setup: render every event as a single JSON object.
structlog.configure(processors=[structlog.processors.JSONRenderer()])
@attr.s(slots=True, auto_attribs=True)
class VerifyAccessRequest:
    """Parsed payload of a cookie-verification request (built by validate_request)."""

    raw_token: str      # JWT string as extracted from the request cookie
    access_token: dict  # decoded JWT claims — see jwt.decode in validate_request
    return_to: str      # URL to redirect the caller back to on success
def validate_request(event: dict) -> VerifyAccessRequest:
    """Extract and decode the access-token cookie from a Lambda event.

    NOTE(review): this definition is truncated in this chunk — the
    jwt.decode() call and the function's tail continue past the
    visible lines.
    """
    # canonicalize_headers presumably normalizes header-name case — TODO
    # confirm; values arrive as lists, hence the [0] below.
    headers = canonicalize_headers(event['headers'])
    request_cookies = cookies.BaseCookie(headers['cookie'][0])
    # Cookie name is configuration-driven; KeyError if the cookie is absent.
    access_token = request_cookies[get_config().cookie_name].value
    token = jwt.decode( # may raise
        access_token,
def setup_structlog(tty=False):
    """Configure structlog for human-readable or machine-readable output.

    :param tty: when true, render events with the colorized console
        renderer; otherwise emit one JSON document per event.
    """
    renderer = (
        structlog.dev.ConsoleRenderer()
        if tty
        else structlog.processors.JSONRenderer()
    )
    structlog.configure(
        processors=[
            structlog.stdlib.filter_by_level,
            structlog.stdlib.add_log_level,
            structlog.stdlib.add_logger_name,
            structlog.processors.TimeStamper(fmt='iso', utc=True),
            structlog.processors.StackInfoRenderer(),
            structlog.processors.format_exc_info,
            renderer,
        ],
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    )
"formatter": "key_value",
},
},
"loggers": {
"django_structlog": {
"handlers": ["colored_stream", "flat_line_file", "json_file"],
"level": "INFO",
},
"django_structlog_demo_project": {
"handlers": ["colored_stream", "flat_line_file", "json_file"],
"level": "INFO",
},
},
}
# structlog pipeline feeding the stdlib ProcessorFormatter, with
# pretty-printed exceptions for console use.
# NOTE(review): this configure() call is truncated — its closing
# parenthesis lies past the visible lines.
structlog.configure(
processors=[
structlog.stdlib.filter_by_level,
structlog.processors.TimeStamper(fmt="iso"),
structlog.stdlib.add_logger_name,
structlog.stdlib.add_log_level,
structlog.stdlib.PositionalArgumentsFormatter(),
structlog.processors.StackInfoRenderer(),
structlog.processors.format_exc_info,
structlog.processors.UnicodeDecoder(),
structlog.processors.ExceptionPrettyPrinter(),
# Must come last: wraps the event dict for the stdlib formatter.
structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
],
context_class=structlog.threadlocal.wrap_dict(dict),
logger_factory=structlog.stdlib.LoggerFactory(),
wrapper_class=structlog.stdlib.BoundLogger,
cache_logger_on_first_use=True,
def configure(profiles=None, docker_url=None):
    """Record profile definitions and the Docker URL in module globals.

    ``profiles`` may be a mapping of name -> Profile kwargs, an iterable
    of ready-made Profile objects, or None. Also installs a default
    structlog configuration unless one is already in place.
    """
    global IS_CONFIGURED, PROFILES, DOCKER_URL

    IS_CONFIGURED = True
    if isinstance(profiles, dict):
        PROFILES.update(
            {name: Profile(name, **kwargs) for name, kwargs in profiles.items()}
        )
    else:
        PROFILES.update({p.name: p for p in profiles or []})
    DOCKER_URL = docker_url

    # structlog.is_configured() was added in 18.1, so poke the private
    # config object directly for compatibility with older releases.
    if not structlog._config._CONFIG.is_configured:
        structlog.configure(
            processors=[
                structlog.stdlib.filter_by_level,
                structlog.stdlib.add_logger_name,
                structlog.stdlib.add_log_level,
                structlog.stdlib.PositionalArgumentsFormatter(),
                structlog.processors.TimeStamper(fmt='iso'),
                structlog.processors.StackInfoRenderer(),
                structlog.processors.format_exc_info,
                structlog.processors.KeyValueRenderer(key_order=['event']),
            ],
            logger_factory=structlog.stdlib.LoggerFactory(),
            wrapper_class=structlog.stdlib.BoundLogger,
            cache_logger_on_first_use=True,
        )
def _configure_logger(logger_factory=None, wrapper_class=None):
    """Install the service-wide structlog configuration.

    Either argument may be falsy, in which case the structlog stdlib
    default (LoggerFactory / BoundLogger) is substituted.
    """
    structlog.configure(
        processors=[
            structlog.stdlib.filter_by_level,
            # Attach request IDs early so every later processor sees them.
            add_request_ids_from_environment,
            structlog.stdlib.add_log_level,
            structlog.stdlib.add_logger_name,
            structlog.stdlib.PositionalArgumentsFormatter(),
            structlog.processors.TimeStamper(fmt="iso"),
            structlog.processors.StackInfoRenderer(),
            structlog.processors.format_exc_info,
            # Deterministic key order keeps JSON log lines diff-friendly.
            structlog.processors.JSONRenderer(sort_keys=True),
        ],
        context_class=WRAPPED_DICT_CLASS,
        logger_factory=logger_factory or structlog.stdlib.LoggerFactory(),
        wrapper_class=wrapper_class or structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    )
import logging
import sys
import click
import structlog
from sbds.chain.cli import chain
from sbds.server.cli import server
from sbds.storages.db.cli import db
from sbds.storages.s3.cli import s3
from sbds.storages.fs.cli import fs
from sbds.codegen.cli import codegen
# Console-oriented structlog setup for the CLI: filter, enrich with
# logger name/level/UTC timestamp, then pretty-print via the dev renderer.
_chain = [
    structlog.stdlib.filter_by_level,
    structlog.stdlib.add_logger_name,
    structlog.stdlib.add_log_level,
    structlog.stdlib.PositionalArgumentsFormatter(),
    structlog.processors.TimeStamper(fmt="iso", utc=True),
    structlog.processors.StackInfoRenderer(),
    structlog.processors.format_exc_info,
    structlog.processors.UnicodeDecoder(),
    structlog.dev.ConsoleRenderer(),
]

structlog.configure(
    processors=_chain,
    context_class=dict,
    logger_factory=structlog.stdlib.LoggerFactory(),
    wrapper_class=structlog.stdlib.BoundLogger,
    cache_logger_on_first_use=True,
)