FastAPI gunicorn uvicorn access_log format customization - gunicorn

We are using FastAPI via https://github.com/tiangolo/uvicorn-gunicorn-fastapi-docker and were able to customize our logging with a gunicorn logging config file.
However, we are not able to change the details of the %(message)s attribute using the identifiers defined in the access log documentation - https://docs.gunicorn.org/en/stable/settings.html#accesslog.
Instead we receive the error posted below, saying that the keys are unknown.
A similar question has been asked before and received many upvotes:
gunicorn log-config access_log_format
What are we doing wrong?
#start.sh
# Start Gunicorn
exec gunicorn -k uvicorn.workers.UvicornWorker -c "$GUNICORN_CONF" "$APP_MODULE" --log-config "/logging.conf"
[loggers]
keys=root, gunicorn.error, gunicorn.access,uvicorn.error,uvicorn.access
[handlers]
keys=console, error_file, access_file, access_filegunicorn
[formatters]
keys=generic, access, accessgunicorn
[logger_root]
level=INFO
handlers=console
propagate=1
[logger_gunicorn.error]
level=INFO
handlers=error_file
propagate=0
qualname=gunicorn.error
[logger_gunicorn.access]
level=INFO
handlers=access_filegunicorn
propagate=0
qualname=gunicorn.access
[logger_uvicorn.error]
level=INFO
handlers=error_file
propagate=0
qualname=uvicorn.error
[logger_uvicorn.access]
level=INFO
handlers=access_file
propagate=0
qualname=uvicorn.access
[handler_console]
class=StreamHandler
formatter=generic
args=(sys.stdout, )
[handler_error_file]
class=StreamHandler
formatter=generic
args=(sys.stdout, )
[handler_access_file]
class=StreamHandler
formatter=access
args=(sys.stdout, )
[handler_access_filegunicorn]
class=StreamHandler
formatter=accessgunicorn
args=(sys.stdout, )
[formatter_generic]
format=[%(levelname)s]: %(message)s
datefmt=%Y-%m-%dT%H:%M:%S
class=logging.Formatter
[formatter_access]
format=[%(levelname)s]: %(message)s
datefmt=%Y-%m-%dT%H:%M:%S
class=logging.Formatter
[formatter_accessgunicorn]
format=[%(levelname)s]: '{"remote_ip":"%(h)s","session_id":"%({X-Session-Id}i)s","status":"%(s)s","request_method":"%(m)s","request_path":"%(U)s","request_querystring":"%(q)s","request_timetaken":"%(D)s","response_length":"%(B)s", "remote_addr": "%(h)s"}'
datefmt=%Y-%m-%dT%H:%M:%S
class=logging.Formatter
Message: '%s - "%s %s HTTP/%s" %d'
Arguments: ('213.3.14.24:53374', 'GET', '/v1/docs', '1.1', 200)
--- Logging error ---
Traceback (most recent call last):
File "/usr/local/lib/python3.7/logging/__init__.py", line 1025, in emit
msg = self.format(record)
File "/usr/local/lib/python3.7/logging/__init__.py", line 869, in format
return fmt.format(record)
File "/usr/local/lib/python3.7/logging/__init__.py", line 611, in format
s = self.formatMessage(record)
File "/usr/local/lib/python3.7/logging/__init__.py", line 580, in formatMessage
return self._style.format(record)
File "/usr/local/lib/python3.7/logging/__init__.py", line 422, in format
return self._fmt % record.__dict__
KeyError: 'h'
Call stack:
File "/usr/local/bin/gunicorn", line 8, in <module>
sys.exit(run())
File "/usr/local/lib/python3.7/site-packages/gunicorn/app/wsgiapp.py", line 58, in run
WSGIApplication("%(prog)s [OPTIONS] [APP_MODULE]").run()
File "/usr/local/lib/python3.7/site-packages/gunicorn/app/base.py", line 228, in run
super().run()
File "/usr/local/lib/python3.7/site-packages/gunicorn/app/base.py", line 72, in run
Arbiter(self).run()
File "/usr/local/lib/python3.7/site-packages/gunicorn/arbiter.py", line 202, in run
self.manage_workers()
File "/usr/local/lib/python3.7/site-packages/gunicorn/arbiter.py", line 545, in manage_workers
self.spawn_workers()
File "/usr/local/lib/python3.7/site-packages/gunicorn/arbiter.py", line 616, in spawn_workers
self.spawn_worker()
File "/usr/local/lib/python3.7/site-packages/gunicorn/arbiter.py", line 583, in spawn_worker
worker.init_process()
File "/usr/local/lib/python3.7/site-packages/uvicorn/workers.py", line 61, in init_process
super(UvicornWorker, self).init_process()
File "/usr/local/lib/python3.7/site-packages/gunicorn/workers/base.py", line 140, in init_process
self.run()
File "/usr/local/lib/python3.7/site-packages/uvicorn/workers.py", line 70, in run
loop.run_until_complete(server.serve(sockets=self.sockets))
File "/usr/local/lib/python3.7/site-packages/uvicorn/protocols/http/httptools_impl.py", line 385, in run_asgi
result = await app(self.scope, self.receive, self.send)
File "/usr/local/lib/python3.7/site-packages/uvicorn/middleware/proxy_headers.py", line 45, in __call__
return await self.app(scope, receive, send)
File "/usr/local/lib/python3.7/site-packages/fastapi/applications.py", line 171, in __call__
await super().__call__(scope, receive, send)
File "/usr/local/lib/python3.7/site-packages/starlette/applications.py", line 102, in __call__
await self.middleware_stack(scope, receive, send)
File "/usr/local/lib/python3.7/site-packages/starlette/middleware/errors.py", line 159, in __call__
await self.app(scope, receive, _send)
File "/usr/local/lib/python3.7/site-packages/starlette/middleware/cors.py", line 78, in __call__
await self.app(scope, receive, send)
File "/usr/local/lib/python3.7/site-packages/starlette/exceptions.py", line 71, in __call__
await self.app(scope, receive, sender)
File "/usr/local/lib/python3.7/site-packages/starlette/routing.py", line 550, in __call__
await route.handle(scope, receive, send)

I found very useful information here: https://github.com/tiangolo/fastapi/issues/1508
I needed to add the request datetime, and the solution I implemented was:
@app.on_event("startup")
async def startup_event():
    logger = logging.getLogger("uvicorn.access")
    console_formatter = uvicorn.logging.ColourizedFormatter(
        "{asctime} {levelprefix} : {message}",
        style="{", use_colors=True)
    logger.handlers[0].setFormatter(console_formatter)


if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=8000)
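If you prefer not to mutate the handler at startup, a similar result can be had by reusing uvicorn's default logging config and passing it to uvicorn.run. This is only a minimal sketch; "main:app" is an assumption about the module and app names:

import uvicorn

# Sketch: reuse uvicorn's default config and adjust only the access formatter.
log_config = uvicorn.config.LOGGING_CONFIG
log_config["formatters"]["access"]["fmt"] = (
    '%(asctime)s %(levelprefix)s %(client_addr)s - "%(request_line)s" %(status_code)s'
)
log_config["formatters"]["access"]["datefmt"] = "%Y-%m-%dT%H:%M:%S"

if __name__ == "__main__":
    # "main:app" is an assumed module/app name.
    uvicorn.run("main:app", host="0.0.0.0", port=8000, log_config=log_config)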

Our solution was a custom logger written in Python that is referenced in a logging.conf file.
logging.conf
[loggers]
keys=root, gunicorn.error, gunicorn.access,uvicorn.error,uvicorn.access
[handlers]
keys=console, error_file, access_file, accesscustom
[formatters]
keys=generic, access, AccessFormatter
[logger_root]
level=INFO
handlers=console
propagate=1
[logger_gunicorn.error]
level=INFO
handlers=error_file
propagate=0
qualname=gunicorn.error
[logger_gunicorn.access]
level=INFO
handlers=accesscustom
propagate=0
qualname=gunicorn.access
[logger_uvicorn.error]
level=INFO
handlers=error_file
propagate=0
qualname=uvicorn.error
[logger_uvicorn.access]
level=INFO
handlers=accesscustom
propagate=0
qualname=uvicorn.access
[handler_console]
class=StreamHandler
formatter=generic
args=(sys.stdout, )
[handler_error_file]
class=StreamHandler
formatter=generic
args=(sys.stdout, )
[handler_access_file]
class=StreamHandler
formatter=access
args=(sys.stdout, )
[handler_accesscustom]
class=StreamHandler
formatter=AccessFormatter
args=(sys.stdout, )
[formatter_generic]
format=%(levelname)s: %(message)s
datefmt=%Y-%m-%dT%H:%M:%S
class=uvicorn.logging.DefaultFormatter
[formatter_access]
format=%(levelname)s: %(message)s
datefmt=%Y-%m-%dT%H:%M:%S
class=customlogger.CustomFormatter
[formatter_AccessFormatter]
format={"event":"access_log","ip":"%(h)s","status":"%(s)s","method":"%(m)s","path":"%(U)s","referer":"%(f)s","x_session_id":"%(x-session-id)s","x_google_id":"%(x-google-id)s","x_server_time":"%(x-server-time)s","agent":"%(a)s"}
datefmt=%Y-%m-%dT%H:%M:%S
class=customlogger.CustomFormatter
customlogger.py
import base64
import binascii
import http
import logging
import os
import sys
import time
from copy import copy
from datetime import datetime
from pprint import pprint

import click

TRACE_LOG_LEVEL = 5


class ColourizedFormatter(logging.Formatter):
    """
    A custom log formatter class that:
    * Outputs the LOG_LEVEL with an appropriate color.
    * If a log call includes an `extras={"color_message": ...}` it will be used
      for formatting the output, instead of the plain text message.
    """

    level_name_colors = {
        TRACE_LOG_LEVEL: lambda level_name: click.style(str(level_name), fg="blue"),
        logging.DEBUG: lambda level_name: click.style(str(level_name), fg="cyan"),
        logging.INFO: lambda level_name: click.style(str(level_name), fg="green"),
        logging.WARNING: lambda level_name: click.style(str(level_name), fg="yellow"),
        logging.ERROR: lambda level_name: click.style(str(level_name), fg="red"),
        logging.CRITICAL: lambda level_name: click.style(
            str(level_name), fg="bright_red"
        ),
    }

    def __init__(self, fmt=None, datefmt=None, style="%", use_colors=None):
        if use_colors in (True, False):
            self.use_colors = use_colors
        else:
            self.use_colors = sys.stdout.isatty()
        super().__init__(fmt=fmt, datefmt=datefmt, style=style)

    def color_level_name(self, level_name, level_no):
        default = lambda level_name: str(level_name)
        func = self.level_name_colors.get(level_no, default)
        return func(level_name)

    def should_use_colors(self):
        return True

    def formatMessage(self, record):
        recordcopy = copy(record)
        levelname = recordcopy.levelname
        seperator = " " * (8 - len(recordcopy.levelname))
        if self.use_colors:
            levelname = self.color_level_name(levelname, recordcopy.levelno)
            if "color_message" in recordcopy.__dict__:
                recordcopy.msg = recordcopy.__dict__["color_message"]
                recordcopy.__dict__["message"] = recordcopy.getMessage()
        recordcopy.__dict__["levelprefix"] = levelname + ":" + seperator
        return super().formatMessage(recordcopy)


class DefaultFormatter(ColourizedFormatter):
    def should_use_colors(self):
        return sys.stderr.isatty()


class AccessFormatter(ColourizedFormatter):
    status_code_colours = {
        1: lambda code: click.style(str(code), fg="bright_white"),
        2: lambda code: click.style(str(code), fg="green"),
        3: lambda code: click.style(str(code), fg="yellow"),
        4: lambda code: click.style(str(code), fg="red"),
        5: lambda code: click.style(str(code), fg="bright_red"),
    }

    def get_client_addr(self, scope):
        client = scope.get("client")
        if not client:
            return ""
        return "%s:%d" % (client[0], client[1])

    def get_path(self, scope):
        return scope.get("root_path", "") + scope["path"]

    def get_full_path(self, scope):
        path = scope.get("root_path", "") + scope["path"]
        query_string = scope.get("query_string", b"").decode("ascii")
        if query_string:
            return path + "?" + query_string
        return path

    def get_status_code(self, record):
        status_code = record.__dict__["status_code"]
        try:
            status_phrase = http.HTTPStatus(status_code).phrase
        except ValueError:
            status_phrase = ""
        status_and_phrase = "%s %s" % (status_code, status_phrase)
        if self.use_colors:
            default = lambda code: status_and_phrase
            func = self.status_code_colours.get(status_code // 100, default)
            return func(status_and_phrase)
        return status_and_phrase

    def formatMessage(self, record):
        recordcopy = copy(record)
        scope = recordcopy.__dict__["scope"]
        method = scope["method"]
        path = self.get_path(scope)
        full_path = self.get_full_path(scope)
        client_addr = self.get_client_addr(scope)
        status_code = self.get_status_code(recordcopy)
        http_version = scope["http_version"]
        request_line = "%s %s HTTP/%s" % (method, full_path, http_version)
        if self.use_colors:
            request_line = click.style(request_line, bold=True)
        recordcopy.__dict__.update(
            {
                "method": method,
                "path": path,
                "full_path": full_path,
                "client_addr": client_addr,
                "request_line": request_line,
                "status_code": status_code,
                "http_version": http_version,
            }
        )
        return super().formatMessage(recordcopy)


class SafeAtoms(dict):
    def __init__(self, atoms):
        dict.__init__(self)
        for key, value in atoms.items():
            if isinstance(value, str):
                self[key] = value.replace('"', '\\"')
            else:
                self[key] = value

    def __getitem__(self, k):
        if k.startswith("{"):
            kl = k.lower()
            if kl in self:
                return super().__getitem__(kl)
            else:
                return "-"
        if k in self:
            return super().__getitem__(k)
        else:
            return '-'


class CustomFormatter(AccessFormatter):
    atoms_wrapper_class = SafeAtoms

    def now(self):
        """ return date in Apache Common Log Format """
        return time.strftime('[%d/%b/%Y:%H:%M:%S %z]')

    def _get_user(self, environ):
        user = None
        http_auth = environ.get("HTTP_AUTHORIZATION")
        if http_auth and http_auth.lower().startswith('basic'):
            auth = http_auth.split(" ", 1)
            if len(auth) == 2:
                try:
                    # b64decode doesn't accept unicode in Python < 3.3
                    # so we need to convert it to a byte string
                    auth = base64.b64decode(auth[1].strip().encode('utf-8'))
                    # b64decode returns a byte string
                    auth = auth.decode('utf-8')
                    auth = auth.split(":", 1)
                except (TypeError, binascii.Error, UnicodeDecodeError) as exc:
                    self.debug("Couldn't get username: %s", exc)
                    return user
                if len(auth) == 2:
                    user = auth[0]
        return user

    def atoms(self, environ, request_time, scope, statuscode, created):
        headers = dict(scope.get('headers', [('-', '-')]))
        response_headers = dict(scope.get('response_headers', [('-', '-')]))
        atoms = {
            'h': scope.get("client", ('-', ''))[0],
            'l': '-',
            's': statuscode,
            'u': self._get_user(environ) or '-',
            't': created,
            'm': str(scope.get("method", "-")),
            'U': scope.get("path", "-"),
            'q': scope.get("query_string", "-").decode("utf-8"),
            'H': str(scope.get("type", "-")),
            'f': headers.get(b"referer", b"-").decode("utf-8"),
            'a': headers.get(b"user-agent", b"-").decode("utf-8"),
            'x-session-id': headers.get(b"x-session-id", b"-").decode("utf-8"),
            'x-google-id': headers.get(b"x-google-id", b"-").decode("utf-8"),
            'x-server-time': response_headers.get(b"x-server-time", b"").decode("utf-8"),
            'p': "<%s>" % os.getpid()
        }
        return atoms

    def formatMessage(self, record):
        recordcopy = copy(record)
        scope = recordcopy.__dict__["scope"]
        # pprint(vars(recordcopy))
        safe_atoms = self.atoms_wrapper_class(
            self.atoms(os.environ, datetime.now(), scope, recordcopy.status_code, recordcopy.created)
        )
        recordcopy.__dict__.update(safe_atoms)
        # pprint(vars(os.environ))
        return super().formatMessage(recordcopy)
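To sanity-check the formatter outside the server, one option is to build a LogRecord by hand with a fake ASGI scope attached, roughly the way uvicorn's access logger populates its records. This is only a sketch for trying out the format string; the scope values and header contents below are made up:

import logging
from customlogger import CustomFormatter

# Hypothetical scope, only for exercising the format string locally.
fake_scope = {
    "client": ("127.0.0.1", 53374),
    "method": "GET",
    "path": "/v1/docs",
    "root_path": "",
    "query_string": b"",
    "http_version": "1.1",
    "type": "http",
    "headers": [(b"referer", b"-"), (b"x-session-id", b"abc123")],
}
record = logging.LogRecord("uvicorn.access", logging.INFO, __file__, 0,
                           '%s - "%s %s HTTP/%s" %d',
                           ("127.0.0.1:53374", "GET", "/v1/docs", "1.1", 200), None)
record.scope = fake_scope
record.status_code = 200

fmt = CustomFormatter('{"ip":"%(h)s","status":"%(s)s","method":"%(m)s","path":"%(U)s"}')
print(fmt.format(record))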

I'm also using the FastAPI - Uvicorn - Gunicorn stack.
To modify the format of the uvicorn logging, I first inspected its current configuration:
>>> from pprint import pprint
>>> import uvicorn.config
>>> pprint(uvicorn.config.LOGGING_CONFIG)
{'disable_existing_loggers': False,
 'formatters': {'access': {'()': 'uvicorn.logging.AccessFormatter',
                           'fmt': '%(levelprefix)s %(client_addr)s - '
                                  '"%(request_line)s" %(status_code)s'},
                'default': {'()': 'uvicorn.logging.DefaultFormatter',
                            'fmt': '%(levelprefix)s %(message)s',
                            'use_colors': None}},
 'handlers': {'access': {'class': 'logging.StreamHandler',
                         'formatter': 'access',
                         'stream': 'ext://sys.stdout'},
              'default': {'class': 'logging.StreamHandler',
                          'formatter': 'default',
                          'stream': 'ext://sys.stderr'}},
 'loggers': {'uvicorn': {'handlers': ['default'], 'level': 'INFO'},
             'uvicorn.access': {'handlers': ['access'],
                                'level': 'INFO',
                                'propagate': False},
             'uvicorn.error': {'level': 'INFO'}},
 'version': 1}
Then I created my own logging configuration based on uvicorn's default configuration, adding the date/time of the log and my own custom logger:
import logging

LOGGER_NAME = "myapp"

log_config = {
    "version": 1,
    "disable_existing_loggers": False,
    "formatters": {
        "access": {
            "()": "uvicorn.logging.AccessFormatter",
            "fmt": '%(levelprefix)s %(asctime)s - %(client_addr)s - "%(request_line)s" %(status_code)s',
            "datefmt": "%Y-%m-%d %H:%M:%S",
            "use_colors": True,
        },
        "default": {
            "()": "uvicorn.logging.DefaultFormatter",
            "fmt": "%(levelprefix)s %(asctime)s - %(message)s",
            "datefmt": "%Y-%m-%d %H:%M:%S",
            "use_colors": True,
        },
    },
    "handlers": {
        "access": {
            "class": "logging.StreamHandler",
            "formatter": "access",
            "stream": "ext://sys.stdout",
        },
        "default": {
            "formatter": "default",
            "class": "logging.StreamHandler",
            "stream": "ext://sys.stderr",
        },
    },
    "loggers": {
        LOGGER_NAME: {
            "handlers": ["default"],
            "level": "DEBUG",
            "propagate": False,
        },
        "uvicorn": {
            "handlers": ["default"],
            "level": "DEBUG",
            "propagate": True,
        },
        "uvicorn.access": {
            "handlers": ["access"],
            "level": "INFO",
            "propagate": False,
        },
        "uvicorn.error": {
            "level": "INFO",
            "propagate": False,
        },
    },
}


def get_logger():
    return logging.getLogger(LOGGER_NAME)
Then in my main.py file, where I define app = FastAPI(...), I configure the logging just after my import lines with:
logging.config.dictConfig(log_config)
And I do custom logging in my app by using the custom logger that I defined:
logger = get_logger()
logger.info("Hello World!")

Related

How to use marshmallow-sqlalchemy with async code?

I'm trying to use marshmallow-sqlalchemy with aiohttp; I followed the basic example from their docs, and I'm getting an error.
I have this schema:
from marshmallow_sqlalchemy import SQLAlchemyAutoSchema

from db.customer import Customer


class CustomerSchema(SQLAlchemyAutoSchema):
    class Meta:
        model = Customer
        include_relationships = True
        load_instance = True
And then the following code for the query:
from sqlalchemy import select

from db import db_conn
from db.customer import Customer
from queries.schema import CustomerSchema

customer_schema = CustomerSchema()


async def get_all_users():
    async with db_conn.get_async_sa_session() as session:
        statement = select(Customer)
        results = await session.execute(statement)
        _ = results.scalars().all()
        print(_)
        response = customer_schema.dump(_, many=True)
        print(response)
For the first print statement I'm getting
[<db.customer.Customer object at 0x10a183340>, <db.customer.Customer object at 0x10a183940>, <db.customer.Customer object at 0x10b0cd9d0>]
But then it fails with
File "/Users/ruslan/.local/share/virtualenvs/cft-RKlbQ9iX/lib/python3.9/site-packages/sqlalchemy/util/_concurrency_py3k.py", line 60, in await_only
raise exc.MissingGreenlet(
sqlalchemy.exc.MissingGreenlet: greenlet_spawn has not been called; can't call await_() here. Was IO attempted in an unexpected place? (Background on this error at: http://sqlalche.me/e/14/xd2s)
So how can I use marshmallow-sqlalchemy to serialize the SQLAlchemy response?
Other options (packages, etc.) or a generic custom solution are OK too.
For the time being I'm using this:
statement = select(Customer)
results = await session.execute(statement)
_ = results.scalars().all()

response = {}
for result in _:
    value = {
        k: (v if not isinstance(v, sqlalchemy.orm.state.InstanceState) else '_')
        for k, v in result.__dict__.items()
    }
    response[f'customer {value["id"]}'] = value
return response
Full traceback:
Traceback (most recent call last):
File "/Users/ruslan/.local/share/virtualenvs/cft-RKlbQ9iX/lib/python3.9/site-packages/aiohttp/web_protocol.py", line 422, in _handle_request
resp = await self._request_handler(request)
File "/Users/ruslan/.local/share/virtualenvs/cft-RKlbQ9iX/lib/python3.9/site-packages/aiohttp/web_app.py", line 499, in _handle
resp = await handler(request)
File "/Users/ruslan/.local/share/virtualenvs/cft-RKlbQ9iX/lib/python3.9/site-packages/aiohttp/web_urldispatcher.py", line 948, in _iter
resp = await method()
File "/Users/ruslan/OneDrive/Home/Dev/projects/code/education/other/cft/views/user.py", line 24, in get
await get_all_users()
File "/Users/ruslan/OneDrive/Home/Dev/projects/code/education/other/cft/queries/user.py", line 18, in get_all_users
response = customer_schema.dump(_, many=True)
File "/Users/ruslan/.local/share/virtualenvs/cft-RKlbQ9iX/lib/python3.9/site-packages/marshmallow/schema.py", line 547, in dump
result = self._serialize(processed_obj, many=many)
File "/Users/ruslan/.local/share/virtualenvs/cft-RKlbQ9iX/lib/python3.9/site-packages/marshmallow/schema.py", line 509, in _serialize
return [
File "/Users/ruslan/.local/share/virtualenvs/cft-RKlbQ9iX/lib/python3.9/site-packages/marshmallow/schema.py", line 510, in <listcomp>
self._serialize(d, many=False)
File "/Users/ruslan/.local/share/virtualenvs/cft-RKlbQ9iX/lib/python3.9/site-packages/marshmallow/schema.py", line 515, in _serialize
value = field_obj.serialize(attr_name, obj, accessor=self.get_attribute)
File "/Users/ruslan/.local/share/virtualenvs/cft-RKlbQ9iX/lib/python3.9/site-packages/marshmallow/fields.py", line 310, in serialize
value = self.get_value(obj, attr, accessor=accessor)
File "/Users/ruslan/.local/share/virtualenvs/cft-RKlbQ9iX/lib/python3.9/site-packages/marshmallow_sqlalchemy/fields.py", line 27, in get_value
return super(fields.List, self).get_value(obj, attr, accessor=accessor)
File "/Users/ruslan/.local/share/virtualenvs/cft-RKlbQ9iX/lib/python3.9/site-packages/marshmallow/fields.py", line 239, in get_value
return accessor_func(obj, check_key, default)
File "/Users/ruslan/.local/share/virtualenvs/cft-RKlbQ9iX/lib/python3.9/site-packages/marshmallow/schema.py", line 472, in get_attribute
return get_value(obj, attr, default)
File "/Users/ruslan/.local/share/virtualenvs/cft-RKlbQ9iX/lib/python3.9/site-packages/marshmallow/utils.py", line 239, in get_value
return _get_value_for_key(obj, key, default)
File "/Users/ruslan/.local/share/virtualenvs/cft-RKlbQ9iX/lib/python3.9/site-packages/marshmallow/utils.py", line 253, in _get_value_for_key
return getattr(obj, key, default)
File "/Users/ruslan/.local/share/virtualenvs/cft-RKlbQ9iX/lib/python3.9/site-packages/sqlalchemy/orm/attributes.py", line 480, in __get__
return self.impl.get(state, dict_)
File "/Users/ruslan/.local/share/virtualenvs/cft-RKlbQ9iX/lib/python3.9/site-packages/sqlalchemy/orm/attributes.py", line 931, in get
value = self.callable_(state, passive)
File "/Users/ruslan/.local/share/virtualenvs/cft-RKlbQ9iX/lib/python3.9/site-packages/sqlalchemy/orm/strategies.py", line 879, in _load_for_state
return self._emit_lazyload(
File "/Users/ruslan/.local/share/virtualenvs/cft-RKlbQ9iX/lib/python3.9/site-packages/sqlalchemy/orm/strategies.py", line 1036, in _emit_lazyload
result = session.execute(
File "/Users/ruslan/.local/share/virtualenvs/cft-RKlbQ9iX/lib/python3.9/site-packages/sqlalchemy/orm/session.py", line 1689, in execute
result = conn._execute_20(statement, params or {}, execution_options)
File "/Users/ruslan/.local/share/virtualenvs/cft-RKlbQ9iX/lib/python3.9/site-packages/sqlalchemy/engine/base.py", line 1582, in _execute_20
return meth(self, args_10style, kwargs_10style, execution_options)
File "/Users/ruslan/.local/share/virtualenvs/cft-RKlbQ9iX/lib/python3.9/site-packages/sqlalchemy/sql/lambdas.py", line 481, in _execute_on_connection
return connection._execute_clauseelement(
File "/Users/ruslan/.local/share/virtualenvs/cft-RKlbQ9iX/lib/python3.9/site-packages/sqlalchemy/engine/base.py", line 1451, in _execute_clauseelement
ret = self._execute_context(
File "/Users/ruslan/.local/share/virtualenvs/cft-RKlbQ9iX/lib/python3.9/site-packages/sqlalchemy/engine/base.py", line 1813, in _execute_context
self._handle_dbapi_exception(
File "/Users/ruslan/.local/share/virtualenvs/cft-RKlbQ9iX/lib/python3.9/site-packages/sqlalchemy/engine/base.py", line 1998, in _handle_dbapi_exception
util.raise_(exc_info[1], with_traceback=exc_info[2])
File "/Users/ruslan/.local/share/virtualenvs/cft-RKlbQ9iX/lib/python3.9/site-packages/sqlalchemy/util/compat.py", line 207, in raise_
raise exception
File "/Users/ruslan/.local/share/virtualenvs/cft-RKlbQ9iX/lib/python3.9/site-packages/sqlalchemy/engine/base.py", line 1770, in _execute_context
self.dialect.do_execute(
File "/Users/ruslan/.local/share/virtualenvs/cft-RKlbQ9iX/lib/python3.9/site-packages/sqlalchemy/engine/default.py", line 717, in do_execute
cursor.execute(statement, parameters)
File "/Users/ruslan/.local/share/virtualenvs/cft-RKlbQ9iX/lib/python3.9/site-packages/sqlalchemy/dialects/postgresql/asyncpg.py", line 449, in execute
self._adapt_connection.await_(
File "/Users/ruslan/.local/share/virtualenvs/cft-RKlbQ9iX/lib/python3.9/site-packages/sqlalchemy/util/_concurrency_py3k.py", line 60, in await_only
raise exc.MissingGreenlet(
sqlalchemy.exc.MissingGreenlet: greenlet_spawn has not been called; can't call await_() here. Was IO attempted in an unexpected place? (Background on this error at: http://sqlalche.me/e/14/xd2s)
The problem in this case is that the Marshmallow schema is configured to load related models (include_relationships=True). Since the initial query doesn't load them automatically, the schema triggers a query to fetch them, and this causes the error.
The simplest solution, demonstrated in the docs, is to eagerly load the related objects with their "parent":
async def get_all_users():
    async with db_conn.get_async_sa_session() as session:
        # Let's assume a Customer has a 1-to-many relationship with an Order model
        statement = select(Customer).options(orm.selectinload(Customer.orders))
        results = await session.execute(statement)
        _ = results.scalars().all()
        print(_)
        response = customer_schema.dump(_, many=True)
        print(response)
There is more discussion in the Preventing Implicit IO when Using AsyncSession section of the docs.
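An alternative, if eager loading should happen on every query without remembering the options() call, is to declare the relationship itself with an eager loading strategy. This is only a sketch using the hypothetical Customer/Order models assumed above:

from sqlalchemy import Column, ForeignKey, Integer
from sqlalchemy.orm import declarative_base, relationship

Base = declarative_base()


class Customer(Base):
    __tablename__ = "customer"
    id = Column(Integer, primary_key=True)
    # lazy="selectin" makes every Customer query also load .orders,
    # so the schema never triggers lazy IO inside the async session.
    orders = relationship("Order", lazy="selectin")


class Order(Base):
    __tablename__ = "order"
    id = Column(Integer, primary_key=True)
    customer_id = Column(Integer, ForeignKey("customer.id"))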

Why does Keras not take my data from the CSV?

I am following the tutorial about importing CSV data into TensorFlow.
I followed every step.
However, it does not work.
It says that one of my features is not in the features dictionary.
If so, how can I put it into the dictionary?
The link to my Colab
The code is written below and the error information is at the end.
import functools

import numpy as np
import tensorflow as tf
import tensorflow_datasets as tfds

train_file_path = "/content/Productivity training.csv"
test_file_path = "/content/Productivity Testing.csv"

LABEL_COLUMN = 'Productivity'


def get_dataset(file_path):
    dataset = tf.data.experimental.make_csv_dataset(
        file_path,
        batch_size=12,
        label_name=LABEL_COLUMN,
        na_value="?",
        num_epochs=1,
        ignore_errors=True)
    return dataset


raw_train_data = get_dataset(train_file_path)
raw_test_data = get_dataset(test_file_path)

examples, labels = next(iter(raw_train_data))  # first batch
print("EXAMPLES: \n", examples, "\n")
print("LABELS: \n", labels)

CATEGORIES = {
    'over working hours': ['0', '2'],
    'experience': ['0.5', '0.75', '1'],
    'absent': ['0.5', '0.25', '0']
}

categorical_columns = []
for feature, vocab in CATEGORIES.items():
    cat_col = tf.feature_column.categorical_column_with_vocabulary_list(
        key=feature, vocabulary_list=vocab)
    categorical_columns.append(tf.feature_column.indicator_column(cat_col))


def process_continuous_data(mean, data):
    # normalize the data
    data = tf.cast(data, tf.float32) * 1 / (2 * mean)
    return tf.reshape(data, [-1, 1])


MEANS = {
    'Weekday': 4,
    'highest': 9.2540,
    'lowest': 3.47,
    'Weather': 2.63,
    'Wind speed': 2.31
}

numerical_columns = []
for feature in MEANS.keys():
    num_col = tf.feature_column.numeric_column(
        feature, normalizer_fn=functools.partial(process_continuous_data, MEANS[feature]))
    numerical_columns.append(num_col)

preprocessing_layer = tf.keras.layers.DenseFeatures(categorical_columns + numerical_columns)

model = tf.keras.Sequential([
    preprocessing_layer,
    tf.keras.layers.Dense(128, activation='relu'),
    tf.keras.layers.Dense(128, activation='relu'),
    tf.keras.layers.Dense(1, activation='sigmoid'),
])

model.compile(
    loss='binary_crossentropy',
    optimizer='adam',
    metrics=['accuracy'])

train_data = raw_train_data
test_data = raw_test_data

model.fit(train_data, epochs=20)
Here is the error info:
/usr/local/lib/python3.7/dist-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
975 except Exception as e: # pylint:disable=broad-except
976 if hasattr(e, "ag_error_metadata"):
--> 977 raise e.ag_error_metadata.to_exception(e)
978 else:
979 raise
ValueError: in user code:
/usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/training.py:805 train_function *
return step_function(self, iterator)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/training.py:795 step_function **
outputs = model.distribute_strategy.run(run_step, args=(data,))
/usr/local/lib/python3.7/dist-packages/tensorflow/python/distribute/distribute_lib.py:1259 run
return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/distribute/distribute_lib.py:2730 call_for_each_replica
return self._call_for_each_replica(fn, args, kwargs)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/distribute/distribute_lib.py:3417 _call_for_each_replica
return fn(*args, **kwargs)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/training.py:788 run_step **
outputs = model.train_step(data)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/training.py:754 train_step
y_pred = self(x, training=True)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/base_layer.py:1012 __call__
outputs = call_fn(inputs, *args, **kwargs)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/sequential.py:389 call
outputs = layer(inputs, **kwargs)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/base_layer.py:1012 __call__
outputs = call_fn(inputs, *args, **kwargs)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/feature_column/dense_features.py:169 call **
self._state_manager)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/feature_column/feature_column_v2.py:2592 get_dense_tensor
return transformation_cache.get(self, state_manager)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/feature_column/feature_column_v2.py:2355 get
transformed = column.transform_feature(self, state_manager)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/feature_column/feature_column_v2.py:2564 transform_feature
input_tensor = transformation_cache.get(self.key, state_manager)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/feature_column/feature_column_v2.py:2339 get
raise ValueError('Feature {} is not in features dictionary.'.format(key))
ValueError: Feature Weather is not in features dictionary.

Trying to parse access.log

Good afternoon, I'm trying to find the top 10 IPs in access.log (the standard Apache server log).
The code looks like this:
import argparse
import json
import re
from collections import defaultdict, Counter

parser = argparse.ArgumentParser(description='parser script')
parser.add_argument('-f', dest='logfile', action='store', default='access.log')
args = parser.parse_args()

regul_ip = (r"^(?P<ips>.*?)")
regul_method = (r"\"(?P<request_method>GET|POST|PUT|DELETE|HEAD)")


def req_by_method():
    dict_ip = defaultdict(lambda: {"GET": 0, "POST": 0, "PUT": 0, "DELETE": 0, "HEAD": 0})
    with open(args.logfile) as file:
        for index, line in enumerate(file.readlines()):
            try:
                ip = re.search(regul_ip, line).group()
                method = re.search(regul_method, line).groups()[0]
                return Counter(dict_ip).most_common(10)
            except AttributeError:
                pass
            dict_ip[ip][method] += 1
    print(json.dumps(dict_ip, indent=4))
    with open("final_log.json", "w") as jsonfile:
        json.dump(dict_ip, jsonfile, indent=5)
When the code is executed, I only get: []
How can I fix this code to make it work?
I also need to output to the final JSON file, for each entry, the "ip", "method", "status code", "url" and the duration of the request.

JSONDecodeError: Sending file and JSON string to Django view in POST request

I am sending a file and a JSON object (as a string) in the multipart/form-data POST request below. I am unable to identify what I am doing wrong here. The request, the Django view code and the stacktrace follow.
Request:
curl -X POST \
http://127.0.0.1:8000/api/capshots/send-capshot-new-capset/ \
-H 'authorization: Token 534cec0c2d7f81808600fee15751b4de2b7c1bd1' \
-H 'cache-control: no-cache' \
-H 'content-type: multipart/form-data; boundary=----WebKitFormBoundary7MA4YWxkTrZu0gW' \
-F media_file=@/Users/ankit/Desktop/logo.png \
-F thumbnail_image=@/Users/ankit/Desktop/logo.png \
-F 'payload='\''{"allow_capset_sharing": true, "description": "Bla blah", "media_type": 1, "content_type_list": ["group", "directmessagegroup", "temporarygroup"], "object_id_list": [3, 2, 4], "userprofiles_pk": [2, 4, 6], "direct": false}'\'''
Code
@api_view(['POST'])
@permission_classes((IsAuthenticated,))
@authentication_classes((TokenAuthentication,))
def send_capshot_new_capset(request):
    # content_type could be `group`, `temporarygroup`, `directmessagegroup`
    """
    {
        "content_type_list" : ['group', 'directmessagegroup', 'temporarygroup'],
        "object_id_list" : ['3', '2', '4'],
        "userprofiles_pk" : [2, 4, 6],
        "direct" : false
    }
    """
    created_by = request.user.userprofile
    payload_json_str = request.data['payload']
    print(payload_json_str)
    payload_json = json.loads(payload_json_str)
    content_type_list = payload_json['content_type_list']
    object_id_list = payload_json['object_id_list']
    allow_sharing = payload_json['allow_capset_sharing']
    description = payload_json['description']
    media_type = payload_json['media_type']

    # Upload capshot's media file and thumbnail
    from boto.s3.connection import S3Connection
    from boto.s3.key import Key
    media_file = request.FILES['media_file']
    thumbnail_image = request.FILES['thumbnail_image']
    conn = S3Connection(settings.AWS_ACCESS_KEY, settings.AWS_SECRET_KEY)
    bucket = conn.get_bucket('capshot-data')
    k = Key(bucket)
    k.key = "capshots/media/" + datetime.datetime.now().strftime("%d-%m-%y-%H-%M-%S.%f")
    k.set_contents_from_file(media_file)
    media_url = "https://some-url/" + k.key
    k.key = "capshots/thumbnail/" + datetime.datetime.now().strftime("%d-%m-%y-%H-%M-%S.%f")
    k.set_contents_from_file(thumbnail_image)
    thumbnail_image_url = "https://some-url/" + k.key

    # Create Capsets for existing conversations
    for content_type, object_id in zip(content_type_list, object_id_list):
        content_type = ContentType.objects.get(app_label='groups', model=content_type)
        object_id = object_id
        capset = Capset.objects.create(created_by=created_by,
                                       allow_sharing=allow_sharing,
                                       content_type=content_type,
                                       object_id=object_id)
        capshot = Capshot.objects.create(created_by=created_by,
                                         capset=capset,
                                         order=1,
                                         description=description,
                                         media_type=media_type,
                                         media_url=media_url,
                                         thumbnail_image_url=thumbnail_image_url
                                         )
        # Create CapsetReadActivity
        CapsetReadActivity.objects.create(userprofile=request.user.userprofile, capset=capset)

    try:
        userprofiles_pk = payload_json["userprofiles_pk"]
        if len(userprofiles_pk) > 0:
            # DirectMessageGroup
            if payload_json["direct"] is True:
                for userprofile_pk in userprofiles_pk:
                    sender_directmessagegroups_pk = set(UserProfileDirectMessageGroupMembership.objects.filter(userprofile=created_by).values_list('directmessagegroup', flat=True))
                    receiver_directmessagegroups_pk = set(UserProfileDirectMessageGroupMembership.objects.filter(userprofile__pk=userprofile_pk).values_list('directmessagegroup', flat=True))
                    common_directmessagegroup_pk_list = list(sender_directmessagegroups_pk & receiver_directmessagegroups_pk)
                    # if direct message group exists:
                    if len(common_directmessagegroup_pk_list) == 1:
                        # get dmg
                        directmessagegroup = DirectMessageGroup.objects.get(pk=common_directmessagegroup_pk_list[0])
                    else:
                        # create dmg
                        directmessagegroup = DirectMessageGroup.objects.create(created_by=created_by)
                        UserProfileDirectMessageGroupMembership.objects.create(userprofile=created_by, directmessagegroup=directmessagegroup)
                        UserProfileDirectMessageGroupMembership.objects.create(userprofile=UserProfile.objects.get(pk=userprofile_pk), directmessagegroup=directmessagegroup)
                    # create capset, capshot
                    content_type = ContentType.objects.get(app_label='groups', model='directmessagegroup')
                    object_id = directmessagegroup.pk
                    capset = Capset.objects.create(created_by=created_by,
                                                   allow_sharing=allow_sharing,
                                                   content_type=content_type,
                                                   object_id=object_id)
                    capshot = Capshot.objects.create(created_by=created_by,
                                                     capset=capset,
                                                     order=1,
                                                     description=description,
                                                     media_type=media_type,
                                                     media_url=media_url,
                                                     thumbnail_image_url=thumbnail_image_url
                                                     )
                    # Create CapsetReadActivity
                    CapsetReadActivity.objects.create(userprofile=created_by, capset=capset)
            # TemporaryGroup
            else:
                common_temporarygroup_pk_list = None
                for userprofile_pk in userprofiles_pk:
                    if common_temporarygroup_pk_list is None:
                        common_temporarygroup_pk_list = set(UserProfileTemporaryGroupMembership.objects.filter(userprofile__pk=userprofile_pk).values_list('temporarygroup', flat=True))
                    else:
                        common_temporarygroup_pk_list = common_temporarygroup_pk_list & set(UserProfileTemporaryGroupMembership.objects.filter(userprofile__pk=userprofile_pk).values_list('temporarygroup', flat=True))
                common_temporarygroup_pk_list = list(common_temporarygroup_pk_list)
                # if common TG exists
                if len(common_temporarygroup_pk_list) == 1:
                    # get TG
                    temporarygroup = TemporaryGroup.objects.get(pk=common_temporarygroup_pk_list[0])
                else:
                    temporarygroup = TemporaryGroup.objects.create(created_by=created_by)
                    UserProfileTemporaryGroupMembership.objects.create(userprofile=created_by, temporarygroup=temporarygroup)
                    for userprofile_pk in userprofiles_pk:
                        UserProfileTemporaryGroupMembership.objects.create(userprofile=UserProfile.objects.get(pk=userprofile_pk), temporarygroup=temporarygroup)
                # create capset, capshot
                content_type = ContentType.objects.get(app_label='groups', model='temporarygroup')
                object_id = temporarygroup.pk
                capset = Capset.objects.create(created_by=created_by,
                                               allow_sharing=allow_sharing,
                                               content_type=content_type,
                                               object_id=object_id)
                capshot = Capshot.objects.create(created_by=created_by,
                                                 capset=capset,
                                                 order=1,
                                                 description=description,
                                                 media_type=media_type,
                                                 media_url=media_url,
                                                 thumbnail_image_url=thumbnail_image_url
                                                 )
                # Create CapsetReadActivity
                CapsetReadActivity.objects.create(userprofile=created_by, capset=capset)
    except:  # key userprofiles_pk does not exist
        pass
    return Response({}, status=status.HTTP_201_CREATED)
Stacktrace:
Starting development server at http://127.0.0.1:8000/
Quit the server with CONTROL-C.
'{"allow_capset_sharing": true, "description": "Bla blah", "media_type": 1, "content_type_list": ["group", "directmessagegroup", "temporarygroup"], "object_id_list": [3, 2, 4], "userprofiles_pk": [2, 4, 6], "direct": false}'
Internal Server Error: /api/capshots/send-capshot-new-capset/
Traceback (most recent call last):
File "/Users/ankit/projects/indiez/capshot/capshot-be/venv/lib/python3.6/site-packages/django/core/handlers/exception.py", line 41, in inner
response = get_response(request)
File "/Users/ankit/projects/indiez/capshot/capshot-be/venv/lib/python3.6/site-packages/django/core/handlers/base.py", line 187, in _get_response
response = self.process_exception_by_middleware(e, request)
File "/Users/ankit/projects/indiez/capshot/capshot-be/venv/lib/python3.6/site-packages/django/core/handlers/base.py", line 185, in _get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
File "/Users/ankit/projects/indiez/capshot/capshot-be/venv/lib/python3.6/site-packages/django/views/decorators/csrf.py", line 58, in wrapped_view
return view_func(*args, **kwargs)
File "/Users/ankit/projects/indiez/capshot/capshot-be/venv/lib/python3.6/site-packages/django/views/generic/base.py", line 68, in view
return self.dispatch(request, *args, **kwargs)
File "/Users/ankit/projects/indiez/capshot/capshot-be/venv/lib/python3.6/site-packages/rest_framework/views.py", line 483, in dispatch
response = self.handle_exception(exc)
File "/Users/ankit/projects/indiez/capshot/capshot-be/venv/lib/python3.6/site-packages/rest_framework/views.py", line 443, in handle_exception
self.raise_uncaught_exception(exc)
File "/Users/ankit/projects/indiez/capshot/capshot-be/venv/lib/python3.6/site-packages/rest_framework/views.py", line 480, in dispatch
response = handler(request, *args, **kwargs)
File "/Users/ankit/projects/indiez/capshot/capshot-be/venv/lib/python3.6/site-packages/rest_framework/decorators.py", line 52, in handler
return func(*args, **kwargs)
File "/Users/ankit/projects/indiez/capshot/capshot-be/capshotbackend/capshots/views.py", line 46, in send_capshot_new_capset
payload_json = json.loads(payload_json_str)
File "/usr/local/Cellar/python3/3.6.0/Frameworks/Python.framework/Versions/3.6/lib/python3.6/json/__init__.py", line 354, in loads
return _default_decoder.decode(s)
File "/usr/local/Cellar/python3/3.6.0/Frameworks/Python.framework/Versions/3.6/lib/python3.6/json/decoder.py", line 339, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
File "/usr/local/Cellar/python3/3.6.0/Frameworks/Python.framework/Versions/3.6/lib/python3.6/json/decoder.py", line 357, in raw_decode
raise JSONDecodeError("Expecting value", s, err.value) from None
json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)

json encoding issue in Python

I am attempting a custom JSON encoding, but get an error. The following code sample generates the error:
#!/usr/bin/python3
import json


class Contact:
    def __init__(self, first, last):
        self.first = first
        self.last = last

    @property
    def full_name(self):
        return ("{} {}".format(self.first, self.last))


class ContactEncoder(json.JSONEncoder):
    def defualt(self, obj):
        if isinstance(obj, Contact):
            return {"is_contact": 'T'
                    , "first": obj.first
                    , "last": obj.last
                    , "full_name": obj.full_name}
        return super().defualt(obj)


if __name__ == "__main__":
    c = Contact("Jay", "Loophole")
    print(json.dumps(c.__dict__))
    print(json.dumps(c, cls=ContactEncoder))
The error generated is:
{"first": "Jay", "last": "Loophole"}
Traceback (most recent call last):
File "json_dump.py", line 26, in <module>
print(json.dumps(c, cls=ContactEncoder))
File "/usr/lib/python3.5/json/__init__.py", line 237, in dumps
**kw).encode(obj)
File "/usr/lib/python3.5/json/encoder.py", line 198, in encode
chunks = self.iterencode(o, _one_shot=True)
File "/usr/lib/python3.5/json/encoder.py", line 256, in iterencode
return _iterencode(o, 0)
File "/usr/lib/python3.5/json/encoder.py", line 179, in default
raise TypeError(repr(o) + " is not JSON serializable")
TypeError: <__main__.Contact object at 0x7ffb3445a400> is not JSON serializable
The default dictionary is successfully displayed, but when the custom encoder is passed as the cls parameter, an error occurs.
Any suggestions as to the reason for the error?
Here is your updated code after the defUAlt --> defAUlt correction:
import json


class Contact:
    def __init__(self, first, last):
        self.first = first
        self.last = last

    @property
    def full_name(self):
        return ("{} {}".format(self.first, self.last))


class ContactEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, Contact):
            return {"is_contact": 'T'
                    , "first": obj.first
                    , "last": obj.last
                    , "full_name": obj.full_name}
        return super().default(obj)


if __name__ == "__main__":
    c = Contact("Jay", "Loophole")
    print(json.dumps(c.__dict__))
    print(json.dumps(c, cls=ContactEncoder))
You can check it out live on this page.
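If you also need to load such dumps back into Contact objects, a matching decoder can be sketched with an object_hook, building on the Contact and ContactEncoder classes above; the "is_contact" marker is simply the flag the encoder emits:

def contact_hook(d):
    # Rebuild a Contact whenever the marker emitted by ContactEncoder is present.
    if d.get("is_contact") == "T":
        return Contact(d["first"], d["last"])
    return d


data = json.dumps(Contact("Jay", "Loophole"), cls=ContactEncoder)
restored = json.loads(data, object_hook=contact_hook)
print(restored.full_name)  # Jay Loophole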