Error when doing delete and commit with sqlalchemy - sqlalchemy

I get an error I don't understand when I do session commit after a deletion like this: (in a shell with flask app context or anywhere while running the app)
>>> from app.extensions import db
>>> from app.models.user import User
>>> user = User.query.all()[0]
>>> db.session.delete(user)
>>> db.session.commit()
File
"/Users/hugo/Dropbox/lahey/api/.venv/lib/python3.6/site-packages/sqlalchemy/util/langhelpers.py",
line 962, in module
% (self._il_path, self._il_addtl)) ImportError: importlater.resolve_all() hasn't been called (this is sqlalchemy.orm
strategy_options)
My model for the object I try to delete looks like this:
import datetime
from sqlalchemy_utils.types.password import PasswordType
from sqlalchemy_utils import force_auto_coercion
from app.extensions import db
# Setup coercion of passwords
force_auto_coercion()
class User(db.Model):
    # Surrogate primary key.
    id = db.Column(db.Integer, primary_key=True)
    # Login identity; must be present and unique.
    email = db.Column(db.String(120), unique=True, nullable=False)
    # Stored hashed via passlib's pbkdf2_sha512 (sqlalchemy_utils PasswordType).
    password = db.Column(PasswordType(schemes=['pbkdf2_sha512']), nullable=False)
    name = db.Column(db.String(256))
    # Timestamps maintained automatically on insert / update.
    created_at = db.Column(db.DateTime, default=datetime.datetime.now)
    updated_at = db.Column(db.DateTime, onupdate=datetime.datetime.now)
Deleting objects of other models works fine. Could this somehow be because I'm using the PasswordType column from sqlalchemy_utils?

If you are using Flask, the docstring says you are configuring the Column incorrectly:
Lazy configuration of the type with Flask config:
import flask
from sqlalchemy_utils import PasswordType, force_auto_coercion
force_auto_coercion()
class User(db.Model):
    __tablename__ = 'user'
    password = db.Column(
        PasswordType(
            # The returned dictionary is forwarded to the CryptContext
            onload=lambda **kwargs: dict(
                schemes=flask.current_app.config['PASSWORD_SCHEMES'],
                **kwargs
            ),
        ),
        unique=False,
        nullable=False,
    )

I've worked out what caused this. I've been using the package sqlalchemy_bulk_lazy_loader,
which had a bug (strategy_options was not imported correctly). The issue is now fixed in the package.
See full sqlalchemy mail list thread for full details

Related

Alembic attempts to recreate all tables in the Base class on every migration

In my env.py I have set my target_metadata to Base.metadata which I import from models.py. I have a fresh database with a schema named basic that I want to use to create the tables and setup my models.py like this:
from datetime import datetime
from sqlalchemy import Column, DateTime, Integer, MetaData, String
from sqlalchemy.orm import declarative_base
# Every table declared against this Base lives in the 'basic' schema.
Base = declarative_base(metadata=MetaData(schema='basic'))

class User(Base):
    __tablename__ = 'user'
    id = Column(Integer, primary_key=True)
    name = Column(String, nullable=False)
    created_on = Column(DateTime, default=datetime.utcnow)
I run alembic revision --autogenerate -m"Create user model" and run alembic upgrade heads. Everything works as expected and I have table user in my database under the schema basic.
Now I want to add a table country. I add it to my models.py which now looks like this:
from datetime import datetime
from sqlalchemy import Column, DateTime, Integer, MetaData, String
from sqlalchemy.orm import declarative_base
# Every table declared against this Base lives in the 'basic' schema.
Base = declarative_base(metadata=MetaData(schema='basic'))

class User(Base):
    __tablename__ = 'user'
    id = Column(Integer, primary_key=True)
    name = Column(String, nullable=False)
    created_on = Column(DateTime, default=datetime.utcnow)

class Country(Base):
    __tablename__ = 'country'
    id = Column(Integer, primary_key=True)
    country = Column(String, nullable=False)
    created_on = Column(DateTime, default=datetime.utcnow)
I run alembic revision --autogenerate -m"Create country model" which creates a new versions file that looks like this:
"""Create country model
Revision ID: 0eef32919b0d
Revises: 2da4668d1069
Create Date: 2023-01-19 15:39:08.778274
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '0eef32919b0d'
down_revision = '2da4668d1069'
branch_labels = None
depends_on = None
def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): 'user' is re-created here because autogenerate compared
    # against the default schema, not 'basic' — see discussion below.
    op.create_table('country',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('country', sa.String(), nullable=False),
        sa.Column('created_on', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        schema='basic'
    )
    op.create_table('user',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('created_on', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        schema='basic'
    )
    # ### end Alembic commands ###
def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('user', schema='basic')
    op.drop_table('country', schema='basic')
    # ### end Alembic commands ###
Why does it also try to create the table user again? Running this will give an error that the object basic.user already exists. How can I fix this so that it looks at the current state of the db and only wants to create the table country?
Setting the option include_schemas=True (which is suggested in this thread: Alembic - sqlalchemy does not detect existing tables) helps but then includes all schemas and I only want it to be aware of this single schema.
I only want it to be aware of this single schema.
Then you also need to use include_name=, like so:
def run_migrations_online():
    # …
    def include_name(name, type_, parent_names):
        # Restrict autogenerate to the 'basic' schema only.
        if type_ == "schema":
            # note this will not include the default schema
            return name in ["basic"]
        else:
            return True

    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata,
            include_schemas=True,
            include_name=include_name
        )

SQLAlchemy Table classes and imports

I started a project using PostgreSQL and SQLAlchemy. Since I'm not an experienced programmer (I just started using classes) and am also quite new to databases, I noticed some workflows I don't really understand.
What i understand up till now from classes is the following workflow:
# filename.py
class ClassName():
    def __init__(self):
        # do something
        pass  # a comment alone is not a valid body; 'pass' is required

    def some_funcion(self, var1, var2):
        # do something with parameters
        pass
---------------------------------------
# main.py
from filename import ClassName
par1 = ...
par2 = ...
a = ClassName()
b = a.some_function(par1, par2)
Now i am creating tables from classes:
# base.py
from sqlalchemy.orm import declarative_base
Base = declarative_base()
# tables.py
from base import Base
from sqlalchemy import Column
from sqlalchemy import Integer, String
class A(Base):
    __tablename__ = "a"
    a_id = Column(Integer, primary_key=True)
    a_column = Column(String(30))
class B(Base):
    __tablename__ = "b"
    b_id = Column(Integer, primary_key=True)
    b_column = Column(String(30))
and
import typing
from base import Base
from sqlalchemy import create_engine
from sqlalchemy import MetaData
from sqlalchemy import Table
from sqlalchemy.orm import sessionmaker
from tables import A, B
metadata_obj = MetaData()

def create_tables(engine):
    # NOTE(review): this sessionmaker is configured but never used —
    # Base.metadata.create_all only needs the engine.
    session = sessionmaker()
    session.configure(bind=engine)
    Base.metadata.create_all(bind=engine)
    # Reflect the freshly created tables back as Table objects.
    a = Table("a", metadata_obj, autoload_with=engine)
    b = Table("b", metadata_obj, autoload_with=engine)
    return(a, b) # not sure return is needed
if __name__ == "__main__":
    username = "username"
    password = "AtPasswordHere!"
    dbname = "dbname"
    # Fixed: the separator before the host in a DB URL is '@', not '#'.
    url = "postgresql://" + username + ":" + password + "@localhost/" + dbname
    engine = create_engine(url, echo=True, future=True)
    a, b = create_tables(engine)
Everything works fine in that it creates Table A and Table B in the database. The point i don't understand is the following:
Both my IDE (pyflakes) and LGTM complain "'tables.…' imported but not used". (EDIT: I understand why it complains — it is not the normal class flow. My question is more about why this is not the normal class workflow.)
Is this normal behavior for this usecase? I only see examples that make use of the above workflow
Are there better methods to create the same results (but without the warnings)
If this is the normal behavior: Is there an explanation for this? I didn't read it anywhere.

How do I write SQLAlchemy test fixtures for FastAPI applications

I am writing a FastAPI application that uses a SQLAlchemy database. I have copied the example from the FastAPI documentation, simplifying the database schema for concision's sake. The complete source is at the bottom of this post.
This works. I can run it with uvicorn sql_app.main:app and interact with the database via the Swagger docs. When it runs it creates a test.db in the working directory.
Now I want to add a unit test. Something like this.
from fastapi import status
from fastapi.testclient import TestClient
from pytest import fixture
from main import app
@fixture  # fixed: '@' decorator was mangled to '#'
def client() -> TestClient:
    return TestClient(app)

def test_fast_sql(client: TestClient):
    response = client.get("/users/")
    assert response.status_code == status.HTTP_200_OK
    assert response.json() == []
Using the source code below, this takes the test.db in the working directory as the database. Instead I want to create a new database for every unit test that is deleted at the end of the test.
I could put the global database.engine and database.SessionLocal inside an object that is created at runtime, like so:
class UserDatabase:
    def __init__(self, directory: Path):
        directory.mkdir(exist_ok=True, parents=True)
        sqlalchemy_database_url = f"sqlite:///{directory}/store.db"
        self.engine = create_engine(
            sqlalchemy_database_url, connect_args={"check_same_thread": False}
        )
        self.SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=self.engine)
        models.Base.metadata.create_all(bind=self.engine)
but I don't know how to make that work with main.get_db, since the Depends(get_db) logic ultimately assumes database.engine and database.SessionLocal are available globally.
I'm used to working with Flask, whose unit testing facilities handle all this for you. I don't know how to write it myself. Can someone show me the minimal changes I'd have to make in order to generate a new database for each unit test in this framework?
The complete source of the simplified FastAPI/SQLAlchemy app is as follows.
database.py
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
# SQLite file in the working directory; check_same_thread is disabled so the
# connection may be used from FastAPI's worker threads.
SQLALCHEMY_DATABASE_URL = "sqlite:///./test.db"
engine = create_engine(SQLALCHEMY_DATABASE_URL, connect_args={"check_same_thread": False})
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
Base = declarative_base()
models.py
from sqlalchemy import Column, Integer, String
from database import Base
class User(Base):
    __tablename__ = "users"
    id = Column(Integer, primary_key=True, index=True)
    name = Column(String)
    age = Column(Integer)
schemas.py
from pydantic import BaseModel
class UserBase(BaseModel):
    name: str
    age: int

class UserCreate(UserBase):
    pass

class User(UserBase):
    id: int

    class Config:
        # Allow pydantic to read data from ORM objects, not just dicts.
        orm_mode = True
crud.py
from sqlalchemy.orm import Session
import schemas
import models
def get_user(db: Session, user_id: int):
    """Return the user with the given primary key, or None."""
    return db.query(models.User).filter(models.User.id == user_id).first()

def get_users(db: Session, skip: int = 0, limit: int = 100):
    """Return a page of users."""
    return db.query(models.User).offset(skip).limit(limit).all()

def create_user(db: Session, user: schemas.UserCreate):
    """Insert a new user and return the persisted row (with its id)."""
    db_user = models.User(name=user.name, age=user.age)
    db.add(db_user)
    db.commit()
    db.refresh(db_user)
    return db_user
main.py
from typing import List
from fastapi import Depends, FastAPI, HTTPException
from sqlalchemy.orm import Session
import schemas
import models
import crud
from database import SessionLocal, engine
# Create all tables at import time (fine for a demo; use migrations in production).
models.Base.metadata.create_all(bind=engine)
app = FastAPI()
# Dependency
def get_db():
    """Yield a per-request session, always closing it afterwards."""
    # Create the session before entering try: if SessionLocal() were to raise
    # inside the try, `db` would be unbound in the finally clause (NameError).
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()
@app.post("/users/", response_model=schemas.User)  # fixed: '@' was mangled to '#'
def create_user(user: schemas.UserCreate, db: Session = Depends(get_db)):
    return crud.create_user(db=db, user=user)
@app.get("/users/", response_model=List[schemas.User])  # fixed: '@' was mangled to '#'
def read_users(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)):
    users = crud.get_users(db, skip=skip, limit=limit)
    return users
@app.get("/users/{user_id}", response_model=schemas.User)  # fixed: '@' was mangled to '#'
def read_user(user_id: int, db: Session = Depends(get_db)):
    db_user = crud.get_user(db, user_id=user_id)
    if db_user is None:
        raise HTTPException(status_code=404, detail="User not found")
    return db_user
You need to override your get_db dependency in your tests, see these docs.
Something like this for your fixture:
@fixture  # fixed: '@' decorator was mangled to '#'
def db_fixture() -> Session:
    # Fixed typo: NotImplementError does not exist; the builtin is NotImplementedError.
    raise NotImplementedError() # Make this return your temporary session

@fixture
def client(db_fixture) -> TestClient:
    def _get_db_override():
        return db_fixture
    # Replace the real get_db dependency with the test session for this app.
    app.dependency_overrides[get_db] = _get_db_override
    return TestClient(app)

sqlalchemy ORM: how to declare a table class that has a composite (multi-column) primary key?

I'm a newbie in Sqlalchemy.
I have a table with multiple key in column USERNAME.
This is what I've done in my model.
Model:
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
app = Flask(__name__)
# Fixed: the host separator in a DB URI is '@' (it was mangled to '#').
app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql://root@localhost/admin'
db = SQLAlchemy(app)

class RADUSAGE(db.Model):
    __tablename__ = 'RADUSAGE'
    # NOTE(review): no column is marked primary_key=True — this is exactly
    # what produces the "could not assemble any primary key columns" error.
    USERNAME = db.Column(db.String(513))
    AGE = db.Column(db.Integer)

    def __init__(self, USERNAME, AGE):
        self.USERNAME = USERNAME
        self.AGE = AGE

    def __repr__(self):
        return '<RADUSAGE %r>' % self.USERNAME
But I got an error:
File "/Users/admin/rad_env/lib/python2.7/site-packages/sqlalchemy/ext/declarative/base.py", line 530, in map
**self.mapper_args
File "<string>", line 2, in mapper
File "/Users/admin/rad_env/lib/python2.7/site-packages/sqlalchemy/orm/mapper.py", line 677, in __init__
self._configure_pks()
File "/Users/admin/rad_env/lib/python2.7/site-packages/sqlalchemy/orm/mapper.py", line 1277, in _configure_pks
(self, self.mapped_table.description))
sqlalchemy.exc.ArgumentError: Mapper Mapper|RADUSAGE|RADUSAGE could not assemble any primary key columns for mapped table 'RADUSAGE'
How can I declare this table class that contains multiple key in sqlalchemy? Hope someone can help me. Thanks in advance.
You should be able to specify primary_key=true in your mapping for each PK column:
USERNAME = db.Column(db.String(513), primary_key=True)
AGE = db.Column(db.Integer, primary_key=True)
Did you try that?

SQLAlchemy: how exchange unique values in one transaction?

My versions:
$ python
Python 3.4.0 (default, Apr 11 2014, 13:05:11)
[GCC 4.8.2] on linux
Type "help", "copyright", "credits" or "license" for more information.
>>> import sqlalchemy
>>> sqlalchemy.__version__
'1.0.0'
Let's look to the example for sqlite (in production I use mysql, but it does not matter here):
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy import Column
from sqlalchemy import create_engine
from sqlalchemy import Integer, String
Base=declarative_base()
engine = create_engine('sqlite:///:memory:', echo=True)

class User(Base):
    __tablename__ = 'user'
    id = Column(Integer, primary_key=True)
    name = Column(String(16))
    # Unique but nullable: SQL unique constraints permit multiple NULLs.
    tech_id = Column(Integer, unique=True, nullable=True)

Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine)
s = Session()
Now we add two records:
# Insert the two demo rows in a single flush.
u1 = User(name="User1", tech_id=None)
u2 = User(name="User2", tech_id=10)
s.add_all([u1, u2])
s.commit()
Next try to modify them:
# Attempted swap: free the unique value on u2, then give it to u1.
u1=s.query(User).filter(User.name=="User1").first()
u2=s.query(User).filter(User.name=="User2").first()
u2.tech_id=None
u1.tech_id=10
# Fails: the traceback below shows the flush sets u1's tech_id=10 before
# clearing u2's, so the unique constraint is violated mid-flush.
s.commit()
After commit I've got the exception:
<-- cut -->
sqlalchemy.exc.IntegrityError: (sqlite3.IntegrityError) UNIQUE constraint failed: user.tech_id [SQL: 'UPDATE user SET tech_id=? WHERE user.id = ?'] [parameters: ((10, 1), (None, 2))]
If I do like this:
# Works: the value is released in the first transaction before it is reused.
u2.tech_id=None
s.commit()
u1.tech_id=10
s.commit()
It's all right.
Is it possible to do requests by only one commit (by only one transaction)?
You can do it in this way:
#u1=s.query(User).filter(User.name=="User1").first()
#u2=s.query(User).filter(User.name=="User2").first()
#u2.tech_id=None
#u1.tech_id=10
#s.commit()
# Issue the UPDATEs explicitly, in an order that never violates the unique
# constraint: clear the occupied value first, then reassign it.
s.query(User).filter(User.name=="User2").update(
{User.tech_id: None}
)
s.query(User).filter(User.name=="User1").update(
{User.tech_id: 10}
)
s.commit()
The essence of these changes is to perform the right sequence of actions. You can't create duplicate values for a unique key (at least in MySQL) even before committing, but you can have multiple NULL values in a unique column.