I have two databases that I'm working with in Python using SQLAlchemy. The databases share table names, and therefore I'm getting an error message when running the code.
The error message is:
sqlalchemy.exc.InvalidRequestError: Table 'wo' is already defined for this MetaData instance. Specify 'extend_existing=True' to redefine options and columns on an existing Table object.
The simplified code is below:
from sqlalchemy import create_engine, Column, Integer, String, DateTime, ForeignKey
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, relationship, backref
from mysql.connector.connection import MySQLConnection

Base = declarative_base()

# Compatibility shim: SQLAlchemy calls get_characterset_info(), which newer
# mysql-connector versions renamed to get_charset(), so alias it back.
MySQLConnection.get_characterset_info = MySQLConnection.get_charset

mysqlengine = create_engine('mysql+mysqlconnector://......../mp2', echo=True)
MYSQLSession = sessionmaker(bind=mysqlengine)
mysqlsession = MYSQLSession()

MP2engine = create_engine('mssql+pyodbc://......../mp2', echo=True)
MP2Session = sessionmaker(bind=MP2engine)
mp2session = MP2Session()
class MYSQLWo(Base):
    __tablename__ = 'wo'
    wonum = Column(String, primary_key=True)
    taskdesc = Column(String)
    comments = relationship("MYSQLWocom", order_by="MYSQLWocom.wonum", backref='wo')

class MYSQLWocom(Base):
    __tablename__ = 'wocom'
    wonum = Column(String, ForeignKey('wo.wonum'), primary_key=True)
    comments = Column(String, primary_key=True)

class MP2Wo(Base):
    __tablename__ = 'wo'
    wonum = Column(String, primary_key=True)
    taskdesc = Column(String)
    comments = relationship("MP2Wocom", order_by="MP2Wocom.wonum", backref='wo')

class MP2Wocom(Base):
    __tablename__ = 'woc'
    wonum = Column(String, ForeignKey('wo.wonum'), primary_key=True)
    location = Column(String)
    sublocation1 = Column(String)
    texts = Column(String, primary_key=True)
How do I deal with two databases having the same table names? I'm guessing it has something to do with the MetaData instance, but the SQLAlchemy documentation gets a little confusing when talking about the difference between declarative and classical usage.
Since in reality the tables had different structures, the solution was to simply create a separate declarative base. If the tables indeed had the same structure, I would have only needed one class for both tables.
Base = declarative_base()
Base2 = declarative_base()  # this is all I needed

class MYSQLWo(Base):
    ....

class MYSQLWocom(Base):
    ....

class MP2Wo(Base2):
    ....

class MP2Wocom(Base2):
    ....
http://groups.google.com/group/sqlalchemy/browse_thread/thread/afe09d6387a4dc69?hl=en
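If the tables really had shared a structure, a single mapped class with one session per engine would have been enough, roughly like this (connection URLs are placeholders):
from sqlalchemy import create_engine, Column, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

Base = declarative_base()

class Wo(Base):
    __tablename__ = 'wo'
    wonum = Column(String, primary_key=True)
    taskdesc = Column(String)

# One mapped class, two engines; each session talks to a different database.
mysqlsession = sessionmaker(bind=create_engine('mysql+mysqlconnector://user:pass@host/mp2'))()
mp2session = sessionmaker(bind=create_engine('mssql+pyodbc://user:pass@host/mp2'))()

mysqlsession.query(Wo).all()  # rows from the MySQL database
mp2session.query(Wo).all()    # rows from the MSSQL database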
You can use one db instance with two declarative bases (two Model classes) to get around this problem. The same trick can also be used to implement a master/slave setup in Flask-SQLAlchemy, like this:
app = Flask(__name__)
app.config['SQLALCHEMY_BINDS'] = {'rw': 'rw', 'r': 'r'}
db = SQLAlchemy(app)
db.Model_RW = db.make_declarative_base()

class A(db.Model):
    __tablename__ = 'common'

class B(db.Model_RW):
    __tablename__ = 'common'
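Note that newer Flask-SQLAlchemy releases (3.x) give each entry in SQLALCHEMY_BINDS its own MetaData, so the built-in __bind_key__ attribute achieves the same separation without a second declarative base. A rough sketch, with placeholder SQLite URLs:
from flask import Flask
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///rw.db'  # placeholder
app.config['SQLALCHEMY_BINDS'] = {'r': 'sqlite:///r.db'}   # placeholder
db = SQLAlchemy(app)

class A(db.Model):
    __tablename__ = 'common'
    id = db.Column(db.Integer, primary_key=True)

class B(db.Model):
    __bind_key__ = 'r'  # this model lives in the 'r' bind's own MetaData
    __tablename__ = 'common'
    id = db.Column(db.Integer, primary_key=True)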
I am trying to create a DB with user and project tables having a many-to-many relation. A user can be a part of many projects and a project can have many users. I created the models for the user, project and the association tables and am able to create the database using alembic without any issues.
However, when I try to use the models, it throws an exception:
sqlalchemy.exc.InvalidRequestError: One or more mappers failed to initialize - can't proceed with initialization of other mappers. Triggering mapper: 'mapped class User->users'. Original exception was: reverse_property 'user' on relationship User.projects references relationship ProjectUsers.user, which does not reference mapper mapped class User->users
Here is the code for the models:
from app.modules.database import BaseModel
from sqlalchemy import Column, Integer, String, ForeignKey, JSON
from sqlalchemy.orm import relationship
from sqlalchemy_json import mutable_json_type

class Project(BaseModel):
    __tablename__ = "projects"
    __table_args__ = {'extend_existing': True}

    project_name = Column(String, unique=True)
    project_desc = Column(String)
    users = relationship('app.modules.projects.project_models.ProjectUsers', back_populates='project')
    owner_id = Column(Integer, ForeignKey('users.id'))

class ProjectUsers(BaseModel):
    __tablename__ = "project_users"
    __table_args__ = {'extend_existing': True}

    user_id = Column(Integer, ForeignKey('users.id'), primary_key=True)
    project_id = Column(Integer, ForeignKey('projects.id'), primary_key=True)
    user = relationship("app.modules.users.user_models.User", back_populates="projects")
    project = relationship("app.modules.projects.project_models.Project", back_populates="users")
The User model is:
from app.modules.database import BaseModel
from sqlalchemy import Column, Integer, String, ForeignKey
from sqlalchemy.orm import relationship
from dataclasses import dataclass

@dataclass
class User(BaseModel):
    __tablename__ = "users"
    __table_args__ = {'extend_existing': True}

    email = Column(String, unique=True)
    username = Column(String, unique=True)
    f_name = Column(String)
    l_name = Column(String)
    m_name = Column(String)
    role_id = Column(Integer, ForeignKey('roles.id'))
    # user_role = relationship('app.modules.roles.role_models.Role', backref="users")
    # A user can have access to many projects where they are not the owners of the project.
    # projects = relationship('app.modules.projects.project_models.Project', secondary="project_users", back_populates="users")
    projects = relationship('app.modules.projects.project_models.ProjectUsers', back_populates='user')

    def __repr__(self):
        return '<User: {}>'.format(self.username)
Alembic creates the corresponding tables with the proper associations without any issues, but running a simple test like creating a new User() or Project() throws the above exception.
I think the issue might come from using a dataclass without the correct mapping method. There are three ways to apply ORM mappings to a dataclass. You seem to use declarative mapping, so I'll apply the declarative dataclass mapping method for SQLAlchemy 1.4.
from __future__ import annotations  # keep list[Project] as a lazy annotation

from dataclasses import dataclass, field

from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.orm import registry, relationship

mapper_registry = registry()

@mapper_registry.mapped
@dataclass
class User:
    __tablename__ = "users"
    __table_args__ = {'extend_existing': True}
    __sa_dataclass_metadata_key__ = "sa"

    # primary key assumed here; in the question it presumably comes from BaseModel
    id: int = field(init=False, metadata={"sa": Column(Integer, primary_key=True)})
    email: str | None = field(init=False, metadata={"sa": Column(String, unique=True)})
    username: str | None = field(init=False, metadata={"sa": Column(String, unique=True)})
    f_name: str | None = field(init=False, metadata={"sa": Column(String)})
    l_name: str | None = field(init=False, metadata={"sa": Column(String)})
    m_name: str | None = field(init=False, metadata={"sa": Column(String)})
    role_id: int | None = field(init=False, metadata={"sa": Column(Integer, ForeignKey('roles.id'))})
    # user_role = relationship('app.modules.roles.role_models.Role', backref="users")
    # A user can have access to many projects where they are not the owners of the project.
    # projects = relationship('app.modules.projects.project_models.Project', secondary="project_users", back_populates="users")
    projects: list[Project] = field(default_factory=list, metadata={"sa": relationship('app.modules.projects.project_models.ProjectUsers', back_populates='user')})
    # define repr by using the dataclasses.field repr kwarg
If that alone does not solve your problem, please add the definition of app.modules.database.BaseModel to your question.
I have a model that depends on some fields of another model. These fields should be present when the record is created, but I do not see a way to enforce that on the database:
class Study(db.Model):
    id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    type = db.Column(Enum(StudyTypeChoices), nullable=False)
    owner_id = db.Column(UUID(as_uuid=True), db.ForeignKey('owner.id'), nullable=False)
    participants = db.relationship('Participant', lazy=True, cascade='save-update, merge, delete')
How can I make sure that participants is provided when the Study record gets created (similar to what happens with the type field)? I know I can put a wrapper around it to ensure that, but I am wondering if there is a neater way of doing it with SQLAlchemy.
Edit: This is the definition of the Participant model
class Participant(UserBase):
    id = db.Column(UUID(as_uuid=True), db.ForeignKey("user_base.id"), primary_key=True)
    study_id = db.Column(UUID(as_uuid=True), db.ForeignKey('study.id'))
You can listen to before_flush events and prevent flushes containing studies without participants, for instance by raising an exception:
@event.listens_for(Session, "before_flush")
def before_flush(session, flush_context, instances):
    for instance in session.new:  # might want to inspect session.dirty as well
        if isinstance(instance, Study) and (
            instance.participants is None or instance.participants == []
        ):
            raise ValueError(
                f"Study {instance} cannot have {instance.participants} participants."
            )
This only checks new studies; you might want to inspect session.dirty as well for updated studies, for example as in the sketch below.
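For example, a variant of the listener that also inspects session.dirty (untested):
from itertools import chain

@event.listens_for(Session, "before_flush")
def before_flush(session, flush_context, instances):
    # validate newly added and modified instances alike
    for instance in chain(session.new, session.dirty):
        if isinstance(instance, Study) and not instance.participants:
            raise ValueError(f"Study {instance} must have at least one participant.")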
Full demo:
from sqlalchemy import Column, ForeignKey, Integer, create_engine, event
from sqlalchemy.orm import Session, declarative_base, relationship

Base = declarative_base()

class Study(Base):
    __tablename__ = "study"
    id = Column(Integer, primary_key=True)
    participants = relationship("Participant", uselist=True, back_populates="study")

class Participant(Base):
    __tablename__ = "participant"
    id = Column(Integer, primary_key=True)
    study_id = Column(Integer, ForeignKey("study.id"), nullable=True)
    study = relationship("Study", back_populates="participants")

@event.listens_for(Session, "before_flush")
def before_flush(session, flush_context, instances):
    for instance in session.new:  # might want to inspect session.dirty as well
        if isinstance(instance, Study) and (
            instance.participants is None or instance.participants == []
        ):
            raise ValueError(
                f"Study {instance} cannot have {instance.participants} participants."
            )

engine = create_engine("sqlite://", future=True, echo=True)
Base.metadata.create_all(engine)

s1 = Study()
p1_1 = Participant()
p1_2 = Participant()
s1.participants.extend([p1_1, p1_2])
s2 = Study()

with Session(bind=engine) as session:
    session.add(s1)
    session.commit()  # OK

with Session(bind=engine) as session:
    session.add(s2)
    session.commit()  # ValueError
In Hibernate it's possible to query using raw SQL and get entities (objects) back, something like createSQLQuery(sql).addEntity(User.class).list().
Is it possible to do the same in SQLAlchemy?
As @Ilja notes via a link in a comment to the question, it is possible to do what you describe using .from_statement(), as described in the documentation:
from sqlalchemy import Column, create_engine, Integer, select, String, text
from sqlalchemy.orm import declarative_base, Session

engine = create_engine("sqlite://")
Base = declarative_base()

class Person(Base):
    __tablename__ = "person"
    id = Column(Integer, primary_key=True)
    name = Column(String, nullable=False)

    def __repr__(self):
        return f"<Person(id={self.id}, name='{self.name}')>"

Base.metadata.create_all(engine)

# sample data
with Session(engine) as session, session.begin():
    session.add_all(
        [Person(name="Adam"), Person(name="Alicia"), Person(name="Brandon")]
    )

# test
with Session(engine) as session, session.begin():
    sql = "SELECT id FROM person WHERE name LIKE 'A%'"
    results = session.scalars(select(Person).from_statement(text(sql))).all()
    print(results)
    # [<Person(id=1, name='Adam')>, <Person(id=2, name='Alicia')>]
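For completeness, the legacy 1.x Query API supports the same pattern through Query.from_statement():
# equivalent result with the legacy Query API
results = session.query(Person).from_statement(text(sql)).all()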
When using the JPA EntityManager you can try:
entityManager.createNativeQuery("select some native query", User.class)
According to the API:
public Query createNativeQuery(String sqlString, Class resultClass);
I have a Flask-SQLAlchemy polymorphic table structure like so:
class Parent(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    polytype = db.Column(db.String(32), nullable=False)
    value = db.Column(db.String(32))

    __mapper_args__ = {'polymorphic_identity': 'parent',
                       'polymorphic_on': polytype}

class Child(Parent):
    id = db.Column(db.Integer, db.ForeignKey('parent.id'),
                   primary_key=True)

    @validates('value')
    def validate_value(self, key, val):
        # [validation code]
        return val

    __mapper_args__ = {'polymorphic_identity': 'child'}
and I want to validate the value field. However, the validator for Child.value, the column inherited from Parent, never runs.
What is the correct way to validate an inherited column?
There's an old open issue about it.
In that issue, it is suggested that using an event listener can work, for example:
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import event

db = SQLAlchemy()

class Parent(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    polytype = db.Column(db.String(32), nullable=False)
    value = db.Column(db.String(32))

    __mapper_args__ = {'polymorphic_identity': 'parent',
                       'polymorphic_on': polytype}

class Child(Parent):
    id = db.Column(db.Integer, db.ForeignKey('parent.id'),
                   primary_key=True)

    __mapper_args__ = {'polymorphic_identity': 'child'}

@event.listens_for(Parent.value, "set", propagate=True)
def validate_value(inst, val, *args):
    print(f"checking value for {inst}")
    assert val == "spam"

Parent(value="spam")
Child(value="spam")
If you don't want the listener to fire on Parent instances, register your listener function with event.listens_for(Child.value, ...) instead.
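A minimal sketch of that variant (the listener name validate_child_value is mine):
@event.listens_for(Child.value, "set")
def validate_child_value(inst, val, *args):
    # fires only when value is set on Child instances
    print(f"checking value for {inst}")
    assert val == "spam"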
Another workaround for this known open issue is creating an instance method to be called inside the @validates method, for example:
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy.orm import validates

db = SQLAlchemy()

class Parent(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    polytype = db.Column(db.String(32), nullable=False)
    value = db.Column(db.String(32))

    __mapper_args__ = {'polymorphic_identity': 'parent',
                       'polymorphic_on': polytype}

    @validates('value')
    def validate_value(self, key, new_value):
        self._validate_value(new_value)
        return new_value  # a @validates method must return the value to assign

    def _validate_value(self, new_value):
        print(f"checking value for {self}")
        assert new_value == "spam"

class Child(Parent):
    id = db.Column(db.Integer, db.ForeignKey('parent.id'),
                   primary_key=True)

    __mapper_args__ = {'polymorphic_identity': 'child'}

    def _validate_value(self, new_value):
        print(f"checking value for {self}")
        assert new_value == "child"

Parent(value="spam")
Child(value="child")
In this case, you are able to validate the value with different behavior in each Child by overriding the private method _validate_value.
I use SQLAlchemy's ORM to interface with one of our application's databases. On one host, the database backend of an application instance is PostgreSQL, while on another we use MySQL for historical reasons.
The schemas (and relationships) are identical on both backends, but the table and column names in PostgreSQL are lowercase, while on MySQL they are uppercase. Is there any way I can adapt my schema in SQLAlchemy so that I don't need to duplicate code?
The following is an example of my schema. Hint: the application is a Confluence wiki ;)
from sqlalchemy import (
    BigInteger,
    Column,
    DateTime,
    String,
)
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class Space(Base):
    __tablename__ = 'spaces'

    spaceid = Column(BigInteger, primary_key=True)
    spacename = Column(String(255))
    spacekey = Column(String(255), nullable=False, unique=True)
    lowerspacekey = Column(String(255), nullable=False, index=True)
    spacedescid = Column(BigInteger, index=True)
    homepage = Column(BigInteger, index=True)
    creator = Column(String(255), index=True)
    creationdate = Column(DateTime, index=True)
    lastmodifier = Column(String(255), index=True)
    lastmoddate = Column(DateTime)
    spacetype = Column(String(255))
    spacestatus = Column(String(255), index=True)
Thanks to @rfkortekaas's suggestion, I managed to solve the issue by automapping the schema and then overriding the naming scheme for table and column names.
The SQLAlchemy documentation has two very useful pages:
Overriding Naming Schemes
Intercepting Column Definitions
For posterity's sake:
from sqlalchemy import create_engine, event
from sqlalchemy.ext.automap import automap_base

from .utilities import to_pascal_case, to_snake_case

AutomapBase = automap_base()

@event.listens_for(AutomapBase.metadata, "column_reflect")
def column_reflect(inspector, table, column_info):
    # expose e.g. the reflected column 'SPACEID' under a snake_case attribute name
    column_info['key'] = to_snake_case(column_info['name'])

def classname_for_table(base, table_name, table):
    return to_pascal_case(table_name)

if __name__ == '__main__':
    dbcs = 'postgresql://username:password@127.0.0.1:3306/database'
    automap_engine = create_engine(dbcs, future=True, echo=verbose > 2)  # verbose comes from the surrounding script
    AutomapBase.prepare(
        autoload_with=automap_engine,
        classname_for_table=classname_for_table,
    )
    # Access the automapped classes with AutomapBase.classes.<class_name>
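The to_pascal_case and to_snake_case helpers live in the author's own .utilities module and are not shown; a minimal sketch of what they might look like:
import re

def to_snake_case(name):
    # 'SpaceName' -> 'space_name', 'SPACEID' -> 'spaceid'
    return re.sub(r'(?<=[a-z0-9])([A-Z])', r'_\1', name).lower()

def to_pascal_case(name):
    # 'spaces' -> 'Spaces', 'space_keys' -> 'SpaceKeys'
    return ''.join(part.capitalize() for part in name.split('_'))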