How do I implement a relationship using classic mapping? SQLAlchemy version 1.3.23. Here is a simplified version of what I tried:
users_table = Table(
    "users",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("username", String, nullable=False, unique=True),
)

markets_table = Table(
    "markets",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("type", Enum(MarketTypes), nullable=False),
    Column("status", Boolean, server_default="True"),
    Column("user_id", ForeignKey("users.id"), nullable=False),
)
class User(object):
    def __init__(self, id, username):
        self.id = id
        self.username = username

class Market(object):
    def __init__(self, id, type, status, user_id):
        self.id = id
        self.type = type
        self.status = status
        self.user_id = user_id

mapper(Market, markets_table)
mapper(User, users_table, properties={
    "markets": relationship(markets_table, primaryjoin=users_table.c.id == markets_table.c.user_id)
})
The problem is that I can't access users_table.c.markets.
I found several questions on this topic on Stack Overflow, but none of them helped me.
In order to access the mapped property you should do it from the class, not the Table. So instead of calling:
users_table.c.markets
you should access:
User.markets, i.e. user.markets on an instance of User.
What you were trying was essentially accessing a column named markets on users_table, which doesn't exist.
Let me know if this helps!
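For reference, here is a minimal sketch of what that looks like end to end. It assumes the table and class definitions above plus an existing engine with some data, and it targets the mapped Market class in relationship() rather than the Table object:

from sqlalchemy.orm import mapper, relationship, sessionmaker

mapper(Market, markets_table)
mapper(User, users_table, properties={
    "markets": relationship(
        Market,  # target the mapped class, not the Table
        primaryjoin=users_table.c.id == markets_table.c.user_id,
    )
})

Session = sessionmaker(bind=engine)  # engine is assumed to exist already
session = Session()

user = session.query(User).filter_by(username="alice").one()
print(user.markets)  # list of Market instances loaded via the relationship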
I am just a starter in FastAPI/Pydantic and SQLAlchemy. I have two models, Post and Category, and I want Post to return the Category name instead of only the id.
When I try to use the code below, it gives the following error in the console.
Any help in solving this is much appreciated, thank you!
response -> 1 -> category_name
field required (type=value_error.missing)
post.py models
class Post(Base):
    __tablename__ = "post"
    id = Column(Integer, primary_key=True, index=True)
    title = Column(String(50))
    user_id = Column(Integer, ForeignKey("users.id"))
    category_id = Column(Integer, ForeignKey("category.id"))
    category_name = relationship("Category", backref="post")

class Category(Base):
    __tablename__ = "category"
    id = Column(Integer, primary_key=True, index=True)
    name = Column(String)
Pydantic models
class CategoryGet(BaseModel):
    id: int
    name: str

    class Config:
        orm_mode = True

class Post(BaseModel):
    id = int
    title = str
    user_id = int
    category_id = int
    category_name = CategoryGet

    class Config:
        orm_mode = True
My mainapp.py
router = APIRouter()

@router.get("/", response_model=List[schemas.VehicleGet])
def get_vehicle(db: Session = Depends(get_db), skip: int = 0, limit: int = 50) -> Any:
    vehicle = crud.post.get_multi(db, skip=skip, limit=limit)
    return vehicle
Pydantic model attributes use : instead of =:
class Post(BaseModel):
    id: int
    title: str
    user_id: int
    category_id: int
    category_name: CategoryGet

    class Config:
        orm_mode = True
I guess the problem is that you forgot the orm_mode = True config for your Post model, and consequently it is unable to recognize the category_name field. I hope this solves it, but if not you could check this thread, where there is an example and some clarification about relationship handling with Pydantic.
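As a rough, self-contained sketch of how the nested schema resolves once orm_mode is enabled on both models: the SimpleNamespace objects below are just stand-ins for your ORM instances, and the response schema is renamed PostGet here to avoid clashing with the ORM Post class.

from types import SimpleNamespace
from pydantic import BaseModel

class CategoryGet(BaseModel):
    id: int
    name: str

    class Config:
        orm_mode = True

class PostGet(BaseModel):
    id: int
    title: str
    user_id: int
    category_id: int
    category_name: CategoryGet  # populated from the ORM relationship attribute

    class Config:
        orm_mode = True

# Stand-ins for ORM rows: post.category_name holds the related Category object.
category = SimpleNamespace(id=1, name="news")
post = SimpleNamespace(id=1, title="hello", user_id=1, category_id=1, category_name=category)

print(PostGet.from_orm(post))
# id=1 title='hello' user_id=1 category_id=1 category_name=CategoryGet(id=1, name='news')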
Sorry if this is a newbie question, but the documentation about the many-to-one relationship doesn't seem to cover this. I have been looking for something similar to this (under the "How to Insert / Add Data to Your Tables" section); however, in the example shown the inserted value is always unique.
Basically, I want to populate my database with data located on my local machine. For the sake of simplicity I have reduced the example below to an MWE that illustrates the problem. It involves two tables called Price and Currency, and the implementation is done in declarative style.
model.py
from sqlalchemy import Column, Integer, String
from sqlalchemy import Float, BigInteger, ForeignKey
from sqlalchemy.orm import relationship, backref
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class Currency(Base):
    __tablename__ = 'Currency'

    id = Column(Integer, primary_key=True)
    unit = Column(String(16), unique=True)

    def __init__(self, unit):
        self.unit = unit

class Price(Base):
    __tablename__ = 'Price'

    id = Column(BigInteger, primary_key=True)
    currency_id = Column(Integer, ForeignKey("Currency.id"), nullable=False)
    currency = relationship("Currency", backref="Currency.id")
    hour1 = Column(Float)
    hour2 = Column(Float)

    def __init__(self, hour1, hour2):
        self.hour1 = hour1
        self.hour2 = hour2
Currently, I am populating the database using the following code:
script.py
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from model import *

engine = create_engine('sqlite:///example.db', echo=True)
db_session = scoped_session(sessionmaker(autocommit=False,
                                         autoflush=False,
                                         bind=engine))
session = db_session()

Base.metadata.create_all(engine)

oPrice = Price(2.5, 2.5)
oPrice.currency = Currency("EUR")
session.add(oPrice)

tPrice = Price(5.5, 1.5)
tPrice.currency = Currency("EUR")
session.add(tPrice)

session.commit()
This creates an error
sqlalchemy.exc.IntegrityError: (IntegrityError) column unit is not unique u'INSERT INTO "Currency" (unit) VALUES (?)' ('EUR',)
What is the best strategy for populating my database, such that I ensure that the Currency.id and Price.currency_id mapping is correct? Should I make the model classes check for uniqueness before they are initialized, and how do I do that in association with the other table?
I'd second what Antti has suggested: since currencies have standard codes like 'INR', 'USD', etc., you can make the currency code the primary key.
Or in case you want to keep the numeric primary key then one of the options is:
http://www.sqlalchemy.org/trac/wiki/UsageRecipes/UniqueObject
edit (adding an example based on the recipe in the link above, the one with the class decorator)
database.py
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker

engine = create_engine('sqlite:///example.db', echo=True)
db_session = scoped_session(sessionmaker(autocommit=False,
                                         autoflush=False,
                                         bind=engine))
model.py
from sqlalchemy import Column, Integer, String
from sqlalchemy import Float, BigInteger, ForeignKey
from sqlalchemy.orm import relationship, backref
from sqlalchemy.ext.declarative import declarative_base
from database import db_session

Base = declarative_base()

def _unique(session, cls, hashfunc, queryfunc, constructor, arg, kw):
    cache = getattr(session, '_unique_cache', None)
    if cache is None:
        session._unique_cache = cache = {}

    key = (cls, hashfunc(*arg, **kw))
    if key in cache:
        return cache[key]
    else:
        with session.no_autoflush:
            q = session.query(cls)
            q = queryfunc(q, *arg, **kw)
            obj = q.first()
            if not obj:
                obj = constructor(*arg, **kw)
                session.add(obj)
        cache[key] = obj
        return obj

def unique_constructor(scoped_session, hashfunc, queryfunc):
    def decorate(cls):
        def _null_init(self, *arg, **kw):
            pass

        def __new__(cls, bases, *arg, **kw):
            # no-op __new__(), called
            # by the loading procedure
            if not arg and not kw:
                return object.__new__(cls)

            session = scoped_session()

            def constructor(*arg, **kw):
                obj = object.__new__(cls)
                obj._init(*arg, **kw)
                return obj

            return _unique(
                session,
                cls,
                hashfunc,
                queryfunc,
                constructor,
                arg, kw
            )

        # note: cls must be already mapped for this part to work
        cls._init = cls.__init__
        cls.__init__ = _null_init
        cls.__new__ = classmethod(__new__)
        return cls

    return decorate

@unique_constructor(
    db_session,
    lambda unit: unit,
    lambda query, unit: query.filter(Currency.unit == unit)
)
class Currency(Base):
    __tablename__ = 'Currency'

    id = Column(Integer, primary_key=True)
    unit = Column(String(16), unique=True)

    def __init__(self, unit):
        self.unit = unit

class Price(Base):
    __tablename__ = 'Price'

    id = Column(BigInteger, primary_key=True)
    currency_id = Column(Integer, ForeignKey("Currency.id"), nullable=False)
    currency = relationship("Currency", backref="Currency.id")
    hour1 = Column(Float)
    hour2 = Column(Float)

    def __init__(self, hour1, hour2):
        self.hour1 = hour1
        self.hour2 = hour2

script.py:
from model import *
from database import engine, db_session as session

Base.metadata.create_all(engine)

oPrice = Price(2.5, 2.5)
oPrice.currency = Currency("EUR")
session.add(oPrice)

tPrice = Price(5.5, 1.5)
tPrice.currency = Currency("EUR")
session.add(tPrice)

session.commit()
The simplest solution is to use the currency codes as the primary keys in Currency, and as foreign keys in Price. Then you can have
price.currency_id = "EUR"
This also makes your database tables more readable: instead of 28342 you'll see 'GBP'.
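A minimal sketch of what that could look like in the same declarative style (column names chosen to match the answer above; adjust types and lengths as needed):

from sqlalchemy import Column, String, Float, BigInteger, ForeignKey
from sqlalchemy.orm import relationship
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class Currency(Base):
    __tablename__ = 'Currency'
    # The ISO currency code ('EUR', 'GBP', ...) is itself the primary key.
    unit = Column(String(3), primary_key=True)

class Price(Base):
    __tablename__ = 'Price'
    id = Column(BigInteger, primary_key=True)
    currency_id = Column(String(3), ForeignKey("Currency.unit"), nullable=False)
    currency = relationship("Currency")
    hour1 = Column(Float)
    hour2 = Column(Float)

# Insert each currency row once, then reference it by code:
# session.add(Currency(unit="EUR"))
# session.add_all([Price(hour1=2.5, hour2=2.5, currency_id="EUR"),
#                  Price(hour1=5.5, hour2=1.5, currency_id="EUR")])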
In the code below I want to replace all_holdings in Account with a property called holdings that returns the desired_holdings (i.e. the holdings representing the latest known quantity, which can change over time). I'm having trouble figuring out how to construct the call to relationship.
In addition, I'd appreciate any comments on the appropriateness of the pattern (keeping historic data in a single table and using a max-date subquery to get the most recent rows), as well as on better alternatives or improvements to the query.
from sqlalchemy import Column, Integer, String, Date, DateTime, REAL, ForeignKey, func
from sqlalchemy.orm import relationship, aliased
from sqlalchemy.sql.operators import and_, eq
from sqlalchemy.ext.declarative import declarative_base
from db import session
import datetime
import string

Base = declarative_base()

class MySQLSettings(object):
    __table_args__ = {'mysql_engine':'InnoDB'}

class Account(MySQLSettings, Base):
    __tablename__ = 'account'

    id = Column(Integer, primary_key=True)
    name = Column(String(64))
    all_holdings = relationship('Holding', backref='account')

    def desired_holdings(self):
        max_date_subq = session.query(Holding.account_id.label('account_id'),
                                      Holding.stock_id.label('stock_id'),
                                      func.max(Holding.as_of).label('max_as_of')). \
            group_by(Holding.account_id, Holding.stock_id).subquery()

        desired_query = session.query(Holding).join(Account,
                Account.id==account.id).join(max_date_subq).\
            filter(max_date_subq.c.account_id==account.id).\
            filter(Holding.as_of==max_date_subq.c.max_as_of).\
            filter(Holding.account_id==max_date_subq.c.account_id).\
            filter(Holding.stock_id==max_date_subq.c.stock_id)

        return desired_query.all()

    def __init__(self, name):
        self.name = name

class Stock(MySQLSettings, Base):
    __tablename__ = 'stock'

    id = Column(Integer, primary_key=True)
    name = Column(String(64))

    def __init__(self, name):
        self.name = name

class Holding(MySQLSettings, Base):
    __tablename__ = 'holding'

    id = Column(Integer, primary_key=True)
    account_id = Column(Integer, ForeignKey('account.id'), nullable=False)
    stock_id = Column(Integer, ForeignKey('stock.id'), nullable=False)
    quantity = Column(REAL)
    as_of = Column(Date)

    stock = relationship('Stock')

    def __str__(self):
        return "Holding(%f, '%s' '%s')"%(self.quantity, self.stock.name, str(self.as_of))

    def __init__(self, account, stock, quantity, as_of):
        self.account_id = account.id
        self.stock_id = stock.id
        self.quantity = quantity
        self.as_of = as_of

if __name__ == "__main__":
    ibm = Stock('ibm')
    session.add(ibm)

    account = Account('a')
    session.add(account)
    session.flush()

    session.add_all([ Holding(account, ibm, 100, datetime.date(2001, 1, 1)),
                      Holding(account, ibm, 200, datetime.date(2001, 1, 3)),
                      Holding(account, ibm, 300, datetime.date(2001, 1, 5)) ])
    session.commit()

    print "All holdings by relation:\n\t", \
        string.join([ str(h) for h in account.all_holdings ], "\n\t")
    print "Desired holdings query:\n\t", \
        string.join([ str(h) for h in account.desired_holdings() ], "\n\t")
The results when run are:
All holdings by relation:
Holding(100.000000, 'ibm' '2001-01-01')
Holding(200.000000, 'ibm' '2001-01-03')
Holding(300.000000, 'ibm' '2001-01-05')
Desired holdings query:
Holding(300.000000, 'ibm' '2001-01-05')
The following answer was provided by Michael Bayer after I posted to the SQLAlchemy Google group:
The desired_holdings() query is pretty complicated and I'm not seeing a win by trying to get relationship() to do it. relationship() is oriented towards maintaining the persistence between two classes, not as much a reporting technique (and anything with max()/group_by in it is referring to reporting).
I would stick @property on top of desired_holdings, use object_session(self) to get at "session", and be done.
See more information on query-enabled properties.
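A minimal sketch of that suggestion, reusing the query from desired_holdings() above and assuming the same Holding model; the session is obtained from the instance via object_session() instead of being imported at module level:

from sqlalchemy import and_, func
from sqlalchemy.orm import object_session

# Inside the Account class, replacing the plain desired_holdings() method:
@property
def desired_holdings(self):
    session = object_session(self)  # the Session this Account is attached to
    max_date_subq = (session.query(Holding.account_id.label('account_id'),
                                   Holding.stock_id.label('stock_id'),
                                   func.max(Holding.as_of).label('max_as_of'))
                     .group_by(Holding.account_id, Holding.stock_id)
                     .subquery())
    return (session.query(Holding)
            .join(max_date_subq,
                  and_(Holding.account_id == max_date_subq.c.account_id,
                       Holding.stock_id == max_date_subq.c.stock_id,
                       Holding.as_of == max_date_subq.c.max_as_of))
            .filter(Holding.account_id == self.id)
            .all())

With this in place the caller reads account.desired_holdings without parentheses, and no module-level session import is needed.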
This is my file so far:
from sqlalchemy import create_engine, ForeignKey
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, backref
from sqlalchemy import Column, Integer, String
from sqlalchemy import Table, Text

engine = create_engine('mysql://root:ababab@localhost/alctest',
                       echo=False)

Base = declarative_base()

class User(Base):
    __tablename__ = 'users'

    id = Column(Integer, primary_key=True)
    name = Column(String(100))
    fullname = Column(String(100))
    password = Column(String(100))

    addresses = relationship("Address", order_by="Address.id", backref="user")

    def __init__(self, name, fullname, password):
        self.name = name
        self.fullname = fullname
        self.password = password

    def __repr__(self):
        return "<User('%s','%s', '%s')>" % (self.name, self.fullname, self.password)

class Address(Base):
    __tablename__ = 'addresses'

    id = Column(Integer, primary_key=True)
    email_address = Column(String(100), nullable=False)

    # foreign key, must define relationship
    user_id = Column(Integer, ForeignKey('users.id'))
    user = relationship("User", backref=backref('addresses', order_by=id))

Base.metadata.create_all(engine)
This file is pretty simple. It creates the User and Address tables; after I run it, the tables exist in the database.
But now I want to add a column to "User". How can I do that? What do I have to do?
You can add a column with the Table.append_column method:
test = Column('test', Integer)
User.__table__.append_column(test)
But this will not emit an ALTER TABLE command to add that column to the database. As per the documentation for append_column, you have to run that command manually after adding the column to the model.
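For example, a sketch of issuing the DDL yourself (the exact ALTER statement depends on your backend and column type, and engine is the Engine created above):

from sqlalchemy import text

# append_column only updates the Python-side metadata; the database table
# still has to be altered by hand to match:
with engine.begin() as conn:
    conn.execute(text("ALTER TABLE users ADD COLUMN test INTEGER"))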
Short answer: you cannot. AFAIK, there is currently no way to do it from SQLAlchemy directly.
However, you can use sqlalchemy-migrate for this if you change your model frequently and have different versions rolled out to production. Otherwise it might be overkill, and you may be better off generating the ALTER TABLE ... scripts manually.
I have a star-schema architected database that I want to represent in SQLAlchemy, and I'm not sure how this can best be done. Right now I have a lot of properties with custom join conditions, because the data is stored in different tables.
It would be nice if it were possible to re-use the dimensions for different fact tables, but I haven't figured out how that can be done nicely.
A typical fact table in a star schema contains foreign key references to all dimension tables, so usually there wouldn't be any need for custom join conditions - they are determined automatically from foreign key references.
For example a star schema with two fact tables would look like:
Base = declarative_meta()

class Store(Base):
    __tablename__ = 'store'
    id = Column('id', Integer, primary_key=True)
    name = Column('name', String(50), nullable=False)

class Product(Base):
    __tablename__ = 'product'
    id = Column('id', Integer, primary_key=True)
    name = Column('name', String(50), nullable=False)

class FactOne(Base):
    __tablename__ = 'sales_fact_one'

    store_id = Column('store_id', Integer, ForeignKey('store.id'), primary_key=True)
    product_id = Column('product_id', Integer, ForeignKey('product.id'), primary_key=True)
    units_sold = Column('units_sold', Integer, nullable=False)

    store = relation(Store)
    product = relation(Product)

class FactTwo(Base):
    __tablename__ = 'sales_fact_two'

    store_id = Column('store_id', Integer, ForeignKey('store.id'), primary_key=True)
    product_id = Column('product_id', Integer, ForeignKey('product.id'), primary_key=True)
    units_sold = Column('units_sold', Integer, nullable=False)

    store = relation(Store)
    product = relation(Product)
But suppose you want to reduce the boilerplate in any case. I'd create generators local to the dimension classes which configure themselves on a fact table:
class Store(Base):
    __tablename__ = 'store'
    id = Column('id', Integer, primary_key=True)
    name = Column('name', String(50), nullable=False)

    @classmethod
    def add_dimension(cls, target):
        target.store_id = Column('store_id', Integer, ForeignKey('store.id'), primary_key=True)
        target.store = relation(cls)
in which case usage would be like:
class FactOne(Base):
    ...

Store.add_dimension(FactOne)
But, there's a problem with that. Assuming the dimension columns you're adding are primary key columns, the mapper configuration is going to fail since a class needs to have its primary keys set up before the mapping is set up. So assuming we're using declarative (which you'll see below has a nice effect), to make this approach work we'd have to use the instrument_declarative() function instead of the standard metaclass:
meta = MetaData()
registry = {}

def register_cls(*cls):
    for c in cls:
        instrument_declarative(c, registry, meta)
So then we'd do something along the lines of:
class Store(object):
    # ...

class FactOne(object):
    __tablename__ = 'sales_fact_one'

Store.add_dimension(FactOne)
register_cls(Store, FactOne)
If you actually have a good reason for custom join conditions, as long as there's some pattern to how those conditions are created, you can generate that with your add_dimension():
class Store(object):
    ...

    @classmethod
    def add_dimension(cls, target):
        target.store_id = Column('store_id', Integer, ForeignKey('store.id'), primary_key=True)
        target.store = relation(cls, primaryjoin=target.store_id==cls.id)
But the final cool thing, if you're on 2.6, is to turn add_dimension into a class decorator. Here's an example with everything cleaned up:
from sqlalchemy import *
from sqlalchemy.ext.declarative import instrument_declarative
from sqlalchemy.orm import *

class BaseMeta(type):
    classes = set()

    def __init__(cls, classname, bases, dict_):
        klass = type.__init__(cls, classname, bases, dict_)
        if 'metadata' not in dict_:
            BaseMeta.classes.add(cls)
        return klass

class Base(object):
    __metaclass__ = BaseMeta
    metadata = MetaData()

    def __init__(self, **kw):
        for k in kw:
            setattr(self, k, kw[k])

    @classmethod
    def configure(cls, *klasses):
        registry = {}
        for c in BaseMeta.classes:
            instrument_declarative(c, registry, cls.metadata)

class Store(Base):
    __tablename__ = 'store'

    id = Column('id', Integer, primary_key=True)
    name = Column('name', String(50), nullable=False)

    @classmethod
    def dimension(cls, target):
        target.store_id = Column('store_id', Integer, ForeignKey('store.id'), primary_key=True)
        target.store = relation(cls)
        return target

class Product(Base):
    __tablename__ = 'product'

    id = Column('id', Integer, primary_key=True)
    name = Column('name', String(50), nullable=False)

    @classmethod
    def dimension(cls, target):
        target.product_id = Column('product_id', Integer, ForeignKey('product.id'), primary_key=True)
        target.product = relation(cls)
        return target

@Store.dimension
@Product.dimension
class FactOne(Base):
    __tablename__ = 'sales_fact_one'
    units_sold = Column('units_sold', Integer, nullable=False)

@Store.dimension
@Product.dimension
class FactTwo(Base):
    __tablename__ = 'sales_fact_two'
    units_sold = Column('units_sold', Integer, nullable=False)

Base.configure()

if __name__ == '__main__':
    engine = create_engine('sqlite://', echo=True)
    Base.metadata.create_all(engine)

    sess = sessionmaker(engine)()

    sess.add(FactOne(store=Store(name='s1'), product=Product(name='p1'), units_sold=27))
    sess.commit()