ValidationError from composite key with marshmallow_sqlalchemy, sqlalchemy, marshmallow

I am making an API with Flask, using sqlalchemy/flask-sqlalchemy, marshmallow and marshmallow_sqlalchemy to model the database.
I am loading the data for the Character table through the code below:
character = {
    'name': raw_character['name'],
    'original_name': raw_character['original_name'],
    'alternative_name': raw_character['alternative_name'],
}
characters_serialized.append(character)

schema = CharacterSchema()
characters = schema.load(data=characters_serialized, many=True, session=db.session)
raw_character is JSON, as seen below:
{
    "name": "Olaa",
    "original_name": "olå",
    "alternative_name": ["ol", "oå"]
}
The model itself is defined as a table for Character and a table representing the list of alternative names:
class CharacterAlternativeName(db.Model):
    __tablename__ = "character_alternative_name"

    character_id = sa.Column(sa.Integer, sa.ForeignKey("character.id"), primary_key=True)
    alternative_name = sa.Column(sa.String, primary_key=True)

    def __repr__(self):
        return "<CharacterAlternativeName(alternative_name={self.alternative_name!r})>".format(self=self)


class Character(db.Model):
    __tablename__ = "character"

    id = sa.Column(sa.Integer, primary_key=True)
    name = sa.Column(sa.String)
    original_name = sa.Column(sa.String)
    alternative_name = relationship("CharacterAlternativeName")

    def __repr__(self):
        return "<Character(name={self.name!r})>".format(self=self)
class CharacterSchema(SQLAlchemySchema):
    class Meta:
        model = Character
        include_relationships = True
        load_instance = True  # Optional: deserialize to model instances

    id = auto_field()
    name = auto_field()
    original_name = auto_field()
    alternative_name = auto_field()
The problem I am facing is that deserialization seems to struggle with the composite key in the CharacterAlternativeName table; when it tries to deserialize the alternative names it gives the following error message:
marshmallow.exceptions.ValidationError: {0: {'alternative_name': {0: ["Could not deserialize related value 'ol'; expected a dictionary with keys ['character_id', 'alternative_name']"], 1: ["Could not deserialize related value 'oå'; expected a dictionary with keys ['character_id', 'alternative_name']"]}}}
Any ideas how to make the composite key work with sqlalchemy and marshmallow?
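One possible direction (a minimal, untested sketch, not part of the original question): with include_relationships = True, auto_field() on the relationship appears to produce a Related field, which for a composite primary key expects a dict of all the key columns for every item. Declaring the relationship with an explicit Nested schema, and wrapping each raw name as {'alternative_name': name} in the loading code, sidesteps that requirement:

from marshmallow import fields

class CharacterAlternativeNameSchema(SQLAlchemySchema):
    class Meta:
        model = CharacterAlternativeName
        load_instance = True

    alternative_name = auto_field()


class CharacterSchema(SQLAlchemySchema):
    class Meta:
        model = Character
        include_relationships = True
        load_instance = True

    id = auto_field()
    name = auto_field()
    original_name = auto_field()
    # Nested child objects instead of the key-based Related field
    alternative_name = fields.Nested(CharacterAlternativeNameSchema, many=True)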

Related

How to query data from related tables and show them in JSON format [duplicate]

I have a data class for Coach and a data class for User. How do I query all the coach results, with the list of subscribed users inside, in JSON format?
So, for example, I want to achieve this:
{
    "coach_id": 1,
    "coach_login": "Alan",
    "subscribers": [
        {"user_id": 1, "user_login": "John"}
    ]
}
@dataclass
class User(db.Model):
    __tablename__ = 'user'
    user_id: int
    login: str

    user_id = db.Column(db.BigInteger, primary_key=True)
    login = db.Column(db.String(255))
    password_hash = db.Column(db.String(255))
    subscriptions = db.relationship('Coach', secondary=subs, backref='subscribers')

    @property
    def password(self):
        raise AttributeError('Password is not in valid format')

    @password.setter
    def password(self, password):
        self.password_hash = generate_password_hash(password)

    def varify_password(self, password):
        return check_password_hash(self.password_hash, password)


@dataclass
class Coach(db.Model):
    __tablename__ = 'coach'
    coach_id: int
    login: str

    coach_id = db.Column(db.BigInteger, primary_key=True)
    login = db.Column(db.String(255))
    password_hash = db.Column(db.String(255))

    @property
    def password(self):
        raise AttributeError('Password is not in valid format')

    @password.setter
    def password(self, password):
        self.password_hash = generate_password_hash(password)

    def varify_password(self, password):
        return check_password_hash(self.password_hash, password)
You can get your data with SQLAlchemy and then parse it to JSON thanks to the dataclass decorator:
# Endpoint to get a list of all users
@app.route("/api/users/", methods=['GET'])
def users():
    users = User.query.all()
    return jsonify(users)
You can use Marshmallow (https://marshmallow.readthedocs.io/)
For flask-marshmallow: https://flask-marshmallow.readthedocs.io/en/latest/
marshmallow is an ORM/ODM/framework-agnostic library for converting
complex datatypes, such as objects, to and from native Python
datatypes.
You can initialize Marshmallow the same way as SQLAlchemy (you can find good examples in the documentation).
Then you can use ma.SQLAlchemyAutoSchema to create a schema for your table (for relationships: https://marshmallow.readthedocs.io/en/stable/nesting.html)
class UserSchema(ma.SQLAlchemyAutoSchema):
    class Meta:
        model = User
        sqla_session = db.session
Then you can load a User into a UserSchema and dump it:
user = User.query.first()
user_schema = UserSchema()
user_schema.dump(user)
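For the nested structure from the question, a minimal untested sketch (assuming the models above, the UserSchema just shown, and a flask-marshmallow ma instance, with subscribers available via the backref) could nest the subscribed users inside each coach:
class CoachSchema(ma.SQLAlchemyAutoSchema):
    class Meta:
        model = Coach
        sqla_session = db.session

    # Embed the subscribed users inside each coach's JSON output.
    subscribers = ma.Nested(UserSchema, many=True)

coach_schema = CoachSchema()
coach_schema.dump(Coach.query.first())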

DRF Create queryset by adding foreign key queryset into it

I want to get foreign-key data (the Child model) into the Parent queryset.
I am able to add the child data into the parent data, but the result is JSON. I want to convert that JSON into a queryset, because get_queryset() should return a model/queryset.
I have googled a lot but was unable to find anything helpful.
class Parent(models.Model):
    parent_name = models.TextField()
    child = models.ForeignKey('Child', related_name="+", on_delete=models.CASCADE, null=True)


class Child(models.Model):
    child_name = models.TextField()


class ParentViewSet(viewsets.ModelViewSet):
    queryset = Parent.objects.all()
    serializer_class = ParentInfoSerializer
    filter_backends = (filters.DjangoFilterBackend,)
    filter_fields = ['parent_name']

    def get_queryset(self):
        response = []
        parent_name = self.request.GET.getlist("parent_name")[0]
        parent_queryset = Parent.objects.filter(parent_name=parent_name)
        for par_queryset in parent_queryset:
            parent_result = self.serializer_class(par_queryset).data
            child_id = parent_result["child"]
            child_instance = Child.objects.get(pk=child_id)
            child_result = ChildSerializer(child_instance).data
            parent_result["child"] = child_result
            response.append(parent_result)
        return response
URL -
http://localhost:9000/api/parent?parent_name=xyz
Response output is:
[{
    "parent_name": "xyz",
    "child": [{
        "id": 1,
        "child_name": "abc"
    }]
}]
But the above output is JSON, which I don't want; I want the output as a queryset/model.
NOTE: the queryset output should contain the foreign-key queryset.
You could just replace the default field for child with the child serializer, and it will work like you want.
class ParentSerializer(serializers.ModelSerializer):
    child = ChildSerializer()

    class Meta:
        model = Parent
        fields = ('parent_name', 'child')
And you also don't need to override the get_queryset method.
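A quick sketch of the resulting view (assuming the ParentSerializer above replaces ParentInfoSerializer; this simplification is not spelled out in the original answer):
class ParentViewSet(viewsets.ModelViewSet):
    # The nested child is handled by the serializer, so the default
    # queryset handling is enough and get_queryset() is not overridden.
    queryset = Parent.objects.all()
    serializer_class = ParentSerializer
    filter_backends = (filters.DjangoFilterBackend,)
    filter_fields = ['parent_name']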

SQLAlchemy: foreign keys to declared_attr columns

I'm having trouble using the foreign_keys argument with declared_attr columns. My models look like this:
class BasicTable(object):
    created = db.Column(db.DateTime)
    last_modified = db.Column(db.DateTime)

    @declared_attr
    def created_by_id(cls):
        return db.Column(db.Integer, db.ForeignKey("app_user.id", use_alter=True, name='fk_created_by_id'))

    @declared_attr
    def created_by(cls):
        return db.relationship("AppUser", foreign_keys='{}.{}'.format(cls.__tablename__, 'created_by_id'))

    @declared_attr
    def last_modified_by_id(cls):
        return db.Column(db.Integer, db.ForeignKey("app_user.id", use_alter=True, name='fk_last_modified_by_id'))

    @declared_attr
    def last_modified_by(cls):
        return db.relationship("AppUser", foreign_keys='{}.{}'.format(cls.__tablename__, 'last_modified_by_id'))


class AppUser(BasicTable, db.Model):
    id = db.Column(db.Integer, primary_key=True)
    email = db.Column(db.String(64))
    service_id = db.Column(db.Integer, db.ForeignKey("service.id"))
Because there are two columns in BasicTable that reference AppUser, I was getting "ambiguous foreign keys" errors, so I tried to use the foreign_keys argument as described here. The above gives me this error:
AttributeError: 'Table' object has no attribute 'last_modified_by_id'
When I check the database, that field does exist on all the tables that use BasicTable. Is this error happening because I'm referencing a declared_attr column? This suggests so, but when I tried to use the lambda technique like this:
foreign_keys=lambda: cls.created_by_id
I get this error:
InvalidRequestError: When initializing mapper Mapper|AppUser|app_user, expression 'BasicTable' failed to locate a name ("name 'BasicTable' is not defined"). If this is a class name, consider adding this relationship() to the <class 'app.models.AppUser'> class after both dependent classes have been defined.
Is there a way around this? Thanks!
class BasicTable(object):
    created = db.Column(db.DateTime)
    last_modified = db.Column(db.DateTime)

    @declared_attr
    def created_by_id(cls):
        return db.Column(db.Integer, db.ForeignKey("app_user.id", use_alter=True, name='fk_created_by_id'))

    @declared_attr
    def created_by(cls):
        return db.relationship('AppUser', primaryjoin='%s.created_by_id==AppUser.id' % cls.__name__,
                               remote_side='AppUser.id')

    @declared_attr
    def last_modified_by_id(cls):
        return db.Column(db.Integer, db.ForeignKey("app_user.id", use_alter=True, name='fk_last_modified_by_id'))

    @declared_attr
    def last_modified_by(cls):
        return db.relationship('AppUser', primaryjoin='%s.last_modified_by_id==AppUser.id' % cls.__name__,
                               remote_side='AppUser.id')
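For illustration, a hypothetical usage sketch (not from the original answer): each subclass gets its own primaryjoin built from cls.__name__, so any model that mixes in BasicTable resolves both relationships to AppUser without the ambiguous-foreign-keys error.
# Hypothetical model; "service" is only known here as the table that
# AppUser.service_id points at.
class Service(BasicTable, db.Model):
    __tablename__ = "service"
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64))

# Service.query.first().created_by       -> AppUser instance (or None)
# Service.query.first().last_modified_by -> AppUser instance (or None)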

Mapping SQLAlchemy object from dict automatically

I'm new to Flask and SQLAlchemy.
I have a User instance user:
class User(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    uname = db.Column(db.String(64))
    avatar = db.Column(db.String(64))
    ...
and I have a JSON dict user_json:
{
    'uname': 'John',
    'avatar': 'http://example/john.jpg',
    ...
}
I want each field of user to be updated automatically from the JSON.
This is what I have tried:
for key, value in user_json.items():
    if key in self.__table__.columns:
        self[key] = value
And, obviously, it doesn't work. :(
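One common way to make this work (a minimal sketch, not from the original post) is to use setattr, since model instances do not support item assignment. A hypothetical helper:
def update_from_dict(model_instance, data):
    """Copy values from a dict onto a SQLAlchemy model instance."""
    for key, value in data.items():
        # only copy keys that correspond to real table columns
        if key in model_instance.__table__.columns:
            setattr(model_instance, key, value)

# usage (assuming the user and user_json from above):
# update_from_dict(user, user_json)
# db.session.commit()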

How to build backref with both association object and secondaryjoin?

I need some models, for instance the following:
Work - e.g. works of literature.
Worker - e.g. a composer, translator, or someone with a similar contribution to a work.
Thus, a 'type' field is required to distinguish workers by their division of work. According to SQLAlchemy's documentation, this case can benefit from an association object, like the following:
class Work(base):
    id = Column(Integer, primary_key=True)
    name = Column(String(50))
    description = Column(Text)


class Worker(base):
    id = Column(Integer, primary_key=True)
    name = Column(String(50))
    description = Column(Text)


class Assignment(base):
    work_id = Column(Integer, ForeignKey('work.id'), primary_key=True)
    worker_id = Column(Integer, ForeignKey('worker.id'), primary_key=True)
    type = Column(SmallInteger, nullable=True)
Nonetheless, how can I take advantage of backref and an alternative join condition to build these relations, so that each Work object can retrieve and modify its corresponding Worker(s) via different attributes? For example:
work = session.query(Work).get(1)
work.name
>>> 'A Dream of The Red Mansions'
work.composers
>>> [<Worker('Xueqin Cao')>]
work.translators
>>> [<Worker('Xianyi Yang')>, <Worker('Naidie Dai')>]
Vice versa:
worker = session.query(Worker).get(1)
worker.name
>>> 'Xueqin Cao'
worker.composed
>>> [<Work('A Dream of The Red Mansions')>]
worker.translated
>>> []
Adding secondaryjoin directly without secondary specified does not seem feasible; besides, SQLAlchemy's docs note that:
When using the association object pattern, it is advisable that the association-mapped table not be used as the secondary argument on a relationship() elsewhere, unless that relationship() contains the option viewonly=True. SQLAlchemy otherwise may attempt to emit redundant INSERT and DELETE statements on the same table, if similar state is detected on the related attribute as well as the associated object.
Then, is there some way to build these relations elegantly and readily?
There are three general ways to go here.
One is, do a "vanilla" setup where you have "work"/"workers" set up without distinguishing on "type" - then, use relationship() for "composer", "composed", "translator", "translated" by using "secondary" to Assignment.__table__ along with custom join conditions, as well as viewonly=True. So you'd do writes via the vanilla properties only. A disadvantage here is that there's no immediate synchronization between the "vanilla" and "specific" collections.
Another is, same with the "vanilla" setup, but just use plain Python descriptors to give "composer", "composed", "translator", "translated" views in memory, that is, [obj.worker for obj in self.workers if obj.type == 'composer']. This is the simplest way to go. Whatever you put in the "vanilla" collections shows right up in the "filtered" collection, the SQL is simple, and there's fewer SELECT statements in play (one per Worker/Work instead of N per Worker/Work).
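For illustration, a sketch of that second, plain-descriptor idea (not part of the original answer; it assumes the imports and the Assignment class from the full example further below, which has a .worker relationship and a .type column, plus a plain writable Work.assignments relationship):
class Work(Base):
    __tablename__ = 'work'
    id = Column(Integer, primary_key=True)
    name = Column(String(50))
    description = Column(Text)

    # plain, writable collection of Assignment rows
    assignments = relationship("Assignment", cascade="all, delete-orphan")

    @property
    def composers(self):
        # filtered view, computed in Python from the loaded assignments
        return [a.worker for a in self.assignments if a.type == "composer"]

    @property
    def translators(self):
        return [a.worker for a in self.assignments if a.type == "translator"]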
Finally, the approach that's closest to what you're asking, with primary joins and backrefs, but note with the association object, the backrefs are between Work/Assignment and Assignment/Worker, but not between Work/Worker directly. This approach probably winds up using more SQL to get at the results but is the most complete, and also has the nifty feature that the "type" is written automatically. We're also using a "one way backref", as Assignment doesn't have a simple way of relating back outwards (there's ways to do it but it would be tedious). Using a Python function to automate creation of the relationships reduces the boilerplate, and note here I'm using a string for "type", this can be an integer if you add more arguments to the system:
from sqlalchemy import *
from sqlalchemy.orm import *
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.associationproxy import association_proxy

Base = declarative_base()

def _work_assignment(name):
    assign_ = relationship("Assignment",
                primaryjoin="and_(Assignment.work_id==Work.id, "
                            "Assignment.type=='%s')" % name,
                back_populates="work", cascade="all, delete-orphan")
    assoc = association_proxy("%s_assign" % name, "worker",
                creator=lambda worker: Assignment(worker=worker, type=name))
    return assoc, assign_

def _worker_assignment(name):
    assign_ = relationship("Assignment",
                primaryjoin="and_(Assignment.worker_id==Worker.id, "
                            "Assignment.type=='%s')" % name,
                back_populates="worker", cascade="all, delete-orphan")
    assoc = association_proxy("%s_assign" % name, "work",
                creator=lambda work: Assignment(work=work, type=name))
    return assoc, assign_

class Work(Base):
    __tablename__ = 'work'
    id = Column(Integer, primary_key=True)
    name = Column(String(50))
    description = Column(Text)
    composers, composer_assign = _work_assignment("composer")
    translators, translator_assign = _work_assignment("translator")

class Worker(Base):
    __tablename__ = 'worker'
    id = Column(Integer, primary_key=True)
    name = Column(String(50))
    description = Column(Text)
    composed, composer_assign = _worker_assignment("composer")
    translated, translator_assign = _worker_assignment("translator")

class Assignment(Base):
    __tablename__ = 'assignment'
    work_id = Column(Integer, ForeignKey('work.id'), primary_key=True)
    worker_id = Column(Integer, ForeignKey('worker.id'), primary_key=True)
    type = Column(String, nullable=False)
    worker = relationship("Worker")
    work = relationship("Work")

e = create_engine("sqlite://", echo=True)
Base.metadata.create_all(e)

session = Session(e)

ww1, ww2, ww3 = Worker(name='Xueqin Cao'), Worker(name='Xianyi Yang'), Worker(name='Naidie Dai')

w1 = Work(name='A Dream of The Red Mansions')
w1.composers.append(ww1)
w1.translators.extend([ww2, ww3])

session.add(w1)
session.commit()

work = session.query(Work).get(1)
assert work.name == 'A Dream of The Red Mansions'
assert work.composers == [ww1]
assert work.translators == [ww2, ww3]

worker = session.query(Worker).get(ww1.id)
assert worker.name == 'Xueqin Cao'
assert worker.composed == [work]
assert worker.translated == []

worker.composed[:] = []

# either do this...
session.expire(work, ['composer_assign'])

# or this....basically need composer_assign to reload
# session.commit()

assert work.composers == []