I have been trying to create an association relationship between two tables, intake and module. Each intake has a one-to-many relationship with the modules.
However, there is a coursework assigned to each module, and each coursework has a duedate which is unique to each intake.
I tried this, but it didn't work:
intake_modules_table = Table('tg_intakemodules',metadata,
Column('intake_id',Integer,ForeignKey('tg_intake.intake_id',
onupdate="CASCADE",ondelete="CASCADE")),
Column('module_id',Integer,ForeignKey('tg_module.module_id',
onupdate ="CASCADE",ondelete="CASCADE")),
Column('duedate', Unicode(16))
)
class Intake(DeclarativeBase):
__tablename__ = 'tg_intake'
#{ Columns
intake_id = Column(Integer, autoincrement=True, primary_key=True)
code = Column(Unicode(16))
commencement = Column(DateTime)
completion = Column(DateTime)
#{ Special methods
def __repr__(self):
return '"%s"' %self.code
def __unicode__(self):
return self.code
#}
class Module(DeclarativeBase):
__tablename__ ='tg_module'
#{ Columns
module_id = Column(Integer, autoincrement=True, primary_key=True)
code = Column(Unicode(16))
title = Column(Unicode(30))
#{ relations
intakes = relation('Intake',
secondary=intake_modules_table, backref='modules')
#{ Special methods
def __repr__(self):
return '"%s"'%self.title
def __unicode__(self):
return '"%s"'%self.title
#}
When I do this, the duedate column specified in intake_modules_table is not created.
Some help would be appreciated here.
Thanks in advance.
Actually, the duedate column is created, but you don't get it as a model attribute when querying your models. You need to define an intermediate model for the intake_modules_table table and set up a relation to it instead of to Intake. Granted, access to the columns of the relation will be a bit longer (module.intakes[0].duedate, module.intakes[0].intake.code). You can also set up an association proxy to access the list of Intake objects the same way you do now.
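A minimal sketch of that intermediate model, reusing the column definitions from the question (it would replace the intake_modules_table Table and the secondary= relation on Module; the class and backref names here are my own assumptions):

from sqlalchemy.ext.associationproxy import association_proxy

class IntakeModule(DeclarativeBase):
    __tablename__ = 'tg_intakemodules'
    intake_id = Column(Integer, ForeignKey('tg_intake.intake_id',
                       onupdate="CASCADE", ondelete="CASCADE"), primary_key=True)
    module_id = Column(Integer, ForeignKey('tg_module.module_id',
                       onupdate="CASCADE", ondelete="CASCADE"), primary_key=True)
    duedate = Column(Unicode(16))

    intake = relation('Intake', backref='intake_modules')
    module = relation('Module', backref='module_intakes')

# optional: keep module.intakes working as a plain list of Intake objects
Module.intakes = association_proxy('module_intakes', 'intake')

With this, module.module_intakes[0].duedate and module.module_intakes[0].intake.code are both reachable.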
I have a model that depends on some fields of another model. These fields should be present when the record is created, but I do not see a way to enforce that at the database level:
class Study(db.Model):
id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
type = db.Column(Enum(StudyTypeChoices), nullable=False)
owner_id = db.Column(UUID(as_uuid=True), db.ForeignKey('owner.id'), nullable=False)
participants = db.relationship('Participant', lazy=True, cascade='save-update, merge, delete')
How can I make sure that 'participants' is provided when the Study record gets created (similar to what happens with the 'type' field)? I know I can put a wrapper around it to make sure of that, but I am wondering if there is a neater way of doing it with SQLAlchemy.
Edit: This is the definition of the Participant model
class Participant(UserBase):
id = db.Column(UUID(as_uuid=True), db.ForeignKey("user_base.id"), primary_key=True)
study_id = db.Column(UUID(as_uuid=True), db.ForeignKey('study.id'))
You can listen for before_flush events and prevent flushes containing studies without participants, for instance by raising an exception.
@event.listens_for(Session, "before_flush")
def before_flush(session, flush_context, instances):
for instance in session.new: # might want to inspect session.dirty as well
if isinstance(instance, Study) and (
instance.participants is None or instance.participants == []
):
raise ValueError(
f"Study {instance} cannot have {instance.participants} participants."
)
This only checks new studies; you might want to inspect session.dirty as well for updated studies.
Full demo:
from sqlalchemy import Column, ForeignKey, Integer, create_engine, event
from sqlalchemy.orm import Session, declarative_base, relationship
Base = declarative_base()
class Study(Base):
__tablename__ = "study"
id = Column(Integer, primary_key=True)
participants = relationship("Participant", uselist=True, back_populates="study")
class Participant(Base):
__tablename__ = "participant"
id = Column(Integer, primary_key=True)
study_id = Column(Integer, ForeignKey("study.id"), nullable=True)
study = relationship("Study", back_populates="participants")
@event.listens_for(Session, "before_flush")
def before_flush(session, flush_context, instances):
for instance in session.new: # might want to inspect session.dirty as well
if isinstance(instance, Study) and (
instance.participants is None or instance.participants == []
):
raise ValueError(
f"Study {instance} cannot have {instance.participants} participants."
)
engine = create_engine("sqlite://", future=True, echo=True)
Base.metadata.create_all(engine)
s1 = Study()
p1_1 = Participant()
p1_2 = Participant()
s1.participants.extend([p1_1, p1_2])
s2 = Study()
with Session(bind=engine) as session:
session.add(s1)
session.commit() # OK
with Session(bind=engine) as session:
session.add(s2)
session.commit() # ValueError
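The demo only validates brand-new studies; a small sketch of the same listener extended to cover updates as well (the only change is iterating session.dirty too):

@event.listens_for(Session, "before_flush")
def validate_studies(session, flush_context, instances):
    # check both newly added and modified Study objects
    for instance in list(session.new) + list(session.dirty):
        if isinstance(instance, Study) and not instance.participants:
            raise ValueError(f"Study {instance} must have at least one participant.")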
In the following model...
class Question(models.Model):
question_text = models.CharField(max_length=200)
likes = models.IntegerField(default=0)
dislikes = models.IntegerField(default=0)
pub_at = models.DateTimeField(auto_now_add=True)
category = models.ForeignKey(
Category, on_delete=models.SET_NULL, null=True)
def __str__(self):
return f"{self.question_text}"
def validity(self):
total_likes = self.likes + self.dislikes
if total_likes != 0:
return (self.likes / total_likes) * 100
else:
return 100
I want to be able to access Question.objects.get(pk=1).validity(), assuming that pk=1 exists in this case. In the Python shell I can do this easily. But how do I do this using React? I am able to get all my questions and their fields in React without a problem, but I don't think I have a way to access the validity method I created.
In this case I would suggest the following. First, remove the validity method from the model:
# models.py
class Question(models.Model):
question_text = models.CharField(max_length=200)
likes = models.IntegerField(default=0)
dislikes = models.IntegerField(default=0)
pub_at = models.DateTimeField(auto_now_add=True)
category = models.ForeignKey(
Category, on_delete=models.SET_NULL, null=True)
def __str__(self):
return f"{self.question_text}"
Then add a SerializerMethodField (see the DRF docs) to your serializer. It is read-only and can be used to pass computed values to your views:
# serializers.py
class QuestionSerializer(serializers.ModelSerializer):
validity = serializers.SerializerMethodField()
class Meta:
model = Question
fields = ['question_text', 'likes', 'dislikes', 'pub_at', 'category', 'validity']
def get_validity(self, instance):
total_likes = instance.likes + instance.dislikes
# Your approach is not wrong. This is a more explicit way of dealing with that particular error type
try:
return (instance.likes / total_likes) * 100
except ZeroDivisionError:
return 100
Bear in mind that the Foreign Key category will be serialized as its database unique id value (Primary Key) in this case.
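If you would rather expose something readable than the raw id, one option (just a sketch) is to declare the field explicitly on the serializer, for example with StringRelatedField, which serializes the related Category via its __str__:

# add to QuestionSerializer: render category via Category.__str__ instead of its pk
category = serializers.StringRelatedField()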
You might want to use the @property decorator so that you can access the value the same way you would access any of the other fields on your Question model:
class Question(models.Model):
question_text = models.CharField(max_length=200)
likes = models.IntegerField(default=0)
dislikes = models.IntegerField(default=0)
pub_at = models.DateTimeField(auto_now_add=True)
category = models.ForeignKey(
Category, on_delete=models.SET_NULL, null=True)
def __str__(self):
return f"{self.question_text}"
@property
def validity(self):
total_likes = self.likes + self.dislikes
percentage = (self.likes / total_likes) * 100
return percentage
Explanations can be found in the Django docs. Keep in mind that it will not be saved as a column in the database like the other attributes when you run migrations.
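With the decorator in place, the value is read as an attribute rather than called (a quick usage sketch):

question = Question.objects.get(pk=1)
question.validity  # no parentheses: computed on access, never stored in the database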
I am answering my own question here, as I found a solution. Although @property does work when rendering with a plain Django template, when using React and rendering JSON responses validity is still not available.
In my serializers.py file I did the following...
class QuestionSerializer(serializers.ModelSerializer):
validity = serializers.ReadOnlyField()
class Meta:
model = Question
fields = '__all__'
Take away the @property from the model, as it is no longer needed. This has worked for me, and you can check the Django REST framework browsable API or test it in your React application to confirm that you have access to the field.
I would like to know if there are any issues with doing this and/or whether there is a better way. I also tried validity = serializers.Field() instead of validity = serializers.ReadOnlyField(), but got an error saying I needed a Field.to_representation() that takes self, value as positional arguments.
What arguments exactly do I pass in there? I tried self, Question.validity and it did not work. I am not sure what I am doing here.
As an update, I changed the method in the model to the following...
def validity(self):
total_likes = self.likes + self.dislikes
if total_likes != 0:
return (self.likes / total_likes) * 100
else:
return 100
I did not notice this before, and it does not really matter for the question, but division by zero has to be guarded against: since likes and dislikes both default to 0, a brand-new question would otherwise always trigger it.
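For the side question about serializers.Field(): a custom field has to implement to_representation() itself, which is what the error was pointing at. A minimal sketch (the field class name is illustrative):

class ValidityField(serializers.Field):
    def get_attribute(self, instance):
        # hand the whole Question instance to to_representation
        return instance

    def to_representation(self, instance):
        return instance.validity()

class QuestionSerializer(serializers.ModelSerializer):
    validity = ValidityField(read_only=True)

    class Meta:
        model = Question
        fields = '__all__'

In practice, ReadOnlyField or SerializerMethodField, as used above, is simpler for this case.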
I am having a model structure like:
class user(models.Model):
name = models.CharField(max_length=100)
tasks = models.IntegerField(default=0)
class project(models.Model):
worker = models.ForeignKey(user, on_delete=models.CASCADE)
project_name = models.CharField(max_length=100)
class task(models.Model):
project = models.ForeignKey(project, on_delete=models.CASCADE)
task_name = models.CharField(max_length=150)
expected_date = models.DateField(auto_now=False,auto_now_add=False,)
actual_date = models.DateField(auto_now=False,auto_now_add=False,blank=True,null=True,)
I want to traverse the task list and, if the actual_date field is not null (i.e. the task is completed), increment the tasks field on the user class by 1. I have written the following code:
a = task.objects.filter(actual_date__isnull=False)
for x in a:
x.project.worker.tasks+=1
However, this is not giving the desired result. What should I do?
You are not saving your object after modifying it - simply modifying the value doesn't write it to the database. Try this instead:
a = task.objects.filter(actual_date__isnull=False)
for x in a:
worker = x.project.worker
worker.tasks += 1
worker.save()
On a separate note, you should consider following PEP 8 conventions and using CamelCase for your class names. As it is currently, you can very easily mix up classes with objects.
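On a second note, if the goal is really to (re)compute the completed-task count for every user, letting the database aggregate is usually cheaper than incrementing in a Python loop. A sketch, assuming Django 2.0+ for the filter argument to Count:

from django.db.models import Count, Q

# one completed-task count per user, computed by the database
for u in user.objects.annotate(
        done=Count('project__task',
                   filter=Q(project__task__actual_date__isnull=False))):
    u.tasks = u.done
    u.save()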
I'm using the SQLAlchemy association-object pattern (http://docs.sqlalchemy.org/en/rel_1_1/orm/basic_relationships.html#association-object) for three model classes.
The basic relationship is that, on the left side, one User can belong to multiple Organizations. I'm storing extra User-Organization data in the association-object class. Then the association-object class maps many-to-one to the Organization.
From the SQLAlchemy point of view, the relationship works fine. The problem is that testing this with factory_boy has proven difficult and always results in RecursionError: maximum recursion depth exceeded.
Below are the three models for the association-object relationship, where User is the parent and Organization is the child:
class MemberOrgsAssoc(Model):
"""The left side of the relationship maps a User as a one-to-many to
Organizations. User-Organization relevant data is stored in
this association-object table. Then, there is a one-to-many from
this association-object table to the Organization table. """
__tablename__ = 'member_orgs'
member_id = Column(db.Integer, db.ForeignKey("users.id"), primary_key=True)
org_id = Column(db.Integer, db.ForeignKey("organizations.id"), primary_key=True)
manager_id = Column(db.Integer, db.ForeignKey("users.id"))
org_title = Column(db.Unicode(50))
organization = relationship("Organization", back_populates="members")
member = relationship("User", back_populates="organizations",
foreign_keys=[member_id])
manager = relationship("User", back_populates="subordinates",
foreign_keys=[manager_id])
class User(SurrogatePK, Model):
"""A user of the app."""
__tablename__ = 'users'
username = Column(db.Unicode(80), unique=True, nullable=False)
organizations = relationship("MemberOrgsAssoc", back_populates="member",
primaryjoin = "member_orgs.c.member_id == User.id",
lazy="dynamic")
subordinates = relationship("MemberOrgsAssoc", back_populates="manager",
primaryjoin = "member_orgs.c.manager_id == User.id",
lazy="dynamic")
class Organization(SurrogatePK, Model):
"""An organization that Users may belong to."""
__tablename__ = 'organizations'
name = Column(db.Unicode(128), nullable=False)
members = relationship("MemberOrgsAssoc", back_populates="organization")
So all the above SQLAlchemy model classes and relationships seem to work as intended for now.
Below are the three factory-boy classes I'm attempting to make work.
MemberOrgs association-object factory:
class MemberOrgsAssocFactory(BaseFactory):
"""Association-object table Factory"""
class Meta:
"""Factory config"""
model = MemberOrgsAssoc
member_id = factory.SubFactory('tests.factories.UserFactory')
org_id = factory.SubFactory('tests.factories.OrganizationFactory')
manager_id = factory.SubFactory('tests.factories.UserFactory')
org_title = Sequence(lambda n: 'CEO{0}'.format(n))
organization = factory.SubFactory('tests.factories.OrganizationFactory')
member = factory.SubFactory('tests.factories.UserFactory')
manager = factory.SubFactory('tests.factories.UserFactory')
class UserFactory(BaseFactory):
"""User factory."""
class Meta:
"""Factory configuration."""
model = User
username = Sequence(lambda n: 'user{0}'.format(n))
organizations = factory.List(
[factory.SubFactory('tests.factories.MemberOrgsAssocFactory')])
subordinates = factory.List(
[factory.SubFactory('tests.factories.MemberOrgsAssocFactory')])
class OrganizationFactory(BaseFactory):
"""Company factory"""
class Meta:
"""Factory config"""
model = Organization
id = Sequence(lambda n: '{0}'.format(n))
name = Sequence(lambda n: 'company{0}'.format(n))
members = factory.List(
[factory.SubFactory('tests.factories.MemberOrgsAssocFactory')])
Finally, I need to make a user for the tests, so below is a pytest fixture that makes a User. This is where the tests fail with RecursionError: maximum recursion depth exceeded.
@pytest.fixture(scope='function')
def user(db):
"""An user for the unit tests.
setup reference: https://github.com/FactoryBoy/factory_boy/issues/101
# how to handle self referential foreign key relation in factory boy
# https://github.com/FactoryBoy/factory_boy/issues/173
"""
user = UserFactory(
organizations__0=None,
subordinates__0=None,
)
a = MemberOrgsAssocFactory(
is_org_admin=True,
is_default_org=True,
is_active=True,
)
a.organization=OrganizationFactory()
user.organizations.append(a)
db.session.commit()
return user
Error message:
E RecursionError: maximum recursion depth exceeded
!!! Recursion detected (same locals & position)
I more or less resolved this, though it is a bit fragile overall. You must follow the required pattern carefully, as laid out in the SQLAlchemy docs:
""" EXAMPLE USE:
# create User object, append an Organization object via association
p = User()
a = MemberOrgsAssoc(extra_data="some data")
a.organization = Organization()
p.organizations.append(a)
# iterate through Organization objects via association, including association attributes:
for assoc in p.organizations:
print(assoc.extra_data)
print(assoc.child)
"""
The changes to the pytest fixture below resolved the RecursionError issue and got it working:
@pytest.fixture(scope='function')
def user(db):
"""An user for the tests."""
user = UserFactory(
organizations='',
subordinates=''
)
a = MemberOrgsAssocFactory(
member_id=None,
org_id=None,
manager_id=None,
is_org_admin=True,
is_default_org=True,
is_active=True,
organization=None,
member=None,
manager=None
)
a.organization = OrganizationFactory(members=[])
user.organizations.append(a)
db.session.commit()
# debugging
# thisuser = User.get_by_id(user.id)
# for assoc in thisuser.organizations:
# if assoc.is_default_org:
# print('The default organization of thisuser is -> {}'.format(assoc.organization.name))
return user
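An alternative that avoids the RecursionError altogether (a sketch, keeping the factory and model names from above) is to break the cycle: declare SubFactory only on the association factory, and drop the circular factory.List/SubFactory declarations from UserFactory and OrganizationFactory, so creating an object never tries to create its own creator:

class UserFactory(BaseFactory):
    class Meta:
        model = User
    username = Sequence(lambda n: 'user{0}'.format(n))

class OrganizationFactory(BaseFactory):
    class Meta:
        model = Organization
    name = Sequence(lambda n: 'company{0}'.format(n))

class MemberOrgsAssocFactory(BaseFactory):
    class Meta:
        model = MemberOrgsAssoc
    org_title = Sequence(lambda n: 'CEO{0}'.format(n))
    organization = factory.SubFactory(OrganizationFactory)
    member = factory.SubFactory(UserFactory)
    manager = factory.SubFactory(UserFactory)

# usage: one association row wires up a fresh user and organization
assoc = MemberOrgsAssocFactory()
user = assoc.member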
I need some models, for instance the following:
Work - e.g. works of literature.
Worker - e.g. a composer, translator, or someone similar who has contributed to a work.
Thus, a 'type' field is required to distinguish workers by their role. As SQLAlchemy's documentation suggests, this case can benefit from an association object like the following:
class Work(base):
id = Column(Integer, primary_key=True)
name = Column(String(50))
description = Column(Text)
class Worker(base):
id = Column(Integer, primary_key=True)
name = Column(String(50))
description = Column(Text)
class Assignment(base):
work_id = Column(Integer, ForeignKey('work.id'), primary_key=True)
worker_id = Column(Integer, ForeignKey('worker.id'), primary_key=True)
type = Column(SmallInteger, nullable=True)
Nonetheless, how can I take advantage of backref and alternative join conditions to build the relationships so that each Work object can retrieve and modify its corresponding Worker(s) via different attributes for distinction? For example:
work = session.query(Work).get(1)
work.name
>>> 'A Dream of The Red Mansions'
work.composers
>>> [<Worker('Xueqin Cao')>]
work.translators
>>> [<Worker('Xianyi Yang')>, <Worker('Naidie Dai')>]
Vice versa:
worker = session.query(Worker).get(1)
worker.name
>>> 'Xueqin Cao'
worker.composed
>>> [<Work('A Dream of The Red Mansions')>]
worker.translated
>>> []
Adding secondaryjoin directly without secondary specified does not seem feasible; besides, SQLAlchemy's docs note that:
When using the association object pattern, it is advisable that the association-mapped table not be used as the secondary argument on a relationship() elsewhere, unless that relationship() contains the option viewonly=True. SQLAlchemy otherwise may attempt to emit redundant INSERT and DELETE statements on the same table, if similar state is detected on the related attribute as well as the associated object.
Then, is there some way to build these relations elegantly and readily?
There are three general ways to go here.
One is, do a "vanilla" setup where you have "work"/"workers" set up without distinguishing on "type" - then, use relationship() for "composer", "composed", "translator", "translated" by using "secondary" to Assignment.__table__ along with custom join conditions, as well as viewonly=True. So you'd do writes via the vanilla properties only. A disadvantage here is that there's no immediate synchronization between the "vanilla" and "specific" collections.
Another is, same with the "vanilla" setup, but just use plain Python descriptors to give "composer", "composed", "translator", "translated" views in memory, that is, [obj.worker for obj in self.workers if obj.type == 'composer']. This is the simplest way to go. Whatever you put in the "vanilla" collections shows right up in the "filtered" collection, the SQL is simple, and there's fewer SELECT statements in play (one per Worker/Work instead of N per Worker/Work).
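A sketch of that second option (it assumes Work has a plain "assignments" relationship to Assignment with back_populates="work"; names are illustrative):

class Work(Base):
    __tablename__ = 'work'
    id = Column(Integer, primary_key=True)
    name = Column(String(50))
    assignments = relationship("Assignment", back_populates="work")

    @property
    def composers(self):
        # in-memory filtered view over the already-loaded association rows
        return [a.worker for a in self.assignments if a.type == 'composer']

    @property
    def translators(self):
        return [a.worker for a in self.assignments if a.type == 'translator']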
Finally, the approach that's closest to what you're asking, with primary joins and backrefs, but note with the association object, the backrefs are between Work/Assignment and Assignment/Worker, but not between Work/Worker directly. This approach probably winds up using more SQL to get at the results but is the most complete, and also has the nifty feature that the "type" is written automatically. We're also using a "one way backref", as Assignment doesn't have a simple way of relating back outwards (there's ways to do it but it would be tedious). Using a Python function to automate creation of the relationships reduces the boilerplate, and note here I'm using a string for "type", this can be an integer if you add more arguments to the system:
from sqlalchemy import *
from sqlalchemy.orm import *
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.associationproxy import association_proxy
Base = declarative_base()
def _work_assignment(name):
assign_ = relationship("Assignment",
primaryjoin="and_(Assignment.work_id==Work.id, "
"Assignment.type=='%s')" % name,
back_populates="work", cascade="all, delete-orphan")
assoc = association_proxy("%s_assign" % name, "worker",
creator=lambda worker: Assignment(worker=worker, type=name))
return assoc, assign_
def _worker_assignment(name):
assign_ = relationship("Assignment",
primaryjoin="and_(Assignment.worker_id==Worker.id, "
"Assignment.type=='%s')" % name,
back_populates="worker", cascade="all, delete-orphan")
assoc = association_proxy("%s_assign" % name, "work",
creator=lambda work: Assignment(work=work, type=name))
return assoc, assign_
class Work(Base):
__tablename__ = 'work'
id = Column(Integer, primary_key=True)
name = Column(String(50))
description = Column(Text)
composers, composer_assign = _work_assignment("composer")
translators, translator_assign = _work_assignment("translator")
class Worker(Base):
__tablename__ = 'worker'
id = Column(Integer, primary_key=True)
name = Column(String(50))
description = Column(Text)
composed, composer_assign = _worker_assignment("composer")
translated, translator_assign = _worker_assignment("translator")
class Assignment(Base):
__tablename__ = 'assignment'
work_id = Column(Integer, ForeignKey('work.id'), primary_key=True)
worker_id = Column(Integer, ForeignKey('worker.id'), primary_key=True)
type = Column(String, nullable=False)
worker = relationship("Worker")
work = relationship("Work")
e = create_engine("sqlite://", echo=True)
Base.metadata.create_all(e)
session = Session(e)
ww1, ww2, ww3 = Worker(name='Xueqin Cao'), Worker(name='Xianyi Yang'), Worker(name='Naidie Dai')
w1 = Work(name='A Dream of The Red Mansions')
w1.composers.append(ww1)
w1.translators.extend([ww2, ww3])
session.add(w1)
session.commit()
work = session.query(Work).get(1)
assert work.name == 'A Dream of The Red Mansions'
assert work.composers == [ww1]
assert work.translators == [ww2, ww3]
worker = session.query(Worker).get(ww1.id)
assert worker.name == 'Xueqin Cao'
assert worker.composed == [work]
assert worker.translated == []
worker.composed[:] = []
# either do this...
session.expire(work, ['composer_assign'])
# or this....basically need composer_assign to reload
# session.commit()
assert work.composers == []