how to get table object and query specific fields in sqlalchemy? - sqlalchemy

I create a table when user want to add IP address and port to get a url map that refers to the specific ip:port he adds. The table is created based on his email address.
def create_user_table(table_name):
    """Create and return a per-user table (named after the user's email).

    Each row records one ip:port registration together with the short
    ``urlmap`` slug that resolves to it.
    """
    db_engine = get_sql_engine()
    meta = MetaData(db_engine)
    table_columns = (
        Column("userip_id", db.Integer, primary_key=True, autoincrement=True),
        Column("date_created", db.DateTime, default=db.func.current_timestamp()),
        # Refreshed automatically on every UPDATE of the row.
        Column("date_modified", db.DateTime, default=db.func.current_timestamp(),
               onupdate=db.func.current_timestamp()),
        Column("ipaddress", CIDR, nullable=False),
        # Short unique slug mapping to this ip:port registration.
        Column("urlmap", db.String(10), nullable=False, unique=True),
        Column("port", db.Integer, index=True),
        Column("device", db.String(10)),
        Column("path", db.String(30)),
        Column("service", db.String(10)),
        Column("count_updates", db.Integer, default=0, nullable=False),
    )
    new_table = Table(table_name, meta, *table_columns)
    new_table.create(db_engine)
    return new_table
I have not made ipaddress and port unique, as the same user can have different ports on the same IP, or different IPs with the same port (I have opted for the latter, considering a user may be updating an existing entry).
If I create a table in the db inheriting from db.Model I can query like
cls.query.filter(ipaddress=ipaddress).first() this will return me that table object I can check if that Ip has a specific port, but now that I have created table inheriting from sqlalchemy.Table I do not have the query function available .
How do I query and compare if the specific ip has a specific port address mapped to a existing urlmap(not known) ?

Figured out like always :p
# if same ip exists external port address should not exist in mapped in db.
query_table = helpers.get_query_result(user_table_name, "ipaddress", data['ip']).fetchall()
ports_mapped_to_ip_in_db = [item[5] for item in query_table]
if int(data['port']) in ports_mapped_to_ip_in_db:
return jsonify({"status": "{}:{} is already registered.".format(data["ip"], data["port"])})
where get_query_result in helpers module is :
def get_query_result(table_name, field_name=None, value=None):
    """Execute a SELECT against *table_name* and return the result proxy.

    If both *field_name* and *value* are given (including falsy values such
    as 0), rows are filtered with ``WHERE field_name = value``; otherwise
    the whole table is selected.

    The original interpolated *value* straight into the SQL string, which is
    a classic SQL-injection hole; the value is now sent as a bound parameter
    and the identifiers are quoted through the dialect's identifier preparer.
    """
    # Local import: the surrounding scrape shows no module import block to
    # extend, and the file already depends on SQLAlchemy elsewhere.
    from sqlalchemy import text

    engine = get_sql_engine()
    # Identifiers cannot be bound parameters; quote them safely instead of
    # trusting the caller-supplied strings.
    preparer = engine.dialect.identifier_preparer
    safe_table = preparer.quote(table_name)
    # `is not None` (rather than truthiness) so value=0 or "" still filters.
    if field_name is not None and value is not None:
        safe_field = preparer.quote(field_name)
        # The value travels as a bound parameter, never via string formatting.
        query = text("SELECT * FROM {} WHERE {} = :value".format(safe_table, safe_field))
        return engine.execute(query, value=value)
    return engine.execute(text("SELECT * FROM {}".format(safe_table)))

Related

django admin site returns MultipleObjectsReturned exception with inspectdb imported legacy database and composite primary key

Using inspectdb, I have imported a legacy database, that contains entities with composite primary keys, in django . The database schema contains about 200 different entities and inspectdb is quite handy in that situation.
This is the schema in mysql:
CREATE TABLE `mymodel` (
`id` bigint(20) unsigned NOT NULL DEFAULT '0',
`siteid` bigint(20) unsigned NOT NULL DEFAULT '0',
...
PRIMARY KEY (`siteid`,`id`),
...
Following the autogenerated model in django (imported using python manager.py inspectdb)
class Mymodel(models.Model):
    """inspectdb-generated mapping for the legacy ``mymodel`` table.

    The real table has a composite primary key (siteid, id); Django has no
    composite-PK support, so ``siteid`` alone is marked primary_key and the
    pair is declared via ``unique_together``.
    """
    id = models.PositiveBigIntegerField()
    siteid = models.PositiveBigIntegerField(primary_key=True)
    ...

    class Meta:
        managed = False
        db_table = 'mymodel'
        # BUG FIX: the original left this tuple unclosed (syntax error).
        unique_together = (('siteid', 'id'),)
I have registered all models in the admin site using the following approach:
from django.contrib import admin
from django.apps import apps
app = apps.get_app_config('appname')
for model_name, model in app.models.items():
admin.site.register(model)
After all the work is done, I navigate to the admin site and click on any object in the "mymodel" section and the following exception will be returned:
appname.models.Content.MultipleObjectsReturned: get() returned more than one Mymodel-- it returned more than 20!
Obviously (this is what it seems to me, at least), the admin is using the siteid alone to get the object, though it should use the unique_together from the Meta class.
Any suggestions how I can achieve to solve this with a general configuration and get the admin site module to query using the unique_together?
Yes, you can solve this problem, but it takes a little more effort.
First you separate model-admin class for model Mymodel and customize model-admin class method:
Since django admin build change url in ChangeList class, So we can create a custom Changelist class like MymodelChangelist and pass id field value as a query params. We will use id field value to getting object.
Override get_object() method to use custom query for getting object from queryset
Override get_changelist() method of model-admin to set your custom Changelist class
Override save_model() method to save object explicitly.
admin.py
class MymodelChangelist(ChangeList):
    """ChangeList that carries the row's ``id`` in the change-URL query string."""

    def url_for_result(self, result):
        # The composite key needs both halves: ``pk`` (siteid) travels in the
        # URL path, while ``id`` is appended as a query parameter below.
        row_id = getattr(result, 'id')
        pk = getattr(result, self.pk_attname)
        url = reverse('admin:%s_%s_change' % (self.opts.app_label,
                                              self.opts.model_name),
                      args=(quote(pk),),
                      current_app=self.model_admin.admin_site.name)
        # Added `id` as query params to filter queryset to get unique object
        url = url + "?id=" + str(row_id)
        return url
# BUG FIX: the decorator was pasted as '#admin.register(...)', which comments
# it out entirely; restored to a real decorator so the model is registered.
@admin.register(Mymodel)
class MymodelAdmin(admin.ModelAdmin):
    """ModelAdmin adapted to the composite (siteid, id) key of Mymodel."""

    list_display = [
        'id', 'siteid', 'other_model_fields'
    ]

    def get_changelist(self, request, **kwargs):
        """
        Return the ChangeList class for use on the changelist page.
        """
        return MymodelChangelist

    def get_object(self, request, object_id, from_field=None):
        """
        Return an instance matching the field and value provided, the primary
        key is used if no field is provided. Return ``None`` if no match is
        found or the object_id fails validation.
        """
        queryset = self.get_queryset(request)
        model = queryset.model
        field = model._meta.pk if from_field is None else model._meta.get_field(from_field)
        try:
            object_id = field.to_python(object_id)
            # get id field value from query params (set by MymodelChangelist)
            id = request.GET.get('id')
            # Query on the full composite key so exactly one row matches.
            return queryset.get(**{'id': id, 'siteid': object_id})
        except (model.DoesNotExist, ValidationError, ValueError):
            return None

    def save_model(self, request, obj, form, change):
        # Django's default save() would UPDATE by siteid alone; on change we
        # filter on the full composite key explicitly instead.
        cleaned_data = form.cleaned_data
        if change:
            id = cleaned_data.get('id')
            siteid = cleaned_data.get('siteid')
            other_fields = cleaned_data.get('other_fields')
            self.model.objects.filter(id=id, siteid=siteid).update(other_fields=other_fields)
        else:
            obj.save()
Now you can update any object and also add new ones. However, on addition there is one case you cannot handle: adding a siteid that already exists, because of the primary-key validation.

sqlalchemy relationship select from other table instead of insert

I'm having difficulties in relationships. I have users and roles and defined model and schema for them.
the problem is when I try to add a new user with a previously defined role (I have its ID and name)it will try to update/insert the role table by the value the user specifies. but I only want to select from roles and specify that as a user role and not updating the role table(if role not found return error).
what I want to achieve is how to limit SQLalchemy in updating related tables by the value that the user specifies.
here is my models:
class User(db.Model):
    """Application user; every user references exactly one role."""
    __tablename__ = 'user'

    id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4, unique=True, nullable=False)
    username = db.Column(db.String(40), unique=True, nullable=False)
    password = db.Column(db.String(255), nullable=False)
    role_id = db.Column(UUID(as_uuid=True), db.ForeignKey('role.id'), nullable=False)
class Role(db.Model):
    """Role with boolean permission flags; referenced by User.role_id."""
    __tablename__ = 'role'

    id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4, unique=True, nullable=False)
    name = db.Column(db.String(40), unique=True, nullable=False)
    perm_add = db.Column(db.Boolean, default=False)
    perm_edit = db.Column(db.Boolean, default=False)
    perm_del = db.Column(db.Boolean, default=False)
here is the schema that I defined:
class UserSchema(ma.SQLAlchemyAutoSchema):
    """Marshmallow schema for User with a nested role sub-schema."""

    password = ma.String(load_only=True, required=True)
    email = ma.String(required=True)
    role = fields.Nested("RoleSchema", only=("id", "name"), required=True)

    class Meta:
        model = User
        sqla_session = db.session
        load_instance = True
and I grab user input which is checked by schema and commit it to DB.
schema = UserSchema()
user = schema.load(request.json)
db.session.add(user)
try:
db.session.commit()
the point is here I could not change anything regarding role name or ID as it seems it is changed by schema even before applying to DB (I mean request.json)
In my example, I am using the additional webargs library. It facilitates validation on the server side and enables clean notation. Since webargs is itself built on marshmallow anyway, I think the addition makes sense.
I have based myself on your specifications. Depending on what you intend to do further, you may need to make adjustments.
I added a relationship to the user model to make the role easier to use.
class User(db.Model):
    """User model, extended with a relationship attribute for the role."""
    # ...
    # The role is mapped by sqlalchemy using the foreign key
    # as an object and can be reached via a virtual relationship.
    role = db.relationship('Role')
I have allowed the foreign key as a query parameter in the schema and limited the nested schema to the output. The email is assigned to the username.
class RoleSchema(ma.SQLAlchemyAutoSchema):
    """Auto-generated marshmallow schema for the Role model."""

    class Meta:
        model = Role
        load_instance = True
class UserSchema(ma.SQLAlchemyAutoSchema):
    """User schema: loads via role_id, dumps the nested role read-only."""

    # The entry of the email is converted into a username.
    username = ma.String(required=True, data_key='email')
    password = ma.String(required=True, load_only=True)
    # The foreign key is only used here for loading.
    role_id = ma.Integer(required=True, load_only=True)
    # The role is dumped with a query.
    role = ma.Nested("RoleSchema", only=("id", "name"), dump_only=True)

    class Meta:
        model = User
        load_instance = True
        include_relationships = True
It is now possible to query the role from the database and react if it does not exist. The database table for the roles is no longer updated automatically.
from flask import abort
from sqlalchemy.exc import SQLAlchemyError
from webargs.flaskparser import use_args, use_kwargs
# A schema-compliant input is expected as JSON
# and passed as a parameter to the function.
# BUG FIX: both decorators were pasted as '#...' comments; restored to '@'
# so the route is actually registered and the body parsed/validated.
@blueprint.route('/users/', methods=['POST'])
@use_args(UserSchema(), location='json')
def user_new(user):
    """Create a new user from the validated JSON body.

    Returns 201 with the serialized user on success, 404 if the referenced
    role does not exist, 422 if the insert fails.
    """
    # The role is queried from the database and assigned to the user object.
    # If not available, 404 Not Found is returned.
    user_role = Role.query.get_or_404(user.role_id)
    user.role = user_role
    # Now the user can be added to the database.
    db.session.add(user)
    try:
        db.session.commit()
    except SQLAlchemyError as exc:
        # If an error occurs while adding to the database,
        # 422 Unprocessable Entity is returned
        db.session.rollback()
        abort(422)
    # Upon successful completion, the new user is returned
    # with the status code 201 Created.
    user_schema = UserSchema()
    user_data = user_schema.dump(user)
    return jsonify(data=user_data), 201

Python SQL Alchemy Multiple Databases - Binding Automap_Base

I am working through SQL Alchemy but struggling with how to structure the information from the docs into my project. I have two databases, my first will be used to store all new information from the python application. Where as the second database (DB1 in this case) is an existing database that I need to access information from. What is the right way to create this structure using SQLAlchemy?
I used the suggested BINDS method for multiple databases. This seems to be working.
class BaseConfig(object):
    """Flask config: default database (Testing) plus a named bind 'DB1'.

    BUG FIX: in both connection URIs the '@' separating the credentials from
    the host was mangled to '#' in the paste; restored so the URL parses.
    """
    SECRET_KEY = "SO_SECURE"
    DEBUG = True
    SQLALCHEMY_DATABASE_URI = 'mssql+pyodbc://sa:funpassword@(localdb)\MSSQLLocalDB/Testing?driver=SQL+Server+Native+Client+11.0'
    SQLALCHEMY_BINDS = {
        'DB1': 'mssql+pyodbc://sa:$funpassword@ProdDB/DB1?driver=SQL+Server+Native+Client+11.0'
    }
    SQLALCHEMY_TRACK_MODIFICATIONS = True
This configuration seems to work okay because I am able to create new models in both of these databases using the code below. (This was done to just confirm that I was able to connect to both). db is my SqlAlchemy(app) initialization in my index.py file.
from index import app, db
# Test Writing To Default DB
class Test(db.Model):
    """Throwaway model used only to confirm the default bind works."""
    id = db.Column(db.Integer(), primary_key=True)
    employeeNum = db.Column(db.String(255), unique=False)
    job = db.Column(db.String(255))

    def __init__(self, employeeNum, job):
        self.employeeNum = employeeNum
        self.job = job
# Test Writing To DB1
class Test(db.Model):
    """Throwaway model targeting the 'DB1' bind (shadows the one above)."""
    __bind_key__ = 'DB1'

    id = db.Column(db.Integer(), primary_key=True)
    employeeNum = db.Column(db.String(255), unique=False)
    job = db.Column(db.String(255))

    def __init__(self, employeeNum, job):
        self.employeeNum = employeeNum
        self.job = job
I have tried many combinations using the table and automap_base from SQLAlchemy docs but this does not seem to work. I am not sure how I can use the bind_key of DB1 when I am trying to map existing tables.
from index import app, db
def loadSession():
    """Map the existing 'employee' table via automap and print each row."""
    # Register the table on the app's shared metadata so automap can find it.
    Table('employee', db.metadata, Column('emp_num', Text, primary_key=True))
    # BUG FIX: the original passed a bare, undefined name `metadata`
    # (NameError at runtime); it must be the db.metadata used just above.
    Base = automap_base(metadata=db.metadata)
    Base.prepare()
    employee = Base.classes.employee
    emp = db.session.query(employee).all()
    for i in emp:
        print(i.data)
Thanks for any help you can provide.
For your DB1, with its existing schema and data, you could use reflection to get the tables mapped into SQLAlchemy.
For your example it would probably look something like this:
from sqlalchemy.ext.automap import automap_base
from sqlalchemy import MetaData
from index import app, db
def loadSession():
    """Reflect the DB1 bind via automap and print all employee records."""
    # BUG FIX: scoped_session/sessionmaker were used but never imported in
    # the snippet's imports; imported locally to keep the snippet runnable.
    from sqlalchemy.orm import scoped_session, sessionmaker

    # get a db engine object for DB1 configuration
    engine = db.get_engine(bind='DB1')
    # create an empty db Metadata object and bind it to DB1
    database = MetaData(bind=engine)
    # Load the DB1 table/column structure into the Metadata object. This is a
    # heavy operation and should optimally be done once at app setup, with
    # the created automap base stored/reused on demand until app shutdown.
    database.reflect(
        bind=engine, views=True, autoload_replace=False
    )
    # create a python class structure out of the db metadata structure
    auto_base = automap_base(metadata=database)
    auto_base.prepare()
    # create a db session for DB1
    db_session = scoped_session(sessionmaker(autocommit=False,
                                             autoflush=False,
                                             bind=engine))
    # get the reflected employee class from DB1; the employee table must
    # exist in the database
    employee = auto_base.classes.employee
    # query DB1 with the new session object for all employee records
    emp = db_session.query(employee).all()
    for i in emp:
        print(i.data)
    # the following could also be substituted for the query without creating
    # a session object:
    for entry in employee.query.all():
        print(entry)

django router forgets the DB while pointing to ForeignKey

My mobileapp is going to be used by different schools( with different db but same structure).Logged in user(parent) will be connected with multiple db(if their two child in different schools) using DynamicDbRouter. Now the problem is django router forgets the DB while pointing to ForeignKey.
views.py
class StudentFees(LoginRequiredMixin,TemplateView):
template_name = 'student_fees.html'
def get_context_data(self,**kwargs):
context = super(StudentFees,self).get_context_data(**kwargs)
schl_id=kwargs['schl_id']
school_id=School.objects.get(id=kwargs['schl_id'])
with in_database(school_id) :
context['classes'] = StudentSection.objects.filter(student_detail=kwargs['student_id'])
context['invoices'] = Invoice.objects.filter(student_master=kwargs['student_id'],status=1)
print context
return context
Here student_detail & student_master are foreign key fields.
routers.py
class in_database(object):
    """Context manager that routes ORM queries to a specific client DB."""

    def __init__(self, client, read=True, write=False):
        ...
        # Build a Django DATABASES entry from the client's stored credentials.
        database = {'ENGINE': 'django.db.backends.mysql', 'NAME': client.db_name, 'USER': client.username, 'PASSWORD': client.password, 'HOST': client.host, 'PORT': client.port}
        ...
Instead of pointing to client_db.model it points to default_db.model while accessing foreign keys.
I am able to see the context details in the console, but the values are not rendering in student_fees.html. It throws an error like:
ProgrammingError :
default_db.studentsection' doesn't exist

Django how to select foreign value in query

Having following models:
# BUG FIX: the `class` keyword was dropped from both definitions in the
# paste; restored so the sketch is valid Python.
class User(models.Model):
    """Minimal sketch of the User model (question pseudo-code)."""
    ...
    login = ...

class Asset(models.Model):
    """Asset owned by a User via a foreign key."""
    user = models.ForeignKey(User)
    ...
How to select users login in Asset query using django QuerySet capabilities. For example:
Asset.objects.extra(select = {'user_login' : 'user__login'})
so that it returns a queryset with a user_login field on each model object.
Each Asset object already has a foreign key to the user. So you can always access -
# Fetch a single asset and follow its foreign key to the user.
asset = Asset.objects.get(pk=any_id)
if asset.user.login == 'some_value':
    do_some_magic()
Please read the documentation.
Use .select_related('user') to select all assets and related users in a single query. Then simply access it through asset.user.login.
assets = Asset.objects.selec_related('user').filter(<any filter>)
for asset in assets:
# no additional queries here, as the user objects are preloaded into memory
print asset.user.login
I have found following solution:
Asset.object.extra( select = {'user_login' : '`%s.%s`' % (User._meta.db_table, 'login') } ).order_by('user__login')
The order_by expression is used to make JOIN on User's model table, than user's login can be accessed in SELECT expression within user_table.login