I'm a newbie with SQLAlchemy.
I have a table with a multi-column (composite) key that includes the column USERNAME.
This is what I've done in my model.
Model:
from flask import Flask
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql://root@localhost/admin'
db = SQLAlchemy(app)


class RADUSAGE(db.Model):
    __tablename__ = 'RADUSAGE'

    USERNAME = db.Column(db.String(513))
    AGE = db.Column(db.Integer)

    def __init__(self, USERNAME, AGE):
        self.USERNAME = USERNAME
        self.AGE = AGE

    def __repr__(self):
        return '<RADUSAGE %r>' % self.USERNAME
But I got an error:
File "/Users/admin/rad_env/lib/python2.7/site-packages/sqlalchemy/ext/declarative/base.py", line 530, in map
**self.mapper_args
File "<string>", line 2, in mapper
File "/Users/admin/rad_env/lib/python2.7/site-packages/sqlalchemy/orm/mapper.py", line 677, in __init__
self._configure_pks()
File "/Users/admin/rad_env/lib/python2.7/site-packages/sqlalchemy/orm/mapper.py", line 1277, in _configure_pks
(self, self.mapped_table.description))
sqlalchemy.exc.ArgumentError: Mapper Mapper|RADUSAGE|RADUSAGE could not assemble any primary key columns for mapped table 'RADUSAGE'
How can I declare a table class with a composite (multi-column) primary key in SQLAlchemy? Hope someone can help me. Thanks in advance.
You should be able to specify primary_key=True in your mapping for each primary key column:
USERNAME = db.Column(db.String(513), primary_key=True)
AGE = db.Column(db.Integer, primary_key=True)
Did you try that?
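For reference, a minimal sketch of what the whole model might look like with a composite primary key (this assumes USERNAME and AGE together really identify a row; adjust it to whichever columns actually form your key):

class RADUSAGE(db.Model):
    __tablename__ = 'RADUSAGE'

    # Marking both columns with primary_key=True gives the mapper a composite
    # primary key (USERNAME, AGE), which resolves the "could not assemble any
    # primary key columns" error above.
    USERNAME = db.Column(db.String(513), primary_key=True)
    AGE = db.Column(db.Integer, primary_key=True)

    def __repr__(self):
        return '<RADUSAGE %r>' % self.USERNAME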
I started a project using PostgreSQL and SQLAlchemy. Since I'm not an experienced programmer (I just started using classes) and I'm also quite new to databases, I've noticed some workflows I don't really understand.
What I understand up till now about classes is the following workflow:
# filename.py
class ClassName():
    def __init__(self):
        # do something
        ...

    def some_function(self, var1, var2):
        # do something with parameters
        ...
---------------------------------------
# main.py
from filename import ClassName

par1 = ...
par2 = ...
a = ClassName()
b = a.some_function(par1, par2)
Now I am creating tables from classes:
# base.py
from sqlalchemy.orm import declarative_base

Base = declarative_base()


# tables.py
from base import Base
from sqlalchemy import Column
from sqlalchemy import Integer, String


class A(Base):
    __tablename__ = "a"

    a_id = Column(Integer, primary_key=True)
    a_column = Column(String(30))


class B(Base):
    __tablename__ = "b"

    b_id = Column(Integer, primary_key=True)
    b_column = Column(String(30))
and
import typing

from base import Base
from sqlalchemy import create_engine
from sqlalchemy import MetaData
from sqlalchemy import Table
from sqlalchemy.orm import sessionmaker
from tables import A, B

metadata_obj = MetaData()


def create_tables(engine):
    session = sessionmaker()
    session.configure(bind=engine)
    # create_all() emits CREATE TABLE for every class registered on Base
    Base.metadata.create_all(bind=engine)
    # reflect the freshly created tables back as Table objects
    a = Table("a", metadata_obj, autoload_with=engine)
    b = Table("b", metadata_obj, autoload_with=engine)
    return (a, b)  # not sure return is needed


if __name__ == "__main__":
    username = "username"
    password = "AtPasswordHere!"
    dbname = "dbname"
    url = "postgresql://" + username + ":" + password + "@localhost/" + dbname
    engine = create_engine(url, echo=True, future=True)
    a, b = create_tables(engine)
Everything works fine in that it creates table "a" and table "b" in the database. The points I don't understand are the following:
Both my IDE (pyflakes) and LGTM complain 'tables. ... imported but not used'. (EDIT: I understand why it complains, given that this is not the normal class workflow; the question is more about why it is not the normal class workflow.)
Is this normal behavior for this use case? I only see examples that use the above workflow.
Are there better methods to create the same results (but without the warnings)?
If this is the normal behavior, is there an explanation for it? I didn't read it anywhere.
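As an aside, a minimal sketch (not the asker's code) of why the import looks unused: subclassing Base is what registers A and B on Base.metadata, so the import is needed only for that side effect, while the script then binds names to the reflected Table objects instead. If you only need the ORM classes, you can skip the reflection and use A and B directly, which also removes the warning; if you really do import a module purely for its registration side effect, a "# noqa: F401" comment is the usual way to tell pyflakes the import is intentional.

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

from base import Base
from tables import A, B


def create_tables(engine):
    # create_all() already knows about A and B because subclassing Base
    # registered them on Base.metadata when tables.py was imported
    Base.metadata.create_all(bind=engine)


if __name__ == "__main__":
    url = "postgresql://username:password@localhost/dbname"
    engine = create_engine(url, echo=True, future=True)
    create_tables(engine)

    # work with the ORM classes directly instead of reflected Table objects
    Session = sessionmaker(bind=engine)
    with Session() as session:
        session.add(A(a_column="hello"))
        session.add(B(b_column="world"))
        session.commit()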
Since MySQL's auto_increment_increment setting is global and cannot be set for a specific table, I'm wondering whether it's possible to make the id auto-increment by 2 in a Django model.
models.py
class Video(models.Model):
    name = models.CharField(max_length=100, default='')
    upload_time = models.DateTimeField(blank=True, null=True)

    def __str__(self):
        return self.name
What should I do? Thanks for your help.
You could do it by overriding the save() method of your model:
from django.db.models import Max, F


class Video(models.Model):
    id = models.AutoField(primary_key=True)
    name = models.CharField(max_length=100, default='')
    upload_time = models.DateTimeField(blank=True, null=True)

    def save(self, *args, **kwargs):
        if not self.pk:
            max = Video.objects.aggregate(max=Max(F('id')))['max']
            self.id = max + 2 if max else 1  # if the DB is empty
        super().save(*args, **kwargs)

    def __str__(self):
        return self.name
The correct way is to change your MySQL server settings.
Check this out: auto_increment_increment
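For illustration, a small sketch (not part of the answer) of inspecting and changing that variable from a Django shell. Note that auto_increment_increment is a server/session-wide MySQL setting rather than a per-table one, and changing it globally requires the appropriate privileges:

from django.db import connection

with connection.cursor() as cursor:
    # show the current step size (defaults to 1)
    cursor.execute("SHOW VARIABLES LIKE 'auto_increment_increment'")
    print(cursor.fetchone())
    # change it for this session only; use SET GLOBAL (or my.cnf) to persist
    cursor.execute("SET SESSION auto_increment_increment = 2")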
Possible Solutions:
Assume we have a Customer model.

Customer.objects.order_by('primary_key_id').last().primary_key_id + 2
primary_key_id = models.IntegerField(default=(Customer.objects.order_by('primary_key_id').last().primary_key_id + 2), primary_key=True)
or
from django.db import transaction

# Uncomment the commented lines for Django versions below 2.0
def save(self):
    "Get the last id from the database and increment it before saving"
    # with transaction.atomic():
    #     top = Customer.objects.select_for_update(nowait=True).order_by('-customer_customerid')[0]  # ensures a lock on the database
    top = Customer.objects.order_by('-id')[0]
    self.id = top.id + 1
    super(Customer, self).save()
The above code does not have a concurrency issue on Django 2.0 and later:
As of Django 2.0, related rows are locked by default (not sure what the behaviour was before), and the rows to lock can be specified in the same style as select_related, using the of parameter.
For lower versions, you need to be atomic!
or
from django.db import transaction


def increment():
    with transaction.atomic():
        ids = Customer.objects.all()
        length = len(ids) - 1
        if length <= 0:  # changed to handle empty entries
            return 1
        else:
            id = ids[length].customer_customerid
            return id + 2
or
from django.db import transaction


def increment():
    with transaction.atomic():
        return Customer.objects.select_for_update(nowait=True).order_by('-customer_customerid')[0]  # ensures an atomic approach
and set the primary key in the model to an IntegerField, calling increment() for every new entry. Then, in your models.py, set the primary key to:

import increment()
primary_key_id = models.IntegerField(default=increment(), primary_key=True)
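One caveat worth noting (my observation, not part of the quoted answer): default=increment() calls the function once, when models.py is imported, so every new row would see that same value. Django's default also accepts a callable, which it evaluates for each new row, so you would usually pass the function itself:

# pass the callable, not its result, so Django calls it per new row
primary_key_id = models.IntegerField(default=increment, primary_key=True)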
I get an error I don't understand when I do a session commit after a deletion like this (in a shell with the Flask app context, or anywhere while running the app):
>>> from app.extensions import db
>>> from app.models.user import User
>>> user = User.query.all()[0]
>>> db.session.delete(user)
>>> db.session.commit()
File "/Users/hugo/Dropbox/lahey/api/.venv/lib/python3.6/site-packages/sqlalchemy/util/langhelpers.py", line 962, in module
    % (self._il_path, self._il_addtl))
ImportError: importlater.resolve_all() hasn't been called (this is sqlalchemy.orm strategy_options)
My model for the object I try to delete looks like this:
import datetime

from sqlalchemy_utils.types.password import PasswordType
from sqlalchemy_utils import force_auto_coercion

from app.extensions import db

# Setup coercion of passwords
force_auto_coercion()


class User(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    email = db.Column(db.String(120), unique=True, nullable=False)
    password = db.Column(PasswordType(schemes=['pbkdf2_sha512']), nullable=False)
    name = db.Column(db.String(256))
    created_at = db.Column(db.DateTime, default=datetime.datetime.now)
    updated_at = db.Column(db.DateTime, onupdate=datetime.datetime.now)
Deleting objects of other models works fine. Could this somehow be because I'm using the PasswordType column from sqlalchemy_utils?
If you are using Flask, the docstring says you are configuring the column incorrectly:
Lazy configuration of the type with Flask config:
import flask
from sqlalchemy_utils import PasswordType, force_auto_coercion

force_auto_coercion()


class User(db.Model):
    __tablename__ = 'user'

    password = db.Column(
        PasswordType(
            # The returned dictionary is forwarded to the CryptContext
            onload=lambda **kwargs: dict(
                schemes=flask.current_app.config['PASSWORD_SCHEMES'],
                **kwargs
            ),
        ),
        unique=False,
        nullable=False,
    )
I've worked out what caused this. I've been using the package sqlalchemy_bulk_lazy_loader, which had a bug (strategy_options was not imported in a correct way). The issue is now fixed in the package.
See the full SQLAlchemy mailing list thread for the details.
I was following this tutorial as a starting point to using models/controllers with Python. Bear with me as I'm learning Python.
I'm basically trying to get a JSON representation of my database table with key:value pairs, as one would typically expect.
I can't seem to figure out the proper way of doing this.
My Model looks like this:
from app import db
import json


# define a base model for other database tables to inherit
class Base(db.Model):
    __abstract__ = True

    id = db.Column(db.Integer, primary_key=True)
    date_created = db.Column(db.DateTime, default=db.func.current_timestamp())
    date_modified = db.Column(db.DateTime, default=db.func.current_timestamp(), onupdate=db.func.current_timestamp())


# define a members model
class Member(Base):
    __tablename__ = "members"

    # member name
    fname = db.Column(db.String(48), nullable=False)
    sname = db.Column(db.String(48), nullable=False)
    title = db.Column(db.String(90), nullable=True)

    # new instance instantiation procedure
    def __init__(self, fname, sname, title):
        self.fname = fname
        self.sname = sname
        self.title = title

    def __repr__(self):
        # return '<Member %r>' % (self.fname)
        # return '[{"id":self.id,"fname":self.fname,"sname":self.sname,"title":self.title}]'
        return json.dumps(self.__dict__)
That will return an error: TypeError: datetime.datetime(2015, 2, 18, 11, 50, 1) is not JSON serializable
My Controller looks like the following:
# import flask dependencies
from flask import Blueprint, request, jsonify, Response

# import the database object from the main app module
from app import db

# import module models (i.e. Members)
from app.team_members.models import Member

# define the blueprint: 'member', set its url prefix: app.url/members
team_members = Blueprint('member', __name__, url_prefix='/members')


# set the route and accepted methods
@team_members.route('/list/', methods=['GET'])
def list():
    members = Member.query.all()
    return jsonify(dict(('member%d' % i, member) for i, member in enumerate(Member.query.all(), start=1)))
    # resp = Response(response=members, status=200, mimetype="application/json")
    # return resp
I understand that jsonify will not return a list but a dictionary, for security reasons. And if I understand correctly, __repr__ needs to return a string.
Any help would be much appreciated.
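One common approach (a sketch, not from the tutorial; the to_dict name is my own) is to give the model a small serialization method that converts the datetime columns explicitly, and then build the response from plain dicts:

# on the Member model
def to_dict(self):
    return {
        'id': self.id,
        'fname': self.fname,
        'sname': self.sname,
        'title': self.title,
        # datetimes are not JSON serializable, so convert them to strings
        'date_created': self.date_created.isoformat() if self.date_created else None,
        'date_modified': self.date_modified.isoformat() if self.date_modified else None,
    }


# in the controller
@team_members.route('/list/', methods=['GET'])
def list():
    members = Member.query.all()
    return jsonify(members=[m.to_dict() for m in members])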
I want to create a new instance of an SQLAlchemy object, so that fields are filled with default values, but I want to commit that to the database generating an UPDATE to a row that already exists with the same primary key, effectively resetting it to the default values. Is there any simple way to do that?
I have tried to do that and failed, because the SQLAlchemy session tracks the state of objects, so there is no easy way to make the session treat a new object as a persistent one.
But you want to reset an object to its defaults, don't you? There is a simple way to do that:
from sqlalchemy.ext.declarative import declarative_base


class Base(object):
    def reset(self):
        for name, column in self.__class__.__table__.columns.items():
            if column.default is not None:
                setattr(self, name, column.default.execute())


Base = declarative_base(bind=engine, cls=Base)
This adds a reset() method to all your model classes.
Here is the complete working example to fiddle with:
import os
from datetime import datetime

from sqlalchemy import create_engine
from sqlalchemy import Column, Integer, String, DateTime
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.sql import functions

here = os.path.abspath(os.path.dirname(__file__))
engine = create_engine('sqlite:///%s/db.sqlite' % here, echo=True)
Session = sessionmaker(bind=engine)


class Base(object):
    def reset(self):
        for name, column in self.__class__.__table__.columns.items():
            if column.default is not None:
                setattr(self, name, column.default.execute())


Base = declarative_base(bind=engine, cls=Base)


class Thing(Base):
    __tablename__ = 'things'

    id = Column(Integer, primary_key=True)
    value = Column(String(255), default='default')
    ts1 = Column(DateTime, default=datetime.now)
    ts2 = Column(DateTime, default=functions.now())

    def __repr__(self):
        return '<Thing(id={0.id!r}, value={0.value!r}, ' \
               'ts1={0.ts1!r}, ts2={0.ts2!r})>'.format(self)
if __name__ == '__main__':
    Base.metadata.drop_all()
    Base.metadata.create_all()

    print("---------------------------------------")
    print("Create a new thing")
    print("---------------------------------------")
    session = Session()
    thing = Thing(
        value='some value',
        ts1=datetime(2014, 1, 1),
        ts2=datetime(2014, 2, 2),
    )
    session.add(thing)
    session.commit()
    session.close()

    print("---------------------------------------")
    print("Querying it from DB")
    print("---------------------------------------")
    session = Session()
    thing = session.query(Thing).filter(Thing.id == 1).one()
    print(thing)
    session.close()

    print("---------------------------------------")
    print("Reset it to default")
    print("---------------------------------------")
    session = Session()
    thing = session.query(Thing).filter(Thing.id == 1).one()
    thing.reset()
    session.commit()
    session.close()

    print("---------------------------------------")
    print("Querying it from DB")
    print("---------------------------------------")
    session = Session()
    thing = session.query(Thing).filter(Thing.id == 1).one()
    print(thing)
    session.close()
Is there any simple way to do that?
Upon further consideration, not really. The cleanest way will be to define your defaults in __init__. The constructor is never called when fetching objects from the DB, so it's perfectly safe. You can also use backend functions such as current_timestamp().
import sqlalchemy as sa
from sqlalchemy import Column

# Base is assumed to be your declarative base (e.g. Base = declarative_base())


class MyObject(Base):
    id = Column(sa.Integer, primary_key=True)
    column1 = Column(sa.String)
    column2 = Column(sa.Integer)
    columnN = Column(sa.String)
    updated = Column(sa.DateTime)

    def __init__(self, **kwargs):
        kwargs.setdefault('column1', 'default value')
        kwargs.setdefault('column2', 123)
        kwargs.setdefault('columnN', None)
        kwargs.setdefault('updated', sa.func.current_timestamp())
        super(MyObject, self).__init__(**kwargs)


# build a "default" instance, point it at the existing row's primary key,
# and merge it so the commit issues an UPDATE resetting that row
default_obj = MyObject()
default_obj.id = old_id
session.merge(default_obj)
session.commit()