Python SQLAlchemy Multiple Databases - Binding Automap_Base

I am working through SQLAlchemy but struggling with how to structure the information from the docs into my project. I have two databases: the first will be used to store all new information from the Python application, whereas the second database (DB1 in this case) is an existing database that I need to access information from. What is the right way to create this structure using SQLAlchemy?
I used the suggested BINDS method for multiple databases. This seems to be working.
class BaseConfig(object):
    SECRET_KEY = "SO_SECURE"
    DEBUG = True
    SQLALCHEMY_DATABASE_URI = 'mssql+pyodbc://sa:funpassword#(localdb)\MSSQLLocalDB/Testing?driver=SQL+Server+Native+Client+11.0'
    SQLALCHEMY_BINDS = {
        'DB1': 'mssql+pyodbc://sa:$funpassword#ProdDB/DB1?driver=SQL+Server+Native+Client+11.0'
    }
    SQLALCHEMY_TRACK_MODIFICATIONS = True
This configuration seems to work okay, because I am able to create new models in both of these databases using the code below (done just to confirm that I could connect to both). db is my SQLAlchemy(app) initialization in my index.py file.
from index import app, db

# Test writing to the default DB
class Test(db.Model):
    id = db.Column(db.Integer(), primary_key=True)
    employeeNum = db.Column(db.String(255), unique=False)
    job = db.Column(db.String(255))

    def __init__(self, employeeNum, job):
        self.employeeNum = employeeNum
        self.job = job

# Test writing to DB1 (run separately; it reuses the class name Test)
class Test(db.Model):
    __bind_key__ = 'DB1'
    id = db.Column(db.Integer(), primary_key=True)
    employeeNum = db.Column(db.String(255), unique=False)
    job = db.Column(db.String(255))

    def __init__(self, employeeNum, job):
        self.employeeNum = employeeNum
        self.job = job
I have tried many combinations using Table and automap_base from the SQLAlchemy docs, but this does not seem to work. I am not sure how I can use the bind key of DB1 when I am trying to map existing tables.
from sqlalchemy import Table, Column, Text
from sqlalchemy.ext.automap import automap_base
from index import app, db

def loadSession():
    Table('employee', db.metadata, Column('emp_num', Text, primary_key=True))
    Base = automap_base(metadata=metadata)  # NameError: 'metadata' is not defined here
    Base.prepare()
    employee = Base.classes.employee
    emp = db.session.query(employee).all()
    for i in emp:
        print(i.data)
Thanks for any help you can provide.

For your DB1, which has an existing schema and data, you can use reflection to get the tables mapped into SQLAlchemy.
For your example it would probably look something like this:
from sqlalchemy.ext.automap import automap_base
from sqlalchemy import MetaData
from sqlalchemy.orm import scoped_session, sessionmaker
from index import app, db

def loadSession():
    # get a db engine object for the DB1 configuration
    engine = db.get_engine(bind='DB1')

    # create an empty MetaData object and bind it to DB1
    database = MetaData(bind=engine)

    # load the DB1 table/column structure into the MetaData object.
    # This is quite a heavy operation and should optimally be done once
    # on app setup; the created automap base can then be stored and
    # reused on demand until app shutdown.
    database.reflect(
        bind=engine, views=True, autoload_replace=False
    )

    # create a Python class structure out of the db metadata structure
    auto_base = automap_base(metadata=database)
    auto_base.prepare()

    # create a db session for DB1
    db_session = scoped_session(sessionmaker(autocommit=False,
                                             autoflush=False,
                                             bind=engine))

    # get the reflected employee class from DB1.
    # The employee table must exist in the database.
    employee = auto_base.classes.employee

    # query DB1 with the new session object for all employee records
    emp = db_session.query(employee).all()
    for i in emp:
        print(i.data)

    # the query could also be issued without creating a session object,
    # but only if the mapped class has a query property attached (as
    # Flask-SQLAlchemy models do; plain automapped classes do not):
    for entry in employee.query.all():
        print(entry)
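Since the reflection is expensive, a minimal sketch of doing it once at startup and reusing the mapped classes might look like this (the module name reflected_db1.py and the Employee alias are illustrative assumptions, not part of the original answer):

# reflected_db1.py - hypothetical module that reflects DB1 once at import time
from sqlalchemy import MetaData
from sqlalchemy.ext.automap import automap_base
from index import db

_engine = db.get_engine(bind='DB1')
_metadata = MetaData(bind=_engine)
_metadata.reflect(bind=_engine, views=True, autoload_replace=False)

Base = automap_base(metadata=_metadata)
Base.prepare()

# other modules can now import the mapped class without re-reflecting:
# from reflected_db1 import Employee
Employee = Base.classes.employee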

Related

SQLAlchemy problem connecting two tables in a database

Hello, I have a little problem and need help connecting two tables in a database. I am using the ORM in Python, and I know how to connect the tables in a single main.py file, but I decided to make one file per table, and now using a foreign key raises an error. Here is the code I used for connecting the two tables:
from sqlalchemy import create_engine, Column, Integer, String, ForeignKey
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base

engine = create_engine('mysql+pymysql://root:#localhost/popis?charset=utf8mb4', echo=False)
Session = sessionmaker(bind=engine)
session = Session()
Base = declarative_base()

class Stanovnik(Base):
    __tablename__ = "stanovnik"
    id = Column(Integer, primary_key=True)
    ime = Column(String(20))
    prezime = Column(String(50))
    ratni_staz = Column(Integer)
    godine = Column(Integer)
    broj_clanova = Column(Integer)
    # these lines fail: adresa, zena and ostali are not defined in this file
    sifra_adrese = Column(Integer, ForeignKey(adresa.sifra_adrese))
    sifra_zene = Column(Integer, ForeignKey(zena.sifra_zene))
    sifra_ostali = Column(Integer, ForeignKey(ostali.sifra_ostali))

Base.metadata.create_all(engine)
I have tried every method I could find, but I think one thing is missing.
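The usual fix for models split across files is the string form of ForeignKey, which is resolved by table and column name once all models are registered on a shared Base, so the referenced class never needs importing. A minimal sketch under that assumption (the base.py layout and the adresa table name are illustrative guesses):

# base.py - hypothetical shared module, so every model file uses the same Base
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

# stanovnik.py
from sqlalchemy import Column, Integer, ForeignKey
from base import Base

class Stanovnik(Base):
    __tablename__ = "stanovnik"
    id = Column(Integer, primary_key=True)
    # string form: resolved at mapping time by table/column name,
    # so adresa.py does not have to be imported here
    sifra_adrese = Column(Integer, ForeignKey("adresa.sifra_adrese"))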

SQLAlchemy Async: How to synchronise the columns of a declarative model with its underlying table after metadata reflection?

I have also asked this question in the sqlalchemy discussion group.
I am using alembic to apply a migration for a PostgreSQL view using an async engine; this is successfully applied to the database.
I have the following declarative class mapped to the view, defined as:
class MailingListView(Base):
    """View for mailing labels.

    After metadata reflection from db -> model, expecting columns to
    be available on this class.
    """
    __tablename__ = "mailing_list_view"

    # Specify the column override from the underlying view that is the primary key
    id = Column(UUID(as_uuid=True), primary_key=True)

    # Expecting the columns below to be mapped on this class after
    # metadata reflection. Currently I have to uncomment these
    # to manually synchronise with the view!
    #
    # addressee = Column(String)
    # street = Column(String)
    # town = Column(String)
    # county = Column(String)
    # postcode = Column(String)
    # cursor = Column(String)
I am reflecting the views using the following:
def use_inspector(conn):
    inspector = inspect(conn)
    return inspector.get_view_names()

views = await connection.run_sync(use_inspector)

# I can see the table columns in __table__.c.keys()
# after the reflection below has run
await connection.run_sync(
    target_metadata.reflect,
    only=views,
    views=True,
    extend_existing=True,
)
After applying migrations and performing the above reflection I can see that my mapped model has the underlying table columns updated with those defined in the underlying view.
obj = MailingListView()
obj.__table__.c.keys()
However, the properties of my mapped class are not updated after reflection, raising an exception:
obj = MailingListView()
obj.town = "town" # this raises an exception with unavailable property
How is it possible, with a PostgreSQL db (asyncpg) and async SQLAlchemy, to synchronise the columns of a declarative model with its underlying table after metadata reflection?
Currently I have to specify the columns in the declarative model manually.
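One approach that avoids listing the columns twice is DeferredReflection, where the mapping of each subclass is completed only when prepare() runs the reflection. A hedged sketch: passing the sync connection into prepare() assumes SQLAlchemy 2.0, and the engine wiring here is illustrative.

from sqlalchemy import Column
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.ext.declarative import DeferredReflection
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Reflected(DeferredReflection):
    # mapping of subclasses is completed when Reflected.prepare() runs
    __abstract__ = True

class MailingListView(Reflected, Base):
    __tablename__ = "mailing_list_view"
    # only the primary key override is declared; the remaining view
    # columns are filled in by reflection
    id = Column(UUID(as_uuid=True), primary_key=True)

async def prepare_models(async_engine):
    # run the synchronous reflection step over the async connection
    async with async_engine.connect() as connection:
        await connection.run_sync(lambda conn: Reflected.prepare(conn))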

Error creating tables in MySQL using peewee and FastAPI

I have the basic code below, where I am trying to create tables in the food_delivery database using the peewee ORM. I am receiving this error:
AttributeError: 'str' object has no attribute 'safe_create_index'
My code is below:
from peewee import MySQLDatabase, Model, AutoField, CharField, IntegerField

DATABASE = 'food_delivery'

db = MySQLDatabase(
    host='localhost',
    user='my_user',
    password='****',
    database=DATABASE
)

class BaseModel(Model):
    class Meta:
        database = DATABASE

class Customer(BaseModel):
    city = CharField()
    customer = AutoField(column_name='customer_id')
    email = CharField(column_name='email_id', unique=True)
    first_name = CharField()
    landmark = CharField()
    last_name = CharField()
    password = CharField()
    phone_no = CharField(max_length=10)
    pincode = IntegerField()
    state = CharField()

    class Meta:
        table_name = 'customer'
        database = DATABASE

def create_tables():
    with db:
        db.create_tables([Customer])

create_tables()
Can someone suggest what might be wrong with the code?
Thanks!
I kept looking into the issue and found that I basically had a logical misplacement: under Meta in all the table definitions, I had to pass the database connection instance instead of the database name. But I do wonder how misleading the thrown error is; it is nearly impossible to fix by looking at the error details alone. This could use improvement by the library maintainers.
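In other words, the fix is to point Meta.database at the MySQLDatabase instance rather than at the name string. A minimal sketch of the corrected wiring (trimmed to one field for brevity):

from peewee import MySQLDatabase, Model, CharField

db = MySQLDatabase('food_delivery', host='localhost',
                   user='my_user', password='****')

class BaseModel(Model):
    class Meta:
        database = db  # the connection instance, not the name string

class Customer(BaseModel):
    first_name = CharField()

    class Meta:
        table_name = 'customer'
        database = db

db.create_tables([Customer])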

insert_from over two databases

I am trying to move all rows from one database table (source) to another database (target). The source DB is a local database while the target DB runs on another machine. I want to transfer rows between the two databases and found the Model.insert_from() method for that task. Unfortunately it does nothing, and I can't find any reason for that.
The database model is:
from peewee import MySQLDatabase, Model, DateTimeField, TextField, CharField, SQL

databaseSource = MySQLDatabase('sourceDB', **{'host': 'localhost', 'user': 'local', 'password': ''})
databaseTarget = MySQLDatabase('targetDB', **{'host': 'externalserver', 'user': 'external', 'password': ''})

class BaseModelSource(Model):
    class Meta:
        database = databaseSource

class BaseModelTarget(Model):
    class Meta:
        database = databaseTarget

class UsersSource(BaseModelSource):
    crdate = DateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    description = TextField()
    firstName = CharField(column_name='first_name')

    class Meta:
        table_name = 'users'

class UsersTarget(BaseModelTarget):
    crdate = DateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    description = TextField()
    firstName = CharField(column_name='first_name')

    class Meta:
        table_name = 'users'
With that my task should run with:
import peewee
from dbmodels import *

dataQuery = UsersSource.select(
    UsersSource.crdate,
    UsersSource.description,
    UsersSource.firstName)

insertQuery = UsersTarget.insert_from(dataQuery, [
    UsersTarget.crdate,
    UsersTarget.description,
    UsersTarget.firstName]).execute()
The resulting MySQL query is this, and as you can see, the selected data is empty ([]):
('INSERT INTO `users` (`crdate`, `description`, `first_name`) SELECT `t1`.`crdate`, `t1`.`description`, `t1`.`first_name` FROM `users` AS `t1`', [])
When I run the SELECT query on my table it outputs:
SELECT `t1`.`crdate`, `t1`.`description`, `t1`.`first_name` FROM `users` AS `t1`;
2018-08-12 16:50:36 valid Heinz
2018-08-12 19:34:45 valid Hilde
2018-08-12 19:33:31 invalid Paul
I searched like hell but didn't find any hint as to why my result is empty.
Does anybody know more, or a better method?
Peewee cannot insert data between two different database servers/connections. If both databases are on the same server, however, you can use the "schema" Meta option to reference each database from a single connection, and do the INSERT FROM that way:
db = MySQLDatabase('my_db')

class UsersSource(Model):
    crdate = DateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    description = TextField()
    firstName = CharField(column_name='first_name')

    class Meta:
        database = db
        schema = 'source_db_name'
        table_name = 'users'

class UsersTarget(Model):
    crdate = DateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    description = TextField()
    firstName = CharField(column_name='first_name')

    class Meta:
        database = db
        schema = 'dest_db_name'
        table_name = 'users'
If the databases are on different servers, then you have no other option but to dump it and reload it.
If this is a one-off operation I recommend using mysqldump and then copying the file to the remote server and sourcing the dumped files with the mysql client.
If this needs to be a continual process look at MySQL replication.
To batch transfer it you'll need to iterate over the result set of the SELECT and feed it into INSERT statements yourself, since the tables live on different servers; a sketch follows.
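A minimal sketch of that batch transfer using peewee's insert_many and its chunked helper (the batch size of 100 is an arbitrary assumption):

from peewee import chunked

fields = [UsersTarget.crdate, UsersTarget.description, UsersTarget.firstName]

# pull rows from the source server as plain tuples
rows = UsersSource.select(
    UsersSource.crdate,
    UsersSource.description,
    UsersSource.firstName).tuples()

# push them to the target server in batches, inside one transaction
with databaseTarget.atomic():
    for batch in chunked(rows, 100):
        UsersTarget.insert_many(batch, fields=fields).execute()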

Persist one object from one database to another using sqlalchemy

I have two databases (both Mysql) that have exactly the same tables, and I want to copy some data from one to another using Sqlalchemy.
I can copy simple objects following the answer given in this question:
Cannot move object from one database to another
The problem is when the object has dependencies from another table, and I want to copy the dependencies as well.
So to make it more clear, this is my model (the same for both databases but using a different bind_key that points to a different database):
db1 = SQLAlchemy()

class Payment(db1.Model):
    __tablename__ = 'payments'
    __bind_key__ = 'db1'
    id = db1.Column(db1.Integer, primary_key=True)
    paymethod_id = db1.Column(db1.Integer(), db1.ForeignKey(PaymentMethod.id))
    payment_method = db1.relationship(PaymentMethod)
What I would like to do is the following:
from models1 import Payment as Payment1
from models2 import Payment as Payment2

# query from one database
payment1 = db1.session.query(Payment1).first()

# create and add it to the other database
payment2 = Payment2(**payment1.__dict__.copy())
db2.session.add(payment2)
db2.session.commit()
But in this case the foreign key fails because I don't have the PaymentMethod stored yet.
Is there a different approach to do that or I would have to do this procedure for every dependency of my object and be sure that I store the children beforehand?
Any help is appreciated :)
I came up with a solution that remaps the object to the right model and stores all its children. You call the method save_obj and pass the object you want to remap. It retrieves the model with the same table name from the target registry and recursively does the same for all the object's children; you define the right model lookup in the get_model method.
To run this it is necessary to disable autoflush, to prevent committing before the object is correctly formed, and it is also necessary to commit after calling the method. I'm using flask-sqlalchemy.
Hope this can help or give some insight to someone facing a similar problem :)
from copy import copy

from sqlalchemy.orm.util import CascadeOptions

# `session` and `db` below come from the flask-sqlalchemy app setup

def save_obj(obj, checked=None):
    # a mutable default argument would leak state between top-level calls
    if checked is None:
        checked = []
    if obj in checked:
        # if the object was already converted, retrieve the right object
        model = get_model(obj.__mapper__.mapped_table.name)
        return get_obj(obj, model)
    checked.append(obj)
    children = []
    # get all the relationships of this model (foreign keys)
    relations = obj.__mapper__.relationships.items()
    for relation in relations:
        model = get_model(relation[1].table.name)
        if model:
            # remove the cascade option for this object, so the children
            # are not stored automatically in the session
            relation[1]._cascade = CascadeOptions('')
            child = getattr(obj, relation[0])
            if not child:
                continue
            # if the child is a set of children
            if isinstance(child, list):
                new_children = []
                for ch in copy(child):
                    # convert the child
                    new_child = save_obj(ch, checked)
                    new_children.append(new_child)
                children.append((relation[0], new_children))
            else:
                new_child = save_obj(child, checked)
                children.append((relation[0], new_child))
    # get the model of the object passed
    model = get_model(obj.__mapper__.mapped_table.name)
    new_obj = get_obj(obj, model)
    # set all the children in this object
    for child in children:
        if child[1]:
            setattr(new_obj, child[0], child[1])
    checked.append(new_obj)
    session.add(new_obj)
    return new_obj

def get_model(table_name):
    # get the right model for this table name
    for table in db.Model._decl_class_registry.values():
        if hasattr(table, '__tablename__') and table.__tablename__ == table_name:
            return table
    return None

def create_new_obj(obj, model):
    params = obj.__dict__.copy()
    params.pop('_sa_instance_state')
    return model(**params)

def get_obj(child, model):
    # check if the object is already stored in the db
    child_in_db = session.query(model).get(child.id)
    if child_in_db:
        return child_in_db
    # check if the object is already in the session
    for s in session.new:
        if type(s) == model and s.id == child.id:
            return s
    return create_new_obj(child, model)
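Following the note above about autoflush, a hedged usage sketch (payment1 here is the object queried in the earlier snippet):

# rebuild the object graph with autoflush off, so nothing is flushed
# before the copy is fully formed, then commit once at the end
with session.no_autoflush:
    new_payment = save_obj(payment1)
session.commit()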