I am working on a task to remove added pages from a PostgreSQL database using SQLAlchemy.
The page gets deleted from the server but fails to get deleted from the database.
Here's the function which deletes the page:
def delete_page(self, page_id, application):
    # assume the page is removed from every campaign until we find it active
    removed_from_everyone = True
    # for each campaign_id
    for campaign_id in self.bandit_campaigns:
        if page_id in self.bandit_pages[campaign_id].keys():
            # If the page is active
            if self.bandit_pages[campaign_id][page_id]:
                removed_from_everyone = False
    # check if the page exists and if it's not used by anyone
    if page_id in self.structure.keys() and removed_from_everyone:
        del self.structure[page_id]
        # for all the campaigns
        for campaign_id in self.bandit_campaigns:
            # remove it from the campaign's pages
            del self.bandit_pages[campaign_id][page_id]
        application.logger.info(f'page_id: {page_id}')
        application.logger.info(f'type page_id: {type(page_id)}')
        # remove arm
        self.remove_arm(page_id)
        application.logger.info(f'pages: {self.pages}')
        # Backup of the situation, in this case save only the pages
        # pickle.dump(self.structure, open('structure.pickle', "wb"))
        # this one stores the last known status
        # pickle.dump(self.bandit_pages, open('bandit_pages.pickle', "wb"))
        try:
            pg = Structure.query.filter_by(page_url=page_id)
            db.session.delete(pg)
            bp = Bandit_pages.query.filter_by(campaign_id=campaign_id)
            db.session.delete(bp)
            db.session.commit()
        except Exception as e:
            print("exception in new page deletion", e)
            db.session.rollback()
Here's the code that creates the Structure and Bandit_pages tables:
class Structure(db.Model):
    __tablename__ = 'structure'

    arm_id = db.Column(db.Integer, primary_key=True)
    page_url = db.Column(db.String())

    def __init__(self, arm_id, page_url):
        self.arm_id = arm_id
        self.page_url = page_url


class Bandit_pages(db.Model):
    __tablename__ = 'bandit_pages'

    campaign_id = db.Column(db.String())
    arm_id = db.Column(db.Integer)
    status = db.Column(db.Boolean, default=False)
    __table_args__ = (
        PrimaryKeyConstraint('campaign_id', 'arm_id'),
        {},
    )

    def __init__(self, campaign_id, arm_id, status):
        self.campaign_id = campaign_id
        self.arm_id = arm_id
        self.status = status
I tried deleting them with a for loop, but that didn't help.
The function that adds pages is similar to this delete function, so I am unclear where I am making a mistake. Please help me out. Thanks!
You can try these commands:

db.session.delete(me)
db.session.commit()
Your code is complicated: you use the same loop a few times with the same variable name.
As a result, you try to remove only the last value from self.bandit_campaigns:

bp = Bandit_pages.query.filter_by(campaign_id=campaign_id)
db.session.delete(bp)
db.session.commit()

Also, I'm not sure that the keys of self.bandit_campaigns are string ids; check this too.
To delete all elements from self.bandit_campaigns you can use:

db.session.query(Bandit_pages).filter(Bandit_pages.campaign_id.in_(your_list_of_values)).delete()
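For context, a minimal sketch of how that bulk delete could look with the models from the question (the synchronize_session flag and the commit are my additions, not part of the original answer):

# Sketch: delete the Bandit_pages rows for every campaign in one statement.
# Assumes self.bandit_campaigns iterates over campaign_id values (strings,
# matching the campaign_id column of Bandit_pages).
campaign_ids = list(self.bandit_campaigns)
db.session.query(Bandit_pages).filter(
    Bandit_pages.campaign_id.in_(campaign_ids)
).delete(synchronize_session=False)
db.session.commit()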
I solved the question by myself. It turned out to be a trivial mistake that I had overlooked:
try:
    Structure.query.filter_by(page_url='ml3', arm_id=1).delete()  # .all()
    # db.session.delete(pg)
    Bandit_pages.query.filter_by(campaign_id='96879533', arm_id=1).delete()  # .all()
    # db.session.delete(bp)
    db.session.commit()
    db.session.expire_all()
except Exception as e:
    print("exception in new page deletion", e)
    db.session.rollback()
This piece of code (I changed a few lines) worked fine.
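For anyone hitting the same thing: db.session.delete() expects a mapped instance, while Query.filter_by() returns a Query object, which is why the original delete_page never removed the rows. A generalized sketch of the same fix with variables instead of the hard-coded test values (the Structure lookup by page_url is my assumption about how page_id maps to arm_id):

try:
    # Look up the arm for this page, then delete via the query itself
    # instead of passing a Query object to db.session.delete().
    arm = Structure.query.filter_by(page_url=page_id).first()
    if arm is not None:
        Bandit_pages.query.filter_by(arm_id=arm.arm_id).delete()
        Structure.query.filter_by(page_url=page_id).delete()
    db.session.commit()
    db.session.expire_all()
except Exception as e:
    print("exception in new page deletion", e)
    db.session.rollback()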
I was testing Alembic.
Initially I created a model something like this:
from main import Base
from sqlalchemy import Column, BigInteger, SmallInteger, String, Sequence, ForeignKey


class Users(Base):
    __tablename__ = "users"

    id = Column(BigInteger, Sequence("user_id_seq"), primary_key=True)
    first_name = Column(String(50))
    last_name = Column(String(50))
    email = Column(String(255))
    password = Column(String(60), nullable=True)
Then I created the revision in Alembic, it worked absolutely fine, and I got the expected result.
Then I added the user types table, and my models looked like this:
from main import Base
from sqlalchemy import Column, BigInteger, SmallInteger, String, Sequence, ForeignKey


class Users(Base):
    __tablename__ = "users"

    id = Column(BigInteger, Sequence("user_id_seq"), primary_key=True)
    first_name = Column(String(50))
    last_name = Column(String(50))
    email = Column(String(255))
    password = Column(String(60), nullable=True)
    user_type = Column(SmallInteger, ForeignKey("user_types.id", name="fk_user_type"))


class UserTypes(Base):
    __tablename__ = "user_types"

    id = Column(SmallInteger, Sequence("user_types_id_seq"), primary_key=True)
    type = Column(String(20))
I created the revision for this and that also worked.
But then I thought, let's make user_type default to 1. So I made a small change in the Users model and added a default value of 1:
user_type = Column(SmallInteger,
                   ForeignKey("user_types.id", name="fk_user_type"),
                   default=1)
Ideally, creating a migration now should pick this up, but it gave me a blank file:
"""Made Default Value Of user_type 1
Revision ID: 054b79123431
Revises: 84bc1adb3e66
Create Date: 2022-12-28 17:20:06.757224
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '054b79123431'
down_revision = '84bc1adb3e66'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
I also tried adding compare_server_default=True to the context.configure calls in both the offline and online migration functions, as suggested in an answer I found on the internet about the same issue, but that didn't work either. Here is the link.
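For what it's worth, a Python-side default= is applied by SQLAlchemy at INSERT time and never becomes part of the database schema, so autogenerate has nothing to compare. Below is a hedged sketch of the server-side variant that compare_server_default=True can actually detect; the text("1") literal is my assumption, not code from the question:

from main import Base
from sqlalchemy import Column, BigInteger, SmallInteger, ForeignKey, Sequence, text


class Users(Base):
    __tablename__ = "users"

    id = Column(BigInteger, Sequence("user_id_seq"), primary_key=True)
    # server_default is emitted as DDL (DEFAULT 1), so a change here is part of
    # the database schema and can be detected by autogenerate when
    # compare_server_default=True is set in env.py; a plain default= stays
    # client-side and is invisible to autogenerate.
    user_type = Column(SmallInteger,
                       ForeignKey("user_types.id", name="fk_user_type"),
                       server_default=text("1"))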
So if anyone knows the solution for this, please tell me; I would be really thankful!
I'm using Windows 7 and MySQL 8.0. I tried to edit my.ini after stopping the service. First of all, when I tried to save my.ini with secure_file_priv = "", it said access denied. So I saved it as 'my1.ini', deleted 'my.ini', and then renamed 'my1.ini' back to 'my.ini'. Now when I try to start the MySQL80 service from Administrative Tools > Services, I am unable to start it again. I've also tried this from the CLI client, but it raises the secure_file_priv issue. How do I do it? I've been able to store the scraped data in a MySQL database using Scrapy, but I'm not able to export it to my project directory.
# pipelines.py
from itemadapter import ItemAdapter
import mysql.connector


class QuotewebcrawlerPipeline(object):

    def __init__(self):
        self.create_connection()
        self.create_table()
        # self.dump_database()

    def create_connection(self):
        """
        This method will create the database connection & the cursor object
        """
        self.conn = mysql.connector.connect(host='localhost',
                                            user='root',
                                            passwd='Pxxxx',
                                            database='itemcontainer')
        self.cursor = self.conn.cursor()

    def create_table(self):
        self.cursor.execute("""DROP TABLE IF EXISTS my_table""")
        self.cursor.execute("""CREATE TABLE my_table (
                                   Quote text,
                                   Author text,
                                   Tag text)""")

    def process_item(self, item, spider):
        # print(item['quote'])
        self.store_db(item)
        return item

    def store_db(self, item):
        """
        This method writes the scraped data from the item container into the database
        """
        self.cursor.execute("""INSERT INTO my_table VALUES (%s, %s, %s)""",
                            (item['quote'][0], item['author'][0], item['tag'][0]))
        self.conn.commit()
        # self.dump_database()

    # def dump_database(self):
    #     self.cursor.execute("""USE itemcontainer;SELECT * from my_table INTO OUTFILE 'quotes.txt'""",
    #                         multi=True)
    #     print("Data saved to output file")
# item_container.py
import scrapy

from ..items import QuotewebcrawlerItem


class ItemContainer(scrapy.Spider):
    name = 'itemcontainer'
    start_urls = [
        "http://quotes.toscrape.com/"
    ]

    def parse(self, response):
        items = QuotewebcrawlerItem()
        all_div_quotes = response.css("div.quote")
        for quotes in all_div_quotes:
            quote = quotes.css(".text::text").extract()
            author = quotes.css(".author::text").extract()
            tag = quotes.css(".tag::text").extract()

            items['quote'] = quote
            items['author'] = author
            items['tag'] = tag
            yield items
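Regarding the export part: SELECT ... INTO OUTFILE is exactly what secure_file_priv restricts, so one way to avoid editing my.ini at all is to write the file from Python using the same connection details as the pipeline. A rough sketch (the quotes.csv file name is my choice; passwd reuses the placeholder from the question):

# export_quotes.py - dump my_table to a CSV in the project directory,
# instead of asking the MySQL server to write the file for you.
import csv

import mysql.connector

conn = mysql.connector.connect(host='localhost', user='root',
                               passwd='Pxxxx', database='itemcontainer')
cursor = conn.cursor()
cursor.execute("SELECT Quote, Author, Tag FROM my_table")

with open('quotes.csv', 'w', newline='', encoding='utf-8') as f:
    writer = csv.writer(f)
    writer.writerow(['Quote', 'Author', 'Tag'])
    writer.writerows(cursor.fetchall())

conn.close()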
Since the auto_increment setting in MySQL is global and cannot be set for a specific table, I'm wondering whether it's possible to make the id auto-increment by 2 in a Django model.
models.py

class Video(models.Model):
    name = models.CharField(max_length=100, default='')
    upload_time = models.DateTimeField(blank=True, null=True)

    def __str__(self):
        return self.name
What should I do? Thanks for your help.
You could do it by overriding the save() method of your model:
from django.db.models import Max, F


class Video(models.Model):
    id = models.AutoField(primary_key=True)
    name = models.CharField(max_length=100, default='')
    upload_time = models.DateTimeField(blank=True, null=True)

    def save(self, *args, **kwargs):
        if not self.pk:
            max = Video.objects.aggregate(max=Max(F('id')))['max']
            self.id = max + 2 if max else 1  # if the DB is empty
        super().save(*args, **kwargs)

    def __str__(self):
        return self.name
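A quick usage sketch of the override above; the generated ids (1, 3, 5, ...) assume an empty table, no deletions in between, and no concurrent writers:

# Each create() goes through the overridden save(), which picks max(id) + 2.
v1 = Video.objects.create(name="intro")    # id == 1
v2 = Video.objects.create(name="teaser")   # id == 3
v3 = Video.objects.create(name="trailer")  # id == 5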
The correct way is to change your MySQL server settings.
Check this out: auto_increment_increment
Possible solutions:

Assume I have a Customer model. You can compute the next id as:

Customer.objects.order_by('primay_key_id').last().primay_key_id + 2

primay_key_id = models.IntegerField(
    default=(Customer.objects.order_by('primay_key_id').last().primay_key_id + 2),
    primary_key=True)
or
from django.db import transaction

# Uncomment the commented lines for Django versions less than 2.0
def save(self):
    """Get the last id from the database and increment it before saving."""
    # with transaction.atomic():
    #     top = Customer.objects.select_for_update(nowait=True).order_by('-customer_customerid')[0]  # Ensures a lock on the database
    top = Customer.objects.order_by('-id')[0]
    self.id = top.id + 1  # use + 2 here if you want the id to advance by 2
    super(Customer, self).save()
The above code would not have a concurrency issue on Django 2.0, because:

As of Django 2.0, related rows are locked by default (not sure what the behaviour was before) and the rows to lock can be specified in the same style as select_related using the of parameter.

For lower versions, you need to make it atomic.
or
from django.db import transaction


def increment():
    with transaction.atomic():
        ids = Customer.objects.all()
        length = len(ids) - 1
        if length <= 0:  # Changed to handle empty entries
            return 1
        else:
            id = ids[length].customer_customerid
            return id + 2
or
from django.db import transaction


def increment():
    with transaction.atomic():
        return Customer.objects.select_for_update(nowait=True).order_by('-customer_customerid')[0]  # Ensures an atomic approach
and set the primary key in the model to an IntegerField, so that every new entry uses primary_key_field=increment(). Then, in your models.py, set the primary key to:

# import increment from wherever you defined it
primay_key_id = models.IntegerField(default=increment(), primary_key=True)
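One caveat worth flagging (my note, not part of the original answer): default=increment() calls the function once when models.py is imported and freezes that value. Django's default also accepts a callable, which it invokes for every new row:

# Pass the callable itself so increment() runs for each new object,
# instead of using the single value computed at import time.
primay_key_id = models.IntegerField(default=increment, primary_key=True)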
I have two tables, MeasuredController and MeasuredGrid, with no relation between them (no foreign key, many-to-many, etc.). In the admin I have to show two fields of MeasuredGrid, power and status, in two different columns, where MeasuredController's senddate equals MeasuredGrid's senddate. With the code below, the database is hit twice for each object. Is there a way to avoid that, for example with something like select_related or a caching approach?
list_display = ("grid_status", "grid_power")

def grid_status(self, obj):
    STATUS_CHOICES = {0: "Outage", 1: "No Outage"}
    mobj = MeasuredGrid.objects.filter(senddate=obj.senddate).latest("senddate")
    try:
        return STATUS_CHOICES[int(mobj.status)], 2
    except:
        pass
grid_status.short_description = 'Grid Status'

def grid_power(self, obj):
    mobj = MeasuredGrid.objects.filter(senddate=obj.senddate).latest("senddate")
    return mobj.power
grid_power.short_description = 'Grid Power[W]'
You can use the cache framework. It is very easy:
# coding: utf-8
from django.core.cache import cache
from django.contrib import admin

from .models import MeasuredController, MeasuredGrid


class MeasuredControllerAdmin(admin.ModelAdmin):
    list_display = ("grid_status", "grid_power")

    STATUS_CHOICES = {
        0: "Outage",
        1: "No Outage",
    }

    def grid_status(self, obj):
        mobj = self._get_mobj_data(obj)
        return mobj['status']
    grid_status.short_description = 'Grid Status'

    def grid_power(self, obj):
        mobj = self._get_mobj_data(obj)
        return mobj['power']
    grid_power.short_description = 'Grid Power[W]'

    def _get_mobj_data(self, obj):
        """Get a relevant MeasuredGrid object for a given MeasuredController."""
        data = cache.get('mobj_%s' % obj.pk)
        if data is not None:
            return data

        mobj = MeasuredGrid.objects.filter(senddate=obj.senddate).latest("senddate")

        status = None
        try:
            status = self.STATUS_CHOICES[int(mobj.status)], 2
        except:  # <---------- Not the best decision. You probably need ValueError or KeyError
            pass

        data = {
            "id": mobj.pk,
            "power": mobj.power,
            "status": status,
        }
        cache.set('mobj_%s' % obj.pk, data)  # the default timeout is 300 seconds
        return data
NB: the default cache backend is django.core.cache.backends.locmem.LocMemCache, so the cache will work even in a dev environment (i.e. DEBUG = True).
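If you want to tweak the backend or the timeout, here is a minimal sketch of the corresponding settings.py entry (the 300-second value simply mirrors the default used by cache.set above):

# settings.py - explicit cache configuration; any other backend
# (memcached, Redis, database) is configured the same way here.
CACHES = {
    "default": {
        "BACKEND": "django.core.cache.backends.locmem.LocMemCache",
        "TIMEOUT": 300,  # seconds
    }
}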
Recently I got a SQLAlchemy InvalidRequestError.
The error log shows:

InvalidRequestError: Transaction <sqlalchemy.orm.session.SessionTransaction object at
0x106830dd0> is not on the active transaction list

In what circumstances will this error be raised?
-----Edit----
# the following two lines are actually in my decorator
s = Session()
s.add(model1)
# refer to <http://techspot.zzzeek.org/2012/01/11/django-style-database-routers-in-sqlalchemy/>
s2 = Session().using_bind('master')
model2 = s2.query(Model2).with_lockmode('update').get(1)
model2.somecolumn = 'new'
s2.commit()
Here the exception is raised.
-----Edit2 -----
s = Session().using_bind('master')
model = Model(user_id=123456)
s.add(model)
s.flush()
# here, the exception is raised.
# I added a log in get_bind() of RoutingSession: when doing the flush, _name is None, so it returns engines['slave'].
# If I use commit() instead of flush(), it commits successfully.
I changed the using_bind method to the following, and it works well:
def using_bind(self, name):
    self._name = name
    return self
The previous RoutingSession:
class RoutingSession(Session):
    _name = None

    def get_bind(self, mapper=None, clause=None):
        logger.info(self._name)
        if self._name:
            return engines[self._name]
        elif self._flushing:
            logger.info('master')
            return engines['master']
        else:
            logger.info('slave')
            return engines['slave']

    def using_bind(self, name):
        s = RoutingSession()
        vars(s).update(vars(self))
        s._name = name
        return s
That's an internal assertion which should never occur. There's no way to answer this question without at least a full stack trace; perhaps you are improperly using the Session in a concurrent fashion, or manipulating its internals. I can only reproduce that exception by manipulating private methods or state pertaining to the Session object.
Here's that:
from sqlalchemy.orm import Session

s = Session()
s2 = Session()

t = s.transaction
t2 = s2.transaction

s2.transaction = t  # nonsensical assignment of the SessionTransaction
                    # from one Session to also be referred to by another,
                    # corrupts the transaction chain by leaving out "t2".
                    # ".transaction" should never be assigned to on the outside

t2.rollback()  # triggers the assertion case
Basically, the above should never happen, since you're not supposed to assign to .transaction; that's a read-only attribute.