A field was previously added to one of my MySQL tables:
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2017-09-14 00:49
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('my_app', '0102_previous_migrations'),
    ]

    operations = [
        migrations.AddField(
            model_name='my_client',
            name='my_team',
            field=models.CharField(choices=[('Unassigned', 'Unassigned'), ('ACT', 'ACT'), ('Korea', 'Korea'), ('National', 'National')], default='Unassigned', max_length=255, verbose_name='My Team'),
        ),
    ]
So users can pick one of the following choices in my UI, and the selection is saved into the my_client table:
Unassigned
ACT
Korea
National
The changes have been deployed, and now a beginner like me would like to change the choices, i.e. remove Korea and add two new ones: NSW and SA.
How would I go about this? Do I need another migration, or do I just need to change those choices in the model?
This is how I use it in my model now:
class Client(MyAppModel):
    TEAM_CHOICES = (
        ('Unassigned', 'Unassigned'),
        ('ACT', 'ACT'),
        ('Korea', 'Korea'),
        ('National', 'National'),
    )
    DEFAULT_TEAM = 'Unassigned'

    my_team = models.CharField(verbose_name='MyTeam', null=False, max_length=255, choices=TEAM_CHOICES, default=DEFAULT_TEAM)
Update:
Based on the comments, I will need a migration too: an AlterField?
Do I also have to update all the existing values in the my_client table, changing any row that currently has Korea to, say, Unassigned, if I want to remove Korea as a choice? What command can I use in the migration?
A high-level approach:
1. Create a migration which only adds the new choices.
2. Write a method that takes all rows with the old choice and assigns them a proper new one, run via migrations.RunPython.
3. Create a migration which removes the old choice.
Figured this out later; hope it helps other beginners like me.
First, update the Client model:
class Client(MyAppModel):
    TEAM_CHOICES = (
        ('Unassigned', 'Unassigned'),
        ('ACT', 'ACT'),
        ('National', 'National'),
        ('NSW', 'NSW'),
        ('SA', 'SA'),
    )
Then enter this command:
python manage.py makemigrations
A migration will be generated:
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('my_app', '0102_previous_migrations'),
    ]

    operations = [
        migrations.AlterField(
            model_name='my_client',
            name='my_team',
            field=models.CharField(choices=[('Unassigned', 'Unassigned'), ('ACT', 'ACT'), ('National', 'National'), ('NSW', 'NSW'), ('SA', 'SA')], default='Unassigned', max_length=255, verbose_name='My Team'),
        ),
    ]
Next, to update all the existing values in the table (as referenced here), enter this command to create an empty migration file:
python manage.py makemigrations --empty my_app
In the new migration file, put something like:
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2017-10-26 06:36
from __future__ import unicode_literals

from django.db import migrations


def set_unassigned(apps, schema_editor):
    # Use the historical model, and only touch rows that still hold the removed choice.
    Client = apps.get_model('my_app', 'my_client')
    for client in Client.objects.filter(my_team='Korea'):
        client.my_team = 'Unassigned'
        client.save()


class Migration(migrations.Migration):

    dependencies = [
        ('my_app', '0104_client_team_update'),
    ]

    operations = [
        migrations.RunPython(set_unassigned),
    ]
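To keep this data migration reversible (so python manage.py migrate can roll back past it without erroring), RunPython also accepts a reverse function; since there is nothing sensible to undo here, Django's built-in no-op can be used. A minimal sketch of the alternative operations list:

operations = [
    # Forward: reassign old 'Korea' rows; backward: do nothing.
    migrations.RunPython(set_unassigned, migrations.RunPython.noop),
]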
model.py:
from django.db import models
from datetime import datetime
from django.db.models import TextField, JSONField, Model

# Create your models here.
class reservation(models.Model):
    res = models.JSONField()
    da = models.DateTimeField(default=datetime.now, blank=True)
tasks.py:
from celery import shared_task

from .models import reservation


@shared_task
def ress():
    content = {
        "customer": 48,
        "reservation_id_pms": str(id),
        "reservation_channel_number": None,
        "reservation_group_id_pms": "ed2b9d55-46d9-4471-a1e9-ad6c00e30661",
        "extra_reservation_code": "550ca1c1",
    }
    reservations = reservation.objects.create(res=content)
    reservations.save()
    res.append(content)
    return None
Error:
from django.db.models import TextField, JSONField, Model
ImportError: cannot import name 'JSONField' from 'django.db.models' (/usr/lib/python3/dist-packages/django/db/models/__init__.py)
It is possible that you are using a version of Django lower than 3.1, where JSONField was not yet available in django.db.models; it was only added there in Django 3.1 (earlier versions offered just the PostgreSQL-specific django.contrib.postgres.fields.JSONField). So try upgrading the version.
If this is not the case, then try installing the simplejson library, as sometimes it does not come bundled with Django:
pip install simplejson
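For reference, a minimal sketch of the two import paths for a JSON field, based on the model above (which one applies depends on the installed Django version):

# Django 3.1+: JSONField lives in django.db.models and works on all supported databases.
from django.db import models

class reservation(models.Model):
    res = models.JSONField()

# On older versions (1.9 - 3.0) there was only the PostgreSQL-specific field:
# from django.contrib.postgres.fields import JSONField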
This is a continuation of this question.
As my Flask app should not write anything to my database, I set up Flask-SQLAlchemy to reflect my database. This way I do not have to change my models when I change my schema:
# app/__init__.py
from flask import Flask
from flask_sqlalchemy import SQLAlchemy

db = SQLAlchemy()

def create_app():
    app = Flask(__name__)
    db.init_app(app)
    with app.app_context():
        db.Model.metadata.reflect(db.engine)
    return app
# app/models.py
from app import db

class Data(db.Model):
    __table__ = db.Model.metadata.tables['data']
But now, if I import the model before the app has been created, I run into errors because the metadata is not populated yet. This is a problem when it comes to testing, for example:
# test.py
import unittest
from app import create_app, db
from app.models import Data

class TestGUI(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        cls.app = create_app()
    # etc ...
This throws KeyError: 'data' at __table__ = db.Model.metadata.tables['data'] when importing from app.models, because the metadata is not populated before create_app() is run.
I did find a solution (thanks to @snakecharmerb). The solution is simply to avoid the problem: do not import app.models before running create_app(). It is a bit hacky, so feel free to post an answer as well if you have a better solution.
My test file now looks like this:
# test.py
import unittest
from app import create_app, db

app = create_app()
from app.models import Data

class TestGUI(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        cls.app = app
    # etc ...
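A possibly cleaner alternative (a sketch only, assuming the same data table; I have not verified it against this exact Flask-SQLAlchemy setup) is SQLAlchemy's DeferredReflection mixin, which lets app.models be imported at any time and defers the actual table reflection until prepare() is called inside create_app():

# app/models.py
from sqlalchemy.ext.declarative import DeferredReflection
from app import db

class Data(DeferredReflection, db.Model):
    __tablename__ = 'data'

# app/__init__.py, inside create_app(), after db.init_app(app):
#     with app.app_context():
#         DeferredReflection.prepare(db.engine)

With this, the import order no longer matters, because no metadata lookup happens at class-definition time.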
I'm trying to connect the alembic library to the databases and sqlalchemy libraries. As a guide, I am using this example link.
My project files:
db.py
from databases import Database
from sqlalchemy import MetaData, create_engine

DATABASE_URL = "postgresql://....@localhost:5432/db"

engine = create_engine(DATABASE_URL)
metadata = MetaData()
database = Database(DATABASE_URL)
models.py
from sqlalchemy import Table, Column, Integer, String, DateTime
from sqlalchemy.sql import func

from db import metadata

notes = Table(
    "notes",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("title", String(50)),
    Column("description", String(50)),
    Column("created_date", DateTime, default=func.now(), nullable=False),
)
env.py (alembic settings)
from db import DATABASE_URL, metadata
....

# add new
target_metadata = metadata
...

# change
def run_migrations_online():
    config.set_main_option('sqlalchemy.url', str(DATABASE_URL))
    connectable = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
When I run
alembic revision --autogenerate -m 'Add notes table'
a new file is created at migrations/versions with this content:
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
I suppose it may be related to the use of the target_metadata = metadata variable. Everything seems to be set up according to the instructions, but the migrations do not work as expected.
If anyone has a similar problem: all you have to do is import the tables from models.py into the env.py file before the metadata object, so the Table definitions are attached to the metadata before Alembic runs its comparison.
env.py
...
from models import notes
from db import DATABASE_URL, metadata
...
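With that import in place, re-running the autogenerate command should produce a revision with real operations. Based on the notes table above, the upgrade function should look roughly like this (a sketch, not verbatim Alembic output; the generated file also imports op from alembic and sqlalchemy as sa):

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'notes',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('title', sa.String(length=50), nullable=True),
        sa.Column('description', sa.String(length=50), nullable=True),
        sa.Column('created_date', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
    )
    # ### end Alembic commands ###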
I'm making a pipeline in Scrapy to store scraped data in a MySQL database. When the spider is run in the terminal it works perfectly; even the pipeline is opened. However, the data is not being sent to the database. Any help appreciated! :)
Here's the pipeline code:
import sys
import MySQLdb
import hashlib
from scrapy.exceptions import DropItem
from scrapy.http import Request
from tutorial.items import TutorialItem


class MySQLTest(object):
    def __init__(self):
        # Keep the connection and cursor on self so process_item can reach them.
        self.db = MySQLdb.connect(user='root', passwd='', host='localhost', db='python')
        self.cursor = self.db.cursor()

    def process_item(self, item, spider):  # Scrapy passes (item, spider), in that order.
        try:
            self.cursor.execute("INSERT INTO info (venue, datez) VALUES (%s, %s)", (item['artist'], item['date']))
            self.db.commit()
        except MySQLdb.Error, e:
            print "Error %d: %s" % (e.args[0], e.args[1])
        return item
And here's the spider code:
import scrapy  # Import required libraries.
from scrapy.selector import HtmlXPathSelector  # Allows for path detection in a website's code.
from scrapy.spider import BaseSpider  # Used to create a simple spider to extract data.
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor  # Needed for the extraction of href links in HTML to crawl further pages.
from scrapy.contrib.spiders import CrawlSpider  # Needed to make the crawl spider.
from scrapy.contrib.spiders import Rule  # Allows specified rules to affect what links are crawled.
import spotipy
import soundcloud
import mysql.connector
from tutorial.items import TutorialItem


class AllGigsSpider(CrawlSpider):
    name = "allGigs"  # Name of the spider. In the command prompt, when in the correct folder, enter "scrapy crawl allGigs".
    allowed_domains = ["www.allgigs.co.uk"]  # Allowed domains is a string, NOT a URL.
    start_urls = [
        "http://www.allgigs.co.uk/whats_on/London/clubbing-1.html",
        "http://www.allgigs.co.uk/whats_on/London/festivals-1.html",
        "http://www.allgigs.co.uk/whats_on/London/comedy-1.html",
        "http://www.allgigs.co.uk/whats_on/London/theatre_and_opera-1.html",
        "http://www.allgigs.co.uk/whats_on/London/dance_and_ballet-1.html"
    ]  # Specify the starting points for the web crawler.
    rules = [
        Rule(SgmlLinkExtractor(restrict_xpaths='//div[@class="more"]'),  # Search the start URLs for further pages.
             callback="parse_me",
             follow=True),
    ]

    def parse_me(self, response):
        for info in response.xpath('//div[@class="entry vevent"]|//div[@class="resultbox"]'):
            item = TutorialItem()  # Extract items from the items folder.
            item['artist'] = info.xpath('.//span[@class="summary"]//text()').extract()  # Extract artist information.
            item['date'] = info.xpath('.//span[@class="dates"]//text()').extract()  # Extract date information.
            #item['endDate'] = info.xpath('.//abbr[@class="dtend"]//text()').extract()  # Extract end date information.
            #item['startDate'] = info.xpath('.//abbr[@class="dtstart"]//text()').extract()  # Extract start date information.
            item['genre'] = info.xpath('.//div[@class="header"]//text()').extract()
            yield item  # Yield the populated item.
            client = soundcloud.Client(client_id='401c04a7271e93baee8633483510e263')
            tracks = client.get('/tracks', limit=1, license='cc-by-sa', q=item['artist'])
            for track in tracks:
                print(tracks)
I believe the problem was in my settings.py file, where I had missed a comma... yawn.
ITEM_PIPELINES = {
    'tutorial.pipelines.MySQLTest': 300,
}
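As a quick sanity check that a pipeline is registered and actually receiving items, a temporary print at the top of process_item will show up in the crawl output (a sketch in the same Python 2 style as the code above; the message text is illustrative only):

class MySQLTest(object):
    def process_item(self, item, spider):
        print "MySQLTest received: %r" % item  # temporary debug line; remove once confirmed
        return item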
I have the following model:
class Transaction(models.Model):
    deviceId = models.CharField(max_length=32, db_index=True)
    transactionTime = models.DateTimeField(db_index=True)

    class Meta:
        index_together = [
            ["deviceId", "transactionTime"],
        ]
When I issue the sql command on the app:
python manage.py sql device
I get only the CREATE TABLE statement and no CREATE INDEX statements.
Is there something I am doing wrong?
You need to use "sqlall":
python manage.py sqlall device
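The sql command prints only the CREATE TABLE statements, while sqlall also includes the index statements. Assuming the app is named device (the table name device_transaction below follows Django's default <app>_<model> naming and is an assumption, and the index name is generated by Django), the output should end with lines along these lines:

CREATE INDEX ... ON "device_transaction" ("deviceId", "transactionTime");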