Use Python for live search on MySQL db

I'm new to Python and am working on a simple desktop application to read records from a MySQL db. I need to implement a live search through the MySQL db using a Tkinter Entry widget: when the user presses a key, an auto-suggestion list should be generated with auto-complete options to choose from...
At the moment the code below doesn't work. What is wrong?
#-*- coding: utf-8 -*-
import Tkinter
from Tkinter import *
import MySQLdb

top = Tkinter.Tk()
top.minsize(300,300)
top.geometry("500x500")

# here we make text input field
E1 = Entry(top, bd=2)
E1.pack(side=RIGHT)
Lb1 = Listbox(E1) # here the list generated from entry but covering it completely is bad ??

def clickme(x):
    txtt = E1.get()
    txt = txtt + "%"
    # connection
    db = MySQLdb.connect("127.0.0.1", "root", "123456", "test2", use_unicode=True, charset="utf8")
    if db: print "connected"
    cursor = db.cursor()
    cursor.execute("SELECT name FROM `table` WHERE name LIKE '%s' " % (txt))
    #------------
    res = cursor.fetchall()
    i = 0
    for query in res:
        i += 1
        lngth = len(query[0])
        u = query[0].encode('utf-8')
        Lb1.delete(0, lngth)
        if len(txtt) > 0:
            Lb1.insert(i, u)
            Lb1.pack()
        else:
            Lb1.delete(0, lngth)
            Lb1.pack_forget()

top.bind("<Key>", clickme)
top.mainloop()

I don't work with Tkinter, so I don't know how to put the Listbox next to the Entry, but I made some modifications.
If you type text in the Entry, the Listbox is shown with data from the db.
If you remove the text from the Entry, the Listbox is hidden.
#!/usr/bin/python
#-*- coding: utf-8 -*-
import Tkinter
from Tkinter import *
import MySQLdb

#----------------------------------------------------------------------
class MainWindow():

    def __init__(self, root):
        frame = Frame(root, width=500, height=500)
        #root.minsize(300,300)
        frame.pack()
        # here we make text input field
        self.E1 = Entry(frame, bd=2)
        self.E1.pack(side=TOP)
        # here the list generated from entry but covering it completely is bad ??
        self.Lb1 = Listbox(frame, bd=2)
        #Lb1.pack(side=BOTTOM)
        root.bind("<Key>", self.clickme)
        # open database (only once) at start program
        self.db = MySQLdb.connect("127.0.0.1", "root", "password", "test", use_unicode=True, charset="utf8")

    #-------------------
    def __del__(self):
        # close database on exit
        self.db.close()

    #-------------------
    def clickme(self, x):
        txt = self.E1.get()
        self.Lb1.delete(0, END)  # delete all on list
        if txt == '':
            self.Lb1.pack_forget()  # hide list
        else:
            self.Lb1.pack(side=BOTTOM)  # show list
            txt_for_query = txt + "%"
            cursor = self.db.cursor()
            cursor.execute("SELECT name FROM `table` WHERE name LIKE '%s'" % (txt_for_query))
            res = cursor.fetchall()
            for line in res:
                self.Lb1.insert(END, line[0].encode('utf-8'))  # append list
            cursor.close()

#----------------------------------------------------------------------
root = Tk()
MainWindow(root)
root.mainloop()
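One caveat worth adding (not part of the original answer): building the LIKE pattern with the % string operator leaves the query open to SQL injection and breaks on input containing quotes. A minimal sketch of the same lookup using MySQLdb's parameter substitution, reusing the table and column names from the question:

def fetch_suggestions(db, prefix):
    # the driver escapes the user input; the wildcard is appended to the value, not to the SQL text
    cursor = db.cursor()
    cursor.execute("SELECT name FROM `table` WHERE name LIKE %s", (prefix + "%",))
    rows = cursor.fetchall()
    cursor.close()
    return [row[0] for row in rows]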

Related

Empty data in SQL table

I am learning Tkinter and have made a program (just for practice) that takes user input in an Entry field and saves what the user has entered into a MySQL database when a submit button is clicked.
Code:
from tkinter import *
import tkinter
import mysql.connector
from tkinter import *
import mysql.connector
import random

DB = mysql.connector.connect(
    host = "localhost",
    user = "Lincoln",
    password = "lincoln110904#",
    database = "test"
)
cursor = DB.cursor()

gui2 = Tk()
gui2.title("Airline Ticket Booking System")
gui2.iconbitmap("C:/Users/keepa/OneDrive/Desktop/icon.ico")
gui2.maxsize(width=1000, height=70)
gui2.minsize(width=1000, height=700)

data = Entry(gui2, text = 'food name', textvariable="data_var")
data.pack()
data_var = tkinter.StringVar(data).get()

user_id = random.randint(1128, 9721)

def submit_it():
    sql = "INSERT INTO user_time(user_id, time) VALUES(%s, %s)"
    values = (user_id, str(data_var))
    cursor.execute(sql, values)
    DB.commit()

submit = Button(gui2, text = 'submit', command=submit_it)
submit.pack()

gui2.mainloop()
The output in the MySQL database shows the time column empty. Can anyone help explain why the data in the time column is blank?
You need to:
use a reference to a StringVar instead of a string for the textvariable option
get the input content when it is needed, instead of reading it just after the entry is created:
...
data_var = tkinter.StringVar()
data = tkinter.Entry(gui2, text='food name', textvariable=data_var)
...

def submit_it():
    sql = "INSERT INTO user_time (user_id, time) VALUES (%s, %s)"
    # get the input data from the Entry here
    values = (user_id, data_var.get())
    cursor.execute(sql, values)
    DB.commit()
Also note that wildcard import is not recommended.
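For reference, a minimal self-contained sketch of the corrected flow with an explicit import instead of a wildcard one (the connection credentials and the fixed user_id are placeholders; the user_time table is the one from the question):

import tkinter as tk
import mysql.connector

DB = mysql.connector.connect(host="localhost", user="user", password="pw", database="test")
cursor = DB.cursor()

root = tk.Tk()
data_var = tk.StringVar()  # holds the Entry's current text
tk.Entry(root, textvariable=data_var).pack()

def submit_it():
    # read the Entry at click time and let the connector substitute the values
    cursor.execute("INSERT INTO user_time (user_id, time) VALUES (%s, %s)", (1, data_var.get()))
    DB.commit()

tk.Button(root, text="submit", command=submit_it).pack()
root.mainloop()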

Not able to export table into output file due to secure_file_priv

I'm using Windows 7 and MySQL 8.0. I tried to edit my.ini after stopping the MySQL service. First of all, when I tried to save my.ini with secure_file_priv = "", it said access denied, so I saved it as 'my1.ini', deleted 'my.ini', and renamed 'my1.ini' back to 'my.ini'. Now when I try to start the MySQL80 service from Administrative Tools > Services, I am unable to start it again. I've also tried this from the CLI client, but it raises the secure_file_priv issue. How do I fix it? I've been able to store the scraped data in a MySQL database using Scrapy, but I'm not able to export it to my project directory.
#pipelines.py
from itemadapter import ItemAdapter
import mysql.connector

class QuotewebcrawlerPipeline(object):

    def __init__(self):
        self.create_connection()
        self.create_table()
        #self.dump_database()

    def create_connection(self):
        """
        This method will create the database connection & the cursor object
        """
        self.conn = mysql.connector.connect(host = 'localhost',
                                            user = 'root',
                                            passwd = 'Pxxxx',
                                            database = 'itemcontainer'
                                            )
        self.cursor = self.conn.cursor()

    def create_table(self):
        self.cursor.execute(""" DROP TABLE IF EXISTS my_table""")
        self.cursor.execute(""" CREATE TABLE my_table (
                            Quote text,
                            Author text,
                            Tag text)"""
                            )

    def process_item(self, item, spider):
        #print(item['quote'])
        self.store_db(item)
        return item

    def store_db(self, item):
        """
        This method is used to write the scraped data from item container into the database
        """
        #pass
        self.cursor.execute(""" INSERT INTO my_table VALUES(%s,%s,%s)""", (item['quote'][0], item['author'][0],
                                                                           item['tag'][0])
                            )
        self.conn.commit()
        #self.dump_database()

    # def dump_database(self):
    #     self.cursor.execute("""USE itemcontainer;SELECT * from my_table INTO OUTFILE 'quotes.txt'""",
    #                         multi = True
    #                         )
    #     print("Data saved to output file")
#item_container.py
import scrapy
from ..items import QuotewebcrawlerItem

class ItemContainer(scrapy.Spider):
    name = 'itemcontainer'
    start_urls = [
        "http://quotes.toscrape.com/"
    ]

    def parse(self, response):
        items = QuotewebcrawlerItem()
        all_div_quotes = response.css("div.quote")
        for quotes in all_div_quotes:
            quote = quotes.css(".text::text").extract()
            author = quotes.css(".author::text").extract()
            tag = quotes.css(".tag::text").extract()
            items['quote'] = quote
            items['author'] = author
            items['tag'] = tag
            yield items
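One way to sidestep secure_file_priv entirely is to fetch the rows on the client side and write the file from Python, rather than asking the server to run SELECT ... INTO OUTFILE. A sketch under that assumption (the connection details mirror the pipeline above; quotes.txt is an illustrative path):

import mysql.connector

def dump_table_to_file(path="quotes.txt"):
    # the client writes the file locally, so the server's secure_file_priv setting never applies
    conn = mysql.connector.connect(host='localhost', user='root', passwd='Pxxxx', database='itemcontainer')
    cursor = conn.cursor()
    cursor.execute("SELECT Quote, Author, Tag FROM my_table")
    with open(path, "w", encoding="utf-8") as f:
        for quote, author, tag in cursor.fetchall():
            f.write("%s\t%s\t%s\n" % (quote, author, tag))
    cursor.close()
    conn.close()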

Pipeline doesn't write to MySQL but also gives no error

I've tried to implement this pipeline in my spider.
After installing the necessary dependencies I am able to run the spider without any errors but for some reason it doesn't write to my database.
I'm pretty sure something is going wrong with connecting to the database. When I enter a wrong password, I still don't get any error.
When the spider has scraped all the data, it takes a few minutes before it starts dumping the stats.
2017-08-31 13:17:12 [scrapy] INFO: Closing spider (finished)
2017-08-31 13:17:12 [scrapy] INFO: Stored csv feed (27 items) in: test.csv
2017-08-31 13:24:46 [scrapy] INFO: Dumping Scrapy stats:
Pipeline:
import MySQLdb.cursors
from twisted.enterprise import adbapi
from scrapy.xlib.pydispatch import dispatcher
from scrapy import signals
from scrapy.utils.project import get_project_settings
from scrapy import log

SETTINGS = {}
SETTINGS['DB_HOST'] = 'mysql.domain.com'
SETTINGS['DB_USER'] = 'username'
SETTINGS['DB_PASSWD'] = 'password'
SETTINGS['DB_PORT'] = 3306
SETTINGS['DB_DB'] = 'database_name'

class MySQLPipeline(object):

    @classmethod
    def from_crawler(cls, crawler):
        return cls(crawler.stats)

    def __init__(self, stats):
        print "init"
        # Instantiate DB
        self.dbpool = adbapi.ConnectionPool('MySQLdb',
                                            host=SETTINGS['DB_HOST'],
                                            user=SETTINGS['DB_USER'],
                                            passwd=SETTINGS['DB_PASSWD'],
                                            port=SETTINGS['DB_PORT'],
                                            db=SETTINGS['DB_DB'],
                                            charset='utf8',
                                            use_unicode = True,
                                            cursorclass=MySQLdb.cursors.DictCursor
                                            )
        self.stats = stats
        dispatcher.connect(self.spider_closed, signals.spider_closed)

    def spider_closed(self, spider):
        print "close"
        """ Cleanup function, called after crawling has finished to close open
            objects.
            Close ConnectionPool. """
        self.dbpool.close()

    def process_item(self, item, spider):
        print "process"
        query = self.dbpool.runInteraction(self._insert_record, item)
        query.addErrback(self._handle_error)
        return item

    def _insert_record(self, tx, item):
        print "insert"
        result = tx.execute(
            " INSERT INTO matches(type,home,away,home_score,away_score) VALUES (soccer," + item["home"] + "," + item["away"] + "," + item["score"].explode("-")[0] + "," + item["score"].explode("-")[1] + ")"
        )
        if result > 0:
            self.stats.inc_value('database/items_added')

    def _handle_error(self, e):
        print "error"
        log.err(e)
Spider:
import scrapy
import dateparser
from crawling.items import KNVBItem

class KNVBspider(scrapy.Spider):
    name = "knvb"
    start_urls = [
        'http://www.knvb.nl/competities/eredivisie/uitslagen',
    ]
    custom_settings = {
        'ITEM_PIPELINES': {
            'crawling.pipelines.MySQLPipeline': 301,
        }
    }

    def parse(self, response):
        # www.knvb.nl/competities/eredivisie/uitslagen
        for row in response.xpath('//div[@class="table"]'):
            for div in row.xpath('./div[@class="row"]'):
                match = KNVBItem()
                match['home'] = div.xpath('./div[@class="value home"]/div[@class="team"]/text()').extract_first()
                match['away'] = div.xpath('./div[@class="value away"]/div[@class="team"]/text()').extract_first()
                match['score'] = div.xpath('./div[@class="value center"]/text()').extract_first()
                match['date'] = dateparser.parse(div.xpath('./preceding-sibling::div[@class="header"]/span/span/text()').extract_first(), languages=['nl']).strftime("%d-%m-%Y")
                yield match
If there are better pipelines available to do what I'm trying to achieve that'd be welcome as well. Thanks!
Update:
With the link provided in the accepted answer I eventually got to this function that's working (and thus solved my problem):
def process_item(self, item, spider):
    print "process"
    query = self.dbpool.runInteraction(self._insert_record, item)
    query.addErrback(self._handle_error)
    query.addBoth(lambda _: item)
    return query
Take a look at this for how to use adbapi with MySQL for saving scraped items. Note the difference between your process_item and their process_item implementation: while you return the item immediately, they return the Deferred object produced by runInteraction, which resolves to the item upon completion. I think this is the reason your _insert_record never gets called.
If you can see the insert in your output that's already a good sign.
I'd rewrite the insert function this way:
def _insert_record(self, tx, item):
    print "insert"
    raw_sql = "INSERT INTO matches(type,home,away,home_score,away_score) VALUES ('%s', '%s', '%s', '%s', '%s')"
    sql = raw_sql % ('soccer', item['home'], item['away'], item['score'].explode('-')[0], item['score'].explode('-')[1])
    print sql
    result = tx.execute(sql)
    if result > 0:
        self.stats.inc_value('database/items_added')
It allows you to print and debug the SQL you're sending. In your version the string values are not wrapped in ', which is a syntax error in MySQL.
I'm not sure about your last values (score) so I treated them as strings.
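A step further, not part of the original answer: tx.execute accepts a parameter tuple just like a plain cursor, so the quoting and escaping can be left to the driver; also note that Python strings have split rather than explode. A sketch under those assumptions:

def _insert_record(self, tx, item):
    # the driver substitutes and escapes the values, so no manual quoting is needed
    home_score, away_score = item['score'].split('-')
    result = tx.execute(
        "INSERT INTO matches (type, home, away, home_score, away_score) VALUES (%s, %s, %s, %s, %s)",
        ('soccer', item['home'], item['away'], home_score, away_score)
    )
    if result > 0:
        self.stats.inc_value('database/items_added')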

Retrieving MySQL with Kivy

I have a Kivy app whose output shows placeholder Box No. labels.
I want to replace the Box No. text with strings retrieved from MySQL.
So far I have tried to add the MySQL query to the Python script:
class RemoveScreen(MyLayout):
    def __init__(self, **kwargs):
        db = MySQLdb.connect("localhost", "root", "[PASSWORD]", "tcs_microrage_crm")
        cursor = db.cursor()
        self.var = StringVar()
        self.label1 = Label(self, text=0, textvariable=self.var)
        myvar = str(self.var)
        #http://stackoverflow.com/questions/775296/python-mysql-parameterized-queries
        cursor.execute("SELECT part_name FROM stock_lists WHERE part_number = %s", (myvar))
        self.myvar = StringVar()
        self.myvar.set(cursor.fetchone())
        self.label2 = Label(self, text=0, textvariable=myvar)
But this didn't work.
Q: How can I do MySQL queries and print the individual strings in the kv file?
To show you how you could do that, I made a little search example.
It searches for fruit names in the database and outputs each name and price to the table.
from kivy.app import App
import MySQLdb
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.gridlayout import GridLayout
from kivy.uix.label import Label
from kivy.uix.button import Button
from kivy.uix.textinput import TextInput

class DbCon:
    def __init__(self):
        self.db = MySQLdb.connect(user="root", passwd="pw", db="kivy")
        self.c = self.db.cursor()

    def get_rows(self, search=""):
        self.c.execute("SELECT * FROM fruit WHERE name REGEXP '.*%s.*' LIMIT 3" % search)
        return self.c.fetchall()

class Table(BoxLayout):
    def __init__(self, **kwargs):
        super(Table, self).__init__(**kwargs)
        self.orientation = "vertical"
        self.search_field = BoxLayout(orientation="horizontal")
        self.search_input = TextInput(text='search', multiline=False)
        self.search_button = Button(text="search", on_press=self.search)
        self.search_field.add_widget(self.search_input)
        self.search_field.add_widget(self.search_button)
        self.add_widget(self.search_field)
        self.add_widget(Label(text="table"))
        self.table = GridLayout(cols=2, rows=4)
        self.table.add_widget(Label(text="Fruit"))
        self.table.add_widget(Label(text="Price"))
        self.rows = [[Label(text="item"), Label(text="price")],
                     [Label(text="item"), Label(text="price")],
                     [Label(text="item"), Label(text="price")]]
        for item, price in self.rows:
            self.table.add_widget(item)
            self.table.add_widget(price)
        self.add_widget(self.table)
        self.db = DbCon()
        self.update_table()

    def update_table(self, search=""):
        for index, row in enumerate(self.db.get_rows(search)):
            self.rows[index][0].text = row[1]
            self.rows[index][1].text = str(row[2])

    def clear_table(self):
        for index in range(3):
            self.rows[index][0].text = ""
            self.rows[index][1].text = ""

    def search(self, *args):
        self.clear_table()
        self.update_table(self.search_input.text)

class MyApp(App):
    def build(self):
        return Table()

MyApp().run()
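One caveat about the example above (my note, not the original answer's): get_rows interpolates the search text straight into the SQL, so a stray quote in the input breaks the query. A sketch of the same method with the pattern passed as a bound parameter instead:

def get_rows(self, search=""):
    # build the REGEXP pattern in Python and let MySQLdb escape it as a bound parameter
    self.c.execute("SELECT * FROM fruit WHERE name REGEXP %s LIMIT 3", (".*%s.*" % search,))
    return self.c.fetchall()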

Scrapy / Pipeline not inserting data to MySQL database

I'm making a pipeline in Scrapy to store scraped data in a MySQL database. When the spider is run in the terminal it works perfectly, and the pipeline is even opened. However, the data is not being sent to the database. Any help appreciated! :)
Here's the pipeline code:
import sys
import MySQLdb
import hashlib
from scrapy.exceptions import DropItem
from scrapy.http import Request
from tutorial.items import TutorialItem

class MySQLTest(object):
    def __init__(self):
        db = MySQLdb.connect(user='root', passwd='', host='localhost', db='python')
        cursor = db.cursor()

    def process_item(self, spider, item):
        try:
            cursor.execute("INSERT INTO info (venue, datez) VALUES (%s, %s)", (item['artist'], item['date']))
            self.conn.commit()
        except MySQLdb.Error, e:
            print "Error %d: %s" % (e.args[0], e.args[1])
        return item
and here's the spider code:
import scrapy # Import required libraries.
from scrapy.selector import HtmlXPathSelector # Allows for path detection in a website's code.
from scrapy.spider import BaseSpider # Used to create a simple spider to extract data.
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor # Needed for the extraction of href links in HTML to crawl further pages.
from scrapy.contrib.spiders import CrawlSpider # Needed to make the crawl spider.
from scrapy.contrib.spiders import Rule # Allows specified rules to affect what the link
import spotipy
import soundcloud
import mysql.connector
from tutorial.items import TutorialItem

class AllGigsSpider(CrawlSpider):
    name = "allGigs" # Name of the Spider. In the command prompt, when in the correct folder, enter "scrapy crawl allGigs".
    allowed_domains = ["www.allgigs.co.uk"] # Allowed domains is a String NOT a URL.
    start_urls = [
        "http://www.allgigs.co.uk/whats_on/London/clubbing-1.html",
        "http://www.allgigs.co.uk/whats_on/London/festivals-1.html",
        "http://www.allgigs.co.uk/whats_on/London/comedy-1.html",
        "http://www.allgigs.co.uk/whats_on/London/theatre_and_opera-1.html",
        "http://www.allgigs.co.uk/whats_on/London/dance_and_ballet-1.html"
    ] # Specify the starting points for the web crawler.
    rules = [
        Rule(SgmlLinkExtractor(restrict_xpaths='//div[@class="more"]'), # Search the start URLs for
             callback="parse_me",
             follow=True),
    ]

    def parse_me(self, response):
        for info in response.xpath('//div[@class="entry vevent"]|//div[@class="resultbox"]'):
            item = TutorialItem() # Extract items from the items folder.
            item['artist'] = info.xpath('.//span[@class="summary"]//text()').extract() # Extract artist information.
            item['date'] = info.xpath('.//span[@class="dates"]//text()').extract() # Extract date information.
            #item['endDate'] = info.xpath('.//abbr[@class="dtend"]//text()').extract() # Extract end date information.
            #item['startDate'] = info.xpath('.//abbr[@class="dtstart"]//text()').extract() # Extract start date information.
            item['genre'] = info.xpath('.//div[@class="header"]//text()').extract()
            yield item # Retrieve items in item.
            client = soundcloud.Client(client_id='401c04a7271e93baee8633483510e263')
            tracks = client.get('/tracks', limit=1, license='cc-by-sa', q=item['artist'])
            for track in tracks:
                print(tracks)
I believe the problem was in my settings.py file, where I had missed a comma... yawn.
ITEM_PIPELINES = {
    'tutorial.pipelines.MySQLTest': 300,
}
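Beyond the settings fix, the pipeline as posted would likely still fail at runtime: db and cursor are local variables in __init__, there is no self.conn, and process_item's arguments are reversed (Scrapy calls it as process_item(item, spider)). A sketch of how it might look, keeping the info table and columns from the question:

import MySQLdb

class MySQLTest(object):
    def __init__(self):
        # keep the connection and cursor on self so process_item can reach them
        self.conn = MySQLdb.connect(user='root', passwd='', host='localhost', db='python')
        self.cursor = self.conn.cursor()

    def process_item(self, item, spider):
        try:
            self.cursor.execute("INSERT INTO info (venue, datez) VALUES (%s, %s)",
                                (item['artist'], item['date']))
            self.conn.commit()
        except MySQLdb.Error as e:
            print("Error %d: %s" % (e.args[0], e.args[1]))
        return item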