Empty data in SQL table - mysql

I am learning Tkinter and have made a program (just for practice) which takes user input in an Entry field and saves what the user has entered in a MySQL database when the submit button is clicked.
Code:
from tkinter import *
import tkinter
import mysql.connector
import random

DB = mysql.connector.connect(
    host="localhost",
    user="Lincoln",
    password="lincoln110904#",
    database="test"
)
cursor = DB.cursor()

gui2 = Tk()
gui2.title("Airline Ticket Booking System")
gui2.iconbitmap("C:/Users/keepa/OneDrive/Desktop/icon.ico")
gui2.maxsize(width=1000, height=70)
gui2.minsize(width=1000, height=700)

data = Entry(gui2, text='food name', textvariable="data_var")
data.pack()
data_var = tkinter.StringVar(data).get()

user_id = random.randint(1128, 9721)

def submit_it():
    sql = "INSERT INTO user_time(user_id, time) VALUES(%s, %s)"
    values = (user_id, str(data_var))
    cursor.execute(sql, values)
    DB.commit()

submit = Button(gui2, text='submit', command=submit_it)
submit.pack()

gui2.mainloop()
Output in MySQL database:
Can anyone please help me understand why the data in the time column is empty/blank?

You need to:
use a reference to a StringVar instead of a string for the textvariable option
get the input content when it is needed instead of reading it just after the entry is created:
...
data_var = tkinter.StringVar()
data = tkinter.Entry(gui2, text='food name', textvariable=data_var)
...
def submit_it():
    sql = "INSERT INTO user_time (user_id, time) VALUES (%s, %s)"
    # get the input data from the Entry here
    values = (user_id, data_var.get())
    cursor.execute(sql, values)
    DB.commit()
Also note that wildcard imports (from tkinter import *) are not recommended.
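For reference, here is a minimal sketch of the whole corrected program with an explicit import. The table and column names come from the question; the password is elided, and the sketch drops the unused text= option:

import random
import tkinter as tk
import mysql.connector

DB = mysql.connector.connect(host="localhost", user="Lincoln",
                             password="...", database="test")
cursor = DB.cursor()

gui2 = tk.Tk()
data_var = tk.StringVar()                    # created after Tk(); holds the Entry text
data = tk.Entry(gui2, textvariable=data_var)
data.pack()

user_id = random.randint(1128, 9721)

def submit_it():
    # read the Entry at click time, not at startup
    values = (user_id, data_var.get())
    cursor.execute("INSERT INTO user_time (user_id, time) VALUES (%s, %s)", values)
    DB.commit()

tk.Button(gui2, text='submit', command=submit_it).pack()
gui2.mainloop()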

Related

How to retrieve image from database using python

I want to retrieve an image from a database using Python, but I have a problem when I execute this code:
import mysql.connector
import io
from PIL import Image

connection = mysql.connector.connect(
    host="localhost",
    user="root",
    passwd="pfe_altran",
    database="testes",
)
cursor = connection.cursor()
sql1 = "SELECT * FROM pfe WHERE id = 1"
cursor.execute(sql1)
data2 = cursor.fetchall()
file_like2 = io.BytesIO(data2[0][0])
img1 = Image.open(file_like2)
img1.show()
cursor.close()
connection.close()
and I get this error:
file_like2 = io.BytesIO(data2[0][0])
TypeError: a bytes-like object is required, not 'int'
cursor.fetchall() returns a list of rows, so you need to handle that with a loop:
for row in data2:
    file_like2 = io.BytesIO(row[0])  # assuming row[0] contains the byte form of your image
You can use cursor.fetchmany(size=1) or cursor.fetchone() if you know that your query will return only a single row, or if you only need one row; this way you can work with the row directly and skip the loop.
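A minimal sketch with cursor.fetchone(), assuming the image bytes live in a column named image (that column name is a guess, since the question selects *):

cursor.execute("SELECT image FROM pfe WHERE id = 1")
row = cursor.fetchone()  # returns a single row, or None if there is no match
if row is not None:
    img = Image.open(io.BytesIO(row[0]))  # row[0] is now the BLOB, not the integer id
    img.show()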

api data directly insert into PostgreSQL database with python

main.py
import json, urllib.request, requests
import psycopg2
from psycopg2.extras import execute_values

# Retrieve JSON data from the API
url = "https://datahead.herokuapp.com/api/employeers/"
response = urllib.request.urlopen(url)
data = json.loads(response.read())

# ***** connect to the db *******
try:
    conn = psycopg2.connect("dbname='datahead' user='postgres' host='localhost' password='datahead'")
except:
    print("I am unable to connect to the database")

# cursor
cur = conn.cursor()

fields = [
    'id',        # integer
    'name',      # varchar
    'log_date',  # date
    'log_time',  # timestamp
    'login',     # timestamp
    'logout'     # timestamp
]

for item in data:
    my_data = [item[field] for field in fields]
    insert_query = "INSERT INTO employee VALUES (%s, %s, %s, %s, %s, %s)"
    cur.execute(insert_query, tuple(my_data))
conn.commit()

# close the cursor
cur.close()
# close the connection
conn.close()
Please see my API data format: https://datahead.herokuapp.com/api/employeers/
Will the timestamp type be a problem here? If yes, which data type should I use?
I think login_time should be of type time without time zone in PostgreSQL.
But you would probably be better off using timestamp with time zone to represent the date and time together.
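For illustration, a sketch of turning an API value into an aware datetime, which psycopg2 binds as timestamp with time zone; the raw string format and the single-column INSERT are assumptions about the API and table, and cur is the cursor from the code above:

from datetime import datetime

raw = "2019-06-03T09:30:00Z"  # hypothetical API value
# turn the trailing 'Z' into an explicit offset so fromisoformat() accepts it
login = datetime.fromisoformat(raw.replace("Z", "+00:00"))
# psycopg2 adapts an aware datetime to "timestamp with time zone"
cur.execute("INSERT INTO employee (login) VALUES (%s)", (login,))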

Proper management of database resources: cursor and connection

I am creating a test Flask API and have created a Database class that I use from my main app. I am using pymysql to access my MySQL DB, but I am having trouble figuring out when to close the cursor and connection. Right now I have:
import pymysql

class Database:
    def __init__(self):
        host = '127.0.0.1'
        user = 'root'
        password = ''
        db = 'API'
        self.con = pymysql.connect(host=host, user=user, password=password, db=db,
                                   cursorclass=pymysql.cursors.DictCursor, autocommit=True)
        self.cur = self.con.cursor()

    def getUser(self, id):
        sql = 'SELECT * from users where id = %d'
        self.cur.execute(sql, (id))
        result = self.cur.fetchall()
        return result

    def getAllUsers(self):
        sql = 'SELECT * from users'
        self.cur.execute(sql)
        result = self.cur.fetchall()
        return result

    def AddUser(self, firstName, lastName, email):
        sql = "INSERT INTO `users` (`firstName`, `lastName`, `email`) VALUES (%s, %s, %s)"
        self.cur.execute(sql, (firstName, lastName, email))
I have tried adding self.cur.close() and self.con.close() after each execution of the cursor in the functions, but then the next time I call a function I get an error saying the cursor is closed, or after an insert statement the new value won't show even though it was inserted correctly into MySQL. How do I know when to close the cursor, and how do I start it back up properly with each call to a method?
This sounds like a great use case for a Python context manager. Context managers allow you to properly manage resources, such as a database connection, by letting you specify how your resource's set-up and tear-down should work. You can create your own custom context manager in one of two ways: first, by wrapping your database class and implementing the required methods for the context manager: __init__(), __enter__(), and __exit__(); second, by using the @contextmanager decorator on a function definition and creating a generator for your database resource within that function. I will show both approaches and let you decide which one is your preference.

The __init__() method is the initialization method for your custom context manager, similar to the initialization method used for custom Python classes. The __enter__() method is the setup code for your custom context manager. Lastly, the __exit__() method is the teardown code. Both approaches use these methods; the main difference is that the first approach states them explicitly within your class definition, whereas in the second approach all the code up to your generator's yield statement acts as initialization and setup, and all the code after the yield statement acts as teardown.

I would also consider extracting your user-based database actions into a user model class as well. Something along the lines of:
Custom context manager (class-based approach):
import pymysql

class MyDatabase():
    def __init__(self):
        self.host = '127.0.0.1'
        self.user = 'root'
        self.password = ''
        self.db = 'API'
        self.con = None
        self.cur = None

    def __enter__(self):
        # connect to database
        self.con = pymysql.connect(host=self.host, user=self.user, password=self.password,
                                   db=self.db, cursorclass=pymysql.cursors.DictCursor,
                                   autocommit=True)
        self.cur = self.con.cursor()
        return self.cur

    def __exit__(self, exc_type, exc_val, traceback):
        # params after self are for dealing with exceptions
        self.con.close()
user.py (refactored):
# import your custom context manager created from the step above
# if you called your custom context manager file my_database.py: from my_database import MyDatabase
import <custom_context_manager>

class User:
    def getUser(self, id):
        # note: pymysql placeholders are %s (not %d), and parameters must be a tuple
        sql = 'SELECT * from users where id = %s'
        with MyDatabase() as db:
            db.execute(sql, (id,))
            result = db.fetchall()
        return result

    def getAllUsers(self):
        sql = 'SELECT * from users'
        with MyDatabase() as db:
            db.execute(sql)
            result = db.fetchall()
        return result

    def AddUser(self, firstName, lastName, email):
        sql = "INSERT INTO `users` (`firstName`, `lastName`, `email`) VALUES (%s, %s, %s)"
        with MyDatabase() as db:
            db.execute(sql, (firstName, lastName, email))
Context manager (decorator approach):
from contextlib import contextmanager
import pymysql

@contextmanager
def my_database():
    try:
        host = '127.0.0.1'
        user = 'root'
        password = ''
        db = 'API'
        con = pymysql.connect(host=host, user=user, password=password, db=db,
                              cursorclass=pymysql.cursors.DictCursor, autocommit=True)
        cur = con.cursor()
        yield cur
    finally:
        con.close()
Then within your User class you could use the context manager by first importing the file and then using it much as before:

with my_database() as db:
    sql = <whatever sql stmt you wish to execute>
    # db action
    db.execute(sql)
Hopefully that helps!
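For completeness, hypothetical usage of the refactored class (names taken from the code above) might look like:

user_model = User()
print(user_model.getUser(1))      # opens a connection, queries, then closes it
print(user_model.getAllUsers())
user_model.AddUser('Ada', 'Lovelace', 'ada@example.com')

Each call opens a fresh connection in __enter__() and closes it in __exit__(), which answers the original question of when to close: the context manager does it for you at the end of every with block.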

Use Python for live search on MySQL db

I'm new to Python and am working on a simple desktop application that reads records from a MySQL db. I need to make a live search through the MySQL db using a tkinter Entry: when the user presses a key, an auto-suggestion list should be generated with auto-complete options to choose from.
At the moment the code below doesn't work. What is wrong?
#-*- coding: utf-8 -*-
import Tkinter
from Tkinter import *
import MySQLdb

top = Tkinter.Tk()
top.minsize(300, 300)
top.geometry("500x500")

# here we make text input field
E1 = Entry(top, bd=2)
E1.pack(side=RIGHT)

Lb1 = Listbox(E1)  # here the list generated from entry but covering it completely is bad ??

def clickme(x):
    txtt = E1.get()
    txt = txtt + "%"
    # connection
    db = MySQLdb.connect("127.0.0.1", "root", "123456", "test2", use_unicode=True, charset="utf8")
    if db: print "connected"
    cursor = db.cursor()
    cursor.execute("SELECT name FROM `table` WHERE name LIKE '%s' " % (txt))
    # ------------
    res = cursor.fetchall()
    i = 0
    for query in res:
        i += 1
        lngth = len(query[0])
        u = query[0].encode('utf-8')
        Lb1.delete(0, lngth)
        if len(txtt) > 0:
            Lb1.insert(i, u)
            Lb1.pack()
        else:
            Lb1.delete(0, lngth)
            Lb1.pack_forget()

top.bind("<Key>", clickme)
top.mainloop()
I don't work with Tkinter, so I don't know how to put the Listbox near the Entry, but I made some modifications.
If you write text in the Entry, then the Listbox is shown with data from the db.
If you remove the text from the Entry, then the Listbox is hidden.
#!/usr/bin/python
#-*- coding: utf-8 -*-

import Tkinter
from Tkinter import *
import MySQLdb

#----------------------------------------------------------------------
class MainWindow():

    def __init__(self, root):
        frame = Frame(root, width=500, height=500)
        #root.minsize(300,300)
        frame.pack()

        # here we make text input field
        self.E1 = Entry(frame, bd=2)
        self.E1.pack(side=TOP)

        # here the list generated from entry but covering it completely is bad ??
        self.Lb1 = Listbox(frame, bd=2)
        #Lb1.pack(side=BOTTOM)

        root.bind("<Key>", self.clickme)

        # open database (only once) at start program
        self.db = MySQLdb.connect("127.0.0.1", "root", "password", "test", use_unicode=True, charset="utf8")

    #-------------------

    def __del__(self):
        # close database on exit
        self.db.close()

    #-------------------

    def clickme(self, x):
        txt = self.E1.get()
        self.Lb1.delete(0, END)  # delete all on list
        if txt == '':
            self.Lb1.pack_forget()  # hide list
        else:
            self.Lb1.pack(side=BOTTOM)  # show list
            txt_for_query = txt + "%"
            cursor = self.db.cursor()
            cursor.execute("SELECT name FROM `table` WHERE name LIKE '%s'" % (txt_for_query))
            res = cursor.fetchall()
            for line in res:
                self.Lb1.insert(END, line[0].encode('utf-8'))  # append list
            cursor.close()

#----------------------------------------------------------------------
root = Tk()
MainWindow(root)
root.mainloop()
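As a side note, string interpolation in the SELECT above breaks on quotes in the input and is open to SQL injection; letting the driver bind the parameter is safer. A minimal sketch against the same schema:

cursor = self.db.cursor()
# MySQLdb quotes and escapes the bound value itself, so no manual '%s' quoting
cursor.execute("SELECT name FROM `table` WHERE name LIKE %s", (txt + "%",))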

scrapy and mysql

I am trying to get scrapy to insert crawled data into MySQL. My code crawls fine and collects the data in the buffer, and does not error, but the database is never updated.
'no luck', 'no error'
pipeline.py
from twisted.enterprise import adbapi
from scrapy import log  # needed for log.msg()/log.err() below
import datetime
import MySQLdb.cursors

class SQLStorePipeline(object):

    def __init__(self):
        self.dbpool = adbapi.ConnectionPool('MySQLdb', db='craigs',
            user='bra', passwd='boobs', cursorclass=MySQLdb.cursors.DictCursor,
            charset='utf8', use_unicode=True)

    def process_item(self, items, spider):
        # run db query in thread pool
        query = self.dbpool.runInteraction(self._conditional_insert, items)
        query.addErrback(self.handle_error)
        return items

    def _conditional_insert(self, tx, items):
        # create the record if it doesn't exist;
        # this whole block runs in its own thread
        tx.execute("select * from scraped where link = %s", (items['link'][0], ))
        result = tx.fetchone()
        if result:
            log.msg("Item already stored in db: %s" % items, level=log.DEBUG)
        else:
            tx.execute(
                "insert into scraped (posting_id, email, location, text, title) "
                "values (%s, %s, %s, %s, %s)",
                (items['posting_id'][0],
                 items['email'][1],
                 items['location'][2],
                 items['text'][3],
                 items['title'][4],
                )
            )
            log.msg("Item stored in db: %s" % items, level=log.DEBUG)

    def handle_error(self, e):
        log.err(e)
crawl code
from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import HtmlXPathSelector
from craigs.items import CraigsItem

class MySpider(CrawlSpider):
    name = "craigs"

    f = open("urls.txt")
    start_urls = [url.strip() for url in f.readlines()]
    f.close()

    rules = [Rule(SgmlLinkExtractor(restrict_xpaths=('/html/body/blockquote[3]/p/a',)),
                  follow=True, callback='parse_profile')]

    def parse_profile(self, response):
        items = []
        img = CraigsItem()
        hxs = HtmlXPathSelector(response)
        img['title'] = hxs.select('//h2[contains(@class, "postingtitle")]/text()').extract()
        img['posting_id'] = hxs.select('//html/body/article/section/section[2]/div/p/text()').extract()
        items.append(img)
        return items[0]
        return img[0]  # unreachable
settings.py
BOT_NAME = 'craigs'
BOT_VERSION = '1.0'
SPIDER_MODULES = ['craigs.spiders']
NEWSPIDER_MODULE = 'craigs.spiders'
USER_AGENT = '%s/%s' % (BOT_NAME, BOT_VERSION)
The reason why the pipeline code is not being called at all is that it hasn't been activated. This activation is done by adding a new section to settings.py, as per the Item Pipelines page in the documentation, e.g.:
ITEM_PIPELINES = [
    'craigs.pipeline.SQLStorePipeline',
]
Additionally, your parse_profile function should just return img. You'd only build and return a list of items if a single response page resulted in multiple items.
Activate the pipeline in settings, and use yield instead of return.
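For illustration, the question's parse_profile rewritten to yield the item (selectors copied verbatim from the question):

def parse_profile(self, response):
    hxs = HtmlXPathSelector(response)
    img = CraigsItem()
    img['title'] = hxs.select('//h2[contains(@class, "postingtitle")]/text()').extract()
    img['posting_id'] = hxs.select('//html/body/article/section/section[2]/div/p/text()').extract()
    yield img  # each yielded item is handed to the activated item pipelines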
You should COMMIT the current transaction, which makes the changes permanent.
So after
tx.execute(
    "insert into scraped (posting_id, email, location, text, title) "
    "values (%s, %s, %s, %s, %s)",
    (items['posting_id'][0],
     items['email'][1],
     items['location'][2],
     items['text'][3],
     items['title'][4],
    )
)
you have to
db.commit()
db here is something like
db = MySQLdb.connect(host="localhost", user="root", passwd="1234", db="database_name")
Please try it.