Hello, I am on Windows 10 (Greek locale). I want to fetch some data (the path of a file) from MySQL. The problem is that when I retrieve the path, the folder name comes back as ?????? instead of the Greek text. A sample of the code is the following:
import pymysql as MySQLdb

dbid = "client"
password = "pass"
database = "clients"
serverip = "192.168.168.150"
db = None
cur = None
try:
    db = MySQLdb.connect(host=serverip, user=dbid, passwd=password, db=database, connect_timeout=20)
    cur = db.cursor()
except MySQLdb.Error as e:
    print("Cannot connect with the database:", e)

query1 = "SELECT path FROM `instructions` WHERE clientmac ='%s'" % (str(5555))
try:
    row = None
    cur.execute(query1)
    db.commit()
    row = cur.fetchall()
except MySQLdb.Error as e:
    print("Problem while executing query:", query1, "error:", e)

print(row)
cur.close()
db.close()
Python 3 uses Unicode, so I think the problem is not in Python but in phpMyAdmin. The collation of the path field is "utf8_general_ci". Everything looks fine, so why am I getting 'C:\\Users\\???????\\Documents\\Arduino' instead of 'C:\\Users\\Γιαννης\\Documents\\Arduino'?

I found the answer. I had to add some extra code before executing the query. I post the corrected code below.
import pymysql as MySQLdb

dbid = "client"
password = "pass"
database = "clients"
serverip = "192.168.168.150"
socketip = "192.168.168.18"
db = None
cur = None
try:
    db = MySQLdb.connect(host=serverip, user=dbid, passwd=password, db=database,
                         use_unicode=True, connect_timeout=20)
    cur = db.cursor()
except MySQLdb.Error as e:
    print("Cannot connect with the database:", e)

query1 = "SELECT path FROM `instructions` WHERE clientmac ='%s'" % (str(5555))
try:
    row = None
    # Force the connection and the result set to UTF-8 so Greek characters
    # survive the round trip.
    db.set_charset('utf8')
    cur.execute('SET NAMES utf8;')
    cur.execute('SET CHARACTER SET utf8;')
    cur.execute('SET character_set_connection=utf8;')
    cur.execute(query1)
    db.commit()
    row = cur.fetchall()
except MySQLdb.Error as e:
    print("Problem while executing query:", query1, "error:", e)

print(row)
cur.close()
db.close()
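For what it's worth, recent PyMySQL versions let you declare the character set when connecting, which should make the extra SET statements unnecessary. A minimal sketch reusing the connection details above (the utf8mb4 choice and the parameterized query are my own variations, not part of the original fix):

import pymysql

# Sketch: charset/use_unicode at connect time instead of SET NAMES afterwards.
db = pymysql.connect(host=serverip, user=dbid, password=password, database=database,
                     charset='utf8mb4', use_unicode=True, connect_timeout=20)
cur = db.cursor()
# Parameterized query instead of string formatting.
cur.execute("SELECT path FROM `instructions` WHERE clientmac = %s", ("5555",))
print(cur.fetchall())
cur.close()
db.close()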
I am trying to make a Twitter points program. Basically, you get points based on the number of likes, retweets and replies that your posts with a specified hashtag get. I made an API to fetch these points from a database, but FastAPI is not running all of the functions needed to return the correct values.
API code:
DATABASE_URL = "mysql+mysqlconnector://root:password#localhost:3306/twitterdb"
database = Database(DATABASE_URL)
metadata_obj = MetaData()
engine = create_engine(
DATABASE_URL, connect_args={"check_same_thread": False}
)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
metadata = sqlalchemy.MetaData()
Base = declarative_base()
user_points = sqlalchemy.Table(
"points",
metadata_obj,
sqlalchemy.Column("username", sqlalchemy.String,),
sqlalchemy.Column("rt_points", sqlalchemy.Integer,),
sqlalchemy.Column("reply_points", sqlalchemy.Integer),
sqlalchemy.Column("like_points", sqlalchemy.Integer),
sqlalchemy.Column("total_points", sqlalchemy.Integer)
)
engine = sqlalchemy.create_engine(
DATABASE_URL
)
metadata.create_all(engine)
app = FastAPI()
#app.on_event("startup")
async def connect():
await database.connect()
#app.on_event("shutdown")
async def shutdown():
await database.disconnect()
class UserName(BaseModel):
rt_points: int
reply_points: int
like_points: int
total_points : int
#app.get('/userdata/', response_model=UserName)
async def get_points(user: str):
username=user
metrics.clear()
tweets_list = tweet_id(username)
tweets_list.get_tweet_ids(str(username))
metrics.main()
summing=summer(username)
summing.sum_fun(str(username))
query = user_points.select().where(user_points.c.username == username)
user = await database.fetch_one(query)
return {**user}
if __name__ == "__main__":
uvicorn.run("main:app", reload= True, host="127.0.0.1", port=5000, log_level="info")
code for metrics.py:
import mysql.connector
from mysql.connector import Error
import requests

bearer_token = "YOUR_BEARER_TOKEN"  # placeholder for the Twitter API v2 bearer token

ids = []

class tweet_id:
    def __init__(self, name):
        self.name = name

    def get_tweet_ids(self, name):
        try:
            connection = mysql.connector.connect(host='localhost',
                                                 database='twitterdb',
                                                 user='root',
                                                 password='password')
            cursor = connection.cursor()
            query = "truncate table twitterdb.points"
            query1 = "truncate table twitterdb.Metrics"
            sql_select_query = """SELECT tweetid FROM twitterdb.StreamData WHERE username = %s"""
            # set variable in query
            cursor.execute(query)
            cursor.execute(query1)
            cursor.execute(sql_select_query, (name,))
            # fetch result
            record = cursor.fetchall()
            for row in record:
                ids.append(int(row[0]))
        except mysql.connector.Error as error:
            print("Failed to get record from MySQL table: {}".format(error))
        finally:
            if connection.is_connected():
                cursor.close()
                connection.close()

def create_url():
    tweet_fields = "tweet.fields=public_metrics"
    converted_list = [str(element) for element in ids]
    id_list = ",".join(converted_list)
    url = "https://api.twitter.com/2/tweets?ids={}&{}".format(id_list, tweet_fields)
    return url

# curl 'https://api.twitter.com/2/tweets?ids=1459764778088337413&tweet.fields=public_metrics&expansions=attachments.media_keys&media.fields=public_metrics' --header 'Authorization: Bearer $Bearer

def bearer_oauth(r):
    """
    Method required by bearer token authentication.
    """
    r.headers["Authorization"] = f"Bearer {bearer_token}"
    return r

def connect_to_endpoint(url):
    response = requests.request("GET", url, auth=bearer_oauth)
    print(response.status_code)
    if response.status_code != 200:
        raise Exception(
            "Request returned an error: {} {} {}".format(
                response.status_code, response.text, ids
            )
        )
    return response.json()

def main():
    def append_to_database(json_response):
        # Loop through each tweet
        for tweet in json_response['data']:
            # Tweet ID
            tweetid = tweet['id']
            # Tweet metrics
            retweet_count = tweet['public_metrics']['retweet_count']
            reply_count = tweet['public_metrics']['reply_count']
            like_count = tweet['public_metrics']['like_count']
            quote_count = tweet['public_metrics']['quote_count']
            connect(tweetid, retweet_count, reply_count, like_count, quote_count)

    def connect(tweetid, retweet_count, reply_count, like_count, quote_count):
        """
        connect to MySQL database and insert twitter data
        """
        try:
            con = mysql.connector.connect(host='localhost',
                                          database='twitterdb', user='root',
                                          password='password', charset='utf8')
            if con.is_connected():
                """
                Insert twitter data
                """
                cursor = con.cursor(buffered=True)
                delete_previous_data_query = "truncate table Metrics"
                query = "INSERT INTO Metrics (tweetid,retweet_count,reply_count,like_count,quote_count) VALUES (%s, %s, %s, %s, %s)"
                cursor.execute(delete_previous_data_query)
                cursor.execute(query, (tweetid, retweet_count, reply_count, like_count, quote_count))
                con.commit()
        except Error as e:
            print(e)
        cursor.close()
        con.close()
        return

    url = create_url()
    json_response = connect_to_endpoint(url)
    append_to_database(json_response)

# Function to calculate the sum of points and store it
class summer:
    def __init__(self, name):
        self.name = name

    def sum_fun(self, name):
        try:
            con = mysql.connector.connect(host='localhost',
                                          database='twitterdb', user='root',
                                          password='password', charset='utf8')
            if con.is_connected():
                cursor = con.cursor(buffered=True)

                def create_points_table():
                    query = ("INSERT INTO twitterdb.points(username, rt_points,reply_points,like_points,total_points) "
                             "(SELECT %s, SUM(quote_count + retweet_count) * 150, SUM(reply_count) * 50, "
                             "SUM(like_count) * 10, SUM(quote_count + retweet_count) * 150 + SUM(reply_count) * 50 + "
                             "SUM(like_count) * 10 FROM twitterdb.Metrics)")
                    cursor.execute(query, (name,))
                    con.commit()

                create_points_table()
        except Error as e:
            print(e)
        cursor.close()
        con.close()

def clear():
    """
    connect to MySQL database and clear the points table
    """
    try:
        con = mysql.connector.connect(host='localhost',
                                      database='twitterdb', user='root',
                                      password='password', charset='utf8')
        if con.is_connected():
            cursor = con.cursor(buffered=True)
            clear_points = ("truncate table twitterdb.points")
            cursor.execute(clear_points)
    except Error as e:
        print(e)
    cursor.close()
    con.close()
    return
What happens here is that there's a database named twitterdb with the tables StreamData, Metrics, and points.
StreamData contains the tweet IDs and usernames of the posts that were tweeted with the specified hashtag, and it is built with the streaming API.
The issue is this: suppose I have the usernames mark and ramon in the StreamData table. When I query the API with the username mark, it returns the correct points for mark, but if I then enter something like mark1, or any random value, it returns the points for mark again. If I next enter ramon, it gives the right points for ramon, but entering random values again returns the same points for ramon.
Furthermore, the first time the API is started, entering a random value returns the error raised by the exception defined in the connect_to_endpoint function.
The code logic is as follows:
We enter a username via the API, and the get_tweet_ids function looks up that username in the StreamData table, selects all the tweet IDs corresponding to it and saves them to a list, ids. This list of IDs is passed to the Twitter metrics API endpoint, and the required values from the response are saved to the Metrics table.
Then sum_fun is called to select the sums of likes, RTs and replies from the Metrics table, multiply them by the specified points and save the result to the points table along with the username.
Finally, the API returns the values in the points table matching the username.
How can I get it to stop returning values for random data? If invalid data is given, it should raise the exception in the connect_to_endpoint function, but instead it just returns whatever values were previously in the points table.
I tried multiple approaches, such as clearing the points table before all the other functions run and returning only the values corresponding to the username in the points table, but neither of them worked. When I checked the points table after running the API with a random value, it contained the random username but with the points of the previous valid username.
NOTE: The points table is a temporary table and values are assigned only when an API call is made.
I am a complete beginner at all this and this is more of a pet project I have been working on, so please help out. Any and all help and guidance regarding my logic and design, and a fix for this, will be much appreciated. Thanks.
If the code that you have provided for metrics.py is correct, your problem should come from how you declare the variable ids.
In your code you have declared it as a module-level global, so it will not be reset on every function call or class instantiation.
What you should do is declare it inside get_tweet_ids():
class tweet_id:
    def __init__(self, name):
        self.name = name

    def get_tweet_ids(self, name):
        ids = []  # modification here
        try:
            connection = mysql.connector.connect(host='localhost',
                                                 database='twitterdb',
                                                 user='root',
                                                 password='password')
            cursor = connection.cursor()
            query = "truncate table twitterdb.points"
            query1 = "truncate table twitterdb.Metrics"
            sql_select_query = """SELECT tweetid FROM twitterdb.StreamData WHERE username = %s"""
            # set variable in query
            cursor.execute(query)
            cursor.execute(query1)
            cursor.execute(sql_select_query, (name,))
            # fetch result
            record = cursor.fetchall()
            for row in record:
                ids.append(int(row[0]))
            return ids  # modification here
        except mysql.connector.Error as error:
            print("Failed to get record from MySQL table: {}".format(error))
        finally:
            if connection.is_connected():
                cursor.close()
                connection.close()
With this you will have a fresh ids list on every get_tweet_ids call.
You will have to change the rest of your code to use this return value (for example, pass the returned list on to create_url instead of reading the global).
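As a rough illustration of that change (not part of the original answer, and assuming the surrounding functions from metrics.py above), the caller and create_url could be wired up like this:

# Sketch only: create_url now takes the ids list as a parameter instead of
# reading the module-level global.
def create_url(ids):
    tweet_fields = "tweet.fields=public_metrics"
    id_list = ",".join(str(element) for element in ids)
    return "https://api.twitter.com/2/tweets?ids={}&{}".format(id_list, tweet_fields)

# In the handler, keep the returned list and pass it along:
ids = tweet_id(username).get_tweet_ids(str(username))
url = create_url(ids)
json_response = connect_to_endpoint(url)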
I want to retrieve an image from the database using Python, but I have a problem when I execute this code:
import mysql.connector
import io
from PIL import Image

connection = mysql.connector.connect(
    host="localhost",
    user="root",
    passwd="pfe_altran",
    database="testes",
)
cursor = connection.cursor()
sql1 = "SELECT * FROM pfe WHERE id = 1 "
cursor.execute(sql1)
data2 = cursor.fetchall()
file_like2 = io.BytesIO(data2[0][0])
img1 = Image.open(file_like2)
img1.show()
cursor.close()
connection.close()
and I get this error:

    file_like2 = io.BytesIO(data2[0][0])
TypeError: a bytes-like object is required, not 'int'
cursor.fetchall() returns a list of rows, so you need to handle that with a loop:
for row in data2:
    file_like2 = io.BytesIO(row[0])  # assuming row[0] contains the byte form of your image
You can use cursor.fetchmany(size=1) or cursor.fetchone() if you know that your query returns only a single row, or if you only need one row; that way you can work with it directly and skip the loop.
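A minimal sketch of the fetchone() variant, assuming the image bytes live in a column named image (the real column layout isn't shown in the question), selected explicitly so the first field really is the BLOB:

import io
import mysql.connector
from PIL import Image

connection = mysql.connector.connect(
    host="localhost", user="root", passwd="pfe_altran", database="testes",
)
cursor = connection.cursor()
# Select only the BLOB column so row[0] is guaranteed to be bytes.
cursor.execute("SELECT image FROM pfe WHERE id = 1")
row = cursor.fetchone()
if row is not None:
    Image.open(io.BytesIO(row[0])).show()
cursor.close()
connection.close()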
I am trying to insert each row from about 2000 CSV files into a MySQL table. With the following code I have inserted only one row, from just one file. How can I automate the code so that it inserts all rows from each file? The insertion needs to be done just once.
import pymysql.cursors

connection = pymysql.connect(host='localhost',
                             user='s',
                             password='n9',
                             db='si',
                             charset='utf8mb4',
                             cursorclass=pymysql.cursors.DictCursor)
try:
    with connection.cursor() as cursor:
        sql = "INSERT INTO `TrainsS` (`No.`, `Name`,`Zone`,`From`,`Delay`,`ETA`,`Location`,`To`) VALUES (%s,%s,%s,%s,%s,%s,%s, %s)"
        cursor.execute(sql, ('03', 'P Exp', 'SF', 'HWH', 'none', 'no arr today', 'n/a', 'ND'))
    connection.commit()
finally:
    connection.close()
How about this approach? Put all your .csv files in one folder, use os.walk(folder_location) on that folder to get the locations of all the .csv files, then open them one by one and insert their rows into the required MySQL table:
import pandas as pd
import os
import subprocess
import warnings

import pymysql.cursors

warnings.simplefilter("ignore")

cwd = os.getcwd()
connection = pymysql.connect(host='localhost',
                             user='s',
                             password='n9',
                             db='si',
                             charset='utf8mb4',
                             cursorclass=pymysql.cursors.DictCursor)

# Collect the names of all .csv files under the current directory.
files_csv = []
for subdir, dirs, files in os.walk(cwd):
    files_csv += [fi for fi in files if fi.endswith(".csv")]
print(files_csv)

for i in range(len(files_csv)):
    with open(os.path.join(cwd, files_csv[i])) as f:
        lis = [line.split() for line in f]
        for i, x in enumerate(lis):
            # print("line{0} = {1}".format(i, x))
            # HERE x contains the row data and you can access it individually using x[0], x[1], etc.
            # USE YOUR MySQL INSERTION commands here and insert the x row here.
            with connection.cursor() as cursor:
                sql = "INSERT INTO `TrainsS` (`No.`, `Name`,`Zone`,`From`,`Delay`,`ETA`,`Location`,`To`) VALUES (%s,%s,%s,%s,%s,%s,%s,%s)"
                # cursor.execute(sql, (CONVERTED VALUES FROM x))  -- see the update below
            connection.commit()
Update: getting the values for (CONVERTED VALUES FROM x)
values = ""
for i in range(len(columns)):
values = values + x[i] + "," # Here x[i] gives a record data in ith row. Here i'm just appending the all values to be inserted in the sql table.
values = values[:-1] # Removing the last extra comma.
command = "INSERT INTO `TrainsS` (`No.`, `Name`,`Zone`,`From`,`Delay`,`ETA`,`Location`,`To`) VALUES (" + str(values) + ")"
cursor.execute(command)
#Then commit using connection.commit()
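Building the statement by string concatenation only works if none of the fields need quoting or escaping. A safer variant (a sketch, not part of the original answer) is to let the driver do the parameter binding, reading each file with the csv module and inserting in bulk with executemany:

import csv
import os

import pymysql.cursors

# Sketch: parameterized bulk insert of every row of every CSV file in the
# current directory. Table and column names are taken from the question.
connection = pymysql.connect(host='localhost', user='s', password='n9', db='si',
                             charset='utf8mb4', cursorclass=pymysql.cursors.DictCursor)
sql = ("INSERT INTO `TrainsS` (`No.`, `Name`,`Zone`,`From`,`Delay`,`ETA`,`Location`,`To`) "
       "VALUES (%s,%s,%s,%s,%s,%s,%s,%s)")
try:
    with connection.cursor() as cursor:
        for name in os.listdir('.'):
            if not name.endswith('.csv'):
                continue
            with open(name, newline='') as f:
                # Keep only rows with exactly 8 fields to match the placeholders.
                rows = [row for row in csv.reader(f) if len(row) == 8]
                if rows:
                    cursor.executemany(sql, rows)
    connection.commit()
finally:
    connection.close()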
import psycopg2
import time
import csv

conn = psycopg2.connect(
    host="localhost",
    database="postgres",
    user="postgres",
    password="postgres"
)
cur = conn.cursor()

start = time.time()
with open('combined_category_data_100 copy.csv', 'r') as file:
    reader = csv.reader(file)
    ncol = len(next(reader))                 # number of columns, taken from the header row
    placeholders = ",".join(["%s"] * ncol)   # one %s per column
    for row in reader:
        cur.execute("insert into data values ({})".format(placeholders), row)
    conn.commit()
    print("data entered successfully")
end = time.time()
print(f" time taken is {end - start}")
cur.close()
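Row-by-row execute() can be slow for large files; psycopg2 also ships execute_values for batched inserts. A sketch under the same assumptions about the data table and CSV layout as above:

import csv
import psycopg2
from psycopg2.extras import execute_values

conn = psycopg2.connect(host="localhost", database="postgres",
                        user="postgres", password="postgres")
cur = conn.cursor()

with open('combined_category_data_100 copy.csv', 'r') as file:
    reader = csv.reader(file)
    next(reader)                 # skip the header row
    rows = list(reader)
    # execute_values expands the single %s into batches of row tuples.
    execute_values(cur, "insert into data values %s", rows)
conn.commit()
cur.close()
conn.close()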
I want to display all the data in my table on a webpage using Python when I click a button. Here is my app.py code; can anyone help me finish this, please?
def POST(self):
    form = web.input(name="a", newname="s", number="d")
    conn = MySQLdb.connect(host="localhost", user="root", passwd="", db="testdb")
    x = conn.cursor()
    x.execute("SELECT * FROM details")
    conn.commit()
    items = cursor.fetchall()
    return items()
    conn.rollback()
    conn.close()
    return render.index()
I want to display it on index.html, and the table has 3 columns.
def POST(self):
    form = web.input(name="a", newname="s", number="d")
    conn = MySQLdb.connect(host="localhost", user="root", passwd="", db="testdb")
    cursor = conn.cursor()
    cursor.execute("SELECT * FROM details")
    conn.commit()
    items = cursor.fetchall()
    for row in items:
        print row[0], row[1]
    conn.rollback()
    conn.close()
    return render.index(items)
Run this code, check the output in the terminal, and let me know whether the data is coming through or not.
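For completeness, a minimal sketch of the web.py wiring this assumes (the URL mapping and template directory name are my assumptions, not from the question):

import web
import MySQLdb

urls = ('/', 'Index')                        # assumed URL mapping
render = web.template.render('templates/')   # index.html must live in templates/

class Index:
    def GET(self):
        return render.index([])              # render the page with no rows yet

    def POST(self):
        conn = MySQLdb.connect(host="localhost", user="root", passwd="", db="testdb")
        cursor = conn.cursor()
        cursor.execute("SELECT * FROM details")
        items = cursor.fetchall()
        conn.close()
        return render.index(items)

if __name__ == "__main__":
    web.application(urls, globals()).run()

Inside templates/index.html, the first line should be "$def with (items)" so the rows can be looped over with "$for row in items:" and the three columns printed as $row[0], $row[1] and $row[2].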
What's the best / correct way to test a connection to a MySQL server? Can you, for example, ping it? I'm using MySQLdb and Python.
I want my program to be structured in the following way:

# ... connect to MySQL server
database = MySQLdb.connect(host="127.0.0.1", ...)
while True:
    # ... check to see if the connection is still alive; if not, reconnect
    # ... send data to MySQL ...
    time.sleep(30)
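For example, something along these lines (just a rough sketch, assuming MySQLdb's conn.ping() raises OperationalError when the server is unreachable):

import time
import MySQLdb

def get_connection():
    # Connection details elided, as above.
    return MySQLdb.connect(host="127.0.0.1", user="...", passwd="...", db="...")

database = get_connection()

while True:
    try:
        database.ping()              # cheap server round trip; raises if the link is gone
    except MySQLdb.OperationalError:
        database = get_connection()  # reconnect before sending data
    # ... send data to MySQL ...
    time.sleep(30)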
This is what I have used.
import MySQLdb
from collections import OrderedDict

try:
    import MySQLdb.converters
except ImportError:
    _connarg('conv')

def connect(host='ronak.local', user='my_dev_1', passwd='my_dev_1', db='my_dev1', port=3306):
    try:
        orig_conv = MySQLdb.converters.conversions
        conv_iter = iter(orig_conv)
        convert = dict(zip(conv_iter, [str, ] * len(orig_conv.keys())))
        print "Connecting host=%s user=%s db=%s port=%d" % (host, user, db, port)
        conn = MySQLdb.connect(host, user, passwd, db, port, conv=convert)
    except MySQLdb.Error, e:
        print "Error connecting %d: %s" % (e.args[0], e.args[1])
    return conn

def parse_data_and_description(cursor, data, rs_id):
    res = []
    cols = [d[0] for d in cursor.description]
    for i in data:
        res.append(OrderedDict(zip(cols, i)))
    return res

rs_id = 0

def get_multiple_result_sets():
    global rs_id
    conn = connect()
    cursor = conn.cursor()
    final_list = []
    try:
        conn.autocommit(True)
        # sp and args (the stored procedure name and its argument tuple) are assumed
        # to be defined by the caller.
        cursor.execute("CALL %s%s" % (sp, args))
        while True:
            rs_id += 1
            data = cursor.fetchall()
            listout = parse_data_and_description(cursor, data, rs_id)
            print listout
            if cursor.nextset() is None:
                # This means no more recordsets are available
                break
            print "\n"
            # Consolidate all the cursors in a single list
            final_list.append(listout)
        print final_list
    except MySQLdb.Error, e:
        # Roll back the transaction in case of an exception
        conn.rollback()
        print "Transaction aborted: %d: %s" % (e.args[0], e.args[1])
        cursor.close()
        conn.close()
    else:
        # Commit the transaction in case of no failures/exceptions
        conn.commit()
        print "Transaction succeeded"
        cursor.close()
        conn.close()