i just wrote a small script to port sqlite table to mysql but im realy not sure if i did right.
Im atleast sure, it works to the mysql-connection part, so atleast it already builds mysql connection.
from __future__ import with_statement
import sqlite3
import os
import pymysql
addonPath = "Path-To-SQLITe-Database"
addonPathOrginal = -"Path-To-Database.ini"
connection = sqlite3.connect(os.path.join(addonPath, "db.sqlite"))
#connection.row_factory = sqlite3.Row
connection.text_factory = str
cursor = connection.cursor()
sqliteexecute = cursor.execute
fetchall = cursor.fetchall
databaseinfo = os.path.join(addonPathOrginal, "database.ini")
d = {}
with open(databaseinfo, "r") as fs:
for l in fs.readlines():
l = l.strip(" ").strip("\n")
obj = l.split("=")
if len(obj) != 2: continue
if obj[1] == "": continue
d[obj[0]] = obj[1]
mysqlcon = pymysql.connect(**d)
mysqlcursor = mysqlcon.cursor()
execute = mysqlcursor.execute
execute("SET ##autocommit=1;")
execute("SET sql_notes = 1;")
execute("SET FOREIGN_KEY_CHECKS = 0;")
tablename = "jumptimes" # Convert Table 'jumptimes'
sqliteexecute("SELECT * FROM sqlite_master WHERE type='table' AND tbl_name='%s'" tablename)
#sqliteexecute("SELECT jumptimes FROM sqlite_master")
for x in fetchall():
print "Table: %s" % tablename
#if tablename == "sqlite_sequence": continue
sqliteexecute("SELECT * FROM %s" % tablename)
stuff = fetchall()
if len(stuff) == 0: continue
string = ("%s," * len(stuff[0]))[:-1]
print "Table %s, %i elements, length of string %i" % (tablename, len(stuff),len(stuff[0]))
for y in range(0, len(stuff), 5):
mysqlcursor.executemany("REPLACE INTO " + tablename + " VALUES (" + string + ")", stuff[y:y+5])
connection.close()
execute("SET FOREIGN_KEY_CHECKS = 1;")
mysqlcon.commit()
mysqlcon.close()
database.ini
host=""
user=""
passwd=""
db=""
Related
Is it possible to download all the history snapshots of a report at once? Preferably as a CSV. Would save a lot time instead of clicking into each one individually and selecting save as CSV.
I only see the option to Delete
In PowerShell, you can loop through each snapshot and save them using this example:
<#
Description: Save SSRS Report Snapshots
#>
$sql = "
DECLARE #ReportName NVARCHAR(200) = 'Your Report Name'; --change to NULL for every snapshot
DECLARE #FileFormat NVARCHAR(50) = 'CSV'; --HTML5,PPTX,ATOM,HTML4.0,MHTML,IMAGE,EXCEL (for .xls),EXCELOPENXML (for .xlsx),WORD (for .doc),WORDOPENXML (for .docx),CSV,PDF,XML
DECLARE #FileExtn NVARCHAR(50) = 'csv';
DECLARE #ServerName NVARCHAR(100) = 'http://YourServerName';
DECLARE #DateFrom DATE = CAST(DATEADD(DAY, -1, GETDATE()) AS DATE); --change to NULL for every snapshot
DECLARE #ExportPath NVARCHAR(200) = 'C:\Temp\';
SELECT
--[ReportID] = [c].[itemid]
-- , [ReportName] = [c].[name]
-- , [ReportPath] = [c].[path]
-- , [SnaphsotDate] = FORMAT([h].[snapshotdate], 'dd-MMM-yyyy')
-- , [SnapshotDescription] = [s].[DESCRIPTION]
-- , [SnapshotEffectiveParams] = [s].[effectiveparams]
-- , [SnapshotQueryParams] = [s].[queryparams]
-- , [ScheduleName] = [sc].[name]
-- , [ScheduleNextRunTime] = CONVERT(VARCHAR(20), [sc].[nextruntime], 113)
[ExportFileName] = #ExportPath + REPLACE([c].[name], ' ', '_') + '_' + FORMAT([h].[snapshotdate], 'yyyyMMdd_HHmm') + '.' + #FileExtn
, [SnapshotUrl] =
#ServerName
+ '/ReportServer/Pages/ReportViewer.aspx?'
+ [c].[path] + '&rs:Command=Render&rs:Format='
+ #FileFormat + '&rs:Snapshot='
+ FORMAT([h].[snapshotdate], 'yyyy-MM-ddTHH:mm:ss')
FROM
[ReportServer].[dbo].[History] AS [h] WITH(NOLOCK)
INNER JOIN [ReportServer].[dbo].[SnapshotData] AS [s] WITH(NOLOCK) ON [h].[snapshotdataid] = [s].[snapshotdataid]
INNER JOIN [ReportServer].[dbo].[Catalog] AS [c] WITH(NOLOCK) ON [c].[itemid] = [h].[reportid]
INNER JOIN [ReportServer].[dbo].[ReportSchedule] AS [rs] WITH(NOLOCK) ON [rs].[reportid] = [h].[reportid]
INNER JOIN [ReportServer].[dbo].[Schedule] AS [sc] WITH(NOLOCK) ON [sc].[scheduleid] = [rs].[scheduleid]
WHERE
1=1
AND [rs].[reportaction] = 2
AND (#ReportName IS NULL OR [c].[Name] = #ReportName)
AND (#DateFrom IS NULL OR [h].[snapshotdate] >= CAST(DATEADD(DAY, -1, GETDATE()) AS DATE))
ORDER BY
[c].[name]
, [h].[snapshotdate];
;"
$server = 'YourServerName';
$dbs = 'MASTER';
$dsn = "Data Source=$server; Initial Catalog=$dbs; Integrated Security=SSPI;";
$cn = New-Object System.Data.SqlClient.SqlConnection($dsn);
#execute merge statement here with parameters
$cn = New-Object System.Data.SqlClient.SqlConnection($dsn);
$cn.Open();
$cmd = $cn.CreateCommand();
$cmd.CommandText = $sql
$SqlAdapter = New-Object System.Data.SqlClient.SqlDataAdapter
$SqlAdapter.SelectCommand = $cmd
$cmd.Connection = $cn
$ds = New-Object System.Data.DataSet
$SqlAdapter.Fill($ds)
$cn.Close()
$Result = $ds.Tables[0]
Foreach ($item in $Result)
{
#Write-Host $item.name
$SnapshotUrl = $item.SnapshotUrl
$ExportFileName = $item.ExportFileName
(Invoke-WebRequest -Uri $SnapshotUrl -OutFile $ExportFileName -UseDefaultCredentials -TimeoutSec 240);
}
https://learn.microsoft.com/en-us/sql/reporting-services/url-access-parameter-reference?view=sql-server-ver15
Was having trouble with powershell, so thought I'd post simplified version of my rough Python solution inspired by the resource from #aduguid's answer.
import requests
from requests_negotiate_sspi import HttpNegotiateAuth
import os
def downloadFile(url, file_name, download_folder, session):
response = session.get(url, stream=True) # open the download link
file_path = os.path.join(download_folder, file_name)
with open(file_path, 'wb') as file: # create a new file with write binary mode
for chunk in response.iter_content(chunk_size=1024):
if chunk:
file.write(chunk)
# Can also use '/Reports()' for non-linked reports.
# Can also pass in 'path="<report_path>"' instead of using id numbers,
# e.g. '.../Reports(path="/cool%20reports/my%20report")/HistorySnapshots'
api_url = r'http://<server_name>/reports/api/v2.0/LinkedReports(<item_id>)/HistorySnapshots'
session = requests.session()
session.auth = HttpNegotiateAuth() # uses windows log in
response = session.get(api_url)
hs_snapshot_list = response.json()['value']
for item_dict in hs_snapshot_list:
download_url = (r'http://<server_name>/ReportServer/Pages/ReportViewer.aspx?<report_path>'
+ '&rs:Snapshot=' + item_dict['HistoryId']
+ '&rs:Format=CSV')
downloadFile(download_url, '<your_file_name>', '<your_download_folder>', session)
SSRS API Resource:
https://app.swaggerhub.com/apis/microsoft-rs/SSRS/2.0#/Reports/GetReportHistorySnapshots
conn = mysql.connector.connect(
host="localhost",
user="root",
passwd="12123123412"
database='newdb')
cur = conn.cursor()
xx_zz = self.screen.get_screen('end').ids["rgr"].text
ee_zz = self.screen.get_screen('end').ids["gfd"].text
qur = f"SELECT * FROM (%s) WHERE bedrooms = '(%s)' "
val = (xx_zz, ee_zz)
cur.execute(qur, val)
records = cur.fetchall()
I suggest that we use a function to create the query string using match-case. This will avoid any risk of SQL injection as we are not using the string provided by the front end.
You will need to modify and complete the option values and table names and decide whether there should be a default table name or no result if the option provided is not found.
Obviously this code has not been tested.
def makeQuery( option ):
match option:
case 'option1':
return f"SELECT * FROM table_name_1 WHERE bedrooms = '(%s)' "
case 'option2':
return f"SELECT * FROM table_name_2 WHERE bedrooms = '(%s)' "
case _:
return f"SELECT * FROM default_table_name WHERE bedrooms = '(%s)' "
conn = mysql.connector.connect(
host="localhost",
user="root",
passwd="12123123412"
database='newdb')
cur = conn.cursor()
xx_zz = self.screen.get_screen('end').ids["rgr"].text
ee_zz = self.screen.get_screen('end').ids["gfd"].text
qur = makeQuery(xx_zz )
val = ( ee_zz )
cur.execute(qur, val)
records = cur.fetchall()
In textInput (field) you use hint_text to show a placeholder in a text field(input).
Is there a pattern for dealing with query params in a flask rest server? I know I can create a sql query word for word using string manipulation in python, but I find that to be ugly and error prone, I was wondering if there is a better way. Here's what I have:
param1 = request.args.get('param1', type = int)
param2 = request.args.get('param2', type = int)
if param1 is not None:
if param2 is not None:
cursor.execute("SELECT * FROM table WHERE p1 = %s AND p2 = %s", (str(param1), str(param2)))
else:
cursor.execute("SELECT * FROM table WHERE p1 = %s", (str(param1),))
else:
if param2 is not None:
cursor.execute("SELECT * FROM table WHERE p2 = %s", (str(param2),))
else:
cursor.execute("SELECT * FROM table")
It's easy to see the number of possible SQL statements is 2 to the number of parameters, which grows out of control... so, again, without using string manipulation to custom build the sql query, is there an idiom or pattern that is used to accomplish this in a more elegant way? Thanks.
Loop through your parameters.
params = []
for i in range(1, HoweverManyParamsYouNeed):
params.append(request.args.get('param' + str(i), type = int))
s = ""
for i in range(1, len(params)):
if params[ i ] is not None:
if not s:
s = "p" + str(i) + " = " + str(params[ i ])
else:
s = s + " AND p" + str(i) + " = " + str(params[ i ])
full = "SELECT * FROM table"
if s:
full = full + " WHERE " + s
cursor.execute(full)
You might need to correct this code, since I do not have a way to run it.
I suggest using ORM(https://en.wikipedia.org/wiki/Object-relational_mapping) instead of raw sql queries.
First you need install flask-sqlalchemy (https://flask-sqlalchemy.palletsprojects.com/)
Then define your model
class MyModel(db.Model):
id = db.Column(db.Integer, primary_key=True)
column1 = db.Column(db.Integer)
column2 = db.Column(db.Integer)
Let's say you have your filter lookup somewhere
allowed_filters = {"column1", "column2"}
Finally instead of cursor you can use SQLAlchemy's ORM to retrieve your filtered objects.
query = MyModel.query
for field, value in request.args.items():
if field in allowed_filters:
query = query.filter(getattr(MyModel, field) == value)
my_object_list = list(query.all())
If you really want to create your queries manually you can always iterate over args:
where_clause = ""
params = []
for field, value in request.args.items():
if field in allowed_filters:
if len(where_clause) > 0:
where_clause += " AND "
where_clause += "{} = %s".format(field)
params.append(value)
if len(where_clause) > 0:
cursor.execute("SELECT * FROM table WHERE {}".format(where_clause), tuple(params))
else:
cursor.execute("SELECT * FROM table")
I've a django app which fetches data from mysql db whenever a request is received.This works fine when request is processed by one user but, when more than user send request, I get a error message saying "InterfaceError at url (0, '')".
I'm using Django version 1.9.
As per my research , I included CONN_MAX_AGE in my settings.py but still I got the same error.
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
**'CONN_MAX_AGE': None**
}
}
my models.py
connection = pymysql.connect(host='localhost',user='user',password='password',db='db_name',port=3306,charset='utf8',cursorclass=pymysql.cursors.DictCursor)
def execute(query):
cur = connection.cursor()
cur.execute(query)
data = cur.fetchall()
connection.commit()
cur.close()
return data
def trending_assets():
sql = "select * from media_recommendation_engine.content_table t1 inner join (SELECT movieId,rank from" \
" media_recommendation_engine.asset_ranks limit 10) t2 on t1.movieId = " \
"t2.movieId order by t2.rank asc ;;"
data = execute(sql)
return data
views.py
#permission_classes((permissions.IsAuthenticated,))
class Trending(viewsets.GenericViewSet):
def list(self,request):
if request.query_params['type']=='trending':
result_data = models.trending_assets()
return Response(models.formatter(result_data))
#
else:
return JsonResponse({'message': 'Wrong Argument pass'},status= 400)
You should connect the db each time you need to when a request need to be processed. Earlier I used a connection globally. And do not use db.close().
def execute(query):
connection = pymysql.connect(host='localhost', user='user', password='passsword', db='db_name', port=3306, charset='utf8', cursorclass=pymysql.cursors.DictCursor)
cur = connection.cursor()
cur.execute(query)
data = cur.fetchall()
connection.commit()
cur.close()
return data
def trending_assets():
sql = "select * from media_recommendation_engine.content_table t1 inner join (SELECT movieId,rank from" \
" media_recommendation_engine.asset_ranks limit 10) t2 on t1.movieId = " \
"t2.movieId order by t2.rank asc ;;"
data = execute(sql)
return data
After much trying on python3 (as of still new in this language), the line whereby cursor.execute will prevent the for loop to continue when condition met. However when I comment cursor.execute line, the looping able to continue until the end. How can I made it continue till the last result of the loop?
Objective to achieve: -
I am trying to filter a bunch of data from CFC_xxxx table, process and put it back into SENSOR_TREEHUGGERS table.
Line where the show stopper happened: -
cursor.execute(sqlInsert,
(xxGatewayId,qqqGatewayId,treeDiameter,temperature,recordTime,dateTime,treeHuggerID))
Python3 Code: -
import base64
import struct
import pymysql.cursors
import sys
import datetime
from contextlib import closing
connection = pymysql.connect(host='localhost',
user='xxx',
password='xxx',
db='xxx',
charset='utf8mb4',
cursorclass=pymysql.cursors.DictCursor)
with closing(connection.cursor()) as cursor:
sql = "SELECT * FROM CFC_xxxx"
cursor.execute(sql)
for row in cursor:
check = struct.unpack('>15x2f4B1L1x', base64.b64decode(row['Value']))
if check[6] > 20000 or check[6] < 10000:
continue
else:
xxGatewayId = int(row['Node_ID'])
qqqGatewayId = int(row['Gateway_ID'])
treeDiameter = int(check[0])
temperature = int(check[1])
recordTime = str(row['Timestamp'])
year = datetime.datetime.fromtimestamp(row['Timestamp']).strftime('%Y')
if check[2] == 0:
hours = '00'
else:
hours = str(check[2])
if check[3] == 0:
minute = '00'
else:
minute = str(check[3])
if check[4] < 10:
day = '0'+str(check[4])
else:
day = str(check[4])
if check[5] < 10:
month = '0'+str(check[5])
else:
month = str(check[5])
dateTime = str(year + '-' + month + '-' + day + ' ' + hours + ':' + minute + ':00')
treeHuggerID = int(check[6])
sqlInsert = "INSERT INTO SENSOR_TREEHUGGERS(`xx_Gateway_Id`,`qqq_Gateway_Id`,`treeDiameter`,`temperature`,`recordTime`,`dateTime`,`TreeHuggerId`) VALUES (%s,%s,%s,%s,%s,%s,%s)"
cursor.execute(sqlInsert, (xxGatewayId,qqqGatewayId,treeDiameter,temperature,recordTime,dateTime,treeHuggerID))
connection.commit()
Below is how I get my trouble solved. Using array to append all the processed data and use executemany to save them at once. In beforehand, have to modify mysql config max_allowed_packet = 500M
A pain but valuable lesson.
Answer: -
import base64
import struct
import pymysql.cursors
import sys
import datetime
from contextlib import closing
collectData = []
connection = pymysql.connect(host='localhost',
user='xxx',
password='xxx',
db='xxx',
charset='utf8mb4',
cursorclass=pymysql.cursors.DictCursor)
with closing(connection.cursor()) as cursor:
sql = "SELECT * FROM CFC_xxxx"
cursor.execute(sql)
for row in cursor:
check = struct.unpack('>15x2f4B1L1x', base64.b64decode(row['Value']))
if check[6] > 20000 or check[6] < 10000:
continue
else:
xxGatewayId = int(row['Node_ID'])
qqqGatewayId = int(row['Gateway_ID'])
treeDiameter = int(check[0])
temperature = int(check[1])
recordTime = str(row['Timestamp'])
year = datetime.datetime.fromtimestamp(row['Timestamp']).strftime('%Y')
if check[2] == 0:
hours = '00'
else:
hours = str(check[2])
if check[3] == 0:
minute = '00'
else:
minute = str(check[3])
if check[4] < 10:
day = '0'+str(check[4])
else:
day = str(check[4])
if check[5] < 10:
month = '0'+str(check[5])
else:
month = str(check[5])
dateTime = str(year + '-' + month + '-' + day + ' ' + hours + ':' + minute + ':00')
treeHuggerID = int(check[6])
collectData.append(xxGatewayId,qqqGatewayId,treeDiameter,temperature,recordTime,dateTime,treeHuggerID)
c1 = connection.cursor()
sqlInsert = "INSERT INTO SENSOR_TREEHUGGERS(`xx_Gateway_Id`,`qqq_Gateway_Id`,`treeDiameter`,`temperature`,`recordTime`,`dateTime`,`TreeHuggerId`) VALUES (%s,%s,%s,%s,%s,%s,%s)"
c1.executemany(sqlInsert, collectData)
connection.commit()