'list' object not callable when INSERT into database - json

I'm trying to populate a database using a query API. There are multiple lists within the JSON payload, but I can't seem to get it right.
When I run it, the printed exception is: 'list' object is not callable.
The program runs and the data comes back fine, but the SQL statement trips up what I am trying to do. Do I need to declare a dictionary?
import json
import urllib.request
import sqlite3
import urllib.parse

connection = sqlite3.connect("database.db")
cursor = connection.cursor()

# API endpoint
API_KEY = ''
API_ENDPOINT = "https://api.newsfilter.io/public/actions?token={}".format(API_KEY)
print(API_ENDPOINT)

# Define the filter parameters
queryString = "symbols: FRSX AND publishedAt:[2021-03-20 TO 2021-04-02]"
#{}".format(mydict)
payload = {
    "type": "filterArticles",
    "queryString": queryString,
    "from": 0,
    "size": 200
}

# Format your payload to JSON bytes
jsondata = json.dumps(payload)
jsondataasbytes = jsondata.encode('utf-8')

# Instantiate the request
req = urllib.request.Request(API_ENDPOINT)
# Set the correct HTTP header: Content-Type = application/json
req.add_header('Content-Type', 'application/json; charset=utf-8')
# Set the correct length of your request
req.add_header('Content-Length', len(jsondataasbytes))

# Send the request to the API
response = urllib.request.urlopen(req, jsondataasbytes)
# Read the response
res_body = response.read()
# Transform the response into JSON
assets = json.loads(res_body.decode("utf-8"))

articles = assets["articles"]
for asset in articles:
    #print(asset)
    try:
        cursor.execute("""
            INSERT INTO news_SEC (id, title, description, url, imageurl, publishedAt, source, symbol, cik)
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
        """, (articles('id'), articles('title'), articles('description'), articles('url'), articles('imageurl'), articles('publishedAT'), articles('source'), articles('symbol'), articles('cik')))
    except Exception as e:
        print(e)
        print(asset)
connection.commit()
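The 'list' object is not callable error comes from the parentheses: articles('id') tries to call the outer list, whereas each per-article dict should be indexed with square brackets on the loop variable, e.g. asset['id']. For reference, a minimal sketch of the loop only; the key names below simply mirror the column names and are assumptions (print(asset) to confirm the real ones, including the publishedAt casing):

# Sketch only: index the per-article dict (asset) with square brackets
# instead of calling the list (articles). Key names are assumptions.
for asset in articles:
    try:
        cursor.execute("""
            INSERT INTO news_SEC (id, title, description, url, imageurl, publishedAt, source, symbol, cik)
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
        """, (asset['id'], asset['title'], asset['description'], asset['url'],
              asset['imageurl'], asset['publishedAt'], asset['source'],
              asset['symbol'], asset['cik']))
    except Exception as e:
        print(e)
        print(asset)
connection.commit()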

Related

Need to create a python script to authenticate via token and call data through rest-api from multiple urls

I need to create a Python script to fetch data from multiple URLs and populate the results into my MySQL database. The authentication method I need to use is a bearer token, so once I get a token from all three URLs of an application, I need to use it to fetch data from another three URLs (to get the configuration item data).
Attached
import json
# to send requests to the server
import requests
import urllib3
# to ignore the SSL verification warning
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

from Get_Token import Generate_TK

# Generate_TK class generates the token
obj = Generate_TK()
# Token
tk = obj.token
resp = 0

def get_CIs_data():
    url1 = "https://abc"
    # pass the token in the header
    header = {
        "accept": "application/json",
        "Authorization": "Bearer " + str(tk),
    }
    response = requests.get(url1, headers=header, verify=False)
    return response

resp = get_CIs_data()
if resp.status_code != 200:
    print('error: ' + str(resp.status_code))
elif resp.status_code != 401:  # code 401 token expired
    # if the token expires, generate a new token
    tk = obj.create_token()
    data = get_CIs_data()
else:
    print('connected Successfully')

# Convert response object into JSON
data = resp.json()
# returns data in list of tuples
# content = data.items()
# unpacking tuple
for key, value in data.items():
    print(key, value)
This is the script I am currently using, and it only handles a single authentication.
I am expecting results from not just one URL; I need a script that can fetch data from three different URLs and send the data to my MySQL database.
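Not a full answer for the MySQL part, but a minimal sketch of how the fetch step could loop over several endpoints, reusing the Generate_TK helper from the snippet above. The URLs are placeholders, and the 401 retry is an assumption about how token expiry is reported:

import requests
from Get_Token import Generate_TK   # the asker's own token helper (assumed to exist)

obj = Generate_TK()
tk = obj.token

urls = ["https://abc/app1", "https://abc/app2", "https://abc/app3"]   # placeholder endpoints

def fetch(url, token):
    headers = {
        "accept": "application/json",
        "Authorization": "Bearer " + str(token),
    }
    return requests.get(url, headers=headers, verify=False)

results = {}
for url in urls:
    resp = fetch(url, tk)
    if resp.status_code == 401:          # token expired: refresh once and retry
        tk = obj.create_token()
        resp = fetch(url, tk)
    if resp.status_code == 200:
        results[url] = resp.json()       # keep each payload for the later DB insert step
    else:
        print('error from', url, ':', resp.status_code)

The same results dict can then feed whatever insert logic the MySQL side needs.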

Convert a Bytes String to Dictionary in Python

Basic Information
I am creating a python script that can encrypt and decrypt a file with previous session data.
The Problem
I am able to decrypt my file and read it using a key. This returns a bytes string which I can in turn convert to a string. However, this string needs to be converted to a dictionary, which I cannot do. Using ast, json and eval I have run into errors.
Bytes string
decrypted = fernet.decrypt(encrypted)
String
string = decrypted.decode("UTF-8").replace("'", '"')
If I use eval() or ast.literal_eval() I get the following error:
Then I tried using json.loads() and I get the following error:
The information blocked out on both images is to protect my SSH connections. In the first image it is giving me a SyntaxError at the last digit of my ip address.
The Function
The function that is responsible for this when called looks like this:
def FileDecryption():
    with open('enc_key.key', 'rb') as filekey:
        key = filekey.read()
        filekey.close()
    fernet = Fernet(key)
    with open('saved_data.txt', 'rb') as enc_file:
        encrypted = enc_file.read()
        enc_file.close()
    decrypted = fernet.decrypt(encrypted)
    print(decrypted)
    string = decrypted.decode("UTF-8").replace("'", '"')
    data = f'{string}'
    print(data)
    #data = eval(data)
    data = json.loads(data)
    print(type(data))
    for key in data:
        #command_string = ["load", data[key][1], data[key][2], data[key][3], data[key][4]]
        #SSH.CreateSSH(command_string)
        print(key)
Any help would be appreciated. Thanks!
Your data seems like it was written incorrectly in the first place, but without a complete example it is hard to say.
Here's a complete example that round-trips a JSON-able data object.
# requirement:
# pip install cryptography
from cryptography.fernet import Fernet
import json

def encrypt(data, data_filename, key_filename):
    key = Fernet.generate_key()
    with open(key_filename, 'wb') as file:
        file.write(key)
    fernet = Fernet(key)
    encrypted = fernet.encrypt(json.dumps(data).encode())
    with open(data_filename, 'wb') as file:
        file.write(encrypted)

def decrypt(data_filename, key_filename):
    with open(key_filename, 'rb') as file:
        key = file.read()
    fernet = Fernet(key)
    with open(data_filename, 'rb') as file:
        return json.loads(fernet.decrypt(file.read()))

data = {'key1': 'value1', 'key2': 'value2'}
encrypt(data, 'saved_data.txt', 'enc_key.key')
decrypted = decrypt('saved_data.txt', 'enc_key.key')
print(decrypted)
Output:
{'key1': 'value1', 'key2': 'value2'}
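As an aside, and only a guess at why the original quote-swapping route failed: if the file was first written with str(some_dict) rather than json.dumps(some_dict), a blanket replace("'", '"') corrupts any value that itself contains a quote, and json.loads() (or ast.literal_eval() on the mangled string) then fails. A small illustration of that failure mode:

import json

# A dict whose value contains an apostrophe, saved with str() instead of json.dumps()
saved = str({"host": "alice's-box", "port": 22})
print(saved)                      # {'host': "alice's-box", 'port': 22}

fixed = saved.replace("'", '"')   # blindly swapping quote characters corrupts the value
print(fixed)                      # {"host": "alice"s-box", "port": 22}

try:
    json.loads(fixed)
except json.JSONDecodeError as e:
    print("json.loads fails:", e)

Writing the data out with json.dumps in the first place, as in the round-trip example above, avoids the problem entirely.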

How to Convert a data set from a database query to a specific JSON format for input to a REST api

I am fairly new to python and would like some assistance with a problem.
I have a SQL select query that returns a column of values. I would like to pass the records in a request to a REST API call. The issue is that the API call expects the data in a specific JSON format.
How can I convert the data returned by the query to the specific JSON format shown below?
Query from SQL returns:
InfoId
------
1
2
3
4
5
6
I need to pass these values to a REST API as JSON in the following format:
{
    "InfoId": [
        1, 2, 3, 4, 5, 6
    ]
}
I have tried a couple of options to solve this problem.
I have tried converting the data into JSON using the pandas DataFrame.to_json method with the various orient parameters, but none of them return the desired format shown above.
import requests
import json
import pyodbc
import pandas as pd

conn = pyodbc.connect('Driver={SQL SERVER};'
                      'Server=myServer;'
                      'Database=TestDb;'
                      'Trusted_Connection=yes;'
                      )
cursor = conn.cursor()

sql_query = pd.read_sql_query('SELECT InfoId FROM tbl_info', conn)
#print(sql_query)
print(sql_query.to_json(orient='values', index=False))

url = "http://ldapiserver:5000/patterns/v1/swirl?idType=Info"

#sample payload
#payload = "{\r\n \"InfoId\": [\r\n 1,2,3,4,5,6\r\n ]\r\n}"
payload = sql_query.to_json(orient='records')

headers = {
    'Content-Type': 'application/json'
}

response = requests.request("POST", url, headers=headers, data=json.dumps(payload, indent=4))
resp_body = response.json()
print(resp_body)
print(response.elapsed.total_seconds())
The second method I have tried is to convert the rows from the SQL query into a list object and then form the JSON string. It works that way, but I would like to automate it so that, irrespective of the query, it can form the JSON string.
import requests
import json
import pyodbc

conn = pyodbc.connect('Driver={SQL SERVER};'
                      'Server=myServer;'
                      'Database=TestDb;'
                      'Trusted_Connection=yes;'
                      )
cursor = conn.cursor()

cursor.execute("""
    SELECT InfoId FROM tbl_info
""")
rows = cursor.fetchall()

# Convert query to row arrays
rowarray_list = []
for row in rows:
    t = (row.InfoId)
    rowarray_list.append(t)
j = json.dumps(rowarray_list)
conn.close()

txt = '{"InfoId": ', j, '}'
# print(txt)
payload = txt[0] + txt[1] + txt[2]

url = "http://ldapiserver:5000/patterns/v1/swirl?idType=Info"
# payload = "{\r\n \"InfoId\": [\r\n 72,74\r\n ]\r\n}"
#print(json.dumps(payload, indent=4))

headers = {
    'Content-Type': 'application/json'
}

response = requests.request("POST", url, headers=headers, data=payload)
resp_body = response.json()
print(resp_body)
print(response.elapsed.total_seconds())
Appreciate any help with this.
Thank you.
To convert your SQL query to JSON,
.
.
.
rows = cursor.fetchall()
# convert each row to a dict keyed by its column name
json_rows = [dict(zip([key[0] for key in cursor.description], row)) for row in rows]
Then you can return your response as you like
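Alternatively, if the target really is the {"InfoId": [1, 2, ...]} shape, a short sketch that builds it straight from the cursor, reusing the cursor and url from the second snippet above. The column name is read from cursor.description, so the same code works for other single-column queries:

import json
import requests

rows = cursor.fetchall()
column_name = cursor.description[0][0]              # e.g. 'InfoId'
payload = {column_name: [row[0] for row in rows]}   # {'InfoId': [1, 2, 3, 4, 5, 6]}

response = requests.post(url, headers={'Content-Type': 'application/json'},
                         data=json.dumps(payload))
print(response.json())

If staying with pandas, the same shape could presumably be built with {"InfoId": sql_query["InfoId"].tolist()} instead of to_json.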

api data directly insert into PostgreSQL database with python

main.py
import json, urllib.request, requests
import psycopg2
from psycopg2.extras import execute_values

# Retrieve JSON data from the API
url = "https://datahead.herokuapp.com/api/employeers/"
response = urllib.request.urlopen(url)
data = json.loads(response.read())

# ***** connect to the db *******
try:
    conn = psycopg2.connect("dbname='datahead' user='postgres' host='localhost' password='datahead'")
except:
    print("I am unable to connect to the database")

# cursor
cur = conn.cursor()

fields = [
    'id',        # integer
    'name',      # varchar
    'log_date',  # date
    'log_time',  # timestamp
    'login',     # timestamp
    'logout'     # timestamp
]

for item in data:
    my_data = [item[field] for field in fields]
    insert_query = "INSERT INTO employee VALUES (%s, %s, %s, %s, %s, %s)"
    cur.execute(insert_query, tuple(my_data))
conn.commit()

# close the cursor
cur.close()
# close the connection
conn.close()
Please see my API data format: https://datahead.herokuapp.com/api/employeers/
Will the timestamp type need to change here, and if so, which data type should I use?
What happened at line 56?
I think log_time should be of type time without time zone in PostgreSQL.
But you would probably be better off using timestamp with time zone to represent date and time together.
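For illustration, a sketch of what that table definition could look like with timestamp with time zone, reusing cur and conn from main.py above. The table and column names just mirror the fields list and are assumptions:

# Hypothetical schema matching the fields list in main.py; the types are suggestions only.
cur.execute("""
    CREATE TABLE IF NOT EXISTS employee (
        id       integer PRIMARY KEY,
        name     varchar(100),
        log_date date,
        log_time timestamp with time zone,
        login    timestamp with time zone,
        logout   timestamp with time zone
    )
""")
conn.commit()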

Failing to test POST in Django

Using Python3, Django, and Django Rest Frameworks.
Previously I had a test case that was making a POST to an endpoint.
The payload was a dictionary:
mock_data = {some data here}
In order to send the data over the POST I was doing:
mock_data = base64.b64encode(json.dumps(mock_data).encode('utf-8'))
When doing the POST:
response = self.client.post(
    '/some/path/here/', {
        ...,
        'params': mock_data.decode('utf-8'),
    },
)
And on the receiving end, in the validate() method, I was doing:
params = data['params']
try:
    params_str = base64.b64decode(params).decode('utf-8')
    app_data = json.loads(params_str)
except (UnicodeDecodeError, ValueError):
    app_data = None
That was all fine, but now I need to add some HMAC validation, and the JSON I am passing can no longer be a dict: its ordering would change each time, so hmac.new(secret, payload, algorithm) would be different.
I was trying to use a string:
payload = """{data in json format}"""
But when I am doing:
str_payload = payload.encode('utf-8')
b64_payload = base64.b64encode(str_payload)
I cannot POST it, and getting an error:
raise TypeError(repr(o) + " is not JSON serializable")
TypeError: b'ewog...Cn0=' is not JSON serializable
Even if I do b64_payload.decode('utf-8') as before, I still get a similar error:
raise TypeError(repr(o) + " is not JSON serializable")
TypeError: b'\xa2\x19...\xff' is not JSON serializable
Some python in the terminal:
>>> d = {'email': 'user@mail.com'} # regular dictionary
>>> d
{'email': 'user@mail.com'}
>>> dn = base64.b64encode(json.dumps(d).encode('utf-8'))
>>> dn
b'eyJlbWFpbCI6ICJ1c2VyQG1haWwuY29tIn0='
>>> s = """{"email": "user@mail.com"}""" # string
>>> s
'{"email": "user@mail.com"}'
>>> sn = base64.b64encode(json.dumps(s).encode('utf-8'))
>>> sn
b'IntcImVtYWlsXCI6IFwidXNlckBtYWlsLmNvbVwifSI=' # different!
>>> sn = base64.b64encode(s.encode('utf-8'))
>>> sn
b'eyJlbWFpbCI6ICJ1c2VyQG1haWwuY29tIn0=' # same
The issue is resolved.
The problem was NOT with the payload, but with a parameter I was passing:
'signature': b'<signature>'
Solved by passing the signature field like this:
'signature': signature_b64.decode('utf-8')
Thank you!
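For completeness, a small sketch of the pattern that appears to work here: keep the JSON payload as a fixed string so its bytes (and therefore the HMAC) are stable, sign the exact base64 bytes that get sent, and decode everything to str before handing it to the test client. The secret and field names are placeholders:

import base64
import hashlib
import hmac
import json

secret = b'shared-secret'                        # placeholder secret
payload = '{"email": "user@mail.com"}'           # JSON kept as a string, so the byte order never changes

b64_payload = base64.b64encode(payload.encode('utf-8'))

# Sign the exact bytes that will be transmitted
signature = hmac.new(secret, b64_payload, hashlib.sha256).digest()
signature_b64 = base64.b64encode(signature)

post_data = {
    'params': b64_payload.decode('utf-8'),       # str, not bytes
    'signature': signature_b64.decode('utf-8'),  # str, not bytes (this was the fix)
}
print(json.dumps(post_data))                     # serializes cleanly once no bytes objects remain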