Exception thrown with simple Siphon ACIS Request - python-siphon

I'm using the following code in Google Colab with siphon 0.9:
from siphon.simplewebservice import acis
import json
params = {"sid":"KPIH","elems":"maxt,mint,avgt,pcpn,snow,snwd"}
ds = acis.acis_request("StnData", params)
Resulting error:
JSONDecodeError: Expecting value: line 1 column 1 (char 0)
During handling of the above exception, another exception occurred:
AcisApiException Traceback (most recent call last)
/usr/local/lib/python3.7/dist-packages/siphon/simplewebservice/acis.py in acis_request(method, params)
60 raise AcisApiException('Bad URL. Check your ACIS connection method string.')
61 except ValueError:
---> 62 raise AcisApiException('No data returned! The ACIS parameter dictionary'
63 'may be incorrectly formatted')
64
AcisApiException: No data returned! The ACIS parameter dictionarymay be incorrectly formatted
Using Jupyter Lab resulted in the same error.

I fixed the issue by adding the start and end dates to the query:
params = {"sid":"KPIH","elems":"maxt,mint,avgt,pcpn,snow,snwd","sdate":"por","edate":"por"}

Related

How to resolve JSON decode error in Ubuntu

I am running a Python script on Windows 10.
The script uses the json library.
When I run the same script on Ubuntu 20.04 (running on VMware), I get a JSON decode error.
I don't see this behaviour when I run it on Windows 10.
The following is the error I get when I run the script on Ubuntu:
Exception in thread Thread-1:
Traceback (most recent call last):
File "/usr/lib/python3.8/threading.py", line 932, in _bootstrap_inner
self.run()
File "/usr/lib/python3.8/threading.py", line 870, in run
self._target(*self._args, **self._kwargs)
File "uiControl.py", line 83, in getTcpData
self.taskObj = json.loads(data.decode('utf-8'))
File "/usr/lib/python3.8/json/__init__.py", line 357, in loads
return _default_decoder.decode(s)
File "/usr/lib/python3.8/json/decoder.py", line 340, in decode
raise JSONDecodeError("Extra data", s, end)
json.decoder.JSONDecodeError: Extra data: line 5 column 2 (char 73)
In the function on_message, I print the data received.
The following is the data I receive:
b'{"code":"101","user":"ssmr","evNumber":"TS15EC1100"}'
I call the function addToTaskQueue() to store the received data and then parse it with the function BackendParser():
def on_message(self, client, userdata, msg):
    print(msg.payload)
    self.taskObj = json.loads(msg.payload)
    self.taskObj["commType"] = "mqtt"
    self.taskObj["transactionType"] = "rx"
    taskScheduler.addToTaskQueue(self.taskObj)

def BackendParser(msg):
    if(msg["code"] == "101"):
        Backend.userName = msg["user"]
        Backend.evNumber = msg["evNumber"]
        Backend.evChargeControl = "On"
        if(Backend.requestStatus == ""):
            Backend.requestStatus = "new"
class taskScheduler():
    global qTaskList
    qTaskList = queue.Queue()

    def __init__(self):
        super().__init__()
        self.tcpCon = tcpServerClient("client")
        self.mqttCon = mqttComm()
        print("Initiated Task Scheduler class")

    @staticmethod
    def addToTaskQueue(item):
        if not qTaskList.full():
            # print("Task added")
            qTaskList.put(item)

    def executeFromTaskQueue(self):
        if not qTaskList.empty():
            item = qTaskList.get()
            if("mqtt" == item["commType"]):
                if("tx" == item["transactionType"]):
                    pubTopic = item["topic"]
                    del item["commType"]
                    del item["transactionType"]
                    del item["topic"]
                    self.mqttCon.mqttSend(item, pubTopic)
                elif("rx" == item["transactionType"]):
                    BackendParser(item)
            elif("tcp" == item["commType"]):
                if("tx" == item["transactionType"]):
                    del item["commType"]
                    del item["transactionType"]
                    tcpServerClient.sendTcpData(item)
                elif("rx" == item["transactionType"]):
                    BackendParser(item)
I figured out the error.
I was using the following function, getTcpData, to receive the data.
I printed the data as received and noticed that there were \n characters in the message. This was not an issue when the script was executed on Windows 10. I added a routine to remove the \n characters (a sketch of that cleanup follows the function below) and now it works fine on Ubuntu.
def getTcpData(self):
    print("Waiting for tcp data")
    while True:
        if(tcpServerClient.clientsocket != None):
            data = tcpServerClient.clientsocket.recv(1024)
            if data:
                print(data)
                self.taskObj = json.loads(data.decode('utf-8'))
                self.taskObj["commType"] = "tcp"
                self.taskObj["transactionType"] = "rx"
                taskScheduler.addToTaskQueue(self.taskObj)
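The cleanup routine itself is not shown above; a minimal sketch of the idea, assuming each recv() delivers a single JSON object with stray \n characters around it, would be to strip them before decoding:

data = tcpServerClient.clientsocket.recv(1024)
if data:
    # remove the \n characters seen on Ubuntu before handing the
    # text to json.loads (assumption: one JSON object per recv)
    cleaned = data.decode('utf-8').replace('\n', '').strip()
    self.taskObj = json.loads(cleaned)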

JSONDecodeError: Expecting value: line 1 column 1 error in AWS Lambda

I am trying to write an AWS Lambda function which will push the SQS queue output to an S3 bucket.
But the Lambda function is failing to push the message; the CloudWatch log shows
JSONDecodeError: Expecting value: line 1 column 1
I am posting the Lambda function I am using:
import json
import boto3

def lambda_handler(event, context):
    s3 = boto3.client("s3")
    data = json.loads(event["Records"][0]["body"])  # getting error in this line
    print(data)
    s3.put_object(Bucket="sqsmybucket", Key="data.json", Body=json.dumps(data))
    # print(event)
    return {
        'statusCode': 200,
        'body': json.dumps('Hello from Lambda!')
    }
The CloudWatch log shows:
2020-05-30T23:51:45.276+05:30
[ERROR] JSONDecodeError: Expecting value: line 1 column 1 (char 0)
Traceback (most recent call last):
File "/var/task/lambda_function.py", line 6, in lambda_handler
data = json.loads(event["Records"][0]["body"])
File "/var/lang/lib/python3.8/json/__init__.py", line 357, in loads
return _default_decoder.decode(s)
File "/var/lang/lib/python3.8/json/decoder.py", line 337, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
File "/var/lang/lib/python3.8/json/decoder.py", line 355, in raw_decode
raise JSONDecodeError("Expecting value", s, err.value) from None
I have formatted the message and saved it; the link is below:
[formatted JSON code][1]
Please help, thanks in advance.
Your event["Records"][0]["body"] is a plain string, not JSON:
"body": "A difficult message."
Therefore, json.loads(event["Records"][0]["body"]) is equivalent to json.loads("A difficult message.") which obviously fails.
To get body's value you can do the following instead:
data = event["Records"][0]["body"]
However, since later you have the following statement:
Body=json.dumps(data)
The Body will be:
Body='"A difficult message."'
which may or may not be what you desire.
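Putting that together, a minimal sketch of the handler (reusing the bucket and object key names from the question, and assuming you simply want the body text stored as-is) could look like:

import json
import boto3

s3 = boto3.client("s3")

def lambda_handler(event, context):
    # body arrives as a plain string, so no json.loads() is needed
    body = event["Records"][0]["body"]
    # store the raw text; use json.dumps(body) instead only if you really
    # want a quoted JSON string in the object
    s3.put_object(Bucket="sqsmybucket", Key="data.json", Body=body)
    return {
        'statusCode': 200,
        'body': json.dumps('Hello from Lambda!')
    }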

Python3 Error: TypeError: 'str' object is not callable

I'd like to get the latest post IDs from a subreddit. Reddit has a basic API for this: you can get JSON, so I want to fetch the data and decode it, but I get an error.
root@archi-sunucu:~/yusuf/www# python3 reddit.py
Traceback (most recent call last):
File "reddit.py", line 24, in <module>
json = json.loads(resp.text())
TypeError: 'str' object is not callable
root@archi-sunucu:~/yusuf/www# python3 reddit.py
my code:
url = "https://www.reddit.com/r/" + subreddit + "/" + feed + ".json?sort=" + feed + "&limit=6"
resp = requests.get(url, verify=False)
json = json.loads(resp.text())
print(json["data"]["children"][0]["data"]["id"])
Thanks for the help...
You complained that this expression raises an error:
json.loads(resp.text())
Well, let's break that down into something simpler,
so the line number tells us exactly what part of your code is failing.
temp = resp.text()
json.loads(temp)
Now we see that the 2nd line doesn't even execute,
it fails in the 1st line attempting to compute something
to assign to the temporary variable.
Examine resp and its attributes with tools
like help(resp), dir(resp), type(resp.text), repr(resp.text).
You will soon learn that the .text attribute is a str.
That is not a callable function, so Python raises an error.
Use the value directly, without a call:
json = json.loads(resp.text)
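For completeness, a sketch of the corrected lines (the variable is renamed only so it no longer shadows the json module; that is an extra precaution, not something the question asked about):

resp = requests.get(url, verify=False)
data = json.loads(resp.text)   # .text is an attribute, not a method
print(data["data"]["children"][0]["data"]["id"])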

How to create a net which takes unlabeled "dummy data" as input?

I am currently working my way through caffe/examples/ to learn more about caffe/pycaffe.
In the 02-fine-tuning.ipynb notebook there is a code cell which shows how to create a caffenet that takes unlabeled "dummy data" as input, allowing us to set its input images externally. The notebook can be found here:
https://github.com/BVLC/caffe/blob/master/examples/02-fine-tuning.ipynb
There is a given code cell, which throws an error:
dummy_data = L.DummyData(shape=dict(dim=[1, 3, 227, 227]))
imagenet_net_filename = caffenet(data=dummy_data, train=False)
imagenet_net = caffe.Net(imagenet_net_filename, weights, caffe.TEST)
error:
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-6-9f0ecb4d95e6> in <module>()
1 dummy_data = L.DummyData(shape=dict(dim=[1, 3, 227, 227]))
----> 2 imagenet_net_filename = caffenet(data=dummy_data, train=False)
3 imagenet_net = caffe.Net(imagenet_net_filename, weights, caffe.TEST)
<ipython-input-5-53badbea969e> in caffenet(data, label, train, num_classes, classifier_name, learn_all)
68 # write the net to a temporary file and return its filename
69 with tempfile.NamedTemporaryFile(delete=False) as f:
---> 70 f.write(str(n.to_proto()))
71 return f.name
~/anaconda3/envs/testcaffegpu/lib/python3.6/tempfile.py in func_wrapper(*args, **kwargs)
481 @_functools.wraps(func)
482 def func_wrapper(*args, **kwargs):
--> 483 return func(*args, **kwargs)
484 # Avoid closing the file as long as the wrapper is alive,
485 # see issue #18879.
TypeError: a bytes-like object is required, not 'str'
Does anyone know how to do this right?
tempfile.NamedTemporaryFile() opens a file in binary mode ('w+b') by default. Since you are using Python 3.x, str is not the same type as in Python 2.x, so passing a string to f.write() raises an error because the file expects bytes. Overriding the binary mode avoids this error.
Replace
with tempfile.NamedTemporaryFile(delete=False) as f:
with
with tempfile.NamedTemporaryFile(delete=False, mode='w') as f:
This has been explained in a previous post:
TypeError: 'str' does not support the buffer interface
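A tiny standalone illustration of the fix, with a plain string standing in for str(n.to_proto()) so it runs without caffe installed:

import tempfile

proto_text = "name: 'dummy_net'"   # placeholder for str(n.to_proto())

# text mode ('w') makes f.write() accept str; alternatively keep the default
# binary mode and write str(n.to_proto()).encode() instead
with tempfile.NamedTemporaryFile(delete=False, mode='w') as f:
    f.write(proto_text)
    filename = f.name

print(filename)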

Error loading JSON using Topsy

When I load a single record the JSON is parsed just fine; when I try to load multiple records I get this error. Sorry, I am new to Python. http://tny.cz/ce1baaba
Traceback (most recent call last):
File "TweetGraber.py", line 26, in <module>
get_tweets_by_query(topic)
File "TweetGraber.py", line 15, in get_tweets_by_query
json_tree = json.loads(source)
File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/json/__init__.py", line 338, in loads
return _default_decoder.decode(s)
File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/json/decoder.py", line 368, in decode
raise ValueError(errmsg("Extra data", s, end, len(s)))
ValueError: Extra data: line 2 column 1 - line 11 column 1 (char 2380 - 46974)
Here is my code
def get_tweets_by_query(query, count=10):
    """A function that gets the tweets by a query."""
    Tweets = []
    queryEncoded = urllib.quote(query)
    api_key = "xxxxx"
    source = urllib.urlopen("http://api.topsy.com/v2/content/bulktweets.json?q=%s&type=tweet&offset=0&perpage=%s&window=realtime&apikey=%s" % (queryEncoded, count, api_key)).read()
    json_tree = json.loads(source)
    pprint(json_tree)

topic = raw_input("Please enter a topic: ")
get_tweets_by_query(topic)
Thanks Timusan, I was able to correct my JSON. The problem with the original was that it was missing the root element "[", which shows we are expecting an array, and the "," was missing after the end of each object. So here is the fixed code:
def get_tweets_by_query(query, count=10):
    """A function that gets the tweets by a query."""
    Tweets = []
    queryEncoded = urllib.quote(query)
    api_key = "xx"
    source = urllib.urlopen("http://api.topsy.com/v2/content/bulktweets.json?q=%s&type=tweet&offset=0&perpage=%s&window=realtime&apikey=%s" % (queryEncoded, count, api_key)).read()
    source = "[" + source + "]"
    source = source.replace("}\n{", "},{")
    json_tree = json.loads(source)
    pprint(json_tree)
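If the API really returns one JSON object per line (newline-delimited JSON), an alternative sketch that avoids rewriting the raw text is to decode each line separately:

# assumption: `source` holds one complete JSON object per non-empty line
json_tree = [json.loads(line) for line in source.splitlines() if line.strip()]
pprint(json_tree)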