Httpc Error Erlang - json

I am starting development with Erlang and need to make a REST HTTP call to a server, sending JSON and receiving a JSON confirmation.
Here is the code:
Method = put,
URL = "http://api.teste.com:8080/v1/user_auth",
Header = [],
Type = "application/json",
Json = <<"{ \"data\" : { \"test-one\" : \"123\", \"test-two\" : \"return test 2\" } }">>,
HTTPOptions = [],
Options = [],
application:start(ssl),
application:start(inets),
httpc:request(Method, {URL, Header, Type, Json}, HTTPOptions, Options).
When I run this code, I get the following error:
=ERROR REPORT==== 5-Dec-2015::14:21:01 ===
Error in process <0.161.0> on node 'middleware#127.0.0.1' with exit value:
{[{reason,undef},
{mfa,{user_account_handler,handle_post,2}},
{stacktrace,[{httpc,request,
[put,
{"http://api.teste.com:8080/v1/user_auth",[],
"application/json",
<<"{ \"data\" : { \"test-one\" : \"123\", \"test-two\" : \"return test 2\" } }">>},
[],[]],
[]},
{cowboy_rest,call,3,[{file,"src/cowboy_rest.erl"},{line,972}]},
{cowboy_rest,process_content_type,3,
[{file,"src/cowboy_rest.erl"},{line,773}]},
{cowboy_protocol,execute,4,
[{file,"src/cowboy_protocol.erl"},
{line,442}]}]},
{req,[{socket,#Port<0.479>},
{transport,ranch_tcp},
{connection,keepalive},
{pid,<0.161.0>},
{method,<<"POST">>},
{version,'HTTP/1.1'},
{peer,{{127,0,0,1},49895}},
{host,<<"localhost">>},
{host_info,undefined},
{port,8080},
{path,<<"/v1/create_user_account">>},
{path_info,undefined},
{qs,<<>>},
{qs_vals,undefined},
{bindings,[]},
{headers,[{<<"host">>,<<"localhost:8080">>},
{<<"connection">>,<<"keep-alive">>},
{<<"content-length">>,<<"58">>},
{<<"cache-control">>,<<"no-cache">>},
{<<"origin">>,
<<"chrome-extension://fhbjgbiflinjbdggehcddcbncdddomop">>},
{<<"content-type">>,<<"application/json">>},
{<<"user-agent">>,
<<"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.73 Safari/537.36">>},
{<<"postman-token">>,
<<"2dc302f2-7c93-b9f9-2143-cff41bfeb45a">>},
{<<"accept">>,<<"*/*">>},
{<<"accept-encoding">>,<<"gzip, deflate">>},
{<<"accept-language">>,
<<"en-US,en;q=0.8,es;q=0.6,pt;q=0.4">>}]},
{p_headers,[{<<"content-type">>,{<<"application">>,<<"json">>,[]}},
{<<"if-modified-since">>,undefined},
{<<"if-none-match">>,undefined},
{<<"if-unmodified-since">>,undefined},
{<<"if-match">>,undefined},
{<<"accept">>,[{{<<"*">>,<<"*">>,[]},1000,[]}]},
{<<"connection">>,[<<"keep-alive">>]}]},
{cookies,undefined},
{meta,[{media_type,{<<"application">>,<<"json">>,[]}},
{charset,undefined}]},
{body_state,waiting},
{buffer,<<"{\n \"username\":\"igor#gmail.com\"\n , \"password\":\"123\"\n}">>},
{multipart,undefined},
{resp_compress,false},
{resp_state,waiting},
{resp_headers,[{<<"content-type">>,
[<<"application">>,<<"/">>,<<"json">>,<<>>]}]},
{resp_body,<<>>},
{onresponse,undefined}]},
{state,undefined}],
[{cowboy_rest,process_content_type,3,
[{file,"src/cowboy_rest.erl"},{line,773}]},
{cowboy_protocol,execute,4,[{file,"src/cowboy_protocol.erl"},{line,442}]}]}

Related

JSON parsing of event parameter in AWS Lambda extracts undefined

This was working, and suddenly fails when extracting the "event.body" JSON object passed into this AWS Lambda Node.js function:
exports.handler = function (event, context, callback) {
    console.log('Event: ' + JSON.stringify(event));
    console.log('Event.Body: ' + event.body);
    //console.log('Parsed Event: ' + JSON.parse(event));
    let body = event.body;
    console.log('Body: ' + body);
    const tgQueryName = body.queryName;
    const tgQueryParams = body.queryParams;
    console.log('tgQueryName: ' + tgQueryName);
    console.log('tgQueryParams: ' + tgQueryParams);
    ...
Both tgQueryName and tgQueryParams are 'undefined' - see CloudWatch log:
INFO Event: {"version":"2.0","routeKey":"POST /tg-query","rawPath":"/dev/tg-query","rawQueryString":"","headers":{"accept":"application/json, text/plain, */*","accept-encoding":"gzip, deflate","accept-language":"he-IL,he;q=0.9,en-US;q=0.8,en;q=0.7","cache-control":"no-cache","content-length":"51","content-type":"application/json; charset=UTF-8","host":"p6ilp2ts0g.execute-api.us-east-1.amazonaws.com","origin":"http://localhost","referer":"http://localhost/","sec-fetch-dest":"empty","sec-fetch-mode":"cors","sec-fetch-site":"cross-site","user-agent":"Mozilla/5.0 (Linux; Android 11; Redmi Note 8 Build/RKQ1.201004.002; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/101.0.4951.61 Mobile Safari/537.36","x-amzn-trace-id":"Root=1-629b960c-072e8fa475ad26f56893c6f9","x-forwarded-for":"89.139.32.60","x-forwarded-port":"443","x-forwarded-proto":"https","x-requested-with":"com.skillblaster.simplify.dev"},"requestContext":{"accountId":"140360121027","apiId":"p6ilp2ts0g","domainName":"p6ilp2ts0g.execute-api.us-east-1.amazonaws.com","domainPrefix":"p6ilp2ts0g","http":{"method":"POST","path":"/dev/tg-query","protocol":"HTTP/1.1","sourceIp":"89.139.32.60","userAgent":"Mozilla/5.0 (Linux; Android 11; Redmi Note 8 Build/RKQ1.201004.002; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/101.0.4951.61 Mobile Safari/537.36"},"requestId":"TNRh_gq-oAMESEw=","routeKey":"POST /tg-query","stage":"dev","time":"04/Jun/2022:17:27:40 +0000","timeEpoch":1654363660597},"body":"{\"queryName\":\"getActiveCountries\",\"queryParams\":{}}","isBase64Encoded":false}
INFO Event.Body: {"queryName":"getActiveCountries","queryParams":{}}
INFO Body: {"queryName":"getActiveCountries","queryParams":{}}
INFO tgQueryName: undefined
INFO tgQueryParams: undefined
I also tried: body["queryName"] - same result.
What am I missing?
Your body content is a string and you need to JSON.parse it:
let body = JSON.parse(event.body);
It only became clear when I pasted your initial event JSON into a JSON beautifier.

How to send a JSON message body in AWS SNS using the console

I am doing a hands-on exercise where I want to add an SNS trigger to a Lambda function which then sends a message to a Slack channel. There is a blueprint for this Lambda in Python, and also a template test event which looks like the following:
{
  "Records": [
    {
      "EventVersion": "1.0",
      "EventSubscriptionArn": "arn:aws:sns:EXAMPLE",
      "EventSource": "aws:sns",
      "Sns": {
        "SignatureVersion": "1",
        "Timestamp": "1970-01-01T00:00:00.000Z",
        "Signature": "EXAMPLE",
        "SigningCertUrl": "EXAMPLE",
        "MessageId": "12345",
        "Message": {
          "AlarmName": "SlackAlarm",
          "NewStateValue": "OK",
          "NewStateReason": "Threshold Crossed: 1 datapoint (0.0) was not greater than or equal to the threshold (1.0)."
        },
        "MessageAttributes": {
          "Test": {
            "Type": "String",
            "Value": "TestString"
          },
          "TestBinary": {
            "Type": "Binary",
            "Value": "TestBinary"
          }
        },
        "Type": "Notification",
        "UnsubscribeUrl": "EXAMPLE",
        "TopicArn": "arn:aws:sns:EXAMPLE",
        "Subject": "TestInvoke"
      }
    }
  ]
}
The code in the Lambda handler from the blueprint is as follows:
import boto3
import json
import logging
import os

from base64 import b64decode
from urllib.request import Request, urlopen
from urllib.error import URLError, HTTPError

HOOK_URL = os.environ['kmsEncryptedHookUrl']
SLACK_CHANNEL = os.environ['slackChannel']

logger = logging.getLogger()
logger.setLevel(logging.INFO)

def lambda_handler(event, context):
    logger.info("Event: " + str(event))
    message = event['Records'][0]['Sns']['Message']
    logger.info("Message: " + str(message))

    alarm_name = message['AlarmName']
    new_state = message['NewStateValue']
    reason = message['NewStateReason']

    slack_message = {
        'channel': SLACK_CHANNEL,
        'text': "%s state is now %s: %s" % (alarm_name, new_state, reason)
    }

    req = Request(HOOK_URL, json.dumps(slack_message).encode('utf-8'))
    try:
        response = urlopen(req)
        response.read()
        logger.info("Message posted to %s", slack_message['channel'])
    except HTTPError as e:
        logger.error("Request failed: %d %s", e.code, e.reason)
    except URLError as e:
        logger.error("Server connection failed: %s", e.reason)
When I run the test event, the Lambda runs successfully.
I wanted to publish a message to the SNS topic from the console to see if the Lambda is triggered correctly. But when I try to publish the JSON object as a message body, I get the following error:
[ERROR] TypeError: string indices must be integers
Traceback (most recent call last):
  File "/var/task/lambda_function.py", line 21, in lambda_handler
    alarm_name = message['AlarmName']
I tried giving plain JSON:
{
  "AlarmName": "PublishedAlarm",
  "NewStateValue": "OK",
  "NewStateReason": "This alarm is published"
}
I tried giving stringified JSON:
"{\"AlarmName\": \"PublishedAlarm\",\"NewStateValue\": \"OK\",\"NewStateReason\": \"This alarm is published\"}"
I tried choosing the 'Custom payload for each delivery protocol' message structure and then gave the following message body:
{
  "default": "Sample fallback message",
  "email": "Sample message for email endpoints",
  "sqs": "Sample message for Amazon SQS endpoints",
  "lambda": "{\"AlarmName\": \"PublishedAlarm\",\"NewStateValue\": \"OK\",\"NewStateReason\": \"This alarm is published\"}",
  "http": "Sample message for HTTP endpoints",
  "https": "Sample message for HTTPS endpoints",
  "sms": "Sample message for SMS endpoints",
  "firehose": "Sample message for Amazon Kinesis Data Firehose endpoints",
  "APNS": "{\"aps\":{\"alert\": \"Sample message for iOS endpoints\"} }",
  "APNS_SANDBOX": "{\"aps\":{\"alert\":\"Sample message for iOS development endpoints\"}}",
  "APNS_VOIP": "{\"aps\":{\"alert\":\"Sample message for Apple VoIP endpoints\"}}",
  "APNS_VOIP_SANDBOX": "{\"aps\":{\"alert\": \"Sample message for Apple VoIP development endpoints\"} }",
  "MACOS": "{\"aps\":{\"alert\":\"Sample message for MacOS endpoints\"}}",
  "MACOS_SANDBOX": "{\"aps\":{\"alert\": \"Sample message for MacOS development endpoints\"} }",
  "GCM": "{ \"data\": { \"message\": \"Sample message for Android endpoints\" } }",
  "ADM": "{ \"data\": { \"message\": \"Sample message for FireOS endpoints\" } }",
  "BAIDU": "{\"title\":\"Sample message title\",\"description\":\"Sample message for Baidu endpoints\"}",
  "MPNS": "<?xml version=\"1.0\" encoding=\"utf-8\"?><wp:Notification xmlns:wp=\"WPNotification\"><wp:Tile><wp:Count>ENTER COUNT</wp:Count><wp:Title>Sample message for Windows Phone 7+ endpoints</wp:Title></wp:Tile></wp:Notification>",
  "WNS": "<badge version=\"1\" value=\"42\"/>"
}
Nothing worked. I've also subscribed an email address to the topic and I'm getting emails without any issues.
How can I simulate the test event given in the Lambda event templates from SNS?
When you send your plain JSON message using SNS, it is delivered to the Lambda in the format:
'Message': '{\n "AlarmName": "PublishedAlarm",\n "NewStateValue": "OK",\n "NewStateReason": "This alarm is published"\n}'
You can parse it using the ast module's literal_eval method:
import ast
#...
#...

def lambda_handler(event, context):
    logger.info("Event: " + str(event))
    message = event['Records'][0]['Sns']['Message']
    logger.info("Message: " + str(message))
    message = ast.literal_eval(event['Records'][0]['Sns']['Message'])
    alarm_name = message['AlarmName']
    new_state = message['NewStateValue']
    reason = message['NewStateReason']
    #...
    #...
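Since the delivered Message string is also valid JSON, json.loads from the standard library should work just as well; a minimal sketch of that variant (the body of the handler is trimmed down and only illustrates the parsing step):

import json

def lambda_handler(event, context):
    # The SNS message arrives as a JSON string, not a dict
    raw_message = event['Records'][0]['Sns']['Message']
    message = json.loads(raw_message)  # parse the string into a dict

    alarm_name = message['AlarmName']
    new_state = message['NewStateValue']
    reason = message['NewStateReason']
    return "%s state is now %s: %s" % (alarm_name, new_state, reason)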

How to handle an HTTP JSON POST response in MicroPython? (ESP8266)

I'm an absolute beginner with GET/POST requests and MicroPython.
I'm programming my ESP8266 Wemos D1 mini as an HTTP server with MicroPython. My project consists of using a website to control the RGB values of a NeoPixel matrix hooked up to the D1 (all the code is on my GitHub here: https://github.com/julien123123/NeoLamp-Micro).
Basically, the website contains three sliders: one for Red, one for Green and one for Blue. JavaScript code reads the value of each slider and sends it to the MicroPython code using the POST method as follows:
getColors = function() {
    var rgb = new Array(slider1.value, slider2.value, slider3.value);
    return rgb;
};

postColors = function(rgb) {
    var xmlhttp = new XMLHttpRequest();
    var npxJSON = '{"R":' + rgb[0] + ', "G":' + rgb[1] + ', "B":' + rgb[2] + '}';
    xmlhttp.open('POST', 'http://' + window.location.hostname + '/npx', true);
    xmlhttp.setRequestHeader('Content-type', 'application/json');
    xmlhttp.send(npxJSON);
};
To receive the request in MicroPython, here's my code:
conn, addr = s.accept()
request = conn.recv(1024)
request = str(request)
print(request)
The request prints as follows:
b'POST /npx HTTP/1.1\r\nHost: 192.xxx.xxx.xxx\r\nConnection: keep-alive\r\nContent-Length: 27\r\nOrigin: http://192.168.0.110\r\nUser-Agent: Mozilla/5.0 (X11; CrOS x86_64 10323.46.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.107 Safari/537.36\r\nContent-type: application/json\r\nAccept: */*\r\nReferer: http://192.xxx.xxx.xxx/\r\nAccept-Encoding: gzip, deflate\r\nAccept-Language: fr,en;q=0.9,fr-CA;q=0.8\r\n\r\n{"R":114, "G":120, "B":236}'
The only important bit for me is at the end: {"R":114, "G":120, "B":236}. I want to use those values to change the color values of my NeoPixel object.
My question is: how do I process the request so that I keep only the dictionary containing the RGB values at the end?
Thanks in advance (I'm almost there!)
This is more related to generic Python data types. The request is of type bytes, as indicated by the b prefix in b'POST /npx HTTP/1.1...\r\n{"R":114, "G":120, "B":236}'. You will have to use decode() to convert it to a str:
import json

request = b'POST /npx HTTP/1.1\r\nHost: 192.xxx.xxx.xxx\r\nConnection: keep-alive\r\nContent-Length: 27\r\nOrigin: http://192.168.0.110\r\nUser-Agent: Mozilla/5.0 (X11; CrOS x86_64 10323.46.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.107 Safari/537.36\r\nContent-type: application/json\r\nAccept: */*\r\nReferer: http://192.xxx.xxx.xxx/\r\nAccept-Encoding: gzip, deflate\r\nAccept-Language: fr,en;q=0.9,fr-CA;q=0.8\r\n\r\n{"R":114, "G":120, "B":236}'

data = request.decode()          # convert bytes to str
rgb = data.split('\r\n')[-1:]    # split the str and keep only the last line (the JSON body)
for color in rgb:
    print(color, type(color))
    d = json.loads(color)
    print(d, type(d))
The resulting color is a str representation of a JSON object, and d gives you a Python dict object to use for further manipulation:
{"R":114, "G":120, "B":236} <class 'str'>
{'R': 114, 'G': 120, 'B': 236} <class 'dict'>
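A slightly more direct variant (a sketch, not from the original answer) is to split on the blank line that separates the HTTP headers from the body, so the parsing does not depend on the body being the last '\r\n'-delimited chunk:

import json  # available as ujson on some MicroPython ports

def extract_body(request):
    # hypothetical helper for this sketch:
    # headers and body are separated by an empty line (CRLF CRLF)
    body = request.decode().split('\r\n\r\n', 1)[1]
    return json.loads(body)

colors = extract_body(request)   # e.g. {'R': 114, 'G': 120, 'B': 236}
print(colors['R'], colors['G'], colors['B'])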

Weird response from API

For learning purposes I'm trying to reproduce Instagram's internal API with Ruby and Faraday. However, the response body I get when making a POST is somehow encoded instead of being JSON.
What the response body should look like:
{
  "status": "ok",
  "media": {
    "page_info": {
      "start_cursor": "1447303180937779444_4460593680",
      "has_next_page": true,
      "end_cursor": "1447303180937779444",
      "has_previous_page": true
    },
    ...
What I get:
#=> \x1F\x8B\b\x00#\x15\x9EX\x02\xFF...
Question:
Any idea (i) why I'm getting a response body like that, and (ii) how I can convert it to JSON?
Flow:
When you hit https://www.instagram.com/explore/locations/127963847/madrid-spain/ in your browser Instagram makes two requests (among others):
GET: https://www.instagram.com/explore/locations/127963847/madrid-spain/
POST: https://www.instagram.com/query/
I used Postman to intercept the requests and just copied the headers and parameters for the second (/query/) request. This is my implementation (it gets status '200'):
class IcTest
  require 'open-uri'
  require "net/http"
  require "uri"

  def self.faraday
    conn = Faraday.new(:url => 'https://www.instagram.com') do |faraday|
      faraday.request :url_encoded            # form-encode POST params
      faraday.response :logger                # log requests to STDOUT
      faraday.adapter Faraday.default_adapter # make requests with Net::HTTP
    end

    res = conn.post do |req|
      req.url '/query/'
      req.headers['Origin'] = 'https://www.instagram.com'
      req.headers['X-Instagram-AJAX'] = '1'
      req.headers['User-Agent'] = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36'
      req.headers['Content-Type'] = 'application/x-www-form-urlencoded'
      # req.headers['Accept'] = '*/*'
      req.headers['X-Requested-With'] = 'XMLHttpRequest'
      req.headers['X-CSRFToken'] = 'SrxvROytxQHAesy1XcgcM2PWrEHHuQnD'
      req.headers['Referer'] = 'https://www.instagram.com/explore/locations/127963847/madrid-spain/'
      req.headers['Accept-Encoding'] = 'gzip, deflate, br'
      req.headers['Accept-Language'] = 'es,en;q=0.8,gl;q=0.6,pt;q=0.4,pl;q=0.2'
      req.headers['Cookie'] = 'mid=SJt50gAEAAE6KZ50GByVoStJKLUH; sessionid=IGSC514a2e9015f548b09176228f83ad5fe716f32e7143f6fe710c19a71c08b9828b%3Apc2KPxgwvokLyZhfZHcO1Qzfb2mpykG8%3A%7B%22_token%22%3A%2233263701%3Ai7HSIbxIMLj70AoUrCRjd0o1g7egHg79%3Acde5fe679ed6d86011d70b7291901998b8aae7d0aaaccdf02a2c5abeeaeb5908%22%2C%22asns%22%3A%7B%2283.34.38.249%22%3A3352%2C%22time%22%3A1486584547%7D%2C%22last_refreshed%22%3A1436584547.2838287%2C%22_platform%22%3A4%2C%22_token_ver%22%3A2%2C%22_auth_user_backend%22%3A%22accounts.backends.CaseInsensitiveModelBackend%22%2C%22_auth_user_id%22%3A33233701%2C%22_auth_user_hash%22%3A%22%22%7D; ds_user_id=31263701; csrftoken=sxvROytxQHAesy1XcgcM2PWrEHHuQnD; s_network=""; ig_vw=1440; ig_pr=2;'
      req.body = { :q => "ig_location(127963847) { media.after('', 60) { count, nodes { caption, code, comments { count }, comments_disabled, date, dimensions { height, width }, display_src, id, is_video, likes { count }, owner { id }, thumbnail_src, video_views }, page_info} }",
                   :ref => "locations::show",
                   :query_id => "" }
    end
  end
end
Thanks.
Josh's comment made it! :-)
The body's content was gzipped.
Solution here.
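For reference, the leading bytes \x1F\x8B in the dump above are the gzip magic number. As an illustration of the decompression step only (shown in Python rather than the Ruby above; raw_body is assumed to hold the compressed response bytes):

import gzip
import json

# raw_body: gzip-compressed response bytes, starting with b'\x1f\x8b'
data = json.loads(gzip.decompress(raw_body))
print(data['status'])   # e.g. "ok"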

Logstash json filter for only one level

I use the following filter configuration:
filter {
  if [type] == "client-log" {
    grok {
      match => { "message" => "%{COMBINEDAPACHELOG}" }
    }
    urldecode {
      field => "request"
    }
    mutate {
      gsub => [ "request", '/log/', '' ]
    }
    json {
      source => "request"
    }
  }
}
It works fine when request is a one-level JSON object. If it has more than one level, Logstash gets a parse error. Any advice?
request example:
{
  session_id: "123",
  message: {
    id: 1221
    //....
  }
  //....
}
error:
{:timestamp=>"2016-09-07T15:53:34.712000+0100", :message=>"Error parsing json", :source=>"request", :raw=>"{\"session_id\":\"8d078da0-74f9-11e6-8d31-6925e76dde0e\",\"level\":\"Debug\",\"methodName\":\"fetchExternalData\",\"class\":\"fetchExternalData\",\"lineNumber\":78,\"message\":\"{\"0\":\"fetchExternalData doFetch assets \",\"1\":\"http://lab6services:8080/alerts\",\"2\":{\"method\":\"post\",\"headers\":{},\"body\":\"{\\\"results\\\":{\\\"types\\\":[\\\"alerts\\\"],\\\"format\\\":\\\"table\\\"},\\\"filter\\\":{\\\"title\\\":\\\"new alerts\\\",\\\"filterType\\\":\\\"PROPERTY\\\",\\\"operator\\\":\\\"range\\\",\\\"field\\\":\\\"createdAt\\\",\\\"type\\\":\\\"alert\\\",\\\"values\\\":[{\\\"value\\\":\\\"07/09/2016 15:49:44\\\"},{\\\"value\\\":\\\"07/09/2016 15:51:44\\\"}]},\\\"aggregate\\\":null}\"}}\",\"version\":\"1.2.0\",\"user\":\"user\",\"timestamp\":\"2016-09-07T12:51:44.953Z\"}", :exception=>#<LogStash::Json::ParserError: Unexpected character ('0' (code 48)): was expecting comma to separate OBJECT entries
at [Source: [B#51b925ff; line: 1, column: 161]>, :level=>:warn}
log line:
127.8.4.1 - - [07/Sep/2016:15:54:07 +0100] "GET /log/%7B%22session_id%22:%228d078da0-74f9-11e6-8d31-6925e76dde0e%22,%22level%22:%22Debug%22,%22methodName%22:%22fetchExternalData%22,%22class%22:%22fetchExternalData%22,%22lineNumber%22:78,%22message%22:%22%7B%220%22:%22fetchExternalData%20doFetch%20assets%20%22,%221%22:%22http://lab6services:8080/alerts%22,%222%22:%7B%22method%22:%22post%22,%22headers%22:%7B%7D,%22body%22:%22%7B%5C%22results%5C%22:%7B%5C%22types%5C%22:%5B%5C%22alerts%5C%22%5D,%5C%22format%5C%22:%5C%22table%5C%22%7D,%5C%22filter%5C%22:%7B%5C%22title%5C%22:%5C%22new%20alerts%5C%22,%5C%22filterType%5C%22:%5C%22PROPERTY%5C%22,%5C%22operator%5C%22:%5C%22range%5C%22,%5C%22field%5C%22:%5C%22createdAt%5C%22,%5C%22type%5C%22:%5C%22alert%5C%22,%5C%22values%5C%22:%5B%7B%5C%22value%5C%22:%5C%2207/09/2016%2015:49:44%5C%22%7D,%7B%5C%22value%5C%22:%5C%2207/09/2016%2015:52:22%5C%22%7D%5D%7D,%5C%22aggregate%5C%22:null%7D%22%7D%7D%22,%22version%22:%221.2.0%22,%22user%22:%22user%22,%22timestamp%22:%222016-09-07T12:52:22.928Z%22%7D HTTP/1.1" 200 0 "http://localhost:3000/main.worker.js" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36"
--- EDIT ---
I want to parse only the first level of 'request'. How can I prevent the filter from parsing any nested JSON elements?