Unable to resolve the Azure Storage connection named 'Storage' - Azure durable functions - json

My Project
package.json
{
"name": "azure-functions",
"version": "1.0.0",
"description": "",
"scripts": {
"start": "func start",
"test": "echo \"No tests yet...\""
},
"dependencies": {
"durable-functions": "^2.0.2"
}
}
host.json
{
"version": "2.0",
"logging": {
"applicationInsights": {
"samplingSettings": {
"isEnabled": true,
"excludedTypes": "Request"
}
}
},
"extensionBundle": {
"id": "Microsoft.Azure.Functions.ExtensionBundle",
"version": "[3.*, 4.0.0)"
}
}
DurableFunctionsHttpStart/function.json
{
"bindings": [
{
"authLevel": "anonymous",
"name": "req",
"type": "httpTrigger",
"direction": "in",
"route": "orchestrators/{functionName}",
"methods": [
"post",
"get"
]
},
{
"name": "$return",
"type": "http",
"direction": "out"
},
{
"name": "starter",
"type": "orchestrationClient",
"direction": "in"
}
]
}
DurableFunctionsHttpStart/index.js
const df = require("durable-functions");
module.exports = async function (context, req) {
const client = df.getClient(context);
const instanceId = await client.startNew(req.params.functionName, undefined, req.body);
context.log(`Started orchestration with ID = '${instanceId}'.`);
return client.createCheckStatusResponse(context.bindingData.req, instanceId);
};
Hello/function.json
{
"bindings": [
{
"name": "name",
"type": "activityTrigger",
"direction": "in"
}
]
}
Hello/index.js
/*
* This function is not intended to be invoked directly. Instead it will be
* triggered by an orchestrator function.
*
* Before running this sample, please:
* - create a Durable orchestration function
* - create a Durable HTTP starter function
* - run 'npm install durable-functions' from the wwwroot folder of your
* function app in Kudu
*/
module.exports = async function (context) {
return `Hello ${context.bindings.name}!`;
};
HelloOrchestrator/function.json
{
"bindings": [
{
"name": "context",
"type": "orchestrationTrigger",
"direction": "in"
}
]
}
HelloOrchestrator/index.js
/*
* This function is not intended to be invoked directly. Instead it will be
* triggered by an HTTP starter function.
*
* Before running this sample, please:
* - create a Durable activity function (default name is "Hello")
* - create a Durable HTTP starter function
* - run 'npm install durable-functions' from the wwwroot folder of your
* function app in Kudu
*/
const df = require("durable-functions");
module.exports = df.orchestrator(function* (context) {
const outputs = [];
// Replace "Hello" with the name of your Durable Activity Function.
outputs.push(yield context.df.callActivity("Hello", "Tokyo"));
outputs.push(yield context.df.callActivity("Hello", "Seattle"));
outputs.push(yield context.df.callActivity("Hello", "London"));
// returns ["Hello Tokyo!", "Hello Seattle!", "Hello London!"]
return outputs;
});
Upon running npm start at the root of the project, I am getting the following error:
Azure Functions Core Tools
Core Tools Version: 4.0.4736 Commit hash: N/A (64-bit)
Function Runtime Version: 4.8.1.18957
[2022-09-05T11:52:51.483Z] A host error has occurred during startup operation '5dd1dd91-e64a-4866-......'.
[2022-09-05T11:52:51.483Z] Microsoft.Azure.WebJobs.Extensions.DurableTask: Unable to resolve the Azure Storage connection named 'Storage'.
Value cannot be null. (Parameter 'provider')
What could be the reason? I followed this tutorial: https://learn.microsoft.com/en-us/azure/azure-functions/durable/quickstart-js-vscode
I didn't get any prompt to select an Azure account as mentioned here: https://learn.microsoft.com/en-us/azure/azure-functions/durable/quickstart-js-vscode#test-the-function-locally

That exception suggests that the runtime cannot find the value of AzureWebJobsStorage. You should have a local.settings.json file in your project that looks something like this:
{
"IsEncrypted": false,
"Values": {
"AzureWebJobsStorage": "DefaultEndpointsProtocol=https;AccountName=....",
"FUNCTIONS_WORKER_RUNTIME": "node"
}
}
The value of AzureWebJobsStorage should be set to the Azure Storage connection string that the Functions runtime requires.
See: App settings reference for Azure Functions
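If you are only running locally, a common alternative (an assumption about your setup, not something shown in the question) is to point the runtime at the Azurite storage emulator instead of a real account:
{
  "IsEncrypted": false,
  "Values": {
    "AzureWebJobsStorage": "UseDevelopmentStorage=true",
    "FUNCTIONS_WORKER_RUNTIME": "node"
  }
}
UseDevelopmentStorage=true is the shorthand connection string the runtime resolves to the local emulator; Durable Functions needs this connection for its task hub state, which is why the host fails to start without it.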

Related

FluxMonitor locally: FROM address in transaction is wrong

I'm trying to run decentralized-model locally. I've managed to deploy:
Link contract
AggregatorProxy
FluxAggregator
Consumer contract
Oracle node (offchain)
External adapters (coingecko + coinapi)
I'm mainly struggling with the last piece, which is creating a Job that uses the FluxMonitor initiator.
I've created the following job, where "0x5379A65A620aEb405C5C5338bA1767AcB48d6750" is the address of the FluxAggregator contract:
{
"initiators": [
{
"type": "fluxmonitor",
"params": {
"address": "0x5379A65A620aEb405C5C5338bA1767AcB48d6750",
"requestData": {
"data": {
"from": "ETH",
"to": "USD"
}
},
"feeds": [
{
"bridge": "coinapi_cl_ea"
},
{
"bridge": "coingecko_cl_ea"
}
],
"threshold": 1,
"absoluteThreshold": 1,
"precision": 8,
"pollTimer": {
"period": "15m0s"
},
"idleTimer": {
"duration": "1h0m0s"
}
}
}
],
"tasks": [
{
"type": "NoOp"
}
]
}
Unfortunately, it doesn't work; it makes my local Ganache fail with this error: "Error: The nonce generation function failed, or the private key was invalid".
I've put my Ganache in debug mode in order to log requests to the blockchain and noticed the following call:
eth_call
{
"jsonrpc": "2.0",
"id": 28,
"method": "eth_call",
"params": [
{
"data": "0xfeaf968c",
"from": "0x0000000000000000000000000000000000000000",
"to": "0x5379a65a620aeb405c5c5338ba1767acb48d6750"
},
"latest"
]
}
The signature of the function is correct:
"latestRoundData()": "feaf968c"
However, what seems weird is that the from address is "0x0". Any idea why my Oracle node doesn't use its key to sign the transaction?
Thanks a lot.
The problem comes from Ganache. In fact, I wrote a Truffle script (sketched below) which:
calls "latestRoundData()" populating the "FROM" with a valid address
calls "latestRoundData()" populating the "FROM" with a 0x0 address
Then I ran the script 2 times:
Connecting to Ganache-cli --> 1st call is successful while the 2nd call fails
Connecting to Kovan testnet --> both calls are successful
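A minimal version of that comparison script, run with truffle exec (a sketch only; it assumes the FluxAggregator ABI is available as a Truffle artifact, and the contract address is the one from the question):
const FluxAggregator = artifacts.require("FluxAggregator");

module.exports = async function (callback) {
  try {
    const aggregator = await FluxAggregator.at("0x5379A65A620aEb405C5C5338bA1767AcB48d6750");
    const accounts = await web3.eth.getAccounts();

    // 1st call: FROM is a real, funded account
    console.log(await aggregator.latestRoundData({ from: accounts[0] }));

    // 2nd call: FROM is the zero address (the call ganache-cli rejects)
    console.log(await aggregator.latestRoundData({ from: "0x0000000000000000000000000000000000000000" }));
  } catch (err) {
    console.error(err);
  }
  callback();
};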
I've just opened an issue for ganache-cli team: https://github.com/trufflesuite/ganache-cli/issues/840

Using a local Open API Standard file to create an ARM template for a web service

I am working on an old web service where I generate the REST endpoint documentation that complies with the OAS standard using a custom tool. Using this OAS JSON file I can deploy the API to Azure API Management services through the portal and it all works fine. However, I need to automate this process and hence need to use ARM templates to deploy all web services to Azure APIM. I have been looking into the examples provided at https://learn.microsoft.com/en-us/azure/templates/microsoft.apimanagement/service/apis but just can't seem to wrap my head around how to use a local OAS.json file or a file in GitHub.
{
"$schema": "http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"location": {
"type": "string",
"defaultValue": "[resourceGroup().location]",
"metadata": {
"description": "Location for all resources."
}
}
},
"variables": {
"apiManagementServiceName": "price-capture"
},
"resources": [
{
"apiVersion": "2018-01-01",
"type": "Microsoft.ApiManagement/service/apis",
"name": "[variables('apiManagementServiceName')]",
"properties": {
"displayName": "Service display Name",
"apiRevision": "1",
"description": "API description",
//need help since it's not a swagger url
//wondering if there is a way to ref a local file like the option
//provided in the portal when we register api's manually.
"serviceUrl": "----",
"path": "----",
"protocols": [
"https"
],
"isCurrent": true,
"apiVersion": "v1",
"apiVersionDescription": "apiVersionDescription"
}
}
]
}
You can deploy and configure an entire API on API Management via ARM templates, but you cannot use a local file to provide the OpenApi/Swagger.
In your case the OpenApi/Swagger needs to be publicly accessible so the resource manager can read from it, so if the GitHub URL is freely accessible it should work.
I typically store the OpenApi/Swagger in a storage account and use a SAS token to access it from the ARM template.
You can check out this blog for details on automating API deployment in APIM:
https://blog.eldert.net/api-management-ci-cd-using-arm-templates-linked-template/
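In template form that approach looks roughly like this (a sketch only; the spec URI and SAS token parameters are placeholders, and the answers below show complete variants):
{
  "type": "Microsoft.ApiManagement/service/apis",
  "apiVersion": "2018-01-01",
  "name": "[concat(parameters('apimServiceName'), '/my-api')]",
  "properties": {
    "path": "my-api",
    "contentFormat": "swagger-link-json",
    "contentValue": "[concat(parameters('specBlobUri'), parameters('specSasToken'))]"
  }
}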
You can deploy the API using an Azure Resource Manager template of type Microsoft.ApiManagement/service/apis, and to use an Open API / Swagger definition you need to specify the contentValue and contentFormat parameters of the template:
{
"name": "awesome-api-management/petstore",
"type": "Microsoft.ApiManagement/service/apis",
"apiVersion": "2018-06-01-preview",
"properties": {
"path": "petstore"
"contentValue": "petstore swagger file contents here", // or it's URL
"contentFormat": "swagger-json", // or swagger-link-json if externally available
}
}
I don't think it's possible to deploy the API configs via templates.
I've been trying to figure this out myself but I'm pretty sure you can't include the actual APIs you want in the service.
From what I can tell, you can't do that with the Git repo either, because that needs authentication that is manually created in the portal.
I think the only thing you can automate with the ARM template is the actual API Management service, and then you need to use the Azure API to add and configure the APIs on it.
However, I have yet to figure out how to do that myself.
I actually have a service ticket open to get help on that.
The API has changed slightly, so this works:
The YAML file (calculatorApiFile) needs to be uploaded to blob storage first, but this can be done as part of the deployment pipeline:
{
"type": "Microsoft.ApiManagement/service/apis",
"apiVersion": "2019-01-01",
"name": "[concat(parameters('service_name'), '/b12b1d5ab8204cg6b695e3e861fdd709')]",
"dependsOn": [
"[resourceId('Microsoft.ApiManagement/service', parameters('service_name'))]"
],
"properties": {
"displayName": "Calculator",
"apiRevision": "1",
"description": "A simple Calculator ",
"path": "calc",
"value": "[concat(parameters('containerUri'), parameters('calculatorApiFile'), parameters('containerSasToken'))]",
"format": "openapi-link",
"protocols": [
"https"
],
"isCurrent": true
}
}
I figured out the answer: all I had to do was write an Azure Function that fetches the oas.yaml file from a private GitHub repository.
"variables":{
"swagger_json":"[concat(parameters('url_of_azurefunctionwithaccesskey'),'&&githuburi='parameter('raw_url'),'&githubaccesstoken=',parameter('personalaccesstoken')]"
},
"resources": [
{
"type": "Microsoft.ApiManagement/service/apis",
"name": "[concat(parameters('apimName') ,'/' ,parameters('serviceName'))]",
"apiVersion": "2018-06-01-preview",
"properties": {
"apiRevision": "[parameters('apiRevision')]",
"path": "pricecapture",
"contentValue": "[variables('swagger_json')]",
"contentFormat": "openapi-link"
}
}]
The Azure function that I had to write was something like this:
#r "Newtonsoft.Json"
using System.Net;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Primitives;
using Newtonsoft.Json;
using System.IO;
using System.Text;
public static async Task<HttpResponseMessage> Run(HttpRequest req, ILogger log)
{
log.LogInformation("C# HTTP trigger function processed a request.");
var gitHubUri = req.Query["githuburi"];
var gitHubAccessToken = req.Query["githubaccesstoken"];
var encoding = Encoding.ASCII;
if (string.IsNullOrEmpty(gitHubUri))
{
var errorcontent = new StringContent("please pass the raw file content URI (raw.githubusercontent.com) in the request URI string", Encoding.ASCII);
return new HttpResponseMessage
{
StatusCode = HttpStatusCode.BadRequest,
Content = errorcontent
};
}
else if (string.IsNullOrEmpty(gitHubAccessToken))
{
var errorcontent = new StringContent("please pass the GitHub personal access token in the request URI string", Encoding.ASCII);
return new HttpResponseMessage
{
StatusCode = HttpStatusCode.BadRequest,
Content = errorcontent
};
}
else
{
var strAuthHeader = "token " + gitHubAccessToken;
var client = new HttpClient();
client.DefaultRequestHeaders.Add("Accept", "application/vnd.github.v3.raw");
client.DefaultRequestHeaders.Add("Authorization", strAuthHeader);
var response = await client.GetAsync(gitHubUri);
return response;
}
}
If you load your YAML into a variable, it can be passed to the ARM template as the value:
deploy.bat:
SETLOCAL EnableDelayedExpansion
set API_DEPLOYMENT=<deployment name>
set API_GROUP=<deployment group>
set API=<api file path.yml>
set OPENAPI=
for /f "delims=" %%x in ('type %API%') do set "OPENAPI=!OPENAPI!%%x\n"
call az deployment group create -n %API_DEPLOYMENT% -g %API_GROUP% --mode Complete -f deploy.json -p openApi="!OPENAPI!"
ENDLOCAL
deploy.json (note the use of replace)
...
{
"type": "Microsoft.ApiManagement/service/apis",
"apiVersion": "2020-12-01",
"name": "[variables('apiName')]",
"properties": {
"path": "[variables('service')]",
"apiType": "http",
"displayName": "[variables('apiDisplayName')]",
"format": "openapi",
"value": "[replace(parameters('openApi'), '\\n', '\n')]"
},
...
},
...

Actions SDK not working

I have updated my index.js as below:
'use strict';
const functions = require('firebase-functions');
const admin = require('firebase-admin');
const {actionssdk} = require('actions-on-google');
const app = actionssdk({debug: true});
exports.dairyProduct = functions.https.onRequest((request, response) => {
console.log("request-------------->",request);
console.log("response----------------->",response);
function handleMainIntent(app) {
console.log("Inside Main Intent");
app.ask("Main Indent "+app.getRawInput());
}
function handleTextIntent() {
console.log("Inside Main Intent");
app.tell("First Text Indent");
}
let app = new ActionsSdkApp({request, response});
let actionMap = new Map();
console.log("app---------->",app);
actionMap.set(app.StandardIntents.MAIN, handleMainIntent)
actionMap.set(app.StandardIntents.TEXT, handleTextIntent);
app.ask("This sample application is developing by Thirumani Selvam.M ");
console.log("actionMap---------->",actionMap);
app.handleRequest(actionMap);
});
My updated action.json
{
"actions": [
{
"description": "Default Welcome Intent",
"name": "MAIN",
"fulfillment": {
"conversationName": "testapp"
},
"intent": {
"name": "actions.intent.MAIN",
"trigger": {
"queryPatterns": [
"Talk to Dairy Product"
]
}
}
}
],
"conversations": {
"testapp": {
"name": "testapp",
"url": "https://us-central1-samplejs6-id.cloudfunctions.net/dairyProduct",
"fulfillmentApiVersion": 2,
"inDialogIntents": [
{
"name": "actions.intent.CANCEL"
}
]
}
},
"locale": "en"
}
My package.json code
{
"name": "functions",
"description": "Cloud Functions for Firebase",
"scripts": {
"lint": "eslint .",
"serve": "firebase serve --only functions",
"shell": "firebase functions:shell",
"start": "npm run shell",
"deploy": "firebase deploy --only functions",
"logs": "firebase functions:log"
},
"dependencies": {
"actions-on-google": "^2.0.1",
"firebase-admin": "~5.12.0",
"firebase-functions": "^1.0.1"
},
"devDependencies": {
"eslint": "^4.12.0",
"eslint-plugin-promise": "^3.6.0"
},
"private": true
}
I have deployed using "firebase deploy --only functions".
I have updated the action using "gactions update --action_package action.json --project samplejs6-id".
I have updated the action test using "gactions test --action_package action.json --project samplejs6-id".
I didn't get any errors in the Firebase logs.
I have updated the title and name in gactions to "Dairy team". It recommends typing "Talk to Dairy team". If I type "Talk to Dairy team", I get the response "We're sorry, but something went wrong. Please try again.".
Please let me know how to fix this issue. Thanks in advance.
The problem is that you're using the "actions-on-google" library version 2, but your code is written using the version 1 objects and functions. There have been some dramatic changes between the two versions. See the migration guide for details on how to upgrade to the new version, or change your package.json file to use version 1.
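If you go the upgrade route, a v2-style handler looks roughly like this (a sketch only, with placeholder prompts rather than your exact wording):
'use strict';
const functions = require('firebase-functions');
const {actionssdk} = require('actions-on-google');

const app = actionssdk({debug: true});

// MAIN intent: runs when the user says "Talk to Dairy team"
app.intent('actions.intent.MAIN', (conv) => {
  conv.ask('Welcome to Dairy Product. What would you like to know?');
});

// TEXT intent: handles the user's raw input on later turns
app.intent('actions.intent.TEXT', (conv, input) => {
  conv.close(`You said ${input}`);
});

// in v2 the app object itself is the HTTPS request handler
exports.dairyProduct = functions.https.onRequest(app);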

How to 'run' an IVR saved in JSON?

I am working on an IVR solution for small businesses in my local area, but I am having trouble wrapping my head around how Node will handle menus. I could make a separate Node server for each of my customers, but I would like to have a single server that pulls each customer's IVR setup from a Mongo database or file when their number is called. I have an idea of how to save the menu structure in JSON, but I am lost when it comes to turning that JSON into responses to <gather> inputs. I was thinking I could use a JSON structure like this in the DB (or maybe as a .json file on Amazon S3):
{
"menu": {
"id": 1,
"name": "Main",
"script": "Thank you for calling Local Company. To speak to sales press 1, ...",
"options": [
{
"name": "",
"action": "",
"value": "",
"next": ""
},
{
"name": "Sales",
"action": "dial",
"value": 12345678901,
"next": ""
},
{
"name": "Support",
"action": "dial",
"value": 12345678902,
"next": ""
},
{
"name": "Directions",
"action": "say",
"value": "Our offices are located at...",
"next": 1
},
{
"name": "Mailbox",
"action": "mailbox",
"value": "main",
"next": 1
}
]
}
}
Twilio developer evangelist here.
If you can return the JSON based on the number a user is dialling, then you could do something like this:
const Twilio = require('twilio');
app.post('/voice', (req, res) => {
const dialledNumber = req.body.To;
getIVRObjectFromPhoneNumber(dialledNumber, (IVRObject) => {
const twiml = new Twilio.twiml.VoiceResponse();
if (typeof req.body.Digits !== 'undefined') {
// A user has pressed a digit, do the next thing!
const action = IVRObject.menu.options[req.body.Digits]
twiml[action.action](action.value);
} else {
// No digits yet, return the <Gather>
const gather = twiml.gather({
numDigits: 1
});
gather.say(IVRObject.menu.script);
}
res.send(twiml.toString());
});
});
This doesn't quite use all of your object (I'm not sure what the values for next mean), but hopefully it's a start. The getIVRObjectFromPhoneNumber method is a made-up, asynchronous method that returns a JavaScript object parsed from your example JSON above.
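If you do keep the menus in Mongo as you mentioned, getIVRObjectFromPhoneNumber could be backed by a lookup like this (a sketch, assuming the official mongodb driver and a menus collection keyed by the dialled number; all names here are hypothetical):
const { MongoClient } = require('mongodb');

async function getIVRObjectFromPhoneNumber(number, callback) {
  const client = await MongoClient.connect(process.env.MONGO_URL);
  try {
    // each document holds one customer's IVR menu, keyed by their Twilio number
    const ivr = await client.db('ivr').collection('menus').findOne({ phoneNumber: number });
    callback(ivr);
  } finally {
    await client.close();
  }
}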
Let me know if this helps at all.

How to get the composer.json from a package

I want to load a package via Composer.
I created the composer.json at the root level of the package. It looks like this:
{
"name": "platform/pollbundle",
"description": {
"text" : "This is the poll bundle"
},
"type": "symfony-bundle",
"authors": [
{
"name": "NAME",
"email": "EMAIL"
}
],
"autoload": {
"psr-0": {
"Platform\\Bundle\\PollBundle": ""
}
},
"extra": {
"servicePath": "odwawdadwa",
"branch-alias": {
"dev-master": "0.1.x-dev"
}
}
}
To handle the data I started to write a script handler which calls this function:
$event->getComposer()->getPackage()->getDescription();
After the install, of course. The output of this function is the description text from the project's composer.json:
The "Symfony Standard Edition" distribution
But what I want is the description text of the package ("This is the poll bundle").
So my question is: how do I get it?
I assume you are now registered to the post-install event. That only applies to the root package. You should register to the post-package-install event instead.
Using this event, you get a PackageEvent instance, which gives you access to the current operation (using PackageEvent#getOperation()). The operation contains the currently installed package:
use Composer\Script\PackageEvent;
class YourInstaller
{
public static function postPackageUpdate(PackageEvent $event)
{
$packageName = $event->getOperation()->getPackage()->getName();
// ... do something great
}
}
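For reference, wiring the handler up would look something like this in the root composer.json (a sketch; YourInstaller needs to be autoloadable, and the method name here just mirrors the example above):
{
    "scripts": {
        "post-package-install": "YourInstaller::postPackageUpdate"
    }
}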