Exception using a naming convention with the Breeze/Angular/MySQL/Node/Express stack

I'm able to successfully connect to and query data from a MySQL db via a Breeze/Angular client, following the todo-angular example. I switched out the db table and the GUI and everything was still fine. The problem starts when I try to use a naming convention. (I don't have control over the db that I have to connect to, and I really don't want to use Uppercase_Underscored_Words in my client!)
I'm getting the following exception:
/Users/Sherri/Sites/awdb-web/node_modules/breeze-sequelize/node_modules/breeze-client/breeze.debug.js:1852
throw new Error("Unable to locate a registered object by the name: " + k
^
Error: Unable to locate a registered object by the name: NamingConvention.underscoreCamelCase
at Object.__config._fetchObject (/Users/Sherri/Sites/awdb-web/node_modules/breeze-sequelize/node_modules/breeze-client/breeze.debug.js:1852:13)
at MetadataStore.proto.importMetadata (/Users/Sherri/Sites/awdb-web/node_modules/breeze-sequelize/node_modules/breeze-client/breeze.debug.js:6517:40)
at new module.exports.MetadataMapper (/Users/Sherri/Sites/awdb-web/node_modules/breeze-sequelize/MetadataMapper.js:19:8)
at SequelizeManager.importMetadata (/Users/Sherri/Sites/awdb-web/node_modules/breeze-sequelize/SequelizeManager.js:46:24)
at createSequelizeManager (/Users/Sherri/Sites/awdb-web/server/routes.js:114:8)
at /Users/Sherri/Sites/awdb-web/server/routes.js:23:27
When I take the "namingConvention": "underscoreCamelCase" line out of the metadata.json file, the error goes away, but of course, the database properties are no longer converted correctly.
Here is the relevant code I use to set up the EntityManager (EDIT: I'm pretty sure my problem is server-side and has nothing to do with this code, though):
var namingConvention = new UnderscoreCamelCaseConvention();
namingConvention.setAsDefault();
breeze.core.config.initializeAdapterInstance("uriBuilder", "json");

var serviceName = 'breeze/awdb';
var manager = new breeze.EntityManager(serviceName);

// Take any server property name and make it camelCase for the client to use.
// Also, save it so that we can convert from the client back to the server's name.
function UnderscoreCamelCaseConvention() {
    var serverNames = {
        netPoints: 'netPoints',
        netPointsSpent: 'netPointsSpent'
    }; // every translated server name

    return new breeze.NamingConvention({
        name: 'underscoreCamelCase',
        clientPropertyNameToServer: clientPropertyNameToServer,
        serverPropertyNameToClient: serverPropertyNameToClient
    });

    function clientPropertyNameToServer(clientPropertyName) {
        return serverNames[clientPropertyName];
    }

    function serverPropertyNameToClient(serverPropertyName) {
        var clientName = _.camelCase(serverPropertyName);
        serverNames[clientName] = serverPropertyName;
        return clientName;
    }
}
And here is a snippet of my metadata.json file:
{
  "metadataVersion": "1.0.5",
  "namingConvention": "underscoreCamelCase",
  "localQueryComparisonOptions": "caseInsensitiveSQL",
  "dataServices": [
    {
      "serviceName": "breeze/awdb/",
      "hasServerMetadata": true,
      "jsonResultsAdapter": "webApi_default",
      "useJsonp": false
    }
  ],
  "structuralTypes": [
    {
      "shortName": "person",
      "namespace": "AWdb.Models",
      "autoGeneratedKeyType": "Identity",
      "defaultResourceName": "people",
      "dataProperties": [
        {
          "name": "Person_ID",
          "dataType": "Int32",
          "isNullable": false,
          "defaultValue": 0,
          "isPartOfKey": true,
          "validators": [
            { "name": "required" },
            { "min": -2147483648, "max": 2147483647, "name": "int32" }
          ]
        },
        {
          "name": "Household_ID",
          "dataType": "Int32",
          "validators": [
            { "min": -2147483648, "max": 2147483647, "name": "int32" }
          ]
        },
        ....
      ]
    }
  ],
  "resourceEntityTypeMap": { "people": "person:#AWdb.Models" }
}
EDIT:
Here is the code from my routes.js file that reads the metadata:
var fs = require('fs');
var breezeSequelize = require('breeze-sequelize');
var SequelizeManager = breezeSequelize.SequelizeManager;
var SequelizeQuery = breezeSequelize.SequelizeQuery;
var SequelizeSaveHandler = breezeSequelize.SequelizeSaveHandler;
var breeze = breezeSequelize.breeze;
var EntityQuery = breeze.EntityQuery;

var dbConfig = {
    host: 'localhost',
    user: 'xx',
    password: 'xx',
    dbName: 'xx'
};

var _sequelizeManager = createSequelizeManager();
// _sequelizeManager.sync(true).then(seed).then(function(){
//     console.log('db init successful');
// });

exports.init = init;

function init(app) {
    app.get('/breeze/awdb/Metadata', function (req, res, next) {
        try {
            var metadata = readMetadata();
            res.send(metadata);
        } catch (e) {
            next(e);
        }
    });
}

function createSequelizeManager() {
    var metadata = readMetadata();
    var sm = new SequelizeManager(dbConfig);
    sm.importMetadata(metadata);
    return sm;
}

function readMetadata() {
    var filename = "server/AWdbMetadata.json";
    if (!fs.existsSync(filename)) {
        filename = "AWdbMetadata.json";
        if (!fs.existsSync(filename)) {
            throw new Error("Unable to locate file: " + filename);
        }
    }
    var metadata = fs.readFileSync(filename, 'utf8');
    return JSON.parse(metadata);
}
Any ideas? Should I be able to use a custom naming convention when I'm on a Node.js server, using a metadata.json file instead of .NET Entity Framework?

If I'm looking at this correctly, your issue is the metadata on the server. Your table and column names follow the Uppercase_Underscored_Word pattern, and the Breeze/Sequelize stack on the server currently doesn't have the ability to convert names, so you must use the names of entities and properties exactly as they appear in the DB schema; otherwise, the Breeze-to-Sequelize translation will fail. You can still use a naming convention on the client to turn the underscored server names into whatever you want them to be on the client.
So you need two metadata files: one for the server, used by the Breeze/Sequelize stack, that uses names exactly as they are in the DB, and a separate metadata file for the client, where you can do the translation (see the sketch below).
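A minimal sketch of how that split could look, assuming a hypothetical second file, server/AWdbClientMetadata.json, that is a copy of the server metadata but with camelCased property names and the "namingConvention": "underscoreCamelCase" entry:

// Hypothetical sketch: serve the client-oriented metadata file, while the
// SequelizeManager keeps importing the DB-exact one (AWdbMetadata.json).
function init(app) {
    app.get('/breeze/awdb/Metadata', function (req, res, next) {
        try {
            var clientMetadata = fs.readFileSync('server/AWdbClientMetadata.json', 'utf8');
            res.send(JSON.parse(clientMetadata));
        } catch (e) {
            next(e);
        }
    });
}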

Related

Error: No handler for requested intent at WebhookClient.handleRequest

The default intent calling a cloud function gives this error:
Error: No handler for requested intent
at WebhookClient.handleRequest (/user_code/node_modules/dialogflow-fulfillment/src/dialogflow-fulfillment.js:287:29)
at exports.dialogflowFirebaseFulfillment.functions.https.onRequest (/user_code/index.js:73:11)
at cloudFunction (/user_code/node_modules/firebase-functions/lib/providers/https.js:57:9)
at /var/tmp/worker/worker.js:783:7
at /var/tmp/worker/worker.js:766:11
at _combinedTickCallback (internal/process/next_tick.js:73:7)
at process._tickDomainCallback (internal/process/next_tick.js:128:9)
My web response in the diagnostic info log shows this:
{
  "responseId": "86043a10-8bc2-4ee7-8e8b-1e997289ad7c",
  "queryResult": {
    "queryText": "hi",
    "action": "input.welcome",
    "parameters": {},
    "allRequiredParamsPresent": true,
    "fulfillmentText": "Hi. Am Uma. Kindly let me know your experience facing an issue.",
    "fulfillmentMessages": [
      {
        "text": {
          "text": [
            "Hi. Am Uma and welcome to support. Kindly let me know your experience facing an issue."
          ]
        }
      }
    ],
    "outputContexts": [
      {
        "name": "projects/handymanticketagent/agent/sessions/e416a522-da87-ebd1-348e-9fdea1efbf65/contexts/defaultwelcomeintent-followup",
        "lifespanCount": 2
      }
    ],
    "intent": {
      "name": "projects/handymanticketagent/agent/intents/c58f706f-6cb6-499d-9ce2-459e8054ddc1",
      "displayName": "Default Welcome Intent"
    },
    "intentDetectionConfidence": 1,
    "diagnosticInfo": {
      "webhook_latency_ms": 10001
    },
    "languageCode": "en"
  },
  "webhookStatus": {
    "code": 4,
    "message": "Webhook call failed. Error: Request timeout."
  }
}
Based on the Stack Overflow answers here, I have added an intent mapped to a function, but I am still getting the error and cannot progress further. Where and how does the Cloud Functions console say I am missing a handler for my request?
Update: As @Prisoner suggested, I am including my cloud function code.
'use strict';

const functions = require('firebase-functions');
const admin = require('firebase-admin');
const { WebhookClient } = require('dialogflow-fulfillment');

process.env.DEBUG = 'dialogflow:*'; // enables lib debugging statements
admin.initializeApp(functions.config().firebase);
const db = admin.firestore();

exports.dialogflowFirebaseFulfillment = functions.https.onRequest((request, response) => {
    console.log(request.body.queryResult.fulfillmentText);
    console.log(request);
    console.log(response);
    const agent = new WebhookClient({ request, response });
    console.log(agent);

    function writeToDb(agent) {
        // Get parameter from Dialogflow with the string to add to the database
        const databaseEntry = agent.parameters.databaseEntry;
        console.log(databaseEntry);
        // Get the database collection 'dialogflow' and document 'agent' and store
        // the document {entry: "<value of database entry>"} in the 'agent' document
        const dialogflowAgentRef = db.collection('dialogflow').doc('agent');
        console.log(dialogflowAgentRef);
        return db.runTransaction(t => {
            t.set(dialogflowAgentRef, { entry: databaseEntry });
            console.log(Promise.resolve('Write complete'));
            return Promise.resolve('Write complete');
        }).then(doc => {
            agent.add(`Wrote "${databaseEntry}" to the Firestore database.`);
            return null;
        }).catch(err => {
            if (err) {
                console.log(err.stack);
            }
            console.log(`Error writing to Firestore: ${err}`);
            agent.add(`Failed to write "${databaseEntry}" to the Firestore database.`);
        });
    }

    function readFromDb(agent) {
        console.log(agent);
        // Get the database collection 'dialogflow' and document 'agent'
        const dialogflowAgentDoc = db.collection('dialogflow').doc('agent');
        console.log(dialogflowAgentDoc);
        // Get the value of 'entry' in the document and send it to the user
        return dialogflowAgentDoc.get()
            .then(doc => {
                if (!doc.exists) {
                    agent.add('No data found in the database!');
                } else {
                    agent.add(doc.data().entry);
                }
                return Promise.resolve('Read complete');
            }).catch(() => {
                agent.add('Error reading entry from the Firestore database.');
                agent.add('Please add an entry to the database first by saying, "Write <your phrase> to the database"');
            });
    }

    function defaultwelcomeintent_function(agent) {
        console.log(agent);
    }

    // Map from Dialogflow intent names to functions to be run when the intent is matched
    let intentMap = new Map();
    intentMap.set('defaultwelcomeintent-followup', defaultwelcomeintent_function);
    intentMap.set('ReadFromFirestore', readFromDb);
    intentMap.set('WriteToFirestore', writeToDb);
    console.log(intentMap);
    agent.handleRequest(intentMap);
});
The diagnostic info says that the intent's display name for that fulfillment is "Default Welcome Intent":
"intent": {
"name": "projects/handymanticketagent/agent/intents/c58f706f-6cb6-499d-9ce2-459e8054ddc1",
"displayName": "Default Welcome Intent"
},
So you'd need to create a mapping for it like this:
intentMap.set('Default Welcome Intent', defaultwelcomeintent_function);
Where defaultwelcomeintent_function is the handler you have defined within your cloud function.
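For completeness, a minimal sketch of how that could look inside the cloud function (the reply text is just an example taken from the question's fulfillmentText):

function defaultwelcomeintent_function(agent) {
    // The key passed to intentMap.set must match the intent's displayName exactly.
    agent.add('Hi. Am Uma. Kindly let me know your experience facing an issue.');
}
intentMap.set('Default Welcome Intent', defaultwelcomeintent_function);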
I had the same issue with the exact error from Dialogflow (Error: No handler for requested intent). In my case I'm using async/await in order to make synchronous calls through a cloud function in Dialogflow fulfillment.
I noticed that in one of my main functions mapped to an agent, I wasn't returning anything. Since the function was async, I added a return statement with the promise that I was awaiting at the beginning.
async function getInfo(agent) {
    var hh = await getUserInfos(request.body.originalDetectIntentRequest.payload.uuid);
    // Do what you want here
    var yy = hh.aa[0].zz.yy;
    agent.setFollowupEvent({ "name": "xxx", "parameters": { "xxx": yy } });
    // Return your promise
    return hh;
}

Kafka Avro schema - 500 internal server error after 1000 messages

I am using the Confluent 0.9 REST proxy and Schema Registry to produce Avro messages to a topic.
I am using the "kafka-rest" node module. After 1000 messages it starts giving a 500 internal server error. After reading through some posts, I understand that this error is related to a setting for the maximum number of schemas that can be registered, and that reusing the schema is the solution.
The kafka-rest documentation says schema reuse is done transparently. Below is the sample code I used to test. I am still getting the same error.
Could someone help me understand how I can reuse the schema?
Thanks,
Rajesh
"use strict";
var async = require('async');
var kafkaRest = require('kafka-rest');
var topicName = "avro-test";
var valueSchema = new kafkaRest.AvroSchema({
"name": "UserInfo",
"type": "record",
"fields": [
{ "name": "id", "type": "int" },
{ "name": "name", "type": "string" }]
});
var kafka = new kafkaRest({"url": "http://localhost:8082"});
var target = kafka.topic(topicName);
// Avro key and value schema.
for(var i = 0; i <= 1005; i++) {
var line = {'id': 1, 'name': 'Bob '};
target.produce(valueSchema, line, handleResponse);
}
function handleResponse( err, res) {
if (err) {
console.log("Error producing messages " + err);
}
}
Looks like a fix is coming soon: https://github.com/confluentinc/kafka-rest-node/issues/21
Meanwhile, we are using the schema ID to produce messages.
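A hedged sketch of that workaround (the schema id value is hypothetical; it is the id returned when the schema was first registered). The Confluent REST proxy's v1 Avro API accepts a value_schema_id in place of the full schema:

var http = require('http');

// Produce records by schema id so no new schema is registered per request.
function produceWithSchemaId(schemaId, rows, callback) {
    var body = JSON.stringify({
        value_schema_id: schemaId,
        records: rows.map(function (r) { return { value: r }; })
    });
    var req = http.request({
        host: 'localhost',
        port: 8082,
        path: '/topics/avro-test',
        method: 'POST',
        headers: { 'Content-Type': 'application/vnd.kafka.avro.v1+json' }
    }, function (res) {
        res.resume(); // drain the response body
        res.on('end', function () { callback(null, res.statusCode); });
    });
    req.on('error', callback);
    req.end(body);
}

// Usage (hypothetical id): produceWithSchemaId(21, [{ id: 1, name: 'Bob' }], console.log);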

Chrome Extension JSON

I need to know if I can access a specific part of the response from the following script:
chrome.identity.getAuthToken({
    interactive: true
}, function(token) {
    if (chrome.runtime.lastError) {
        alert(chrome.runtime.lastError.message);
        return;
    }
    var x = new XMLHttpRequest();
    x.open('GET', 'https://www.googleapis.com/oauth2/v1/userinfo?alt=json&access_token=' + token);
    x.onload = function() {
        alert(x.response);
    };
    x.send();
});
I currently use document.write(x.response), which writes out the user's information, but I would like to access only the id:
{ "id": "XXX",
"email": "XXX",
"verified_email": true,
"name": "XXX", }
What I would like to do is turn the id into a variable:
var user_id = x.response['id']; // ...or something like that
The original code can be found in chrome.identity User Authentication in a Chrome Extension.
Try:
var user_id = JSON.parse(x.response).id;
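A minimal sketch of where that parse fits, using the XHR from the question (x.response is a JSON string until it is parsed):

x.onload = function() {
    var user = JSON.parse(x.response); // parse the JSON string into an object
    var user_id = user.id;             // the id, now usable as a variable
    console.log(user_id);
};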

How to update data in Cloudant using the Node.js cloudant module?

Below is a sample JSON document. It contains two data fields:
{
    "_id": "daef4a0e39c0c7a00feb721f6c4ce8b9",
    "_rev": "2-8c7ef28df59ecbdaa23b536e58691416",
    "name": "sukil",
    "skills": "java"
}
In server.js
var express = require('express');
var app = express();
var cloudant = require('cloudant');

cloudant({ account: "test", password: "test" }, function(err, cloudant) {
    var alice = cloudant.use('opti-update');
    alice.atomic("_design/sample", "inplace", "daef4a0e39c0c7a00feb721f6c4ce8b9", { field: "name", value: "bar" }, function(error, response) {
        console.log(error + "" + response);
    });
});
Here, _design/sample is the design document name, inplace is the update function name, and the next argument is the document id. It returns a "document update conflict" error, and the response is undefined.
The design document is shown below:
{
    "_id": "_design/sample",
    "_rev": "9-94393ee4665bdfd6fb283e3419a53f24",
    "updates": {
        "inplace": "function(doc,req){var field = req.body.field;var value = req.body.value;doc[field] = value;return [doc,''];}"
    }
}
I want to update data in Cloudant using the node cloudant module, specifically the name field in the JSON document. The method above is what I tried, but it shows a document update conflict error. How can I resolve this?
The atomic method expects just the design document's name as its first parameter and adds the "_design/" prefix itself, so you should not specify "_design/" explicitly:
alice.atomic("sample", "inplace", "daef4a0e39c0c7a00feb721f6c4ce8b9", {field: "name", value: "bar"}, function (error, response) {
console.log(error+""+response);
})
This may be causing the problem.
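If you'd rather avoid the update handler entirely, a common alternative sketch (using the get/insert calls the cloudant module inherits from nano) is to fetch the document, change the field, and write it back with its current _rev:

// Fetch-modify-insert: sending back the fetched _rev avoids update conflicts.
alice.get("daef4a0e39c0c7a00feb721f6c4ce8b9", function(err, doc) {
    if (err) { return console.log(err); }
    doc.name = "bar"; // update the field
    alice.insert(doc, function(err2, body) {
        console.log(err2 || body); // body contains the new _rev on success
    });
});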

Transform a file's full content in Node.js

I am building a website with Node.js which asks for a data file to be uploaded; then I have to check and (if needed) transform the content of this file.
The source file is a JSON or XML configuration file; I just need to ensure its content is well-formatted for the rest of the application.
I am wondering what would be the best way to check the whole file's content.
I usually manipulate files with streams, but I am not sure whether they allow me to do what I want...
The source file has a format similar to this:
{
    "parameters": [{
        "name": "name",
        "settings": {
            "key": "value"
        }
    }],
    "data": [{
        "id": "1",
        "label": "label 1"
    }, {
        "id": "2",
        "label": "label 2"
    }]
}
What I need to do is parse the file's content and check whether the format is good; otherwise, transform the file into a well-formatted one:
// Read the file content
var parameters = [],
    data = [],
    p = parameters.length,
    d = data.length;

// Loop on the parameters, and check the format
while (p--) {
    var parameter = parameters[p];
    if ('name' in parameter && typeof parameter.name == "string") {
        // Add several rules
        parameters.push(parameter);
    }
}

// Do a similar control for "data".
// Then save the well-formatted parameters and data into a file
The thing is that the uploaded file might be very large...
Can I perform this with transform streams? I need to check the full content of the file as an object!
How can I be sure a stream transformer won't give me a chunk with just part of the data, for instance?
I'd first try something like this:
var fs = require('fs');

try {
    var inputFile = require('./config.json');
} catch (e) {
    console.log(e.message); // Do proper error handling.
}

// Loop on the parameters, and check the format
if (!('parameters' in inputFile)) {
    console.log("Got a problem here!");
}

var parameters = [];
var p = inputFile['parameters'].length;
while (p--) {
    var parameter = inputFile['parameters'][p];
    if ('name' in parameter && typeof parameter.name == 'string') {
        // Add several rules
        parameters.push(parameter);
    }
}

// Do a similar control for "data".
var data = inputFile['data'];
// More code needed here...

// Then save the well-formatted parameters and data into a file
fs.writeFileSync('./data.json', JSON.stringify({ parameters: parameters, data: data }, null, 4), 'utf-8');
If you are dealing with mammoth files that cannot fit into memory, you've got a HUGELY more difficult task ahead of you. In general, you cannot guarantee that a partial read will give you enough of the JSON to parse anything out of (e.g. {"data": ["<FOUR PETABYTE STRING>"]}).
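If you do end up with such a file, a hedged sketch (assuming the JSONStream npm module is acceptable as a dependency, and that your rules can be applied one entry at a time) streams each element of the "data" array without holding the whole document in memory:

var fs = require('fs');
var JSONStream = require('JSONStream');

var data = [];
fs.createReadStream('./config.json')
    .pipe(JSONStream.parse('data.*')) // emits each element of the "data" array
    .on('data', function(item) {
        // Apply per-entry rules here, e.g. require an id and a label.
        if ('id' in item && 'label' in item) {
            data.push(item);
        }
    })
    .on('end', function() {
        // Note: the filtered result is still buffered; for truly huge outputs,
        // write entries incrementally instead.
        fs.writeFileSync('./data.json', JSON.stringify({ data: data }, null, 4), 'utf-8');
    });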