How to update data in Cloudant using the Node.js cloudant module?

Below is a sample JSON document. It contains two fields (besides _id and _rev).
{
  "_id": "daef4a0e39c0c7a00feb721f6c4ce8b9",
  "_rev": "2-8c7ef28df59ecbdaa23b536e58691416",
  "name": "sukil",
  "skills": "java"
}
In server.js
var express = require('express');
var app = express();
var cloudant = require('cloudant');

cloudant({account: "test", password: "test"}, function (err, cloudant) {
  var alice = cloudant.use('opti-update');
  alice.atomic("_design/sample", "inplace", "daef4a0e39c0c7a00feb721f6c4ce8b9", {field: "name", value: "bar"}, function (error, response) {
    console.log(error + "" + response);
  });
});
Here _design/sample is the design document name, inplace is the update function name, and next is the document id. It returns a document update conflict error and the response is undefined.
In design document mentioned below
{
  "_id": "_design/sample",
  "_rev": "9-94393ee4665bdfd6fb283e3419a53f24",
  "updates": {
    "inplace": "function(doc,req){var field = req.body.field;var value = req.body.value;doc[field] = value;return [doc,''];}"
  }
}
I want to update data in Cloudant using the node cloudant module, specifically the name field in the JSON document. I tried the method above, but it shows a document update conflict error. How do I resolve this?

The atomic method already assumes the first parameter is the design document, so there is no need to explicitly specify "_design/".
alice.atomic("sample", "inplace", "daef4a0e39c0c7a00feb721f6c4ce8b9", {field: "name", value: "bar"}, function (error, response) {
console.log(error+""+response);
})
This may be causing the problem.
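If the conflict persists, another common way to avoid a document update conflict is to read the latest revision and write the document back with insert, so the current _rev is always included. A minimal sketch, assuming the same database handle (alice) and document id as above:
// Fetch the latest revision, change the field, then save it back.
// Supplying the current _rev is what prevents the update conflict.
alice.get("daef4a0e39c0c7a00feb721f6c4ce8b9", function (err, doc) {
  if (err) { return console.error(err); }
  doc.name = "bar"; // update the field on the latest revision
  alice.insert(doc, function (err, result) {
    if (err) { return console.error(err); }
    console.log(result); // result.rev is the new revision
  });
});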

Related

how to check if bucket object is already translated

I've uploaded a file to OSS and have its object id. If the bucket object is not yet translated, how can I check the derivatives info with the object id?
It's straightforward: just base64 encode your objectId, then call GET {urn}/manifest. If it returns a 404 HTTP status code, it means this URN hasn't been translated.
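For example, a minimal sketch of turning an OSS objectId into the URN expected by GET {urn}/manifest (the objectId string below is just a placeholder; the service expects URL-safe Base64 without padding):
// Placeholder objectId returned when the file was uploaded to OSS
const objectId = 'urn:adsk.objects:os.object:mybucket/test.rvt';

// Base64 encode and make it URL-safe (strip padding, swap + and / for - and _)
const urn = Buffer.from(objectId)
  .toString('base64')
  .replace(/=+$/, '')
  .replace(/\+/g, '-')
  .replace(/\//g, '_');

console.log(urn); // use this as {urn} in GET {urn}/manifest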
If your file is stored on BIM360/ACC, you will need to get the derivative URN from the file's tip version. Please follow this tutorial, but look for relationships.data.derivatives.data.id for the URN instead, like the example below.
https://forge.autodesk.com/en/docs/bim360/v1/tutorials/document-management/download-document/#step-4-find-the-storage-object-id-for-the-file
"derivatives": {
"data": {
"type": "derivatives",
"id": "dXJuOmFkc2sud2lwcHJvZDpmcy5maWxlOnZmLkVueWtrU3FjU0lPVTVYMGhRdy1mQUM_dmVyc2lvbj0x"
},
// ...
},
Node.js code sample tested with yiskang/forge-viewmodels-nodejs-svf2
const {
  DerivativesApi
} = require('forge-apis');
const { getClient, getPublicToken } = require('./routes/common/oauth');

const derivativeApi = new DerivativesApi();
const urn = 'dXJuOmFkc2sub2JqZWN0czpvcy5vYmplY3Q6bXlidWNrZXQvdGVzdC5ydnQ';

getPublicToken().then(accessToken => {
  derivativeApi.getManifest(urn, {}, null, accessToken).then(function (res) {
    console.log(res.statusCode, res.statusMessage);
  },
  function (err) {
    // When the urn hasn't been translated, it goes here
    console.error('error', err.statusCode, err.statusMessage);
    // if you want to redirect the page somewhere, write your code here
  });
}, function (err) {
  console.error(err);
});
ref: https://stackoverflow.com/a/70664111/7745569

Selecting 2nd object in an array in Postman

Ah, a very simple question, but I can't seem to find the answer anywhere. This is my first set of Postman tests. I have a request that returns this JSON response:
{
  "requestId": "3dd0#170fa14fb64",
  "result": [
    {
      "id": 52508,
      "status": "deleted"
    },
    {
      "id": 52507,
      "status": "deleted"
    }
  ],
  "success": true
}
I want to write a test that verifies that both of the status objects within the result array have the value "deleted", but I don't know the correct syntax to do so... so far I have this:
pm.test("Test users deleted successfully", function () {
var jsonData = pm.response.json();
pm.expect(jsonData.result[0].status).to.eql("deleted");
});
This works great for verifying the first status object, but how do I target the second one in that array?
You would need to loop through the result array:
pm.test("Test users deleted successfully", () => {
var jsonData = pm.response.json();
_.each(jsonData.result, (item) => {
pm.expect(item.status).to.eql("deleted");
});
});
I've used the Lodash .each() function here, which is built into Postman, but you can do this with a native JS loop; it works the same way, I just prefer this syntax.
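For reference, the same test with a plain JavaScript loop instead of Lodash (a sketch of the equivalent; nothing here is Postman-specific beyond pm.test and pm.expect):
pm.test("Test users deleted successfully (native loop)", () => {
  var jsonData = pm.response.json();
  // forEach is built into JS arrays, so no Lodash is needed
  jsonData.result.forEach((item) => {
    pm.expect(item.status).to.eql("deleted");
  });
});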
More info:
https://www.w3schools.com/js/js_loop_for.asp

kafka avro schema - 500 internal server error after 1000 messages

I am using the Confluent 0.9 REST Proxy and Schema Registry to produce Avro messages in a topic.
I am using the "kafka-rest" node module. After 1000 messages it starts giving a 500 internal server error. After reading through some posts, I understand that this error is related to a setting for the maximum number of schemas that can be registered for a topic, and that reusing the schema is the solution for it.
The kafka-rest documentation says the schema reuse is done transparently. Below is the sample code I used to test. I am still getting the same error.
Could someone help me understand how I can reuse the schema?
Thanks,
Rajesh
"use strict";
var async = require('async');
var kafkaRest = require('kafka-rest');
var topicName = "avro-test";
var valueSchema = new kafkaRest.AvroSchema({
"name": "UserInfo",
"type": "record",
"fields": [
{ "name": "id", "type": "int" },
{ "name": "name", "type": "string" }]
});
var kafka = new kafkaRest({"url": "http://localhost:8082"});
var target = kafka.topic(topicName);
// Avro key and value schema.
for(var i = 0; i <= 1005; i++) {
var line = {'id': 1, 'name': 'Bob '};
target.produce(valueSchema, line, handleResponse);
}
function handleResponse( err, res) {
if (err) {
console.log("Error producing messages " + err);
}
}
Looks like a fix is coming in soon: https://github.com/confluentinc/kafka-rest-node/issues/21
Meanwhile, we are using the schema ID to produce messages.
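The node module doesn't expose this directly in its docs, so as an illustration, here is a rough sketch of producing with an already-registered schema ID against the REST Proxy itself (the schema ID 21 and the topic name are placeholders; value_schema_id is the field the proxy's Avro produce API accepts):
var http = require('http');

// Reuse a registered schema by ID instead of sending the full schema each time.
var body = JSON.stringify({
  value_schema_id: 21, // placeholder: ID returned from the first produce/registration
  records: [{ value: { id: 1, name: 'Bob' } }]
});

var req = http.request({
  host: 'localhost',
  port: 8082,
  path: '/topics/avro-test',
  method: 'POST',
  headers: {
    'Content-Type': 'application/vnd.kafka.avro.v1+json',
    'Content-Length': Buffer.byteLength(body)
  }
}, function (res) {
  console.log('status:', res.statusCode);
});

req.on('error', function (err) {
  console.error(err);
});

req.write(body);
req.end();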

Backbone: fetching from URL in router gets undefined, but it works when collection gets JSON from a variable

From a JSON stored in a variable I can get the name for the current id in a router function called show: function(id). However, when I fetch the collection from a URL instead of using the JSON variable, I get an undefined TypeError.
console.log(this.collection.get(id).get('name'));
What I have seen is that when I use a JSON variable the show function works fine, but when I fetch from the URL, the show function executes before the fetch has succeeded.
What am I doing wrong? Why does fetching from a URL give undefined? How can I make it work?
The following code is fictional; it only shows the relevant part of my code.
jsFiddle here
// Data 1 with variable
var heroes = [
  {"id": "1", "name": "Batman"},
  {"id": "2", "name": "Superman"}
];

// Data 2 from url: http://example.com/heroes.json
[
  {"id": "1", "name": "Batman"},
  {"id": "2", "name": "Superman"}
];

HeroesCollection = Backbone.Collection.extend({
  model: HeroesModel,
  url: 'http://example.com/heroes.json'
});
HeroesRouter = Backbone.Router.extend({
  // I use two cases to illustrate this example
  routes: {
    '': 'index',
    ':id': 'show'
  },
  initialize: function(options) {
    this.collection = options.collection;
    this.collection.fetch();
    // this.collection.fetch({async:false}); this fixes my problem, but I heard it is a bad practice
  },
  index: function() {
  },
  show: function(id) {
    console.log(this.collection.get(id).get('name'));
    // Case #1: When the Collection loads from a variable,
    //   id 1 returns: 'Batman'
    // Case #2: When the Collection fetches from the URL, id 1 returns:
    //   TypeError: this.collection.get(...) is undefined
  }
});

// Case #1: collection loads JSON from a variable
var heroesCollection = new HeroesCollection(heroes);

// Case #2: collection loads JSON with fetch in router's initialize
// var heroesCollection = new HeroesCollection();

var heroesRouter = new HeroesRouter({collection: heroesCollection});
How about this? It's been a while, but this seems like a better approach to what you are trying to achieve. The basic concept is that once you navigate to your show route, it will execute show. This method will create a new, empty collection, and then fetch the data for it. Along with that, we pass in a success method (as François illustrated) which will execute when the request finishes with the JSON (which creates a collection of Heroes).
I believe the reason you were running into the issue with the remote data is that you were trying to access this.collection before it was populated with data from the request.
You have to remember the request is asynchronous, which means code execution continues while the request is processing.
HeroesCollection = Backbone.Collection.extend({
  model: HeroesModel,
  url: 'http://example.com/heroes.json'
});

HeroesRouter = Backbone.Router.extend({
  routes: {
    '': 'index',
    ':id': 'show'
  },
  index: function() {
  },
  show: function(id) {
    this.heroesCollection = new HeroesCollection();
    this.heroesCollection.fetch({
      success: function(collection, response, options) {
        // use the collection argument; `this` is not the collection inside the callback
        console.log(collection.get(id).get('name'));
      }
    });
  }
});
You need to trigger the router's 'show' logic once the collection has finished loading.
this.collection.fetch({async:false}); fixes your problem because the whole JavaScript execution waits (async:false) for the Ajax call to finish before going further.
The other, and better, solution is to wait until your collection is fetched before you try to use the results.
Basically:
MyCollection.fetch({
  success: function(model, response) {
    // do whatever you want with the result here, or trigger the router's show method...
  }
});
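Since Backbone's fetch returns the underlying jqXHR, you can also chain on it rather than passing a success option. A minimal sketch of the idea, assuming the fetch is moved out of the router's initialize so the routes only start once the data has arrived:
// Wait for the fetch before starting the router, so `show` always sees a populated collection.
var heroesCollection = new HeroesCollection();
var heroesRouter = new HeroesRouter({collection: heroesCollection});

heroesCollection.fetch().done(function () {
  // Only now will the router's show handler find models in the collection
  Backbone.history.start();
});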

Exception using a naming convention w/ Breeze Angular mySql Node Express stack

I'm able to successfully connect and query data from a MySQL db via a Breeze/Angular client, following the todo-angular example. I switched out the db table and the GUI and was still OK. The problem starts when I try to use a naming convention. (I don't have control over the db that I have to connect to, and I really don't want to use Uppercase_Underscored_Words in my client!)
I'm getting the following exception:
/Users/Sherri/Sites/awdb-web/node_modules/breeze-sequelize/node_modules/breeze-client/breeze.debug.js:1852
throw new Error("Unable to locate a registered object by the name: " + k
^
Error: Unable to locate a registered object by the name: NamingConvention.underscoreCamelCase
at Object.__config._fetchObject (/Users/Sherri/Sites/awdb-web/node_modules/breeze-sequelize/node_modules/breeze-client/breeze.debug.js:1852:13)
at MetadataStore.proto.importMetadata (/Users/Sherri/Sites/awdb-web/node_modules/breeze-sequelize/node_modules/breeze-client/breeze.debug.js:6517:40)
at new module.exports.MetadataMapper (/Users/Sherri/Sites/awdb-web/node_modules/breeze-sequelize/MetadataMapper.js:19:8)
at SequelizeManager.importMetadata (/Users/Sherri/Sites/awdb-web/node_modules/breeze-sequelize/SequelizeManager.js:46:24)
at createSequelizeManager (/Users/Sherri/Sites/awdb-web/server/routes.js:114:8)
at /Users/Sherri/Sites/awdb-web/server/routes.js:23:27
When I take the "namingConvention": "camelCase" line out of the metadata.json file, the error goes away, but then, of course, the database properties are not correctly converted.
Here is the relevant code I use to set up the Entity Manager: (EDIT: I'm pretty sure my problem is server-side and has nothing to do with this code, though)
var namingConvention = new UnderscoreCamelCaseConvention();
namingConvention.setAsDefault();

breeze.core.config.initializeAdapterInstance("uriBuilder", "json");
var serviceName = 'breeze/awdb';
var manager = new breeze.EntityManager(serviceName);

// Take any server property name and make it camelCase for the client to use.
// Also, save it so that we can convert from the client back to the server's name.
function UnderscoreCamelCaseConvention() {
  var serverNames = {
    netPoints: 'netPoints',
    netPointsSpent: 'netPointsSpent'
  }; // every translated server name

  return new breeze.NamingConvention({
    name: 'underscoreCamelCase',
    clientPropertyNameToServer: clientPropertyNameToServer,
    serverPropertyNameToClient: serverPropertyNameToClient
  });

  function clientPropertyNameToServer(clientPropertyName) {
    return serverNames[clientPropertyName];
  }

  function serverPropertyNameToClient(serverPropertyName) {
    var clientName = _.camelCase(serverPropertyName);
    serverNames[clientName] = serverPropertyName;
    return clientName;
  }
}
And here is a snippet of my metadata.json file:
{
  "metadataVersion": "1.0.5",
  "namingConvention": "underscoreCamelCase",
  "localQueryComparisonOptions": "caseInsensitiveSQL",
  "dataServices": [
    {
      "serviceName": "breeze/awdb/",
      "hasServerMetadata": true,
      "jsonResultsAdapter": "webApi_default",
      "useJsonp": false
    }
  ],
  "structuralTypes": [
    {
      "shortName": "person",
      "namespace": "AWdb.Models",
      "autoGeneratedKeyType": "Identity",
      "defaultResourceName": "people",
      "dataProperties": [
        {
          "name": "Person_ID",
          "dataType": "Int32",
          "isNullable": false,
          "defaultValue": 0,
          "isPartOfKey": true,
          "validators": [
            {
              "name": "required"
            },
            {
              "min": -2147483648,
              "max": 2147483647,
              "name": "int32"
            }
          ]
        },
        {
          "name": "Household_ID",
          "dataType": "Int32",
          "validators": [
            {
              "min": -2147483648,
              "max": 2147483647,
              "name": "int32"
            }
          ]
        },
        ....
      ]
    }
  ],
  "resourceEntityTypeMap": {"people": "person:#AWdb.Models"}
}
EDIT:
Here is code from my routes.js file that gets the metadata.
var fs = require('fs');
var breezeSequelize = require('breeze-sequelize');
var SequelizeManager = breezeSequelize.SequelizeManager;
var SequelizeQuery = breezeSequelize.SequelizeQuery;
var SequelizeSaveHandler = breezeSequelize.SequelizeSaveHandler;
var breeze = breezeSequelize.breeze;
var EntityQuery = breeze.EntityQuery;

var dbConfig = {
  host: 'localhost',
  user: 'xx',
  password: 'xx',
  dbName: 'xx'
};

var _sequelizeManager = createSequelizeManager();

// _sequelizeManager.sync(true).then(seed).then(function(){
//   console.log('db init successful');
// });

exports.init = init;

function init(app) {
  app.get('/breeze/awdb/Metadata', function (req, res, next) {
    try {
      var metadata = readMetadata();
      res.send(metadata);
    } catch (e) {
      next(e);
    }
  });
}

function createSequelizeManager() {
  var metadata = readMetadata();
  var sm = new SequelizeManager(dbConfig);
  sm.importMetadata(metadata);
  return sm;
}

function readMetadata() {
  var filename = "server/AWdbMetadata.json";
  if (!fs.existsSync(filename)) {
    filename = "AWdbMetadata.json";
    if (!fs.existsSync(filename)) {
      throw new Error("Unable to locate file: " + filename);
    }
  }
  var metadata = fs.readFileSync(filename, 'utf8');
  return JSON.parse(metadata);
}
Any ideas? Should I be able to use a custom naming convention when I'm on a Node.js server, using a metadata.json file instead of a .NET Entity Framework backend?
If I'm looking at this correctly, then I think your issue is the metadata on the server. If I understand correctly, your table and column names follow the Uppercase_Underscored_Word pattern. The Breeze/Sequelize stack on the server currently doesn't have the ability to convert names, so you must use the names of entities and properties exactly as they are in the DB schema. Otherwise, the Breeze to Sequelize translation will fail. You can still use a naming convention on the client to turn the underscored server names into whatever you want them to be on the client.
So, you need two metadata files. One for the server that is used by the Breeze/Sequelize stack and that uses names exactly as they are in the DB and then a separate metadata file for the client, where you can do the translation.
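As a rough sketch of that split (the file names and the client-side import are assumptions, not part of the original setup): the server-side metadata keeps the DB names exactly and has no namingConvention entry, while the client registers the convention from the question and works with its own camelCased metadata.
// --- server/routes.js (sketch) ---
// Metadata for breeze-sequelize: names exactly as in MySQL (Person_ID, Household_ID, ...),
// no "namingConvention" entry, so the server never has to resolve "underscoreCamelCase".
function readServerMetadata() {
  return JSON.parse(fs.readFileSync('server/AWdbServerMetadata.json', 'utf8')); // hypothetical file
}

// --- client (sketch) ---
// Register the client-side convention before any metadata is imported, then let it
// translate Person_ID -> personId and back.
var namingConvention = new UnderscoreCamelCaseConvention(); // from the question's code
namingConvention.setAsDefault();
var manager = new breeze.EntityManager('breeze/awdb');
// If you keep a separate client metadata file (with "namingConvention": "underscoreCamelCase"
// and camelCased names), import it explicitly:
// manager.metadataStore.importMetadata(clientMetadataJson);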