Cloud Function to Trigger DataPrep Dataflow Job - google-cloud-functions

I have a small pipeline I'm trying to execute:

1. File placed into GCS bucket
2. Cloud Function triggers Dataflow job when file is placed in the GCS bucket (not working)
3. Dataflow job writes to a BigQuery table (this part working)

I've created a Dataflow job through Dataprep, as it has a nice UI to do all my transformations before writing to a BigQuery table (writing to BigQuery works fine), and the Cloud Function triggers when a file is uploaded to the GCS bucket. However, the Cloud Function doesn't trigger the Dataflow job (which I wrote in Dataprep).
Please have a look at my sample Cloud Function code below; any pointers as to why the Dataflow job is not triggering would be appreciated.
/**
 * Triggered from a message on a Cloud Storage bucket.
 *
 * @param {!Object} event The Cloud Functions event.
 * @param {!Function} callback The callback function.
 */
exports.processFile = (event, callback) => {
  console.log('Processing file: ' + event.data.name);
  callback();

  const google = require('googleapis');

  exports.CF_GCStoDataFlow_v2 = function(event, callback) {
    const file = event.data;
    if (file.resourceState === 'exists' && file.name) {
      google.auth.getApplicationDefault(function (err, authClient, projectId) {
        if (err) {
          throw err;
        }

        if (authClient.createScopedRequired && authClient.createScopedRequired()) {
          authClient = authClient.createScoped([
            'https://www.googleapis.com/auth/cloud-platform',
            'https://www.googleapis.com/auth/userinfo.email'
          ]);
        }

        const dataflow = google.dataflow({ version: 'v1b3', auth: authClient });

        dataflow.projects.templates.create({
          projectId: projectId,
          resource: {
            parameters: {
              inputFile: `gs://${file.bucket}/${file.name}`,
              outputFile: `gs://${file.bucket}/${file.name}`
            },
            jobName: 'cloud-dataprep-csvtobq-v2-281345',
            gcsPath: 'gs://mygcstest-pipeline-staging/temp/'
          }
        }, function(err, response) {
          if (err) {
            console.error("problem running dataflow template, error was: ", err);
          }
          console.log("Dataflow template response: ", response);
          callback();
        });
      });
    }
  };
};

This snippet may help. It uses a different method of the Dataflow API (launch), and it worked for me. Be aware that you need to specify the template's URL, and also check the metadata file (you can find it in the same directory as the template when executed through the Dataprep interface) to make sure you are including the right parameters:
dataflow.projects.templates.launch({
  projectId: projectId,
  location: location,
  gcsPath: jobTemplateUrl,
  resource: {
    parameters: {
      inputLocations: `{"location1": "gs://${file.bucket}/${file.name}"}`,
      outputLocations: `{"location1": "gs://${destination.bucket}/${destination.name}"}`
    },
    environment: {
      tempLocation: `gs://${destination.bucket}/${destination.tempFolder}`,
      zone: "us-central1-f"
    },
    jobName: 'my-job-name'
  }
}, function(err, response) {
  if (err) {
    console.error("problem launching dataflow template, error was: ", err);
  }
  console.log("Dataflow template response: ", response);
  callback();
});

Have you submitted your Dataproc job? Has it started running?
The documentation below can give you some ideas to get started:
https://cloud.google.com/dataproc/docs/concepts/jobs/life-of-a-job

Looks like you are putting CF_GCStoDataFlow_v2 inside processFile, so the Dataflow part of the code is not executing.
Your function should look like this:
/**
 * Triggered from a message on a Cloud Storage bucket.
 *
 * @param {!Object} event The Cloud Functions event.
 * @param {!Function} callback The callback function.
 */
exports.CF_GCStoDataFlow_v2 = (event, callback) => {
  const google = require('googleapis');
  const file = event.data;

  if (file.resourceState === 'exists' && file.name) {
    google.auth.getApplicationDefault(function (err, authClient, projectId) {
      if (err) {
        throw err;
      }

      if (authClient.createScopedRequired && authClient.createScopedRequired()) {
        authClient = authClient.createScoped([
          'https://www.googleapis.com/auth/cloud-platform',
          'https://www.googleapis.com/auth/userinfo.email'
        ]);
      }

      const dataflow = google.dataflow({ version: 'v1b3', auth: authClient });

      dataflow.projects.templates.create({
        projectId: projectId,
        resource: {
          parameters: {
            inputFile: `gs://${file.bucket}/${file.name}`,
            outputFile: `gs://${file.bucket}/${file.name}`
          },
          jobName: '<JOB_NAME>',
          gcsPath: '<BUCKET_NAME>'
        }
      }, function(err, response) {
        if (err) {
          console.error("problem running dataflow template, error was: ", err);
        }
        console.log("Dataflow template response: ", response);
        callback();
      });
    });
  } else {
    callback();
  }
};
Make sure you change the value under “Function to execute” to CF_GCStoDataFlow_v2
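For reference, redeploying with the right entry point might look something like the command below. This is a sketch mirroring the deploy command used later in this thread; the bucket names are placeholders, not values from the original post:
gcloud beta functions deploy CF_GCStoDataFlow_v2 --stage-bucket <STAGE_BUCKET> --trigger-bucket <TRIGGER_BUCKET>
By default the deployed function name doubles as the entry point, which is what the "Function to execute" setting reflects (newer gcloud releases also have an --entry-point flag to override it).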

Related

Google cloud functions - cannot read property 'getApplicationDefault'

I have deployed a Cloud Function to invoke a Dataflow pipeline template, and I am trying to trigger the function by placing a file in a Cloud Storage bucket.
As Node.js prerequisites I have run:
npm init
npm install --save googleapis
index.js:
const google = require('googleapis');

exports.goWithTheDataFlow = function(event, callback) {
  const file = event.data;
  google.auth.getApplicationDefault(function (err, authClient, projectId) {
    if (err) {
      throw err;
    }
    console.log(projectId);

    if (authClient.createScopedRequired && authClient.createScopedRequired()) {
      authClient = authClient.createScoped([
        'https://www.googleapis.com/auth/cloud-platform',
        'https://www.googleapis.com/auth/userinfo.email'
      ]);
    }

    const dataflow = google.dataflow({ version: 'v1b3', auth: authClient });
    console.log(`gs://${file.bucket}/${file.name}`);

    dataflow.projects.templates.create({
      projectId: projectId,
      resource: {
        parameters: {
          inputFile: `gs://${file.bucket}/${file.name}`
        },
        jobName: 'cloud-fn-dataflow-test',
        gcsPath: 'gs://jaison/templates/ApacheBeamTemplate'
      }
    }, function(err, response) {
      if (err) {
        console.error("problem running dataflow template, error was: ", err);
      }
      console.log("Dataflow template response: ", response);
      callback();
    });
  });
  callback();
};
Command used to deploy cloud function:
gcloud beta functions deploy goWithTheDataFlow --stage-bucket cf100stage --trigger-bucket cf100
Dataflow (Apache Beam):
I was able to execute the Dataflow template from the console, and below is the path of the template:
gs://jaison/templates/ApacheBeamTemplate
The function crashes with the below error:
TypeError: Cannot read property 'getApplicationDefault' of undefined
    at exports.goWithTheDataFlow (/user_code/index.js:11:17)
    at /var/tmp/worker/worker.js:695:16
    at /var/tmp/worker/worker.js:660:9
    at _combinedTickCallback (internal/process/next_tick.js:73:7)
    at process._tickDomainCallback (internal/process/next_tick.js:128:9)
Looks like I am missing libraries. Not sure how to fix this. Please help.
My Cloud Function works with the changes below:
1. Setting GOOGLE_APPLICATION_CREDENTIALS to the service account JSON file:
export GOOGLE_APPLICATION_CREDENTIALS="/path/of/svc/json/file.json"
2. index.js:
var {google} = require('googleapis');

exports.TriggerBeam = (event, callback) => {
  const file = event.data;
  const context = event.context;

  console.log(`Event ${context.eventId}`);
  console.log(` Event Type: ${context.eventType}`);
  console.log(` Bucket: ${file.bucket}`);
  console.log(` File: ${file.name}`);
  console.log(` Metageneration: ${file.metageneration}`);
  console.log(` Created: ${file.timeCreated}`);
  console.log(` Updated: ${file.updated}`);

  google.auth.getApplicationDefault(function (err, authClient, projectId) {
    if (err) {
      throw err;
    }
    console.log(projectId);

    const dataflow = google.dataflow({ version: 'v1b3', auth: authClient });
    console.log(`gs://${file.bucket}/${file.name}`);

    dataflow.projects.templates.create({
      projectId: projectId,
      resource: {
        parameters: {
          inputFile: `gs://${file.bucket}/${file.name}`
        },
        jobName: 'cloud-fn-beam-test',
        gcsPath: 'gs://jaison/templates/ApacheBeamTemplate'
      }
    }, function(err, response) {
      if (err) {
        console.error("problem running dataflow template, error was: ", err);
      }
      console.log("Dataflow template response: ", response);
      callback();
    });
  });
  callback();
};
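The import change is the crucial part here. If memory serves, newer googleapis releases (around v26) stopped exporting the API root directly and began exporting it as a named property, so the old require form leaves google.auth undefined, which matches the TypeError above. A quick contrast (treat the version boundary as approximate):
// Older googleapis releases: the module itself was the API root.
// const google = require('googleapis');
// Newer releases export it as a named property instead, so destructure it:
const {google} = require('googleapis');
console.log(typeof google.auth.getApplicationDefault); // 'function'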

NodeJS Failing to load in credentials file AWS

This is what my code looks like:
'use strict';
process.env.AWS_PROFILE

// Load the AWS SDK for Node.js
const AWS = require('aws-sdk');
// Create EC2 service object
var ec2 = new AWS.EC2({apiVersion: '2016-11-15'});
// Load credentials and set region from JSON file
AWS.config.loadFromPath('/Users/testuser/.aws/credentials');
// Load in security group parameters
const securityParams = require('./securityParams.json');

module.exports = {
  // Exports creation of Security Groups
  CreateSecurityGroup: (req, res) => {
    ec2.createSecurityGroup(securityParams, function(err, data) {
      if (err) {
        return (console.log("Error", err));
      }
      // Pass the JSON as a parameter in this function
      ec2.authorizeSecurityGroupIngress(securityParams, function(err, data) {
        if (err) {
          res.serverError(err, err.stack);
        } else {
          res.ok(data);
          console.log('Ingress Security Rules Created');
        }
      });
      // Pass the JSON as a parameter in this function
      ec2.authorizeSecurityGroupEgress(securityParams, function(err, data) {
        if (err) {
          res.serverError(err, err.stack);
        } else {
          res.ok(data);
          console.log('Egress Security Rules Created');
        }
      });
    });
  }
};
I'm trying to have the script load configuration from two files: one AWS credentials file, and one JSON file. However, it's throwing errors on the credentials file, which looks like this:
[default]
aws_access_key_id=**************
aws_secret_access_key**************
I'm not sure what I'm missing to get it to read the properties correctly.
Here is the error I'm seeing:
undefined:1
[default]
^
SyntaxError: Unexpected token d in JSON at position 1
at JSON.parse (<anonymous>)
The credentials file is a plain ASCII (ini-style) file; it is not a JSON file, yet this line tries to parse it as JSON:
// Load credentials and set region from JSON file
AWS.config.loadFromPath('/Users/testuser/.aws/credentials');
You can check the file type with the command file /Users/testuser/.aws/credentials.
Here is a sample snippet to read the properties file and set the AWS config:
var PropertiesReader = require('properties-reader');
var AWS = require('aws-sdk');

var properties = PropertiesReader('/Users/username/.aws/credentials');

AWS.config.update({
  // Note: depending on how properties-reader handles the [default] section,
  // these keys may need a 'default.' prefix.
  accessKeyId     : properties.get('aws_access_key_id'),
  secretAccessKey : properties.get('aws_secret_access_key'),
  region          : 'us-west-2'
});
console.log(AWS.config);
Ref: https://www.npmjs.com/package/properties-reader
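As an alternative sketch (not from the original answer): the AWS SDK for JavaScript can parse the ini-style shared credentials file itself, which avoids the extra dependency. This assumes aws-sdk v2 and the default profile:
// Let the SDK read ~/.aws/credentials directly instead of parsing it by hand.
var AWS = require('aws-sdk');
var credentials = new AWS.SharedIniFileCredentials({ profile: 'default' });
AWS.config.credentials = credentials;
AWS.config.update({ region: 'us-west-2' });
console.log(AWS.config.credentials.accessKeyId);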

Querying a MySQL database from a NodeJS AWS Lambda Function

I'm having a problem querying my MySQL database (hosted remotely from AWS) inside of my AWS Lambda function.
This is my code, except for the parts I need for the rest of the Lambda function (which is being called from an Alexa Skill):
var mysql = require('mysql');
var connection = mysql.createConnection({
  host     : '<myserver>',
  user     : '<myusername>',
  password : '<mypw>',
  database : '<mydatabase>'
});

connection.connect(function(err) {
  if (!err) {
    console.log("Database is connected ...\n\n");
  }
  else {
    console.log("Error connecting database ...\n\n");
  }
});

connection.query("INSERT INTO Users (user_id) VALUES ('TESTNAME')");
connection.end();
This works just fine when I run it with node from my command prompt:
node index.js
I'm using the "mysql" module installed via npm in the directory with index.js and zip it and upload it to my Lambda function.
Again, this works on my development machine, but gives no indicator when testing my Lambda function as to why it doesn't effect my database at all.
My question extends into Alexa and Lambda as much as it does the proper usage of the mysql Node.JS module.
Here's my current code for my Lambda, and the problem here, of course, is still that my test value -> a username called "TESTNAME" doesn't get added to my MySQL database.
I put the query into the connect callback as the first comment suggests, and I'm putting my new code instead of updating my old code above just to keep a record of what how I think the code should transition to being in my Alexa's Lambda function:
Updated code:
var mysql = require('mysql');
var connection = mysql.createConnection({
  host     : '<myserver>',
  user     : '<myusername>',
  password : '<mypw>',
  database : '<mydatabase>'
});

exports.handler = (event, context) => {
  try {
    if (event.session.new) {
      // New Session
      console.log("NEW SESSION");
    }
    switch (event.request.type) {
      case "LaunchRequest":
        // Launch Request
        console.log(`LAUNCH REQUEST`);
        context.succeed(
          generateResponse({},
            buildSpeechletResponse("Welcome to an Alexa Skill, this is running on a deployed Lambda function", true)
          )
        );
        break;
      case "IntentRequest":
        // Intent Request
        console.log(`Intent Request`);
        console.log('Then run MySQL code:');
        connection.connect(function(err) {
          console.log('Inside connection.connect() callback');
          if (!err) {
            console.log("Database is connected ... ");
            connection.query("INSERT INTO Users (user_id) VALUES ('TESTNAME')",
              function(err, result) {
                console.log("Inside connection.query() callback");
                if (!err) {
                  console.log("Query Successful! Ending Connection.");
                  connection.end();
                } else {
                  console.log("Query error!");
                }
              });
          } else {
            console.log("Error connecting database ..." + err.message);
          }
        });
        context.succeed(
          generateResponse({},
            buildSpeechletResponse("Welcome to the incredible intelligent MySQLable Alexa!", true)
          )
        );
        break;
      case "SessionEndedRequest":
        // Session Ended Request
        console.log(`SESSION ENDED REQUEST`);
        break;
      default:
        context.fail(`INVALID REQUEST TYPE: ${event.request.type}`);
    }
  } catch (error) {
    context.fail(`Exception: ${error}`);
  }
};

// Helpers
buildSpeechletResponse = (outputText, shouldEndSession) => {
  return {
    outputSpeech: {
      type: "PlainText",
      text: outputText
    },
    shouldEndSession: shouldEndSession
  };
};

generateResponse = (sessionAttributes, speechletResponse) => {
  return {
    version: "1.0",
    sessionAttributes: sessionAttributes,
    response: speechletResponse
  };
};
And my console output:
START RequestId: 5d4d17a7-0272-11e7-951c-b3d6944457e1 Version: $LATEST
2017-03-06T13:39:47.561Z 5d4d17a7-0272-11e7-951c-b3d6944457e1 Intent Request
2017-03-06T13:39:47.562Z 5d4d17a7-0272-11e7-951c-b3d6944457e1 Then run MySQL code:
END RequestId: 5d4d17a7-0272-11e7-951c-b3d6944457e1
REPORT RequestId: 5d4d17a7-0272-11e7-951c-b3d6944457e1 Duration: 82.48 ms Billed Duration: 100 ms Memory Size: 128 MB Max Memory Used: 14 MB
The problem was that I needed to put my context.succeed inside of my callbacks. Many thanks to sqlbot, as his talk of callbacks led me to study where things were actually ending their execution.
So apparently when using AWS Lambda, if the "context" ends before your callbacks get called, you don't get your callbacks. So even though I had placed all of my callbacks like so: connect -> query -> end, the first callback of the chain from connect never gets called because "context.succeed" was getting called right afterwards, which ended execution.
Here's my code as of now (getting a proper query happening now):
var mysql = require('mysql');
var connection = mysql.createConnection({
  ...
});

exports.handler = (event, context) => {
  try {
    if (event.session.new) {
      // New Session
      console.log("NEW SESSION");
    }
    switch (event.request.type) {
      case "LaunchRequest":
        // Launch Request
        console.log(`LAUNCH REQUEST`);
        context.succeed(
          generateResponse({},
            buildSpeechletResponse("Welcome to an Alexa Skill, this is running on a deployed Lambda function", true)
          )
        );
        break;
      case "IntentRequest":
        // Intent Request
        console.log(`Intent Request`);
        console.log('Then run MySQL code:');
        connection.connect(function(err) {
          console.log('Inside connection.connect() callback');
          if (!err) {
            console.log("Database is connected ... ");
            connection.query("INSERT INTO Users (user_id) VALUES ('TESTNAME')",
              function(err, result) {
                console.log("Inside connection.query() callback");
                if (!err) {
                  console.log("Query Successful! Ending Connection.");
                  connection.end();
                } else {
                  console.log("Query error!");
                }
              });
          } else {
            console.log("Error connecting database ..." + err.message);
          }
          context.succeed(
            generateResponse({},
              buildSpeechletResponse("Welcome to the incredible intelligent MySQLable Alexa!", true)
            )
          );
        });
        break;
      case "SessionEndedRequest":
        // Session Ended Request
        console.log(`SESSION ENDED REQUEST`);
        break;
      default:
        context.fail(`INVALID REQUEST TYPE: ${event.request.type}`);
    }
  } catch (error) {
    context.fail(`Exception: ${error}`);
  }
};

// Helpers
buildSpeechletResponse = (outputText, shouldEndSession) => {
  return {
    outputSpeech: {
      type: "PlainText",
      text: outputText
    },
    shouldEndSession: shouldEndSession
  };
};

generateResponse = (sessionAttributes, speechletResponse) => {
  return {
    version: "1.0",
    sessionAttributes: sessionAttributes,
    response: speechletResponse
  };
};

Process exited before completing request (Lambda + DynamoDB)

For some reason I am getting the Process exited before completing request error.
Here is my code:
var http = require('http');
var aws = require('aws-sdk');

var ddb = new aws.DynamoDB();

function getUser(userid) {
  var q = ddb.getItem({
    TableName: "clients",
    Key: {
      ClientID: { S: userid }
    }
  }, function(err, data) {
    if (err) {
      console.log(err);
      return err;
    }
    else {
      console.log(data);
    }
  });
  //console.log(q);
}

exports.handler = function(event, context) {
  getUser('user23');
  console.log("called DynamoDB");
};
After googling, a few people suggested raising the timeout, which I did (to one minute). However, the function only took:
Duration : 2542.23 ms
I have also checked and double-checked the table name, the key name, etc.
The console log has this:
2016-03-21T04:09:46.390Z - Received event
2016-03-21T04:09:46.751Z - called DynamoDB
2016-03-21T04:09:47.012Z - {}
END RequestId: id123
Can anyone see why this is not working?
Edit
As per the answer below I tried:
console.log('Loading event');
var AWS = require('aws-sdk');
var dynamodb = new AWS.DynamoDB({apiVersion: '2012-08-10'});

exports.handler = function(event, context) {
  console.log(JSON.stringify(event, null, ' '));

  dynamodb.listTables(function(err, data) {
    console.log(JSON.stringify(data, null, ' '));
  });

  var tableName = "clients";
  var datetime = new Date().getTime().toString();

  dynamodb.getItem({
    TableName: tableName,
    Key: {
      ClientID: { S: "gr5f4sgnca25hki" }
    }
  }, function(err, data) {
    if (err) {
      context.done('error', 'putting item into dynamodb failed: ' + err);
    }
    else {
      context.done(data);
    }
  });
};
but now my response is:
"errorMessage": "[object Object]"
What I am trying to do is this: check whether an item exists in the database; if it exists, get the parameters from the entry, then do something with them.
Can anyone help me?
First of all, context.done expects an Error object as the first argument, not a string containing the word "error".
Second, if the Error object is null or undefined, then the termination will be taken as a succeed.
Now, consider your callback function:
function (err, data)
{
if (err) {
context.done('error', 'putting item into dynamodb failed: ' + err);
}
else {
context.done(data);
}
}
If you have an error, then your lambda will terminate in a failure, which is expected, but the errorMessage you'll get would simply be "error", which isn't very informative.
If you don't have an error, then your lambda will also terminate in a failure, because you are passing in data as the first argument to context.done, and remember that the first argument is always the Error object.
To fix this, you can simply do:
function (err, data) {
  if (err) {
    context.done(err);
  } else {
    context.done(null, data);
  }
}
Or even better:
function (err, data) {
  context.done(err, data);
}
If you don't want to handle the item and just return it immediately, you can use context.done as your callback function to the DynamoDB operation:
dynamodb.getItem({
  TableName: tableName,
  Key: {
    ClientID: { S: "gr5f4sgnca25hki" }
  }
}, context.done);
You need to signal to Lambda that your function has finished.
Important
To properly terminate your Lambda function execution, you must call context.succeed(), context.fail(), or context.done() method. If you don't, either your Lambda function will continue to run until the Node.js event queue is empty, or your Lambda function times out.
Here is an example:
https://gist.github.com/markusklems/1e7218d76d7583f1f7b3
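For illustration, here is a minimal sketch of that advice applied to the getItem call from the question (table name and key value are the question's placeholders):
var AWS = require('aws-sdk');
var dynamodb = new AWS.DynamoDB();

exports.handler = function(event, context) {
  dynamodb.getItem({
    TableName: "clients",
    Key: { ClientID: { S: "gr5f4sgnca25hki" } }
  }, function(err, data) {
    // Terminate only after the async call has finished; ending the
    // context earlier means this callback may never run.
    context.done(err, data);
  });
};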
"errorMessage": "[object Object]"
can be solved by a small change as follows
function(err, data) {
  if (err) {
    context.done(err);
  }
  else {
    context.succeed(data);
  }
}
Note how context.succeed() differs from context.done(): context.succeed(data) is equivalent to context.done(null, data).

Auto-create mysql table with StrongLoop

I am trying to use StrongLoop with MySQL but cannot figure out how to migrate or automatically create tables in a MySQL database.
Is there at least a way to export the models into MySQL schemas, or do I have to create the tables manually?
I've been trying with the MySQL demo app and going over the docs for a while, but no luck - http://docs.strongloop.com/display/DOC/MySQL+connector
Thanks!
I created /server/boot/autoupdate.js. It runs when the app boots. It loads "model-config" and "datasources" JSON and migrates or updates all models to the datasources defined for them.
// /server/boot/autoupdate.js
module.exports = function(app) {
  var path = require('path');
  var models = require(path.resolve(__dirname, '../model-config.json'));
  var datasources = require(path.resolve(__dirname, '../datasources.json'));

  function autoUpdateAll() {
    Object.keys(models).forEach(function(key) {
      if (typeof models[key].dataSource != 'undefined') {
        if (typeof datasources[models[key].dataSource] != 'undefined') {
          app.dataSources[models[key].dataSource].autoupdate(key, function(err) {
            if (err) throw err;
            console.log('Model ' + key + ' updated');
          });
        }
      }
    });
  }

  function autoMigrateAll() {
    Object.keys(models).forEach(function(key) {
      if (typeof models[key].dataSource != 'undefined') {
        if (typeof datasources[models[key].dataSource] != 'undefined') {
          app.dataSources[models[key].dataSource].automigrate(key, function(err) {
            if (err) throw err;
            console.log('Model ' + key + ' migrated');
          });
        }
      }
    });
  }

  // TODO: change to autoUpdateAll when ready for CI deployment to production
  autoMigrateAll();
  //autoUpdateAll();
};
You can simply migrate models by adding the following lines to your server.js file, before the app.start method:
app.datasources['mySqlConnection'].automigrate(['orders', 'customers', 'User', 'ACL'], function(err) {
  console.log(err);
});
Add models to the array as per your need.
Run the application with slc run.
Note: mySqlConnection is the connection name; replace it with your own connection name.
To update and/or create all MySQL tables for your models (cb here is your own callback):
var dataSource = app.dataSources.mysql;
dataSource.autoupdate(null, function(err) {
  if (err) return cb(err);
  return cb();
});
LoopBack calls it auto-migration. Check these links and search for that term:
Recipes for LoopBack Models, part 5 of 5: Model Synchronization with Relational Databases
Data sources and connectors
In my case, I manually created the MySQL tables and then created the models. For existing MySQL tables, I create the models with property names that are the same as the MySQL field names.
So here are my steps in using StrongLoop LoopBack with a MySQL database:
Create the MySQL database and tables (or use an existing database).
Install the MySQL connector using npm install loopback-connector-mysql --save
Add your MySQL database details in the datasources.json file.
Create a model for each table using slc lb model tablename -i OR edit the models.json file and add the properties manually (document: http://docs.strongloop.com/display/DOC/Creating+a+LoopBack+application#CreatingaLoopBackapplication-Creatingmodels)
Property names should be the same as the MySQL field names; see the sketch below (more information on mapping MySQL to JSON data types: http://docs.strongloop.com/display/DOC/MySQL+connector#MySQLconnector-MySQLtoJSONtypes)
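As a minimal illustration of that last point (the model and column names here are made up, and the exact file layout varies between LoopBack versions; this follows the common/models/*.json style):
{
  "name": "customer",
  "base": "PersistedModel",
  "properties": {
    "first_name": { "type": "string" },
    "created_at": { "type": "date" }
  }
}
Each key under properties matches a MySQL column name exactly, so auto-migration and queries line up with the existing table.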
On a related note, if you need to automatically create the database itself, you can use the createDatabase option in your datasource JSON file:
"mysql": {
  "host": "localhost",
  "port": 0,
  "database": "db",
  "username": "root",
  "password": "",
  "name": "mysql",
  "connector": "mysql",
  "debug": false,
  "createDatabase": true
}
So you don't need to write the database-creation queries yourself.
Hope it helps.
jduhls' answer is beautiful, but I needed to tweak it slightly to add some static data into tables. Here's my tweaked version, along with an example of loading data into a simple SystemSettings table (id, settingName, settingValue):
var async = require('async');

var SYSTEM_SETTINGS = [
  {
    "settingName": "mustPayInAdvance",
    "settingValue": "false"
  }
];

module.exports = function(app) {
  var path = require('path');
  var models = require(path.resolve(__dirname, '../model-config.json'));
  var datasources = require(path.resolve(__dirname, '../datasources.json'));
  var modelUpdates = [];

  function buildModelListForOperation() {
    Object.keys(models).forEach(function(key) {
      if (typeof models[key].dataSource != 'undefined') {
        if (typeof datasources[models[key].dataSource] != 'undefined') {
          modelUpdates.push({operation: app.dataSources[models[key].dataSource], key: key});
        }
      }
    });
  }

  function createStaticData() {
    app.models.SystemSettings.create(SYSTEM_SETTINGS, function(err, created) {
      if (err)
        throw err;
      else
        console.log('Sample data was imported.');
    });
  }

  function processModelsAndData(operationType) {
    buildModelListForOperation();

    // Process all models
    async.each(modelUpdates, function(item, callback) {
      item.operation[operationType](item.key, function(err) {
        if (err) throw err;
        console.log('Model ' + item.key + ' migrated');
        callback();
      });
    }, function(err) {
      if (err) throw err;
      createStaticData();
    });
  }

  // TODO: change to 'autoupdate' when ready for CI deployment to production
  processModelsAndData('automigrate');
};
I discovered an easy way to accomplish this task. The reference link is: Click Here
You can use a prototype or not; in my case, I did not.
Per the documentation, you should use:
ds.autoupdate(models, function(error) {
  if (!error) {
    console.log("Updated models.");
  } else {
    console.log("An error has occurred: " + error);
  }
  ds.disconnect();
});
Where:
var path = require('path');
var app = require(path.resolve(__dirname, '../server/server'));
var ds = app.datasources.x;
and x is the datasource attribute name; an example /server/datasources.json:
{
  "x": {
    "host": "localhost",
    "port": 3306,
    "database": "loopapp",
    "password": "",
    "name": "x",
    "user": "root",
    "connector": "mysql"
  }
}
Note (1): models can be a string model name or an array of strings (model names).
Note (2): if you omit models, all models in the file whose base attribute equals "PersistedModel" will be updated.
With that, I used it like this:
function autoupdate() {
  ds.autoupdate(function(error) {
    if (!error) {
      console.log("Updated all models");
    } else {
      console.log("An error has occurred: " + error);
    }
    ds.disconnect();
  });
}
and then I called autoupdate();
You can put this code in a file.js and run it from the command line: node file.js.
If you want this file to be called every time you start the program, put it at the /server/boot/file.js path.
Obviously, if you want to use automigrate instead, just replace the word autoupdate in the code above with automigrate. (Keep in mind that automigrate drops and recreates tables, destroying existing data, while autoupdate attempts a non-destructive schema update.)