How to solve an "undefined symbols for architecture x86_64" error when installing Caffe on macOS 10.13 - caffe

How do I solve an "undefined symbols for architecture x86_64" error?
The error occurred while I was installing Caffe on macOS High Sierra.
macOS High Sierra version: 10.13.4
Python version: 2.7.10
Clang version: Apple LLVM version 9.0.0 (clang-900.0.38)
Protobuf version: libprotoc 3.3.2
The other dependencies required by the Caffe installation were installed as well.
Undefined symbols for architecture x86_64:
"google::protobuf::MessageLite::ParseFromString(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&)", referenced from:
caffe::DataLayer<float>::DataLayerSetUp(std::__1::vector<caffe::Blob<float>*, std::__1::allocator<caffe::Blob<float>*> > const&, std::__1::vector<caffe::Blob<float>*, std::__1::allocator<caffe::Blob<float>*> > const&) in data_layer.cpp.o
caffe::DataLayer<float>::load_batch(caffe::Batch<float>*) in data_layer.cpp.o
caffe::DataLayer<double>::DataLayerSetUp(std::__1::vector<caffe::Blob<double>*, std::__1::allocator<caffe::Blob<double>*> > const&, std::__1::vector<caffe::Blob<double>*, std::__1::allocator<caffe::Blob<double>*> > const&) in data_layer.cpp.o
caffe::DataLayer<double>::load_batch(caffe::Batch<double>*) in data_layer.cpp.o
"google::protobuf::MessageFactory::InternalRegisterGeneratedFile(char const*, void (*)(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&))", referenced from:
caffe::protobuf_caffe_2eproto::(anonymous namespace)::AddDescriptorsImpl() in libcaffeproto.a(caffe.pb.cc.o)
"google::protobuf::io::CodedOutputStream::WriteStringWithSizeToArray(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, unsigned char*)", referenced from:
caffe::Datum::InternalSerializeWithCachedSizesToArray(bool, unsigned char*) const in libcaffeproto.a(caffe.pb.cc.o)
caffe::FillerParameter::InternalSerializeWithCachedSizesToArray(bool, unsigned char*) const in libcaffeproto.a(caffe.pb.cc.o)
caffe::NetParameter::InternalSerializeWithCachedSizesToArray(bool, unsigned char*) const in libcaffeproto.a(caffe.pb.cc.o)
caffe::SolverParameter::InternalSerializeWithCachedSizesToArray(bool, unsigned char*) const in libcaffeproto.a(caffe.pb.cc.o)
caffe::SolverState::InternalSerializeWithCachedSizesToArray(bool, unsigned char*) const in libcaffeproto.a(caffe.pb.cc.o)
caffe::NetState::InternalSerializeWithCachedSizesToArray(bool, unsigned char*) const in libcaffeproto.a(caffe.pb.cc.o)
caffe::NetStateRule::InternalSerializeWithCachedSizesToArray(bool, unsigned char*) const in libcaffeproto.a(caffe.pb.cc.o)
...
"google::protobuf::internal::NameOfEnum(google::protobuf::EnumDescriptor const*, int)", referenced from:
caffe::InfogainLossLayer<float>::get_normalizer(caffe::LossParameter_NormalizationMode, int) in infogain_loss_layer.cpp.o
caffe::InfogainLossLayer<double>::get_normalizer(caffe::LossParameter_NormalizationMode, int) in infogain_loss_layer.cpp.o
caffe::ReductionLayer<float>::Forward_cpu(std::__1::vector<caffe::Blob<float>*, std::__1::allocator<caffe::Blob<float>*> > const&, std::__1::vector<caffe::Blob<float>*, std::__1::allocator<caffe::Blob<float>*> > const&) in reduction_layer.cpp.o
caffe::ReductionLayer<float>::Backward_cpu(std::__1::vector<caffe::Blob<float>*, std::__1::allocator<caffe::Blob<float>*> > const&, std::__1::vector<bool, std::__1::allocator<bool> > const&, std::__1::vector<caffe::Blob<float>*, std::__1::allocator<caffe::Blob<float>*> > const&) in reduction_layer.cpp.o
caffe::ReductionLayer<double>::Forward_cpu(std::__1::vector<caffe::Blob<double>*, std::__1::allocator<caffe::Blob<double>*> > const&, std::__1::vector<caffe::Blob<double>*, std::__1::allocator<caffe::Blob<double>*> > const&) in reduction_layer.cpp.o
caffe::ReductionLayer<double>::Backward_cpu(std::__1::vector<caffe::Blob<double>*, std::__1::allocator<caffe::Blob<double>*> > const&, std::__1::vector<bool, std::__1::allocator<bool> > const&, std::__1::vector<caffe::Blob<double>*, std::__1::allocator<caffe::Blob<double>*> > const&) in reduction_layer.cpp.o
caffe::SigmoidCrossEntropyLossLayer<float>::get_normalizer(caffe::LossParameter_NormalizationMode, int) in sigmoid_cross_entropy_loss_layer.cpp.o
...
"google::protobuf::internal::WireFormatLite::WriteString(int, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, google::protobuf::io::CodedOutputStream*)", referenced from:
caffe::NetParameter::SerializeWithCachedSizes(google::protobuf::io::CodedOutputStream*) const in libcaffeproto.a(caffe.pb.cc.o)
caffe::SolverParameter::SerializeWithCachedSizes(google::protobuf::io::CodedOutputStream*) const in libcaffeproto.a(caffe.pb.cc.o)
caffe::NetState::SerializeWithCachedSizes(google::protobuf::io::CodedOutputStream*) const in libcaffeproto.a(caffe.pb.cc.o)
caffe::NetStateRule::SerializeWithCachedSizes(google::protobuf::io::CodedOutputStream*) const in libcaffeproto.a(caffe.pb.cc.o)
caffe::LayerParameter::SerializeWithCachedSizes(google::protobuf::io::CodedOutputStream*) const in libcaffeproto.a(caffe.pb.cc.o)
caffe::V1LayerParameter::SerializeWithCachedSizes(google::protobuf::io::CodedOutputStream*) const in libcaffeproto.a(caffe.pb.cc.o)
"google::protobuf::internal::WireFormatLite::WriteBytesMaybeAliased(int, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, google::protobuf::io::CodedOutputStream*)", referenced from:
caffe::Datum::SerializeWithCachedSizes(google::protobuf::io::CodedOutputStream*) const in libcaffeproto.a(caffe.pb.cc.o)
"google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased(int, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, google::protobuf::io::CodedOutputStream*)", referenced from:
caffe::FillerParameter::SerializeWithCachedSizes(google::protobuf::io::CodedOutputStream*) const in libcaffeproto.a(caffe.pb.cc.o)
caffe::NetParameter::SerializeWithCachedSizes(google::protobuf::io::CodedOutputStream*) const in libcaffeproto.a(caffe.pb.cc.o)
caffe::SolverParameter::SerializeWithCachedSizes(google::protobuf::io::CodedOutputStream*) const in libcaffeproto.a(caffe.pb.cc.o)
caffe::SolverState::SerializeWithCachedSizes(google::protobuf::io::CodedOutputStream*) const in libcaffeproto.a(caffe.pb.cc.o)
caffe::ParamSpec::SerializeWithCachedSizes(google::protobuf::io::CodedOutputStream*) const in libcaffeproto.a(caffe.pb.cc.o)
caffe::LayerParameter::SerializeWithCachedSizes(google::protobuf::io::CodedOutputStream*) const in libcaffeproto.a(caffe.pb.cc.o)
caffe::TransformationParameter::SerializeWithCachedSizes(google::protobuf::io::CodedOutputStream*) const in libcaffeproto.a(caffe.pb.cc.o)
...
"google::protobuf::internal::WireFormatLite::ReadBytes(google::protobuf::io::CodedInputStream*, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >*)", referenced from:
caffe::Datum::MergePartialFromCodedStream(google::protobuf::io::CodedInputStream*) in libcaffeproto.a(caffe.pb.cc.o)
caffe::FillerParameter::MergePartialFromCodedStream(google::protobuf::io::CodedInputStream*) in libcaffeproto.a(caffe.pb.cc.o)
caffe::NetParameter::MergePartialFromCodedStream(google::protobuf::io::CodedInputStream*) in libcaffeproto.a(caffe.pb.cc.o)
caffe::SolverParameter::MergePartialFromCodedStream(google::protobuf::io::CodedInputStream*) in libcaffeproto.a(caffe.pb.cc.o)
caffe::SolverState::MergePartialFromCodedStream(google::protobuf::io::CodedInputStream*) in libcaffeproto.a(caffe.pb.cc.o)
caffe::NetState::MergePartialFromCodedStream(google::protobuf::io::CodedInputStream*) in libcaffeproto.a(caffe.pb.cc.o)
caffe::NetStateRule::MergePartialFromCodedStream(google::protobuf::io::CodedInputStream*) in libcaffeproto.a(caffe.pb.cc.o)
...
"google::protobuf::internal::AssignDescriptors(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, google::protobuf::internal::MigrationSchema const*, google::protobuf::Message const* const*, unsigned int const*, google::protobuf::MessageFactory*, google::protobuf::Metadata*, google::protobuf::EnumDescriptor const**, google::protobuf::ServiceDescriptor const**)", referenced from:
caffe::protobuf_caffe_2eproto::(anonymous namespace)::protobuf_AssignDescriptors() in libcaffeproto.a(caffe.pb.cc.o)
"google::protobuf::internal::fixed_address_empty_string", referenced from:
caffe::LayerParameter::set_type(char const*) in infogain_loss_layer.cpp.o
caffe::LSTMLayer<float>::FillUnrolledNet(caffe::NetParameter*) const in lstm_layer.cpp.o
caffe::LayerParameter::set_type(char const*) in lstm_layer.cpp.o
caffe::LayerParameter::set_name(char const*) in lstm_layer.cpp.o
caffe::ParamSpec::set_name(char const*) in lstm_layer.cpp.o
caffe::LSTMLayer<double>::FillUnrolledNet(caffe::NetParameter*) const in lstm_layer.cpp.o
caffe::RecurrentLayer<float>::LayerSetUp(std::__1::vector<caffe::Blob<float>*, std::__1::allocator<caffe::Blob<float>*> > const&, std::__1::vector<caffe::Blob<float>*, std::__1::allocator<caffe::Blob<float>*> > const&) in recurrent_layer.cpp.o
...
"leveldb::DB::Open(leveldb::Options const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, leveldb::DB**)", referenced from:
caffe::db::LevelDB::Open(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, caffe::db::Mode) in db_leveldb.cpp.o
"google::protobuf::Message::DebugString() const", referenced from:
caffe::Net<float>::Init(caffe::NetParameter const&) in net.cpp.o
caffe::Net<double>::Init(caffe::NetParameter const&) in net.cpp.o
caffe::Solver<float>::Init(caffe::SolverParameter const&) in solver.cpp.o
caffe::Solver<double>::Init(caffe::SolverParameter const&) in solver.cpp.o
"google::protobuf::Message::GetTypeName() const", referenced from:
vtable for caffe::BlobShape in libcaffeproto.a(caffe.pb.cc.o)
vtable for caffe::BlobProto in libcaffeproto.a(caffe.pb.cc.o)
vtable for caffe::BlobProtoVector in libcaffeproto.a(caffe.pb.cc.o)
vtable for caffe::Datum in libcaffeproto.a(caffe.pb.cc.o)
vtable for caffe::FillerParameter in libcaffeproto.a(caffe.pb.cc.o)
vtable for caffe::NetParameter in libcaffeproto.a(caffe.pb.cc.o)
vtable for caffe::SolverParameter in libcaffeproto.a(caffe.pb.cc.o)
...
"google::protobuf::Message::SerializeToOstream(std::__1::basic_ostream<char, std::__1::char_traits<char> >*) const", referenced from:
caffe::WriteProtoToBinaryFile(google::protobuf::Message const&, char const*) in io.cpp.o
"google::protobuf::Message::InitializationErrorString() const", referenced from:
vtable for caffe::BlobShape in libcaffeproto.a(caffe.pb.cc.o)
vtable for caffe::BlobProto in libcaffeproto.a(caffe.pb.cc.o)
vtable for caffe::BlobProtoVector in libcaffeproto.a(caffe.pb.cc.o)
vtable for caffe::Datum in libcaffeproto.a(caffe.pb.cc.o)
vtable for caffe::FillerParameter in libcaffeproto.a(caffe.pb.cc.o)
vtable for caffe::NetParameter in libcaffeproto.a(caffe.pb.cc.o)
vtable for caffe::SolverParameter in libcaffeproto.a(caffe.pb.cc.o)
...
ld: symbol(s) not found for architecture x86_64
clang: error: linker command failed with exit code 1 (use -v to see invocation)
make[2]: *** [lib/libcaffe.1.0.0.dylib] Error 1
make[1]: *** [src/caffe/CMakeFiles/caffe.dir/all] Error 2
make: *** [all] Error 2
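Every unresolved symbol above takes std::__1 (libc++) string types, so the link failure is consistent with the installed libprotobuf having been built against a different C++ standard library, or being a different version than the headers Caffe compiled against. A minimal link check, as a sketch (the compile command and the use of pkg-config are assumptions, not taken from the build log):
// check.cc -- if this fails to link with the same undefined std::__1
// symbols, the mismatch is in libprotobuf itself, not in Caffe.
//   clang++ -std=c++11 -stdlib=libc++ check.cc $(pkg-config --cflags --libs protobuf)
#include <google/protobuf/stubs/common.h>
#include <iostream>

int main() {
    // VersionString returns a std::string constructed inside
    // libprotobuf, exercising the same string ABI the Caffe link needs.
    std::cout << google::protobuf::internal::VersionString(GOOGLE_PROTOBUF_VERSION)
              << std::endl;
    google::protobuf::ShutdownProtobufLibrary();
    return 0;
}
If this check fails the same way, rebuilding protobuf with the same clang and -stdlib=libc++ used for Caffe usually resolves it.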
Thanks.

Related

Sequelize issues when running an Express.js app

I'm trying to run a simple application with Express, Sequelize, and MySQL, and I get this error:
Executing (default): CREATE TABLE IF NOT EXISTS `tasks` (`id` INTEGER NOT NULL auto_increment , `title` VARCHAR(255) NOT NULL, `description` VARCHAR(255) NOT NULL, `status` NUMBER NOT NULL, `createdBy` NUMBER, `createdAt` DATETIME NOT NULL, `updatedAt` DATETIME NOT NULL, PRIMARY KEY (`id`)) ENGINE=InnoDB;
err: Error
at Query.run (C:\Users\Kryolos.Hakeem\desktop\vodafone-tech-test\back-end-v2\node_modules\sequelize\lib\dialects\mysql\query.js:52:25)
at C:\Users\Kryolos.Hakeem\desktop\vodafone-tech-test\back-end-v2\node_modules\sequelize\lib\sequelize.js:313:28
at processTicksAndRejections (internal/process/task_queues.js:95:5)
at async MySQLQueryInterface.createTable (C:\Users\Kryolos.Hakeem\desktop\vodafone-tech-test\back-end-v2\node_modules\sequelize\lib\dialects\abstract\query-interface.js:94:12)
at async Function.sync (C:\Users\Kryolos.Hakeem\desktop\vodafone-tech-test\back-end-v2\node_modules\sequelize\lib\model.js:939:5)
at async Sequelize.sync (C:\Users\Kryolos.Hakeem\desktop\vodafone-tech-test\back-end-v2\node_modules\sequelize\lib\sequelize.js:377:9) {
name: 'SequelizeDatabaseError',
parent: Error: You have an error in your SQL syntax; check the manual that corresponds to your MySQL server version for the right syntax to use near 'NUMBER NOT NULL, `createdBy` NUMBER, `createdAt` DATETIME NOT NULL, `updatedAt` ' at line 1
at Packet.asError (C:\Users\Kryolos.Hakeem\desktop\vodafone-tech-test\back-end-v2\node_modules\mysql2\lib\packets\packet.js:728:17)
at Query.execute (C:\Users\Kryolos.Hakeem\desktop\vodafone-tech-test\back-end-v2\node_modules\mysql2\lib\commands\command.js:29:26)
at Connection.handlePacket (C:\Users\Kryolos.Hakeem\desktop\vodafone-tech-test\back-end-v2\node_modules\mysql2\lib\connection.js:456:32)
at PacketParser.onPacket (C:\Users\Kryolos.Hakeem\desktop\vodafone-tech-test\back-end-v2\node_modules\mysql2\lib\connection.js:85:12)
at PacketParser.executeStart (C:\Users\Kryolos.Hakeem\desktop\vodafone-tech-test\back-end-v2\node_modules\mysql2\lib\packet_parser.js:75:16)
at Socket.<anonymous> (C:\Users\Kryolos.Hakeem\desktop\vodafone-tech-test\back-end-v2\node_modules\mysql2\lib\connection.js:92:25)
at Socket.emit (events.js:375:28)
at addChunk (internal/streams/readable.js:290:12)
at readableAddChunk (internal/streams/readable.js:265:9)
at Socket.Readable.push (internal/streams/readable.js:204:10) {
code: 'ER_PARSE_ERROR',
errno: 1064,
sqlState: '42000',
sqlMessage: "You have an error in your SQL syntax; check the manual that corresponds to your MySQL server version for the right syntax to use near 'NUMBER NOT NULL, `createdBy` NUMBER, `createdAt` DATETIME NOT NULL, `updatedAt` ' at line 1",
sql: 'CREATE TABLE IF NOT EXISTS `tasks` (`id` INTEGER NOT NULL auto_increment , `title` VARCHAR(255) NOT NULL, `description` VARCHAR(255) NOT NULL, `status` NUMBER NOT NULL, `createdBy` NUMBER, `createdAt` DATETIME NOT NULL, `updatedAt` DATETIME NOT NULL, PRIMARY KEY
(`id`)) ENGINE=InnoDB;',
parameters: undefined
},
original: Error: You have an error in your SQL syntax; check the manual that corresponds
to your MySQL server version for the right syntax to use near 'NUMBER NOT NULL, `createdBy` NUMBER, `createdAt` DATETIME NOT NULL, `updatedAt` ' at line 1
at Packet.asError (C:\Users\Kryolos.Hakeem\desktop\vodafone-tech-test\back-end-v2\node_modules\mysql2\lib\packets\packet.js:728:17)
at Query.execute (C:\Users\Kryolos.Hakeem\desktop\vodafone-tech-test\back-end-v2\node_modules\mysql2\lib\commands\command.js:29:26)
at Connection.handlePacket (C:\Users\Kryolos.Hakeem\desktop\vodafone-tech-test\back-end-v2\node_modules\mysql2\lib\connection.js:456:32)
at PacketParser.onPacket (C:\Users\Kryolos.Hakeem\desktop\vodafone-tech-test\back-end-v2\node_modules\mysql2\lib\connection.js:85:12)
at PacketParser.executeStart (C:\Users\Kryolos.Hakeem\desktop\vodafone-tech-test\back-end-v2\node_modules\mysql2\lib\packet_parser.js:75:16)
at Socket.<anonymous> (C:\Users\Kryolos.Hakeem\desktop\vodafone-tech-test\back-end-v2\node_modules\mysql2\lib\connection.js:92:25)
at Socket.emit (events.js:375:28)
at addChunk (internal/streams/readable.js:290:12)
at readableAddChunk (internal/streams/readable.js:265:9)
at Socket.Readable.push (internal/streams/readable.js:204:10) {
code: 'ER_PARSE_ERROR',
errno: 1064,
sqlState: '42000',
sqlMessage: "You have an error in your SQL syntax; check the manual that corresponds to your MySQL server version for the right syntax to use near 'NUMBER NOT NULL, `createdBy` NUMBER, `createdAt` DATETIME NOT NULL, `updatedAt` ' at line 1",
sql: 'CREATE TABLE IF NOT EXISTS `tasks` (`id` INTEGER NOT NULL auto_increment , `title` VARCHAR(255) NOT NULL, `description` VARCHAR(255) NOT NULL, `status` NUMBER NOT NULL, `createdBy` NUMBER, `createdAt` DATETIME NOT NULL, `updatedAt` DATETIME NOT NULL, PRIMARY KEY
(`id`)) ENGINE=InnoDB;',
parameters: undefined
},
sql: 'CREATE TABLE IF NOT EXISTS `tasks` (`id` INTEGER NOT NULL auto_increment , `title` VARCHAR(255) NOT NULL, `description` VARCHAR(255) NOT NULL, `status` NUMBER NOT NULL, `createdBy` NUMBER, `createdAt` DATETIME NOT NULL, `updatedAt` DATETIME NOT NULL, PRIMARY KEY (`id`)) ENGINE=InnoDB;',
parameters: {}
}
[nodemon] clean exit - waiting for changes before restart
So here is my connection:
const { Sequelize } = require('sequelize');
require('dotenv').config();
const Sequelized = new Sequelize(
  process.env.DATABASE_NAME,
  process.env.USER_NAME,
  process.env.PASSWORD,
  {
    dialect: 'mysql',
    host: process.env.DATABASE_HOST
  }
);
module.exports = Sequelized;
And here is the entry point for the app:
const express = require('express');
const bodyParser = require('body-parser');
const cors = require('cors');
const router = require('./routes/tasks');
const Sequelized = require('./utiles/database');
const app = express();
require('dotenv').config();
app.use(cors());
app.use(bodyParser.urlencoded({ extended: false }));
app.use(router);
Sequelized.sync()
  .then((result) => {
    console.log('result: ', result);
    app.listen(3001, () => console.log('App is Listening on port 3001'));
  })
  .catch((err) => {
    console.log('err: ', err);
  });
And here is my task model definition:
const { DataTypes } = require('sequelize');
const Sequelized = require('../utiles/database');
const Task = Sequelized.define('task', {
  id: {
    type: DataTypes.INTEGER,
    autoIncrement: true,
    allowNull: false,
    primaryKey: true,
  },
  title: {
    type: DataTypes.STRING,
    allowNull: false,
  },
  description: {
    type: DataTypes.STRING,
    allowNull: false,
  },
  status: {
    type: DataTypes.NUMBER,
    allowNull: false,
  },
  createdBy: {
    type: DataTypes.NUMBER,
    allowNull: true
  }
});
module.exports = Task;
And here is my attempt to use it:
const Task = require("../models/task");
exports.CreateTask = (req, res) => {
  const { title, description, userId } = req.body;
  Task.CreateTask({
    title,
    description,
    createdBy: userId,
    status: 1
  }).then(res => {
    console.log('res: ', res)
  }).catch((err) => {
    console.log('err: ', err);
  })
}
This issue happens only when I try to use Task to start creating tasks. I think the issue is reproducible. Any help is appreciated, thanks.
The problem is that you defined the model Task, but in the last piece of code you exported a function named CreateTask, and inside that arrow function you try to create a new Task by calling CreateTask again; CreateTask is your own controller function, not a Sequelize model method. (The shorthand properties title and description are fine; they are equivalent to spelling the keys out as below.)
You should use Task.create(...) like this:
const Task = require("../models/task");
exports.CreateTask = (req, res) => {
  const { title, description, userId } = req.body;
  Task.create({
    title: title,
    description: description,
    createdBy: userId,
    status: 1
  }).then(res => {
    console.log('res: ', res)
  }).catch((err) => {
    console.log('err: ', err);
  })
}
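That fixes the call itself, but the SQL error in the log is raised earlier, during Sequelized.sync(): Sequelize's abstract DataTypes.NUMBER is emitted literally as NUMBER in the generated CREATE TABLE, and MySQL has no NUMBER type, which is exactly where the parse error points. A sketch of the model with the two affected fields switched to the MySQL-compatible DataTypes.INTEGER (everything else unchanged from the definition above):
const { DataTypes } = require('sequelize');
const Sequelized = require('../utiles/database');
const Task = Sequelized.define('task', {
  id: { type: DataTypes.INTEGER, autoIncrement: true, allowNull: false, primaryKey: true },
  title: { type: DataTypes.STRING, allowNull: false },
  description: { type: DataTypes.STRING, allowNull: false },
  status: { type: DataTypes.INTEGER, allowNull: false },   // was DataTypes.NUMBER
  createdBy: { type: DataTypes.INTEGER, allowNull: true }  // was DataTypes.NUMBER
});
module.exports = Task;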

Unknown column 'base64str' in 'field list', but the field exists in the database

I keep getting this error:
D:\node_apps\financeplus>node financeplus.js
App running on Port: 7000
D:\node_apps\financeplus\node_modules\mysql\lib\protocol\Parser.js:437
throw err; // Rethrow non-MySQL errors
^
Error: ER_BAD_FIELD_ERROR: Unknown column 'base64str' in 'field list'
at Query.Sequence._packetToError (D:\node_apps\financeplus\node_modules\mysql\lib\protocol\sequences\Sequence.js:47:14)
at Query.ErrorPacket (D:\node_apps\financeplus\node_modules\mysql\lib\protocol\sequences\Query.js:79:18)
at Protocol._parsePacket (D:\node_apps\financeplus\node_modules\mysql\lib\protocol\Protocol.js:291:23)
at Parser._parsePacket (D:\node_apps\financeplus\node_modules\mysql\lib\protocol\Parser.js:433:10)
at Parser.write (D:\node_apps\financeplus\node_modules\mysql\lib\protocol\Parser.js:43:10)
at Protocol.write (D:\node_apps\financeplus\node_modules\mysql\lib\protocol\Protocol.js:38:16)
at Socket.<anonymous> (D:\node_apps\financeplus\node_modules\mysql\lib\Connection.js:88:28)
at Socket.<anonymous> (D:\node_apps\financeplus\node_modules\mysql\lib\Connection.js:526:10)
at Socket.emit (events.js:400:28)
at addChunk (internal/streams/readable.js:290:12)
--------------------
at Pool.query (D:\node_apps\financeplus\node_modules\mysql\lib\Pool.js:199:23)
at D:\node_apps\financeplus\financeplus.js:77:8
at Layer.handle [as handle_request] (D:\node_apps\financeplus\node_modules\express\lib\router\layer.js:95:5)
at next (D:\node_apps\financeplus\node_modules\express\lib\router\route.js:137:13)
at Route.dispatch (D:\node_apps\financeplus\node_modules\express\lib\router\route.js:112:3)
at Layer.handle [as handle_request] (D:\node_apps\financeplus\node_modules\express\lib\router\layer.js:95:5)
at D:\node_apps\financeplus\node_modules\express\lib\router\index.js:281:22
at Function.process_params (D:\node_apps\financeplus\node_modules\express\lib\router\index.js:335:12)
at next (D:\node_apps\financeplus\node_modules\express\lib\router\index.js:275:10)
at urlencodedParser (D:\node_apps\financeplus\node_modules\body-parser\lib\types\urlencoded.js:82:7) {
code: 'ER_BAD_FIELD_ERROR',
errno: 1054,
sqlMessage: "Unknown column 'base64str' in 'field list'",
sqlState: '42S22',
index: 0,
sql: "INSERT INTO financeplusacct (fullname, address, city,state, tel,email,nationalID,gender,birth_date, bal, ccy, accNum, base64str) VALUES ('Allen Hommer','102 Whitewater Road','Smithfield','North Carolina','(919)800-3322','a.hommer#aol.com','A89360','Male','2021-11-03','2500','USD','00343515949','data:image/jpeg;base64,/9j/4AAQSkZJRgABAQEAYABgAAD.........')"
}
D:\node_apps\financeplus>
The information is not saved to the MySQL database. I created the table to accommodate base64str, and just adding that column gives me this error.
Here is the code that inserts into the database:
app.post('/api/createaccount', function(req, res){
  var prologue = '00343';
  var digits = Math.floor(Math.random() * 900000) + 100000;
  var accNum = prologue + digits;
  var fullname = req.body.fullname;
  var address = req.body.address;
  var city = req.body.city;
  var state = req.body.state;
  var tel = req.body.tel;
  var email = req.body.email;
  var nationalID = req.body.nationalID;
  var gender = req.body.gender;
  var birth_date = req.body.birth_date;
  var bal = req.body.bal;
  var ccy = req.body.ccy;
  var base64str = req.body.base64str;
  dbConn.query('INSERT INTO financeplusacct (fullname, address, city, state, tel, email, nationalID, gender, birth_date, bal, ccy, accNum, base64str) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?)', [fullname, address, city, state, tel, email, nationalID, gender, birth_date, bal, ccy, accNum, base64str], function (error, results, fields){
    if (error) throw error;
    return res.send({error: false, data: results, message: 'Account setup Complete'})
  });
});
I am calling the REST API from the React front end like this:
function createCustomerAcc(){
  let base64str = localStorage.getItem('postImage');
  let item = {fullname, address, city, state, tel, email, nationalID, gender, birth_date, bal, ccy, base64str};
  fetch('http://localhost:7000/api/createaccount', {
    method: 'POST',
    mode: 'cors',
    headers: {
      'Accept': 'application/json',
      'Content-Type': 'application/json',
    },
    body: JSON.stringify(item)
  }).then((response) => response.json())
    .then((responseJson) => {
      if (responseJson.message == 'Account setup Complete') {
        alert('Account Setup Complete');
      } else {
        alert(responseJson.message);
      }
    }).catch((error) => {
      console.error(error);
    });
}
This gives me that error in the console. Why is this so? What do I have to do in this case? It's firing back the MySQL error, and I don't see where the problem comes from, since the column exists in the database. And the funny thing is, the column exists in all areas. What am I not getting right?
Edit:
The CREATE TABLE statement is:
CREATE TABLE `financeplusacct` (
`id` int(11) NOT NULL,
`fullname` varchar(150) NOT NULL,
`address` varchar(300) NOT NULL,
`city` varchar(150) NOT NULL,
`state` varchar(150) NOT NULL,
`tel` varchar(150) NOT NULL,
`email` varchar(150) NOT NULL,
`nationalID` varchar(150) NOT NULL,
`gender` varchar(150) NOT NULL,
`birth_date` varchar(150) NOT NULL,
`ccy` varchar(150) NOT NULL,
`bal` decimal(18,2) NOT NULL,
`accNum` varchar(150) NOT NULL,
`base64str` text NOT NULL,
`created_at` timestamp NOT NULL DEFAULT current_timestamp(),
`updated_at` timestamp NOT NULL DEFAULT current_timestamp() ON UPDATE current_timestamp()
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
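One way to narrow this down (the two diagnostic queries below are a sketch, not from the original post, and assume the same dbConn pool used in the insert code) is to ask the live connection which schema it is using and which columns it actually sees, since an ER_BAD_FIELD_ERROR for a column that "exists everywhere" often means the app is connected to a different database, or to an older copy of the table, than the one that was altered:
dbConn.query('SELECT DATABASE() AS db', function (error, results) {
  if (error) throw error;
  console.log('connected to schema:', results[0].db);
});
dbConn.query('SHOW COLUMNS FROM financeplusacct', function (error, results) {
  if (error) throw error;
  // If base64str is missing from this list, the pool is pointed at a
  // copy of the table that never received the new column.
  console.log(results.map(function (r) { return r.Field; }));
});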

Query all rows from a MySQL table and store them as a cache

I have one table that is required for some charging logic in my application. I am trying to load this table into a JSON-tagged struct and use it as a cache. This is my approach:
table:
CREATE TABLE `lwratecarddefinition` (
`RATECARDID` int(10) DEFAULT NULL AUTO_INCREMENT,
`RATECARDGROUPID` int(10) DEFAULT NULL,
`SERVICEID` varchar(10) DEFAULT NULL,
`USAGETYPEID` varchar(10) DEFAULT NULL,
`CURRENCYCODE` varchar(4) DEFAULT NULL,
`LEDGERID` varchar(15) DEFAULT NULL,
`PULSE` varchar(10) DEFAULT NULL,
`SPECIALPULSE` varchar(10) DEFAULT NULL,
`NORMALRATE` varchar(10) DEFAULT NULL,
`OFFPEAKSET` varchar(2) DEFAULT NULL,
`OFFPEAKRATE` varchar(10) DEFAULT NULL,
`PEAKRATE` varchar(10) DEFAULT NULL,
`ONDEMANDSET` varchar(2) DEFAULT NULL,
`ONDEMANDRATE` varchar(2) DEFAULT NULL,
KEY `idx_lwratecarddefinition_RATECARDID` (`RATECARDID`),
KEY `idx_lwratecarddefinition_RATECARDGROUPID` (`RATECARDGROUPID`),
KEY `idx_lwratecarddefinition_USAGETYPEID` (`USAGETYPEID`),
KEY `idx_lwratecarddefinition_SERVICEID` (`SERVICEID`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8
main.go code snippet (my objective is to fetch the records every 1 minute using a goroutine):
// load the ratecard first
lwratecardefinition = getRatecard()
// update data every 1 second
go func() {
	for {
		time.Sleep(time.Second)
		lwratecardefinition = getRatecard()
		fmt.Println(lwratecardefinition)
	}
}()
dbfunction.go
package main

import (
	"database/sql"

	_ "github.com/go-sql-driver/mysql"
)

type Lwratecardefinition struct {
	Ratecardid      int    `json:"ratecardid"`
	Ratecardgroupid int    `json:"ratecardgroupid"`
	Serviceid       int    `json:"serviceid"`
	Usagetypeid     int    `json:"usagetypeid"`
	Currencycode    int    `json:"currencycode"`
	Ledgerid        string `json:"ledgerid"`
	Pulse           int    `json:"pulse"`
	Specialpulse    int    `json:"specialpulse"`
	Normalrate      int    `json:"normalrate"`
	Offpeakset      int    `json:"offpeakset"`
	Offpeakrate     int    `json:"offpeakrate"`
	Peakrate        int    `json:"peakrate"`
	Ondemandset     int    `json:"ondemandset"`
	Ondemandrate    int    `json:"ondemandrate"`
}

var lwratecardefinition Lwratecardefinition

func getRatecard() Lwratecardefinition {
	db, err := sql.Open("mysql", "user:password@tcp(127.0.0.1:3306)/bcsdb")
	// if there is an error opening the connection, handle it
	if err != nil {
		panic(err.Error())
	}
	// defer the close till after this function has finished executing
	defer db.Close()
	var ratecardid, ratecardgroupid, serviceid, usagetypeid, currencycode, pulse, specialpulse, normalrate, offpeakset, offpeakrate, peakrate, ondemandset, ondemandrate int
	var ledgerid string
	results, err := db.Query("select ratecardid,ratecardgroupid,serviceid,usagetypeid,currencycode,ledgerid,pulse,specialpulse,normalrate,offpeakset,offpeakrate,peakrate,ondemandset,ondemandrate from lwratecarddefinition")
	if err != nil {
		panic(err.Error())
	}
	for results.Next() {
		err = results.Scan(&ratecardid, &ratecardgroupid, &serviceid, &usagetypeid, &currencycode, &ledgerid, &pulse, &specialpulse, &normalrate, &offpeakset, &offpeakrate, &peakrate, &ondemandset, &ondemandrate)
		lwratecardefinition = Lwratecardefinition{Ratecardid: ratecardid, Ratecardgroupid: ratecardgroupid, Serviceid: serviceid, Usagetypeid: usagetypeid, Currencycode: currencycode, Ledgerid: ledgerid, Pulse: pulse, Specialpulse: specialpulse, Normalrate: normalrate, Offpeakset: offpeakset, Offpeakrate: offpeakrate, Peakrate: peakrate, Ondemandset: ondemandset, Ondemandrate: ondemandrate}
	}
	return lwratecardefinition
}
But when I execute the program I only get one row, not all rows from the MySQL table. I need all rows to use as a cache.
output:
{4 508 1 201 1 DATA 60 30 2 1 3 1 1 5}
Also, my final objective is to use these values to find the rate and pulse: if Ratecardgroupid = X and Serviceid = Y, I want to use the pulse value from the same row.
Please suggest what is wrong here, and whether this approach is a sensible way to reduce DB calls.
But when I am executing the program I am only getting one row, not all rows from the MySQL table
That is because your function returns a single Lwratecardefinition struct, whose value you overwrite in each iteration of the loop. You want to return a slice of them, something like:
func getRatecard() []Lwratecardefinition {
	...
	var result []Lwratecardefinition
	for results.Next() {
		err = results.Scan(&ratecardid, &ratecardgroupid, &serviceid, &usagetypeid, &currencycode, &ledgerid, &pulse, &specialpulse, &normalrate, &offpeakset, &offpeakrate, &peakrate, &ondemandset, &ondemandrate)
		if err != nil {
			// a row failed to scan; handle the error
			...
		}
		result = append(result, Lwratecardefinition{Ratecardid: ratecardid, Ratecardgroupid: ratecardgroupid, Serviceid: serviceid, Usagetypeid: usagetypeid, Currencycode: currencycode, Ledgerid: ledgerid, Pulse: pulse, Specialpulse: specialpulse, Normalrate: normalrate, Offpeakset: offpeakset, Offpeakrate: offpeakrate, Peakrate: peakrate, Ondemandset: ondemandset, Ondemandrate: ondemandrate})
	}
	if results.Err() != nil {
		// something went wrong while reading records
		...
	}
	return result
}
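For the stated final objective (find rate and pulse where Ratecardgroupid = X and Serviceid = Y), indexing the cached slice in a map avoids scanning every row on each charge. A self-contained sketch, not from the answer above (the trimmed struct and the sample row are illustrative only):
package main

import "fmt"

// Trimmed to the fields the lookup needs; the question's full struct
// has the rest.
type Lwratecardefinition struct {
	Ratecardgroupid int
	Serviceid       int
	Pulse           int
	Normalrate      int
}

// ratecardKey is the composite lookup key described in the question.
type ratecardKey struct {
	groupID   int
	serviceID int
}

// buildIndex should be re-run each time the cached slice is refreshed.
func buildIndex(cards []Lwratecardefinition) map[ratecardKey]Lwratecardefinition {
	idx := make(map[ratecardKey]Lwratecardefinition, len(cards))
	for _, c := range cards {
		idx[ratecardKey{c.Ratecardgroupid, c.Serviceid}] = c
	}
	return idx
}

func main() {
	cache := []Lwratecardefinition{{Ratecardgroupid: 508, Serviceid: 1, Pulse: 60, Normalrate: 2}}
	idx := buildIndex(cache)
	if card, ok := idx[ratecardKey{508, 1}]; ok {
		fmt.Println("pulse:", card.Pulse, "rate:", card.Normalrate)
	}
}
Since the refresh runs in its own goroutine while other code reads the cache, the swap of the slice and its index should also be synchronized, for example with a sync.RWMutex or by publishing the new index through an atomic.Value.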

Is there a way to stop the AURemoteIO thread from AudioToolbox from performing its polling tasks?

So I am using cocos2d-x's new AudioEngine, and I am having this crash: https://github.com/cocos2d/cocos2d-x/issues/18948.
0 0x0000000184a5cea8 in CrashIfClientProvidedBogusAudioBufferList ()
1 0x000000018493f270 in AudioConverterConvertComplexBuffer ()
2 0x0000000184939460 in AUSpatialMixer::Render(unsigned int&, AudioTimeStamp const&, unsigned int) ()
3 0x0000000184a62a20 in AUBase::DoRenderBus(unsigned int&, AudioTimeStamp const&, unsigned int, AUOutputElement*, unsigned int, AudioBufferList&) ()
4 0x0000000184a621ac in AUBase::DoRender(unsigned int&, AudioTimeStamp const&, unsigned int, unsigned int, AudioBufferList&) ()
5 0x0000000184a6b550 in AUMethodRender(void*, unsigned int*, AudioTimeStamp const*, unsigned int, unsigned int, AudioBufferList*) ()
6 0x0000000184a5d284 in AUInputElement::PullInput(unsigned int&, AudioTimeStamp const&, unsigned int, unsigned int) ()
7 0x00000001847e8e00 in AUInputFormatConverter2::InputProc(OpaqueAudioConverter*, unsigned int*, AudioBufferList*, AudioStreamPacketDescription**, void*) ()
8 0x000000018474fdd4 in AudioConverterChain::CallInputProc(unsigned int) ()
9 0x000000018474fa64 in AudioConverterChain::FillBufferFromInputProc(unsigned int*, CABufferList*) ()
10 0x000000018472be38 in BufferedAudioConverter::GetInputBytes(unsigned int, unsigned int&, CABufferList const*&) ()
11 0x00000001849906f8 in Resampler2Wrapper::RenderOutput(CABufferList*, unsigned int, unsigned int&) ()
12 0x0000000184752a7c in SampleRateConverter::RenderOutput(CABufferList*, unsigned int, unsigned int&, AudioStreamPacketDescription*) ()
13 0x000000018472bcc8 in BufferedAudioConverter::FillBuffer(unsigned int&, AudioBufferList&, AudioStreamPacketDescription*) ()
14 0x000000018474f65c in AudioConverterChain::RenderOutput(CABufferList*, unsigned int, unsigned int&, AudioStreamPacketDescription*) ()
15 0x000000018472bcc8 in BufferedAudioConverter::FillBuffer(unsigned int&, AudioBufferList&, AudioStreamPacketDescription*) ()
16 0x000000018493ec98 in AudioConverterFillComplexBuffer ()
17 0x00000001847e864c in AUConverterBase::RenderBus(unsigned int&, AudioTimeStamp const&, unsigned int, unsigned int) ()
18 0x00000001846cd07c in AURemoteIO::RenderBus(unsigned int&, AudioTimeStamp const&, unsigned int, unsigned int) ()
19 0x0000000184a62a20 in AUBase::DoRenderBus(unsigned int&, AudioTimeStamp const&, unsigned int, AUOutputElement*, unsigned int, AudioBufferList&) ()
20 0x0000000184a621ac in AUBase::DoRender(unsigned int&, AudioTimeStamp const&, unsigned int, unsigned int, AudioBufferList&) ()
21 0x00000001846ccd28 in AURemoteIO::PerformIO(unsigned int, unsigned int, unsigned int, AudioTimeStamp const&, AudioTimeStamp const&, AudioBufferList const*, AudioBufferList*, int&) ()
22 0x00000001846ce1a0 in AURIOCallbackReceiver_PerformIO ()
23 0x0000000184860018 in _XPerformIO ()
24 0x0000000184a4a5ec in mshMIGPerform ()
25 0x0000000184a4a804 in MSHMIGDispatchMessage ()
26 0x00000001846cd4c0 in AURemoteIO::IOThread::Run() ()
27 0x00000001846d1a40 in AURemoteIO::IOThread::Entry(void*) ()
28 0x0000000184a52630 in CAPThread::Entry(CAPThread*) ()
29 0x00000001808c432c in _pthread_body ()
30 0x00000001808c41f8 in _pthread_start ()
31 0x00000001808c2c38 in thread_start ()
Basically what is happening is that the AudioEngine uses alBufferDataStatic and stores the data to read inside a char* pcmData buffer (you can see the code here: https://github.com/cocos2d/cocos2d-x/tree/v3/cocos/audio/apple; the main files are AudioCache.mm and AudioDecoder.mm). When the pcmData buffer is freed inside the AudioCache destructor (which is called from the main thread), there is a race condition, because AURemoteIO is polling client data (mp3 in my case) to convert it to PCM on its own thread.
So the question is: can we stop the AURemoteIO thread from doing anything with this now-freed pcmData buffer at the exact moment it is freed?
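No answer is recorded here, but the usual mitigation with alBufferDataStatic (where the app, not OpenAL, owns the memory) is to stop the source, wait for it to actually leave AL_PLAYING, and detach the buffer before freeing. A hedged C++ sketch (the helper and its arguments are illustrative, not from cocos2d-x):
#include <OpenAL/al.h>
#include <cstdlib>

// Make sure no source is still rendering from the app-owned pcmData
// before it is freed; with alBufferDataStatic, OpenAL reads from this
// memory directly, so freeing it mid-render is exactly the race above.
void releaseStaticBuffer(ALuint source, ALuint buffer, char** pcmData) {
    alSourceStop(source);
    ALint state = AL_PLAYING;
    while (state == AL_PLAYING) {
        alGetSourcei(source, AL_SOURCE_STATE, &state); // wait for the render thread
    }
    alSourcei(source, AL_BUFFER, 0); // detach the static buffer
    alDeleteBuffers(1, &buffer);     // the buffer no longer aliases pcmData
    free(*pcmData);                  // now safe to release
    *pcmData = NULL;
}
As the linked issue discusses, even this ordering is not a hard guarantee with Apple's implementation, which is why some projects fall back to plain alBufferData (which copies the data) instead of the static variant.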

Error during user registration (Express.js, Passport, MySQL)

I'm working on a registration page using Express.js, Passport (local), and MySQL, but my code doesn't seem to function properly.
This is my query (MySQL):
connection.query('CREATE TABLE `' + dbconfig.database + '`.`' + dbconfig.users_table + '` ( \
  `id` INT UNSIGNED NOT NULL AUTO_INCREMENT, \
  `useremail` VARCHAR(20) NOT NULL, \
  `password` CHAR(60) NOT NULL, \
  `passwordHint` VARCHAR(5) NOT NULL, \
  `hintanswer` VARCHAR(60) NOT NULL, \
  PRIMARY KEY (`id`), \
  UNIQUE INDEX `id_UNIQUE` (`id` ASC), \
  UNIQUE INDEX `useremail_UNIQUE` (`useremail` ASC) )');
This is the passport strategy:
passport.use(
  'local-signup',
  new LocalStrategy({
    usernameField: 'useremail',
    passwordField: 'password',
    passwordHintField: 'passwordHint', // change
    hintanswerField: 'hintanswer', // change
    passReqToCallback: true
  },
  function(req, useremail, password, passwordHint, hintanswer, done) {
    connection.query("SELECT * FROM users WHERE useremail = ?", [useremail], function(err, rows) {
      if (err)
        return done(err);
      if (rows.length) {
        return done(null, false, req.flash('signupMessage', 'That useremail is already taken.'));
      }
      else {
        var newUserMysql = {
          useremail: useremail,
          password: bcrypt.hashSync(password, null, null), // use the generateHash function in our user model
          passwordHint: bcrypt.hashSync(passwordHint, null, null), // change
          hintanswer: bcrypt.hashSync(hintanswer, null, null) // change
        };
        var insertQuery = "INSERT INTO users ( useremail, password, passwordHint, hintanswer) values (?,?,?,?)"; // change
        connection.query(insertQuery, [newUserMysql.useremail, newUserMysql.password, newUserMysql.passwordHint, newUserMysql.hintanswer], function(err, rows) { // change
          newUserMysql.id = rows.insertId;
          return done(null, newUserMysql);
        });
      }
    });
  })
);
And this is the error I get:
TypeError: Cannot read property 'insertId' of undefined
at Query._callback (C:\Users\tpdle\dev\Ttareungyi-Navi\passport\passport.js:72:47)
at Query.Sequence.end (C:\Users\tpdle\dev\Ttareungyi-Navi\node_modules\mysql\lib\protocol\sequences\Sequence.js:88:24)
at Query.ErrorPacket (C:\Users\tpdle\dev\Ttareungyi-Navi\node_modules\mysql\lib\protocol\sequences\Query.js:90:8)
at Protocol._parsePacket (C:\Users\tpdle\dev\Ttareungyi-Navi\node_modules\mysql\lib\protocol\Protocol.js:279:23)
at Parser.write (C:\Users\tpdle\dev\Ttareungyi-Navi\node_modules\mysql\lib\protocol\Parser.js:76:12)
at Protocol.write (C:\Users\tpdle\dev\Ttareungyi-Navi\node_modules\mysql\lib\protocol\Protocol.js:39:16)
at Socket.<anonymous> (C:\Users\tpdle\dev\Ttareungyi-Navi\node_modules\mysql\lib\Connection.js:103:28)
at emitOne (events.js:96:13)
at Socket.emit (events.js:188:7)
at readableAddChunk (_stream_readable.js:176:18)
If you're still having this issue, replace this:
var insertQuery = "INSERT INTO users ( useremail, password, passwordHint, hintanswer) values (?,?,?,?)";
connection.query(insertQuery, [newUserMysql.useremail, newUserMysql.password, newUserMysql.passwordHint, newUserMysql.hintanswer], function(err, rows) {
  newUserMysql.id = rows.insertId;
  return done(null, newUserMysql);
});
with this:
var insertQuery = "INSERT INTO users SET ?";
connection.query(insertQuery, newUserMysql, function(err, rows) {
  if (err) {
    console.log(err);
    return done(null, err);
  } else {
    newUserMysql.id = rows.insertId;
    return done(null, newUserMysql);
  }
});
It did the job for me. The key part is the err check: in your original callback the INSERT error was ignored, so when the insert failed, rows was undefined and reading rows.insertId threw the TypeError.
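As a side note (this last snippet is a hedged sketch, not part of the fix above): passport-local only understands the usernameField and passwordField options, so passwordHintField and hintanswerField are ignored, and the verify callback is invoked as (req, username, password, done). With passReqToCallback set to true, the extra sign-up fields can be read from req.body instead:
function(req, useremail, password, done) {
  // passport-local passes only (req, username, password, done);
  // the extra sign-up fields arrive on the request body.
  var passwordHint = req.body.passwordHint;
  var hintanswer = req.body.hintanswer;
  connection.query("SELECT * FROM users WHERE useremail = ?", [useremail], function(err, rows) {
    if (err) return done(err);
    if (rows.length) {
      return done(null, false, req.flash('signupMessage', 'That useremail is already taken.'));
    }
    var newUserMysql = {
      useremail: useremail,
      password: bcrypt.hashSync(password, null, null),
      passwordHint: bcrypt.hashSync(passwordHint, null, null),
      hintanswer: bcrypt.hashSync(hintanswer, null, null)
    };
    connection.query("INSERT INTO users SET ?", newUserMysql, function(err, rows) {
      if (err) return done(err);
      newUserMysql.id = rows.insertId;
      return done(null, newUserMysql);
    });
  });
}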