Limited ODBC connections in SQL Server 2008 Import Wizard - sql-server-2008

Issue: the Pervasive ODBC driver (called "Pervasive ODBC Engine Interface") is visible in the ODBC Data Source Administrator (odbcad32.exe). However, the same driver is not visible in the SQL Server 2008 Import Wizard, although I can see it in the SQL Server 2000 import wizard.
I am using 32-bit Windows 7 with SQL Server 2008, SQL Server 2000, and Pervasive PSQL v11. Any solution would be very helpful... Many thanks!

I could never figure out how to make the Import/Export wizard work in SQL Server Management Studio. I even tried to modify the 'ProviderResources.xml' file as I saw in another response.
I was attempting to migrate Sage Timberline Office data, which uses a proprietary 'Timberline Data' ODBC driver. That driver is missing the 'ORDINAL_POSITION' column when you call the 'GetSchema' function in .NET, so 'Import/Export' in SQL Server Management Studio fails.
I ended up having to write my own app to copy the data over to SQL Server. The only downside is that it doesn't know about primary keys, indexes, or other constraints. Nonetheless, I get the data into MSSQL, so I am happy.
I am sure this code will be useful to others, so here you go.
Program.cs
using System;
using System.Data.Odbc;
using System.Data.SqlClient;
using System.Data;
using System.Collections.Generic;
using System.Diagnostics;
namespace TimberlineOdbcSync
{
class Program
{
static string currentTableName;
const string sourceOdbcDriver = "{Timberline Data}";
const string sourceOdbcDsn = "timberline data source";
const string sourceOdbcUid = "user1";
const string sourceOdbcPwd = "user1a";
const string destSqlServer = "SERVER5";
const string destSqlDatabase = "TSData";
const string destSqlUsername = "";
const string destSqlPassword = "";
const string destSqlOwner = "dbo";
public static void Main(string[] args)
{
DateTime allStartDate = DateTime.Now;
DateTime allEndDate;
DateTime tableStartDate = DateTime.Now;
DateTime tableEndDate;
TimeSpan diff;
string errMsg;
int pCount; //pervasive record count
int sCount; //sql server record count
string sourceOdbcConnString =
"Dsn=" + sourceOdbcDsn + ";" +
"Driver="+ sourceOdbcDriver +";" +
(!string.IsNullOrEmpty(sourceOdbcUid) ? "uid=" + sourceOdbcUid + ";" : "") +
(!string.IsNullOrEmpty(sourceOdbcPwd) ? "pwd=" + sourceOdbcPwd + ";" : "");
string destSqlConnString =
"Server=" + destSqlServer + ";" +
"Database=" + destSqlDatabase+ ";" +
(!string.IsNullOrEmpty(destSqlUsername) && !string.IsNullOrEmpty(destSqlPassword) ?
"User Id=" + destSqlUsername + ";" +
"Password=" + destSqlPassword + ";"
:
"Trusted_Connection=true;");
try{
using(OdbcConnection pConn = new OdbcConnection(sourceOdbcConnString)){
pConn.Open();
List<string> tables = new List<string>();
//get a list of all tables
using(DataTable tableschema = pConn.GetSchema("TABLES"))
foreach(DataRow row in tableschema.Rows)
tables.Add(row["TABLE_NAME"].ToString());
foreach(string tableName in tables){
//set the current table name
currentTableName = tableName;
try{
//get the schema info for the table (from pervasive)
DataTable dtSchema = pConn.GetSchema("Columns", new string[]{null, null, tableName});
//if we could not get the schema
if(dtSchema == null || dtSchema.Rows.Count <= 0){
pConn.Close();
errMsg = "Error: Could not get column information for table " + tableName;
Trace.WriteLine(errMsg);
WriteErrorEvent(errMsg);
return;
}
//emit the table name
Trace.Write("[" + tableName + "]");
//get the number of records in this table
pCount = TableCount(tableName, pConn);
//emit the number of records in this table
Trace.Write(" = P:" + pCount);
//create a data reader to read the pervasive data
string sql = "select * from \""+ tableName + "\"";
OdbcCommand cmd = new OdbcCommand(sql, pConn);
OdbcDataReader dr = cmd.ExecuteReader();
//create a connection to SQL Server
using (SqlConnection sConn = new SqlConnection(destSqlConnString)){
//open the connection
sConn.Open();
//if the table already exists
if(TableExists(tableName, sConn)){
//get the record count for this table
sCount = TableCount(tableName, sConn);
} else {
//set the record count to zero
sCount = 0;
}
//output the record count
Trace.Write(", S: " + sCount);
//if the record counts match
if( pCount == sCount ){
//output an indicator that we are skipping this table
Trace.WriteLine(" -- Skipping");
//skip this table and go to the next
continue;
}
//output a blank line
Trace.WriteLine("");
//create the table in SQL Server using the schema info from Pervasive
CreateTableInDatabase(dtSchema, destSqlOwner, tableName, sConn);
// Copies all rows to the database from the data reader.
using (SqlBulkCopy bc = new SqlBulkCopy(sConn))
{
// Destination table with owner -
// this example does not check the owner names! It uses dbo exclusively.
bc.DestinationTableName = "[" + destSqlOwner + "].[" + tableName + "]";
bc.BatchSize = 3000;
bc.BulkCopyTimeout = 12000; //seconds
// User notification with the SqlRowsCopied event
bc.NotifyAfter = 1000;
bc.SqlRowsCopied += new SqlRowsCopiedEventHandler(OnSqlRowsCopied);
//output the date and time so we know when we started
tableStartDate = DateTime.Now;
Trace.WriteLine("Copying " + pCount + " records to " + destSqlServer + " - " + tableStartDate.ToString("g"));
// Starts the bulk copy.
bc.WriteToServer(dr);
tableEndDate = DateTime.Now;
diff = tableEndDate - tableStartDate;
Trace.WriteLine(String.Format(
"Completed {4} at {0}\r\nDuration: {1}:{2}:{3}",
tableEndDate.ToString("g"),
diff.Hours.ToString(), diff.Minutes.ToString(), diff.Seconds.ToString(),
tableName));
// Closes the SqlBulkCopy instance
bc.Close();
}
dr.Close();
}
}catch(Exception ex){
errMsg = "Error: " + ex.Message + Environment.NewLine +
"Stack: " + ex.StackTrace + Environment.NewLine;
Trace.WriteLine(errMsg);
WriteErrorEvent(errMsg);
if( !ReadBool("Do you want to continue? [y/n]") ){
break;
}
}//end try
}//end for
}//end using
allEndDate = DateTime.Now;
diff = allEndDate - allStartDate;
Trace.WriteLine(
"Bulk copy operation complete" + Environment.NewLine +
"Started: " + allStartDate.ToString("g") + Environment.NewLine +
"Current: " + allEndDate.ToString("g") + Environment.NewLine +
String.Format("Duration: {0}:{1}:{2}",
diff.Hours.ToString(),
diff.Minutes.ToString(),
diff.Seconds.ToString()));
}catch(Exception ex){
errMsg =
"Error: " + ex.Message + Environment.NewLine +
"Stack: " + ex.StackTrace;
Trace.WriteLine(errMsg);
WriteErrorEvent(errMsg);
}//end try
Console.Write("Press any key to continue . . . ");
Console.ReadKey(true);
}
static bool TableExists(string tableName, SqlConnection sqlConn){
int retVal = 0;
try{
using(SqlCommand command = sqlConn.CreateCommand()){
command.CommandText = "IF OBJECT_ID('dbo." + tableName + "', 'U') IS NOT NULL SELECT 1 as res ELSE SELECT 0 as res";
retVal = Convert.ToInt32(command.ExecuteScalar());
}
}catch(Exception ex){
string errMsg =
"Error: Could not determine if table " + tableName + " exists."+ Environment.NewLine +
"Reason: " + ex.Message + Environment.NewLine +
"Stack: " + ex.StackTrace;
Trace.WriteLine(errMsg);
WriteErrorEvent(errMsg);
retVal = 0;
}//end try
return (retVal==1);
}
static int TableCount(string tableName, IDbConnection anyConn){
int retVal = 0;
try{
using(IDbCommand command = anyConn.CreateCommand()){
command.CommandText = "SELECT count(*) FROM \"" + tableName + "\"";
retVal = Convert.ToInt32(command.ExecuteScalar());
}
}catch(Exception ex){
string errMsg =
"Error: Could not get table count for " + tableName + "." + Environment.NewLine +
"Reason: " + ex.Message + Environment.NewLine +
"Stack: " + ex.StackTrace;
Trace.WriteLine(errMsg);
WriteErrorEvent(errMsg);
retVal = 0;
}//end try
return (retVal);
}
static bool ReadBool(String question) {
while (true) {
Console.WriteLine(question);
String r = (Console.ReadLine() ?? "").ToLower();
if (r == "y" || r == "yes" || r == "1")
return true;
if (r == "n" || r == "no" || r=="0")
return false;
Console.WriteLine("Please Select a Valid Option!!");
}//end while
}
static void OnSqlRowsCopied(object sender, SqlRowsCopiedEventArgs e) {
Trace.WriteLine(String.Format("-- [{1}] Copied {0} rows.", e.RowsCopied, currentTableName));
}
private static string s(object o){
return (Convert.IsDBNull(o) ? "" : Convert.ToString(o));
}
private static string _drToColSql(DataRow dr){
string colName = s(dr["COLUMN_NAME"]);
string ret = "[" + colName + "] ";
string typeName = ((string)s(dr["TYPE_NAME"])).ToLower();
switch(typeName){
case "char":
ret += "CHAR(" + s(dr["LENGTH"]) + ")";
break;
case "byte":
ret += "CHAR(" + s(dr["PRECISION"]) + ")";
break;
case "text":
ret += "VARCHAR(" + s(dr["PRECISION"]) + ")";
break;
case "date":
ret += "DATE";
break;
case "time":
ret += "TIME(7)";
break;
case "double":
ret += "DECIMAL(16,2)"; // + c(dr["PRECISION"]) + "," + c(dr["LENGTH"]) + ")";
break;
case "usmallint":
case "smallint":
ret += "SMALLINT";
break;
case "utinyint":
case "tinyint":
ret += "TINYINT";
break;
case "identity":
case "integer":
ret += "BIGINT";
break;
case "smallidentity":
case "short":
ret += "INT";
break;
case "longvarchar":
case "memo":
ret += "TEXT";
break;
case "checkbox":
ret += "BIT";
break;
case "real":
ret += "REAL";
break;
default:
//this was an unexpected column, figure out what happened
Trace.WriteLine("ERROR - Column '" + colName + "' Details: ");
Trace.WriteLine("\tCOLUMN_NAME: " + s(dr["COLUMN_NAME"]));
Trace.WriteLine("\tTYPE_NAME: " + s(dr["TYPE_NAME"]));
Trace.WriteLine("\tDATA_TYPE: " + s(dr["DATA_TYPE"]));
Trace.WriteLine("\tLENGTH: " + s(dr["LENGTH"]));
Trace.WriteLine("\tPRECISION: " + s(dr["PRECISION"]));
Trace.WriteLine("\tSCALE: " + s(dr["SCALE"]));
Trace.WriteLine("\tNULLABLE: " + s(dr["NULLABLE"]));
throw new Exception("Unexpected data type: " + typeName);
}
if(s(dr["NULLABLE"])=="1"){
ret += " NULL";
}
return ret;
}
private static bool CreateTableInDatabase(DataTable dtSchemaTable, string tableOwner, string tableName, SqlConnection sqlConn) {
// Generates the create table command.
string ctStr = "CREATE TABLE [" + tableOwner + "].[" + tableName + "](\r\n";
for (int i = 0; i < dtSchemaTable.Rows.Count; i++)
{
ctStr += _drToColSql(dtSchemaTable.Rows[i]);
if (i < dtSchemaTable.Rows.Count - 1) //no trailing comma after the last column
ctStr += ",";
ctStr += "\r\n";
}
ctStr += ")";
// Emit SQL statement
Trace.WriteLine("-".PadLeft(30, '-'));
Trace.WriteLine(ctStr + Environment.NewLine);
// Runs the SQL command to make the destination table.
using(SqlCommand command = sqlConn.CreateCommand()){
command.CommandText = "IF OBJECT_ID('dbo." + tableName + "', 'U') IS NOT NULL DROP TABLE dbo." + tableName;
command.ExecuteNonQuery();
command.CommandText = ctStr;
command.ExecuteNonQuery();
}
return true;
}
private static bool WriteErrorEvent(string errMsg){
const string sSource = "PervasiveOdbcSync";
const string sLog = "Application";
try{
if (!EventLog.SourceExists(sSource))
EventLog.CreateEventSource(sSource,sLog);
EventLog.WriteEntry(sSource, errMsg, EventLogEntryType.Error, 128);
return true;
}catch(Exception ex){
Trace.WriteLine("Unable to write error to event log. Reason: " + ex.Message);
return false;
}
}
}
}
You'll want to add a System.Diagnostics.ConsoleTraceListener to your app.config file. That way you can see everything that is being output. If you also add a System.Diagnostics.TextWriterTraceListener, the app will also write everything to a log file.
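For reference, a minimal sketch of the app.config entries for those two listeners (the listener names and log file name here are my own placeholders):

<configuration>
  <system.diagnostics>
    <trace autoflush="true">
      <listeners>
        <!-- echo all Trace output to the console -->
        <add name="console" type="System.Diagnostics.ConsoleTraceListener" />
        <!-- also write everything to a log file -->
        <add name="logfile" type="System.Diagnostics.TextWriterTraceListener"
             initializeData="TimberlineOdbcSync.log" />
      </listeners>
    </trace>
  </system.diagnostics>
</configuration>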

On my PSQL v11 box, which also has SQL Server 2008 R2 installed, I don't see a "Pervasive ODBC Engine Interface" listed in the "Data Source" dialog of the SQL Server Import and Export Wizard. I do see the "Pervasive PSQL OLEDB Provider" and "Pervasive Provider, release v4.0" (and 3.5 and 3.2); the Pervasive Provider is an ADO.NET provider. I also see a ".Net Framework Data Provider for ODBC", and if I enter the name of a Pervasive DSN (like DEMODATA), it works.
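So the driver itself is not listed in the 2008 wizard, but going through the ".Net Framework Data Provider for ODBC" with a DSN name is a workable path. The same DSN-based connection also works from code; a minimal sketch (DEMODATA is the PSQL sample database, and the table name is an assumption about its contents):

using System;
using System.Data.Odbc;

class DsnSmokeTest
{
    static void Main()
    {
        //connect through the DSN rather than the driver name
        using (OdbcConnection conn = new OdbcConnection("DSN=DEMODATA"))
        {
            conn.Open();
            //table name assumed from the sample database
            using (OdbcCommand cmd = new OdbcCommand("SELECT COUNT(*) FROM Class", conn))
                Console.WriteLine("Rows: " + cmd.ExecuteScalar());
        }
    }
}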

Related

Can't parse JSON returned from MySQL

I have a Node.js app that writes data to a MySQL backend. One field is an array that I stringify. I can see in the workbench that the data is correct in the database. However, when I retrieve it, I get an error when I try to parse it:
"Unexpected token o in JSON at position 1"
If I log the result it shows up as [object Object].
From what I read online, this means it is already a JS object and I do not need to parse it. However, I cannot find anything about how to get access to the data.
process: function (bot, msg, suffix) {
var ftcmds = suffix.split(" ", 1);
var ftName = ftcmds[0];
var ftArray;
var selectSQL = "SELECT FireTeam FROM fireteam WHERE Name = '" + ftName + "'";
var updateSQL = "UPDATE fireteam SET FireTeam = '" + ftArray + "'WHERE Name = '" + ftName + "'";
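// note: updateSQL is built here, before ftArray has been assigned in the
// callback below, so the UPDATE statement will contain the string "undefined"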
mysqlcon.query(selectSQL, function (err, result) {
console.log("Result |" + result);
console.log("Error |" + err);
if (err) {
console.log("Caught Error " + err + " " + msg.author);
}
else {
console.log("Recovered result " + result);
ftArray = result;
console.log("Attempting to update array");
ftArray.push(msg.author.id);
console.log("updated array " + ftArray);
var jsonArray = JSON.stringify(ftArray);
mysqlcon.query(updateSQL, function (err, result) {
console.log("Result |" + result);
console.log("Error |" + err);
if (err.toString().indexOf(dupErr) != -1) {
msg.author.send("Could not find that fireteam");
console.log("Error: Did not locate the requested name " + msg.author)
} else if (err) {
console.log("Caught Error " + err + " " + msg.author);
}
else {
msg.author.send("You have joined Fireteam " + name + ". I will setup a group chat on " + date + " if your team fills up.");
}
})
}
});
}
You should just be able to access it as an object; the mysql driver has already parsed each row for you, which is also why JSON.parse chokes on it. Note that result is an array of row objects, so if a row has fields name and title you can access them as:
var name = result[0].name;
var title = result[0].title;

Programmatically add transformation script component to SSIS package

Can anyone help me please... How do I programmatically add a transformation Script Component to a Data Flow task in an SSIS package?
I am creating an SSIS package programmatically using the API via C#. It consists of a flat file source and a SQL table (OLE DB destination). I have to add a Script Component that generates auto-incrementing numbers to fill the rownum column of the table. The rownum column is not an identity column.
CODE GIVEN...
public Microsoft.SqlServer.Dts.Runtime.DTSExecResult CreatePackage(Dictionary<int, int> LenMap)
{
Package package;
try
{
package = new Package();
}
catch (Exception ex)
{
throw new Exception(ex.Message, ex);
}
package.MaximumErrorCount = 4;
package.Name = this.PackageName;
/* Add the Data Flow Task */
package.Executables.Add("STOCK:PipelineTask");
package.DelayValidation = false;
/* Get the task host wrapper, and the Data Flow task*/
TaskHost taskHost = package.Executables[0] as TaskHost;
MainPipe dataFlowTask = (MainPipe)taskHost.InnerObject;
/*===================================================================================
===============================ERROR LOGGING=======================================
===================================================================================*/
package.LoggingMode = DTSLoggingMode.Enabled;
//For package logging connection manager (flat file)
ConnectionManager LogConMgr = package.Connections.Add("FILE");
LogConMgr.ConnectionString = this.LogFilePath;
LogConMgr.Name = "SSISLog.txt";
LogProvider packagelogprovider = package.LogProviders.Add("DTS.LogProviderTextFile.2");
packagelogprovider.ConfigString = LogConMgr.Name;
packagelogprovider.Name = "Text File Log Provider";
packagelogprovider.Description = "Writes log entries for events to a CSV file";
package.LoggingOptions.SelectedLogProviders.Add(packagelogprovider);
LoggingOptions packageLogging = package.LoggingOptions;
packageLogging.EventFilterKind = DTSEventFilterKind.Inclusion;
packageLogging.EventFilter.Initialize();
packageLogging.EventFilter = new string[] { "OnError", "OnWarning", "OnInformation", "OnPreExecute", "OnExecute", "Diagnostic", "PipelineComponentTime", "OnExecStatusChanged", "OnPostExecute", "OnProgress", "OnPreValidate", "OnPostValidate" };
//OnError Event
DTSEventColumnFilter OnErrorColumnFilter = new DTSEventColumnFilter();
OnErrorColumnFilter.Computer = true;
OnErrorColumnFilter.SourceName = true;
OnErrorColumnFilter.MessageText = true;
OnErrorColumnFilter.SourceName = true;
OnErrorColumnFilter.SourceID = true;
OnErrorColumnFilter.DataBytes = true;
packageLogging.SetColumnFilter("OnError", OnErrorColumnFilter);
//OnWarning Event
DTSEventColumnFilter OnWarningColumnFilter = new DTSEventColumnFilter();
OnWarningColumnFilter.Computer = true;
OnWarningColumnFilter.SourceName = true;
OnWarningColumnFilter.MessageText = true;
packageLogging.SetColumnFilter("OnWarning", OnWarningColumnFilter);
/*==================================================================================
================END ERROR LOGGING=================================================
==================================================================================*/
/*=================================================================================
=========================== FOR SOURCE CONNECTION ===============================
=================================================================================*/
/* Add the Flat File connection*/
ConnectionManager connectionManagerFlatFile = package.Connections.Add("FLATFILE");
connectionManagerFlatFile.ConnectionString = this.FlatFileConnString;
connectionManagerFlatFile.Name = "FlatFile";
connectionManagerFlatFile.Properties["Format"].SetValue(connectionManagerFlatFile, this.FlatFileColFormat);
connectionManagerFlatFile.Properties["ColumnNamesInFirstDataRow"].SetValue(connectionManagerFlatFile, this.FirstRowAsCol);
/* Get native flat file connection */
RuntimeWrapper.IDTSConnectionManagerFlatFile100 connectionFlatFile =
connectionManagerFlatFile.InnerObject as RuntimeWrapper.IDTSConnectionManagerFlatFile100;
/* Specify column delimiter */
connectionFlatFile.HeaderRowDelimiter = Environment.NewLine;
connectionFlatFile.RowDelimiter = Environment.NewLine;
connectionFlatFile.DataRowsToSkip = 0;
connectionFlatFile.HeaderRowsToSkip = RowsToSkip;
//connectionFlatFile.ColumnNamesInFirstDataRow = this.firstRowAsCol;
/*Determine the number of columns by reading Mapping Information.*/
foreach (DictionaryEntry de in (IDictionary)MapInfo)
{
if (Int32.Parse(de.Key.ToString()) != 0)
{
RuntimeWrapper.IDTSConnectionManagerFlatFileColumn100 flatFileCol =
connectionFlatFile.Columns.Add() as RuntimeWrapper.IDTSConnectionManagerFlatFileColumn100;
//sS_AssignColumnProperties(flatFileCol, parts[col], new string(delimit));
sS_AssignColumnProperties(flatFileCol, de.Value.ToString(), LenMap[Int32.Parse(de.Key.ToString())]);
}
}
//Correct the last Flat File column delimiter, needs to be NewLine not Comma
connectionFlatFile.Columns[connectionFlatFile.Columns.Count - 1].ColumnDelimiter = Environment.NewLine;
// Check if columns generated
if (connectionFlatFile.Columns.Count == 0)
{
throw new ArgumentException("No flat file columns have been created.");
}
// Add Flat File source component
IDTSComponentMetaData100 componentSource = dataFlowTask.ComponentMetaDataCollection.New();
componentSource.Name = "FlatFileSource";
componentSource.ComponentClassID = "DTSAdapter.FlatFileSource.2";
// componentSource.UsesDispositions = true;
// Get source design-time instance, and initialise component
CManagedComponentWrapper instanceSource = componentSource.Instantiate();
instanceSource.ProvideComponentProperties();
// Set source connection
componentSource.RuntimeConnectionCollection[0].ConnectionManagerID = connectionManagerFlatFile.ID;
componentSource.RuntimeConnectionCollection[0].ConnectionManager =
DtsConvert.GetExtendedInterface(connectionManagerFlatFile);
// Reinitialize the metadata,
instanceSource.AcquireConnections(null);
instanceSource.ReinitializeMetaData();
instanceSource.ReleaseConnections();
IDTSOutput100 output = componentSource.OutputCollection[0];
Stack<int> ColumnsToRemove = new Stack<int>();
for (int outCol = 0; outCol < output.OutputColumnCollection.Count; outCol++)
{
IDTSOutputColumn100 column = output.OutputColumnCollection[outCol];
column.ErrorRowDisposition = DTSRowDisposition.RD_IgnoreFailure;
column.TruncationRowDisposition = DTSRowDisposition.RD_IgnoreFailure;
}
/*===================================================================================
* ===================================Add Row Number Transformation==================
* ===================================================================================*/
// Create a package variable to store the row count value
package.Variables.Add("RowCountVar", false, "User", 0);
IDTSComponentMetaData100 componentRowNumber = dataFlowTask.ComponentMetaDataCollection.New();
componentRowNumber.Name = "RowNumberTransformation";
componentRowNumber.ComponentClassID = "DTSTransform.RowCount";
CManagedComponentWrapper instanceRowNumber = componentRowNumber.Instantiate();
instanceRowNumber.ProvideComponentProperties();
// Set the variable name property
instanceRowNumber.SetComponentProperty("VariableName", "User::RowCountVar");
// Connect the two components together
IDTSPath100 path = dataFlowTask.PathCollection.New();
path.AttachPathAndPropagateNotifications(componentSource.OutputCollection[0], componentRowNumber.InputCollection[0]);
/*End Row Number Transformation*/
/*=================================================================================
======================== FOR DESTINATION CONNECTION =============================
=================================================================================*/
/* Add the SQL OLE-DB connection*/
ConnectionManager connectionManagerOleDb = package.Connections.Add("OLEDB");
//connectionManagerOleDb.ConnectionString = this.OleDbConnString;
connectionManagerOleDb.ConnectionString = string.Format(
"Provider=SQLOLEDB;Data Source={0};Initial Catalog={1};User ID=truser;Password=truser", this.DBServerName, this.DBName);
connectionManagerOleDb.Name = "OLEDB";
/*=================================================================================
============================Derived Columns======================================
=================================================================================*/
//Derived Column
IDTSComponentMetaData100 derived = dataFlowTask.ComponentMetaDataCollection.New();
derived.Name = "Derived Column Component";
derived.ComponentClassID = "DTSTransform.DerivedColumn";
CManagedComponentWrapper DesignDerivedColumns = derived.Instantiate();
DesignDerivedColumns.ProvideComponentProperties(); //design time
derived.InputCollection[0].ExternalMetadataColumnCollection.IsUsed = false;
derived.InputCollection[0].HasSideEffects = true;
//update the metadata for the derived columns
DesignDerivedColumns.AcquireConnections(null);
DesignDerivedColumns.ReinitializeMetaData();
DesignDerivedColumns.ReleaseConnections();
//Create the path from source to derived component
IDTSPath100 SourceToDerivedPath = dataFlowTask.PathCollection.New();
SourceToDerivedPath.AttachPathAndPropagateNotifications(componentRowNumber.OutputCollection[0], derived.InputCollection[0]);
/*Replace Values of Input to null for empty string*/
// Get the derived's default input and virtual input.
IDTSInput100 input = derived.InputCollection[0];
IDTSVirtualInput100 derivedInputVirtual = input.GetVirtualInput();
IDTSCustomProperty100 property = null;
// Iterate through the virtual input column collection.
foreach (IDTSVirtualInputColumn100 vColumn in derivedInputVirtual.VirtualInputColumnCollection)
{
DesignDerivedColumns.SetUsageType(input.ID, derivedInputVirtual, vColumn.LineageID, DTSUsageType.UT_READWRITE);
}
foreach (IDTSInputColumn100 inputColumn in derived.InputCollection[0].InputColumnCollection)
{
inputColumn.Description = string.Format("Override the orginal column {0} with a null value if the string is empty.", inputColumn.Name);
property = inputColumn.CustomPropertyCollection["Expression"];
property.Name = "Expression";
property.Value = string.Format("(DT_STR,{0},1252)(LEN(TRIM([{1}])) == 0 ? (DT_STR,{0},1252)(NULL(DT_STR,{0},1252)) : TRIM([{1}]))", inputColumn.Length, inputColumn.Name);
property = inputColumn.CustomPropertyCollection["FriendlyExpression"];
property.Name = "FriendlyExpression";
property.Value = string.Format("(DT_STR,{0},1252)(LEN(TRIM([{1}])) == 0 ? (DT_STR,{0},1252)(NULL(DT_STR,{0},1252)) : TRIM([{1}]))", inputColumn.Length, inputColumn.Name);
inputColumn.ErrorRowDisposition = DTSRowDisposition.RD_IgnoreFailure;
inputColumn.TruncationRowDisposition = DTSRowDisposition.RD_IgnoreFailure;
}
/*End Replace Values of Input to null for empty string*/
//For Batch Id
IDTSOutputColumn100 myCol = derived.OutputCollection[0].OutputColumnCollection.New();
myCol.Name = "BtchId";
myCol.SetDataTypeProperties(Microsoft.SqlServer.Dts.Runtime.Wrapper.DataType.DT_I4, 0, 0, 0, 0);
myCol.ExternalMetadataColumnID = 0;
myCol.ErrorRowDisposition = DTSRowDisposition.RD_IgnoreFailure;
myCol.TruncationRowDisposition = DTSRowDisposition.RD_IgnoreFailure;
IDTSCustomProperty100 myProp = myCol.CustomPropertyCollection.New();
myProp.Name = "Expression";
myProp.Value = "\"" + this.BatchId.ToString() + "\"";
myProp = myCol.CustomPropertyCollection.New();
myProp.Name = "FriendlyExpression";
myProp.Value = "\"" + this.BatchId.ToString() + "\"";
// For FileNm
IDTSOutputColumn100 fleNmCol = derived.OutputCollection[0].OutputColumnCollection.New();
fleNmCol.Name = "CSVFileNm";
fleNmCol.SetDataTypeProperties(Microsoft.SqlServer.Dts.Runtime.Wrapper.DataType.DT_STR, 1100, 0, 0, 1252);
fleNmCol.ExternalMetadataColumnID = 0;
fleNmCol.ErrorRowDisposition = DTSRowDisposition.RD_IgnoreFailure;
fleNmCol.TruncationRowDisposition = DTSRowDisposition.RD_IgnoreFailure;
IDTSCustomProperty100 fleNmProp = fleNmCol.CustomPropertyCollection.New();
fleNmProp.Name = "Expression";
fleNmProp.Value = " \" " + this.FileName.Replace("\\", "\\\\") + " \"";
fleNmProp = fleNmCol.CustomPropertyCollection.New();
fleNmProp.Name = "FriendlyExpression";
fleNmProp.Value = " \" " + "csvFile.csv" + " \"";
// For FileDate
IDTSOutputColumn100 fleDtCol = derived.OutputCollection[0].OutputColumnCollection.New();
fleDtCol.Name = "FileDt";
fleDtCol.SetDataTypeProperties(Microsoft.SqlServer.Dts.Runtime.Wrapper.DataType.DT_STR, 50, 0, 0, 1252);
fleDtCol.ExternalMetadataColumnID = 0;
fleDtCol.ErrorRowDisposition = DTSRowDisposition.RD_IgnoreFailure;
fleDtCol.TruncationRowDisposition = DTSRowDisposition.RD_IgnoreFailure;
IDTSCustomProperty100 fleDtProp = fleDtCol.CustomPropertyCollection.New();
fleDtProp.Name = "Expression";
fleDtProp.Value = "\"" + this.FileDate + "\"";
fleDtProp = fleDtCol.CustomPropertyCollection.New();
fleDtProp.Name = "FriendlyExpression";
fleDtProp.Value = "\"" + this.FileDate + "\"";
package.Variables.Add("RowNumber", false, "User", 1);
// For RowNum
IDTSOutputColumn100 RowNumCol = derived.OutputCollection[0].OutputColumnCollection.New();
RowNumCol.Name = "RowNum";
RowNumCol.SetDataTypeProperties(Microsoft.SqlServer.Dts.Runtime.Wrapper.DataType.DT_STR, 3, 0, 0, 1252);
RowNumCol.ExternalMetadataColumnID = 0;
RowNumCol.ErrorRowDisposition = DTSRowDisposition.RD_IgnoreFailure;
RowNumCol.TruncationRowDisposition = DTSRowDisposition.RD_IgnoreFailure;
IDTSCustomProperty100 RowNumColProp = RowNumCol.CustomPropertyCollection.New();
RowNumColProp.Name = "Expression";
RowNumColProp.Value = "DATEPART(\"Ms\",GetDate())";
RowNumColProp = RowNumCol.CustomPropertyCollection.New();
RowNumColProp.Name = "FriendlyExpression";
RowNumColProp.Value = "DATEPART(\"Ms\",GetDate())";
/*Script Component*/
IDTSComponentMetaData100 scriptPropType = dataFlowTask.ComponentMetaDataCollection.New();
scriptPropType.Name = "Transform Property Type";
scriptPropType.ComponentClassID = "DTSTransform.ScriptComponent";
scriptPropType.Description = "Transform Property Type";
CManagedComponentWrapper instance2 = scriptPropType.Instantiate();
instance2.ProvideComponentProperties();
string[] scriptValue = new String[4];
scriptValue[0] = "dts://Scripts/" + "ScriptComponent_9ddf9eba6f5b488f9710b95e4d7e7c74" + "/" + "ScriptComponent_9ddf9eba6f5b488f9710b95e4d7e7c74" + ".vsaproj";
scriptValue[1] = Resources.ProjectFile;//????????
scriptValue[2] = "dts://Scripts/" + "ScriptComponent_9ddf9eba6f5b488f9710b95e4d7e7c74" + "/ScriptMain.vsaitem";
scriptValue[3] = "/* Microsoft SQL Server Integration Services user script component\r\n"
+ "* This is your new script component in Microsoft Visual Basic .NET \r\n"
+ "* ScriptMain is the entrypoint class for script components*/\r\n"
+ "\r\n"
+ "using System \r\n"
+ "using System.Data \r\n"
+ "using System.Math \r\n"
+ "using Microsoft.SqlServer.Dts.Pipeline.Wrapper \r\n"
+ "using Microsoft.SqlServer.Dts.Runtime.Wrapper \r\n"
+ " \r\n"
+ "public class ScriptMain:UserComponent \r\n"
+ "{ \r\n"
+ " int i;\r\n"
+ "public override void PreExecute(){\r\n"
+ "base.PreExecute();\r\n"
+ " i=0;\r\n"
+ "}\r\n"
+ "public override void postExecute(){\r\n"
+ "base.postExecute();\r\n"
+ "}\r\n"
+ " public override void Input0_ProcessInputRow(Input0Buffer Row){ \r\n"
+ " ++i;\r\n"
+ " Row.RowNum = i.ToString(); \r\n"
+ " } \r\n"
+ "} \r\n";
IDTSDesigntimeComponent100 scriptComponentDesignTime = instance2 as IDTSDesigntimeComponent100;
scriptComponentDesignTime.SetComponentProperty("SourceCode", scriptValue);
scriptComponentDesignTime.SetComponentProperty("PreCompile", false);
/*End Script Component*/
/*=================================================================================
======================== FOR DESTINATION ========================================
=================================================================================*/
/*Add OLE-DB destination*/
IDTSComponentMetaData100 componentDestination = dataFlowTask.ComponentMetaDataCollection.New();
componentDestination.Name = "OLEDBDestination";
componentDestination.ComponentClassID = "DTSAdapter.OLEDBDestination.2";
/* Get destination design-time instance, and initialise component*/
CManagedComponentWrapper instanceDestination = componentDestination.Instantiate();
instanceDestination.ProvideComponentProperties();
/* Set destination connection*/
componentDestination.RuntimeConnectionCollection[0].ConnectionManagerID = connectionManagerOleDb.ID;
componentDestination.RuntimeConnectionCollection[0].ConnectionManager =
DtsConvert.GetExtendedInterface(connectionManagerOleDb);
/* Set destination table name*/
instanceDestination.SetComponentProperty("OpenRowset", this.TargetTable);
instanceDestination.SetComponentProperty("AccessMode", 0);
//instanceDestination.SetComponentProperty("ProtectionLevel", "SaveSensitive");
instanceDestination.SetComponentProperty("DefaultCodePage", 1252);
/* Set Data Flow direction*/
//Create the path from derived to destination
IDTSPath100 DerivedToDestinationPath = dataFlowTask.PathCollection.New();
DerivedToDestinationPath.AttachPathAndPropagateNotifications(derived.OutputCollection[0], componentDestination.InputCollection[0]);
/* Get input and virtual input for destination to select and map columns*/
IDTSInput100 destinationInput = componentDestination.InputCollection[0];
IDTSVirtualInput100 destinationVirtualInput = destinationInput.GetVirtualInput();
IDTSVirtualInputColumnCollection100 destinationVirtualInputColumns = destinationVirtualInput.VirtualInputColumnCollection;
//componentDestination.ValidateExternalMetadata = false;
/* Reinitialize the metadata, generating external columns from flat file columns
If errors are raised here, it is most likely because the flat file connection columns
are wrong, which itself is probably because the template table does not match the file.*/
instanceDestination.AcquireConnections(null);
instanceDestination.ReinitializeMetaData();
instanceDestination.ReleaseConnections();
/*=================================================================================
======================== MAP DESTINATION ========================================
=================================================================================*/
/*Select and map destination columns*/
foreach (IDTSVirtualInputColumn100 virtualInputColumn in destinationVirtualInputColumns)
{
// Select column, and retain new input column
IDTSInputColumn100 inputColumn = instanceDestination.SetUsageType(destinationInput.ID,
destinationVirtualInput, virtualInputColumn.LineageID, DTSUsageType.UT_READWRITE);
// Find external column by name
IDTSExternalMetadataColumn100 externalColumn =
destinationInput.ExternalMetadataColumnCollection[inputColumn.Name];
// Map input column to external column
instanceDestination.MapInputColumn(destinationInput.ID, inputColumn.ID, externalColumn.ID);
}
new Application().SaveToXml("E:\\TrustPortal-All\\SourceCode\\TrustSolution\\Web\\TrustSolution\\TrustRite\\OtherActivity\\Import\\Temp\\test.dtsx", package, null);
Microsoft.SqlServer.Dts.Runtime.DTSExecResult results = package.Execute();
this.RowCount = GetRowCount(package);
string message = "";
if (results == Microsoft.SqlServer.Dts.Runtime.DTSExecResult.Failure)
{
foreach (Microsoft.SqlServer.Dts.Runtime.DtsError local_DtsError in package.Errors)
{
message += local_DtsError.Description.ToString();
}
}
if (results == Microsoft.SqlServer.Dts.Runtime.DTSExecResult.Success)
{
message = "Package Executed Successfully....";
}
package.Dispose();
return results;
}
private void sS_AssignColumnProperties(RuntimeWrapper.IDTSConnectionManagerFlatFileColumn100 flatFileCol, string getColName, int colLength)
{
//Assign delimiter:
flatFileCol.ColumnType = this.FlatFileColFormat;
//Indicate column data type – in this case, all the source columns will be set to String Data Type:
flatFileCol.DataType = RuntimeWrapper.DataType.DT_STR;
flatFileCol.MaximumWidth = colLength;
//Indicate column width – here, the width of each source column is set to the supplied length:
flatFileCol.ColumnWidth = colLength; //flatFileColWidth;
flatFileCol.DataPrecision = 0;
flatFileCol.DataScale = 0;
//Assign column name:
RuntimeWrapper.IDTSName100 columnName = flatFileCol as RuntimeWrapper.IDTSName100;
columnName.Name = getColName;
}

Civic address in WP8

I am working on a WP8 project and need to find the location. I've used the Windows.Devices.Geolocation namespace to find the latitude and longitude; now I need to find the address (country, state, and ZIP). I found this example, but I am confused about how to pass the coordinates that I obtained. Here is my code.
public async void FindTADeviceLocation()
{
////Declare Geolocator object
Geolocator geolocator = new Geolocator();
// Set user's accuracy
geolocator.DesiredAccuracy = PositionAccuracy.High;
//get the position of the user.
try
{
//The await guarantees the call is resumed on the thread from which it was made
Geoposition geoposition = await geolocator.GetGeopositionAsync(
maximumAge: TimeSpan.FromMinutes(1),
timeout: TimeSpan.FromSeconds(10)
);
var geoQ = new ReverseGeocodeQuery();
geoQ.QueryCompleted += geoQ_QueryCompleted;
if (geoQ.IsBusy == true)
{
geoQ.CancelAsync();
}
// Set the geo coordinate for the query
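// note: this next assignment is the problem line; geoposition.Coordinate is a
// Windows.Devices.Geolocation.Geocoordinate, while geoQ.GeoCoordinate expects a
// System.Device.Location.GeoCoordinate, so the value must be converted first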
geoQ.GeoCoordinate = geoposition.Coordinate;
geoQ.QueryAsync();
}
catch (Exception ex)
{
if ((uint)ex.HResult == 0x80004004)
{
MessageBox.Show("position is unknown");
}
}
}
void geoQ_QueryCompleted(object sender, QueryCompletedEventArgs<IList<MapLocation>> e)
{
if (e.Result.Count() > 0)
{
string showString = e.Result[0].Information.Name;
showString = showString + "\nAddress: ";
showString = showString + "\n" + e.Result[0].Information.Address.PostalCode + " " + e.Result[0].Information.Address.City;
showString = showString + "\n" + e.Result[0].Information.Address.Country + " " + e.Result[0].Information.Address.CountryCode;
showString = showString + "\nDescription: ";
showString = showString + "\n" + e.Result[0].Information.Description.ToString();
MessageBox.Show(showString);
}
}
I know the problem is in the line geoQ.GeoCoordinate = geoposition.Coordinate;
But how can I pass the coordinates to geoQ.GeoCoordinate?
Thanks in advance.
This is done. GeoCoordinate takes arguments of type double, so all we have to do is convert the coordinates into doubles and pass them in.
var currentLocationLatitude = Convert.ToDouble(geoposition.Coordinate.Latitude.ToString("0.0000000000000"));
var currentLocationLongitude = Convert.ToDouble(geoposition.Coordinate.Longitude.ToString("0.0000000000000"));
var geoQ = new ReverseGeocodeQuery();
geoQ.QueryCompleted += geoQ_QueryCompleted;
if (geoQ.IsBusy == true)
{
geoQ.CancelAsync();
}
// Set the geo coordinate for the query
geoQ.GeoCoordinate = new GeoCoordinate(currentLocationLatitude, currentLocationLongitude);
geoQ.QueryAsync();
Thanks
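Since Latitude and Longitude on the returned coordinate are already doubles, the ToString/Convert round-trip shouldn't be necessary; a simpler equivalent sketch:
//construct the System.Device.Location.GeoCoordinate directly
geoQ.GeoCoordinate = new GeoCoordinate(
    geoposition.Coordinate.Latitude,
    geoposition.Coordinate.Longitude);
geoQ.QueryAsync();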

Cannot implicitly convert type 'System.Data.DataSet' to 'System.Collections.Generic.List<CalendarEvent>'

I am new to application blocks.
I am trying to get data from the database. Following is the code:
JsonResponse.ashx:
public void ProcessRequest(HttpContext context)
{
HttpContext _context = HttpContext.Current;
context.Response.ContentType = "application/json";
int user_id = Convert.ToInt32(HttpContext.Current.Session["userid"]);
DateTime start = new DateTime(1970, 1, 1);
DateTime end = new DateTime(1970, 1, 1);
start = start.AddSeconds(double.Parse(context.Request.QueryString["start"]));
end = end.AddSeconds(double.Parse(context.Request.QueryString["end"]));
String result = String.Empty;
result += "[";
List<int> idList = new List<int>();
foreach (CalendarEvent cevent in EventDAO.getEvents(start, end, user_id))
{
result += convertCalendarEventIntoString(cevent);
idList.Add(cevent.id);
}
if (result.EndsWith(","))
{
result = result.Substring(0, result.Length - 1);
}
result += "]";
//store list of event ids in Session, so that it can be accessed in web methods
context.Session["idList"] = idList;
context.Response.Write(result);
}
private String convertCalendarEventIntoString(CalendarEvent cevent)
{
String allDay = "true";
if (ConvertToTimestamp(cevent.start).ToString().Equals(ConvertToTimestamp(cevent.end).ToString()))
{
if (cevent.start.Hour == 0 && cevent.start.Minute == 0 && cevent.start.Second == 0)
{
allDay = "true";
}
else
{
allDay = "false";
}
}
else
{
if (cevent.start.Hour == 0 && cevent.start.Minute == 0 && cevent.start.Second == 0
&& cevent.end.Hour == 0 && cevent.end.Minute == 0 && cevent.end.Second == 0)
{
allDay = "true";
}
else
{
allDay = "false";
}
}
return "{" +
"id: '" + cevent.id + "'," +
"title: '" + HttpContext.Current.Server.HtmlEncode(cevent.title) + "'," +
"start: " + ConvertToTimestamp(cevent.start).ToString() + "," +
"end: " + ConvertToTimestamp(cevent.end).ToString() + "," +
"allDay:" + allDay + "," +
"user_id:" + cevent.user_id + "," +
"description: '" + HttpContext.Current.Server.HtmlEncode(cevent.description) + "'" +
"},";
}
DA:
public static List<CalendarEvent> getEvents(DateTime start, DateTime end, int user_id)
{
List<CalendarEvent> events = new List<CalendarEvent>();
SqlParameter[] sqlParam = new SqlParameter[3];
sqlParam[0] = new SqlParameter("#start", start);
sqlParam[1] = new SqlParameter("#end", end);
sqlParam[2] = new SqlParameter("#user_id", user_id);
return SqlHelper.ExecuteDataset(connectionString,CommandType.StoredProcedure, "GetData", sqlParam);
}
sqlhelper:
public static DataSet ExecuteDataset(SqlConnection connection, CommandType commandType, string commandText, params SqlParameter[] commandParameters)
{
//create a command and prepare it for execution
SqlCommand cmd = new SqlCommand();
cmd.CommandTimeout = 120;
PrepareCommand(cmd, connection, (SqlTransaction)null, commandType, commandText, commandParameters);
//create the DataAdapter & DataSet
SqlDataAdapter da = new SqlDataAdapter(cmd);
DataSet ds = new DataSet();
//fill the DataSet using default values for DataTable names, etc.
da.Fill(ds);
// detach the SqlParameters from the command object, so they can be used again.
cmd.Parameters.Clear();
//return the dataset
return ds;
}
I am getting the error:
Cannot implicitly convert type 'System.Data.DataSet' to 'System.Collections.Generic.List'.
I am unable to understand what the problem is.
In the getEvents method, you need to iterate through the records in the DataSet and fill in the list that the method returns.
var dataset = SqlHelper.ExecuteDataset(connectionString, CommandType.StoredProcedure, "GetData", sqlParam);
foreach (DataRow row in dataset.Tables["FooTable"].Rows)
{
events.Add(new CalendarEvent(...));
}
return events;
That's because you try to return a DataSet as a List, which it isn't.
You need to convert the DataSet to a list. A possible solution would be to change the getEvents method to something like this:
public static List<CalendarEvent> getEvents(DateTime start, DateTime end, int user_id)
{
List<CalendarEvent> events = new List<CalendarEvent>();
SqlParameter[] sqlParam = new SqlParameter[3];
sqlParam[0] = new SqlParameter("#start", start);
sqlParam[1] = new SqlParameter("#end", end);
sqlParam[2] = new SqlParameter("#user_id", user_id);
var ds = SqlHelper.ExecuteDataset(connectionString, CommandType.StoredProcedure, "GetData", sqlParam);
return ds.Tables[0].AsEnumerable().Select(datarow => new CalendarEvent { Title = datarow.Field<string>("Title"), /*the rest of your params*/ }).ToList(); //AsEnumerable() needs a reference to System.Data.DataSetExtensions
}
Your problem is this piece of code:
public static List<CalendarEvent> getEvents(DateTime start, DateTime end, int user_id)
{
List<CalendarEvent> events = new List<CalendarEvent>();
SqlParameter[] sqlParam = new SqlParameter[3];
sqlParam[0] = new SqlParameter("#start", start);
sqlParam[1] = new SqlParameter("#end", end);
sqlParam[2] = new SqlParameter("#user_id", user_id);
return SqlHelper.ExecuteDataset(connectionString,CommandType.StoredProcedure, "GetData", sqlParam);
}
You defined the return type of this method as List<CalendarEvent>, but you return a DataSet.
I do not know which DataTables are contained in your DataSet, but I assume there is one that represents your calendar events.
This means you need to extract the data you want from the DataSet and make a list out of it. Assuming there is one table in your DataSet, your new method would look something like this:
public static List<CalendarEvent> getEvents(DateTime start, DateTime end, int user_id)
{
List<CalendarEvent> events = new List<CalendarEvent>();
SqlParameter[] sqlParam = new SqlParameter[3];
sqlParam[0] = new SqlParameter("#start", start);
sqlParam[1] = new SqlParameter("#end", end);
sqlParam[2] = new SqlParameter("#user_id", user_id);
var data = SqlHelper.ExecuteDataset(connectionString,CommandType.StoredProcedure, "GetData", sqlParam);
events = data.Tables[0].AsEnumerable().Select(r => new CalendarEvent
{
//using dummy properties because I don't know your class
Property1 = r.Field<string>("Column1"),
Property2 = r.Field<string>("column2"),
//...
}).ToList();
return events;
}

How to append results in Processing?

I have implemented the Table class in order to save the results generated by the application. However, the Timer in the application causes it to overwrite the existing CSV file each time it runs. Rather than write over the existing CSV file, I would like to append the newest search results to it. Is there a way to do this? Is it easier to append the results if they are stored in a different format, such as JSON?
Timer timer;
import java.util.List;
Table table;
long lastID = Long.MAX_VALUE;
void setup() {
timer = new Timer(30000);
timer.start();
goTwitter();
table = new Table();
table.addColumn("id");
table.addColumn("latitude");
table.addColumn("longitude");
}
void draw(){
if (timer.isFinished()){
goTwitter();
timer.start();
}
}
void goTwitter(){
ConfigurationBuilder cb = new ConfigurationBuilder();
cb.setOAuthConsumerKey("");
cb.setOAuthConsumerSecret("");
cb.setOAuthAccessToken("");
cb.setOAuthAccessTokenSecret("");
Twitter twitter = new TwitterFactory(cb.build()).getInstance();
Query query = new Query("#love");
int numberOfTweets = 300;
ArrayList<Status> tweets = new ArrayList<Status>();
while (tweets.size () < numberOfTweets) {
if (numberOfTweets - tweets.size() > 100)
query.setCount(100);
else
query.setCount(numberOfTweets - tweets.size());
//long lastID = Long.MAX_VALUE;
try {
QueryResult result = twitter.search(query);
tweets.addAll(result.getTweets());
println("Gathered " + tweets.size() + " tweets");
for (Status t: tweets)
if(t.getId() < lastID) lastID = t.getId();
}
catch (TwitterException te) {
println("Couldn't connect: " + te);
};
query.setSinceId(lastID);
}
for (int i = 0; i < tweets.size(); i++) {
Status t = (Status) tweets.get(i);
GeoLocation loc = t.getGeoLocation();
String user = t.getUser().getScreenName();
String msg = t.getText();
String time = "";
if (loc!=null) {
Double lat = t.getGeoLocation().getLatitude();
Double lon = t.getGeoLocation().getLongitude();
println(i + " USER: " + user + " wrote: " + msg + " located at " + lat + ", " + lon);
TableRow newRow = table.addRow();
newRow.setString("id", user);
newRow.setDouble("latitude", lat);
newRow.setDouble("longitude", lon);
saveTable(table, "data2/syria_16500_5.csv");
}
}
println("lastID= " + lastID);
}
class Timer {
int savedTime;
int totalTime;
Timer (int tempTotalTime) {
totalTime = tempTotalTime;
}
void start(){
savedTime = millis();
}
boolean isFinished() {
int passedTime = millis() - savedTime;
if (passedTime > totalTime){
return true;
} else {
return false;
}
}
}
Well, there does not seem to be a direct implementation for appending to a table, so you'll have to resort to a hack: load the table in Processing, write to it, and resave it, sort of like this:
processing.data.Table table;
void setup() {
File f = new File(sketchPath("") + "data2/syria_16500_5.csv");
println(f.getAbsolutePath());
if (!f.exists()) {
table = new processing.data.Table();
table.addColumn("id");
table.addColumn("latitude");
table.addColumn("longitude");
}
else
table = loadTable("data2/syria_16500_5.csv", "header, csv");
TableRow newRow = table.addRow();
newRow.setString("id", "asad");
newRow.setDouble("latitude", 234);
newRow.setDouble("longitude", 2523);
saveTable(table, "data2/syria_16500_5.csv");
}
The sketch first checks whether the file exists. If it does not, it creates a new table; otherwise it loads the old table along with its header.
Be warned, this is not particularly safe: if you change your columns (say, in a text editor) and try to run the sketch again, you will get an exception.