Programmatically add transformation script component to ssis package - ssis

Can anyone help me, please? How do I programmatically add a transformation Script Component to a Data Flow task in an SSIS package?
I am creating an SSIS package programmatically using the API via C#. It consists of a flat file source and a SQL table (OLE DB destination). I have to add a Script Component to generate auto-incrementing numbers to fill the RowNum column of the table. The RowNum column is not an identity column.
CODE GIVEN...
/// <summary>
/// Builds an SSIS package in memory (flat-file source -> row-count transform ->
/// derived columns -> OLE DB destination), configures text-file logging, saves
/// the package to disk and executes it.
/// NOTE(review): a Script Component is created below but no IDTSPath100 ever
/// connects it to the pipeline, so it is not part of the data flow as written.
/// </summary>
/// <param name="LenMap">Maps a column ordinal (key, as used in MapInfo) to that column's width.</param>
/// <returns>The package execution result (Success/Failure).</returns>
public Microsoft.SqlServer.Dts.Runtime.DTSExecResult CreatePackage(Dictionary<int, int> LenMap)
{
Package package;
try
{
package = new Package();
}
catch (Exception ex)
{
// NOTE(review): wrapping in a bare Exception discards the original type; prefer `throw;`.
throw new Exception(ex.Message, ex);
}
package.MaximumErrorCount = 4;
package.Name = this.PackageName;
/* Add the Data Flow Task */
package.Executables.Add("STOCK:PipelineTask");
package.DelayValidation = false;
/* Get the task host wrapper, and the Data Flow task*/
TaskHost taskHost = package.Executables[0] as TaskHost;
MainPipe dataFlowTask = (MainPipe)taskHost.InnerObject;
/*===================================================================================
===============================ERROR LOGGING=======================================
===================================================================================*/
package.LoggingMode = DTSLoggingMode.Enabled;
//For package logging connection manager (flat file)
ConnectionManager LogConMgr = package.Connections.Add("FILE");
LogConMgr.ConnectionString = this.LogFilePath;
LogConMgr.Name = "SSISLog.txt";
// The text-file log provider resolves its output file via the connection manager's name.
LogProvider packagelogprovider = package.LogProviders.Add("DTS.LogProviderTextFile.2");
packagelogprovider.ConfigString = LogConMgr.Name;
packagelogprovider.Name = "Text File Log Provider";
packagelogprovider.Description = "Writes log entries for events to a CSV file";
package.LoggingOptions.SelectedLogProviders.Add(packagelogprovider);
LoggingOptions packageLogging = package.LoggingOptions;
// Inclusion filter: only the events listed below are logged.
packageLogging.EventFilterKind = DTSEventFilterKind.Inclusion;
packageLogging.EventFilter.Initialize();
packageLogging.EventFilter = new string[] { "OnError", "OnWarning", "OnInformation", "OnPreExecute", "OnExecute", "Diagnostic", "PipelineComponentTime", "OnExecStatusChanged", "OnPostExecute", "OnProgress", "OnPreValidate", "OnPostValidate" };
//OnError Event
DTSEventColumnFilter OnErrorColumnFilter = new DTSEventColumnFilter();
OnErrorColumnFilter.Computer = true;
OnErrorColumnFilter.SourceName = true;
OnErrorColumnFilter.MessageText = true;
// NOTE(review): SourceName is assigned twice — the repeat below is redundant.
OnErrorColumnFilter.SourceName = true;
OnErrorColumnFilter.SourceID = true;
OnErrorColumnFilter.DataBytes = true;
packageLogging.SetColumnFilter("OnError", OnErrorColumnFilter);
//OnWarning Event
DTSEventColumnFilter OnWarningColumnFilter = new DTSEventColumnFilter();
OnWarningColumnFilter.Computer = true;
OnWarningColumnFilter.SourceName = true;
OnWarningColumnFilter.MessageText = true;
packageLogging.SetColumnFilter("OnWarning", OnWarningColumnFilter);
/*==================================================================================
================END ERROR LOGGING=================================================
==================================================================================*/
/*=================================================================================
=========================== FOR SOURCE CONNECTION ===============================
=================================================================================*/
/* Add the Flat File connection*/
ConnectionManager connectionManagerFlatFile = package.Connections.Add("FLATFILE");
connectionManagerFlatFile.ConnectionString = this.FlatFileConnString;
connectionManagerFlatFile.Name = "FlatFile";
connectionManagerFlatFile.Properties["Format"].SetValue(connectionManagerFlatFile, this.FlatFileColFormat);
connectionManagerFlatFile.Properties["ColumnNamesInFirstDataRow"].SetValue(connectionManagerFlatFile, this.FirstRowAsCol);
/* Get native flat file connection */
RuntimeWrapper.IDTSConnectionManagerFlatFile100 connectionFlatFile =
connectionManagerFlatFile.InnerObject as RuntimeWrapper.IDTSConnectionManagerFlatFile100;
/* Specify column delimiter*/
connectionFlatFile.HeaderRowDelimiter = Environment.NewLine;
connectionFlatFile.RowDelimiter = Environment.NewLine;
connectionFlatFile.DataRowsToSkip = 0;
connectionFlatFile.HeaderRowsToSkip = RowsToSkip;
//connectionFlatFile.ColumnNamesInFirstDataRow = this.firstRowAsCol;
/*Determine the number of columns by reading Mapping Information.*/
// NOTE(review): key 0 appears to be reserved/ignored; every other MapInfo entry
// produces one flat-file column sized from LenMap — confirm against the caller.
foreach (DictionaryEntry de in (IDictionary)MapInfo)
{
if (Int32.Parse(de.Key.ToString()) != 0)
{
RuntimeWrapper.IDTSConnectionManagerFlatFileColumn100 flatFileCol =
connectionFlatFile.Columns.Add() as RuntimeWrapper.IDTSConnectionManagerFlatFileColumn100;
//sS_AssignColumnProperties(flatFileCol, parts[col], new string(delimit));
sS_AssignColumnProperties(flatFileCol, de.Value.ToString(), LenMap[Int32.Parse(de.Key.ToString())]);
}
}
//Correct the last Flat File column delimiter, needs to be NewLine not Comma
connectionFlatFile.Columns[connectionFlatFile.Columns.Count - 1].ColumnDelimiter = Environment.NewLine;
// Check if columns generated
if (connectionFlatFile.Columns.Count == 0)
{
throw new ArgumentException(string.Format("No flat file columns have been created. "));
}
// Add Flat File source component
IDTSComponentMetaData100 componentSource = dataFlowTask.ComponentMetaDataCollection.New();
componentSource.Name = "FlatFileSource";
componentSource.ComponentClassID = "DTSAdapter.FlatFileSource.2";
// componentSource.UsesDispositions = true;
// Get source design-time instance, and initialise component
CManagedComponentWrapper instanceSource = componentSource.Instantiate();
instanceSource.ProvideComponentProperties();
// Set source connection
componentSource.RuntimeConnectionCollection[0].ConnectionManagerID = connectionManagerFlatFile.ID;
componentSource.RuntimeConnectionCollection[0].ConnectionManager =
DtsConvert.GetExtendedInterface(connectionManagerFlatFile);
// Reinitialize the metadata, generating output columns from the flat-file columns.
instanceSource.AcquireConnections(null);
instanceSource.ReinitializeMetaData();
instanceSource.ReleaseConnections();
IDTSOutput100 output = componentSource.OutputCollection[0];
// NOTE(review): ColumnsToRemove is never used after this point — dead code.
Stack<int> ColumnsToRemove = new Stack<int>();
// Ignore conversion/truncation failures on every source output column.
for (int outCol = 0; outCol < output.OutputColumnCollection.Count; outCol++)
{
IDTSOutputColumn100 column = output.OutputColumnCollection[outCol];
column.ErrorRowDisposition = DTSRowDisposition.RD_IgnoreFailure;
column.TruncationRowDisposition = DTSRowDisposition.RD_IgnoreFailure;
}
/*===================================================================================
* ===================================Add Row Number Transformation==================
* ===================================================================================*/
// Create a package variable to store the row count value
// NOTE(review): the Row Count transform only writes this variable after the data
// flow finishes — it cannot provide a per-row running number during execution.
package.Variables.Add("RowCountVar", false, "User", 0);
IDTSComponentMetaData100 componentRowNumber = dataFlowTask.ComponentMetaDataCollection.New();
componentRowNumber.Name = "RowNumberTransformation";
componentRowNumber.ComponentClassID = "DTSTransform.RowCount";
CManagedComponentWrapper instanceRowNumber = componentRowNumber.Instantiate();
instanceRowNumber.ProvideComponentProperties();
// Set the variable name property
instanceRowNumber.SetComponentProperty("VariableName", "User::RowCountVar");
// Connect the two components together
IDTSPath100 path = dataFlowTask.PathCollection.New();
path.AttachPathAndPropagateNotifications(componentSource.OutputCollection[0], componentRowNumber.InputCollection[0]);
/*End Row Number Transformation*/
/*=================================================================================
======================== FOR DESTINATION CONNECTION =============================
=================================================================================*/
/* Add the SQL OLE-DB connection*/
ConnectionManager connectionManagerOleDb = package.Connections.Add("OLEDB");
//connectionManagerOleDb.ConnectionString = this.OleDbConnString;
// NOTE(review): credentials are hard-coded in the connection string — move to configuration.
connectionManagerOleDb.ConnectionString = string.Format(
"Provider=SQLOLEDB;Data Source={0};Initial Catalog={1};User ID=truser;Password=truser", this.DBServerName, this.DBName);
connectionManagerOleDb.Name = "OLEDB";
/*=================================================================================
============================Derived Columns======================================
=================================================================================*/
//Derived Column
IDTSComponentMetaData100 derived = dataFlowTask.ComponentMetaDataCollection.New();
derived.Name = "Derived Column Component";
derived.ComponentClassID = "DTSTransform.DerivedColumn";
CManagedComponentWrapper DesignDerivedColumns = derived.Instantiate();
DesignDerivedColumns.ProvideComponentProperties(); //design time
derived.InputCollection[0].ExternalMetadataColumnCollection.IsUsed = false;
derived.InputCollection[0].HasSideEffects = true;
//update the metadata for the derived columns
DesignDerivedColumns.AcquireConnections(null);
DesignDerivedColumns.ReinitializeMetaData();
DesignDerivedColumns.ReleaseConnections();
//Create the path from the row-count transform to the derived-column component
IDTSPath100 SourceToDerivedPath = dataFlowTask.PathCollection.New();
SourceToDerivedPath.AttachPathAndPropagateNotifications(componentRowNumber.OutputCollection[0], derived.InputCollection[0]);
/*Replace Values of Input to null for empty string*/
// Get the derived's default input and virtual input.
IDTSInput100 input = derived.InputCollection[0];
IDTSVirtualInput100 derivedInputVirtual = input.GetVirtualInput();
IDTSCustomProperty100 property = null;
// Iterate through the virtual input column collection.
// Mark every upstream column read/write so its value can be overwritten in place.
foreach (IDTSVirtualInputColumn100 vColumn in derivedInputVirtual.VirtualInputColumnCollection)
{
DesignDerivedColumns.SetUsageType(input.ID, derivedInputVirtual, vColumn.LineageID, DTSUsageType.UT_READWRITE);
}
// For each input column: trim it, and replace an all-whitespace value with NULL.
foreach (IDTSInputColumn100 inputColumn in derived.InputCollection[0].InputColumnCollection)
{
inputColumn.Description = string.Format("Override the orginal column {0} with a null value if the string is empty.", inputColumn.Name);
property = inputColumn.CustomPropertyCollection["Expression"];
property.Name = "Expression";
property.Value = string.Format("(DT_STR,{0},1252)(LEN(TRIM([{1}])) == 0 ? (DT_STR,{0},1252)(NULL(DT_STR,{0},1252)) : TRIM([{1}]))", inputColumn.Length, inputColumn.Name);
property = inputColumn.CustomPropertyCollection["FriendlyExpression"];
property.Name = "FriendlyExpression";
property.Value = string.Format("(DT_STR,{0},1252)(LEN(TRIM([{1}])) == 0 ? (DT_STR,{0},1252)(NULL(DT_STR,{0},1252)) : TRIM([{1}]))", inputColumn.Length, inputColumn.Name);
inputColumn.ErrorRowDisposition = DTSRowDisposition.RD_IgnoreFailure;
inputColumn.TruncationRowDisposition = DTSRowDisposition.RD_IgnoreFailure;
}
/*End Replace Values of Input to null for empty string*/
//For Batch Id
IDTSOutputColumn100 myCol = derived.OutputCollection[0].OutputColumnCollection.New();
myCol.Name = "BtchId";
myCol.SetDataTypeProperties(Microsoft.SqlServer.Dts.Runtime.Wrapper.DataType.DT_I4, 0, 0, 0, 0);
// NOTE(review): 0 appears to mean "not mapped to an external column" — confirm.
myCol.ExternalMetadataColumnID = 0;
myCol.ErrorRowDisposition = DTSRowDisposition.RD_IgnoreFailure;
myCol.TruncationRowDisposition = DTSRowDisposition.RD_IgnoreFailure;
IDTSCustomProperty100 myProp = myCol.CustomPropertyCollection.New();
myProp.Name = "Expression";
myProp.Value = "\"" + this.BatchId.ToString() + "\"";
myProp = myCol.CustomPropertyCollection.New();
myProp.Name = "FriendlyExpression";
myProp.Value = "\"" + this.BatchId.ToString() + "\"";
// For FileNm
IDTSOutputColumn100 fleNmCol = derived.OutputCollection[0].OutputColumnCollection.New();
fleNmCol.Name = "CSVFileNm";
fleNmCol.SetDataTypeProperties(Microsoft.SqlServer.Dts.Runtime.Wrapper.DataType.DT_STR, 1100, 0, 0, 1252);
fleNmCol.ExternalMetadataColumnID = 0;
fleNmCol.ErrorRowDisposition = DTSRowDisposition.RD_IgnoreFailure;
fleNmCol.TruncationRowDisposition = DTSRowDisposition.RD_IgnoreFailure;
IDTSCustomProperty100 fleNmProp = fleNmCol.CustomPropertyCollection.New();
fleNmProp.Name = "Expression";
fleNmProp.Value = " \" " + this.FileName.Replace("\\", "\\\\") + " \"";
fleNmProp = fleNmCol.CustomPropertyCollection.New();
fleNmProp.Name = "FriendlyExpression";
fleNmProp.Value = " \" " + "csvFile.csv" + " \"";
// For FileDate
IDTSOutputColumn100 fleDtCol = derived.OutputCollection[0].OutputColumnCollection.New();
fleDtCol.Name = "FileDt";
fleDtCol.SetDataTypeProperties(Microsoft.SqlServer.Dts.Runtime.Wrapper.DataType.DT_STR, 50, 0, 0, 1252);
fleDtCol.ExternalMetadataColumnID = 0;
fleDtCol.ErrorRowDisposition = DTSRowDisposition.RD_IgnoreFailure;
fleDtCol.TruncationRowDisposition = DTSRowDisposition.RD_IgnoreFailure;
IDTSCustomProperty100 fleDtProp = fleDtCol.CustomPropertyCollection.New();
fleDtProp.Name = "Expression";
fleDtProp.Value = "\"" + this.FileDate + "\"";
fleDtProp = fleDtCol.CustomPropertyCollection.New();
fleDtProp.Name = "FriendlyExpression";
fleDtProp.Value = "\"" + this.FileDate + "\"";
// NOTE(review): second "row number" variable — RowCountVar above is never read; one of the two is redundant.
package.Variables.Add("RowNumber", false, "User", 1);
// For RowNum
// NOTE(review): DATEPART("Ms", GETDATE()) does NOT yield sequential row numbers;
// the script component below was presumably intended to overwrite this value.
IDTSOutputColumn100 RowNumCol = derived.OutputCollection[0].OutputColumnCollection.New();
RowNumCol.Name = "RowNum";
RowNumCol.SetDataTypeProperties(Microsoft.SqlServer.Dts.Runtime.Wrapper.DataType.DT_STR, 3, 0, 0, 1252);
RowNumCol.ExternalMetadataColumnID = 0;
RowNumCol.ErrorRowDisposition = DTSRowDisposition.RD_IgnoreFailure;
RowNumCol.TruncationRowDisposition = DTSRowDisposition.RD_IgnoreFailure;
IDTSCustomProperty100 RowNumColProp = RowNumCol.CustomPropertyCollection.New();
RowNumColProp.Name = "Expression";
RowNumColProp.Value = "DATEPART(\"Ms\",GetDate())";
RowNumColProp = RowNumCol.CustomPropertyCollection.New();
RowNumColProp.Name = "FriendlyExpression";
RowNumColProp.Value = "DATEPART(\"Ms\",GetDate())";
/*Script Component*/
// NOTE(review): the script component is configured below, but no IDTSPath100 ever
// attaches it between the derived-column component and the destination, so it
// never executes as written.
IDTSComponentMetaData100 scriptPropType = dataFlowTask.ComponentMetaDataCollection.New();
scriptPropType.Name = "Transform Property Type";
scriptPropType.ComponentClassID = "DTSTransform.ScriptComponent";
scriptPropType.Description = "Transform Property Type";
CManagedComponentWrapper instance2 = scriptPropType.Instantiate();
instance2.ProvideComponentProperties();
// SourceCode is a 4-element array: project path, project file content, item path, item source.
// NOTE(review): the "dts://Scripts/.../*.vsaproj" layout is the SSIS 2005 (VSA) format,
// while the other components here target the 2008 (*.2) versions — verify the match.
string[] scriptValue = new String[4];
scriptValue[0] = "dts://Scripts/" + "ScriptComponent_9ddf9eba6f5b488f9710b95e4d7e7c74" + "/" + "ScriptComponent_9ddf9eba6f5b488f9710b95e4d7e7c74" + ".vsaproj";
scriptValue[1] = Resources.ProjectFile;//????????
scriptValue[2] = "dts://Scripts/" + "ScriptComponent_9ddf9eba6f5b488f9710b95e4d7e7c74" + "/ScriptMain.vsaitem";
// NOTE(review): the embedded script has defects of its own: the `using` lines lack
// semicolons, and `postExecute` should be `PostExecute` (C# is case-sensitive).
scriptValue[3] = "/* Microsoft SQL Server Integration Services user script component\r\n"
+ "* This is your new script component in Microsoft Visual Basic .NET \r\n"
+ "* ScriptMain is the entrypoint class for script components*/\r\n"
+ "\r\n"
+ "using System \r\n"
+ "using System.Data \r\n"
+ "using System.Math \r\n"
+ "using Microsoft.SqlServer.Dts.Pipeline.Wrapper \r\n"
+ "using Microsoft.SqlServer.Dts.Runtime.Wrapper \r\n"
+ " \r\n"
+ "public class ScriptMain:UserComponent \r\n"
+ "{ \r\n"
+ " int i;\r\n"
+ "public override void PreExecute(){\r\n"
+ "base.PreExecute();\r\n"
+ " i=0;\r\n"
+ "}\r\n"
+ "public override void postExecute(){\r\n"
+ "base.postExecute();\r\n"
+ "}\r\n"
+ " public override void Input0_ProcessInputRow(Input0Buffer Row){ \r\n"
+ " ++i;\r\n"
+ " Row.RowNum = i.ToString(); \r\n"
+ " } \r\n"
+ "} \r\n";
IDTSDesigntimeComponent100 scriptComponentDesignTime = instance2 as IDTSDesigntimeComponent100;
scriptComponentDesignTime.SetComponentProperty("SourceCode", scriptValue);
scriptComponentDesignTime.SetComponentProperty("PreCompile", false);
/*End Script Component*/
/*=================================================================================
======================== FOR DESTINATION ========================================
=================================================================================*/
/*Add OLE-DB destination*/
IDTSComponentMetaData100 componentDestination = dataFlowTask.ComponentMetaDataCollection.New();
componentDestination.Name = "OLEDBDestination";
componentDestination.ComponentClassID = "DTSAdapter.OLEDBDestination.2";
/* Get destination design-time instance, and initialise component*/
CManagedComponentWrapper instanceDestination = componentDestination.Instantiate();
instanceDestination.ProvideComponentProperties();
/* Set destination connection*/
componentDestination.RuntimeConnectionCollection[0].ConnectionManagerID = connectionManagerOleDb.ID;
componentDestination.RuntimeConnectionCollection[0].ConnectionManager =
DtsConvert.GetExtendedInterface(connectionManagerOleDb);
/* Set destination table name*/
instanceDestination.SetComponentProperty("OpenRowset", this.TargetTable);
instanceDestination.SetComponentProperty("AccessMode", 0);
//instanceDestination.SetComponentProperty("ProtectionLevel", "SaveSensitive");
instanceDestination.SetComponentProperty("DefaultCodePage", 1252);
/* Set Data Flow direction*/
//Create the path from derived to destination
IDTSPath100 DerivedToDestinationPath = dataFlowTask.PathCollection.New();
DerivedToDestinationPath.AttachPathAndPropagateNotifications(derived.OutputCollection[0], componentDestination.InputCollection[0]);
/* Get input and virtual input for destination to select and map columns*/
IDTSInput100 destinationInput = componentDestination.InputCollection[0];
IDTSVirtualInput100 destinationVirtualInput = destinationInput.GetVirtualInput();
IDTSVirtualInputColumnCollection100 destinationVirtualInputColumns = destinationVirtualInput.VirtualInputColumnCollection;
//componentDestination.ValidateExternalMetadata = false;
/* Reinitialize the metadata, generating external columns from flat file columns
If errors are raised here, it is most likely because the flat file connection columns
are wrong, which itself is probably because the template table does not match the file.*/
instanceDestination.AcquireConnections(null);
instanceDestination.ReinitializeMetaData();
instanceDestination.ReleaseConnections();
/*=================================================================================
======================== MAP DESTINATION ========================================
=================================================================================*/
/*Select and map destination columns*/
foreach (IDTSVirtualInputColumn100 virtualInputColumn in destinationVirtualInputColumns)
{
// Select column, and retain new input column
IDTSInputColumn100 inputColumn = instanceDestination.SetUsageType(destinationInput.ID,
destinationVirtualInput, virtualInputColumn.LineageID, DTSUsageType.UT_READWRITE);
// Find external column by name
IDTSExternalMetadataColumn100 externalColumn =
destinationInput.ExternalMetadataColumnCollection[inputColumn.Name];
// Map input column to external column
instanceDestination.MapInputColumn(destinationInput.ID, inputColumn.ID, externalColumn.ID);
}
// NOTE(review): hard-coded absolute save path — should come from configuration.
new Application().SaveToXml("E:\\TrustPortal-All\\SourceCode\\TrustSolution\\Web\\TrustSolution\\TrustRite\\OtherActivity\\Import\\Temp\\test.dtsx", package, null);
Microsoft.SqlServer.Dts.Runtime.DTSExecResult results = package.Execute();
this.RowCount = GetRowCount(package);
// NOTE(review): `message` is assembled below but never logged or returned.
string message = "";
if (results == Microsoft.SqlServer.Dts.Runtime.DTSExecResult.Failure)
{
foreach (Microsoft.SqlServer.Dts.Runtime.DtsError local_DtsError in package.Errors)
{
message += local_DtsError.Description.ToString();
}
}
if (results == Microsoft.SqlServer.Dts.Runtime.DTSExecResult.Success)
{
message = "Package Executed Successfully....";
}
package.Dispose();
return results;
}
}
/// <summary>
/// Configures one flat-file connection column: format, string data type,
/// width taken from the mapping, and the supplied column name.
/// </summary>
/// <param name="flatFileCol">The flat-file column to configure.</param>
/// <param name="getColName">Name to assign to the column.</param>
/// <param name="colLength">Width (in characters) for the column.</param>
private void sS_AssignColumnProperties(RuntimeWrapper.IDTSConnectionManagerFlatFileColumn100 flatFileCol, string getColName, int colLength)
{
// Column format (delimited/fixed) follows the package-level setting.
flatFileCol.ColumnType = this.FlatFileColFormat;
// Every source column is imported as a string sized from the mapping.
flatFileCol.DataType = RuntimeWrapper.DataType.DT_STR;
flatFileCol.DataPrecision = 0;
flatFileCol.DataScale = 0;
flatFileCol.MaximumWidth = colLength;
flatFileCol.ColumnWidth = colLength;
// The runtime column object exposes its name through the IDTSName100 interface.
RuntimeWrapper.IDTSName100 namedColumn = flatFileCol as RuntimeWrapper.IDTSName100;
namedColumn.Name = getColName;
}

Related

Export data from the mssql database to a csv file

good work
VB.NET: I am starting a new project whose purpose is to extract data from certain tables in the database and save it as CSV.
"##FILE VERSION##","251" "##TABLEDEF START##"
"MESAJ=String,50,""MESAJ"","""",50,Data,"""""
"ID=Integer,0,""ID"","""",10,Data,"""""
"SUBEIND=Integer,0,""SUBEIND"","""",10,Data,"""""
"KASAIND=Integer,0,""KASAIND"","""",10,Data,""""" "##INDEXDEF START##"
"##INDEXDEF END##" "##TABLEDEF END##"
"MESAJ","ID","SUBEIND","KASAIND", "YeniFirma","112","100","101",
"YeniCari","100","100","101", "YeniStok","101","100","101", –
Send your SQL DataSet result as a parameter to this function. It creates CSV-formatted output for you.
/// <summary>
/// Converts the first table of <paramref name="objDataSet"/> to CSV text:
/// a quoted header row followed by one quoted row per data row, each line
/// terminated with CRLF. Returns an empty string when there is no table or
/// the first table has no rows.
/// NOTE: values are wrapped in quotes but embedded quotes are not escaped,
/// matching the original snippet's behaviour.
/// </summary>
/// <param name="objDataSet">Data set whose first table is exported.</param>
/// <returns>The CSV representation of the first table.</returns>
public string ConvertToCSV(DataSet objDataSet)
{
    StringBuilder content = new StringBuilder();
    if (objDataSet.Tables.Count >= 1)
    {
        System.Data.DataTable table = objDataSet.Tables[0];
        if (table.Rows.Count > 0)
        {
            int intColumnCount = table.Columns.Count;
            // Header row: quoted column names separated by commas.
            int index = 1;
            foreach (DataColumn item in table.Columns)
            {
                content.Append(String.Format("\"{0}\"", item.ColumnName));
                content.Append(index < intColumnCount ? "," : "\r\n");
                index++;
            }
            // Data rows: every value quoted, comma separated, CRLF terminated.
            foreach (DataRow currentRow in table.Rows)
            {
                string strRow = string.Empty;
                for (int y = 0; y <= intColumnCount - 1; y++)
                {
                    strRow += "\"" + currentRow[y].ToString() + "\"";
                    if (y < intColumnCount - 1)
                        strRow += ",";
                }
                content.Append(strRow + "\r\n");
            }
        }
    }
    // BUG FIX: the original snippet never returned the accumulated text
    // (and was missing its closing brace).
    return content.ToString();
}
This function send a mail:
/// <summary>
/// Sends the supplied CSV content as an e-mail attachment on a background thread.
/// NOTE(review): sender/recipients/credentials are hard-coded placeholders
/// ("xx"/"xxx") — replace them with configuration before production use.
/// </summary>
/// <param name="csv">CSV text to attach as "&lt;date&gt;Report.csv".</param>
public void sendMail(string csv)
{
    var sendMailThread = new Thread(() =>
    {
        // Wrap the CSV text in an in-memory stream; it must stay alive until
        // SendAsync completes, so it is intentionally not disposed here.
        MemoryStream stream = new MemoryStream(Encoding.ASCII.GetBytes(csv));
        Attachment attachment = new Attachment(stream, new ContentType("text/csv"));
        attachment.Name = DateTime.Now.ToShortDateString() + "Report.csv";
        MailMessage ePosta = new MailMessage();
        ePosta.From = new MailAddress("xx");
        ePosta.To.Add("xxx");
        ePosta.CC.Add("xxx");
        ePosta.CC.Add("xxx");
        ePosta.Attachments.Add(attachment);
        ePosta.Subject = DateTime.Now + " Subject";
        ePosta.Body = DateTime.Now + " body message.";
        SmtpClient smtp = new SmtpClient();
        smtp.Credentials = new System.Net.NetworkCredential("xxx", "xxx");
        smtp.Port = 587;
        smtp.Host = "smtp.gmail.com";
        smtp.EnableSsl = true;
        // The message is passed as the user-state token so a completion callback
        // could dispose it. (FIX: removed an unused local `userState` that
        // duplicated this value, and the redundant cast to object.)
        smtp.SendAsync(ePosta, ePosta);
    });
    sendMailThread.Start();
}

Downloading Attachment from Exchange Server using SSIS package deployed on another Server

I have built an SSIS package which reads CSVs from a certain folder, but now I need to download the same CSV from an Exchange server. Also, Outlook is not installed on my machine. Will I be able to download the CSV from the Exchange server, and how? Thanks!
I have used some of the code from the link http://sqlandbilearning.blogspot.com.au/2014/07/download-email-attachment-using-ssis.html but i have added some new code for removing TCP binding error using ServicePointManager as well as added search filter for retrieving specific emails and this code also takes care of multiple attachment from different emails to be saved on file system.
/// <summary>
/// SSIS Script Task entry point: connects to Exchange via EWS, finds today's
/// "Scheduled search" e-mails received between 07:55 and 08:15, saves each
/// message's single attachment into User::SourceFolderPath, extracts it as a
/// ZIP archive, then deletes the downloaded archive file.
/// </summary>
public void Main()
{
string filePath = "";
string fileName = "";
List<SearchFilter> searchFilterCollection = new List<SearchFilter>();
DateTime now = DateTime.Now;
// Expected delivery window for today's report (07:55 – 08:15 local time).
DateTime beginRecievedTime = new DateTime(now.Year, now.Month, now.Day, 7, 55, 0);
DateTime finishRecievedTime = new DateTime(now.Year, now.Month, now.Day, 8, 15, 0);
// NOTE(review): latestEmail is never assigned or read — dead variable.
EmailMessage latestEmail = null;
try
{
// NOTE(review): accepts ANY server certificate — this disables TLS validation
// (works around the TCP-binding error mentioned in the post, but is unsafe
// outside a trusted network).
ServicePointManager.ServerCertificateValidationCallback = (sender, certificate, chain, sslPolicyErrors) => true;
ExchangeService service = new ExchangeService(ExchangeVersion.Exchange2010);
service.UseDefaultCredentials = true;
//service.Credentials = new WebCredentials("username", "password");
// NOTE(review): empty placeholder — must be set to the real EWS endpoint URL,
// otherwise the Uri constructor throws at runtime.
service.Url = new Uri("");
// 10 mails per page in DESC order
ItemView view = new ItemView(10);
view.OrderBy.Add(ItemSchema.DateTimeReceived, SortDirection.Descending);
// Combined filter: subject contains "Scheduled search" AND arrival time is in the window.
searchFilterCollection.Add(new SearchFilter.ContainsSubstring(ItemSchema.Subject, "Scheduled search"));
SearchFilter greaterthanfilter = new SearchFilter.IsGreaterThanOrEqualTo(ItemSchema.DateTimeReceived, beginRecievedTime);
searchFilterCollection.Add(greaterthanfilter);
SearchFilter lessthanfilter = new SearchFilter.IsLessThan(ItemSchema.DateTimeReceived, finishRecievedTime);
searchFilterCollection.Add(lessthanfilter);
SearchFilter filter = new SearchFilter.SearchFilterCollection(LogicalOperator.And, searchFilterCollection);
//Find mails
FindItemsResults<Item> fir = service.FindItems(WellKnownFolderName.Inbox, filter, view);
// Map each matching e-mail to the path its attachment should be saved to.
Dictionary<EmailMessage, string> emailsMap = new Dictionary<EmailMessage, string>();
foreach (Item item in fir.Items)
{
item.Load(); //Load the entire message with attachment
EmailMessage email = item as EmailMessage;
if (email != null)
{
// Only single-attachment messages are handled.
if (email.HasAttachments == true && email.Attachments.Count == 1)
{
if (email.Subject.StartsWith("Scheduled search") == true)
{
// Target file name: "<MM.dd.yyyy>_<original attachment name>".
filePath = Path.Combine(Dts.Variables["User::SourceFolderPath"].Value.ToString()
, email.DateTimeReceived.Date.ToString("MM.dd.yyyy") + "_" +
email.Attachments[0].Name);
// fileName = email.DateTimeReceived.Date.ToString("MM.dd.yyyy") + "_" +
// email.Attachments[0].Name.ToString();
emailsMap.Add(email, filePath);
}
}
}
}
if (emailsMap.Count > 0) {
foreach (var item in emailsMap) {
//Save attachment
EmailMessage email = item.Key;
filePath = item.Value;
FileAttachment fileAttachment = email.Attachments[0] as FileAttachment;
fileAttachment.Load(filePath);
// Assumes the attachment is a ZIP archive: extract it into a folder named
// after the attachment, then delete the downloaded archive itself below.
string extractPath = Dts.Variables["User::SourceFolderPath"].Value.ToString() + "\\" + email.Attachments[0].Name;
System.IO.Compression.ZipFile.ExtractToDirectory(filePath, extractPath);
fileName = Dts.Variables["User::SourceFolderPath"].Value.ToString() + "\\" + email.DateTimeReceived.Date.ToString("MM.dd.yyyy") + "_" +
email.Attachments[0].Name.ToString();
if (File.Exists(fileName))
{
File.Delete(fileName);
}
}
}
// Dts.Variables["User::SourceFileName"].Value = fileName;
Dts.TaskResult = (int)ScriptResults.Success;
}
catch(System.Runtime.InteropServices.COMException ex)
{
// NOTE(review): only COMException is handled — EWS/IO failures propagate uncaught.
if (Dts.Variables.Locked == true)
{
Dts.Variables.Unlock();
}
//An error occurred.
Dts.Events.FireError(0, "Error occured", ex.Message, String.Empty, 0);
Dts.TaskResult = (int)ScriptResults.Failure;
}
}

Export html to Excel format? [duplicate]

I want to extract some data, such as email addresses, from tables in a PDF file and use the extracted addresses to send email to those people.
What I have found so far through searching the web:
I have to convert the PDF file to Excel to read the data easily and use them as I want.
I find some free dll like itextsharp or PDFsharp.
But I didn't find any snippet code help to do this in C#. is there any solution ?
You absolutely do not have to convert PDF to Excel.
First of all, please determine whether your PDF contains textual data, or it is scanned image.
If it contains textual data, then you are right about using "some free dll". I recommend iTextSharp as it is popular and easy to use.
Now the controversial part. If you don't need rock solid solution, it would be easiest to read all PDF to a string and then retrieve emails using regular expression.
Here is example (not perfect) of reading PDF with iTextSharp and extracting emails:
/// <summary>
/// Reads every page of the given PDF with iTextSharp and returns the
/// concatenated text content of all pages, in page order.
/// </summary>
/// <param name="fileName">Path of the PDF document to read.</param>
/// <returns>The extracted text of the whole document.</returns>
public string PdfToString(string fileName)
{
    var buffer = new StringBuilder();
    var reader = new PdfReader(fileName);
    int pageCount = reader.NumberOfPages;
    for (int pageNumber = 1; pageNumber <= pageCount; pageNumber++)
    {
        // Pull the raw text of this page with the simple extraction strategy.
        string pageText = PdfTextExtractor.GetTextFromPage(reader, pageNumber, new SimpleTextExtractionStrategy());
        // Round-trip the text through the default code page into UTF-8 bytes,
        // exactly as the original implementation did.
        byte[] defaultBytes = Encoding.Default.GetBytes(pageText);
        byte[] utf8Bytes = ASCIIEncoding.Convert(Encoding.Default, Encoding.UTF8, defaultBytes);
        buffer.Append(Encoding.UTF8.GetString(utf8Bytes));
    }
    reader.Close();
    return buffer.ToString();
}
//adjust expression as needed
// IMPROVEMENT: cached as a static, compiled regex — the pattern never changes,
// so one instance is shared by all callers instead of one per object.
private static readonly Regex emailRegex = new Regex("Email Address (?<email>.+?) Passport No", RegexOptions.Compiled);

/// <summary>
/// Lazily yields every e-mail address captured by <see cref="emailRegex"/>
/// from the supplied text (e.g. the output of PdfToString).
/// </summary>
/// <param name="content">Text to scan for "Email Address ... Passport No" spans.</param>
/// <returns>The captured "email" group of each match, in document order.</returns>
public IEnumerable<string> ExtractEmails(string content)
{
    foreach (Match m in emailRegex.Matches(content))
    {
        yield return m.Groups["email"].Value;
    }
}
Using bytescout PDF Extractor SDK we can be able to extract the whole page to csv as below.
// ByteScout PDF Extractor SDK: export every detected table on every page to a CSV file.
CSVExtractor extractor = new CSVExtractor();
extractor.RegistrationName = "demo";
extractor.RegistrationKey = "demo";
TableDetector tdetector = new TableDetector();
tdetector.RegistrationKey = "demo";
tdetector.RegistrationName = "demo";
// Load the document
extractor.LoadDocumentFromFile("C:\\sample.pdf");
tdetector.LoadDocumentFromFile("C:\\sample.pdf");
int pageCount = tdetector.GetPageCount();
for (int i = 1; i <= pageCount; i++)
{
// j numbers the CSV output file for each table found on page i.
int j = 1;
do
{
// NOTE(review): the extraction area is set to the full page rectangle on every
// iteration — presumably FindNextTable() advances the detector's internal state;
// verify whether table-specific bounds should be used here instead.
extractor.SetExtractionArea(tdetector.GetPageRect_Left(i),
tdetector.GetPageRect_Top(i),
tdetector.GetPageRect_Width(i),
tdetector.GetPageRect_Height(i)
);
// and finally save the table into CSV file
extractor.SavePageCSVToFile(i, "C:\\page-" + i + "-table-" + j + ".csv");
j++;
} while (tdetector.FindNextTable()); // search next table
}
/// <summary>
/// Converts a PDF to a single Excel workbook: each pre-split single-page PDF
/// ("name_1.pdf", "name_2.pdf", ...) is exported to CSV with the ByteScout
/// extractor, every CSV becomes one worksheet via Excel interop, and the
/// workbook is saved as "name_rez.xls". The temporary CSV folder is deleted
/// at the end.
/// NOTE(review): depends on sibling members releaseObject() and
/// AlphanumComparatorFast, and assumes the per-page PDF files already exist.
/// </summary>
/// <param name="fileNames">Full path of the source PDF file.</param>
public void Convert(string fileNames) {
    iTextSharp.text.pdf.PdfReader reader = new iTextSharp.text.pdf.PdfReader(fileNames);
    int pageCount = reader.NumberOfPages;
    reader.Close(); // FIX: release the PDF file handle (the original leaked it)
    string ext = System.IO.Path.GetExtension(fileNames);
    CSVExtractor extractor = new CSVExtractor();
    // Output workbook lives next to the source file: "<name>_rez.xls".
    string outfilePDFExcel1 = fileNames.Replace((System.IO.Path.GetFileName(fileNames)),
    (System.IO.Path.GetFileName(fileNames).Replace(".pdf", "") + "_rez" + ".xls"));
    extractor.RegistrationName = "demo";
    extractor.RegistrationKey = "demo";
    // FIX: the verbatim-string prefix was mangled to '#' in the post; it must be '@'.
    // NOTE(review): hard-coded user path — should come from configuration.
    string folderName = @"C:\Users\Dafina\Desktop\PDF_EditProject\PDF_EditProject\PDFs";
    string pathString = System.IO.Path.Combine(folderName, System.IO.Path.GetFileName(fileNames).Replace(".pdf", "")) + "-CSVs";
    System.IO.Directory.CreateDirectory(pathString);
    // Export each page's PDF split to its own CSV inside the temp folder.
    for (int i = 0; i < pageCount; i++)
    {
        string outfilePDF = fileNames.Replace((System.IO.Path.GetFileName(fileNames)),
        (System.IO.Path.GetFileName(fileNames).Replace(".pdf", "") + "_" + (i + 1).ToString()) + ext);
        extractor.LoadDocumentFromFile(outfilePDF);
        string outfile = fileNames.Replace((System.IO.Path.GetFileName(fileNames)),
        (System.IO.Path.GetFileName(fileNames).Replace(".pdf", "") + "-CSVs\\" + "Sheet_" + (i + 1).ToString()) + ".csv");
        extractor.SaveCSVToFile(outfile);
    }
    Excel.Application xlApp = new Microsoft.Office.Interop.Excel.Application();
    if (xlApp == null)
    {
        Console.WriteLine("Excel is not properly installed!!");
        return;
    }
    Excel.Workbook xlWorkBook;
    object misValue = System.Reflection.Missing.Value;
    xlWorkBook = xlApp.Workbooks.Add(misValue);
    // Natural-sort the CSVs so "Sheet_10" follows "Sheet_9", not "Sheet_1".
    string[] cvsFiles = Directory.GetFiles(pathString);
    Array.Sort(cvsFiles, new AlphanumComparatorFast());
    Microsoft.Office.Interop.Excel.Worksheet xlWorkSheet;
    for (int i = 0; i < cvsFiles.Length; i++)
    {
        int sheet = i + 1;
        xlWorkSheet = xlWorkBook.Sheets[sheet];
        // Keep adding sheets until there is one per CSV file.
        if (i < cvsFiles.Length - 1)
        {
            xlWorkBook.Worksheets.Add(Type.Missing, xlWorkSheet, Type.Missing, Type.Missing);
        }
        int sheetRow = 1;
        StreamReader readerd = new StreamReader(File.OpenRead(cvsFiles[i]));
        int ColumLength = 0;
        while (!readerd.EndOfStream)
        {
            string line = readerd.ReadLine();
            Console.WriteLine(line);
            try
            {
                // Split on quote characters: even indices are the separators
                // between fields, odd indices are the field contents.
                string[] columns = line.Split((new char[] { '\"' }));
                for (int col = 0; col < columns.Length; col++)
                {
                    if (ColumLength < columns.Length)
                    {
                        ColumLength = columns.Length;
                    }
                    if (col % 2 == 0)
                    {
                        // separator segment — skip
                    }
                    else if (columns[col] == "")
                    {
                        // empty field — leave the cell blank
                    }
                    else
                    {
                        xlWorkSheet.Cells[sheetRow, col + 1] = columns[col].Replace("\"", "");
                    }
                }
                sheetRow++;
            }
            catch (Exception e)
            {
                // NOTE(review): parse errors are recorded but otherwise swallowed,
                // matching the original best-effort behaviour.
                string msg = e.Message;
            }
        }
        // Remove the "separator" columns left over from the split-on-quote scheme.
        int k = 1;
        for (int s = 1; s <= ColumLength; s++)
        {
            xlWorkSheet.Columns[k].Delete();
            k++;
        }
        releaseObject(xlWorkSheet);
        readerd.Close();
    }
    xlWorkBook.SaveAs(outfilePDFExcel1, Microsoft.Office.Interop.Excel.XlFileFormat.xlWorkbookNormal,
    misValue, misValue, misValue, misValue, Microsoft.Office.Interop.Excel.XlSaveAsAccessMode.xlExclusive,
    misValue, misValue, misValue, misValue, misValue);
    xlWorkBook.Close(true, misValue, misValue);
    xlApp.Quit();
    releaseObject(xlWorkBook);
    releaseObject(xlApp);
    // Clean up the temporary CSV folder (clear read-only first so Delete succeeds).
    var dir = new DirectoryInfo(pathString);
    dir.Attributes = dir.Attributes & ~FileAttributes.ReadOnly;
    dir.Delete(true);
}
The best approach is probably to use a third-party DLL:
namespace ConsoleApp2
{
    /// <summary>
    /// Converts a PDF file to an Excel workbook using the SautinSoft PdfFocus
    /// library, then opens the result with the system default application.
    /// </summary>
    internal class Program
    {
        static void Main(string[] args)
        {
            // The source PDF path is hard-coded; the output .xls is placed next to it.
            // FIX: the original used '#"D:\abc\abc.pdf"', which does not compile —
            // C# verbatim string literals use the '@' prefix.
            string pathToPdf = @"D:\abc\abc.pdf";
            string pathToExcel = Path.ChangeExtension(pathToPdf, ".xls");
            SautinSoft.PdfFocus f = new SautinSoft.PdfFocus();
            // Only tabular data should become spreadsheet content.
            f.ExcelOptions.ConvertNonTabularDataToSpreadsheet = false;
            // 'true' = Preserve original page layout.
            // 'false' = Place tables before text.
            f.ExcelOptions.PreservePageLayout = true;
            // Use the en-US culture but with European-style separators
            // (',' for decimals, '.' for grouping) when formatting numbers.
            System.Globalization.CultureInfo ci = new System.Globalization.CultureInfo("en-US");
            ci.NumberFormat.NumberDecimalSeparator = ",";
            ci.NumberFormat.NumberGroupSeparator = ".";
            f.ExcelOptions.CultureInfo = ci;
            f.OpenPdf(pathToPdf);
            if (f.PageCount > 0)
            {
                // ToExcel returns 0 on success.
                int result = f.ToExcel(pathToExcel);
                // Open the resulting Excel workbook.
                if (result == 0)
                {
                    System.Diagnostics.Process.Start(pathToExcel);
                }
            }
        }
    }
}

Cannot implicitly convert type 'System.Data.DataSet' to 'System.Collections.Generic.List&lt;CalendarEvent&gt;'

I am new to the Data Access Application Block.
I am trying to get data from a database. The following is the code snippet.
JsonResponse.ashx:
public void ProcessRequest(HttpContext context)
{
    // Serves the current user's calendar events as a JSON array for the
    // requested [start, end] window. The 'start'/'end' query-string values
    // are Unix timestamps (seconds since 1970-01-01).
    context.Response.ContentType = "application/json";
    int user_id = Convert.ToInt32(HttpContext.Current.Session["userid"]);
    DateTime start = new DateTime(1970, 1, 1);
    DateTime end = new DateTime(1970, 1, 1);
    // FIX: parse machine-supplied timestamps with the invariant culture so a
    // comma-decimal server locale cannot misread the query values.
    start = start.AddSeconds(double.Parse(context.Request.QueryString["start"], CultureInfo.InvariantCulture));
    end = end.AddSeconds(double.Parse(context.Request.QueryString["end"], CultureInfo.InvariantCulture));
    // FIX: removed the unused '_context' local; build the payload with a
    // StringBuilder instead of repeated string concatenation.
    StringBuilder result = new StringBuilder("[");
    List<int> idList = new List<int>();
    foreach (CalendarEvent cevent in EventDAO.getEvents(start, end, user_id))
    {
        result.Append(convertCalendarEventIntoString(cevent));
        idList.Add(cevent.id);
    }
    // convertCalendarEventIntoString appends a trailing comma after each
    // event; strip the last one so the array stays well-formed.
    if (result.Length > 1 && result[result.Length - 1] == ',')
    {
        result.Length--;
    }
    result.Append("]");
    // Store the list of event ids in Session, so that it can be accessed in web methods.
    context.Session["idList"] = idList;
    context.Response.Write(result.ToString());
}
private String convertCalendarEventIntoString(CalendarEvent cevent)
{
    // Decide whether the event counts as "all day":
    //  - zero-length events (identical start/end timestamp) are all-day when
    //    they start exactly at midnight;
    //  - otherwise both start and end must fall exactly on midnight.
    bool startAtMidnight = cevent.start.Hour == 0 && cevent.start.Minute == 0 && cevent.start.Second == 0;
    bool endAtMidnight = cevent.end.Hour == 0 && cevent.end.Minute == 0 && cevent.end.Second == 0;
    bool sameInstant = ConvertToTimestamp(cevent.start).ToString().Equals(ConvertToTimestamp(cevent.end).ToString());

    String allDay;
    if (sameInstant)
    {
        allDay = startAtMidnight ? "true" : "false";
    }
    else
    {
        allDay = (startAtMidnight && endAtMidnight) ? "true" : "false";
    }

    // Emit one JSON-ish object literal; the caller strips the trailing comma
    // after the final event before closing the array.
    return "{" +
        "id: '" + cevent.id + "'," +
        "title: '" + HttpContext.Current.Server.HtmlEncode(cevent.title) + "'," +
        "start: " + ConvertToTimestamp(cevent.start).ToString() + "," +
        "end: " + ConvertToTimestamp(cevent.end).ToString() + "," +
        "allDay:" + allDay + "," +
        "user_id:" + cevent.user_id + "," +
        "description: '" + HttpContext.Current.Server.HtmlEncode(cevent.description) + "'" +
        "},";
}
DA:
public static List<CalendarEvent> getEvents(DateTime start, DateTime end, int user_id)
{
    // Fetches the user's events in [start, end] via the "GetData" stored
    // procedure and materializes them as a List<CalendarEvent>.
    List<CalendarEvent> events = new List<CalendarEvent>();
    SqlParameter[] sqlParam = new SqlParameter[3];
    // FIX: T-SQL parameter names are prefixed with '@'; the original '#start'
    // etc. would never bind to the stored procedure's parameters.
    sqlParam[0] = new SqlParameter("@start", start);
    sqlParam[1] = new SqlParameter("@end", end);
    sqlParam[2] = new SqlParameter("@user_id", user_id);
    // FIX: ExecuteDataset returns a DataSet, which cannot be returned directly
    // as a List<CalendarEvent> — iterate the first table and build the list.
    DataSet ds = SqlHelper.ExecuteDataset(connectionString, CommandType.StoredProcedure, "GetData", sqlParam);
    foreach (DataRow row in ds.Tables[0].Rows)
    {
        // NOTE(review): column names assumed to match the CalendarEvent
        // members used elsewhere in this handler — confirm against GetData.
        events.Add(new CalendarEvent
        {
            id = row.Field<int>("id"),
            title = row.Field<string>("title"),
            description = row.Field<string>("description"),
            start = row.Field<DateTime>("start"),
            end = row.Field<DateTime>("end"),
            user_id = row.Field<int>("user_id"),
        });
    }
    return events;
}
sqlhelper:
public static DataSet ExecuteDataset(SqlConnection connection, CommandType commandType, string commandText, params SqlParameter[] commandParameters)
{
    // Executes a command against the given open connection and returns the
    // results as a DataSet (DataTable names use ADO.NET defaults).
    // FIX: SqlCommand and SqlDataAdapter are IDisposable and were leaked;
    // wrap both in 'using' blocks.
    using (SqlCommand cmd = new SqlCommand())
    {
        cmd.CommandTimeout = 120;
        // Create a command and prepare it for execution.
        PrepareCommand(cmd, connection, (SqlTransaction)null, commandType, commandText, commandParameters);
        using (SqlDataAdapter da = new SqlDataAdapter(cmd))
        {
            // Fill the DataSet using default values for DataTable names, etc.
            DataSet ds = new DataSet();
            da.Fill(ds);
            // Detach the SqlParameters from the command object, so they can be used again.
            cmd.Parameters.Clear();
            return ds;
        }
    }
}
I am getting the error:
Cannot implicitly convert type 'System.Data.DataSet' to 'System.Collections.Generic.List&lt;CalendarEvent&gt;'.
I am unable to understand what the problem is.
In getEvents method, you need to iterate through the records in the dataset and fill in the list that you would return in this method.
// FIX: the DataSet was declared as 'dataset' but then used as 'ds';
// also iterate with an explicit DataRow (Rows is a non-generic collection).
var dataset = SqlHelper.ExecuteDataset(connectionString, CommandType.StoredProcedure, "GetData", sqlParam);
foreach (DataRow row in dataset.Tables["FooTable"].Rows)
{
    events.Add(new CalendarEvent(...)); // map the row's columns onto a CalendarEvent here
}
return events;
That's because you try to return a DataSet as a List&lt;CalendarEvent&gt;, which it isn't.
You need to convert the dataset to a list. A possible solution would be to change the getEvents method to something like this ->
public static List<CalendarEvent> getEvents(DateTime start, DateTime end, int user_id)
{
    // Runs the "GetData" stored procedure and projects the first result
    // table onto a List<CalendarEvent>.
    SqlParameter[] sqlParam = new SqlParameter[3];
    // FIX: '@' is the T-SQL parameter prefix; '#start' etc. would not bind.
    sqlParam[0] = new SqlParameter("@start", start);
    sqlParam[1] = new SqlParameter("@end", end);
    sqlParam[2] = new SqlParameter("@user_id", user_id);
    var ds = SqlHelper.ExecuteDataset(connectionString, CommandType.StoredProcedure, "GetData", sqlParam);
    // FIX: the original had a broken string literal ("Title) — the closing
    // quote was missing. Map the remaining columns the same way.
    return ds.Tables[0].AsEnumerable()
             .Select(datarow => new CalendarEvent { Title = datarow.Field<string>("Title"), /*the rest of your params*/ })
             .ToList();
}
Your problem is this piece of code:
public static List<CalendarEvent> getEvents(DateTime start, DateTime end, int user_id)
{
List<CalendarEvent> events = new List<CalendarEvent>();
SqlParameter[] sqlParam = new SqlParameter[3];
sqlParam[0] = new SqlParameter("#start", start);
sqlParam[1] = new SqlParameter("#end", end);
sqlParam[2] = new SqlParameter("#user_id", user_id);
return SqlHelper.ExecuteDataset(connectionString,CommandType.StoredProcedure, "GetData", sqlParam);
}
You defined the return type of this method as List&lt;CalendarEvent&gt;, but you return a DataSet.
I do not know which data tables are contained in your dataset, but I assume there is one which represents your calendar events.
This means you need to extract the data you want from the DataSet and build a list out of it. Assuming there is one table in your dataset, your new method would look something like this:
public static List<CalendarEvent> getEvents(DateTime start, DateTime end, int user_id)
{
    // Runs the "GetData" stored procedure and converts the first result
    // table into a List<CalendarEvent>.
    List<CalendarEvent> events = new List<CalendarEvent>();
    SqlParameter[] sqlParam = new SqlParameter[3];
    // FIX: '@' is the T-SQL parameter prefix; '#start' etc. would not bind.
    sqlParam[0] = new SqlParameter("@start", start);
    sqlParam[1] = new SqlParameter("@end", end);
    sqlParam[2] = new SqlParameter("@user_id", user_id);
    // FIX: the result was assigned to 'data' but then read as 'ds' (undefined);
    // also fixed the 'CalenderEvent' typo — the class is CalendarEvent.
    var ds = SqlHelper.ExecuteDataset(connectionString, CommandType.StoredProcedure, "GetData", sqlParam);
    events = ds.Tables[0].AsEnumerable().Select(r => new CalendarEvent
    {
        // Using dummy properties because I don't know your class.
        Property1 = r.Field<string>("Column1"),
        Property2 = r.Field<string>("column2"),
        //...
    }).ToList();
    return events;
}

How to append results in Processing?

I have implemented the Table() function in order to save the results generated by the application. However, it seems that the Timer function in the application causes the application to write over the existing CSV file each time it runs. Rather than write over the existing CSV file, I would like to append the newest search results to the existing CSV file. Is there a way to do this? Is it easier to append the results if the results are stored in a different format such as JSON?
Timer timer;
import java.util.List;
Table table;
long lastID = Long.MAX_VALUE;
void setup() {
    // FIX: build the results table BEFORE the first goTwitter() call.
    // goTwitter() adds rows to the global 'table', so calling it first
    // dereferenced a null table and crashed the sketch on startup.
    table = new Table();
    table.addColumn("id");
    table.addColumn("latitude");
    table.addColumn("longitude");
    // Re-run the search every 30 seconds (checked in draw()).
    timer = new Timer(30000);
    timer.start();
    goTwitter();
}
void draw(){
    // Each frame: once the 30-second countdown elapses, run another
    // Twitter search and restart the countdown.
    if (!timer.isFinished()) {
        return;
    }
    goTwitter();
    timer.start();
}
void goTwitter(){
    // Runs one Twitter search for "#love" via twitter4j, prints every
    // geo-tagged result, appends it to the global 'table', and saves the
    // CSV after each row.
    ConfigurationBuilder cb = new ConfigurationBuilder();
    // OAuth credentials are blank here; they must be filled in for the
    // search to succeed.
    cb.setOAuthConsumerKey("");
    cb.setOAuthConsumerSecret("");
    cb.setOAuthAccessToken("");
    cb.setOAuthAccessTokenSecret("");
    Twitter twitter = new TwitterFactory(cb.build()).getInstance();
    Query query = new Query("#love");
    int numberOfTweets = 300;
    ArrayList<Status> tweets = new ArrayList<Status>();
    // Page through results in batches of up to 100 (the API's per-request
    // cap) until numberOfTweets have been gathered.
    while (tweets.size () < numberOfTweets) {
        if (numberOfTweets - tweets.size() > 100)
            query.setCount(100);
        else
            query.setCount(numberOfTweets - tweets.size());
        //long lastID = Long.MAX_VALUE;
        try {
            QueryResult result = twitter.search(query);
            tweets.addAll(result.getTweets());
            println("Gathered " + tweets.size() + " tweets");
            // Track the smallest tweet id seen; 'lastID' is a global, so the
            // value persists across the timer-driven re-runs of this method.
            for (Status t: tweets)
                if(t.getId() < lastID) lastID = t.getId();
        }
        catch (TwitterException te) {
            // NOTE(review): on a failed request this loop retries forever
            // with no backoff — confirm that is intended.
            println("Couldn't connect: " + te);
        };
        // NOTE(review): setSinceId with the MINIMUM id looks like it was
        // meant to be setMaxId for backwards paging — verify against the
        // Twitter search API docs.
        query.setSinceId(lastID);
    }
    // Record only geo-tagged tweets.
    for (int i = 0; i < tweets.size(); i++) {
        Status t = (Status) tweets.get(i);
        GeoLocation loc = t.getGeoLocation();
        String user = t.getUser().getScreenName();
        String msg = t.getText();
        String time = "";
        if (loc!=null) {
            Double lat = t.getGeoLocation().getLatitude();
            Double lon = t.getGeoLocation().getLongitude();
            println(i + " USER: " + user + " wrote: " + msg + " located at " + lat + ", " + lon);
            TableRow newRow = table.addRow();
            newRow.setString("id", user);
            newRow.setDouble("latitude", lat);
            newRow.setDouble("longitude", lon);
            // saveTable rewrites the entire file once per row; it is also
            // what overwrites (rather than appends to) the CSV on each run.
            saveTable(table, "data2/syria_16500_5.csv");
        }
    }
    println("lastID= " + lastID);
}
class Timer {
    // Simple countdown timer driven by the sketch clock (millis()).
    int savedTime;  // millis() reading captured by start()
    int totalTime;  // countdown length in milliseconds

    Timer (int tempTotalTime) {
        totalTime = tempTotalTime;
    }

    // Record the current time as the countdown's starting point.
    void start(){
        savedTime = millis();
    }

    // True once more than totalTime ms have elapsed since start().
    boolean isFinished() {
        return millis() - savedTime > totalTime;
    }
}
Well, there does not seem to be a direct implementation to append to a table, so you'll have to resort to a hack: load the table in processing, write to it and resave it, sort of like this:
processing.data.Table table;
void setup() {
    // Append to the CSV if it already exists; otherwise start a fresh table.
    File f = new File(sketchPath("") + "data2/syria_16500_5.csv");
    println(f.getAbsolutePath());
    if (f.exists()) {
        // Reload the previously saved rows, keeping the header.
        table = loadTable("data2/syria_16500_5.csv", "header, csv");
    } else {
        table = new processing.data.Table();
        table.addColumn("id");
        table.addColumn("latitude");
        table.addColumn("longitude");
    }
    // Add one demo row, then write the whole table back out.
    TableRow row = table.addRow();
    row.setString("id", "asad");
    row.setDouble("latitude", 234);
    row.setDouble("longitude", 2523);
    saveTable(table, "data2/syria_16500_5.csv");
}
The sketch first checks if the file exists. If it does not, it creates a new table, otherwise it loads the old table in with its header.
Be warned, this is not particularly safe... If you change your columns (say, in a text editor) and try to run the sketch again you will get an exception.