Bulk Insert into SQL Server 2008 - sql-server-2008

for (int i = 0; i < myClass.Length; i++)
{
    string upSql = "UPDATE CumulativeTable SET EngPosFT = @EngPosFT,EngFTAv=@EngFTAv WHERE RegNumber =@RegNumber AND Session=@Session AND Form=@Form AND Class=@Class";
    SqlCommand cmdB = new SqlCommand(upSql, connection);
    cmdB.CommandTimeout = 980000;
    cmdB.Parameters.AddWithValue("@EngPosFT", Convert.ToInt32(Pos.GetValue(i)));
    cmdB.Parameters.AddWithValue("@RegNumber", myClass.GetValue(i));
    cmdB.Parameters.AddWithValue("@EngFTAv", Math.Round((engtot / arrayCount), 2));
    cmdB.Parameters.AddWithValue("@Session", drpSess.SelectedValue);
    cmdB.Parameters.AddWithValue("@Form", drpForm.SelectedValue);
    cmdB.Parameters.AddWithValue("@Class", drpClass.SelectedValue);
    int idd = Convert.ToInt32(cmdB.ExecuteScalar());
}
Assuming myClass.Length is 60, this executes 60 separate UPDATE statements. How can I reduce it to a single statement? A code example based on the above would be appreciated. Thanks.
I tried using this:
StringBuilder command = new StringBuilder();
SqlCommand cmdB = null;
for (int i = 0; i < myClass.Length; i++)
{
    command.Append("UPDATE CumulativeTable SET" + " EngPosFT = " + Convert.ToInt32(Pos.GetValue(i)) + "," + " EngFTAv = " + Math.Round((engtot / arrayCount), 2) +
        " WHERE RegNumber = " + myClass.GetValue(i) + " AND Session= " + drpSess.SelectedValue + " AND Form= " + drpForm.SelectedValue + " AND Class= " + drpClass.SelectedValue + ";");
    // or command.AppendFormat("UPDATE CumulativeTable SET EngPosFT = {0},EngFTAv={1} WHERE RegNumber ={2} AND Session={3} AND Form={4} AND Class={5};", Convert.ToInt32(Pos.GetValue(i)), Math.Round((engtot / arrayCount), 2), myClass.GetValue(i), drpSess.SelectedValue, drpForm.SelectedValue, drpClass.SelectedValue);
} // a "maximum length is 128" error is encountered

Look at the BULK INSERT T-SQL command. I don't have a lot of personal experience with that command, but I do see an immediate opportunity to improve this code using the same SQL, by creating the command and parameters outside of the loop and only making the necessary changes inside the loop:
string upSql = "UPDATE CumulativeTable SET EngPosFT = @EngPosFT,EngFTAv=@EngFTAv WHERE RegNumber =@RegNumber AND Session=@Session AND Form=@Form AND Class=@Class";
SqlCommand cmdB = new SqlCommand(upSql, connection);
cmdB.CommandTimeout = 980000;
// I had to guess at the sql types you used here.
// Adjust this to match your actual column data types.
cmdB.Parameters.Add("@EngPosFT", SqlDbType.Int);
cmdB.Parameters.Add("@RegNumber", SqlDbType.Int);
// It's really better to use explicit types here, too.
// I'll just update the first parameter as an example of how it looks:
cmdB.Parameters.Add("@EngFTAv", SqlDbType.Decimal).Value = Math.Round((engtot / arrayCount), 2);
cmdB.Parameters.AddWithValue("@Session", drpSess.SelectedValue);
cmdB.Parameters.AddWithValue("@Form", drpForm.SelectedValue);
cmdB.Parameters.AddWithValue("@Class", drpClass.SelectedValue);
for (int i = 0; i < myClass.Length; i++)
{
    cmdB.Parameters["@EngPosFT"].Value = Convert.ToInt32(Pos.GetValue(i));
    cmdB.Parameters["@RegNumber"].Value = myClass.GetValue(i);
    // An UPDATE produces no result set, so ExecuteNonQuery (which returns
    // the number of affected rows) is the right call here.
    int idd = cmdB.ExecuteNonQuery();
}

It would be better in this case to create a stored procedure that accepts a Table Valued Parameter. On the .NET side of things, you create a DataTable object containing a row for each set of values you want to use.
On the SQL Server side of things, you can treat the parameter as another table in a query. So inside the stored proc, you'd have:
UPDATE a
SET
    EngPosFT = b.EngPosFT,
    EngFTAv = b.EngFTAv
FROM
    CumulativeTable a
    INNER JOIN @MyParm b
        ON a.RegNumber = b.RegNumber
       AND a.Session = b.Session
       AND a.Form = b.Form
       AND a.Class = b.Class
Where @MyParm is your table-valued parameter.
This will then be processed as a single round-trip to the server.
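For illustration, here is a minimal sketch of the .NET side. The table type dbo.CumulativeUpdateType and procedure dbo.UpdateCumulative are hypothetical names; match them to whatever you actually create on the server:
// Assumed (hypothetical) server-side setup:
//   CREATE TYPE dbo.CumulativeUpdateType AS TABLE
//       (EngPosFT int, EngFTAv decimal(9,2), RegNumber int,
//        Session varchar(20), Form varchar(20), Class varchar(20));
//   CREATE PROCEDURE dbo.UpdateCumulative
//       @MyParm dbo.CumulativeUpdateType READONLY
//   AS ... the UPDATE ... FROM join shown above ...
DataTable tvp = new DataTable();
tvp.Columns.Add("EngPosFT", typeof(int));
tvp.Columns.Add("EngFTAv", typeof(decimal));
tvp.Columns.Add("RegNumber", typeof(int));
tvp.Columns.Add("Session", typeof(string));
tvp.Columns.Add("Form", typeof(string));
tvp.Columns.Add("Class", typeof(string));
for (int i = 0; i < myClass.Length; i++)
{
    // One row per record; the same values the original loop sent one at a time.
    tvp.Rows.Add(Convert.ToInt32(Pos.GetValue(i)),
                 Math.Round((engtot / arrayCount), 2),
                 myClass.GetValue(i),
                 drpSess.SelectedValue,
                 drpForm.SelectedValue,
                 drpClass.SelectedValue);
}
using (SqlCommand cmd = new SqlCommand("dbo.UpdateCumulative", connection))
{
    cmd.CommandType = CommandType.StoredProcedure;
    SqlParameter p = cmd.Parameters.AddWithValue("@MyParm", tvp);
    p.SqlDbType = SqlDbType.Structured;      // marks it as a table-valued parameter
    p.TypeName = "dbo.CumulativeUpdateType"; // the server-side table type
    cmd.ExecuteNonQuery();                   // all 60 rows in one round-trip
}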

In such scenarios it is always best to write a Stored Procedure and call that stored proc in the for loop, passing the necessary arguments at each call.

using System;
using System.Data;
using System.Data.SqlClient;

namespace DataTableExample
{
    class Program
    {
        static void Main(string[] args)
        {
            DataTable prodSalesData = new DataTable("ProductSalesData");

            // Create Column 1: SaleDate
            DataColumn dateColumn = new DataColumn();
            dateColumn.DataType = Type.GetType("System.DateTime");
            dateColumn.ColumnName = "SaleDate";

            // Create Column 2: ProductName
            DataColumn productNameColumn = new DataColumn();
            productNameColumn.ColumnName = "ProductName";

            // Create Column 3: TotalSales
            DataColumn totalSalesColumn = new DataColumn();
            totalSalesColumn.DataType = Type.GetType("System.Int32");
            totalSalesColumn.ColumnName = "TotalSales";

            // Add the columns to the ProductSalesData DataTable
            prodSalesData.Columns.Add(dateColumn);
            prodSalesData.Columns.Add(productNameColumn);
            prodSalesData.Columns.Add(totalSalesColumn);

            // Let's populate the datatable with our stats.
            // You can add as many rows as you want here!

            // Create a new row
            DataRow dailyProductSalesRow = prodSalesData.NewRow();
            dailyProductSalesRow["SaleDate"] = DateTime.Now.Date;
            dailyProductSalesRow["ProductName"] = "Nike";
            dailyProductSalesRow["TotalSales"] = 10;

            // Add the row to the ProductSalesData DataTable
            prodSalesData.Rows.Add(dailyProductSalesRow);

            // Copy the DataTable to SQL Server using SqlBulkCopy
            using (SqlConnection dbConnection = new SqlConnection("Data Source=ProductHost;Initial Catalog=dbProduct;Integrated Security=SSPI;Connection Timeout=60;Min Pool Size=2;Max Pool Size=20;"))
            {
                dbConnection.Open();
                using (SqlBulkCopy s = new SqlBulkCopy(dbConnection))
                {
                    s.DestinationTableName = prodSalesData.TableName;
                    foreach (DataColumn column in prodSalesData.Columns)
                        s.ColumnMappings.Add(column.ColumnName, column.ColumnName);
                    s.WriteToServer(prodSalesData);
                }
            }
        }
    }
}
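To tie this back to the original UPDATE question: one common pattern is to bulk copy the new values into a staging table and then issue a single set-based UPDATE join, much like the table-valued-parameter answer above. A minimal sketch, where dbo.CumulativeStaging is an assumed pre-created staging table with the same columns:
// stagingData is a DataTable built the same way as prodSalesData above,
// with EngPosFT, EngFTAv, RegNumber, Session, Form and Class columns.
using (SqlBulkCopy bulk = new SqlBulkCopy(connection))
{
    bulk.DestinationTableName = "dbo.CumulativeStaging"; // hypothetical staging table
    bulk.WriteToServer(stagingData);
}
string mergeSql =
    "UPDATE a SET EngPosFT = b.EngPosFT, EngFTAv = b.EngFTAv " +
    "FROM CumulativeTable a INNER JOIN CumulativeStaging b " +
    "ON a.RegNumber = b.RegNumber AND a.Session = b.Session " +
    "AND a.Form = b.Form AND a.Class = b.Class";
using (SqlCommand merge = new SqlCommand(mergeSql, connection))
{
    merge.ExecuteNonQuery(); // one set-based statement instead of 60 round-trips
}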

Related

Fast way to update 10M records in a MySQL database using Qt

I have a 500MB file containing data that I need to use to update a table with 10M rows, so I use this code with the Qt SQL library:
QSqlDatabase db = QSqlDatabase::addDatabase("QMYSQL");
// my database info
db.transaction(); // start the transaction
QSqlQuery q;
QString sql = ""; // to append many sql statements to this string
QFile file(dataFile);
file.open(QIODevice::Text | QIODevice::ReadOnly);
QTextStream stream(&file);
int counter = 0;
clock_t tStart = clock();
while (!stream.atEnd()) {
    counter++;
    QString str = stream.readLine();
    QStringList list = str.split(';');
    QString id = list[0];
    QString streetName = list[1];
    QString HouseName = list[2];
    QString HouseNumber = list[3];
    QString city = list[4];
    QString postalCode = list[5];
    QString temp_sql = "UPDATE temp_table_copy SET street_name='!1', house_number='!2',postal_code='!3',address_city='!4', house='!5' where id=" + id + ";";
    temp_sql = temp_sql.replace("!1", streetName);
    temp_sql = temp_sql.replace("!2", HouseNumber);
    temp_sql = temp_sql.replace("!3", postalCode);
    temp_sql = temp_sql.replace("!4", city);
    temp_sql = temp_sql.replace("!5", HouseName);
    sql += temp_sql;
    if (sql.size() >= 100000) {
        if (!q.exec(sql)) {
            qDebug() << "Execute Error: " << q.lastError().text();
        }
        sql = "";
        printf("Finished: %d sql statements\n", counter);
    }
    if (counter == Number)
        break;
}
if (!sql.isEmpty()) {
    if (!q.exec(sql)) {
        qDebug() << "Execute Error: " << q.lastError().text();
    }
    printf("Finished: %d sql statements\n", counter);
}
db.commit(); // commit the transaction
printf("Time taken: %.2fs\n", (double)(clock() - tStart) / CLOCKS_PER_SEC);
db.close();
The idea is that I append statements to the big SQL string and, when it reaches 100,000 characters in size, execute it and empty the string, to avoid a bad allocation when the string takes up too much memory. I also read that MySQL has a limit on the size of the SQL statement that can be executed, which I get using:
SHOW VARIABLES LIKE 'max_allowed_packet';
The problem is that not all the data gets updated: some ids have a value in the text file but are not updated in the MySQL table. Those columns are NULL before the update, and when I count the rows that were updated I find only 7M, which means some ids were missed, even though the query reported no error. Any help or suggestions as to why this happened, or a better way to do it?
Thanks in advance.

ASP.NET: query with multiple WHERE conditions

1. I have a query but couldn't add a second WHERE condition. Please suggest the correct syntax.
2. And how can I fetch the data from the dropdownlist and show it in the gridview?
3. How can I fetch the value from the Tuition Fee column of my database when a condition is satisfied, and from Hostel Fee when it fails?
protected void BindGridview()
{
    constr = ConfigurationManager.ConnectionStrings["connstring_DETMIS"].ToString(); // connection string
    // String FID = DropDownList1.SelectedItem.Value;
    using (var conn = new MySql.Data.MySqlClient.MySqlConnection(constr)) {
        conn.Open();
        using (var cmd = new MySql.Data.MySqlClient.MySqlCommand("select * from fees_collect_category" + " where F_id =" + DropDownList1.SelectedItem.Value " and C_id=" + DropDownList2.SelectedItem.Value, conn)) {
            using (var reader = cmd.ExecuteReader()) {
                if (reader.HasRows) {
                    gvDetails.DataSource = reader;
                    gvDetails.DataBind();
                } else
                    lblWarning.Text = "There are no records..";
            }
        }
    }
}
Welcome to Stack Overflow. You should first research on Google why you are not able to add multiple conditions (it's just a simple syntax mistake).
The exact line of code would be something like this:
using (var cmd = new MySql.Data.MySqlClient.MySqlCommand("select * from fees_collect_category" +
    " where F_id = '" + DropDownList1.SelectedItem.Value + "' and C_id='" + DropDownList2.SelectedItem.Value + "'", conn))
NOTE: As a fellow developer, I won't suggest you do it this way, passing values directly, as it is dangerous and prone to SQL INJECTION.
I would rather tell you to use parameterized queries.
Hope that helps; for the future, go with the parameterized ones, as they are easy and technically preferred:
protected void BindGridview()
{
    String strConnString = ConfigurationManager
        .ConnectionStrings["connstring_DETMIS"].ConnectionString;
    String strQuery = "select * from student_details " +
        "where F_id=@F_Id and C_id=@C_Id";
    MySqlConnection con = new MySql.Data.MySqlClient.MySqlConnection(strConnString);
    MySqlCommand cmd = new MySql.Data.MySqlClient.MySqlCommand();
    cmd.Parameters.AddWithValue("@F_Id",
        DropDownList1.SelectedItem.Value);
    cmd.Parameters.AddWithValue("@C_Id",
        DropDownList2.SelectedItem.Value);
    cmd.CommandType = CommandType.Text;
    cmd.CommandText = strQuery;
    cmd.Connection = con;
    try
    {
        con.Open();
        gvDetails.DataSource = cmd.ExecuteReader();
        gvDetails.DataBind();
    }
    catch (Exception)
    {
        throw; // rethrow without resetting the stack trace
    }
    finally
    {
        con.Close(); // make sure the connection is released
    }
}

Parse MS Excel files dynamically with SSIS

I have a business requirement asking for the ability to have Excel files placed in a network location and the data from these files uploaded to a SQL Server database. The files will contain one worksheet of data. Each file corresponds to a table found within a known database. The files can and will correlate to multiple tables, and the target table is known only on opening the file, i.e., from the name of the worksheet. I'm currently creating a separate SSIS package for each of these files as they are uploaded to the shared drive, but sometimes I can't create the packages fast enough.
I guess my question is: is this type of dynamic parsing something that SSIS can accomplish from a Script Task within a Foreach container, or should I look into another option?
So far, I have the following... but as I've researched, I've come across posts similar to this: Extracting excel files with SSIS, and that is making me slightly concerned regarding the feasibility...
public void Main()
{
    // Create database connection
    SqlConnection myADONETConnection =
        (SqlConnection)Dts.Connections["db"].AcquireConnection(Dts.Transaction);
    // Obtain the location of the file(s)
    string NetworkLocation = (string)Dts.Variables["User::NetworkLocation"].Value;
    string[] dirs = Directory.GetFiles(NetworkLocation, "*.csv");
    Dts.TaskResult = (int)ScriptResults.Success;
}
So, any thoughts, ideas, or direction on what I should look into?
I wrote an SSIS package a few months ago that does exactly what you seek, plus a little more. In my case, several hundred Excel files containing one or more worksheets of differing names needed to be imported into a database as unique staging tables. Also, the column names and number of columns in each worksheet were unknown. Each worksheet became its own table, and the table name was a combination of the original Excel file name and the worksheet name (FileName__WorksheetName). I applied two underscores between the file name and worksheet name in case either contained underscores of its own. There are a few caveats to this process: 1) all of the Excel files must be located in the same folder; 2) the column headers in each worksheet must appear in the first row; and 3) the worksheet names must not contain any special characters (spaces are automatically replaced with an underscore).
Steps:
1) Create a For Each Loop Container. Under Collection, apply a "Foreach File Enumerator"; under Enumerator configuration, list the folder location and the Files. For Files you can list *.* or even *.xlsx or *.xls to filter to specific files. Apply Fully Qualified. For Variable Mappings, apply a string user variable like "ExcelFile" with an index of 0.
2) Add a Script Task in the For Each Loop Container. You will send it the ReadOnlyVariable "ExcelFile", and it will write to two new variables under ReadWriteVariables: "TableName" (a string) and "Worksheets" (an object that will hold a string array). Apply the following C# script. Note, since the following script will update your Excel files, you should be applying copies of your originals.
#region Namespaces
using System;
using System.Data;
using Microsoft.SqlServer.Dts.Runtime;
using System.Windows.Forms;
using System.IO;
using Excel = Microsoft.Office.Interop.Excel;
using System.Text;
using System.Linq;
using System.Threading.Tasks;
using System.Data.OleDb;
using System.Xml.Serialization;
#endregion

namespace xxxxxxxxx
{
    [Microsoft.SqlServer.Dts.Tasks.ScriptTask.SSISScriptTaskEntryPointAttribute]
    public partial class ScriptMain : Microsoft.SqlServer.Dts.Tasks.ScriptTask.VSTARTScriptObjectModelBase
    {
        public void Main()
        {
            // Includes full path, filename and extension... C:\Documents\ThisExcel.xlsx
            string xlFile = Dts.Variables["User::ExcelFile"].Value.ToString();
            // Remove path, changing value to "ThisExcel.xlsx"
            string NoPath = Path.GetFileName(xlFile);
            // Remove extension, changing value to "ThisExcel".
            // This is applied because the filename will become part of the name for new database tables.
            string tableName = Path.GetFileNameWithoutExtension(NoPath);
            // Replace any spaces with underscores in tableName (filename without path and extension)
            tableName = tableName.Replace(" ", "_");
            Dts.Variables["User::TableName"].Value = tableName;

            Excel.Application app = new Excel.Application();
            Excel.Workbook excelWorkbook;
            try
            {
                excelWorkbook = app.Workbooks.Open(xlFile);
                string tempsheet = " ";
                int CountWorksheets = excelWorkbook.Sheets.Count;
                //Dts.Variables["User::WorksheetCount"].Value = CountWorksheets;
                string[] Excelworksheets;
                Excelworksheets = new string[CountWorksheets];
                int x = 0;
                // Rename worksheets, replacing empty space with an underscore; needed for an SSIS import
                // and to avoid empty spaces in final table names.
                foreach (Excel.Worksheet sheet in excelWorkbook.Worksheets)
                {
                    tempsheet = sheet.Name;
                    tempsheet = tempsheet.Replace(" ", "_");
                    Excelworksheets[x++] = tempsheet.ToString();
                    sheet.Name = tempsheet;
                }
                Dts.Variables["User::Worksheets"].Value = Excelworksheets;
                excelWorkbook.Save();
                excelWorkbook.Close();
            }
            catch (Exception ex)
            {
                MessageBox.Show("Excel sheet rename failed for file " + xlFile + " based on " + ex.Message);
            }
            finally
            {
                app.Quit();
                app = null;
                GC.Collect();
                GC.WaitForPendingFinalizers();
            }
            Dts.TaskResult = (int)ScriptResults.Success;
        }

        #region ScriptResults declaration
        enum ScriptResults
        {
            Success = Microsoft.SqlServer.Dts.Runtime.DTSExecResult.Success,
            Failure = Microsoft.SqlServer.Dts.Runtime.DTSExecResult.Failure
        };
        #endregion
    }
}
3) After saving and building the above C# script task, add another For Each Loop Container inside the earlier one, below the script task just created. This will loop through each worksheet in each Excel file (if a file has only one worksheet, that is fine). It will apply an Enumerator of "Foreach From Variable Enumerator" over the "Worksheets" array variable populated by the script task above, and will write each entry to a new user string variable called "Worksheet" with an Index of 0.
4) Within this new nested For Each Loop Container, add a script task that will build the SQL to create the database table for each worksheet. The tricky part I had to deal with here was defining the field types, since this is not retained from the Excel worksheets or text CSV files. So I made them all nvarchar(255) or, if the column header was something like Remark or Description, nvarchar(max), which holds up to about 2 GB of text (plain nvarchar tops out at 4,000 characters). Here is the dynamic code I applied, stemming from what you began.
#region Namespaces
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Data;
using System.Data.OleDb;
using System.Xml.Serialization;
using System.IO;
using Microsoft.SqlServer.Dts.Runtime;
using System.Windows.Forms;
#endregion

namespace yyyyyyyyyy
{
    [Microsoft.SqlServer.Dts.Tasks.ScriptTask.SSISScriptTaskEntryPointAttribute]
    public partial class ScriptMain : Microsoft.SqlServer.Dts.Tasks.ScriptTask.VSTARTScriptObjectModelBase
    {
        public void Main()
        {
            string xlFile = Dts.Variables["User::ExcelFile"].Value.ToString(); // Includes full path and filename with extension
            //xlFilex = xlFilex.Replace(@"\", @"\\");
            string worksheet = Dts.Variables["User::Worksheet"].Value.ToString(); // Worksheet name from Excel file.
            string Tablename = Dts.Variables["User::TableName"].Value.ToString(); // Currently the file name without path and extension. Spaces replaced by underscores.
            string ExcelExtension = Path.GetExtension(xlFile);
            string columnName = "";
            string columnType = "";
            int i = 0;
            string worksheet2 = worksheet + "$";

            OleDbConnection xl = new OleDbConnection("Provider=Microsoft.ACE.OLEDB.12.0;Data Source=" + xlFile + ";Extended Properties=\"Excel 12.0 Xml;HDR=YES;IMEX=1\"");
            xl.Open();
            System.Data.DataTable dt = xl.GetOleDbSchemaTable(OleDbSchemaGuid.Columns, new object[] { null, null, worksheet2, null });
            List<string> listColumn = new List<string>();

            // Create the name of the table that will be created in the SQL Server database, which is
            // a concatenation of the root file name and worksheet name separated by two underscores.
            Tablename = Tablename + "__" + worksheet;
            string CreateTable = "CREATE TABLE " + Tablename + " (";
            string InsertTable = "INSERT INTO " + Tablename + " (";
            string SelectColumns = "";

            // Create the string that will be applied to create the table, defining the field types based on the names
            foreach (DataRow row in dt.Rows)
            {
                listColumn.Add(row["Column_name"].ToString());
                columnName = listColumn[i].ToString();
                if ((columnName == "Remark") || (columnName == "remark") || (columnName == "REMARK") ||
                    (columnName == "Remarks") || (columnName == "remarks") || (columnName == "REMARKS") ||
                    (columnName.Contains("Remarks")) || (columnName.Contains("remarks")) || (columnName.Contains("REMARKS")) ||
                    (columnName.Contains("Remark")) || (columnName.Contains("remark")) || (columnName.Contains("REMARK")) ||
                    (columnName == "Comment") || (columnName == "comment") || (columnName == "COMMENT") ||
                    (columnName == "Comments") || (columnName == "comments") || (columnName == "COMMENTS") ||
                    (columnName == "Description") || (columnName == "description") || (columnName == "DESCRIPTION") ||
                    (columnName.Contains("Description")) || (columnName.Contains("description")) || (columnName.Contains("DESCRIPTION")) ||
                    (columnName == "Legal") || (columnName == "legal") || (columnName == "LEGAL") ||
                    (columnName == "Note") || (columnName == "note") || (columnName == "NOTE") ||
                    (columnName.Contains("Format")) || (columnName.Contains("format")) || (columnName.Contains("FORMAT")) ||
                    (columnName == "Notes") || (columnName == "notes") || (columnName == "NOTES")
                    )
                {
                    columnType = "nvarchar(max),";
                }
                else
                {
                    columnType = "nvarchar(255),";
                }
                CreateTable = CreateTable + "[" + columnName + "] " + columnType;
                InsertTable = InsertTable + "[" + columnName + "],";
                SelectColumns = SelectColumns + "[" + columnName + "],";
                //MessageBox.Show(columnName + " " + columnType);
                i++;
            }

            // Remove last comma from CreateTable and add closing
            CreateTable = CreateTable.Remove(CreateTable.Length - 1);
            CreateTable = CreateTable + ")";
            // Remove last comma from InsertTable and add closing
            InsertTable = InsertTable.Remove(InsertTable.Length - 1);
            InsertTable = InsertTable + ")";
            // Remove last comma from SelectColumns
            SelectColumns = SelectColumns.Remove(SelectColumns.Length - 1);
            xl.Close();

            string SQL = "";
            // Assemble the dynamic SQL that will be applied in the SQL task next to generate and populate a new database table
            if (ExcelExtension == ".xlsx")
            {
                SQL = "IF OBJECT_ID ('dbo." + Tablename + "') IS NOT NULL DROP TABLE dbo." + Tablename +
                    " " + CreateTable + " " +
                    InsertTable + " " + "SELECT " + SelectColumns + " FROM OPENROWSET('Microsoft.ACE.OLEDB.12.0', " +
                    //" INSERT INTO [dbo].[" + Tablename + "] SELECT * FROM OPENROWSET('Microsoft.ACE.OLEDB.12.0', " +
                    "'Excel 12.0 Xml;HDR=YES;Database=" + xlFile + "', 'SELECT * FROM [" + worksheet + "$]');";
            }
            else if (ExcelExtension == ".xls")
            {
                SQL = "IF OBJECT_ID ('dbo." + Tablename + "') IS NOT NULL DROP TABLE dbo." + Tablename +
                    " " + CreateTable + " " +
                    " INSERT INTO [dbo].[" + Tablename + "] SELECT * FROM OPENROWSET('Microsoft.Jet.OLEDB.4.0', " +
                    "'Excel 8.0 Xml;HDR=YES;Database=" + xlFile + "', 'SELECT * FROM [" + worksheet + "$]');";
            }
            //MessageBox.Show(SQL);
            Dts.Variables["User::CreateTableSQL"].Value = SQL;
            Dts.TaskResult = (int)ScriptResults.Success;
        }

        #region ScriptResults declaration
        enum ScriptResults
        {
            Success = Microsoft.SqlServer.Dts.Runtime.DTSExecResult.Success,
            Failure = Microsoft.SqlServer.Dts.Runtime.DTSExecResult.Failure
        };
        #endregion
    }
}
Looking at the above script, you'll notice that the following ReadOnlyVariables will need to be declared: ExcelFile, SourceFolder, TableName, tempFileName, and Worksheet. The following ReadWriteVariables will need to be declared: ColumnCount, CreateTable, InsertTableName, and CreateTableSQL, the last of which receives the generated SQL.
5) Within the nested ForEach Loop Container and just below the above Task script, add an Execute SQL Task that will run the sql contained in the CreateTableSQL variable. Be sure to set the SQLSourceType to "Variable". This will create and populate the table and even overwrite it if it already exists.
When done, you should have an outer For Each Loop Container holding the first script task, and inside it the nested For Each Loop Container with the second script task followed by the Execute SQL Task.
Hope this helps and let me know if you have any questions. I did not have time to remove the extraneous stuff, but this should put you on the right path. This loop container is for Excel files, but you can add on other loop containers with code modified to handle CSV or other file types. All of this can be enclosed in a single SSIS package.
The final SQL task just runs the following TSQL that looks for field names in your database containing a space between words and replaces that space with an underscore. It is not necessary, but avoids having to apply SQL with columns wrapped with brackets [].
DECLARE My_Cursor CURSOR
FOR
SELECT 'sp_rename ''' + table_name + '.[' + column_name + ']'',''' + replace(column_name, ' ', '_') + ''',''COLUMN'''
FROM information_schema.columns
WHERE column_name LIKE '% %'

OPEN My_Cursor
DECLARE @SQL NVARCHAR(1000)
FETCH NEXT FROM My_Cursor INTO @SQL
WHILE @@FETCH_STATUS <> -1
BEGIN
    EXECUTE sp_executesql @SQL
    FETCH NEXT FROM My_Cursor INTO @SQL
END
CLOSE My_Cursor
DEALLOCATE My_Cursor

How to store column values in an array in ASP.NET

I am writing code in ASP.NET. I have a column named c_name in the database, and I want to store all of its values in an array. Here is my incomplete code; I am confused about what to do here:
String str = "select c_name from contacts where user_id = " + user_id + "";
MySqlCommand cmd = new MySqlCommand(str, dbConnection);
cmd.ExecuteNonQuery();
MySqlDataReader mdr = cmd.ExecuteReader(CommandBehavior.CloseConnection);
while (mdr.Read())
{
}
Please help.
Check the code below:
String str = "select c_name from contacts where user_id = " + user_id + "";
MySqlCommand cmd = new MySqlCommand(str, dbConnection);
MySqlDataReader mdr = cmd.ExecuteReader(CommandBehavior.CloseConnection);
List<string> list = new List<string>();
while (mdr.Read())
{
    list.Add(mdr.GetString(0));
}
string[] strMyArray = list.ToArray<string>();
I suspect one of your problems is this line:
cmd.ExecuteNonQuery();
You're actually executing cmd twice, once with this line and once again when you create your reader. I've also added a little code to copy the results into a list. Try the following:
string str = "select c_name from contacts where user_id = " + user_id + "";
MySqlCommand cmd = new MySqlCommand(str, dbConnection);
MySqlDataReader mdr = cmd.ExecuteReader(CommandBehavior.CloseConnection);
// create a list to hold the results in
List<string> results = new List<string>();
while (mdr.Read())
{
    // for each row, read a string and add it to the list
    results.Add(mdr.GetString(0));
}
You've asked for an array - I've used a list because it's easier. If you REALLY want an array then just call:
var theArray = results.ToArray();
I would also suggest throwing in some using blocks around the connection, command and reader.
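For instance, here is a minimal sketch of the same query wrapped in using blocks, with the user_id value parameterized as well (the @user_id parameter name is just illustrative):
List<string> results = new List<string>();
using (MySqlConnection conn = new MySqlConnection(connectionString))
using (MySqlCommand cmd = new MySqlCommand(
    "select c_name from contacts where user_id = @user_id", conn))
{
    cmd.Parameters.AddWithValue("@user_id", user_id); // avoids SQL injection too
    conn.Open();
    using (MySqlDataReader mdr = cmd.ExecuteReader())
    {
        while (mdr.Read())
        {
            results.Add(mdr.GetString(0));
        }
    }
} // reader, command and connection are all disposed here, even if an exception is thrown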

SQL Server 2008 tuning with large transactions (700k+ rows/transaction)

So, I'm working on a database that I will be adding to my future projects as sort of a supporting db, but I'm having a bit of an issue with it, especially the logs.
The database basically needs to be updated once a month. The main table has to be purged and then refilled off of a CSV file. The problem is that Sql Server will generate a log for it which is MEGA big. I was successful in filling it up once, but wanted to test the whole process by purging it and then refilling it.
That's when I get an error that the log file is filled up. It jumps from 88MB (after shrinking via maintenance plan) to 248MB and then stops the process altogether and never completes.
I've capped its growth at 256MB, incrementing by 16MB, which is why it failed, but in reality I don't need it to log anything at all. Is there a way to just completely bypass logging on any query being run against the database?
Thanks for any responses in advance!
EDIT: Per the suggestions of @mattmc3, I've implemented SqlBulkCopy for the whole procedure. It works AMAZINGLY, except that my loop is somehow crashing on the very last remaining chunk that needs to be inserted. I'm not too sure where I'm going wrong; heck, I don't even know if this is a proper loop, so I'd appreciate some help with it.
I do know that it's an issue with the very last GetDataTable or SetSqlBulkCopy call. I'm trying to insert 788,189 rows; 788,000 get in and the remaining 189 are crashing...
string[] Rows;
using (StreamReader Reader = new StreamReader("C:/?.csv")) {
    Rows = Reader.ReadToEnd().TrimEnd().Split(new char[1] {
        '\n'
    }, StringSplitOptions.RemoveEmptyEntries);
};

int RowsInserted = 0;
using (SqlConnection Connection = new SqlConnection("")) {
    Connection.Open();
    DataTable Table = null;
    while ((RowsInserted < Rows.Length) && ((Rows.Length - RowsInserted) >= 1000)) {
        Table = GetDataTable(Rows.Skip(RowsInserted).Take(1000).ToArray());
        SetSqlBulkCopy(Table, Connection);
        RowsInserted += 1000;
    };
    Table = GetDataTable(Rows.Skip(RowsInserted).ToArray());
    SetSqlBulkCopy(Table, Connection);
    Connection.Close();
};

static DataTable GetDataTable(
    string[] Rows) {
    using (DataTable Table = new DataTable()) {
        Table.Columns.Add(new DataColumn("A"));
        Table.Columns.Add(new DataColumn("B"));
        Table.Columns.Add(new DataColumn("C"));
        Table.Columns.Add(new DataColumn("D"));
        for (short a = 0, b = (short)Rows.Length; a < b; a++) {
            string[] Columns = Rows[a].Split(new char[1] {
                ','
            }, StringSplitOptions.RemoveEmptyEntries);
            DataRow Row = Table.NewRow();
            Row["A"] = Columns[0];
            Row["B"] = Columns[1];
            Row["C"] = Columns[2];
            Row["D"] = Columns[3];
            Table.Rows.Add(Row);
        };
        return (Table);
    };
}

static void SetSqlBulkCopy(
    DataTable Table,
    SqlConnection Connection) {
    using (SqlBulkCopy SqlBulkCopy = new SqlBulkCopy(Connection)) {
        SqlBulkCopy.ColumnMappings.Add(new SqlBulkCopyColumnMapping("A", "A"));
        SqlBulkCopy.ColumnMappings.Add(new SqlBulkCopyColumnMapping("B", "B"));
        SqlBulkCopy.ColumnMappings.Add(new SqlBulkCopyColumnMapping("C", "C"));
        SqlBulkCopy.ColumnMappings.Add(new SqlBulkCopyColumnMapping("D", "D"));
        SqlBulkCopy.BatchSize = Table.Rows.Count;
        SqlBulkCopy.DestinationTableName = "E";
        SqlBulkCopy.WriteToServer(Table);
    };
}
EDIT/FINAL CODE: So the app is now finished and works AMAZINGLY, and it's quite speedy! @mattmc3, thanks for all the help! Here is the final code for anyone who may find it useful:
List<string> Rows = new List<string>();
using (StreamReader Reader = new StreamReader(@"?.csv")) {
    string Line = string.Empty;
    while (!String.IsNullOrWhiteSpace(Line = Reader.ReadLine())) {
        Rows.Add(Line);
    };
};

if (Rows.Count > 0) {
    int RowsInserted = 0;
    DataTable Table = new DataTable();
    Table.Columns.Add(new DataColumn("Id"));
    Table.Columns.Add(new DataColumn("A"));
    while ((RowsInserted < Rows.Count) && ((Rows.Count - RowsInserted) >= 1000)) {
        Table = GetDataTable(Rows.Skip(RowsInserted).Take(1000).ToList(), Table);
        PerformSqlBulkCopy(Table);
        RowsInserted += 1000;
        Table.Clear();
    };
    Table = GetDataTable(Rows.Skip(RowsInserted).ToList(), Table);
    PerformSqlBulkCopy(Table);
};

static DataTable GetDataTable(
    List<string> Rows,
    DataTable Table) {
    for (short a = 0, b = (short)Rows.Count; a < b; a++) {
        string[] Columns = Rows[a].Split(new char[1] {
            ','
        }, StringSplitOptions.RemoveEmptyEntries);
        DataRow Row = Table.NewRow();
        Row["A"] = "";
        Table.Rows.Add(Row);
    };
    return (Table);
}

static void PerformSqlBulkCopy(
    DataTable Table) {
    using (SqlBulkCopy SqlBulkCopy = new SqlBulkCopy(@"", SqlBulkCopyOptions.TableLock)) {
        SqlBulkCopy.BatchSize = Table.Rows.Count;
        SqlBulkCopy.DestinationTableName = "";
        SqlBulkCopy.WriteToServer(Table);
    };
}
If you are doing a Bulk Insert into the table in SQL Server, which is how you should be doing this (BCP, Bulk Insert, Insert Into...Select, or in .NET, the SqlBulkCopy class) you can use the "Bulk Logged" recovery model. I highly recommend reading the MSDN articles on recovery models: http://msdn.microsoft.com/en-us/library/ms189275.aspx
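For illustration, here is a minimal sketch of switching to the bulk-logged model around a SqlBulkCopy load from .NET; the database name MyDb and table dbo.MainTable are placeholders, and this assumes the database normally runs in full recovery:
using (SqlConnection conn = new SqlConnection(connectionString))
{
    conn.Open();
    // Minimal logging for the duration of the load (MyDb is hypothetical).
    using (SqlCommand cmd = new SqlCommand("ALTER DATABASE MyDb SET RECOVERY BULK_LOGGED", conn))
    {
        cmd.ExecuteNonQuery();
    }
    try
    {
        // TableLock is one of the conditions for a minimally logged bulk load.
        using (SqlBulkCopy bulk = new SqlBulkCopy(conn, SqlBulkCopyOptions.TableLock, null))
        {
            bulk.DestinationTableName = "dbo.MainTable";
            bulk.WriteToServer(table); // the DataTable built from the CSV
        }
    }
    finally
    {
        // Switch back to full recovery afterwards (assuming that was the original setting).
        using (SqlCommand cmd = new SqlCommand("ALTER DATABASE MyDb SET RECOVERY FULL", conn))
        {
            cmd.ExecuteNonQuery();
        }
    }
}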
You can set the recovery model for each database separately. Maybe the simple recovery model will work for you. The simple model:
Automatically reclaims log space to keep space requirements small, essentially eliminating the need to manage the transaction log space.
Read up on it in the SQL Server recovery model documentation.
There is no way to bypass using the transaction log in SQL Server.