I have the below code that takes the sql data and exports it to xml or csv. The xml is processing at good speed but my csv sometimes takes 1 hour depending on how many data rows it exports. My question is my code effective writing the csv or it can be improved for speed? Has around 40 columns and x rows which can be over 10k.
public static void SqlExtract(this string queryStatement, string xFilePath, string fileName)
{
string connectionString = @"Data Source=ipaddress; Initial Catalog=name; User ID=username; Password=password";
using (SqlConnection _con = new SqlConnection(connectionString))
{
using (SqlCommand _cmd = new SqlCommand(queryStatement, _con))
{
using (SqlDataAdapter _dap = new SqlDataAdapter(_cmd))
{
if (fileName == "item1" || fileName == "item2" || fileName == "item3")
{
DataSet ds = new DataSet("FItem");
_con.Open();
_dap.Fill(ds);
_con.Close();
FileStream fs = new FileStream(xFilePath, FileMode.Create, FileAccess.Write, FileShare.None);
StreamWriter writer = new StreamWriter(fs, Encoding.UTF8);
ds.WriteXml(writer, XmlWriteMode.IgnoreSchema);
fs.Close();
StringWriter sw = new StringWriter();
ds.WriteXml(sw, XmlWriteMode.IgnoreSchema);
string OutputXML = sw.ToString();
OutputXML = OutputXML.Replace("Table", "Item");
System.IO.File.WriteAllText(xFilePath, OutputXML);
}
else
{
DataTable table1 = new DataTable("Table1");
_con.Open();
_dap.Fill(table1);
_con.Close();
string exportCSV = string.Empty;
foreach (DataRow row in table1.Rows)
{
int i = 1;
foreach (DataColumn column in table1.Columns)
{
if (row[1].ToString() == "002" && i > 41 || row[1].ToString() == "END" && i > 4)
{
//do nothing
}
else
{
if (i > 1)
{
exportCSV += ";";
}
exportCSV += row[column.ColumnName].ToString();
}
i++;
}
exportCSV += "\r\n";
}
//Write CSV
File.WriteAllText(xFilePath, exportCSV.ToString());
}
}
}
}
}