I am exporting 2 million records with 150+ columns to a CSV file, and the process uses around 30–40 GB of RAM. What is the best way to write this data to a CSV file without such high memory usage?
// NOTE(review): the 30-40 GB RAM usage comes from materializing all 2M rows in the
// DataTable up front, not from this write loop. To truly stream, replace the
// DataTable with a DbDataReader (e.g. SqlCommand.ExecuteReader) and write each row
// as it is read, so only one row is resident at a time — TODO confirm the data source
// supports a reader.
using (Stream stream = File.OpenWrite(fileInfo))
{
    // OpenWrite does not truncate; clear any leftover bytes from a longer old file.
    stream.SetLength(0);
    using (StreamWriter writer = new StreamWriter(stream))
    {
        // Header row. string.Join accepts IEnumerable<string>; no ToArray() copy needed.
        writer.WriteLine(string.Join(Seperator, newColumnList));

        // Hoist the loop-invariant format decision out of the 2M-iteration loop;
        // OrdinalIgnoreCase comparison avoids a per-row ToLower() string allocation.
        bool isTxt = string.Equals(fileextension, "txt", StringComparison.OrdinalIgnoreCase);

        // Convert.ToString returns "" for null (and for DBNull, same as ToString()),
        // so a null cell no longer throws NullReferenceException.
        // txt  -> raw value; csv -> value wrapped in quotes with embedded quotes doubled.
        Func<object, string> formatField = isTxt
            ? (Func<object, string>)(field => Convert.ToString(field))
            : field => string.Concat("\"", Convert.ToString(field).Replace("\"", "\"\""), "\"");

        foreach (DataRow row in dataTable.Rows)
        {
            writer.WriteLine(string.Join(Seperator, row.ItemArray.Select(formatField)));
        }
        // No explicit Flush() needed: disposing the StreamWriter flushes and closes it.
    }
}