I've just started working with SQLite in C# to test various features of an application. For my first SQLite example I wanted to bulk-insert a large .csv file into a table (Person) with two columns, FirstName and LastName. The .csv file is 50 MB and consists of many repetitions of the same row ("abc, def").
I wrote the following code, but I suspect it is fairly inefficient, and I'm looking for advice on better ways to read the file and insert the rows into SQLite.
using System;
using System.Collections.Generic;
using System.IO;
using System.Text.RegularExpressions;
using System.Data.SQLite;

// Split on a comma followed by optional whitespace.
Regex splitRx = new Regex(@",\s*", RegexOptions.Compiled);

var rows = new List<string[]>();
using (StreamReader sr = new StreamReader(@"c:\Temp\test.csv"))
{
    string line = null;
    int ln = 0;
    while ((line = sr.ReadLine()) != null)
    {
        ln++;  // count every line, so the error message reports the actual line number
        string[] fields = splitRx.Split(line);
        if (fields.Length != 2)
        {
            Console.WriteLine("Invalid input on line: " + ln);
            continue;
        }
        rows.Add(fields);
    }
}
using (var conn = new SQLiteConnection(@"Data Source=C:\Temp\test.sqlite"))
{
    conn.Open();
    using (var cmd = new SQLiteCommand(conn))
    using (var transaction = conn.BeginTransaction())
    {
        foreach (string[] row in rows)
        {
            // The SQL text is rebuilt and re-parsed for every single row.
            cmd.CommandText =
                "INSERT INTO Person (FirstName, LastName) VALUES ('" + row[0] + "', '" + row[1] + "');";
            cmd.ExecuteNonQuery();
        }
        transaction.Commit();
    }
}
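For comparison, one direction I was considering is a parameterized command that is prepared once and reused for every row, so the SQL text is not re-parsed on each insert. This is only a sketch (untested), assuming the same Person table and the rows list built above:

using (var conn = new SQLiteConnection(@"Data Source=C:\Temp\test.sqlite"))
{
    conn.Open();
    using (var transaction = conn.BeginTransaction())
    using (var cmd = new SQLiteCommand(
        "INSERT INTO Person (FirstName, LastName) VALUES (@first, @last);", conn))
    {
        // Create the two parameters once; only their values change per row.
        var first = cmd.Parameters.Add("@first", System.Data.DbType.String);
        var last = cmd.Parameters.Add("@last", System.Data.DbType.String);
        foreach (string[] row in rows)
        {
            first.Value = row[0];
            last.Value = row[1];
            cmd.ExecuteNonQuery();
        }
        transaction.Commit();
    }
}

As a bonus, parameters should also avoid breaking on fields that happen to contain a quote character. Is this the right approach, or is there something better still (for example, streaming the file straight into the insert loop instead of buffering all 50 MB in the rows list first)?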