• This is one main problem with mass data in ADO.NET.

    Most people don't know that for every command sent, ADO.NET needs an open Transaction.

    If there is none, it will open one and Commit or Rollback depending on the result.

    So if you open a Transaction for those mass updates yourself, you save a lot of time.

    So instead of using 3,000,000 commands and therefore 3,000,000 transactions, just use one command,

    and an explicit Transaction.

    This alone cuts down the time needed in this experiment to about 25%.

    So change the OldSchool connection and command loop to

    using (dbConnection = new SqlConnection("Server=.\\sqlexpress;Database=Test;Trusted_Connection=True;"))
    {
        dbConnection.Open();

        // One explicit transaction for the whole batch -- avoids the implicit
        // per-command transaction ADO.NET would otherwise create for each insert.
        // Wrapping it in `using` guarantees Dispose: if anything throws before
        // Commit, disposing the (uncommitted) transaction rolls it back.
        using (SqlTransaction trans = dbConnection.BeginTransaction())
        {
            Console.WriteLine("{0} -- Calling the proc iteratively ...", DateTime.Now);

            using (SqlCommand importProc = new SqlCommand("INSERT INTO dbo.ImportTest (SillyIDField, MeaninglessText) VALUES (@SillyIDField, @MeaninglessText)", dbConnection))
            {
                // Create the parameters once (seeded from row 0 so AddWithValue
                // infers the SQL types) and reuse them for every row.
                SqlParameter sillyIDField = importProc.Parameters.AddWithValue("@SillyIDField", (int)tempTable.Rows[0][0]);
                SqlParameter meaninglessText = importProc.Parameters.AddWithValue("@MeaninglessText", (string)tempTable.Rows[0][1]);

                // The command must be explicitly enlisted in the transaction.
                importProc.Transaction = trans;

                for (int rowNum = 0; rowNum < tempTable.Rows.Count; rowNum++)
                {
                    // Only the parameter values change per row; the prepared
                    // command and its parameter objects are reused.
                    sillyIDField.Value = (int)tempTable.Rows[rowNum][0];
                    meaninglessText.Value = (string)tempTable.Rows[rowNum][1];
                    importProc.ExecuteNonQuery();
                }
            }

            // Commit while still inside the `using`; reaching this line means
            // every insert succeeded.
            trans.Commit();
        }

        Console.WriteLine("{0} -- Done importing the data old-school style ({1} Seconds)", DateTime.Now, elapsedSeconds);
    }

    And by the way, DataRow.ItemArray is only needed if you want to get/set a row's values as an Object[].