SQL bulk insert/update

I have C# code which reads from a flat file and inserts/updates rows in a database. It is taking a very long time to process. I need help changing the code to do a bulk insert/update. The code is shown below:

public static void ReadSouthCarolinaLienholders()
            {
                Logger.Instance.LogEvent("State: SC");

                //string file = @"c:\SC\liens.txt";
                string file = ConfigurationManager.AppSettings["sc_file_path"];

                List<string> list = new List<string>();
                using (StreamReader reader = new StreamReader(file))
                {
                    string line;
                    while ((line = reader.ReadLine()) != null)
                    {
                        list.Add(line); // Add to list.                        
                    }
                }

                using (SqlConnection cn = new SqlConnection(ConfigurationManager.AppSettings["sc_DSN"]))
                {
                    string sql1 = "UPDATE sc_lienholder SET is_active = 'N'";
                    string sql = "INSERT INTO sc_lienholder(lienholder_code,name,address,address2,city,state,zipcode,elt_status,is_active,insert_dtim,insert_person) VALUES(@lienholder_code,@name,@address,@address2,@city,@state,@zipcode,@elt_status,@is_active,@insert_dtim,@insert_person)";
                    string sqlUpdate = "UPDATE sc_lienholder SET name= @name, address = @address, address2 = @address2, city  = @city, state = @state, zipcode = @zipcode, elt_status = @elt_status, is_active = @is_active, update_dtim = @update_dtim, update_person=@update_person " +
                    "where lienholder_code = @lienholder_code";
                    Logger.Instance.LogEvent("Begin Updating existing records to inactive in DB");
                    using (SqlCommand cmd = new SqlCommand(sql1, cn))
                    {
                        cn.Open();
                        cmd.ExecuteNonQuery();                      
                    }
                    Logger.Instance.LogEvent("Complete Updating existing records to inactive in DB");
                    string sBackupTable = "sc_lienholder_" + DateTime.Now.Year.ToString() + DateTime.Now.Month.ToString() + DateTime.Now.Day.ToString();
                    string sqlbackup = @"SELECT * into " + sBackupTable + " FROM sc_lienholder";
                    Logger.Instance.LogEvent("Begin backing up the table");
                    using (SqlCommand cmd = new SqlCommand(sqlbackup, cn))
                    {
                        cmd.ExecuteNonQuery();
                    }
                    Logger.Instance.LogEvent("End backing up the table");
                    // define the SqlCommand to do the insert - use the using() approach again
                    Logger.Instance.LogEvent("Begin updating the DB");
                    //using (SqlCommand cmd = new SqlCommand(sql, cn))
                    //{
                       // cn.Open();
                        for (int index = 0; index < list.Count; index++)
                           // try
                            {
                                string[] items = list[index].Substring(53).Split(new char[] { ',' });
                                string item = list[index].Substring(0, 51);
                                SqlCommand check_User_Name = new SqlCommand("select COUNT(*) FROM sc_lienholder WHERE lienholder_code=@lienholder_code", cn);  
                                    check_User_Name.Parameters.AddWithValue("@lienholder_code", items[9].Trim());                              
                                    int UserExist = (int)check_User_Name.ExecuteScalar();
                               
                                if (UserExist > 0)
                                {
                                    SqlCommand cmd_update = new SqlCommand(sqlUpdate, cn);
                                    try
                                    {
                                        cmd_update.Parameters.AddWithValue("@name", item.Replace("'", ""));
                                        cmd_update.Parameters.AddWithValue("@address", items[4]);
                                        cmd_update.Parameters.AddWithValue("@address2", items[5]);
                                        cmd_update.Parameters.AddWithValue("@city", items[6]);
                                        cmd_update.Parameters.AddWithValue("@state", items[7].Trim());//sf SC-275 04/02/2015
                                        cmd_update.Parameters.AddWithValue("@zipcode", items[8]);
                                        cmd_update.Parameters.AddWithValue("@lienholder_code", items[9].Trim()); //sf SC-77 03/09/2015
                                        if (items[10].Trim() == "E") //sf SC-77 03/23/2015
                                        {
                                            cmd_update.Parameters.AddWithValue("@elt_status", "Y"); //sf SC-77 03/23/2015
                                        }
                                        else
                                        {
                                            cmd_update.Parameters.AddWithValue("@elt_status", "N"); //sf SC-77 03/23/2015
                                        }
                                        cmd_update.Parameters.AddWithValue("@is_active", "Y");
                                        cmd_update.Parameters.AddWithValue("@update_dtim", DateTime.Now);
                                        cmd_update.Parameters.AddWithValue("@update_person", "Batch Job");
                                        cmd_update.ExecuteNonQuery();
                                        cmd_update.Parameters.Clear();
                                        Logger.Instance.LogEvent(String.Format("{0} lienholders updated", index));
                                    }
                                    catch (Exception ex)
                                    {
                                        cmd_update.Parameters.Clear();
                                        Logger.Instance.LogException(String.Format("Exception: {0}", ex.Message), ex);
                                        Emailer.SendMail(ex.Message, "Error in South Carolina Lien Console Application");
                                    }
                                }
                                else
                                {
                                    SqlCommand cmd = new SqlCommand(sql, cn);
                                    try
                                    {                                        
                                        cmd.Parameters.AddWithValue("@name", item.Replace("'", ""));
                                        cmd.Parameters.AddWithValue("@address", items[4]);
                                        cmd.Parameters.AddWithValue("@address2", items[5]);
                                        cmd.Parameters.AddWithValue("@city", items[6]);
                                        cmd.Parameters.AddWithValue("@state", items[7].Trim());//sf SC-275 04/02/2015
                                        cmd.Parameters.AddWithValue("@zipcode", items[8]);
                                        cmd.Parameters.AddWithValue("@lienholder_code", items[9].Trim()); //sf SC-77 03/09/2015
                                        if (items[10].Trim() == "E") //sf SC-77 03/23/2015
                                        {
                                            cmd.Parameters.AddWithValue("@elt_status", "Y"); //sf SC-77 03/23/2015
                                        }
                                        else
                                        {
                                            cmd.Parameters.AddWithValue("@elt_status", "N"); //sf SC-77 03/23/2015
                                        }
                                        cmd.Parameters.AddWithValue("@is_active", "Y");
                                        cmd.Parameters.AddWithValue("@insert_dtim", DateTime.Now);
                                        cmd.Parameters.AddWithValue("@insert_person", "Batch Job");
                                        cmd.ExecuteNonQuery();
                                        cmd.Parameters.Clear();
                                        Logger.Instance.LogEvent(String.Format("{0} lienholders inserted", index));
                                    }
                                    catch (Exception ex)
                                    {
                                        cmd.Parameters.Clear();
                                        Logger.Instance.LogException(String.Format("Exception: {0}", ex.Message), ex);
                                        Emailer.SendMail(ex.Message, "Error in South Carolina Lien Console Application");
                                    }
                                }
                               
                            }                      
                            cn.Close();
                }
            }

Meinhoonaa asked:

Bob Learned commented:
What operation is taking up the most time?
Meinhoonaa (Author) commented:
The row-by-row insert/update.
Bob Learned commented:
You could use the SqlBulkCopy class to insert records, but it does not handle updates. It would be possible to do the updates the way you have them now, and then do the inserts with SqlBulkCopy, which is optimized for bulk inserts.
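For the insert half, a minimal sketch of what that could look like, assuming the new rows (the lienholder codes not already in sc_lienholder) have first been collected into a DataTable whose column names match the table (the names below are taken from the question's INSERT statement):

// Sketch only: bulk-insert the rows that are not already in sc_lienholder.
// Requires System.Data, System.Data.SqlClient and System.Configuration.
DataTable newRows = new DataTable();
newRows.Columns.Add("lienholder_code", typeof(string));
newRows.Columns.Add("name", typeof(string));
newRows.Columns.Add("address", typeof(string));
newRows.Columns.Add("address2", typeof(string));
newRows.Columns.Add("city", typeof(string));
newRows.Columns.Add("state", typeof(string));
newRows.Columns.Add("zipcode", typeof(string));
newRows.Columns.Add("elt_status", typeof(string));
newRows.Columns.Add("is_active", typeof(string));
newRows.Columns.Add("insert_dtim", typeof(DateTime));
newRows.Columns.Add("insert_person", typeof(string));

// ... fill newRows from the parsed file lines here ...

using (SqlConnection cn = new SqlConnection(ConfigurationManager.AppSettings["sc_DSN"]))
{
    cn.Open();
    using (SqlBulkCopy bulkCopy = new SqlBulkCopy(cn))
    {
        bulkCopy.DestinationTableName = "sc_lienholder";
        // Map by name so the DataTable column order does not matter.
        foreach (DataColumn col in newRows.Columns)
        {
            bulkCopy.ColumnMappings.Add(col.ColumnName, col.ColumnName);
        }
        bulkCopy.WriteToServer(newRows);
    }
}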

Ioannis Paraskevopoulos commented:
There are some improvements you could make.

First of all, it seems that you load the whole file into memory. I do not know how big your file is, but this could hurt performance as the file grows. You could stream it line by line using 'IEnumerable' and 'yield return'. A sample could be:

public IEnumerable<string> GetFileLines(string file)
{
    using (StreamReader reader = new StreamReader(file))
    {
        string line;
        while ((line = reader.ReadLine()) != null)
        {
            yield return line;
        }
    }
}



This ensures that the memory footprint of reading the file grows only with the length of each line (the file is read line by line rather than loaded whole into memory). To use it, you just add a foreach statement:


string file = ConfigurationManager.AppSettings["sc_file_path"];
foreach (var line in GetFileLines(file))
{
    //process the lines here
}



Another thing you could do is use a single SQL statement instead of fetching data and deciding in your application.
For instance, right now you get data from the server and decide in the application whether to issue an INSERT or an UPDATE. Get rid of those trips back and forth by defining one bigger statement and letting the server decide. Something like:

IF(select COUNT(*) FROM sc_lienholder WHERE lienholder_code=@lienholder_code) > 0
    BEGIN
        --Update statement here
    END
ELSE
    BEGIN 
        --Insert statement here
    END

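As a rough illustration (not tested; the parameter names follow the question's code, and the constant values from the original are inlined), that combined statement could be run with one reusable SqlCommand so each line costs a single round trip instead of two or three:

string upsertSql =
    "IF (SELECT COUNT(*) FROM sc_lienholder WHERE lienholder_code = @lienholder_code) > 0 " +
    "BEGIN " +
    "    UPDATE sc_lienholder SET name = @name, address = @address, address2 = @address2, " +
    "        city = @city, state = @state, zipcode = @zipcode, elt_status = @elt_status, " +
    "        is_active = 'Y', update_dtim = GETDATE(), update_person = 'Batch Job' " +
    "    WHERE lienholder_code = @lienholder_code " +
    "END " +
    "ELSE " +
    "BEGIN " +
    "    INSERT INTO sc_lienholder(lienholder_code, name, address, address2, city, state, zipcode, elt_status, is_active, insert_dtim, insert_person) " +
    "    VALUES(@lienholder_code, @name, @address, @address2, @city, @state, @zipcode, @elt_status, 'Y', GETDATE(), 'Batch Job') " +
    "END";

using (SqlCommand cmd = new SqlCommand(upsertSql, cn))
{
    // Declare the parameters once; only the values change per line.
    cmd.Parameters.Add("@lienholder_code", SqlDbType.VarChar);
    cmd.Parameters.Add("@name", SqlDbType.VarChar);
    cmd.Parameters.Add("@address", SqlDbType.VarChar);
    cmd.Parameters.Add("@address2", SqlDbType.VarChar);
    cmd.Parameters.Add("@city", SqlDbType.VarChar);
    cmd.Parameters.Add("@state", SqlDbType.VarChar);
    cmd.Parameters.Add("@zipcode", SqlDbType.VarChar);
    cmd.Parameters.Add("@elt_status", SqlDbType.VarChar);

    foreach (var line in GetFileLines(file))
    {
        string[] items = line.Substring(53).Split(',');
        cmd.Parameters["@lienholder_code"].Value = items[9].Trim();
        cmd.Parameters["@name"].Value = line.Substring(0, 51).Replace("'", "");
        cmd.Parameters["@address"].Value = items[4];
        cmd.Parameters["@address2"].Value = items[5];
        cmd.Parameters["@city"].Value = items[6];
        cmd.Parameters["@state"].Value = items[7].Trim();
        cmd.Parameters["@zipcode"].Value = items[8];
        cmd.Parameters["@elt_status"].Value = items[10].Trim() == "E" ? "Y" : "N";
        cmd.ExecuteNonQuery(); // one round trip per line
    }
}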


Of course, if you feel like being more advanced, you can check out the "MERGE" statement.
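If the file rows are first bulk-loaded into a staging table, a single MERGE can then do all the updates and inserts set-based on the server. A sketch, where sc_lienholder_staging is a hypothetical staging table with the same columns as sc_lienholder:

// Sketch only: sc_lienholder_staging is assumed to have been filled from the
// file (for example with SqlBulkCopy) before this runs.
string mergeSql = @"
    MERGE sc_lienholder AS target
    USING sc_lienholder_staging AS source
        ON target.lienholder_code = source.lienholder_code
    WHEN MATCHED THEN
        UPDATE SET name = source.name, address = source.address,
                   address2 = source.address2, city = source.city,
                   state = source.state, zipcode = source.zipcode,
                   elt_status = source.elt_status, is_active = 'Y',
                   update_dtim = GETDATE(), update_person = 'Batch Job'
    WHEN NOT MATCHED BY TARGET THEN
        INSERT (lienholder_code, name, address, address2, city, state,
                zipcode, elt_status, is_active, insert_dtim, insert_person)
        VALUES (source.lienholder_code, source.name, source.address,
                source.address2, source.city, source.state, source.zipcode,
                source.elt_status, 'Y', GETDATE(), 'Batch Job');";

using (SqlCommand merge = new SqlCommand(mergeSql, cn))
{
    merge.ExecuteNonQuery();
}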

What I would favour most is using an ETL (Extract, Transform, Load) library in C#, such as Rhino-ETL.

You may check out a video demonstrating it here. This is quite advanced but very powerful.

Giannis
apeter commented:
Please make use of this tool: http://www.csvreader.com/csv_samples.php

Easy to implement.

1. Populate your model collection from your flat file.
2. Create an IDataReader object (the link has details) from your model collection.
3. Use SqlBulkCopy to push the data into the DB. Set the batch size to 100. It even has events raised after each batch is pushed, so you can show status or progress to the user; see the sketch below.
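Roughly, step 3 could look like this (a DataTable stands in for the IDataReader from step 2 just to keep the sketch self-contained; WriteToServer accepts either):

// Sketch of step 3: SqlBulkCopy with a batch size of 100 and a progress event.
using (SqlConnection cn = new SqlConnection(ConfigurationManager.AppSettings["sc_DSN"]))
{
    cn.Open();
    using (SqlBulkCopy bulkCopy = new SqlBulkCopy(cn))
    {
        bulkCopy.DestinationTableName = "sc_lienholder";
        bulkCopy.BatchSize = 100;       // rows are sent to the server in batches of 100
        bulkCopy.NotifyAfter = 100;     // raise SqlRowsCopied every 100 rows
        bulkCopy.SqlRowsCopied += (sender, e) =>
            Logger.Instance.LogEvent(String.Format("{0} rows copied", e.RowsCopied));

        bulkCopy.WriteToServer(newRows); // newRows: the DataTable (or IDataReader) built in steps 1-2
    }
}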