目前我有一个 11 MB 的大型 CSV 文件,其中包含许多需要追加导入的数据,大约是 100 万行、10 列。现在我想编写一段 C# 代码,能够更快地导入这个文件。
我做了什么?
首先,我编写了从 csv 文件中导入所有数据的代码:
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
using System.Collections;
using System.Data.OleDb;
using System.IO;
using System.Configuration;
using MySql.Data.MySqlClient;
namespace ControlDataBase
{
public partial class Import_data_mysql : Form
{
// Creates the form and wires up the designer-generated controls.
public Import_data_mysql()
{
InitializeComponent();
}
// Reference to the already-open New_Tables form; the OpenForms indexer
// returns null when no form with that name is open — TODO confirm callers
// guard against that before dereferencing frm2.
New_Tables frm2 = (New_Tables)Application.OpenForms["New_Tables"];
/// <summary>
/// Prompts the user for a semicolon-delimited CSV file and loads it into a
/// <see cref="DataTable"/>. The first line is treated as the header row and
/// defines the column names.
/// </summary>
/// <returns>
/// The populated table, or null when the dialog is cancelled or the file
/// has no header line.
/// </returns>
private DataTable ImportFile()
{
    using (OpenFileDialog ofd = new OpenFileDialog())
    {
        ofd.Title = "Open csv file";
        ofd.DefaultExt = "*.csv";
        ofd.Filter = "Documents (*.csv)|*.csv";
        // Bail out on cancel; the original code ignored the dialog result and
        // would throw when constructing file objects from an empty path.
        if (ofd.ShowDialog() != DialogResult.OK)
        {
            return null;
        }

        DataTable imported_data = new DataTable();
        using (StreamReader sr = new StreamReader(ofd.FileName))
        {
            string header = sr.ReadLine();
            if (string.IsNullOrEmpty(header))
            {
                MessageBox.Show("Not found or loaded not correct file.");
                return null;
            }
            string[] header_columns = header.Split(';');
            foreach (string header_column in header_columns)
            {
                imported_data.Columns.Add(header_column);
            }
            while (!sr.EndOfStream)
            {
                string line = sr.ReadLine();
                if (string.IsNullOrEmpty(line)) continue;
                string[] fields = line.Split(';');
                DataRow imported_row = imported_data.NewRow();
                // Clamp to the declared column count so a malformed row with
                // extra separators cannot throw IndexOutOfRangeException.
                int count = Math.Min(fields.Length, header_columns.Length);
                for (int i = 0; i < count; i++)
                {
                    imported_row[i] = fields[i];
                }
                imported_data.Rows.Add(imported_row);
            }
        }
        return imported_data;
    }
}
其次,我把数据插入 MySQL 数据库的 "order_status" 表:
/// <summary>
/// Inserts every row of the imported CSV data into try1.order_status,
/// resolving the foreign keys (worker, order, module, project) inside the
/// INSERT ... SELECT. Duplicate rows are skipped via INSERT IGNORE.
/// </summary>
/// <param name="imported_data">Table produced by ImportFile; must contain the
/// columns referenced below (ID_WORKER, FNAME, ... END_DATE).</param>
private void save_status_to_database(DataTable imported_data)
{
    string connect = "datasource=localhost;port=3306;username=root;password=;CharSet=utf8mb4";
    // NOTE: the original literal was not valid C# (missing opening quote and
    // terminating semicolon) and referenced a non-existent alias
    // "module.ID_MODULE" — the FROM clause joins try1.modules.
    const string query5 = @"INSERT IGNORE INTO try1.order_status(ID_WORKER, ID_ORDER, ID_MODULE, ID_PROJECT,
        AMOUNT_OF_PRODUCTS, BEGIN_DATE, END_DATE) SELECT workers.ID_WORKER, orders.ID_ORDER, modules.ID_MODULE,
        projects.ID, @AMOUNT_OF_PRODUCTS, @BEGIN_DATE, @END_DATE FROM try1.workers INNER JOIN try1.orders
        INNER JOIN try1.modules INNER JOIN try1.projects WHERE workers.FNAME = @FNAME AND workers.LNAME = @LNAME
        AND workers.ID_WORKER = @ID_WORKER AND orders.DESC_ORDER = @DESC_ORDER
        AND orders.ORDER_NUMBER = @ORDER_NUMBER AND modules.NAME = @MODULES_NAME
        AND projects.PROJECT_NAME = @PROJECT_NAME";

    using (MySqlConnection conn = new MySqlConnection(connect))
    {
        conn.Open();
        // One transaction + one reused command for the whole file: the original
        // built a fresh, undisposed MySqlCommand per row and autocommitted each
        // INSERT, which is the dominant cost for a million-row import.
        using (MySqlTransaction tx = conn.BeginTransaction())
        using (MySqlCommand cmd = new MySqlCommand(query5, conn, tx))
        {
            foreach (DataRow importRow in imported_data.Rows)
            {
                cmd.Parameters.Clear();
                cmd.Parameters.AddWithValue("@ID_WORKER", importRow["ID_WORKER"]);
                cmd.Parameters.AddWithValue("@FNAME", importRow["FNAME"]);
                cmd.Parameters.AddWithValue("@LNAME", importRow["LNAME"]);
                cmd.Parameters.AddWithValue("@DESC_ORDER", importRow["DESC_ORDER"]);
                cmd.Parameters.AddWithValue("@ORDER_NUMBER", importRow["ORDER_NUMBER"]);
                cmd.Parameters.AddWithValue("@MODULES_NAME", importRow["NAME"]);
                cmd.Parameters.AddWithValue("@PROJECT_NAME", importRow["PROJECT_NAME"]);
                cmd.Parameters.AddWithValue("@AMOUNT_OF_PRODUCTS", importRow["AMOUNT_OF_PRODUCTS"]);
                cmd.Parameters.AddWithValue("@BEGIN_DATE", importRow["BEGIN_DATE"]);
                cmd.Parameters.AddWithValue("@END_DATE", importRow["END_DATE"]);
                cmd.ExecuteNonQuery();
            }
            tx.Commit();
        }
    }
    MessageBox.Show("Imported to database.");
}
但是当我导入这个 11 MB 的大文件时,耗时非常长,大约需要 10 分钟。而且在运行到一半时,进程内存占用大约达到 5 GB!
现在我想知道如何加快从大型 csv 文件中导入数据的速度。MysqlBulkLoader会足够吗?也许应该以其他方式重写导入代码?有任何想法吗?感谢您的帮助。
我试过什么?
我试过在 x64 模式下运行,并在 App.config 中添加 <runtime> <gcAllowVeryLargeObjects enabled="true" /> </runtime>,但这并没有解决问题。