I'm using an AJAX POST to send my column headers and data from a Handsontable back to an ashx handler.
$.ajax({
    type: 'POST',
    url: "Scripts/SaveExcelData.ashx",
    contentType: "application/json; charset=utf-8",
    data: JSON.stringify({ "columns": hot.getColHeader(), "rows": hot.getData() }),
    success: function (data) {}
});
Currently I'm using the following to deserialize the request, but I haven't been able to successfully convert the rows into an array of DataBaseRow class objects.
var jsonString = String.Empty;
context.Request.InputStream.Position = 0;
using (var inputStream = new StreamReader(context.Request.InputStream))
{
    jsonString = inputStream.ReadToEnd();
    var results = JsonConvert.DeserializeObject<dynamic>(jsonString);
    var columns = results.columns;
    var rows = results.rows;
    //use columns here to convert rows into DataBaseRow class
}
columns looks like: {["Col1","Col2","Col3"]}
rows looks like: {[["Val1","Val2","Val3"],["Val1","Val2","Val3"],["Val1","Val2","Val3"]]}
How can I do this?
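For reference, that payload corresponds to a small strongly-typed shape. A minimal sketch (the class name GridPayload is only illustrative, not something from the handler):

using System.Collections.Generic;
using Newtonsoft.Json;

public class GridPayload // illustrative name
{
    public List<string> columns { get; set; }     // ["Col1","Col2","Col3"]
    public List<List<string>> rows { get; set; }  // [["Val1","Val2","Val3"], ...]
}

// e.g. var payload = JsonConvert.DeserializeObject<GridPayload>(jsonString);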
UPDATE
I discovered that instead of trying to convert the dynamic object into the DataBaseRow class, I can simply loop through the array values manually and write them into new instances of DataBaseRow.
using (DBEntities edmx = new DBEntities())
{
    foreach (var row in rows)
    {
        DataBaseRow dbr = new DataBaseRow();
        edmx.DataBaseRow.Add(dbr);
        dbr.LoadedTime = DateTime.Now;
        for (int i = 0; i < row.Count; i++)
        {
            string colval = row[i].ToString();
            string colname = columns[i].ToString();
            switch (colname)
            {
                case "Col1":
                    dbr.DBCol1 = colval;
                    break;
                case "Col2":
                    dbr.DBCol2 = colval;
                    break;
                case "Col3":
                    dbr.DBCol3 = colval;
                    break;
            }
        }
    }
    edmx.SaveChanges();
}
This works, but it is incredibly slow (see the comments for timings). Is there a faster/better way to process this data? (If it matters, I actually have 14 columns being mapped in the switch.)
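As an aside on the mapping itself, the 14-case switch could be replaced with a lookup from column name to a setter delegate. Just a sketch, not benchmarked; it reuses the placeholder column names from above:

// Build the map once, outside the row loop.
var setters = new Dictionary<string, Action<DataBaseRow, string>>
{
    { "Col1", (r, v) => r.DBCol1 = v },
    { "Col2", (r, v) => r.DBCol2 = v },
    { "Col3", (r, v) => r.DBCol3 = v }
    // ...remaining columns
};

// Then, inside the loop over cells, instead of the switch:
// Action<DataBaseRow, string> set;
// if (setters.TryGetValue(colname, out set)) set(dbr, colval);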
So the technical answer to my question can be found in the UPDATE above (simply reference the dynamic objects as arrays; don't try to convert them).
However, Entity Framework turned out to be terribly slow at saving large datasets. It can be sped up by grouping the saves into chunks and recreating the context for each chunk: https://stackoverflow.com/a/5942176/266592
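A rough sketch of that chunking idea, assuming DBEntities derives from DbContext (the chunk size is arbitrary and the per-column mapping is the same as in the UPDATE above):

int chunkSize = 1000;                                    // arbitrary batch size
DBEntities edmx = new DBEntities();
edmx.Configuration.AutoDetectChangesEnabled = false;     // assumes a DbContext-derived context
try
{
    int count = 0;
    foreach (var row in rows)
    {
        DataBaseRow dbr = new DataBaseRow();
        dbr.LoadedTime = DateTime.Now;
        // ...map columns[i] / row[i] onto dbr exactly as in the switch above...
        edmx.DataBaseRow.Add(dbr);

        if (++count % chunkSize == 0)
        {
            edmx.SaveChanges();
            edmx.Dispose();
            edmx = new DBEntities();                     // fresh context keeps the change tracker small
            edmx.Configuration.AutoDetectChangesEnabled = false;
        }
    }
    edmx.SaveChanges();                                  // flush the final partial chunk
}
finally
{
    edmx.Dispose();
}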
I ended up rewriting this to insert the values into a DataTable and then use SqlBulkCopy to save the records to the database.
var jsonString = String.Empty;
context.Request.InputStream.Position = 0;
using (var inputStream = new StreamReader(context.Request.InputStream))
{
    jsonString = inputStream.ReadToEnd();
    var results = JsonConvert.DeserializeObject<dynamic>(jsonString);
    var columns = results.columns;
    var rows = results.rows;

    var dt = new DataTable();
    for (int i = 0; i < columns.Count; i++)
    {
        dt.Columns.Add(columns[i].ToString());
    }

    foreach (var row in rows)
    {
        var datarow = dt.NewRow();
        for (int i = 0; i < row.Count; i++)
        {
            datarow[i] = row[i];
        }
        dt.Rows.Add(datarow);
    }

    using (var connection = new SqlConnection(ConnectionString))
    {
        SqlTransaction transaction = null;
        connection.Open();
        try
        {
            transaction = connection.BeginTransaction();
            using (var sqlBulkCopy = new SqlBulkCopy(connection, SqlBulkCopyOptions.TableLock, transaction))
            {
                sqlBulkCopy.DestinationTableName = "TABLENAME";
                sqlBulkCopy.BatchSize = 100000;
                sqlBulkCopy.BulkCopyTimeout = 0;
                foreach (DataColumn col in dt.Columns)
                {
                    sqlBulkCopy.ColumnMappings.Add(col.ColumnName, col.ColumnName);
                }
                sqlBulkCopy.WriteToServer(dt);
            }
            transaction.Commit();
        }
        catch (Exception)
        {
            transaction.Rollback();
        }
    }
}