I am using an HTTP handler (.ashx) to handle file uploads. Once a file is selected, uploadFile() is called to save it to a temporary location. The file is then read, parsed and stored in a (C#) DataTable, which is used to populate a jQuery DataTable.
It is fine up to the point where it stores, opens and reads the file using a StreamReader. However, it takes too long when the file is large.
I searched, and it seems the problem is with storing each parsed line in the DataTable. In response to another user with a similar issue, it was recommended to read each line and store it in a DB (he needed to store it in a DB). But I can't adapt that suggestion to my needs, which are to store the data in a DataTable and use it to populate a table.
Is there any way to speed this up? I have to display the parsed content in a jQuery DataTable.
This is what I have currently ("FilesUpload" is the file input control's ID, with an "onChange" event):
JS function:
var logFile = [];

function onChange(oFile) {
    Array.prototype.forEach.call(oFile.files, function (file) {
        logFile.push({
            "id": 0,
            "fn": file.name,
            "fl": file.size,
            "fp": '',
            "ct": file.type
        });
        uploadFile();
    });
}

function uploadFile() {
    debugger;
    ...
    var uploadingfiles = $("#FilesUpload").get(0);
    var uploadedfiles = uploadingfiles.files;
    var formdata = new FormData();
    for (var i = 0; i < uploadedfiles.length; i++) {
formdata.append("file", $("#MultipleFilesUpload").prop("files")[i]);
    }
    formdata.append("ToUpload", JSON.stringify(logFile));
    formdata.append("UploadFolder", currUploadFolder);
    $.ajax({
        url: '<%= ResolveUrl("../UploadHandler.ashx") %>',
        type: 'post',
        data: formdata,
        contentType: false,
        cache: false,
        dataType: 'script',
        responseType: "json",
        processData: false
    }).done(function (result) {
        debugger;
        var jResult = JSON.parse(result);
        ...
        $("#lblSelectedFile").html(jResult.FileName);
    }).fail(function (jqXHR, textStatus, errorThrown) {
        debugger;
        ...
    });
}
HTTP Handler (UploadHandler.ashx):
public void ProcessRequest(HttpContext context)
{
    var MessageData = new object();
    if (context.Request.Files.Count > 0)
    {
        // Do something, save file in folder
        ....
        // Process the file
        JSONresult = ProcessLogFile_BufferedStream(sUploadFolder, fn);
    }
    else
    {
    }
    ....
}
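Roughly, the elided part just saves the posted file to the temporary folder and writes the parser's JSON back to the ajax call. A simplified sketch of that step (the App_Data path and the sUploadFolder / fn names are illustrative only):

// Simplified sketch, assuming a single posted file; the App_Data path and the
// variable names sUploadFolder / fn are illustrative.
HttpPostedFile postedFile = context.Request.Files[0];
string sUploadFolder = context.Server.MapPath("~/App_Data/Uploads/" + context.Request.Form["UploadFolder"]);
string fn = Path.GetFileName(postedFile.FileName);

if (!Directory.Exists(sUploadFolder))
    Directory.CreateDirectory(sUploadFolder);

// Save to the temporary location, then parse it
postedFile.SaveAs(Path.Combine(sUploadFolder, fn));
string JSONresult = ProcessLogFile_BufferedStream(sUploadFolder, fn);

// Return the serialized table to the client-side done() handler
context.Response.ContentType = "application/json";
context.Response.Write(JSONresult);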
I think this is the part that takes way too long, or the DataTable simply gets too big:
private string ProcessLogFile_BufferedStream(string Folder, string FileName)
{
    string JSONresult = string.Empty;
    if (string.IsNullOrEmpty(Folder) || string.IsNullOrEmpty(FileName))
        return JSONresult;

    DataTable dtIDCLog = new DataTable();
    dtIDCLog.Columns.Add("ReqTimestamp");
    dtIDCLog.Columns.Add("ReqDataLength");
    ...
    dtIDCLog.Columns.Add("RespTimestamp");
    dtIDCLog.AcceptChanges();

    Regex reAsciiPattern = new Regex(@"[^\u0000-\u007F]+");
    // Each line splits into six pipe-separated fields: group 2 = request timestamp,
    // group 3 = "length:{request data}", group 4 = response timestamp (see below)
    Regex ConParts = new Regex(@"^(.*?)\|(.*?)\|(.*?)\|(.*?)\|(.*?)\|(.*?)$");
    string sLine;
    string sTimestamp;
    int iLineNo = 0;

    using (FileStream fs = File.Open(Path.Combine(Folder, FileName), FileMode.Open, FileAccess.Read, FileShare.Read))
    using (BufferedStream bs = new BufferedStream(fs))
    using (StreamReader sr = new StreamReader(bs))
    {
        while ((sLine = sr.ReadLine()) != null)
        {
            if (!string.IsNullOrEmpty(sLine))
            {
                sLine = reAsciiPattern.Replace(sLine, ""); // remove non-ASCII chars
                DataRow drNew = dtIDCLog.NewRow();
                Match match = ConParts.Match(sLine);
                if (match.Success)
                {
                    int i = match.Groups.Count;
                    // Request portion
                    sTimestamp = match.Groups[2].Value;
                    drNew["ReqTimestamp"] = sTimestamp;
                    string[] sReqParts = match.Groups[3].Value.Split(':');
                    string sReqLen = sReqParts[0];
                    string sReq = sReqParts[1].Replace("{", "").Replace("}", "");
                    drNew["ReqDataLength"] = sReqLen;
                    ...
                    ...
                    // Response portion
                    sTimestamp = match.Groups[4].Value;
                    drNew["RespTimestamp"] = sTimestamp;
                    ...
                    ...
                    dtIDCLog.Rows.Add(drNew);
                    iLineNo++;
                }
                else
                {
                    // Error, clear data table, display error
                    drNew["Success"] = false;
                    dtIDCLog.Rows.Add(drNew);
                    break;
                }
            }
        }
    }

    JSONresult = JsonConvert.SerializeObject(dtIDCLog);
    return JSONresult;
}
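Would replacing the DataTable with a plain list of row objects, serialized the same way with Json.NET, be expected to make a real difference? This is the kind of thing I mean (a rough sketch only; LogRow, ProcessLogFile_List and the reduced column set are illustrative):

// Rough sketch: no DataTable, just a list of plain row objects serialized with
// Json.NET. LogRow and the reduced column set are illustrative only.
private class LogRow
{
    public string ReqTimestamp { get; set; }
    public string ReqDataLength { get; set; }
    public string RespTimestamp { get; set; }
    public bool Success { get; set; }
}

private string ProcessLogFile_List(string Folder, string FileName)
{
    var rows = new List<LogRow>();
    Regex reAsciiPattern = new Regex(@"[^\u0000-\u007F]+");
    Regex ConParts = new Regex(@"^(.*?)\|(.*?)\|(.*?)\|(.*?)\|(.*?)\|(.*?)$");

    // File.ReadLines streams the file line by line instead of loading it at once
    foreach (string line in File.ReadLines(Path.Combine(Folder, FileName)))
    {
        if (string.IsNullOrEmpty(line))
            continue;

        Match match = ConParts.Match(reAsciiPattern.Replace(line, ""));
        if (!match.Success)
        {
            // Same error handling as above: flag the row and stop
            rows.Add(new LogRow { Success = false });
            break;
        }

        rows.Add(new LogRow
        {
            ReqTimestamp = match.Groups[2].Value,
            ReqDataLength = match.Groups[3].Value.Split(':')[0],
            RespTimestamp = match.Groups[4].Value,
            Success = true
        });
    }

    return JsonConvert.SerializeObject(rows);
}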