Batch processing CSV upload
kthorngren
in Editor
I'm using the CSV Upload Example but need to batch process the uploaded rows, otherwise I get server timeouts. I'm using the preSubmit
event to break the ajax request up into 500 row chunks to send to the server. The code below works, but I'm not sure if it's the best way.
Let me know if there is a better event to use or a more efficient way to accomplish the batch processing.
editor
    //.on( 'postCreate postRemove', function () {
    .on( 'postRemove', function () {
        // After a remove, a number of other rows might have been affected -
        // so we need to reload the table, keeping the paging in the current position
        table.ajax.reload( null, false );
    } )
    .on( 'initCreate', function () {
        // The primary key is generated server side, so disable the field for create
        editor.field( 'pkid' ).disable();
    } )
    .on( 'initEdit', function () {
        // Disable the primary key field for edit as well
        editor.field( 'pkid' ).disable();
    } )
    .on( 'preSubmit', function (e, data, action) {
        if (action === 'create') {
            var maxRows = 500;    // Max rows to submit at a time
            var fudgeFactor = 50; // Fudge factor for deciding if batch processing is needed
            var createData = data.data;
            var rows = Object.keys(createData);
            var totalRows = rows.length;

            // Uploaded rows + fudge factor fit in a single request - process normally
            if (totalRows <= (maxRows + fudgeFactor)) {
                // one( 'ajaxStop', ... ) binds a handler that runs once and then
                // removes itself
                $( document ).one( 'ajaxStop', function () {
                    // Reload the table after the upload completes
                    table.ajax.reload( null, false );
                    $.unblockUI();
                } );
                return true;
            }

            // Block the UI while batch processing
            $('#msg').html('Batch uploading rows... Please wait (monitor using console)');
            $.blockUI({ message: $('#msg') });

            $( document ).one( 'ajaxStop', function () {
                // Reload the table after all batches are complete
                console.log('Batch upload complete - reloading table');
                table.ajax.reload( null, false );
                $.unblockUI();
            } );

            // Batch process a group of rows
            while (rows.length > 0) {
                var batch = {};
                var batchKeys = rows.splice(0, maxRows);

                // Move the rows to be processed into a new object
                // TO DO: determine if there is a more efficient way to do this
                for (var i = 0; i < batchKeys.length; i++) {
                    var key = batchKeys[i];
                    batch[key] = createData[key];
                    delete createData[key];
                }

                // Send an ajax request for the batch. Note that nothing waits on
                // these promises, so all of the batches are sent simultaneously.
                new Promise( function ( resolve ) {
                    $.ajax( {
                        url: '/api/v1/locations/create',
                        type: 'POST',
                        data: {'data': batch},
                        success: function ( json ) {
                            resolve( json );
                            console.log('Batch uploaded');
                        }
                    } );
                } );
            }

            // All rows have been moved into batches and createData (data.data)
            // is now empty, so return true to close the modal - Editor's own
            // request will send no rows.
            return true;
        }
    } );
Kevin
This discussion has been closed.
Answers
I found that the above code works OK for a small number of records. The problem is that it doesn't control the ajax requests, so they are all sent simultaneously, and in my environment this causes timeouts. I changed the way the promises are handled so the ajax requests run sequentially - each batch is chained onto the previous one and only starts once it finishes.
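The idea looks roughly like the sketch below - the URL is the one from the first post, but sendBatches() and the exact wiring are illustrative rather than my exact code:

// Sketch of sequential batch submission using a promise chain.
// Each batch's ajax request starts only after the previous one resolves.
function sendBatches(batches) {
    var chain = Promise.resolve();
    batches.forEach(function (batch) {
        chain = chain.then(function () {
            return new Promise(function (resolve, reject) {
                $.ajax({
                    url: '/api/v1/locations/create',
                    type: 'POST',
                    data: { data: batch },
                    success: resolve,
                    error: reject
                });
            });
        });
    });
    return chain; // Resolves after the final batch completes
}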
Changed the preSubmit to use the above promises solution and to stop the DataTables create request from sending any data. Using this solution allows for using the code and modals provided in the CSV Upload example, but intercepts the Editor ajax call to batch upload via promises.
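Roughly, the preSubmit ends up looking like this - splitIntoBatches() is an illustrative stand-in for the splitting loop in the first post:

.on( 'preSubmit', function (e, data, action) {
    if (action === 'create') {
        // splitIntoBatches() stands in for the splitting loop above: it
        // empties data.data and returns an array of batch objects
        var batches = splitIntoBatches(data.data, 500);
        sendBatches(batches).then(function () {
            table.ajax.reload(null, false);
            $.unblockUI();
        });
        // data.data is now empty, so Editor's own request sends no rows;
        // return true so the modal closes normally
        return true;
    }
} );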
Kevin
Hi Kevin,
That looks like a nice solution - batch uploading in sequence with promises is probably about as good an option as it is going to get at the moment.
The only other option I can think of just now is to use preSubmit to trigger another Editor instance that will run in sequence, taking the next lot off a queue whenever submitComplete triggers. But that's effectively what you are doing with your promises anyway, just not using a second Editor instance.
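Very roughly, and completely untested - batchEditor, queue and the data injection in its preSubmit are illustrative only:

// A second, hidden Editor instance submits one batch per request,
// taking the next batch off a queue whenever submitComplete fires.
var queue = [];          // One object of rows per batch
var currentBatch = null;

batchEditor
    .on('preSubmit', function (e, data) {
        // Inject the current batch in place of the (empty) create data
        data.data = currentBatch;
    })
    .on('submitComplete', function () {
        submitNext();
    });

function submitNext() {
    if (queue.length === 0) {
        table.ajax.reload(null, false); // Queue drained
        return;
    }
    currentBatch = queue.shift();
    batchEditor.create(false).submit(); // Create without showing the form
}

Allan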
That's a good idea. I'll have to try it out. Thanks!
Kevin