Batch processing CSV upload

Batch processing CSV upload

kthorngren — Posts: 9,424 · Questions: 25 · Answers: 2,167

I'm using the CSV Upload Example but need to batch process the uploaded rows. Otherwise I get server timeouts. I'm using the preSubmit event to break the ajax request up into 500 row chunks to send to the server. The below code works but not sure if its the best way.

Let me know if there is a better event to use or a more efficient way to accomplish the batch processing

    editor
        //.on( 'postCreate postRemove', function () {
        .on( 'postRemove', function () {
            // After create or edit, a number of other rows might have been effected -
            // so we need to reload the table, keeping the paging in the current position
            table.ajax.reload( null, false );
        } )
        .on( 'initCreate', function () {
            // Primary key is server-generated, so keep the field read-only on create
            // NOTE(review): original comment said "Enable order" — looks like a
            // copy-paste leftover; the code disables the field.
            editor.field( 'pkid' ).disable();
        } )
        .on( 'initEdit', function () {
            // Disable for edit (re-ordering is performed by click and drag)
            editor.field( 'pkid' ).disable();
        } )
        .on( 'preSubmit', function (e, data, action) {

            if (action === 'create') {
                var maxRows = 500;      // Max rows to submit per request
                var fudgeFactor = 50;   // Slack before batch processing kicks in
                var createData = data.data;
                var rows = Object.keys(createData);

                // Uploaded rows + fudge factor fit in a single request:
                // let Editor submit normally and just reload the table once
                // all ajax traffic settles. `.one('ajaxStop', fn)` runs the
                // handler exactly once and detaches it automatically — no
                // manual `.off()` bookkeeping needed.
                if (rows.length <= (maxRows + fudgeFactor)) {
                    $( document ).one( 'ajaxStop', function () {
                        table.ajax.reload( null, false );
                        $.unblockUI();
                    } );
                    return true;
                }

                // Block UI while batch processing
                $.blockUI({ message: $('#msg') });
                $("#msg").html("Batch uploading files... Please wait (monitor using console)");

                // Reload the table once every batch request has completed
                $( document ).one( 'ajaxStop', function () {
                    console.log('Batch upload complete - reloading table');
                    table.ajax.reload( null, false );
                    $.unblockUI();
                } );

                // Carve the uploaded rows into maxRows-sized batches and POST
                // each one. NOTE(review): these requests all start immediately
                // and run in parallel, which is what caused the server
                // timeouts — see the sequential-promise rework further down.
                while (rows.length > 0) {

                    var batch = {};
                    var batchKeys = rows.splice(0, maxRows);

                    // Move this batch's rows into their own object
                    for (var i = 0; i < batchKeys.length; i++) {
                        var key = batchKeys[i];
                        batch[key] = createData[key];
                        delete createData[key];
                    }

                    // Fire the ajax request for this batch
                    new Promise( function ( resolve ) {
                        $.ajax( {
                            url: '/api/v1/locations/create',
                            type: 'POST',
                            data: {'data': batch},
                            success: function ( json ) {
                                console.log("Batch uploading files....");
                                resolve( json.allowed );
                            }
                        } );
                    } );

                }

                // All rows have been moved out of data.data; return true so the
                // (now empty) Editor submit proceeds and the modal closes.
                return true;

            }
        } );

Kevin

This question has an accepted answer - jump to answer

Answers

  • kthorngren — Posts: 9,424 · Questions: 25 · Answers: 2,167
    edited July 1

    I found that the above code works ok for a small number of records. The problem is that it doesn't control the ajax requests so they are sent simultaneously. In my environment this causes timeouts. I changed the way the promises are handled so the ajax requests run sequentially. Here is the code:

    // Queue of sequential upload promises; each new batch chains onto the
    // tail so only one ajax request is in flight at a time.
    var promises = [Promise.resolve()];

    /**********************************************
        Save data to DB functions
    **********************************************/

    // Promise-returning ajax function to post one batch of data to the DB.
    // Parameters:
    // data: CSV data to upload to server
    // count: beginning record number for this batch (for status display)
    // numFiles: ending record number for this batch
    // totalRecords: total number of records uploaded
    // moreFiles: boolean indicating whether further batches follow this one
    function postDbData(data, count, numFiles, totalRecords, moreFiles) {

        // On the final batch, reload the DataTable once all ajax traffic stops.
        // `.one('ajaxStop', fn)` runs the handler a single time and detaches
        // it automatically — no manual `.off()` hack needed.
        if ( !moreFiles ) {
            $( document ).one( 'ajaxStop', function () {

                // Reload table after all batches are complete
                console.log('Batch upload complete - reloading table');
                table = $('#main-table').DataTable();
                table.ajax.reload( null, false );
                $.unblockUI();

                // NOTE(review): `errors` is assumed to be an array declared
                // elsewhere in this script — confirm it is initialised before
                // the first upload. Check .length: an empty array is truthy,
                // so `if (errors)` alone would alert even with no errors.
                if (errors && errors.length) {
                    alert('The following errors occurred:\n - ' + errors.join('\n - '));
                }

            });
        }
        console.log('postDbData - Uploading records ' + count + ' to ' + numFiles + ' of ' + totalRecords + ' records');
        addMsgDiv();
        $("#msg").html('Uploading records ' + count + ' to ' + numFiles + ' of ' + totalRecords + ' records', false);
        $.blockUI({ message: $('#msg') });   // Block UI and display status
        return new Promise((resolve, reject) => {
            $.ajax({
                type: "POST",
                url: "/api/v1/locations/create",
                data: {data: data},
                success: function(json) {
                    $.unblockUI();  // Unblock UI
                    resolve();
                },
                error: function() {
                    $.unblockUI();
                    errors.push('Error uploading ' + count + ' to ' + numFiles + ' - Check the server log');
                    // Resolve (not reject) so the remaining batches still run;
                    // reject() would break the chain built by pushPromise.
                    resolve();
                }
            });
        });
    }
    
    // Prepare promises array of ajax calls.
    function pushPromise(data, count, numFiles, totalRecords, moreFiles) {
        promises.push(promises.pop().then(function(){
        return postDbData(data, count, numFiles, totalRecords, moreFiles)}));
    }
    

    Changed the preSubmit to use the above promises solution and to eliminate the Datatables create from sending any data.

            .on( 'preSubmit', function (e, data, action) {
    
                if (action === 'create') {
                    var maxRows = 300;  // Max to submit at a time
                    var createData = data.data;
                    var rows = Object.keys(createData);
                    var totalRows = rows.length;
    
                    // Block UI while batch processing
                    console.log("Batch uploading files... Please wait");
                    addMsgDiv();
                    $("#msg").html("Batch uploading files... Please wait");
                    $.blockUI({ message: $('#msg') });
    
                    var count = 1;
                    var numFiles = 0;
    
                    // Batch process a group of rows
                    while (rows.length > 0) {
                        console.log('loop')
                        var batch = {};
                        var batchKeys = rows.splice(0, maxRows);
                        numFiles += batchKeys.length;
                        
                        var moreFiles = true;
                        if (batchKeys.length < maxRows) {
                            moreFiles = false;
                        }
    
                        // Move the rows to be processed into new object
                        // TO DO: determine if there is a more efficient way to do this
                        for (i=0; i<batchKeys.length; i++) {
                            key = batchKeys[i];
                            batch[key] = createData[key]
                            delete createData[key];
                        }
                        
                        pushPromise(batch, count, numFiles, totalRows, moreFiles);
                        
                        count = numFiles + 1;
    
                    }
    
                    // Complete with batches
                    // No data left but return true to close modal.
                    return true;
    
                }
            } );
    

    Using this solution allows for using the code and modals provided in the CSV Upload example but intercepts the Editor AJAX call to batch upload via promises.

    Kevin

  • allan — Posts: 51,954 · Questions: 1 · Answers: 7,882 · Site admin
    Answer ✓

    Hi Kevin,

    That looks like a nice solution - batch uploading in sequence with promises is probably about as good an option as it is going to get there at the moment.

    The only other option I can think of just now is to use preSubmit to trigger another Editor instance that will run in sequence, taking the next lot off a queue whenever submitComplete triggers. But that's effectively what you are doing with your promises anyway, just not using a second Editor instance.

    Allan

  • kthorngren — Posts: 9,424 · Questions: 25 · Answers: 2,167

    That's a good idea. I'll have to try it out. Thanks!

    Kevin

Sign In or Register to comment.