Handling huge datasets from 3rd-party integrations
ServiceNow has some out-of-the-box challenges around receiving large amounts of data (read: response payloads greater than 32 MB for the string data type). My way around this is to save the reply as an attachment instead of keeping it in a string variable.
// this.attachment is assumed to be a GlideSysAttachment instance, e.g.
// this.attachment = new GlideSysAttachment();
var r = new sn_ws.RESTMessageV2("xxxx", "yyyy");
r.setBasicAuth(ssss, rrrr);
// Use executeAsync() so waitForResponse() can wait (here up to 200 seconds) for the reply
var response = r.executeAsync();
response.waitForResponse(200);
// Write the huge response body to an attachment on a record of our choice
var attachSysId = this.attachment.write(<GlideRecord>, <filename>, 'text/plain', response.getBody());
Now we have the response stored as an attachment, so the next step is the little "magic": streaming the answer back in smaller portions.
var arrayNumber = this.getStreamAndParseArray(attachSysId);
getStreamAndParseArray: function(attachSysId) {
    // Stream the attachment instead of reading it into one huge string
    var inputStream = this.attachment.getContentStream(attachSysId);
    var reader = new GlideTextReader(inputStream);
    var ln = '';
    this["responseArray1"] = [];
    var arrayNumber = 1;
    var threshold = 15000;
    this.loopIndex = 0;
    /* If the response from XXXX is too big to handle the normal ServiceNow way, we need to
       split the huge attachment into smaller chunks. In this case I have chosen
       15000 records per string array. */
    while ((ln = reader.readLine()) != null) {
        if (this.loopIndex == (threshold * arrayNumber)) {
            // Start a new array every 15000 lines
            arrayNumber++;
            this["responseArray" + arrayNumber] = [];
        }
        this["responseArray" + arrayNumber].push(ln);
        this.loopIndex++;
    }
    return arrayNumber;
}
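One assumption worth spelling out: each line read from the attachment needs to be a self-contained JSON object with no trailing comma, because doLoadAndTransform below joins the array elements straight into a JSON array string. Two lines in the attachment might look like this (hypothetical data):

{"name":"server01","ip":"10.0.0.1"}
{"name":"server02","ip":"10.0.0.2"}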
Once the entire stream has been split into N arrays (responseArray1 … responseArrayN), I load them through a Data Source, which does the rest of the job at lightning speed. The approach I chose is to create a CSV file: it is simple, very fast, and carries none of the overhead that exists for, e.g., the SOAP and REST protocols. Each array becomes a file that is attached to the selected Data Source, which is then loaded and transformed, and so on until all arrays are processed.
doLoadAndTransform: function(arrayNumber, dataSource, filename, attachSysId, firstTransformName) {
    // Parse each array and create a new CSV file, which is easier for ServiceNow to handle
    for (var parsedArrayCount = 1; parsedArrayCount <= arrayNumber; parsedArrayCount++) {
        gs.log('XXXX parsed # array: ' + this["responseArray" + parsedArrayCount].length);
        // Each array element is a JSON object, so joining them with commas gives a valid JSON array
        var stringJson = '{"import": [' + this["responseArray" + parsedArrayCount].join(',') + ']}';
        var parsedJson = JSON.parse(stringJson);
        var csv = this.createCSVObject(parsedJson['import']);
        // Attach the newly created CSV file to our Data Source
        attachSysId = this.attachment.write(dataSource, filename, 'text/plain; charset=utf-8', csv);
        // this.dt is assumed to be a DataSourceLoader instance; call load() to do the actual load and transforms
        var ret = this.dt.load(dataSource.getValue('sys_id'), firstTransformName, false);
        gs.log('XXXX DataSource loaded for array #' + parsedArrayCount + ' of total #' + arrayNumber);
        // Delete the current array's file/attachment from the Data Source record
        gs.log('XXXX try to delete loaded attachment');
        this.deleteAttachments(filename, dataSource.getValue('sys_id'));
    }
}
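The deleteAttachments helper is not shown above; a minimal sketch, assuming it simply removes the matching rows from sys_attachment (the name and signature follow the call above), could look like this:

deleteAttachments: function(filename, tableSysId) {
    // Remove the CSV file we just loaded so the next iteration starts clean
    var attGr = new GlideRecord('sys_attachment');
    attGr.addQuery('table_sys_id', tableSysId);
    attGr.addQuery('file_name', filename);
    attGr.query();
    attGr.deleteMultiple();
}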
An example of creating a CSV string from a JSON array:
createCSVObject: function(json) {
    // Use the keys of the first record as the CSV header columns
    var fields = Object.keys(json[0]);
    // Replace null values with an empty string when stringifying
    var replacer = function(key, value) {
        return value === null ? '' : value;
    };
    // Build one semicolon-separated line per record
    var csv = json.map(function(row) {
        return fields.map(function(fieldName) {
            return JSON.stringify(row[fieldName], replacer);
        }).join(';');
    });
    // Prepend the header row and join everything with CRLF
    csv.unshift(fields.join(';'));
    csv = csv.join('\r\n');
    return csv;
}
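A quick hypothetical usage example, to show what the function actually produces:

var json = [
    { "name": "server01", "ip": "10.0.0.1" },
    { "name": "server02", "ip": null }
];
var csv = this.createCSVObject(json);
// csv now contains:
// name;ip
// "server01";"10.0.0.1"
// "server02";""

Note that the values come out wrapped in double quotes because JSON.stringify is used; standard CSV parsing treats these as quoted fields.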
Questions or comments? Please don't hesitate!