Suggestions: size limit and best practices to consider when sending table data via Scripted REST API
06-05-2023 11:17 AM
Hi,
I have created a Scripted REST API endpoint and would like to send data from a custom table (specific fields only) of about 50k records, roughly 7 MB and about 1.5 minutes when I checked via Postman. Below is the script I'm using in the Scripted REST Resource (GET).
I found this link and configured the streaming part, and I think the first method I used corresponds to the object serialization approach, but I could be totally wrong here. However, there is a difference in the response size we see in Postman, as shown in the screenshots below.
1. Object Serialization script and result in Postman
(function process( /*RESTAPIRequest*/ request, /*RESTAPIResponse*/ response) {
    var Scope = new GlideRecord('<table_name>');
    //Scope.addEncodedQuery('sys_updated_onONToday@javascript:gs.beginningOfToday()@javascript:gs.endOfToday()');
    //Scope.setLimit(100);
    Scope.query();
    var results = [];
    while (Scope.next()) {
        results.push({
            userID: Scope.getDisplayValue('u_user.u_gid'),
            costCenter: Scope.getDisplayValue('u_cost_center'),
            costCenterOfParent: Scope.getDisplayValue('u_cost_center.parent'),
            location: Scope.getDisplayValue('u_location'),
            locationofParent: Scope.getDisplayValue('u_location.parent'),
            message: 'success'
        });
    }
    response.setBody(results);
})(request, response);
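One variation I have been considering for this first approach (not sure if it is the recommended pattern) is to paginate it with limit/offset query parameters so each response stays small and the third party calls the endpoint repeatedly. The parameter names limit and offset below are just my own assumption, not anything standard:
(function process(/*RESTAPIRequest*/ request, /*RESTAPIResponse*/ response) {
    // 'limit' and 'offset' are hypothetical query parameters supplied by the caller
    var limit = parseInt(request.queryParams.limit, 10) || 1000;
    var offset = parseInt(request.queryParams.offset, 10) || 0;

    var gr = new GlideRecord('<table_name>');
    gr.orderBy('sys_created_on');            // stable ordering so pages don't overlap
    gr.chooseWindow(offset, offset + limit); // read only one page of records
    gr.query();

    var results = [];
    while (gr.next()) {
        results.push({
            userID: gr.getDisplayValue('u_user.u_gid'),
            costCenter: gr.getDisplayValue('u_cost_center'),
            costCenterOfParent: gr.getDisplayValue('u_cost_center.parent'),
            location: gr.getDisplayValue('u_location'),
            locationOfParent: gr.getDisplayValue('u_location.parent')
        });
    }
    response.setBody(results);
})(request, response);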
2. Streaming method and result in Postman
/**
 * Sample Scripted REST Resource that returns custom JSON objects with properties from GlideRecords
 * of a custom table. This sample uses the ServiceNow JavaScript API to query the custom table
 * and then iterates over those records to build and stream a custom JSON object that
 * includes some of their values.
 */
(function runOperation(/*RESTServiceRequest*/ request, /*RESTServiceResult*/ response) {
    var writer = response.getStreamWriter(),
        hdrs = {},
        table = '<custom_table_name>',
        //record_limit = 100,
        gr = new GlideRecord(table);
    hdrs['Content-Type'] = 'application/json';
    response.setStatus(200);
    response.setHeaders(hdrs);
    //gr.setLimit(record_limit);
    gr.query();
    // start building the response object
    writer.writeString("{\"results\":[");
    // iterate over the records and build JSON representations to be streamed out
    while (gr.next()) {
        var ScopeObj = {};
        ScopeObj.gid = gr.getDisplayValue('u_user.u_gid') + '';
        ScopeObj.costCenter = gr.getDisplayValue('u_cost_center') + '';
        ScopeObj.costCenterParent = gr.getDisplayValue('u_cost_center.parent') + '';
        ScopeObj.location = gr.getDisplayValue('u_location') + '';
        ScopeObj.locationParent = gr.getDisplayValue('u_location.parent') + '';
        writer.writeString(global.JSON.stringify(ScopeObj));
        if (gr.hasNext()) {
            writer.writeString(",");
        }
    }
    // close the response object
    writer.writeString("]}");
})(request, response);
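As I understand it, the difference with this second approach is that the stream writer sends each record out as it is read, instead of building the entire 50k-element array in memory and serializing it at the end the way setBody() does in the first script - please correct me if that understanding is wrong.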
I have also checked the docs to see if I could find anything on a size limit, but these properties are not present in our instance.
Please suggest which of these (or any other) methods ServiceNow best practice recommends for a daily run, so the data can be extracted by the third party.
It's not mandatory that we send JSON; it could also be sent as zipped JSON or CSV.
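If CSV is acceptable, I imagine the same stream writer could emit CSV rows instead of JSON - here is a rough, untested sketch using the same fields (the quoting helper is just something I added so embedded commas/quotes don't break the rows):
(function runOperation(/*RESTAPIRequest*/ request, /*RESTAPIResponse*/ response) {
    var writer = response.getStreamWriter();
    response.setStatus(200);
    response.setHeaders({ 'Content-Type': 'text/csv' });

    // quote a value so embedded commas/quotes don't break the row
    function csv(value) {
        return '"' + String(value).replace(/"/g, '""') + '"';
    }

    var gr = new GlideRecord('<custom_table_name>');
    gr.query();

    // header row, then one CSV row per record
    writer.writeString('userID,costCenter,costCenterOfParent,location,locationOfParent\n');
    while (gr.next()) {
        writer.writeString([
            csv(gr.getDisplayValue('u_user.u_gid')),
            csv(gr.getDisplayValue('u_cost_center')),
            csv(gr.getDisplayValue('u_cost_center.parent')),
            csv(gr.getDisplayValue('u_location')),
            csv(gr.getDisplayValue('u_location.parent'))
        ].join(',') + '\n');
    }
})(request, response);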
Could you please help with any reference links/docs where we can find the limit/best practice?
Best Regards,
Pratyusha