Assistance with Importing CSV with headers on row 2
09-14-2022 08:23 AM
We are having some issues with processing attachments for an integration and could do with some suggestions:
Is there any way to set a CSV data import to read from row 2?
Or, is there any way to strip the first row from a CSV file emailed into ServiceNow?
We have an integration from PeopleSoft where data was emailed in as an xlsx attachment with the headers on row 2, and this worked fine: the scripts imported the data into the staging table, ran the transform map, and pushed the data to the target table.
However, that file is generated in protected mode at the source and our email server will not accept it (for security reasons). The source has data in row 1 that cannot be removed, so the headers cannot be moved to row 1.
We've tried using the same data source that worked for xlsx, with header row 2 specified, against a CSV version of the file, but this doesn't seem to work; the data doesn't make it into the staging table. (The headers were exactly the same; the file was just saved as CSV for the test.)
Alternatively, can we strip that first row, or copy the CSV and generate a new CSV with the headers on row 1?
The code below was taken from a community post on spreadsheet imports.
Inbound action:
(function runAction(/*GlideRecord*/ current, /*GlideRecord*/ event, /*EmailWrapper*/ email, /*ScopedEmailLogger*/ logger, /*EmailClassifier*/ classifier) {

    // Set the Data Source to be used
    var dataSourceID = "placeholder 1";
    var transformMapIDs = "placeholder 2";

    // Query the attachments for this data source and delete them
    var gr = new GlideRecord('sys_attachment');
    gr.addQuery('table_name', 'sys_data_source');
    gr.addQuery('table_sys_id', dataSourceID);
    gr.query();
    gr.deleteMultiple();

    // Copy attachment from the Email to the Data Source
    GlideSysAttachment.copy('sys_email', sys_email.sys_id, 'sys_data_source', dataSourceID);

    /*
     * Schedule Load of Attachment
     *
     * This inbound email action will generate an import data source, however the attachment isn't copied to the data source until
     * after the insert of the record. Scheduling the import to happen 30 seconds later so that attachment has time to be copied.
     */
    new global.EmailFileImportUtils().scheduleImport(dataSourceID, transformMapIDs);

})(current, event, email, logger, classifier);
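For reference, the two placeholders at the top of the inbound action are plain sys_id strings. A minimal illustration with made-up values (the plural variable name suggests a comma-separated list of transform map sys_ids is accepted, but that is an assumption):

var dataSourceID = "a1b2c3d4e5f60718a9b0c1d2e3f40516";   // sys_id of the sys_data_source record (hypothetical value)
var transformMapIDs = "b2c3d4e5f60718a9b0c1d2e3f4051627"; // transform map sys_id(s) (hypothetical value)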
EmailFileImportUtils script include:
var EmailFileImportUtils = Class.create();
EmailFileImportUtils.prototype = {

    initialize: function() {
    },

    scheduleImport: function(dataSourceID, transformMapIDs) {
        /*
         * Create scheduled job to process import
         * The inbound email action will generate an import data source, however the attachment isn't copied to the data source until
         * after the insert of the record. The code below will create a scheduled job to process the import 30 seconds later
         * so that attachment has time to be copied to the data source from the email.
         */
        var schRec = new GlideRecord("sys_trigger");
        schRec.name = "Load Data Source: " + dataSourceID;
        schRec.trigger_type = 0; // Run Once
        schRec.script = "new global.EmailFileImportUtils().loadImportSet('" + dataSourceID + "', '" + transformMapIDs + "')";
        var nextAction = new GlideDateTime();
        nextAction.addSeconds(30); // 30 seconds should be enough time, however this can be changed.
        schRec.next_action = nextAction;
        schRec.insert();
    },

    loadImportSet: function(dataSourceID, transformMapIDs) {
        // Get Data Source record
        var dataSource = new GlideRecord("sys_data_source");
        dataSource.get(dataSourceID);

        // Process data source file
        var loader = new GlideImportSetLoader();
        var importSetRec = loader.getImportSetGr(dataSource);
        var ranload = loader.loadImportSetTable(importSetRec, dataSource);
        importSetRec.state = "loaded";
        importSetRec.update();

        // Transform import set
        var transformWorker = new GlideImportSetTransformerWorker(importSetRec.sys_id, transformMapIDs);
        transformWorker.setBackground(true);
        transformWorker.start();
    },

    type: 'EmailFileImportUtils'
};
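If it helps with troubleshooting, the same utility can be exercised from Scripts - Background; a rough sketch, assuming the sys_ids below are replaced with real ones from your instance:

// Schedule the load and transform to run ~30 seconds from now, exactly as the inbound action does
new global.EmailFileImportUtils().scheduleImport(
    'a1b2c3d4e5f60718a9b0c1d2e3f40516',  // data source sys_id (placeholder)
    'b2c3d4e5f60718a9b0c1d2e3f4051627'   // transform map sys_id (placeholder)
);

// Or run the load and transform immediately, skipping the scheduled job
// (the attachment must already be on the data source)
new global.EmailFileImportUtils().loadImportSet(
    'a1b2c3d4e5f60718a9b0c1d2e3f40516',
    'b2c3d4e5f60718a9b0c1d2e3f4051627'
);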
Thanks
Labels: Integrations, Scripting and Coding
09-16-2022 10:55 AM
I recognize this script and use it myself. There is a missing piece in the loadImportSet function:
loadImportSet: function(dataSourceID, transformMapIDs) {
    // Get Data Source record
    var dataSource = new GlideRecord("sys_data_source");
    dataSource.get(dataSourceID);

    // If CSV and the header isn't on row 1, recreate the attachment with the leading and empty rows removed, and delete the original
    if (dataSource.getValue("format") == "CSV" && dataSource.getValue("header_row") > 1) {
        var attachmentRec = new GlideRecord("sys_attachment");
        attachmentRec.addQuery("table_sys_id", dataSource.getValue("sys_id"));
        attachmentRec.query();
        if (attachmentRec.next()) {
            var oldAttachmentID = attachmentRec.getValue("sys_id");
            var inputStream = new GlideSysAttachment().getContentStream(oldAttachmentID);
            var textReader = new GlideTextReader(inputStream);
            var ln = " ";
            var newLine = "";
            var lineCounter = 0;
            var headerRow = parseInt(dataSource.getValue("header_row"));
            while ((ln = textReader.readLine()) != null) {
                lineCounter += 1;
                if (lineCounter < headerRow) {
                    continue;
                }
                if (ln.length > 1) {
                    newLine += ln + "\n";
                }
            }
            new GlideSysAttachment().write(dataSource, 'Changed ' + attachmentRec.getValue('file_name'), "text/csv", newLine);
            // Delete original email attachment so that the new one is processed. If two remain, the API may get confused about which one to process.
            attachmentRec.deleteRecord();
        }
    }

    // Process data source file
    var loader = new GlideImportSetLoader();
    var importSetRec = loader.getImportSetGr(dataSource);
    var ranload = loader.loadImportSetTable(importSetRec, dataSource);
    importSetRec.state = "loaded";
    importSetRec.update();

    // Transform import set
    var transformWorker = new GlideImportSetTransformerWorker(importSetRec.sys_id, transformMapIDs);
    transformWorker.setBackground(true);
    transformWorker.start();
},
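If you want to confirm the rewrite before the load runs, a quick check from Scripts - Background can print the first line of the attachment now sitting on the data source. This is only a sketch; the sys_id is a placeholder, and it assumes GlideSysAttachment().getContent() is passed the sys_attachment record:

var dsID = 'a1b2c3d4e5f60718a9b0c1d2e3f40516'; // data source sys_id (placeholder)
var att = new GlideRecord('sys_attachment');
att.addQuery('table_name', 'sys_data_source');
att.addQuery('table_sys_id', dsID);
att.query();
if (att.next()) {
    var content = String(new GlideSysAttachment().getContent(att));
    gs.info('First line is now: ' + content.split('\n')[0]); // should be the header row
}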