Bulk Excel file upload through catalog request
07-11-2025 09:13 AM
Hi All,
I am trying to create a service catalog item that bulk uploads data into three tables: API, API Component, and Endpoint. Currently this works through a multi-row variable set (MRVS) populated from an uploaded Excel sheet, but the MRVS has a practical limit of around 50 to 100 rows; above that it causes performance issues, which is exactly what I am running into.
How can I bulk upload the Excel sheet through an import set so that it still runs all the data validation currently handled by the MRVS onSubmit client script, and also follows our existing workflow? Is there any way to do this?
07-12-2025 09:53 AM
Any suggestions, anyone?
07-12-2025 06:51 PM
Hi Keshav,
Here's how to create a catalog item that handles bulk Excel uploads using ImportSets while maintaining your validation and workflow requirements.
Catalog Item Implementation
1. Create the Catalog Item Structure
Catalog Item Configuration:
// Sample variables I have used:
// 1. file_attachment (File Upload) - for Excel file
// 2. upload_type (Choice) - API/Component/Endpoint
// 3. instructions (HTML) - Upload instructions
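Even with the row-level validation moving to the server, it helps to keep a small onSubmit catalog client script for the basics. A minimal sketch, assuming the variable names from the sample above (the attachment itself is easier to verify server-side, so this only checks the choice field):

// Catalog Client Script (onSubmit) - minimal sketch
function onSubmit() {
    // Block submission if no upload type has been selected
    var uploadType = g_form.getValue('upload_type');
    if (!uploadType) {
        g_form.addErrorMessage('Please select an upload type (API / Component / Endpoint) before submitting.');
        return false;
    }
    return true;
}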
2. Workflow for Processing Upload
Create a workflow that triggers on catalog item submission:
// Workflow: "Process Bulk Upload" // Trigger: Request Item State = Requested // Activity 1: Process Excel File var processUpload = new GlideRecord('sc_req_item'); processUpload.get(current.sys_id); // Get attachment var attachmentGR = new GlideRecord('sys_attachment'); attachmentGR.addQuery('table_name', 'sc_req_item'); attachmentGR.addQuery('table_sys_id', current.sys_id); attachmentGR.query(); if (attachmentGR.next()) { // Process the Excel file var processor = new BulkUploadProcessor(); processor.processExcelFile(attachmentGR, current); }
3. Server-side Processing Script Include
// Script Include: BulkUploadProcessor
var BulkUploadProcessor = Class.create();
BulkUploadProcessor.prototype = {
    initialize: function() {
        this.importSetTable = '';
        this.transformMap = '';
    },

    processExcelFile: function(attachment, requestItem) {
        try {
            // Determine target table based on upload type
            var uploadType = requestItem.variables.upload_type;
            this.setImportConfiguration(uploadType);

            // Create ImportSet run
            var importSetRun = this.createImportSetRun(requestItem);

            // Process Excel file to ImportSet
            this.convertExcelToImportSet(attachment, importSetRun);

            // Transform data with validation
            this.transformData(importSetRun, requestItem);
        } catch (e) {
            gs.error('Bulk upload processing error: ' + e.message);
            this.updateRequestStatus(requestItem, 'failed', e.message);
        }
    },

    setImportConfiguration: function(uploadType) {
        switch (uploadType) {
            case 'api':
                this.importSetTable = 'u_api_import_set';
                this.transformMap = 'u_api_transform_map';
                break;
            case 'component':
                this.importSetTable = 'u_api_component_import_set';
                this.transformMap = 'u_component_transform_map';
                break;
            case 'endpoint':
                this.importSetTable = 'u_endpoint_import_set';
                this.transformMap = 'u_endpoint_transform_map';
                break;
        }
    },

    createImportSetRun: function(requestItem) {
        var importSetRun = new GlideRecord('sys_import_set_run');
        importSetRun.initialize();
        importSetRun.data_source = 'Catalog Bulk Upload';
        importSetRun.state = 'loaded';
        importSetRun.table = this.importSetTable;
        importSetRun.request_item = requestItem.sys_id; // Link to catalog request (custom field)
        return importSetRun.insert();
    },

    convertExcelToImportSet: function(attachment, importSetRunId) {
        // Read Excel file content
        var attachmentStream = new GlideSysAttachment();
        var content = attachmentStream.read(attachment);

        // Parse Excel using ImportSet API
        var importSet = new GlideImportSet();
        importSet.setTable(this.importSetTable);
        importSet.setImportSetRun(importSetRunId);

        // Process Excel rows
        // ExcelProcessor is a custom helper (not shown here) that parses the
        // spreadsheet content into an array of row objects
        var excelProcessor = new ExcelProcessor();
        var rows = excelProcessor.parseExcelContent(content);

        rows.forEach(function(row) {
            var importRecord = new GlideRecord(this.importSetTable);
            importRecord.initialize();
            importRecord.sys_import_set_run = importSetRunId;

            // Map Excel columns to ImportSet fields
            for (var field in row) {
                if (importRecord.isValidField(field)) {
                    importRecord.setValue(field, row[field]);
                }
            }
            importRecord.insert();
        }, this);
    },

    transformData: function(importSetRunId, requestItem) {
        var transformer = new GlideImportSetTransformer();
        transformer.setImportSet(importSetRunId);
        transformer.setTransformMap(this.transformMap);

        // Custom validation before transform
        this.validateImportData(importSetRunId, requestItem);

        // Transform data
        var result = transformer.transform();

        // Update request item with results
        this.updateRequestWithResults(requestItem, result);
    },

    validateImportData: function(importSetRunId, requestItem) {
        var importData = new GlideRecord(this.importSetTable);
        importData.addQuery('sys_import_set_run', importSetRunId);
        importData.query();

        var errors = [];
        while (importData.next()) {
            // Apply your MRVS validation logic here
            var validationResult = this.validateRow(importData);
            if (!validationResult.isValid) {
                errors.push('Row ' + importData.sys_row_number + ': ' + validationResult.errors.join(', '));
            }
        }

        if (errors.length > 0) {
            throw new Error('Validation failed:\n' + errors.join('\n'));
        }
    },

    validateRow: function(importRecord) {
        var errors = [];

        // Example validation (adapt your MRVS validation logic)
        if (!importRecord.api_name || importRecord.api_name.toString().trim() === '') {
            errors.push('API Name is required');
        }
        if (importRecord.version && !importRecord.version.toString().match(/^\d+\.\d+$/)) {
            errors.push('Version must be in format x.y');
        }
        // Add more validation rules from your MRVS client script

        return {
            isValid: errors.length === 0,
            errors: errors
        };
    },

    // Referenced in the catch block above: record a failure on the request item
    updateRequestStatus: function(requestItem, status, message) {
        var reqItem = new GlideRecord('sc_req_item');
        if (reqItem.get(requestItem.sys_id)) {
            reqItem.work_notes = 'Bulk upload ' + status + ': ' + message;
            reqItem.update();
        }
    },

    updateRequestWithResults: function(requestItem, transformResult) {
        var reqItem = new GlideRecord('sc_req_item');
        reqItem.get(requestItem.sys_id);

        var workNotes = 'Bulk upload completed:\n';
        workNotes += 'Total rows processed: ' + transformResult.total + '\n';
        workNotes += 'Successfully imported: ' + transformResult.success + '\n';
        workNotes += 'Errors: ' + transformResult.errors + '\n';

        reqItem.work_notes = workNotes;
        reqItem.state = (transformResult.errors > 0) ? 'work_in_progress' : 'closed_complete';
        reqItem.update();

        // Trigger your existing workflow if needed
        if (transformResult.success > 0) {
            gs.eventQueue('bulk.upload.completed', reqItem, transformResult);
        }
    },

    type: 'BulkUploadProcessor'
};
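Before wiring the Script Include into the workflow, you can sanity-check it from Scripts - Background against an existing request item; the request number below is a placeholder:

// Background script sketch: run the processor against one existing request item
var reqItem = new GlideRecord('sc_req_item');
if (reqItem.get('number', 'RITM0010001')) { // placeholder request number
    var att = new GlideRecord('sys_attachment');
    att.addQuery('table_name', 'sc_req_item');
    att.addQuery('table_sys_id', reqItem.sys_id);
    att.query();

    if (att.next()) {
        new BulkUploadProcessor().processExcelFile(att, reqItem);
        gs.info('Bulk upload processing triggered for ' + reqItem.number);
    } else {
        gs.info('No attachment found on ' + reqItem.number);
    }
}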
4. Transform Map with Validation
// Transform Map Script for API table
(function runTransformScript(source, map, log, target) {

    // Get the original request item for context
    var requestItem = source.sys_import_set_run.request_item;

    // Apply validation logic (from your MRVS client script)
    var validator = new BulkUploadValidator();
    var validationResult = validator.validateAPIRecord(source);

    if (!validationResult.isValid) {
        log.error('Validation failed for row ' + source.sys_row_number + ': ' + validationResult.errors.join(', '));
        return; // Skip this record
    }

    // Map fields to target table
    target.api_name = source.api_name;
    target.description = source.description;
    target.version = source.version;
    target.status = source.status || 'draft';

    // Set who submitted this via catalog
    target.submitted_by = requestItem.opened_by;
    target.submitted_via = 'catalog_bulk_upload';

    // Trigger workflow after successful transform
    target.workflow_trigger = 'bulk_upload';

})(source, map, log, target);
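BulkUploadValidator in the transform script is not an out-of-the-box class; it would be another Script Include you create to hold the same rules your MRVS onSubmit client script enforces today. A minimal sketch (the field names are examples to adapt):

// Script Include: BulkUploadValidator (sketch - adapt to your own MRVS rules)
var BulkUploadValidator = Class.create();
BulkUploadValidator.prototype = {
    initialize: function() {},

    // Validate one staging row destined for the API table
    validateAPIRecord: function(source) {
        var errors = [];

        if (!source.api_name || source.api_name.toString().trim() === '') {
            errors.push('API Name is required');
        }
        if (source.version && !source.version.toString().match(/^\d+\.\d+$/)) {
            errors.push('Version must be in format x.y');
        }

        return {
            isValid: errors.length === 0,
            errors: errors
        };
    },

    type: 'BulkUploadValidator'
};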