Skip to content

Instantly share code, notes, and snippets.

@Sdy603
Created September 30, 2025 12:20
Show Gist options
  • Select an option

  • Save Sdy603/6d3351b86f5d727709d9357caf58e45d to your computer and use it in GitHub Desktop.

Save Sdy603/6d3351b86f5d727709d9357caf58e45d to your computer and use it in GitHub Desktop.
/**
* deployment_backfill_run.js
*
* Reads deployments from a CSV file and posts them to the DX Deployments API.
* Supports --dry-run to preview the first 10 records only (no API calls).
*/
const fs = require('fs');
const path = require('path');
const csv = require('csv-parser');
const axios = require('axios');
// === CONFIG ===
const args = process.argv.slice(2);
// Take the first non-flag argument as the CSV path so flag order doesn't
// matter. (Previously `args[0]` was assumed to be the file, which broke
// `node deployment_backfill_run.js --dry-run data.csv`.)
const csvArg = args.find((a) => !a.startsWith('--'));
if (!csvArg) {
  console.error("❌ No CSV file provided.\nUsage: node deployment_backfill_run.js <path-to-csv> [--dry-run]");
  process.exit(1);
}
// Preview mode: print the first 10 built payloads, send nothing.
const DRY_RUN = args.includes('--dry-run');
const FILE_PATH = path.resolve(process.cwd(), csvArg);
const API_URL = 'https://tesco.getdx.net/api/deployments.create';
// Token via env or hard-code. When empty, requests go out WITHOUT an
// Authorization header (see sendDeployment).
const BEARER_TOKEN = process.env.DX_API_TOKEN || '';
const DELAY_MS = 100; // pause between requests: 100ms (~10 rps)
// Logs are written next to this script, not the CWD.
const FAIL_LOG_PATH = path.resolve(__dirname, 'deployment_failures_log.csv');
const CONNECTION_LOG_PATH = path.resolve(__dirname, 'connection_errors.log');
// === Helpers ===
function sleep(ms) { return new Promise(resolve => setTimeout(resolve, ms)); }
function isEmpty(val) { return val === undefined || val === null || String(val).trim() === ''; }
// Normalize a "YYYY-MM-DD HH:mm:ss" timestamp to ISO-8601 by swapping the
// first space for 'T'. Strings already containing 'T' pass through; blank
// or nullish input yields undefined. No calendar validation is performed.
function formatISO(dateString) {
  if (dateString === undefined || dateString === null) return undefined;
  const trimmed = String(dateString).trim();
  if (trimmed === '') return undefined;
  if (trimmed.includes('T')) return trimmed;
  return trimmed.replace(' ', 'T');
}
// Loosely parse CSV truthy/falsy tokens (true/t/1/yes/y, false/f/0/no/n,
// case-insensitive). Anything else — including blank input — is undefined.
function parseBoolean(val) {
  if (val === undefined || val === null) return undefined;
  const token = String(val).trim().toLowerCase();
  if (token === '') return undefined;
  const TRUTHY = new Set(['true', 't', '1', 'yes', 'y']);
  const FALSY = new Set(['false', 'f', '0', 'no', 'n']);
  if (TRUTHY.has(token)) return true;
  if (FALSY.has(token)) return false;
  return undefined;
}
// Return the trimmed `environment` column, or undefined when the column is
// absent or blank. Falsy cell values (0, false, '') collapse to undefined
// via `|| ''` — intentional, matching the original behavior.
function normalizedEnv(row) {
  if (!('environment' in row)) return undefined;
  const value = String(row.environment || '').trim();
  return value.length > 0 ? value : undefined;
}
function hasSlash(repo) { return typeof repo === 'string' && repo.includes('/'); }
// Length-only sanity check for a commit SHA: a string whose trimmed length
// is between 7 (short SHA) and 40 (full SHA-1) characters. Content is not
// validated — non-hex characters are accepted.
function validSha(sha) {
  if (typeof sha !== 'string') return false;
  const len = sha.trim().length;
  return len >= 7 && len <= 40;
}
/**
 * Escape one value for a CSV cell per RFC 4180.
 * Nullish values become the empty string. Fields containing a double quote,
 * comma, or line break (LF *or* CR) are wrapped in quotes with internal
 * quotes doubled. Fix: the original only checked for '\n', so a field
 * containing a bare '\r' was emitted unquoted and corrupted the log file.
 */
function csvEscape(val) {
  if (val === null || val === undefined) return '';
  const s = String(val);
  if (/[",\n\r]/.test(s)) {
    return `"${s.replace(/"/g, '""')}"`;
  }
  return s;
}
// Create the failure-log CSV with its header row on first use; a no-op when
// the file already exists (so repeated runs append to the same log).
function ensureFailureLogHeader() {
  if (fs.existsSync(FAIL_LOG_PATH)) return;
  const header =
    'row_index,reference_id,service,deployed_at,repository,commit_sha,' +
    'environment,success,source_url,status_code,result,error\n';
  fs.writeFileSync(FAIL_LOG_PATH, header, 'utf8');
}
// Append one failure record to the failure-log CSV. All fields except the
// numeric row index go through csvEscape. Column order must match the
// header written by ensureFailureLogHeader.
function appendFailureLog(rowIndex, attempted, statusCode, result, errorMsg) {
  ensureFailureLogHeader();
  const escapedFields = [
    attempted.reference_id,
    attempted.service,
    attempted.deployed_at,
    attempted.repository,
    attempted.commit_sha,
    attempted.environment,
    attempted.success,
    attempted.source_url,
    statusCode,
    result,
    errorMsg
  ].map((field) => csvEscape(field));
  const record = [rowIndex, ...escapedFields].join(',') + '\n';
  fs.appendFileSync(FAIL_LOG_PATH, record, 'utf8');
}
// Record a transport-level failure (no HTTP response at all) in a separate
// plain-text log, timestamped in UTC ISO-8601.
function logConnectionError(index, payload, errMsg) {
  const timestamp = new Date().toISOString();
  const entry = `[${timestamp}] Row ${index} Ref:${payload.reference_id} Service:${payload.service} Error:${errMsg}\n`;
  fs.appendFileSync(CONNECTION_LOG_PATH, entry, 'utf8');
}
// Known fields
// CSV columns mapped onto first-class DX API payload fields; any other
// non-empty column in a row is forwarded under `metadata` by buildPayload.
const KNOWN_FIELDS = new Set([
  'reference_id',
  'service',
  'deployed_at',
  'repository',
  'commit_sha',
  'source_url',
  'environment',
  'success'
]);
/**
 * Map one raw CSV row onto a DX deployments.create payload.
 * Known columns become top-level fields (trimmed strings); any other
 * non-empty column rides along under `metadata`. Keys resolving to
 * undefined/null are stripped before returning, so optional columns are
 * simply absent from the request body.
 */
function buildPayload(row) {
  const text = (val) => (isEmpty(val) ? undefined : String(val).trim());
  const payload = {
    reference_id: text(row.reference_id),
    service: text(row.service),
    deployed_at: formatISO(row.deployed_at),
    repository: text(row.repository),
    commit_sha: text(row.commit_sha),
    source_url: text(row.source_url),
    environment: normalizedEnv(row),
    success: ('success' in row) ? parseBoolean(row.success) : undefined,
    source_name: 'API'
  };
  // Extras → metadata: any unrecognized, non-empty column.
  const extras = Object.entries(row).filter(
    ([key, value]) => !KNOWN_FIELDS.has(key) && !isEmpty(value)
  );
  if (extras.length > 0) {
    payload.metadata = Object.fromEntries(
      extras.map(([key, value]) => [key, String(value).trim()])
    );
  }
  // Drop unset keys so they are not serialized into the request.
  const clean = {};
  for (const [key, value] of Object.entries(payload)) {
    if (value !== undefined && value !== null) clean[key] = value;
  }
  return clean;
}
// Validate a built payload before sending. Returns a human-readable error
// string for the first problem found, or null when the payload is OK.
function validateRowPayload(p) {
  const requiredChecks = [
    ['reference_id', 'Missing required field: reference_id'],
    ['service', 'Missing required field: service'],
    ['deployed_at', 'Missing required field: deployed_at (ISO-8601)']
  ];
  for (const [field, message] of requiredChecks) {
    if (isEmpty(p[field])) return message;
  }
  // Optional fields are only validated when present on the payload.
  if ('repository' in p && !hasSlash(p.repository)) {
    return "Invalid repository format. Expected 'OrgName/RepoName'.";
  }
  if ('commit_sha' in p && !validSha(p.commit_sha)) {
    return 'commit_sha must be 7–40 characters.';
  }
  return null;
}
// Running totals for the end-of-run summary; mutated by sendDeployment and
// the validation-skip path in processCSV.
let successCount = 0;
let failureCount = 0;
/**
 * POST one deployment payload to the DX API.
 * On success, bumps successCount. On failure, logs to the console and the
 * failure CSV; transport-level failures (no HTTP response) additionally go
 * to the connection-error log. Never throws — all errors are absorbed so
 * the caller's loop keeps going.
 */
async function sendDeployment(cleanPayload, index) {
  const headers = { 'Content-Type': 'application/json' };
  // Only attach auth when a token was configured.
  if (BEARER_TOKEN) {
    headers.Authorization = `Bearer ${BEARER_TOKEN}`;
  }
  try {
    await axios.post(API_URL, cleanPayload, { headers, timeout: 30000 });
    successCount++;
  } catch (error) {
    const status = error.response?.status ?? 'NO_RESPONSE';
    const body = error.response?.data ?? error.message;
    const detail = typeof body === 'string' ? body : JSON.stringify(body);
    console.error(`FAIL Deployment for ${cleanPayload.service} (${cleanPayload.reference_id}). Status: ${status}`);
    console.error('Response:', detail);
    if (status === 'NO_RESPONSE') {
      logConnectionError(index, cleanPayload, detail);
    }
    appendFailureLog(index, cleanPayload, status, 'failure', detail);
    failureCount++;
  }
}
// === Main ===
/**
 * Read all rows from FILE_PATH, then either preview them (--dry-run) or
 * build/validate/send each one with DELAY_MS pacing between requests.
 * Fix: the read stream previously had no 'error' handler, so a missing or
 * unreadable file crashed with an unhandled 'error' event instead of a
 * clear message.
 */
function processCSV() {
  const rows = [];
  const readStream = fs.createReadStream(FILE_PATH);
  // Fail fast with a friendly message if the file can't be opened/read.
  readStream.on('error', (err) => {
    console.error(`❌ Unable to read ${FILE_PATH}: ${err.message}`);
    process.exit(1);
  });
  readStream
    .pipe(csv())
    .on('data', (row) => rows.push(row))
    .on('end', async () => {
      if (DRY_RUN) {
        console.log(`⚡ Dry run enabled — showing first 10 payloads only from ${rows.length} rows.\n`);
        rows.slice(0, 10).forEach((rawRow, i) => {
          const payload = buildPayload(rawRow);
          const validationError = validateRowPayload(payload);
          console.log(`--- Row ${i + 1} ---`);
          if (validationError) {
            console.log(`⚠️ Skipped (validation error): ${validationError}`);
          } else {
            console.log(JSON.stringify(payload, null, 2));
          }
        });
        console.log("\nDry run complete. No requests sent.");
        process.exit(0);
      }
      console.log(`Loaded ${rows.length} rows. Processing with ${DELAY_MS}ms pacing...`);
      // Sequential on purpose: pacing keeps us around 10 requests/sec.
      for (let i = 0; i < rows.length; i++) {
        const rawRow = rows[i];
        const payload = buildPayload(rawRow);
        const validationError = validateRowPayload(payload);
        if (validationError) {
          // Invalid rows are logged (status SKIPPED) and never sent.
          appendFailureLog(i + 1, payload, 'SKIPPED', 'validation', validationError);
          failureCount++;
          continue;
        }
        await sendDeployment(payload, i + 1);
        await sleep(DELAY_MS);
      }
      console.log('CSV processing complete.');
      console.log(`✅ Deployments sent successfully: ${successCount}`);
      console.log(`❌ Deployments failed: ${failureCount}`);
      console.log(`Failures (if any) are logged to ${FAIL_LOG_PATH}`);
      console.log(`Connection errors (if any) are logged to ${CONNECTION_LOG_PATH}`);
    });
}
// === RUN ===
// Entry point: kicks off the CSV → DX API backfill using the module-level
// configuration parsed above.
processCSV();
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment