// Skip to content
//
// Instantly share code, notes, and snippets.

require("dotenv").config();
const fs = require("fs");
const path = require("path");
const csv = require("csv-parser");
const axios = require("axios");
// Configuration via environment variables
const DX_INSTANCE_URL =
  process.env.DX_INSTANCE_URL || ".getdx.net"; // NOTE(review): default looks like a host suffix, not a full URL — confirm intended fallback
// The API token must come from the environment; never hard-code a literal token here.
// (The original paste truncated this assignment, leaving it to swallow the next line.)
const DX_API_TOKEN = process.env.DX_API_TOKEN;
'use strict'; // NOTE(review): stray directive — start of the next concatenated fragment; inert at this position
/**
* export_incidents_mttr.js
*
* Streams a DX Postgres query to a CSV file.
* - Uses DATABASE_URL from environment (preferred)
* - Falls back to HARDCODED_DB_URL if set
* - Normalizes postgres:// -> postgresql://
* - Streams results to avoid memory issues
*/
// incidents_upsert.js
// Classic Node.js (CommonJS)
// Postgres client (single-connection) and HTTP client used by the upsert flow.
const { Client } = require('pg');
const axios = require('axios');
// Config
// DEBUG is true only when the env var is exactly the string "true" (strict comparison).
const DEBUG = process.env.DEBUG === 'true'; // set to "true" to skip POSTs
// DB_URL is undefined when DX_DB_URL is unset; callers downstream must handle that.
const DB_URL = process.env.DX_DB_URL; // e.g. postgres://user:pass@host:5432/dbname
// Fixed DX endpoint for incident upserts (instance-specific hostname).
const API_URL = 'https://activecampaign.getdx.net/api/incidents.upsert';
// fetch_github_to_csv.js
// Fetch GitHub orgs, repos, PRs, and reviews -> write 4 CSVs.
//
// Usage:
// mac/linux:
// GITHUB_TOKEN=ghp_xxx node fetch_github_to_csv.js
// windows (PowerShell):
// $env:GITHUB_TOKEN="ghp_xxx"; node fetch_github_to_csv.js
//
// Optional env vars:
// Jira Worklogs + Summary with Original Estimate
//
/*
Jira Worklogs + Estimates Sync
Purpose:
- Sync Jira worklogs into Postgres and build a per issue time summary with Original Estimate.
Selection:
- Processes only completed issues
- Applies a grace window (COMPLETED_GRACE_DAYS)
- Skips issues already snapshotted unless OVERWRITE_SUMMARY=true
*/
/**
* deployment_backfill_run.js
*
* Reads deployments from a CSV file and posts them to the DX Deployments API.
* Supports --dry-run to preview the first 10 records only (no API calls).
*/
const fs = require('fs');
const path = require('path');
const csv = require('csv-parser');
/**
* fetch_work_item_parents.js
*
* Backfill: WorkItemLinks WIQL, partitioned by Source.ChangedDate windows using DATE precision (YYYY-MM-DD)
* Incremental: WorkItems WIQL by ChangedDate with DATE precision filter (YYYY-MM-DD)
* Writes: batched multi row upserts into custom.ado_work_item_links
* Schema columns used: child_work_item_source_id, parent_work_item_source_id, relation_url
* MIN_ID applied to both parent and child
* Direct ADO HTTPS without DX proxy
* Dry run writes a SQL file
*/
// enrich_duo_csv.js
// CommonJS version. Only env var required: DATABASE_URL
const fs = require("fs");
const { parse } = require("csv-parse");
const { stringify } = require("csv-stringify");
const { Pool } = require("pg");
const DATABASE_URL = process.env.DATABASE_URL;
if (!DATABASE_URL) {
/**
* Fetch ADO custom field "Classification" for Work Items and upsert into DX.
* Usage: node fetch_classification.js
*/
const fs = require('fs');
const path = require('path');
const { Client } = require('pg');
const axios = require('axios');
'use strict';
/**
* export_dx_users.js
*
* Streams a DX Postgres query to a CSV file.
* - Uses DATABASE_URL from environment (preferred)
* - Falls back to HARDCODED_DB_URL if set
* - Normalizes postgres:// → postgresql://
* - Streams results to avoid memory issues