'use strict';
/**
 * export_incidents_mttr.js
 *
 * Streams a DX Postgres query to a CSV file.
 * - Uses DATABASE_URL from environment (preferred)
 * - Falls back to HARDCODED_DB_URL if set
 * - Normalizes postgres:// -> postgresql://
 * - Streams results to avoid memory issues
 * - Includes graceful shutdown
 */
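// Usage sketch (assumes Node.js with the packages required below installed,
// e.g. `npm install pg pg-query-stream csv-stringify dotenv`):
//   DATABASE_URL=postgresql://user:pass@host:5432/dbname node export_incidents_mttr.js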
const fs = require('fs');
const path = require('path');
const { pipeline } = require('stream');
const { promisify } = require('util');
const { Pool } = require('pg');
const QueryStream = require('pg-query-stream');
const { stringify } = require('csv-stringify');
require('dotenv').config();
const pipe = promisify(pipeline);
// Optional hardcoded connection string
// Example: 'postgresql://user:pass@host:5432/dbname'
const HARDCODED_DB_URL = ''; // set if needed
// Normalize postgres:// -> postgresql://
const normalizePostgresURL = (url) =>
  url && url.startsWith('postgres://')
    ? url.replace('postgres://', 'postgresql://')
    : url;
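// Example: normalizePostgresURL('postgres://user:pass@host:5432/db')
//   -> 'postgresql://user:pass@host:5432/db'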
// Choose connection string: env > hardcoded
const dbUrl = process.env.DATABASE_URL
  ? normalizePostgresURL(process.env.DATABASE_URL)
  : normalizePostgresURL(HARDCODED_DB_URL);
if (!dbUrl) {
  console.error('Missing DATABASE_URL environment variable or HARDCODED_DB_URL');
  process.exit(1);
}
// Database connection pool
const pool = new Pool({
  connectionString: dbUrl,
  ssl: { rejectUnauthorized: false },
  application_name: 'dx-export-incidents-mttr',
});
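// Note: rejectUnauthorized: false skips TLS certificate verification (it
// accepts self-signed certs); tighten this if the database host is untrusted.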
// Incidents that took more than an hour to resolve, slowest first
const QUERY = `
  SELECT
    source_url,
    source_id,
    name,
    priority,
    started_at,
    resolved_at,
    ROUND(started_to_resolved / 60.0, 2) AS mttr_minutes
  FROM incidents
  WHERE started_to_resolved > 3600
    AND started_to_resolved IS NOT NULL
    AND resolved_at IS NOT NULL
    AND deleted = false
  ORDER BY started_to_resolved DESC;
`;
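// started_to_resolved appears to be stored in seconds: dividing by 60 yields
// minutes, and the 3600 threshold matches the "over 60 minutes" cutoff
// reflected in the output filename.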
// CSV columns in order
const CSV_COLUMNS = [
  'source_url',
  'source_id',
  'name',
  'priority',
  'started_at',
  'resolved_at',
  'mttr_minutes',
];
async function exportToCsv() {
  let client;
  try {
    client = await pool.connect();
  } catch (err) {
    console.error('Failed to connect to the database:', err.message || err);
    throw err;
  }
  try {
    const queryStream = new QueryStream(QUERY, [], { batchSize: 1000 });
    const rowStream = client.query(queryStream);
    const csvStream = stringify({ header: true, columns: CSV_COLUMNS });
    const fileStream = fs.createWriteStream(path.resolve('incidents_mttr_over_60.csv'));
    let rowCount = 0;
    rowStream.on('data', () => {
      rowCount += 1;
      if (rowCount % 1000 === 0) {
        console.log(`Processed ${rowCount} rows...`);
      }
    });
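    // pipeline() connects source, transform, and sink with backpressure and
    // error propagation, so large result sets never sit fully in memory.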
    await pipe(rowStream, csvStream, fileStream);
    console.log(`Export complete. Wrote ${rowCount} rows to incidents_mttr_over_60.csv`);
  } finally {
    if (client) {
      client.release();
    }
  }
}
// Graceful shutdown on SIGINT/SIGTERM
function setupSignalHandlers() {
  const shutdown = async (signal) => {
    console.log(`Received ${signal}. Shutting down...`);
    try {
      await pool.end();
      console.log('Database pool closed.');
    } catch (err) {
      console.error('Error while closing database pool:', err);
    } finally {
      process.exit(0);
    }
  };
  process.on('SIGINT', () => shutdown('SIGINT'));
  process.on('SIGTERM', () => shutdown('SIGTERM'));
}
(async () => {
  setupSignalHandlers();
  try {
    await exportToCsv();
  } catch (err) {
    console.error('Export failed:', err);
    process.exit(1);
  } finally {
    await pool.end();
  }
})();