| require("dotenv").config(); | |
| const fs = require("fs"); | |
| const path = require("path"); | |
| const csv = require("csv-parser"); | |
| const axios = require("axios"); | |
// Configuration via environment variables
const DX_INSTANCE_URL =
  process.env.DX_INSTANCE_URL || ".getdx.net"; // full instance URL, e.g. "https://yourcompany.getdx.net"
const DX_API_TOKEN =
  process.env.DX_API_TOKEN || "REPLACE_ME"; // placeholder default, rejected by the guard below
const CSV_PATH =
  process.env.CSV_PATH ||
  path.join(__dirname, "dx_cline_ai_daily_usages.csv");
const TOOL_NAME = process.env.TOOL_NAME || "Cline AI";
const BATCH_SIZE = parseInt(process.env.BATCH_SIZE || "500", 10);
const DRY_RUN = process.env.DRY_RUN === "true";

if (!DX_API_TOKEN || DX_API_TOKEN === "REPLACE_ME") {
  console.error("DX_API_TOKEN is not set");
  process.exit(1);
}
if (!fs.existsSync(CSV_PATH)) {
  console.error(`CSV file not found at path: ${CSV_PATH}`);
  process.exit(1);
}

console.log("Config:");
console.log(` DX_INSTANCE_URL: ${DX_INSTANCE_URL}`);
console.log(` CSV_PATH: ${CSV_PATH}`);
console.log(` TOOL_NAME: ${TOOL_NAME}`);
console.log(` BATCH_SIZE: ${BATCH_SIZE}`);
console.log(` DRY_RUN: ${DRY_RUN}`);
console.log("");
// Helpers
function toBoolean(value) {
  if (typeof value === "boolean") return value;
  if (value == null) return false;
  const v = String(value).trim().toLowerCase();
  return v === "1" || v === "true" || v === "yes" || v === "y";
}
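// Illustrative behaviour of the helper above:
//   toBoolean("Yes") -> true, toBoolean("1") -> true,
//   toBoolean("0") -> false, toBoolean(undefined) -> false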
function maybeNumber(value) {
  if (value == null) return null;
  const str = String(value).trim();
  if (str === "") return null;
  if (/^-?\d+(\.\d+)?$/.test(str)) {
    const num = Number(str);
    if (!Number.isNaN(num)) return num;
  }
  return str;
}
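// Illustrative behaviour: maybeNumber("42") -> 42, maybeNumber("3.5") -> 3.5,
// maybeNumber("") -> null (dropped later), maybeNumber("n/a") -> "n/a" (kept as a string).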
// New helper: strips time from ISO timestamps
function normalizeDate(dateString) {
  if (!dateString) return "";
  const raw = String(dateString).trim();
  // ISO timestamp patterns
  const match = raw.match(/^(\d{4}-\d{2}-\d{2})/);
  if (match) return match[1]; // return YYYY-MM-DD
  return raw; // fallback
}
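// Illustrative behaviour: normalizeDate("2025-01-15T08:30:00Z") -> "2025-01-15",
// normalizeDate("2025-01-15") -> "2025-01-15"; non-ISO strings are returned unchanged.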
// Build a single aiToolMetrics record from a CSV row
function buildRecordFromRow(row) {
  const email = (row.user_email || row.userEmail || row.email || row.Email || "").trim();
  if (!email) throw new Error("Missing email field");

  const rawDate = row.log_date || row.date || row.Date || "";
  const date = normalizeDate(rawDate);
  if (!date) throw new Error(`Missing date for email: ${email}`);

  const isActiveRaw = row.is_active || row.active || row.IsActive;
  const is_active = toBoolean(isActiveRaw);
  const tool = TOOL_NAME;

  // Every non-reserved CSV column becomes a metric, numeric where possible
  const metrics = {};
  const reservedKeys = new Set([
    "email", "Email", "user_email", "userEmail",
    "username", "Username", "user", "User",
    "date", "Date", "log_date", "logDate",
    "is_active", "active", "IsActive",
    "tool", "Tool",
    ""
  ]);
  Object.keys(row).forEach((key) => {
    if (reservedKeys.has(key)) return;
    const val = row[key];
    const converted = maybeNumber(val);
    if (converted !== null && converted !== "") {
      metrics[key] = converted;
    }
  });

  const record = {
    email,
    date,
    is_active,
    tool
  };
  if (Object.keys(metrics).length > 0) {
    record.metrics = metrics;
  }
  return record;
}
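// Illustrative mapping (the column names below are assumptions about the CSV, beyond
// the aliases the function checks): a row such as
//   { user_email: "dev@example.com", log_date: "2025-01-15T00:00:00Z",
//     is_active: "true", tokens_used: "1234" }
// would become
//   { email: "dev@example.com", date: "2025-01-15", is_active: true,
//     tool: "Cline AI", metrics: { tokens_used: 1234 } }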
async function sendBatch(batch, batchIndex) {
  if (!batch || batch.length === 0) return;
  console.log(
    `Sending batch ${batchIndex} with ${batch.length} record(s)` +
      (DRY_RUN ? " [DRY RUN]" : "")
  );
  if (DRY_RUN) {
    console.log(
      JSON.stringify(
        {
          url: `${DX_INSTANCE_URL}/api/aiToolMetrics.pushAll`,
          payloadSample: batch.slice(0, 5)
        },
        null,
        2
      )
    );
    return;
  }
  try {
    const response = await axios.post(
      `${DX_INSTANCE_URL}/api/aiToolMetrics.pushAll`,
      { data: batch },
      {
        headers: {
          Accept: "application/json",
          "Content-Type": "application/json",
          Authorization: `Bearer ${DX_API_TOKEN}`
        },
        timeout: 60000
      }
    );
    console.log(
      `Batch ${batchIndex} success, DX created ${
        Array.isArray(response.data) ? response.data.length : "unknown"
      } record(s)`
    );
  } catch (error) {
    console.error(`Batch ${batchIndex} FAILED`);
    if (error.response) {
      console.error("Status:", error.response.status);
      console.error("Response body:", error.response.data);
    } else {
      console.error("Error:", error.message);
    }
    process.exit(1);
  }
}
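// For reference, each POST body has the shape { data: [ ...records ] }, where the
// records are the objects built by buildRecordFromRow above. The response format of
// aiToolMetrics.pushAll is not assumed here, which is why the success log falls back
// to "unknown" when response.data is not an array.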
async function run() {
  console.log("Starting CSV import to aiToolMetrics.pushAll");
  const records = [];
  let totalRows = 0;
  let skippedMissingEmail = 0;

  await new Promise((resolve, reject) => {
    fs.createReadStream(CSV_PATH)
      .pipe(csv())
      .on("data", (row) => {
        totalRows += 1;
        try {
          const record = buildRecordFromRow(row);
          records.push(record);
        } catch (err) {
          if (err.message === "Missing email field") {
            skippedMissingEmail += 1;
          } else {
            console.error("Error processing row:", err.message);
            console.error("Row content:", row);
          }
        }
      })
      .on("end", () => {
        console.log(
          `Finished reading CSV. Total rows: ${totalRows}, usable records: ${records.length}, skipped missing email: ${skippedMissingEmail}`
        );
        resolve();
      })
      .on("error", (err) => {
        reject(err);
      });
  });

  let batchIndex = 1;
  for (let i = 0; i < records.length; i += BATCH_SIZE) {
    const batch = records.slice(i, i + BATCH_SIZE);
    await sendBatch(batch, batchIndex);
    batchIndex += 1;
  }
  console.log("All done.");
}

run().catch((err) => {
  console.error("Fatal error:", err);
  process.exit(1);
});