gitclear-telemetry.mjs


#!/usr/bin/env node
/**
* GitClear Claude Code Telemetry Hook
*
* Records line writing events from Claude Code sessions, translating these lines into one-way
* encrypted (i.e., GitClear can't decrypt the content) "fingerprints" that periodically
* upload to GitClear. This hook is built in accord with GitClear's Developer-Friendly Metrics
* policy (https://www.gitclear.com/help/policy_for_developer_friendly_data_analytics).
* It is implemented to provide developers & their team with action-inspiring data on
* the extent to which unmodified Claude lines go on to be deployed and create value, sans defects.
*
* Modes:
* "dev" — records full line content, for local evaluation
* "production" — records one-way line fingerprints. This ensures no source code is
* transmitted. Instead, Claude line fingerprints allow evaluating DORA outcomes of
* the unvarnished Claude code lines.
*
* Install:
* 1. Copy to /opt/gitclear/gitclear-telemetry.mjs
* 2. Write your GitClear org token to /opt/gitclear/token
* 3. Configure Claude Code managed settings with the hook definitions
*
* See https://www.gitclear.com/help/claude_code_telemetry_measure_per_line_outcomes for full instructions.
*/
 
import {
readFileSync, appendFileSync, mkdirSync,
statSync, readdirSync, unlinkSync, renameSync,
} from "node:fs";
import { join, extname } from "node:path";
import { homedir, hostname as getHostname } from "node:os";
import { createHash } from "node:crypto";
 
// ---------------------------------------------------------------------------
// Configuration
// ---------------------------------------------------------------------------
 
const CONFIG = Object.freeze({
/** "dev" for plaintext line recording, "production" for hashed fingerprints */
mode: "dev",
 
/** GitClear ingest API */
endpoint: "https://app.gitclear.com/api/v1/ai_telemetry",
 
/** Where to look for the org API token file (first found wins) */
tokenFiles: [
"/opt/gitclear/token",
"C:\\Program Files\\GitClear\\token",
join(homedir(), ".gitclear", "token"),
],
 
/** Local journal directory */
cacheDir: join(homedir(), ".gitclear"),
 
/** Upload after this many recorded events */
flushCount: 50,
 
/** Upload if oldest event exceeds this age in seconds */
flushAgeSec: 300,
 
/** Hex characters per line fingerprint (16 = 64 bits) */
hashChars: 16,
 
/** Network timeout for uploads in milliseconds */
uploadTimeout: 10000,
});
 
const ACTIVE_LOG = join(CONFIG.cacheDir, "active.jsonl");
 
mkdirSync(CONFIG.cacheDir, { recursive: true });
 
// ---------------------------------------------------------------------------
// Utilities
// ---------------------------------------------------------------------------
 
function loadApiToken() {
for (const filepath of CONFIG.tokenFiles) {
try {
const contents = readFileSync(filepath, "utf-8").trim();
if (contents.length > 0) return contents;
} catch (_) {
continue;
}
}
return "";
}
 
function readInputFromStdin() {
try {
return readFileSync(0, "utf-8");
} catch (_) {
return "";
}
}
 
function countLogLines(filepath) {
try {
const text = readFileSync(filepath, "utf-8");
return text.trim() === "" ? 0 : text.trim().split("\n").length;
} catch (_) {
return 0;
}
}
 
function fileAgeSeconds(filepath) {
try {
return (Date.now() - statSync(filepath).mtimeMs) / 1000;
} catch (_) {
return 0;
}
}
 
function shortDigest(text) {
return createHash("sha256").update(text).digest("hex").slice(0, 12);
}
 
// ---------------------------------------------------------------------------
// Line fingerprinting (production mode)
// ---------------------------------------------------------------------------
 
/**
* Compute a one-way fingerprint for a single line of code.
*
* Normalization (applied before hashing):
* - Trim leading/trailing whitespace
* - Collapse interior whitespace runs to a single space
*
* GitClear applies the same normalization on the commit side
* so that re-indentation does not break fingerprint matching.
*/
function fingerprintLine(line) {
const normalized = line.trim().replace(/\s+/g, " ");
if (normalized === "") return "0".repeat(CONFIG.hashChars);
return createHash("sha256")
.update(normalized)
.digest("hex")
.slice(0, CONFIG.hashChars);
}
 
/**
* Convert a code block to its recorded form.
*
* dev mode: array of plaintext line strings
* production mode: array of { h, b } fingerprint objects
*/
function processContent(content) {
if (content == null) return null;
const lines = content.split("\n");
 
if (CONFIG.mode !== "production") {
return lines;
}
 
return lines.map((line) => ({
h: fingerprintLine(line),
b: line.trim() === "",
}));
}
 
// ---------------------------------------------------------------------------
// Diff payload
// ---------------------------------------------------------------------------
 
function buildPayload(toolName, toolInput) {
const filePath = toolInput.file_path || "unknown";
const ext = extname(filePath).toLowerCase();
const modeLabel = CONFIG.mode === "production" ? "hashed" : "plaintext";
 
if (toolName === "Edit" || toolName === "MultiEdit") {
const before = toolInput.old_string || "";
const after = toolInput.new_string || "";
return {
file_path: filePath,
extension: ext,
action: "edit",
mode: modeLabel,
old_content: processContent(before),
new_content: processContent(after),
old_lines: before.split("\n").length,
new_lines: after.split("\n").length,
content_hash: shortDigest(after),
};
}
 
if (toolName === "Write") {
const body = toolInput.content || toolInput.file_text || "";
return {
file_path: filePath,
extension: ext,
action: "write",
mode: modeLabel,
old_content: null,
new_content: processContent(body),
old_lines: 0,
new_lines: body.split("\n").length,
content_hash: shortDigest(body),
};
}
 
return {
file_path: filePath,
extension: ext,
action: toolName.toLowerCase(),
mode: modeLabel,
old_content: null,
new_content: null,
old_lines: 0,
new_lines: 0,
content_hash: shortDigest(JSON.stringify(toolInput)),
};
}
 
// ---------------------------------------------------------------------------
// Event recording
// ---------------------------------------------------------------------------
 
function recordEvent(input) {
const event = {
v: 2,
ts: new Date().toISOString(),
session_id: input.session_id || "unknown",
cwd: input.cwd || "",
tool: input.tool_name || "unknown",
authored_by_llm: true,
user: process.env.USER || process.env.USERNAME || "unknown",
hostname: getHostname(),
diff: buildPayload(
input.tool_name || "unknown",
input.tool_input || {},
),
};
 
appendFileSync(ACTIVE_LOG, JSON.stringify(event) + "\n");
}
 
// ---------------------------------------------------------------------------
// Upload
// ---------------------------------------------------------------------------
 
async function uploadBatch(events, apiToken) {
const response = await fetch(CONFIG.endpoint, {
method: "POST",
headers: {
"Content-Type": "application/json",
"Authorization": "Bearer " + apiToken,
},
body: JSON.stringify({ events }),
signal: AbortSignal.timeout(CONFIG.uploadTimeout),
});
return response.ok;
}
 
function parseBatchFile(filepath) {
const text = readFileSync(filepath, "utf-8").trim();
return text
.split("\n")
.map((line) => { try { return JSON.parse(line); } catch (_) { return null; } })
.filter(Boolean);
}
 
async function flushActive() {
if (countLogLines(ACTIVE_LOG) === 0) return;
 
const batchPath = join(CONFIG.cacheDir, "batch-" + Date.now() + ".jsonl");
try {
renameSync(ACTIVE_LOG, batchPath);
} catch (_) {
return;
}
 
const events = parseBatchFile(batchPath);
if (events.length === 0) {
unlinkSync(batchPath);
return;
}
 
const apiToken = loadApiToken();
if (!apiToken) return; // Keep batch on disk for later
 
try {
const ok = await uploadBatch(events, apiToken);
if (ok) unlinkSync(batchPath);
} catch (_) {
// Upload failed — batch file stays for retry
}
}
 
async function flushStaleBatches() {
const apiToken = loadApiToken();
if (!apiToken) return;
 
const batchFiles = readdirSync(CONFIG.cacheDir).filter(
(name) => name.startsWith("batch-") && name.endsWith(".jsonl"),
);
 
for (const name of batchFiles) {
const filepath = join(CONFIG.cacheDir, name);
const events = parseBatchFile(filepath);
if (events.length === 0) { unlinkSync(filepath); continue; }
 
try {
const ok = await uploadBatch(events, apiToken);
if (ok) unlinkSync(filepath);
} catch (_) {
continue;
}
}
}
 
// ---------------------------------------------------------------------------
// Flush decision
// ---------------------------------------------------------------------------
 
function isFlushDue() {
const count = countLogLines(ACTIVE_LOG);
if (count >= CONFIG.flushCount) return true;
if (count > 0 && fileAgeSeconds(ACTIVE_LOG) > CONFIG.flushAgeSec) return true;
return false;
}
 
// ---------------------------------------------------------------------------
// Entry point
// ---------------------------------------------------------------------------
 
async function main() {
if (process.argv.includes("--flush")) {
await flushActive();
await flushStaleBatches();
process.exit(0);
}
 
const raw = readInputFromStdin();
if (!raw) process.exit(0);
 
let input;
try {
input = JSON.parse(raw);
} catch (_) {
process.exit(0);
}
 
recordEvent(input);
 
if (isFlushDue()) {
await flushActive();
}
}
 
main().catch(() => process.exit(0));