diff --git a/.gitignore b/.gitignore index 7f3f076b..d5a73a07 100644 --- a/.gitignore +++ b/.gitignore @@ -21,5 +21,16 @@ # # dist build output target/ +/server/bindings/ + +# Runtime capture configuration and local fallback capture logs. +/capture_config.json +/capture-events-fallback.jsonl +/capture-metrics-*.csv +/capture-analysis-*/ +/server/scripts/output +/server/scripts/capture-metrics-*.csv +/server/scripts/capture-analysis-*/ +server/capture_config.json # CodeChat Editor lexer: python. See TODO. diff --git a/capture_config.example.json b/capture_config.example.json new file mode 100644 index 00000000..22980ee2 --- /dev/null +++ b/capture_config.example.json @@ -0,0 +1,9 @@ +{ + "host": "your-aws-rds-endpoint.amazonaws.com", + "port": 5432, + "user": "your-db-user", + "password": "your-db-password", + "dbname": "your-db-name", + "app_id": "dissertation", + "fallback_path": "capture-events-fallback.jsonl" +} diff --git a/client/src/shared.mts b/client/src/shared.mts index d3550b0f..caadb971 100644 --- a/client/src/shared.mts +++ b/client/src/shared.mts @@ -43,6 +43,8 @@ import { CodeMirrorDocBlockTuple } from "./rust-types/CodeMirrorDocBlockTuple.js import { UpdateMessageContents } from "./rust-types/UpdateMessageContents.js"; import { ResultOkTypes } from "./rust-types/ResultOkTypes.js"; import { ResultErrTypes } from "./rust-types/ResultErrTypes.js"; +import { CaptureEventWire } from "./rust-types/CaptureEventWire.js"; +import { CaptureStatus } from "./rust-types/CaptureStatus.js"; // Manually define this, since `ts-rs` can't export `webserver.MessageResult`. type MessageResult = { Ok: ResultOkTypes } | { Err: ResultErrTypes }; @@ -55,6 +57,8 @@ export type { CodeMirror, CodeMirrorDiffable, CodeMirrorDocBlockTuple, + CaptureEventWire, + CaptureStatus, CodeChatForWeb, EditorMessage, EditorMessageContents, diff --git a/extensions/VSCode/.gitignore b/extensions/VSCode/.gitignore index 8c5160c5..3780ba9f 100644 --- a/extensions/VSCode/.gitignore +++ b/extensions/VSCode/.gitignore @@ -33,5 +33,5 @@ src/index.d.ts src/index.js src/codechat-editor-client.win32-x64-msvc.node .windows/ - +*.log # CodeChat Editor lexer: python. See TODO. diff --git a/extensions/VSCode/package.json b/extensions/VSCode/package.json index 04129479..b166f77b 100644 --- a/extensions/VSCode/package.json +++ b/extensions/VSCode/package.json @@ -44,7 +44,15 @@ "version": "0.1.54-beta1", "activationEvents": [ "onCommand:extension.codeChatEditorActivate", - "onCommand:extension.codeChatEditorDeactivate" + "onCommand:extension.codeChatEditorDeactivate", + "onCommand:extension.codeChatCaptureStatus", + "onCommand:extension.codeChatInsertReflectionPrompt", + "onCommand:extension.codeChatCaptureTaskStart", + "onCommand:extension.codeChatCaptureTaskSubmit", + "onCommand:extension.codeChatCaptureDebugTaskStart", + "onCommand:extension.codeChatCaptureDebugTaskSubmit", + "onCommand:extension.codeChatCaptureHandoffStart", + "onCommand:extension.codeChatCaptureHandoffEnd" ], "contributes": { "configuration": { @@ -62,6 +70,26 @@ "In the default external web browser" ], "markdownDescription": "Select the location of the CodeChat Editor Client. After changing this value, you **must** close then restart the CodeChat Editor extension." + }, + "CodeChatEditor.Capture.Enabled": { + "type": "boolean", + "default": false, + "markdownDescription": "Enable dissertation instrumentation capture." 
+ }, + "CodeChatEditor.Capture.ConsentEnabled": { + "type": "boolean", + "default": false, + "markdownDescription": "Allow capture after participant consent is recorded for the current study session." + }, + "CodeChatEditor.Capture.ParticipantId": { + "type": "string", + "default": "", + "markdownDescription": "Pseudonymous participant identifier used as the capture user_id. If left blank, CodeChat generates a UUID when the student gives consent." + }, + "CodeChatEditor.Capture.HashFilePaths": { + "type": "boolean", + "default": true, + "markdownDescription": "Hash local file paths before they are sent to capture storage." } } }, @@ -73,6 +101,14 @@ { "command": "extension.codeChatEditorDeactivate", "title": "Disable the CodeChat Editor" + }, + { + "command": "extension.codeChatCaptureStatus", + "title": "Manage CodeChat Capture" + }, + { + "command": "extension.codeChatInsertReflectionPrompt", + "title": "CodeChat: Insert Reflection Prompt" } ] }, diff --git a/extensions/VSCode/src/extension.ts b/extensions/VSCode/src/extension.ts index 9e003ea8..cc1aaeb1 100644 --- a/extensions/VSCode/src/extension.ts +++ b/extensions/VSCode/src/extension.ts @@ -3,7 +3,8 @@ // This file is part of the CodeChat Editor. The CodeChat Editor is free // software: you can redistribute it and/or modify it under the terms of the GNU // General Public License as published by the Free Software Foundation, either -// version 3 of the License, or (at your option) any later version. +// version 3 of the License, or (at your option) any later version of the GNU +// General Public License. // // The CodeChat Editor is distributed in the hope that it will be useful, but // WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or @@ -40,6 +41,8 @@ import { CodeChatEditorServer, initServer } from "./index.js"; // ### Local packages import { auto_update_timeout_ms, + CaptureEventWire, + CaptureStatus, EditorMessage, EditorMessageContents, KeysOfRustEnum, @@ -53,6 +56,8 @@ import { } from "../../../client/src/debug_enabled.mjs"; import { ResultErrTypes } from "../../../client/src/rust-types/ResultErrTypes.js"; +import * as crypto from "crypto"; + // Globals // ------- enum CodeChatEditorClientLocation { @@ -60,6 +65,9 @@ enum CodeChatEditorClientLocation { browser, } +// Create a unique session ID for logging +const CAPTURE_SESSION_ID = crypto.randomUUID(); + // True on Windows, false on OS X / Linux. const is_windows = process.platform === "win32"; @@ -111,13 +119,681 @@ let codeChatEditorServer: CodeChatEditorServer | undefined; initServer(ext.extensionPath); } +// --- +// +// CAPTURE (Dissertation instrumentation) +// -------------------------------------- + +// Capture uses these helpers only for documentation-like files. Source files +// classify directly as code; Markdown/RST get a finer split so prose edits count +// as documentation activity while embedded snippets count as code activity. +function isInMarkdownCodeFence( + doc: vscode.TextDocument, + line: number, +): boolean { + // Very simple fence tracker: toggles when encountering ``` or ~~~ at + // start of line. Good enough for dissertation instrumentation; refine later + // if needed. + let inFence = false; + for (let i = 0; i <= line; i++) { + const t = doc.lineAt(i).text.trim(); + if (t.startsWith("```") || t.startsWith("~~~")) { + inFence = !inFence; + } + } + return inFence; +} + +function isInRstCodeBlock(doc: vscode.TextDocument, line: number): boolean { + // Heuristic: find the most recent ".. 
code-block::" (or "::") and see if + // we're in its indented region. This won’t be perfect, but it’s far better + // than file-level classification. + let blockLine = -1; + for (let i = line; i >= 0; i--) { + const t = doc.lineAt(i).text; + const tt = t.trim(); + if (tt.startsWith(".. code-block::") || tt === "::") { + blockLine = i; + break; + } + // If we hit a non-indented line after searching upward too far, keep + // going; rst blocks can be separated by blank lines. + } + if (blockLine < 0) return false; + + // RST code block content usually begins after optional blank line(s), + // indented. Determine whether current line is indented relative to block + // directive line. + const cur = doc.lineAt(line).text; + if (cur.trim().length === 0) return false; + + // If it's indented at least one space/tab, treat it as inside block. + return /^\s+/.test(cur); +} + +function classifyAtPosition( + doc: vscode.TextDocument, + pos: vscode.Position, +): ActivityKind { + // These helpers are only for documentation-like documents that may embed + // source snippets. Plain source files skip this branch and classify as + // code. + if (DOC_LANG_IDS.has(doc.languageId)) { + if (doc.languageId === "markdown") { + return isInMarkdownCodeFence(doc, pos.line) ? "code" : "doc"; + } + if (doc.languageId === "restructuredtext") { + return isInRstCodeBlock(doc, pos.line) ? "code" : "doc"; + } + // Other doc types: default to doc + return "doc"; + } + return "code"; +} + +// Event-specific payload attached to a capture event. Study metadata such as +// group, course, assignment, and condition is intentionally excluded from the +// student-facing capture settings; analysis can join those values later from a +// researcher-managed participant/date mapping. +type CaptureEventData = Record; + +// Event names are generated from the Rust `CaptureEventType` enum, keeping the +// extension and server in sync without re-declaring the string union here. +type CaptureEventType = CaptureEventWire["event_type"]; + +// Student-facing capture settings. The setup is intentionally small: students +// give consent, toggle capture, and receive or reuse a pseudonymous participant +// UUID. Assignment, course, group, and study-condition metadata are inferred +// during analysis from that participant ID and event timestamps. +interface StudySettings { + // True when the student wants capture enabled for the current work session. + enabled: boolean; + // True after the student has consented to study capture. + consentEnabled: boolean; + // Pseudonymous UUID used as the event user ID; generated when absent. + participantId: string; + // True to avoid storing raw local paths in capture events. + hashFilePaths: boolean; +} + +const CAPTURE_SCHEMA_VERSION = 2; +const CAPTURE_EVENT_SOURCE = "vscode_extension"; +const DEFAULT_REFLECTION_PROMPTS = [ + "What changed in your understanding of this code?", + "What assumption are you making, and how could you test it?", + "What would another developer need to know before maintaining this?", +]; + +// Output channel used for capture diagnostics that should not interrupt normal +// editor use. +let capture_output_channel: vscode.OutputChannel | undefined; +// True after the first failed send is logged to the console, suppressing repeat +// console warnings while still writing detailed failures to the output channel. +let captureFailureLogged = false; +// True once the CodeChat Client and Server have completed enough startup +// handshake work for capture events to be accepted. 
+let captureTransportReady = false; +// True after a capture-enabled extension session has emitted `session_start`. +let extensionCaptureSessionStarted = false; +// Monotonic per-extension event sequence number used to order events produced +// by this VS Code session. +let captureSequenceNumber = 0; +// Status bar item that reports capture health and opens the capture controls. +let capture_status_bar_item: vscode.StatusBarItem | undefined; +// Timer used to refresh capture status from the running server. +let capture_status_timer: NodeJS.Timeout | undefined; + +// Simple classification of what the user is currently doing. `doc` means +// prose/documentation activity, whether in a Markdown/RST document or a +// CodeChat doc block; write events from the server provide the more precise +// doc-block classification when it is available. +type ActivityKind = "doc" | "code" | "other"; + +// Language IDs that we treat as "documentation" for the dissertation metrics. +// You can refine this later if you want. +const DOC_LANG_IDS = new Set([ + "markdown", + "plaintext", + "latex", + "restructuredtext", +]); + +// Track the last activity kind and when a reflective-writing (doc) session +// started. +let lastActivityKind: ActivityKind = "other"; +let docSessionStart: number | null = null; + +function optionalString(value: unknown): string | undefined { + return typeof value === "string" && value.trim().length > 0 + ? value.trim() + : undefined; +} + +function loadStudySettings(): StudySettings { + const config = vscode.workspace.getConfiguration("CodeChatEditor.Capture"); + return { + enabled: config.get("Enabled", false), + consentEnabled: config.get("ConsentEnabled", false), + participantId: optionalString(config.get("ParticipantId")) ?? "", + hashFilePaths: config.get("HashFilePaths", true), + }; +} + +function captureDisabledReason(settings: StudySettings): string | undefined { + if (!settings.enabled) { + return "disabled in settings"; + } + if (!settings.consentEnabled) { + return "waiting for consent"; + } + return undefined; +} + +async function updateCaptureSetting( + name: string, + value: string | boolean, +): Promise { + const config = vscode.workspace.getConfiguration("CodeChatEditor.Capture"); + await config.update(name, value, vscode.ConfigurationTarget.Global); +} + +async function ensureParticipantId(): Promise { + const config = vscode.workspace.getConfiguration("CodeChatEditor.Capture"); + const existing = optionalString(config.get("ParticipantId")); + if (existing !== undefined) { + return existing; + } + + const generated = crypto.randomUUID(); + await config.update( + "ParticipantId", + generated, + vscode.ConfigurationTarget.Global, + ); + return generated; +} + +function hashText(value: string): string { + return crypto.createHash("sha256").update(value).digest("hex"); +} + +function buildFileFields( + filePath: string | undefined, + settings: StudySettings, +): Pick { + if (filePath === undefined) { + return { + language_id: vscode.window.activeTextEditor?.document.languageId, + }; + } + const document = get_document(filePath); + return { + file_path: settings.hashFilePaths ? undefined : filePath, + file_hash: settings.hashFilePaths ? hashText(filePath) : undefined, + language_id: document?.languageId, + }; +} + +// Helper to send a capture event to the Rust server. 
+async function sendCaptureEvent( + eventType: CaptureEventType, + filePath?: string, + data: CaptureEventData = {}, +): Promise { + const settings = loadStudySettings(); + const disabledReason = captureDisabledReason(settings); + if (disabledReason !== undefined) { + updateCaptureStatusBar(`Capture: ${disabledReason}`, disabledReason); + return; + } + const participantId = await ensureParticipantId(); + const fileFields = buildFileFields(filePath, settings); + const payload: CaptureEventWire = { + event_id: crypto.randomUUID(), + sequence_number: BigInt(++captureSequenceNumber), + schema_version: CAPTURE_SCHEMA_VERSION, + user_id: participantId, + event_source: CAPTURE_EVENT_SOURCE, + ...fileFields, + event_type: eventType, + client_timestamp_ms: BigInt(Date.now()), + client_tz_offset_min: new Date().getTimezoneOffset(), + data: { + ...data, + session_id: CAPTURE_SESSION_ID, + path_privacy: settings.hashFilePaths ? "sha256" : "plain", + }, + }; + + if (codeChatEditorServer === undefined) { + reportCaptureFailure("CodeChat server is not running"); + return; + } + if (!captureTransportReady) { + capture_output_channel?.appendLine( + `${new Date().toISOString()} capture skipped before server handshake: ${stringifyCapturePayload(payload)}`, + ); + return; + } + + try { + await codeChatEditorServer.sendCaptureEvent( + stringifyCapturePayload(payload), + ); + captureFailureLogged = false; + await refreshCaptureStatus(); + } catch (err) { + reportCaptureFailure(err instanceof Error ? err.message : String(err)); + } +} + +function stringifyCapturePayload(payload: CaptureEventWire): string { + return JSON.stringify(payload, (_key, value) => + typeof value === "bigint" ? Number(value) : value, + ); +} + +function reportCaptureFailure(message: string) { + capture_output_channel?.appendLine( + `${new Date().toISOString()} capture send failed: ${message}`, + ); + updateCaptureStatusBar("Capture: Error", message); + if (captureFailureLogged) { + return; + } + captureFailureLogged = true; + console.warn(`CodeChat capture event was not queued: ${message}`); +} + +function updateCaptureStatusBar(text: string, tooltip?: string) { + if (capture_status_bar_item === undefined) { + return; + } + capture_status_bar_item.text = text; + capture_status_bar_item.tooltip = tooltip; + capture_status_bar_item.show(); +} + +async function refreshCaptureStatus(): Promise { + const settings = loadStudySettings(); + const disabledReason = captureDisabledReason(settings); + if (disabledReason !== undefined) { + updateCaptureStatusBar(`Capture: ${disabledReason}`, disabledReason); + return; + } + if (codeChatEditorServer === undefined) { + updateCaptureStatusBar( + "Capture: Waiting", + "CodeChat server is not running", + ); + return; + } + + try { + const status = JSON.parse( + codeChatEditorServer.getCaptureStatus(), + ) as CaptureStatus; + let label: string; + switch (status.state) { + case "database": + label = "Capture: DB"; + break; + case "fallback": + label = "Capture: Fallback"; + break; + case "starting": + label = "Capture: Starting"; + break; + default: + label = "Capture: Off"; + break; + } + updateCaptureStatusBar( + label, + [ + `state=${status.state}`, + `queued=${status.queued_events}`, + `db=${status.persisted_events}`, + `fallback=${status.fallback_events}`, + `failed=${status.failed_events}`, + status.last_error ? `last_error=${status.last_error}` : "", + status.fallback_path + ? 
`fallback_path=${status.fallback_path}` + : "", + ] + .filter((line) => line.length > 0) + .join("\n"), + ); + } catch (err) { + updateCaptureStatusBar( + "Capture: Error", + err instanceof Error ? err.message : String(err), + ); + } +} + +// A status-bar QuickPick action. Each item owns the async work needed after the +// student chooses it, keeping the capture UI small and easy to scan. +interface CaptureStatusAction extends vscode.QuickPickItem { + run: () => Promise; +} + +function captureStatusDetails(): string { + const tooltip = capture_status_bar_item?.tooltip; + return typeof tooltip === "string" + ? tooltip + : (tooltip?.value ?? "Capture status unavailable"); +} + +async function setCaptureEnabled(enabled: boolean): Promise { + const active = vscode.window.activeTextEditor; + const filePath = active?.document.fileName; + const settings = loadStudySettings(); + + if (enabled) { + await ensureParticipantId(); + if (!settings.consentEnabled) { + await updateCaptureSetting("ConsentEnabled", true); + } + await updateCaptureSetting("Enabled", true); + extensionCaptureSessionStarted = false; + await startExtensionCaptureSession(filePath); + vscode.window.showInformationMessage("CodeChat capture is enabled."); + } else { + await endExtensionCaptureSession(filePath, "capture_disabled"); + await updateCaptureSetting("Enabled", false); + vscode.window.showInformationMessage("CodeChat capture is off."); + } + + await refreshCaptureStatus(); +} + +async function copyParticipantId(): Promise { + const participantId = await ensureParticipantId(); + await vscode.env.clipboard.writeText(participantId); + vscode.window.showInformationMessage( + "CodeChat capture participant ID copied.", + ); +} + +async function showCaptureStatus(): Promise { + await refreshCaptureStatus(); + const settings = loadStudySettings(); + const actions: CaptureStatusAction[] = []; + + if (!settings.consentEnabled) { + actions.push({ + label: "Give Consent and Enable Capture", + description: "Generate a pseudonymous participant ID if needed.", + run: () => setCaptureEnabled(true), + }); + } else if (settings.enabled) { + actions.push({ + label: "Turn Capture Off", + description: "Stop recording study events for this editor.", + run: () => setCaptureEnabled(false), + }); + } else { + actions.push({ + label: "Turn Capture On", + description: "Resume recording study events for this editor.", + run: () => setCaptureEnabled(true), + }); + } + + actions.push( + { + label: "Copy Participant ID", + description: settings.participantId || "Generate a new UUID.", + run: copyParticipantId, + }, + { + label: "Show Capture Details", + description: captureStatusDetails().split("\n")[0], + run: async () => { + vscode.window.showInformationMessage(captureStatusDetails()); + }, + }, + ); + + const selected = await vscode.window.showQuickPick(actions, { + placeHolder: "Manage CodeChat capture", + }); + if (selected !== undefined) { + await selected.run(); + } +} + +async function recordStudyLifecycleEvent( + eventType: CaptureEventType, +): Promise { + const active = vscode.window.activeTextEditor; + await sendCaptureEvent(eventType, active?.document.fileName, { + command: eventType, + languageId: active?.document.languageId, + }); +} + +function reflectionPromptText(languageId: string, prompt: string): string { + if (languageId === "markdown") { + return `\n\n### Reflection\n\n${prompt}\n\n`; + } + if (languageId === "restructuredtext") { + return `\n.. 
${prompt}\n`; + } + if (languageId === "plaintext" || languageId === "latex") { + return `\n${prompt}\n`; + } + const commentPrefix = + languageId === "python" || + languageId === "shellscript" || + languageId === "powershell" || + languageId === "ruby" + ? "#" + : "//"; + return `\n${commentPrefix} Reflection: ${prompt}\n`; +} + +async function insertReflectionPrompt(): Promise { + const editor = vscode.window.activeTextEditor; + if (editor === undefined) { + vscode.window.showInformationMessage("Open a text editor first."); + return; + } + const prompt = await vscode.window.showQuickPick( + DEFAULT_REFLECTION_PROMPTS, + { + placeHolder: "Select a reflection prompt", + }, + ); + if (prompt === undefined) { + return; + } + + await editor.insertSnippet( + new vscode.SnippetString( + reflectionPromptText(editor.document.languageId, prompt), + ), + ); + await sendCaptureEvent( + "reflection_prompt_inserted", + editor.document.fileName, + { + prompt_hash: hashText(prompt), + prompt_length: prompt.length, + languageId: editor.document.languageId, + }, + ); +} + +async function startExtensionCaptureSession(filePath?: string) { + if (extensionCaptureSessionStarted) { + return; + } + if (captureDisabledReason(loadStudySettings()) !== undefined) { + return; + } + extensionCaptureSessionStarted = true; + await sendCaptureEvent("session_start", filePath, { + mode: "vscode_extension", + }); +} + +async function endExtensionCaptureSession( + filePath: string | undefined, + closedBy: string, +): Promise { + if (!extensionCaptureSessionStarted) { + return; + } + await closeDocSession(filePath, closedBy); + await sendCaptureEvent("session_end", filePath, { + mode: "vscode_extension", + closed_by: closedBy, + }); + extensionCaptureSessionStarted = false; +} + +async function closeDocSession( + filePath: string | undefined, + closedBy: string, +): Promise { + if (docSessionStart === null) { + return; + } + + const durationMs = Date.now() - docSessionStart; + docSessionStart = null; + await sendCaptureEvent("doc_session", filePath, { + duration_ms: durationMs, + duration_seconds: durationMs / 1000.0, + closed_by: closedBy, + }); + await sendCaptureEvent("session_end", filePath, { + mode: "doc", + closed_by: closedBy, + }); +} + +// Update activity state and emit switch/doc-session events. Markdown/RST prose +// and CodeChat doc-block edits are both documentation activity for analysis; +// server-side write events classify CodeChat doc-block edits precisely, while +// this extension-side activity tracker uses the best cursor/file context +// available before translation. +function noteActivity(kind: ActivityKind, filePath?: string) { + const now = Date.now(); + + // Handle entering / leaving a "doc" session. + if (kind === "doc") { + if (docSessionStart === null) { + // Starting a new reflective-writing session. + docSessionStart = now; + sendCaptureEvent("session_start", filePath, { + mode: "doc", + }); + } + } else { + if (docSessionStart !== null) { + // Ending a reflective-writing session. + const durationMs = now - docSessionStart; + docSessionStart = null; + sendCaptureEvent("doc_session", filePath, { + duration_ms: durationMs, + duration_seconds: durationMs / 1000.0, + }); + sendCaptureEvent("session_end", filePath, { + mode: "doc", + }); + } + } + + // If we switched between doc and code, log a switch\_pane event. 
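+    // For example, moving from prose in a Markdown file to editing a source
+    // file emits switch_pane with data { from: "doc", to: "code" }.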
+ const docOrCode = (k: ActivityKind) => k === "doc" || k === "code"; + if ( + docOrCode(lastActivityKind) && + docOrCode(kind) && + kind !== lastActivityKind + ) { + sendCaptureEvent("switch_pane", filePath, { + from: lastActivityKind, + to: kind, + }); + } + + lastActivityKind = kind; +} + // Activation/deactivation // ----------------------- // // This is invoked when the extension is activated. It either creates a new // CodeChat Editor Server instance or reveals the currently running one. export const activate = (context: vscode.ExtensionContext) => { + capture_output_channel = + vscode.window.createOutputChannel("CodeChat Capture"); + context.subscriptions.push(capture_output_channel); + capture_status_bar_item = vscode.window.createStatusBarItem( + vscode.StatusBarAlignment.Left, + 100, + ); + capture_status_bar_item.command = "extension.codeChatCaptureStatus"; + context.subscriptions.push(capture_status_bar_item); + capture_status_timer = setInterval(() => { + refreshCaptureStatus(); + }, 5000); + context.subscriptions.push({ + dispose: () => { + if (capture_status_timer !== undefined) { + clearInterval(capture_status_timer); + capture_status_timer = undefined; + } + }, + }); + context.subscriptions.push( + vscode.workspace.onDidChangeConfiguration(async (event) => { + if (event.affectsConfiguration("CodeChatEditor.Capture")) { + await refreshCaptureStatus(); + } + }), + ); + refreshCaptureStatus(); + context.subscriptions.push( + vscode.commands.registerCommand( + "extension.codeChatCaptureStatus", + showCaptureStatus, + ), + vscode.commands.registerCommand( + "extension.codeChatInsertReflectionPrompt", + insertReflectionPrompt, + ), + // Study lifecycle commands are registered for optional study + // automation/keybindings, but they are not contributed to the Command + // Palette. Normal users should only see status and reflection commands. + vscode.commands.registerCommand( + "extension.codeChatCaptureTaskStart", + () => recordStudyLifecycleEvent("task_start"), + ), + vscode.commands.registerCommand( + "extension.codeChatCaptureTaskSubmit", + () => recordStudyLifecycleEvent("task_submit"), + ), + vscode.commands.registerCommand( + "extension.codeChatCaptureDebugTaskStart", + () => recordStudyLifecycleEvent("debug_task_start"), + ), + vscode.commands.registerCommand( + "extension.codeChatCaptureDebugTaskSubmit", + () => recordStudyLifecycleEvent("debug_task_submit"), + ), + vscode.commands.registerCommand( + "extension.codeChatCaptureHandoffStart", + () => recordStudyLifecycleEvent("handoff_start"), + ), + vscode.commands.registerCommand( + "extension.codeChatCaptureHandoffEnd", + () => recordStudyLifecycleEvent("handoff_end"), + ), vscode.commands.registerCommand( "extension.codeChatEditorDeactivate", deactivate, @@ -148,6 +824,20 @@ export const activate = (context: vscode.ExtensionContext) => { event.reason }, ${format_struct(event.contentChanges)}.`, ); + + // CAPTURE: update session/switch state. The server + // classifies write_* events after parsing. + const doc = event.document; + const firstChange = event.contentChanges[0]; + const pos = firstChange.range.start; + const kind = classifyAtPosition(doc, pos); + + const filePath = doc.fileName; + + // Update our notion of current activity + doc + // session. + noteActivity(kind, filePath); + send_update(true); }), ); @@ -172,24 +862,105 @@ export const activate = (context: vscode.ExtensionContext) => { ) { return; } + + // CAPTURE: update activity + possible + // switch\_pane/doc\_session. 
+ const doc = event.document; + const pos = + event.selection?.active ?? + new vscode.Position(0, 0); + const kind = classifyAtPosition(doc, pos); + + const filePath = doc.fileName; + noteActivity(kind, filePath); + send_update(true); }), ); context.subscriptions.push( vscode.window.onDidChangeTextEditorSelection( - (_event) => { + (event) => { if (ignore_selection_change) { ignore_selection_change = false; return; } + console_log( "CodeChat Editor extension: sending updated cursor/scroll position.", ); + + // CAPTURE: treat a selection change as "activity" + // in this document. + const doc = event.textEditor.document; + const pos = + event.selections?.[0]?.active ?? + event.textEditor.selection.active; + const kind = classifyAtPosition(doc, pos); + const filePath = doc.fileName; + noteActivity(kind, filePath); + send_update(false); }, ), ); + + // CAPTURE: listen for file saves. + context.subscriptions.push( + vscode.workspace.onDidSaveTextDocument((doc) => { + sendCaptureEvent("save", doc.fileName, { + reason: "manual_save", + languageId: doc.languageId, + lineCount: doc.lineCount, + }); + }), + ); + + // CAPTURE: start and end of a debug/run session. + context.subscriptions.push( + vscode.debug.onDidStartDebugSession((session) => { + const active = vscode.window.activeTextEditor; + const filePath = active?.document.fileName; + sendCaptureEvent("run", filePath, { + sessionName: session.name, + sessionType: session.type, + }); + }), + vscode.debug.onDidTerminateDebugSession((session) => { + const active = vscode.window.activeTextEditor; + const filePath = active?.document.fileName; + sendCaptureEvent("run_end", filePath, { + sessionName: session.name, + sessionType: session.type, + }); + }), + ); + + // CAPTURE: start and end compile/build events via VS Code + // tasks. + context.subscriptions.push( + vscode.tasks.onDidStartTaskProcess((e) => { + const active = vscode.window.activeTextEditor; + const filePath = active?.document.fileName; + const task = e.execution.task; + sendCaptureEvent("compile", filePath, { + taskName: task.name, + taskSource: task.source, + definition: task.definition, + processId: e.processId, + }); + }), + vscode.tasks.onDidEndTaskProcess((e) => { + const active = vscode.window.activeTextEditor; + const filePath = active?.document.fileName; + const task = e.execution.task; + sendCaptureEvent("compile_end", filePath, { + taskName: task.name, + taskSource: task.source, + exitCode: e.exitCode, + }); + }), + ); } // Get the CodeChat Client's location from the VSCode @@ -277,6 +1048,10 @@ export const activate = (context: vscode.ExtensionContext) => { // Start the server. console_log("CodeChat Editor extension: starting server."); codeChatEditorServer = new CodeChatEditorServer(); + captureFailureLogged = false; + captureTransportReady = false; + extensionCaptureSessionStarted = false; + refreshCaptureStatus(); const hosted_in_ide = codechat_client_location === @@ -285,6 +1060,7 @@ export const activate = (context: vscode.ExtensionContext) => { `CodeChat Editor extension: sending message Opened(${hosted_in_ide}).`, ); await codeChatEditorServer.sendMessageOpened(hosted_in_ide); + // For the external browser, we can immediately send the // `CurrentFile` message. For the WebView, we must first wait to // receive the HTML for the WebView (the `ClientHtml` message). 
@@ -292,6 +1068,11 @@ export const activate = (context: vscode.ExtensionContext) => { codechat_client_location === CodeChatEditorClientLocation.browser ) { + captureTransportReady = true; + const active = vscode.window.activeTextEditor; + await startExtensionCaptureSession( + active?.document.fileName, + ); send_update(false); } @@ -301,6 +1082,7 @@ export const activate = (context: vscode.ExtensionContext) => { console_log("CodeChat Editor extension: queue closed."); break; } + // Parse the data into a message. const { id, message } = JSON.parse( message_raw, @@ -335,16 +1117,19 @@ export const activate = (context: vscode.ExtensionContext) => { } if (current_update.contents !== undefined) { const source = current_update.contents.source; + // This will produce a change event, which we'll // ignore. The change may also produce a // selection change, which should also be // ignored. ignore_text_document_change = true; ignore_selection_change = true; + // Use a workspace edit, since calls to // `TextEditor.edit` must be made to the active // editor only. const wse = new vscode.WorkspaceEdit(); + // Is this plain text, or a diff? if ("Plain" in source) { wse.replace( @@ -361,6 +1146,7 @@ export const activate = (context: vscode.ExtensionContext) => { ); } else { assert("Diff" in source); + // If this diff was not made against the // text we currently have, reject it. if (source.Diff.version !== version) { @@ -380,8 +1166,8 @@ export const activate = (context: vscode.ExtensionContext) => { } const diffs = source.Diff.doc; for (const diff of diffs) { - // Convert from character offsets from the - // beginning of the document to a + // Convert from character offsets from + // the beginning of the document to a // `Position` (line, then offset on that // line) needed by VSCode. const from = doc.positionAt(diff.from); @@ -415,11 +1201,12 @@ export const activate = (context: vscode.ExtensionContext) => { // Update the cursor and scroll position if // provided. const editor = get_text_editor(doc); + const scroll_line = current_update.scroll_position; if (scroll_line !== undefined && editor) { - // Don't set `ignore_scroll_position` here, - // since `revealRange` doesn't change the - // editor's text selection. + // Don't set `ignore_selection_change` here: + // `revealRange` doesn't change the editor's + // text selection. const scroll_position = new vscode.Position( // The VSCode line is zero-based; the // CodeMirror line is one-based. 
@@ -439,9 +1226,15 @@ export const activate = (context: vscode.ExtensionContext) => { const cursor_position = current_update.cursor_position; - if (cursor_position !== undefined && editor) { - assert("Line" in cursor_position); - const cursor_line = cursor_position.Line; + if ( + cursor_position !== undefined && + typeof cursor_position === "object" && + "Line" in cursor_position && + editor + ) { + const cursor_line = ( + cursor_position as { Line: number } + ).Line; ignore_selection_change = true; const vscode_cursor_position = new vscode.Position( @@ -509,19 +1302,13 @@ export const activate = (context: vscode.ExtensionContext) => { .executeCommand( "vscode.open", vscode.Uri.file(current_file), - { - viewColumn: - current_editor?.viewColumn, - }, + { viewColumn: current_editor?.viewColumn }, ) .then( async () => await sendResult(id), async (reason) => await sendResult(id, { - OpenFileFailed: [ - current_file, - reason, - ], + OpenFileFailed: [current_file, reason], }), ); */ @@ -595,6 +1382,11 @@ export const activate = (context: vscode.ExtensionContext) => { assert(webview_panel !== undefined); webview_panel.webview.html = client_html; await sendResult(id); + captureTransportReady = true; + const active = vscode.window.activeTextEditor; + await startExtensionCaptureSession( + active?.document.fileName, + ); // Now that the Client is loaded, send the editor's // current file to the server. send_update(false); @@ -603,9 +1395,7 @@ export const activate = (context: vscode.ExtensionContext) => { default: console.error( - `Unhandled message ${key}(${format_struct( - value, - )}`, + `Unhandled message ${key}(${format_struct(value)}`, ); break; } @@ -618,6 +1408,13 @@ export const activate = (context: vscode.ExtensionContext) => { // On deactivation, close everything down. export const deactivate = async () => { console_log("CodeChat Editor extension: deactivating."); + + const active = vscode.window.activeTextEditor; + await endExtensionCaptureSession( + active?.document.fileName, + "extension_deactivate", + ); + await stop_client(); webview_panel?.dispose(); console_log("CodeChat Editor extension: deactivated."); @@ -640,7 +1437,9 @@ const format_struct = (complex_data_structure: any): string => const sendResult = async (id: number, result?: ResultErrTypes) => { assert(codeChatEditorServer); console_log( - `CodeChat Editor extension: sending Result(id = ${id}, ${format_struct(result)}).`, + `CodeChat Editor extension: sending Result(id = ${id}, ${format_struct( + result, + )}).`, ); try { await codeChatEditorServer.sendResult( @@ -701,13 +1500,17 @@ const send_update = (this_is_dirty: boolean) => { const scroll_position = current_editor!.visibleRanges[0].start.line + 1; const file_path = current_editor!.document.fileName; + // Send contents only if necessary. const option_contents: null | [string, number] = is_dirty ? [current_editor!.document.getText(), (version = rand())] : null; is_dirty = false; + console_log( - `CodeChat Editor extension: sending Update(${file_path}, ${cursor_position}, ${scroll_position}, ${format_struct(option_contents)})`, + `CodeChat Editor extension: sending Update(${file_path}, ${cursor_position}, ${scroll_position}, ${format_struct( + option_contents, + )})`, ); await codeChatEditorServer!.sendMessageUpdatePlain( file_path, @@ -724,14 +1527,19 @@ const send_update = (this_is_dirty: boolean) => { // well. 
const stop_client = async () => { console_log("CodeChat Editor extension: stopping client."); + const active = vscode.window.activeTextEditor; + await endExtensionCaptureSession( + active?.document.fileName, + "client_stopped", + ); if (codeChatEditorServer !== undefined) { console_log("CodeChat Editor extension: stopping server."); await codeChatEditorServer.stopServer(); codeChatEditorServer = undefined; } + captureTransportReady = false; + await refreshCaptureStatus(); - // Shut the timer down after the client is undefined, to ensure it can't be - // started again by a call to `start_render()`. if (idle_timer !== undefined) { clearTimeout(idle_timer); idle_timer = undefined; @@ -748,7 +1556,6 @@ const show_error = (message: string) => { } console.error(`CodeChat Editor extension: ${message}`); if (webview_panel !== undefined) { - // If the panel was displaying other content, reset it for errors. if ( !webview_panel.webview.html.startsWith("

<h1>CodeChat Editor</h1>

") ) { diff --git a/extensions/VSCode/src/lib.rs b/extensions/VSCode/src/lib.rs index ceec586a..aa8169fa 100644 --- a/extensions/VSCode/src/lib.rs +++ b/extensions/VSCode/src/lib.rs @@ -80,6 +80,19 @@ impl CodeChatEditorServer { self.0.send_message_opened(hosted_in_ide).await } + #[napi] + pub async fn send_capture_event(&self, capture_event_json: String) -> std::io::Result { + let capture_event = serde_json::from_str(&capture_event_json) + .map_err(|err| std::io::Error::other(err.to_string()))?; + self.0.send_capture_event(capture_event).await + } + + #[napi] + pub fn get_capture_status(&self) -> Result { + serde_json::to_string(&self.0.capture_status()) + .map_err(|err| Error::new(Status::GenericFailure, err.to_string())) + } + #[napi] pub async fn send_message_current_file(&self, url: String) -> std::io::Result { self.0.send_message_current_file(url).await diff --git a/server/log4rs.yml b/server/log4rs.yml index 544068f2..d534ba2a 100644 --- a/server/log4rs.yml +++ b/server/log4rs.yml @@ -40,7 +40,7 @@ loggers: level: warn root: - level: info + level: debug appenders: - console_appender - file_appender \ No newline at end of file diff --git a/server/scripts/capture_events_schema.sql b/server/scripts/capture_events_schema.sql new file mode 100644 index 00000000..ddaee807 --- /dev/null +++ b/server/scripts/capture_events_schema.sql @@ -0,0 +1,176 @@ +-- CodeChat capture event schema for dissertation analysis. +-- +-- This script updates an existing legacy `events` table to the lean capture +-- schema used for dissertation telemetry. It converts `timestamp` and `data` to +-- analysis-friendly PostgreSQL types and backfills typed telemetry from +-- existing JSON payloads where possible. Study metadata such as course, group, +-- assignment, condition, and task is intentionally omitted: those values are +-- joined during analysis from researcher-managed participant/date mappings. 
+ +BEGIN; + +CREATE TABLE IF NOT EXISTS public.events ( + id BIGSERIAL PRIMARY KEY, + event_id TEXT, + sequence_number BIGINT, + schema_version INTEGER, + user_id TEXT NOT NULL, + session_id TEXT, + event_source TEXT, + language_id TEXT, + file_hash TEXT, + file_path TEXT, + path_privacy TEXT, + event_type TEXT NOT NULL, + "timestamp" TIMESTAMPTZ NOT NULL DEFAULT now(), + client_timestamp_ms BIGINT, + client_tz_offset_min INTEGER, + server_timestamp_ms BIGINT, + data JSONB NOT NULL DEFAULT '{}'::jsonb, + inserted_at TIMESTAMPTZ NOT NULL DEFAULT now() +); + +ALTER TABLE public.events ADD COLUMN IF NOT EXISTS event_id TEXT; +ALTER TABLE public.events ADD COLUMN IF NOT EXISTS sequence_number BIGINT; +ALTER TABLE public.events ADD COLUMN IF NOT EXISTS schema_version INTEGER; +ALTER TABLE public.events ADD COLUMN IF NOT EXISTS session_id TEXT; +ALTER TABLE public.events ADD COLUMN IF NOT EXISTS event_source TEXT; +ALTER TABLE public.events ADD COLUMN IF NOT EXISTS language_id TEXT; +ALTER TABLE public.events ADD COLUMN IF NOT EXISTS file_hash TEXT; +ALTER TABLE public.events ADD COLUMN IF NOT EXISTS path_privacy TEXT; +ALTER TABLE public.events ADD COLUMN IF NOT EXISTS client_timestamp_ms BIGINT; +ALTER TABLE public.events ADD COLUMN IF NOT EXISTS client_tz_offset_min INTEGER; +ALTER TABLE public.events ADD COLUMN IF NOT EXISTS server_timestamp_ms BIGINT; +ALTER TABLE public.events ADD COLUMN IF NOT EXISTS inserted_at TIMESTAMPTZ NOT NULL DEFAULT now(); + +ALTER TABLE public.events DROP COLUMN IF EXISTS assignment_id; +ALTER TABLE public.events DROP COLUMN IF EXISTS group_id; +ALTER TABLE public.events DROP COLUMN IF EXISTS condition; +ALTER TABLE public.events DROP COLUMN IF EXISTS course_id; +ALTER TABLE public.events DROP COLUMN IF EXISTS task_id; +ALTER TABLE public.events DROP COLUMN IF EXISTS capture_mode; + +DO $$ +DECLARE + current_type TEXT; +BEGIN + SELECT data_type INTO current_type + FROM information_schema.columns + WHERE table_schema = 'public' + AND table_name = 'events' + AND column_name = 'timestamp'; + + IF current_type IS DISTINCT FROM 'timestamp with time zone' THEN + ALTER TABLE public.events + ALTER COLUMN "timestamp" TYPE TIMESTAMPTZ + USING COALESCE(NULLIF("timestamp"::text, '')::timestamptz, now()); + END IF; +END $$; + +DO $$ +DECLARE + current_type TEXT; +BEGIN + SELECT data_type INTO current_type + FROM information_schema.columns + WHERE table_schema = 'public' + AND table_name = 'events' + AND column_name = 'data'; + + IF current_type IS DISTINCT FROM 'jsonb' THEN + ALTER TABLE public.events + ALTER COLUMN data TYPE JSONB + USING CASE + WHEN data IS NULL OR btrim(data::text) = '' THEN '{}'::jsonb + ELSE data::jsonb + END; + END IF; +END $$; + +UPDATE public.events +SET data = '{}'::jsonb +WHERE data IS NULL; + +ALTER TABLE public.events ALTER COLUMN data SET DEFAULT '{}'::jsonb; +ALTER TABLE public.events ALTER COLUMN data SET NOT NULL; +ALTER TABLE public.events ALTER COLUMN "timestamp" SET DEFAULT now(); +ALTER TABLE public.events ALTER COLUMN "timestamp" SET NOT NULL; + +UPDATE public.events +SET + event_id = COALESCE(event_id, NULLIF(data->>'event_id', '')), + sequence_number = COALESCE( + sequence_number, + CASE + WHEN data->>'sequence_number' ~ '^-?[0-9]+$' + THEN (data->>'sequence_number')::bigint + END + ), + schema_version = COALESCE( + schema_version, + CASE + WHEN data->>'schema_version' ~ '^-?[0-9]+$' + THEN (data->>'schema_version')::integer + END + ), + session_id = COALESCE(session_id, NULLIF(data->>'session_id', '')), + event_source = 
COALESCE(event_source, NULLIF(data->>'event_source', '')), + language_id = COALESCE( + language_id, + NULLIF(data->>'language_id', ''), + NULLIF(data->>'languageId', '') + ), + file_hash = COALESCE(file_hash, NULLIF(data->>'file_hash', '')), + path_privacy = COALESCE(path_privacy, NULLIF(data->>'path_privacy', '')), + client_timestamp_ms = COALESCE( + client_timestamp_ms, + CASE + WHEN data->>'client_timestamp_ms' ~ '^-?[0-9]+$' + THEN (data->>'client_timestamp_ms')::bigint + END + ), + client_tz_offset_min = COALESCE( + client_tz_offset_min, + CASE + WHEN data->>'client_tz_offset_min' ~ '^-?[0-9]+$' + THEN (data->>'client_tz_offset_min')::integer + END + ), + server_timestamp_ms = COALESCE( + server_timestamp_ms, + CASE + WHEN data->>'server_timestamp_ms' ~ '^-?[0-9]+$' + THEN (data->>'server_timestamp_ms')::bigint + ELSE floor(extract(epoch from "timestamp") * 1000)::bigint + END + ); + +CREATE INDEX IF NOT EXISTS events_timestamp_idx + ON public.events ("timestamp"); + +CREATE INDEX IF NOT EXISTS events_type_timestamp_idx + ON public.events (event_type, "timestamp"); + +CREATE INDEX IF NOT EXISTS events_participant_session_idx + ON public.events (user_id, session_id); + +CREATE INDEX IF NOT EXISTS events_file_hash_idx + ON public.events (file_hash) + WHERE file_hash IS NOT NULL; + +CREATE INDEX IF NOT EXISTS events_event_id_idx + ON public.events (event_id) + WHERE event_id IS NOT NULL; + +CREATE INDEX IF NOT EXISTS events_data_gin_idx + ON public.events USING GIN (data); + +COMMENT ON TABLE public.events IS + 'CodeChat dissertation capture events. Course, group, assignment, condition, and task context are joined during analysis from participant/date mappings.'; +COMMENT ON COLUMN public.events.user_id IS 'Pseudonymous participant UUID generated or supplied by the VS Code extension.'; +COMMENT ON COLUMN public.events.session_id IS 'Capture session UUID emitted by the VS Code extension.'; +COMMENT ON COLUMN public.events.file_hash IS 'SHA-256 hash of the file path when path hashing is enabled.'; +COMMENT ON COLUMN public.events.file_path IS 'Raw captured file path; NULL when path hashing is enabled.'; +COMMENT ON COLUMN public.events.data IS 'Event-specific JSON payload. Duplicates typed telemetry metadata for portable fallback exports.'; + +COMMIT; diff --git a/server/scripts/export_capture_metrics.py b/server/scripts/export_capture_metrics.py new file mode 100644 index 00000000..9c515735 --- /dev/null +++ b/server/scripts/export_capture_metrics.py @@ -0,0 +1,1448 @@ +#!/usr/bin/env python3 +"""Export dissertation-oriented metrics from CodeChat capture events. 
+ +Default use pulls events directly from PostgreSQL using `capture_config.json` +or the `CODECHAT_CAPTURE_*` environment variables: + + python server/scripts/export_capture_metrics.py --out capture-metrics.csv + +To produce the richer analysis dataset: + + python server/scripts/export_capture_metrics.py --dataset-dir capture-analysis + +The optional positional `input` is only for fallback JSONL logs: + + python server/scripts/export_capture_metrics.py capture-events-fallback.jsonl --out capture-metrics.csv +""" + +from __future__ import annotations + +import argparse +import csv +import hashlib +import json +import os +import re +import shutil +import subprocess +from collections import Counter, defaultdict, deque +from dataclasses import dataclass +from datetime import datetime, timezone +from pathlib import Path +from typing import Any, Iterable, Iterator + + +EVENT_FIELDS = [ + "session_start", + "session_end", + "write_doc", + "write_code", + "doc_session", + "switch_pane", + "save", + "compile", + "compile_end", + "run", + "run_end", + "task_start", + "task_submit", + "debug_task_start", + "debug_task_submit", + "handoff_start", + "handoff_end", + "reflection_prompt_inserted", +] + +IDENTITY_FIELDS = [ + "user_id", + "session_id", +] + +EVENT_ROW_FIELDS = [ + "event_index", + *IDENTITY_FIELDS, + "event_id", + "sequence_number", + "schema_version", + "event_source", + "event_type", + "timestamp", + "client_timestamp_ms", + "server_timestamp_ms", + "client_tz_offset_min", + "client_server_latency_ms", + "elapsed_session_seconds", + "gap_seconds", + "file_id", + "file_hash", + "path_privacy", + "language_id", + "classification_basis", + "write_source", + "mode", + "activity_from", + "activity_to", + "duration_seconds", + "duration_ms", + "line_count", + "prompt_hash", + "prompt_length", + "command", + "task_name", + "task_source", + "exit_code", + "run_session_name", + "run_session_type", + "save_reason", + "doc_block_count_before", + "doc_block_count_after", + "diff_hunks", + "diff_inserted_chars", + "diff_deleted_units", + "diff_replacement_hunks", + "doc_block_transactions", + "doc_block_diff_hunks", + "doc_block_inserted_chars", + "doc_block_deleted_units", +] + +SESSION_SUMMARY_FIELDS = [ + *IDENTITY_FIELDS, + "event_count", + "first_event_at", + "last_event_at", + "active_span_seconds", + "events_per_minute", + "mean_gap_seconds", + "max_gap_seconds", + "doc_session_seconds", + "doc_session_share_of_span", + "write_events", + "doc_write_share", + *[f"{event_type}_events" for event_type in EVENT_FIELDS], + "doc_to_code_switches", + "code_to_doc_switches", + "compile_success_events", + "compile_failure_events", + "total_prompt_chars", + "unique_file_count", + "unique_language_count", + "file_ids", + "language_ids", + "event_sources", + "diff_hunks", + "diff_inserted_chars", + "diff_deleted_units", + "doc_block_transactions", + "doc_block_diff_hunks", + "doc_block_inserted_chars", + "doc_block_deleted_units", + "client_server_latency_ms_mean", + "client_server_latency_ms_max", + "first_sequence_number", + "last_sequence_number", + "missing_sequence_gaps", + "duplicate_event_ids", + "data_quality_notes", +] + +FILE_SUMMARY_FIELDS = [ + *IDENTITY_FIELDS, + "file_id", + "file_hash", + "language_id", + "path_privacy", + "event_count", + "first_event_at", + "last_event_at", + "active_span_seconds", + "doc_session_seconds", + "write_doc_events", + "write_code_events", + "save_events", + "compile_events", + "compile_end_events", + "run_events", + "run_end_events", + "switch_pane_events", + 
"line_count_first", + "line_count_last", + "line_count_max", + "doc_block_count_before_min", + "doc_block_count_after_last", + "classification_bases", + "write_sources", + "diff_hunks", + "diff_inserted_chars", + "diff_deleted_units", + "doc_block_transactions", + "doc_block_diff_hunks", + "doc_block_inserted_chars", + "doc_block_deleted_units", +] + +TASK_LIFECYCLE_FIELDS = [ + *IDENTITY_FIELDS, + "lifecycle_kind", + "lifecycle_index", + "completed", + "start_event_type", + "end_event_type", + "start_at", + "end_at", + "duration_seconds", + "start_event_id", + "end_event_id", + "start_file_id", + "end_file_id", + "language_id", + "command", + "data_quality_notes", +] + +LIFECYCLE_PAIRS = { + "task_start": ("task", "task_submit"), + "debug_task_start": ("debug_task", "debug_task_submit"), + "handoff_start": ("handoff", "handoff_end"), +} + +LIFECYCLE_END_TYPES = { + end_type: (kind, start_type) + for start_type, (kind, end_type) in LIFECYCLE_PAIRS.items() +} + +RAW_FILE_PATH_FIELD = "file_path" + +DB_METADATA_FIELDS = [ + "event_id", + "sequence_number", + "schema_version", + "session_id", + "event_source", + "language_id", + "file_hash", + "path_privacy", + "client_timestamp_ms", + "client_tz_offset_min", + "server_timestamp_ms", +] + +FIELD_DESCRIPTIONS = { + "event_index": "One-based event order after sorting by timestamp and sequence number.", + "user_id": "Pseudonymous participant UUID generated or supplied by the VS Code extension.", + "session_id": "Capture session UUID emitted by the VS Code extension.", + "event_id": "Client-generated UUID when available.", + "sequence_number": "Client-side monotonically increasing sequence number when available.", + "schema_version": "Capture payload schema version.", + "event_source": "Capture source, such as vscode_extension.", + "event_type": "Canonical CodeChat capture event type.", + "timestamp": "Server-recorded event timestamp.", + "client_timestamp_ms": "Client-side timestamp in milliseconds since Unix epoch.", + "server_timestamp_ms": "Server-side timestamp in milliseconds since Unix epoch.", + "client_tz_offset_min": "Client timezone offset from JavaScript Date().getTimezoneOffset().", + "client_server_latency_ms": "Approximate server timestamp minus client timestamp.", + "elapsed_session_seconds": "Seconds since the first event in the same participant/session row.", + "gap_seconds": "Seconds since the prior event in the same participant/session row.", + "file_id": "Privacy-preserving file identifier. Uses captured file hash when available, otherwise a SHA-256 hash of the captured path.", + "file_hash": "Captured SHA-256 file path hash when the extension supplied one.", + "file_path": "Raw captured file path. 
Only exported with --include-file-paths.", + "path_privacy": "Path privacy mode reported by capture settings.", + "language_id": "VS Code language identifier when available.", + "classification_basis": "Server-side write-classification basis when available.", + "write_source": "Write event source, such as server_translation or CodeMirror update path.", + "mode": "Event-specific mode or CodeChat lexer mode.", + "activity_from": "Previous activity kind for switch_pane events.", + "activity_to": "New activity kind for switch_pane events.", + "duration_seconds": "Event-specific duration in seconds.", + "duration_ms": "Event-specific duration in milliseconds.", + "line_count": "Document line count captured on save events.", + "prompt_hash": "SHA-256 hash of the inserted reflection prompt.", + "prompt_length": "Length of the inserted reflection prompt.", + "command": "Lifecycle command name recorded by the extension.", + "task_name": "VS Code task name for compile/build events.", + "task_source": "VS Code task source for compile/build events.", + "exit_code": "Compile/build process exit code when available.", + "run_session_name": "VS Code debug/run session name.", + "run_session_type": "VS Code debug/run session type.", + "save_reason": "Save reason reported by the extension.", + "doc_block_count_before": "Documentation block count before a classified doc-block edit.", + "doc_block_count_after": "Documentation block count after a classified doc-block edit.", + "diff_hunks": "Number of text diff hunks in the event payload.", + "diff_inserted_chars": "Characters inserted across text diff hunks.", + "diff_deleted_units": "UTF-16 code units removed across text diff hunks.", + "diff_replacement_hunks": "Text diff hunks that both removed and inserted content.", + "doc_block_transactions": "Number of doc-block add/update/delete transactions.", + "doc_block_diff_hunks": "Nested text diff hunks inside doc-block transactions.", + "doc_block_inserted_chars": "Characters inserted inside doc-block transaction diffs.", + "doc_block_deleted_units": "UTF-16 code units removed inside doc-block transaction diffs.", + "event_count": "Number of events in the aggregate row.", + "first_event_at": "Earliest event timestamp in the aggregate row.", + "last_event_at": "Latest event timestamp in the aggregate row.", + "active_span_seconds": "Seconds between first and last event in the aggregate row.", + "events_per_minute": "Event count divided by active span in minutes.", + "mean_gap_seconds": "Mean within-session gap between consecutive timestamped events.", + "max_gap_seconds": "Largest within-session gap between consecutive timestamped events.", + "doc_session_seconds": "Total duration of doc_session events.", + "doc_session_share_of_span": "doc_session_seconds divided by active_span_seconds.", + "write_events": "write_doc_events plus write_code_events.", + "doc_write_share": "write_doc_events divided by all write events.", + "doc_to_code_switches": "switch_pane events moving from documentation to code.", + "code_to_doc_switches": "switch_pane events moving from code to documentation.", + "compile_success_events": "compile_end events with exit_code equal to 0.", + "compile_failure_events": "compile_end events with nonzero exit_code.", + "total_prompt_chars": "Sum of reflection prompt lengths.", + "unique_file_count": "Number of distinct file_id values in the aggregate row.", + "unique_language_count": "Number of distinct language_id values in the aggregate row.", + "file_ids": "Semicolon-delimited file_id values in 
the aggregate row.", + "language_ids": "Semicolon-delimited language_id values in the aggregate row.", + "event_sources": "Semicolon-delimited event_source values in the aggregate row.", + "client_server_latency_ms_mean": "Mean approximate client-to-server timestamp delta.", + "client_server_latency_ms_max": "Largest approximate client-to-server timestamp delta.", + "first_sequence_number": "Smallest captured sequence number in the aggregate row.", + "last_sequence_number": "Largest captured sequence number in the aggregate row.", + "missing_sequence_gaps": "Count of missing sequence-number slots within the aggregate row.", + "duplicate_event_ids": "Number of repeated event_id values in the aggregate row.", + "data_quality_notes": "Semicolon-delimited notes about missing or suspicious capture metadata.", + "line_count_first": "First observed line count for the file aggregate.", + "line_count_last": "Last observed line count for the file aggregate.", + "line_count_max": "Maximum observed line count for the file aggregate.", + "doc_block_count_before_min": "Minimum observed doc block count before edits.", + "doc_block_count_after_last": "Last observed doc block count after edits.", + "classification_bases": "Semicolon-delimited write classification bases.", + "write_sources": "Semicolon-delimited write event sources.", + "lifecycle_kind": "Lifecycle family: task, debug_task, or handoff.", + "lifecycle_index": "One-based lifecycle index within participant/session/task/kind.", + "completed": "1 when a lifecycle end event was observed, otherwise 0.", + "start_event_type": "Lifecycle start event type.", + "end_event_type": "Lifecycle end/submit event type.", + "start_at": "Lifecycle start timestamp.", + "end_at": "Lifecycle end timestamp.", + "start_event_id": "event_id for the lifecycle start event.", + "end_event_id": "event_id for the lifecycle end event.", + "start_file_id": "file_id associated with the lifecycle start event.", + "end_file_id": "file_id associated with the lifecycle end event.", +} + + +@dataclass(frozen=True) +class DbConfig: + host: str + user: str + password: str + dbname: str + port: int | None = None + + +def parse_timestamp(value: Any) -> datetime | None: + if isinstance(value, datetime): + return value + if not isinstance(value, str) or not value: + return None + try: + return datetime.fromisoformat(value.replace("Z", "+00:00")) + except ValueError: + return None + + +def as_data(value: Any) -> dict[str, Any]: + if isinstance(value, dict): + return value + if isinstance(value, str): + try: + parsed = json.loads(value) + except json.JSONDecodeError: + return {} + return parsed if isinstance(parsed, dict) else {} + return {} + + +def normalize_db_record(record: dict[str, Any]) -> dict[str, Any]: + data = as_data(record.get("data")) + for field_name in DB_METADATA_FIELDS: + value = record.get(field_name) + if value is not None and data.get(field_name) is None: + data[field_name] = value + + return { + "user_id": record.get("user_id"), + "file_path": record.get("file_path"), + "event_type": record.get("event_type"), + "timestamp": record.get("timestamp"), + "data": data, + } + + +def iter_jsonl_events(path: Path) -> Iterator[dict[str, Any]]: + with path.open("r", encoding="utf-8") as input_file: + for line_number, line in enumerate(input_file, start=1): + line = line.strip() + if not line: + continue + try: + record = json.loads(line) + except json.JSONDecodeError as err: + raise SystemExit(f"{path}:{line_number}: invalid JSON: {err}") from err + event = 
record.get("event", record) + if not isinstance(event, dict): + continue + event["data"] = as_data(event.get("data")) + yield event + + +def load_db_config(config_path: Path) -> DbConfig: + env_config = db_config_from_env() + if env_config is not None: + return env_config + + config_path = resolve_config_path(config_path) + try: + config = json.loads(config_path.read_text(encoding="utf-8")) + except FileNotFoundError as err: + searched = "\n ".join(str(path) for path in config_search_paths(config_path)) + raise SystemExit( + "No DB config found. Create a local capture_config.json, set " + "CODECHAT_CAPTURE_* env vars, or pass a fallback JSONL input file.\n" + f"Searched:\n {searched}" + ) from err + except json.JSONDecodeError as err: + raise SystemExit(f"{config_path}: invalid JSON: {err}") from err + + missing = [name for name in ["host", "user", "password", "dbname"] if not config.get(name)] + if missing: + raise SystemExit(f"{config_path}: missing required DB field(s): {', '.join(missing)}") + + return DbConfig( + host=str(config["host"]), + user=str(config["user"]), + password=str(config["password"]), + dbname=str(config["dbname"]), + port=int(config["port"]) if config.get("port") is not None else None, + ) + + +def resolve_config_path(config_path: Path) -> Path: + for candidate in config_search_paths(config_path): + if candidate.exists(): + return candidate + return config_path + + +def config_search_paths(config_path: Path) -> list[Path]: + if config_path.is_absolute(): + return [config_path] + + script_repo_root = Path(__file__).resolve().parents[2] + paths = [Path.cwd() / config_path, script_repo_root / config_path] + + unique_paths: list[Path] = [] + for path in paths: + if path not in unique_paths: + unique_paths.append(path) + return unique_paths + + +def db_config_from_env() -> DbConfig | None: + host = env_value("CODECHAT_CAPTURE_HOST") + if host is None: + return None + missing = [ + name + for name in [ + "CODECHAT_CAPTURE_USER", + "CODECHAT_CAPTURE_PASSWORD", + "CODECHAT_CAPTURE_DBNAME", + ] + if env_value(name) is None + ] + if missing: + raise SystemExit( + "Missing required capture DB environment variable(s): " + ", ".join(missing) + ) + + port_text = env_value("CODECHAT_CAPTURE_PORT") + return DbConfig( + host=host, + user=env_value("CODECHAT_CAPTURE_USER") or "", + password=env_value("CODECHAT_CAPTURE_PASSWORD") or "", + dbname=env_value("CODECHAT_CAPTURE_DBNAME") or "", + port=int(port_text) if port_text is not None else None, + ) + + +def env_value(name: str) -> str | None: + value = os.environ.get(name) + if value is None: + return None + value = value.strip() + return value or None + + +def sql_identifier(identifier: str) -> str: + parts = identifier.split(".") + for part in parts: + if not re.fullmatch(r"[A-Za-z_][A-Za-z0-9_]*", part): + raise SystemExit(f"Invalid SQL identifier: {identifier!r}") + return ".".join(f'"{part}"' for part in parts) + + +def iter_db_events(config: DbConfig, table: str) -> Iterator[dict[str, Any]]: + try: + import psycopg + except ImportError: + yield from iter_db_events_with_psql(config, table) + return + + connect_kwargs = { + "host": config.host, + "user": config.user, + "password": config.password, + "dbname": config.dbname, + } + if config.port is not None: + connect_kwargs["port"] = config.port + + query = psql_json_query(table) + with psycopg.connect(**connect_kwargs) as conn: + with conn.cursor() as cursor: + cursor.execute(query) + for (record_text,) in cursor: + yield normalize_db_record(json.loads(record_text)) + + +def 
iter_db_events_with_psql(config: DbConfig, table: str) -> Iterator[dict[str, Any]]: + psql_path = find_psql() + if psql_path is None: + raise SystemExit( + "PostgreSQL export needs a local PostgreSQL client to connect to the AWS DB.\n" + "The AWS PostgreSQL server is remote; it cannot provide Python's local DB driver.\n" + "Install one of these on this Windows machine:\n" + " python -m pip install \"psycopg[binary]\"\n" + "or install PostgreSQL command-line tools so psql.exe is available on PATH." + ) + + env = os.environ.copy() + env["PGPASSWORD"] = config.password + command = [ + psql_path, + "--no-password", + "--no-align", + "--tuples-only", + "--quiet", + "--set", + "ON_ERROR_STOP=1", + "--host", + config.host, + "--username", + config.user, + "--dbname", + config.dbname, + "--command", + psql_json_query(table), + ] + if config.port is not None: + command.extend(["--port", str(config.port)]) + + result = subprocess.run( + command, + env=env, + check=False, + capture_output=True, + text=True, + ) + if result.returncode != 0: + raise SystemExit( + "psql failed while querying the AWS PostgreSQL DB:\n" + f"{result.stderr.strip() or result.stdout.strip()}" + ) + + for line_number, line in enumerate(result.stdout.splitlines(), start=1): + line = line.strip() + if not line: + continue + try: + record = json.loads(line) + except json.JSONDecodeError as err: + raise SystemExit(f"psql output line {line_number}: invalid JSON: {err}") from err + yield normalize_db_record(record) + + +def find_psql() -> str | None: + psql_path = shutil.which("psql") + if psql_path is not None: + return psql_path + + program_files = Path(os.environ.get("ProgramFiles", r"C:\Program Files")) + candidates = sorted(program_files.glob(r"PostgreSQL/*/bin/psql.exe"), reverse=True) + return str(candidates[0]) if candidates else None + + +def psql_json_query(table: str) -> str: + return ( + "SELECT to_jsonb(events_row)::text " + f"FROM {sql_identifier(table)} AS events_row " + 'ORDER BY events_row."timestamp"' + ) + + +def text_value(value: Any) -> str: + if value is None: + return "" + if isinstance(value, str): + return value + if isinstance(value, (dict, list)): + return json.dumps(value, sort_keys=True, separators=(",", ":")) + return str(value) + + +def first_data_value(data: dict[str, Any], *names: str) -> Any: + for name in names: + if name in data and data[name] is not None: + return data[name] + return None + + +def data_text(data: dict[str, Any], *names: str) -> str: + return text_value(first_data_value(data, *names)) + + +def int_value(value: Any) -> int | None: + if isinstance(value, bool): + return int(value) + if isinstance(value, int): + return value + if isinstance(value, float) and value.is_integer(): + return int(value) + if isinstance(value, str): + value = value.strip() + if not value: + return None + try: + return int(value) + except ValueError: + return None + return None + + +def float_value(value: Any) -> float | None: + if isinstance(value, bool): + return float(value) + if isinstance(value, (int, float)): + return float(value) + if isinstance(value, str): + value = value.strip() + if not value: + return None + try: + return float(value) + except ValueError: + return None + return None + + +def int_or_blank(value: Any) -> int | str: + number = int_value(value) + return number if number is not None else "" + + +def float_or_blank(value: Any) -> float | str: + number = float_value(value) + return number if number is not None else "" + + +def csv_value(value: Any) -> str | int: + if value is None: + 
return "" + if isinstance(value, float): + return f"{value:.3f}" + if isinstance(value, bool): + return "1" if value else "0" + return value + + +def write_csv(path: Path, fieldnames: list[str], rows: Iterable[dict[str, Any]]) -> None: + if path.parent != Path("."): + path.parent.mkdir(parents=True, exist_ok=True) + with path.open("w", encoding="utf-8", newline="") as output_file: + writer = csv.DictWriter(output_file, fieldnames=fieldnames, extrasaction="ignore") + writer.writeheader() + for row in rows: + writer.writerow({field: csv_value(row.get(field, "")) for field in fieldnames}) + + +def aware_datetime(value: datetime | None) -> datetime | None: + if value is None: + return None + if value.tzinfo is None: + return value.replace(tzinfo=timezone.utc) + return value.astimezone(timezone.utc) + + +def seconds_between(start: datetime | None, end: datetime | None) -> float | None: + if start is None or end is None: + return None + return (end - start).total_seconds() + + +def timestamp_for_csv(value: datetime | None, fallback: Any = "") -> str: + if value is not None: + return value.isoformat() + return text_value(fallback) + + +def sha256_text(value: str) -> str: + return hashlib.sha256(value.encode("utf-8")).hexdigest() + + +def file_id_for(file_path: str, file_hash: str) -> str: + if file_hash: + return file_hash + if file_path: + return sha256_text(file_path) + return "" + + +def fields_with_optional_file_path( + fieldnames: list[str], include_file_paths: bool +) -> list[str]: + if not include_file_paths or RAW_FILE_PATH_FIELD in fieldnames: + return fieldnames + fields = list(fieldnames) + insert_after = "file_hash" if "file_hash" in fields else "file_id" + fields.insert(fields.index(insert_after) + 1, RAW_FILE_PATH_FIELD) + return fields + + +def identity_key(row: dict[str, Any]) -> tuple[str, ...]: + return tuple(text_value(row.get(field)) for field in IDENTITY_FIELDS) + + +def semicolon_join(values: Iterable[str]) -> str: + return ";".join(sorted(value for value in values if value)) + + +def add_number(acc: list[float], value: Any) -> None: + number = float_value(value) + if number is not None: + acc.append(number) + + +def string_diff_stats(value: Any) -> Counter[str]: + stats: Counter[str] = Counter() + if isinstance(value, list): + for item in value: + stats.update(string_diff_stats(item)) + return stats + if not isinstance(value, dict): + return stats + + if "from" in value and "insert" in value: + from_value = int_value(value.get("from")) or 0 + to_value = int_value(value.get("to")) + removed_units = max(0, (to_value or from_value) - from_value) + inserted_chars = len(text_value(value.get("insert"))) + stats["hunks"] += 1 + stats["inserted_chars"] += inserted_chars + stats["deleted_units"] += removed_units + if removed_units > 0 and inserted_chars > 0: + stats["replacement_hunks"] += 1 + return stats + + for child in value.values(): + stats.update(string_diff_stats(child)) + return stats + + +def doc_block_contents(value: Any) -> str: + if isinstance(value, list) and len(value) >= 5: + return text_value(value[4]) + if isinstance(value, dict): + return text_value(value.get("contents")) + return "" + + +def doc_block_diff_stats(value: Any) -> Counter[str]: + stats: Counter[str] = Counter() + if not isinstance(value, list): + return stats + + for transaction in value: + stats["transactions"] += 1 + if not isinstance(transaction, dict): + continue + if "Add" in transaction: + stats["inserted_chars"] += len(doc_block_contents(transaction["Add"])) + elif "Update" in transaction: + 
update_stats = string_diff_stats(transaction["Update"]) + stats["hunks"] += update_stats["hunks"] + stats["inserted_chars"] += update_stats["inserted_chars"] + stats["deleted_units"] += update_stats["deleted_units"] + elif "Delete" in transaction: + continue + else: + update_stats = string_diff_stats(transaction) + stats["hunks"] += update_stats["hunks"] + stats["inserted_chars"] += update_stats["inserted_chars"] + stats["deleted_units"] += update_stats["deleted_units"] + return stats + + +def normalize_event_rows( + events: Iterable[dict[str, Any]], include_file_paths: bool = False +) -> list[dict[str, Any]]: + sortable_rows: list[dict[str, Any]] = [] + for original_index, event in enumerate(events, start=1): + data = as_data(event.get("data")) + timestamp = aware_datetime(parse_timestamp(event.get("timestamp"))) + file_path = text_value(event.get("file_path")) + file_hash = data_text(data, "file_hash") + client_timestamp_ms = int_value(data.get("client_timestamp_ms")) + server_timestamp_ms = int_value(data.get("server_timestamp_ms")) + latency_ms = ( + server_timestamp_ms - client_timestamp_ms + if client_timestamp_ms is not None and server_timestamp_ms is not None + else "" + ) + diff_stats = string_diff_stats(data.get("diff")) + doc_block_stats = doc_block_diff_stats(data.get("doc_block_diff")) + + row: dict[str, Any] = { + "event_index": original_index, + "user_id": text_value(event.get("user_id")), + "session_id": data_text(data, "session_id"), + "event_id": data_text(data, "event_id"), + "sequence_number": int_or_blank(data.get("sequence_number")), + "schema_version": int_or_blank(data.get("schema_version")), + "event_source": data_text(data, "event_source"), + "event_type": text_value(event.get("event_type")), + "timestamp": timestamp_for_csv(timestamp, event.get("timestamp")), + "client_timestamp_ms": client_timestamp_ms + if client_timestamp_ms is not None + else "", + "server_timestamp_ms": server_timestamp_ms + if server_timestamp_ms is not None + else "", + "client_tz_offset_min": int_or_blank(data.get("client_tz_offset_min")), + "client_server_latency_ms": latency_ms, + "elapsed_session_seconds": "", + "gap_seconds": "", + "file_id": file_id_for(file_path, file_hash), + "file_hash": file_hash, + "path_privacy": data_text(data, "path_privacy"), + "language_id": data_text(data, "language_id", "languageId"), + "classification_basis": data_text(data, "classification_basis"), + "write_source": data_text(data, "source"), + "mode": data_text(data, "mode"), + "activity_from": data_text(data, "from"), + "activity_to": data_text(data, "to"), + "duration_seconds": float_or_blank(data.get("duration_seconds")), + "duration_ms": float_or_blank(data.get("duration_ms")), + "line_count": int_or_blank(first_data_value(data, "lineCount", "line_count")), + "prompt_hash": data_text(data, "prompt_hash"), + "prompt_length": int_or_blank(data.get("prompt_length")), + "command": data_text(data, "command"), + "task_name": data_text(data, "taskName", "task_name"), + "task_source": data_text(data, "taskSource", "task_source"), + "exit_code": int_or_blank(first_data_value(data, "exitCode", "exit_code")), + "run_session_name": data_text(data, "sessionName", "session_name"), + "run_session_type": data_text(data, "sessionType", "session_type"), + "save_reason": data_text(data, "reason"), + "doc_block_count_before": int_or_blank(data.get("doc_block_count_before")), + "doc_block_count_after": int_or_blank(data.get("doc_block_count_after")), + "diff_hunks": diff_stats["hunks"], + "diff_inserted_chars": 
diff_stats["inserted_chars"], + "diff_deleted_units": diff_stats["deleted_units"], + "diff_replacement_hunks": diff_stats["replacement_hunks"], + "doc_block_transactions": doc_block_stats["transactions"], + "doc_block_diff_hunks": doc_block_stats["hunks"], + "doc_block_inserted_chars": doc_block_stats["inserted_chars"], + "doc_block_deleted_units": doc_block_stats["deleted_units"], + } + if include_file_paths: + row[RAW_FILE_PATH_FIELD] = file_path + + sortable_rows.append( + { + "row": row, + "timestamp": timestamp, + "sequence_number": int_value(row["sequence_number"]), + "original_index": original_index, + } + ) + + max_timestamp = datetime.max.replace(tzinfo=timezone.utc) + sortable_rows.sort( + key=lambda item: ( + item["timestamp"] is None, + item["timestamp"] or max_timestamp, + item["sequence_number"] if item["sequence_number"] is not None else 10**18, + item["original_index"], + ) + ) + + first_by_session: dict[tuple[str, ...], datetime] = {} + previous_by_session: dict[tuple[str, ...], datetime] = {} + for event_index, item in enumerate(sortable_rows, start=1): + row = item["row"] + row["event_index"] = event_index + timestamp = item["timestamp"] + if timestamp is None: + continue + key = identity_key(row) + first = first_by_session.setdefault(key, timestamp) + row["elapsed_session_seconds"] = seconds_between(first, timestamp) or 0.0 + previous = previous_by_session.get(key) + if previous is not None: + row["gap_seconds"] = seconds_between(previous, timestamp) or 0.0 + previous_by_session[key] = timestamp + + return [item["row"] for item in sortable_rows] + + +def new_session_acc(row: dict[str, Any]) -> dict[str, Any]: + return { + "identity": {field: text_value(row.get(field)) for field in IDENTITY_FIELDS}, + "counts": Counter(), + "event_count": 0, + "first_dt": None, + "last_dt": None, + "gaps": [], + "doc_session_seconds": 0.0, + "doc_to_code_switches": 0, + "code_to_doc_switches": 0, + "compile_success_events": 0, + "compile_failure_events": 0, + "total_prompt_chars": 0, + "file_ids": set(), + "language_ids": set(), + "event_sources": set(), + "latencies": [], + "sequence_numbers": [], + "event_ids": Counter(), + "diff_hunks": 0, + "diff_inserted_chars": 0, + "diff_deleted_units": 0, + "doc_block_transactions": 0, + "doc_block_diff_hunks": 0, + "doc_block_inserted_chars": 0, + "doc_block_deleted_units": 0, + "missing_timestamp_count": 0, + "missing_session_count": 0, + "missing_schema_count": 0, + } + + +def update_time_acc(acc: dict[str, Any], row: dict[str, Any]) -> None: + timestamp = aware_datetime(parse_timestamp(row.get("timestamp"))) + if timestamp is None: + acc["missing_timestamp_count"] += 1 + return + if acc["first_dt"] is None or timestamp < acc["first_dt"]: + acc["first_dt"] = timestamp + if acc["last_dt"] is None or timestamp > acc["last_dt"]: + acc["last_dt"] = timestamp + + +def update_session_acc(acc: dict[str, Any], row: dict[str, Any]) -> None: + event_type = text_value(row.get("event_type")) + acc["event_count"] += 1 + acc["counts"][event_type] += 1 + update_time_acc(acc, row) + + add_number(acc["gaps"], row.get("gap_seconds")) + if event_type == "doc_session": + duration = float_value(row.get("duration_seconds")) + if duration is not None: + acc["doc_session_seconds"] += duration + if event_type == "switch_pane": + if row.get("activity_from") == "doc" and row.get("activity_to") == "code": + acc["doc_to_code_switches"] += 1 + if row.get("activity_from") == "code" and row.get("activity_to") == "doc": + acc["code_to_doc_switches"] += 1 + if event_type == 
"compile_end": + exit_code = int_value(row.get("exit_code")) + if exit_code == 0: + acc["compile_success_events"] += 1 + elif exit_code is not None: + acc["compile_failure_events"] += 1 + + acc["total_prompt_chars"] += int_value(row.get("prompt_length")) or 0 + if row.get("file_id"): + acc["file_ids"].add(text_value(row.get("file_id"))) + if row.get("language_id"): + acc["language_ids"].add(text_value(row.get("language_id"))) + if row.get("event_source"): + acc["event_sources"].add(text_value(row.get("event_source"))) + add_number(acc["latencies"], row.get("client_server_latency_ms")) + + sequence_number = int_value(row.get("sequence_number")) + if sequence_number is not None: + acc["sequence_numbers"].append(sequence_number) + event_id = text_value(row.get("event_id")) + if event_id: + acc["event_ids"][event_id] += 1 + + for field in [ + "diff_hunks", + "diff_inserted_chars", + "diff_deleted_units", + "doc_block_transactions", + "doc_block_diff_hunks", + "doc_block_inserted_chars", + "doc_block_deleted_units", + ]: + acc[field] += int_value(row.get(field)) or 0 + if not row.get("session_id"): + acc["missing_session_count"] += 1 + if not row.get("schema_version"): + acc["missing_schema_count"] += 1 + + +def finalize_session_acc(acc: dict[str, Any]) -> dict[str, Any]: + first_dt = acc["first_dt"] + last_dt = acc["last_dt"] + active_span = seconds_between(first_dt, last_dt) or 0.0 + counts = acc["counts"] + write_events = counts["write_doc"] + counts["write_code"] + sequence_numbers = sorted(set(acc["sequence_numbers"])) + missing_sequence_gaps = sum( + max(0, current - previous - 1) + for previous, current in zip(sequence_numbers, sequence_numbers[1:]) + ) + duplicate_event_ids = sum( + count - 1 for count in acc["event_ids"].values() if count > 1 + ) + notes = [] + if acc["missing_timestamp_count"]: + notes.append(f"missing_timestamp:{acc['missing_timestamp_count']}") + if acc["missing_session_count"]: + notes.append(f"missing_session_id:{acc['missing_session_count']}") + if acc["missing_schema_count"]: + notes.append(f"missing_schema_version:{acc['missing_schema_count']}") + if missing_sequence_gaps: + notes.append(f"missing_sequence_slots:{missing_sequence_gaps}") + if duplicate_event_ids: + notes.append(f"duplicate_event_ids:{duplicate_event_ids}") + + row = { + **acc["identity"], + "event_count": acc["event_count"], + "first_event_at": timestamp_for_csv(first_dt), + "last_event_at": timestamp_for_csv(last_dt), + "active_span_seconds": active_span, + "events_per_minute": (acc["event_count"] / (active_span / 60.0)) + if active_span > 0 + else "", + "mean_gap_seconds": sum(acc["gaps"]) / len(acc["gaps"]) if acc["gaps"] else "", + "max_gap_seconds": max(acc["gaps"]) if acc["gaps"] else "", + "doc_session_seconds": acc["doc_session_seconds"], + "doc_session_share_of_span": acc["doc_session_seconds"] / active_span + if active_span > 0 + else "", + "write_events": write_events, + "doc_write_share": counts["write_doc"] / write_events if write_events else "", + **{f"{event_type}_events": counts[event_type] for event_type in EVENT_FIELDS}, + "doc_to_code_switches": acc["doc_to_code_switches"], + "code_to_doc_switches": acc["code_to_doc_switches"], + "compile_success_events": acc["compile_success_events"], + "compile_failure_events": acc["compile_failure_events"], + "total_prompt_chars": acc["total_prompt_chars"], + "unique_file_count": len(acc["file_ids"]), + "unique_language_count": len(acc["language_ids"]), + "file_ids": semicolon_join(acc["file_ids"]), + "language_ids": 
semicolon_join(acc["language_ids"]), + "event_sources": semicolon_join(acc["event_sources"]), + "diff_hunks": acc["diff_hunks"], + "diff_inserted_chars": acc["diff_inserted_chars"], + "diff_deleted_units": acc["diff_deleted_units"], + "doc_block_transactions": acc["doc_block_transactions"], + "doc_block_diff_hunks": acc["doc_block_diff_hunks"], + "doc_block_inserted_chars": acc["doc_block_inserted_chars"], + "doc_block_deleted_units": acc["doc_block_deleted_units"], + "client_server_latency_ms_mean": sum(acc["latencies"]) / len(acc["latencies"]) + if acc["latencies"] + else "", + "client_server_latency_ms_max": max(acc["latencies"]) if acc["latencies"] else "", + "first_sequence_number": sequence_numbers[0] if sequence_numbers else "", + "last_sequence_number": sequence_numbers[-1] if sequence_numbers else "", + "missing_sequence_gaps": missing_sequence_gaps, + "duplicate_event_ids": duplicate_event_ids, + "data_quality_notes": ";".join(notes), + } + return row + + +def session_summary_rows(event_rows: Iterable[dict[str, Any]]) -> list[dict[str, Any]]: + accs: dict[tuple[str, ...], dict[str, Any]] = {} + for row in event_rows: + key = identity_key(row) + acc = accs.setdefault(key, new_session_acc(row)) + update_session_acc(acc, row) + return [ + finalize_session_acc(acc) + for _, acc in sorted(accs.items(), key=lambda item: item[0]) + ] + + +def new_file_acc(row: dict[str, Any], include_file_paths: bool) -> dict[str, Any]: + acc = { + "identity": {field: text_value(row.get(field)) for field in IDENTITY_FIELDS}, + "file_id": text_value(row.get("file_id")), + "file_hash": text_value(row.get("file_hash")), + "language_id": text_value(row.get("language_id")), + "path_privacy": text_value(row.get("path_privacy")), + "counts": Counter(), + "event_count": 0, + "first_dt": None, + "last_dt": None, + "doc_session_seconds": 0.0, + "line_count_first": "", + "line_count_last": "", + "line_count_max": "", + "doc_block_count_before_min": "", + "doc_block_count_after_last": "", + "classification_bases": set(), + "write_sources": set(), + "diff_hunks": 0, + "diff_inserted_chars": 0, + "diff_deleted_units": 0, + "doc_block_transactions": 0, + "doc_block_diff_hunks": 0, + "doc_block_inserted_chars": 0, + "doc_block_deleted_units": 0, + } + if include_file_paths: + acc[RAW_FILE_PATH_FIELD] = text_value(row.get(RAW_FILE_PATH_FIELD)) + return acc + + +def update_file_acc(acc: dict[str, Any], row: dict[str, Any]) -> None: + event_type = text_value(row.get("event_type")) + acc["event_count"] += 1 + acc["counts"][event_type] += 1 + update_time_acc(acc, row) + + if event_type == "doc_session": + duration = float_value(row.get("duration_seconds")) + if duration is not None: + acc["doc_session_seconds"] += duration + + line_count = int_value(row.get("line_count")) + if line_count is not None: + if acc["line_count_first"] == "": + acc["line_count_first"] = line_count + acc["line_count_last"] = line_count + acc["line_count_max"] = max(int_value(acc["line_count_max"]) or 0, line_count) + + before_count = int_value(row.get("doc_block_count_before")) + if before_count is not None: + current_min = int_value(acc["doc_block_count_before_min"]) + acc["doc_block_count_before_min"] = ( + before_count if current_min is None else min(current_min, before_count) + ) + after_count = int_value(row.get("doc_block_count_after")) + if after_count is not None: + acc["doc_block_count_after_last"] = after_count + + if row.get("classification_basis"): + acc["classification_bases"].add(text_value(row.get("classification_basis"))) + if 
row.get("write_source"): + acc["write_sources"].add(text_value(row.get("write_source"))) + + for field in [ + "diff_hunks", + "diff_inserted_chars", + "diff_deleted_units", + "doc_block_transactions", + "doc_block_diff_hunks", + "doc_block_inserted_chars", + "doc_block_deleted_units", + ]: + acc[field] += int_value(row.get(field)) or 0 + + +def finalize_file_acc(acc: dict[str, Any], include_file_paths: bool) -> dict[str, Any]: + first_dt = acc["first_dt"] + last_dt = acc["last_dt"] + row = { + **acc["identity"], + "file_id": acc["file_id"], + "file_hash": acc["file_hash"], + "language_id": acc["language_id"], + "path_privacy": acc["path_privacy"], + "event_count": acc["event_count"], + "first_event_at": timestamp_for_csv(first_dt), + "last_event_at": timestamp_for_csv(last_dt), + "active_span_seconds": seconds_between(first_dt, last_dt) or 0.0, + "doc_session_seconds": acc["doc_session_seconds"], + "write_doc_events": acc["counts"]["write_doc"], + "write_code_events": acc["counts"]["write_code"], + "save_events": acc["counts"]["save"], + "compile_events": acc["counts"]["compile"], + "compile_end_events": acc["counts"]["compile_end"], + "run_events": acc["counts"]["run"], + "run_end_events": acc["counts"]["run_end"], + "switch_pane_events": acc["counts"]["switch_pane"], + "line_count_first": acc["line_count_first"], + "line_count_last": acc["line_count_last"], + "line_count_max": acc["line_count_max"], + "doc_block_count_before_min": acc["doc_block_count_before_min"], + "doc_block_count_after_last": acc["doc_block_count_after_last"], + "classification_bases": semicolon_join(acc["classification_bases"]), + "write_sources": semicolon_join(acc["write_sources"]), + "diff_hunks": acc["diff_hunks"], + "diff_inserted_chars": acc["diff_inserted_chars"], + "diff_deleted_units": acc["diff_deleted_units"], + "doc_block_transactions": acc["doc_block_transactions"], + "doc_block_diff_hunks": acc["doc_block_diff_hunks"], + "doc_block_inserted_chars": acc["doc_block_inserted_chars"], + "doc_block_deleted_units": acc["doc_block_deleted_units"], + } + if include_file_paths: + row[RAW_FILE_PATH_FIELD] = acc.get(RAW_FILE_PATH_FIELD, "") + return row + + +def file_summary_rows( + event_rows: Iterable[dict[str, Any]], include_file_paths: bool +) -> list[dict[str, Any]]: + accs: dict[tuple[str, ...], dict[str, Any]] = {} + for row in event_rows: + key = ( + *identity_key(row), + text_value(row.get("file_id")), + text_value(row.get("language_id")), + ) + acc = accs.setdefault(key, new_file_acc(row, include_file_paths)) + update_file_acc(acc, row) + return [ + finalize_file_acc(acc, include_file_paths) + for _, acc in sorted(accs.items(), key=lambda item: item[0]) + ] + + +def lifecycle_row( + kind: str, + lifecycle_index: int, + start: dict[str, Any] | None, + end: dict[str, Any] | None, +) -> dict[str, Any]: + source = start or end or {} + start_dt = aware_datetime(parse_timestamp(start.get("timestamp") if start else None)) + end_dt = aware_datetime(parse_timestamp(end.get("timestamp") if end else None)) + notes = [] + if start is None: + notes.append("missing_start") + if end is None: + notes.append("missing_end") + return { + **{field: text_value(source.get(field)) for field in IDENTITY_FIELDS}, + "lifecycle_kind": kind, + "lifecycle_index": lifecycle_index, + "completed": 1 if end is not None else 0, + "start_event_type": text_value(start.get("event_type")) if start else "", + "end_event_type": text_value(end.get("event_type")) if end else "", + "start_at": timestamp_for_csv(start_dt), + "end_at": 
timestamp_for_csv(end_dt), + "duration_seconds": seconds_between(start_dt, end_dt) + if start_dt is not None and end_dt is not None + else "", + "start_event_id": text_value(start.get("event_id")) if start else "", + "end_event_id": text_value(end.get("event_id")) if end else "", + "start_file_id": text_value(start.get("file_id")) if start else "", + "end_file_id": text_value(end.get("file_id")) if end else "", + "language_id": text_value(source.get("language_id")), + "command": text_value(source.get("command")), + "data_quality_notes": ";".join(notes), + } + + +def task_lifecycle_rows(event_rows: Iterable[dict[str, Any]]) -> list[dict[str, Any]]: + starts: dict[tuple[tuple[str, ...], str], deque[dict[str, Any]]] = defaultdict(deque) + lifecycle_indexes: Counter[tuple[tuple[str, ...], str]] = Counter() + rows: list[dict[str, Any]] = [] + + for row in event_rows: + event_type = text_value(row.get("event_type")) + if event_type in LIFECYCLE_PAIRS: + kind, _ = LIFECYCLE_PAIRS[event_type] + starts[(identity_key(row), kind)].append(row) + continue + if event_type not in LIFECYCLE_END_TYPES: + continue + + kind, _start_type = LIFECYCLE_END_TYPES[event_type] + key = (identity_key(row), kind) + start = starts[key].popleft() if starts[key] else None + lifecycle_indexes[key] += 1 + rows.append(lifecycle_row(kind, lifecycle_indexes[key], start, row)) + + for key, queue in sorted(starts.items(), key=lambda item: item[0]): + identity, kind = key + while queue: + start = queue.popleft() + lifecycle_indexes[(identity, kind)] += 1 + rows.append(lifecycle_row(kind, lifecycle_indexes[(identity, kind)], start, None)) + + rows.sort( + key=lambda row: ( + row["user_id"], + row["session_id"], + row["lifecycle_kind"], + row["lifecycle_index"], + ) + ) + return rows + + +def data_dictionary_rows(fieldsets: dict[str, list[str]]) -> list[dict[str, str]]: + rows = [] + for dataset, fields in fieldsets.items(): + for field in fields: + rows.append( + { + "dataset": dataset, + "column": field, + "description": FIELD_DESCRIPTIONS.get(field, ""), + } + ) + return rows + + +def export_metrics(event_rows: list[dict[str, Any]], output_path: Path) -> None: + write_csv(output_path, SESSION_SUMMARY_FIELDS, session_summary_rows(event_rows)) + + +def export_analysis_dataset( + event_rows: list[dict[str, Any]], dataset_dir: Path, include_file_paths: bool +) -> list[Path]: + dataset_dir.mkdir(parents=True, exist_ok=True) + event_fields = fields_with_optional_file_path(EVENT_ROW_FIELDS, include_file_paths) + file_fields = fields_with_optional_file_path(FILE_SUMMARY_FIELDS, include_file_paths) + fieldsets = { + "events.csv": event_fields, + "session_summary.csv": SESSION_SUMMARY_FIELDS, + "file_summary.csv": file_fields, + "task_lifecycle.csv": TASK_LIFECYCLE_FIELDS, + } + outputs = [ + dataset_dir / "events.csv", + dataset_dir / "session_summary.csv", + dataset_dir / "file_summary.csv", + dataset_dir / "task_lifecycle.csv", + dataset_dir / "data_dictionary.csv", + ] + write_csv(outputs[0], event_fields, event_rows) + write_csv(outputs[1], SESSION_SUMMARY_FIELDS, session_summary_rows(event_rows)) + write_csv(outputs[2], file_fields, file_summary_rows(event_rows, include_file_paths)) + write_csv(outputs[3], TASK_LIFECYCLE_FIELDS, task_lifecycle_rows(event_rows)) + write_csv( + outputs[4], + ["dataset", "column", "description"], + data_dictionary_rows(fieldsets), + ) + return outputs + + +def main() -> None: + parser = argparse.ArgumentParser() + parser.add_argument( + "input", + nargs="?", + type=Path, + help="Optional capture JSONL 
fallback file. Omit to read PostgreSQL.", + ) + parser.add_argument( + "--out", + type=Path, + default=None, + help=( + "Output session-summary CSV file. Defaults to a timestamped " + "capture-metrics-YYYYMMDD-HHMMSS.csv file when --dataset-dir is omitted." + ), + ) + parser.add_argument( + "--dataset-dir", + nargs="?", + const=Path("__DEFAULT_CAPTURE_ANALYSIS_DIR__"), + type=Path, + default=None, + help=( + "Write a richer analysis dataset directory containing events.csv, " + "session_summary.csv, file_summary.csv, task_lifecycle.csv, and " + "data_dictionary.csv. If no path is supplied, defaults to " + "capture-analysis-YYYYMMDD-HHMMSS." + ), + ) + parser.add_argument( + "--include-file-paths", + action="store_true", + help=( + "Include raw captured file paths in event/file exports. By default, " + "the dataset uses file_id/file_hash only." + ), + ) + parser.add_argument( + "--db", + action="store_true", + help="Read PostgreSQL. This is the default when no JSONL input is supplied.", + ) + parser.add_argument( + "--config", + type=Path, + default=Path("capture_config.json"), + help="Capture DB config JSON path. Ignored when CODECHAT_CAPTURE_* env vars are set.", + ) + parser.add_argument( + "--table", + default="events", + help='Capture events table name. Defaults to "events".', + ) + args = parser.parse_args() + + if args.db and args.input is not None: + parser.error("do not pass a JSONL input path with --db") + + events = ( + iter_jsonl_events(args.input) + if args.input is not None + else iter_db_events(load_db_config(args.config), args.table) + ) + event_rows = normalize_event_rows(events, include_file_paths=args.include_file_paths) + + wrote_outputs = False + if args.out is not None or args.dataset_dir is None: + output_path = args.out or default_output_path() + export_metrics(event_rows, output_path) + print(f"Wrote {output_path}") + wrote_outputs = True + + if args.dataset_dir is not None: + dataset_dir = ( + default_dataset_dir() + if args.dataset_dir == Path("__DEFAULT_CAPTURE_ANALYSIS_DIR__") + else args.dataset_dir + ) + output_paths = export_analysis_dataset( + event_rows, dataset_dir, args.include_file_paths + ) + print(f"Wrote analysis dataset to {dataset_dir}") + for output_path in output_paths: + print(f" {output_path.name}") + wrote_outputs = True + + if not wrote_outputs: + raise SystemExit("No outputs were requested.") + + +def default_output_path() -> Path: + timestamp = datetime.now(timezone.utc).astimezone().strftime("%Y%m%d-%H%M%S") + return Path(f"capture-metrics-{timestamp}.csv") + + +def default_dataset_dir() -> Path: + timestamp = datetime.now(timezone.utc).astimezone().strftime("%Y%m%d-%H%M%S") + return Path(f"capture-analysis-{timestamp}") + + +if __name__ == "__main__": + main() diff --git a/server/src/capture.rs b/server/src/capture.rs index 3f8f7c15..dc072bf7 100644 --- a/server/src/capture.rs +++ b/server/src/capture.rs @@ -13,227 +13,1089 @@ // You should have received a copy of the GNU General Public License along with // the CodeChat Editor. If not, see // [http://www.gnu.org/licenses](http://www.gnu.org/licenses). -/// # `Capture.rs` -- Capture CodeChat Editor Events -// ## Submodules + +// `capture.rs` -- Capture CodeChat Editor Events +// ============================================================================ +// +// This module provides an asynchronous event capture facility backed by a +// PostgreSQL database. 
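For reference, the exporter defined above is typically run in one of two modes. The invocation sketch below assumes the script is saved as `server/scripts/export_capture_metrics.py`; that filename is a placeholder, since this excerpt does not show the script's diff header.

```python
# Invocation sketch for the capture exporter above. The script path is a
# placeholder (the real filename is not shown in this excerpt).
import subprocess

SCRIPT = "server/scripts/export_capture_metrics.py"

# Mode 1: read PostgreSQL (capture_config.json or CODECHAT_CAPTURE_* env vars)
# and write a timestamped capture-metrics-YYYYMMDD-HHMMSS.csv session summary.
subprocess.run(["python", SCRIPT], check=True)

# Mode 2: read the local JSONL fallback log instead of the database and write
# the richer analysis dataset (events.csv, session_summary.csv,
# file_summary.csv, task_lifecycle.csv, data_dictionary.csv) into a directory.
subprocess.run(
    ["python", SCRIPT, "capture-events-fallback.jsonl", "--dataset-dir"],
    check=True,
)
```

Passing `--dataset-dir` without a path uses the default `capture-analysis-YYYYMMDD-HHMMSS` directory, matching `default_dataset_dir()` above.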
It is designed to support the dissertation study by +// recording process-level data such as: +// +// * Frequency and timing of writing entries +// * Edits to documentation and code +// * Switches between documentation and coding activity +// * Duration of engagement with reflective writing +// * Save, compile, and run events +// +// Events are sent from the client (browser and/or VS Code extension) to the +// server as JSON. The server enqueues events into an asynchronous worker which +// performs batched inserts into the `events` table. +// +// Database schema +// ---------------------------------------------------------------------------- // -// ## Imports +// The canonical schema and migration DDL lives in +// `server/scripts/capture_events_schema.sql`. The important analysis columns +// are: // -// Standard library -use indoc::indoc; -use std::fs; -use std::io; -use std::path::Path; -use std::sync::Arc; - -// Third-party -use chrono::Local; -use log::{error, info}; +// ```sql +// event_id, sequence_number, schema_version, +// user_id, session_id, event_source, language_id, file_hash, file_path, +// path_privacy, event_type, timestamp, client_timestamp_ms, +// client_tz_offset_min, server_timestamp_ms, data +// ``` +// +// * `user_id` – pseudonymous participant UUID. Course, group, assignment, and +// study condition are intentionally joined later from researcher-managed +// participant/date mappings instead of being configured by students. +// * `session_id`, `event_id`, `sequence_number`, `schema_version` – event +// integrity and versioning metadata. +// * `file_path` – logical path of the file being edited. +// * `file_hash` – privacy-preserving SHA-256 hash of the file path. +// * `event_type` – coarse event type (see `CaptureEventType` below). +// * `timestamp` – RFC3339 timestamp (in UTC). +// * `data` – JSONB payload with event-specific details. + +use std::{ + env, + fs::{self, OpenOptions}, + io::{self, Write}, + path::{Path, PathBuf}, + sync::{Arc, Mutex}, + thread, +}; + +use chrono::{DateTime, Utc}; +use log::{debug, error, info, warn}; use serde::{Deserialize, Serialize}; -use tokio::sync::Mutex; +use std::error::Error; +use tokio::sync::mpsc; use tokio_postgres::{Client, NoTls}; +use ts_rs::TS; + +/// Canonical event types. Keep the serialized strings stable for analysis. +#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, TS)] +#[serde(rename_all = "snake_case")] +#[ts(export)] +pub enum CaptureEventType { + /// Server-classified edit to documentation/prose. + WriteDoc, + /// Server-classified edit to executable source code. + WriteCode, + /// Editor activity moved between documentation and code contexts. + SwitchPane, + /// Duration summary for a documentation/prose activity interval. + DocSession, + /// File save observed by the editor. + Save, + /// Compile/build task started. + Compile, + /// Debug/run session started. + Run, + /// Capture or activity session started. + SessionStart, + /// Capture or activity session ended. + SessionEnd, + /// Compile/build task ended. + CompileEnd, + /// Debug/run session ended. + RunEnd, + /// Study task started by an external study workflow. + TaskStart, + /// Study task submitted by an external study workflow. + TaskSubmit, + /// Debugging study task started by an external study workflow. + DebugTaskStart, + /// Debugging study task submitted by an external study workflow. + DebugTaskSubmit, + /// Collaboration handoff interval started. + HandoffStart, + /// Collaboration handoff interval ended. 
+ HandoffEnd, + /// A built-in reflection prompt was inserted into the active editor. + ReflectionPromptInserted, +} + +impl CaptureEventType { + pub const fn as_str(self) -> &'static str { + match self { + Self::WriteDoc => "write_doc", + Self::WriteCode => "write_code", + Self::SwitchPane => "switch_pane", + Self::DocSession => "doc_session", + Self::Save => "save", + Self::Compile => "compile", + Self::Run => "run", + Self::SessionStart => "session_start", + Self::SessionEnd => "session_end", + Self::CompileEnd => "compile_end", + Self::RunEnd => "run_end", + Self::TaskStart => "task_start", + Self::TaskSubmit => "task_submit", + Self::DebugTaskStart => "debug_task_start", + Self::DebugTaskSubmit => "debug_task_submit", + Self::HandoffStart => "handoff_start", + Self::HandoffEnd => "handoff_end", + Self::ReflectionPromptInserted => "reflection_prompt_inserted", + } + } +} + +impl std::fmt::Display for CaptureEventType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str(self.as_str()) + } +} + +pub mod event_types { + use super::CaptureEventType; -// Local + pub const WRITE_DOC: CaptureEventType = CaptureEventType::WriteDoc; + pub const WRITE_CODE: CaptureEventType = CaptureEventType::WriteCode; + pub const SWITCH_PANE: CaptureEventType = CaptureEventType::SwitchPane; + pub const DOC_SESSION: CaptureEventType = CaptureEventType::DocSession; + pub const SAVE: CaptureEventType = CaptureEventType::Save; + pub const COMPILE: CaptureEventType = CaptureEventType::Compile; + pub const RUN: CaptureEventType = CaptureEventType::Run; + pub const SESSION_START: CaptureEventType = CaptureEventType::SessionStart; + pub const SESSION_END: CaptureEventType = CaptureEventType::SessionEnd; + pub const COMPILE_END: CaptureEventType = CaptureEventType::CompileEnd; + pub const RUN_END: CaptureEventType = CaptureEventType::RunEnd; + pub const TASK_START: CaptureEventType = CaptureEventType::TaskStart; + pub const TASK_SUBMIT: CaptureEventType = CaptureEventType::TaskSubmit; + pub const DEBUG_TASK_START: CaptureEventType = CaptureEventType::DebugTaskStart; + pub const DEBUG_TASK_SUBMIT: CaptureEventType = CaptureEventType::DebugTaskSubmit; + pub const HANDOFF_START: CaptureEventType = CaptureEventType::HandoffStart; + pub const HANDOFF_END: CaptureEventType = CaptureEventType::HandoffEnd; + pub const REFLECTION_PROMPT_INSERTED: CaptureEventType = + CaptureEventType::ReflectionPromptInserted; +} -/* ## The Event Structure: +/// Configuration used to construct the PostgreSQL connection string. +/// +/// You can populate this from a JSON file or environment variables in +/// `main.rs`; this module stays agnostic. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CaptureConfig { + /// PostgreSQL host name or address. + pub host: String, + /// Optional PostgreSQL port. Uses libpq's default when omitted. + #[serde(default)] + pub port: Option, + /// PostgreSQL user name. + pub user: String, + /// PostgreSQL password. Never included in redacted summaries. + pub password: String, + /// PostgreSQL database name. + pub dbname: String, + /// Optional: application-level identifier for this deployment (e.g., course + /// code or semester). Not stored in the DB directly; callers can embed this + /// in `data` if desired. + #[serde(default)] + pub app_id: Option, + /// Local JSONL file used when PostgreSQL is unavailable. + #[serde(default)] + pub fallback_path: Option, +} - The `Event` struct represents an event to be stored in the database. 
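Because `as_str` above fixes the serialized names, downstream analysis code can hard-code the same list. A small Python cross-check, mirroring the Rust mapping by hand, is sketched below; it flags any `event_type` in an exported `events.csv` that the enum does not define.

```python
# Canonical event_type strings kept stable by CaptureEventType::as_str. This
# mirrors the Rust mapping by hand; update it if the enum ever changes.
import csv

CANONICAL_EVENT_TYPES = frozenset({
    "write_doc", "write_code", "switch_pane", "doc_session", "save",
    "compile", "run", "session_start", "session_end", "compile_end",
    "run_end", "task_start", "task_submit", "debug_task_start",
    "debug_task_submit", "handoff_start", "handoff_end",
    "reflection_prompt_inserted",
})


def unknown_event_types(events_csv_path: str) -> set[str]:
    """Return event_type values in an exported events.csv not defined by the enum."""
    with open(events_csv_path, encoding="utf-8", newline="") as csv_file:
        seen = {row["event_type"] for row in csv.DictReader(csv_file)}
    return seen - CANONICAL_EVENT_TYPES
```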
+impl CaptureConfig { + /// Build a libpq-style connection string. + pub fn to_conn_str(&self) -> String { + let mut parts = vec![ + format!("host={}", self.host), + format!("user={}", self.user), + format!("password={}", self.password), + format!("dbname={}", self.dbname), + ]; + if let Some(port) = self.port { + parts.push(format!("port={port}")); + } + parts.join(" ") + } + + /// Return a human-readable summary that never includes the password. + pub fn redacted_summary(&self) -> String { + format!( + "host={}, port={:?}, user={}, dbname={}, app_id={:?}, fallback_path={:?}", + self.host, self.port, self.user, self.dbname, self.app_id, self.fallback_path + ) + } - Fields: - `user_id`: The ID of the user associated with the event. - - `event_type`: The type of event (e.g., "keystroke", "file_open"). - `data`: - Optional additional data associated with the event. + /// Build capture configuration from environment variables. If no capture + /// host is configured, return `Ok(None)` so callers can fall back to a file. + pub fn from_env() -> Result, String> { + let Some(host) = env_var_trimmed("CODECHAT_CAPTURE_HOST") else { + return Ok(None); + }; - ### Example + let port = match env_var_trimmed("CODECHAT_CAPTURE_PORT") { + Some(port) => Some(port.parse::().map_err(|err| { + format!("CODECHAT_CAPTURE_PORT must be a valid port number: {err}") + })?), + None => None, + }; - let event = Event { user_id: "user123".to_string(), event_type: - "keystroke".to_string(), data: Some("Pressed key A".to_string()), }; -*/ + Ok(Some(Self { + host, + port, + user: required_env_var("CODECHAT_CAPTURE_USER")?, + password: required_env_var("CODECHAT_CAPTURE_PASSWORD")?, + dbname: required_env_var("CODECHAT_CAPTURE_DBNAME")?, + app_id: env_var_trimmed("CODECHAT_CAPTURE_APP_ID"), + fallback_path: env_var_trimmed("CODECHAT_CAPTURE_FALLBACK_PATH").map(PathBuf::from), + })) + } +} + +fn env_var_trimmed(name: &str) -> Option { + env::var(name) + .ok() + .map(|value| value.trim().to_string()) + .filter(|value| !value.is_empty()) +} -#[derive(Deserialize, Debug)] -pub struct Event { +fn required_env_var(name: &str) -> Result { + env_var_trimmed(name).ok_or_else(|| format!("{name} is required when capture env is used")) +} + +/// Capture worker health, exposed through `/capture/status` and the VS Code +/// status item. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, TS)] +#[ts(export)] +pub struct CaptureStatus { + /// True when the capture worker is configured and accepting events. + pub enabled: bool, + /// Worker state: `starting`, `database`, `fallback`, or `disabled`. + pub state: String, + /// Number of events accepted into the worker queue. + pub queued_events: u64, + /// Number of events inserted into PostgreSQL. + pub persisted_events: u64, + /// Number of events written to the local JSONL fallback. + pub fallback_events: u64, + /// Number of failed enqueue or fallback-write attempts. + pub failed_events: u64, + /// Most recent capture error, if one is known. + pub last_error: Option, + /// Local JSONL fallback path when fallback capture is configured. 
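As a side note on `to_conn_str` above: the connection string uses libpq's space-separated keyword/value format, with `port` appended only when configured. A minimal Python mirror, useful when debugging what the server hands to `tokio_postgres`, might look like this (a sketch, not shared code):

```python
# Python mirror of CaptureConfig::to_conn_str: libpq keyword/value format,
# with port omitted when it is not configured.
def to_conn_str(host: str, user: str, password: str, dbname: str,
                port: int | None = None) -> str:
    parts = [f"host={host}", f"user={user}", f"password={password}",
             f"dbname={dbname}"]
    if port is not None:
        parts.append(f"port={port}")
    return " ".join(parts)


assert to_conn_str("localhost", "alice", "secret", "codechat_capture", 5432) \
    == "host=localhost user=alice password=secret dbname=codechat_capture port=5432"
```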
+ pub fallback_path: Option, +} + +impl CaptureStatus { + pub fn disabled() -> Self { + Self { + enabled: false, + state: "disabled".to_string(), + queued_events: 0, + persisted_events: 0, + fallback_events: 0, + failed_events: 0, + last_error: None, + fallback_path: None, + } + } + + fn starting(fallback_path: Option) -> Self { + Self { + enabled: true, + state: "starting".to_string(), + queued_events: 0, + persisted_events: 0, + fallback_events: 0, + failed_events: 0, + last_error: None, + fallback_path, + } + } +} + +/// The in-memory representation of a single capture event. +#[derive(Debug, Clone)] +pub struct CaptureEvent { + /// Pseudonymous participant UUID supplied by the extension. pub user_id: String, - pub event_type: String, - pub data: Option, + /// Raw file path when path hashing is disabled. + pub file_path: Option, + /// Canonical type of the captured event. + pub event_type: CaptureEventType, + /// When the event occurred, in UTC. + pub timestamp: DateTime, + /// Event-specific payload, stored as JSON text in the DB. + pub data: serde_json::Value, +} + +impl CaptureEvent { + /// Convenience constructor when the caller already has a timestamp. + pub fn new( + user_id: String, + file_path: Option, + event_type: CaptureEventType, + timestamp: DateTime, + data: serde_json::Value, + ) -> Self { + Self { + user_id, + file_path, + event_type, + timestamp, + data, + } + } + + /// Convenience constructor which uses the current time. + pub fn now( + user_id: String, + file_path: Option, + event_type: CaptureEventType, + data: serde_json::Value, + ) -> Self { + Self::new(user_id, file_path, event_type, Utc::now(), data) + } +} + +/// Internal worker message. Identical to `CaptureEvent`, but separated in case +/// we later want to add batching / flush control signals. +type WorkerMsg = CaptureEvent; + +/// Handle used by the rest of the server to record events. +/// +/// Cloning this handle is cheap: it only clones an `mpsc::UnboundedSender`. +#[derive(Clone)] +pub struct EventCapture { + tx: mpsc::UnboundedSender, + status: Arc>, +} + +impl EventCapture { + /// Create a new `EventCapture` instance and spawn a background worker which + /// consumes events and inserts them into PostgreSQL. + /// + /// This function is synchronous so it can be called from non-async server + /// setup code. It spawns an async task internally which performs the + /// database connection and event processing. + pub fn new(mut config: CaptureConfig) -> Result { + let fallback_path = config + .fallback_path + .get_or_insert_with(|| PathBuf::from("capture-events-fallback.jsonl")) + .clone(); + let conn_str = config.to_conn_str(); + let status = Arc::new(Mutex::new(CaptureStatus::starting(Some( + fallback_path.clone(), + )))); + + // High-level DB connection details (no password). + info!( + "Capture: preparing PostgreSQL connection ({})", + config.redacted_summary() + ); + + let (tx, mut rx) = mpsc::unbounded_channel::(); + let status_worker = status.clone(); + + // Create a dedicated runtime so capture can be started from sync code + // before the Actix/Tokio server runtime exists. 
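The overall shape of this worker, independent of the Tokio details, is a single consumer draining an unbounded queue and falling back to an append-only JSONL file when persistence fails. A rough, self-contained Python analogue of that pattern follows; `failing_insert` is an invented stand-in, not the server's insert path.

```python
# Rough analogue of the EventCapture worker: producers enqueue events without
# blocking; one worker thread persists each event or appends it to a JSONL
# fallback file. `failing_insert` is an invented stand-in for the DB insert.
import json
import queue
import threading
from datetime import datetime, timezone


def failing_insert(event: dict) -> None:
    raise RuntimeError("database unavailable")


def run_capture_worker(events, fallback_path, insert_into_db):
    while True:
        event = events.get()
        if event is None:  # Sentinel: stop the worker.
            return
        try:
            insert_into_db(event)
        except Exception:
            # Mirror the fallback record shape: wrap the event and note when
            # the fallback line was written.
            record = {
                "fallback_timestamp": datetime.now(timezone.utc).isoformat(),
                "event": event,
            }
            with open(fallback_path, "a", encoding="utf-8") as fallback_file:
                fallback_file.write(json.dumps(record) + "\n")


events_queue: queue.Queue = queue.Queue()
worker = threading.Thread(
    target=run_capture_worker,
    args=(events_queue, "capture-events-fallback.jsonl", failing_insert),
)
worker.start()
events_queue.put({"user_id": "user123", "event_type": "save", "data": {}})
events_queue.put(None)  # Shut the sketch down after one event.
worker.join()
```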
+ thread::Builder::new() + .name("codechat-capture".to_string()) + .spawn(move || { + let runtime = tokio::runtime::Builder::new_multi_thread() + .worker_threads(1) + .enable_all() + .build() + .expect("Capture: failed to build Tokio runtime"); + + runtime.block_on(async move { + info!("Capture: attempting to connect to PostgreSQL."); + + match tokio_postgres::connect(&conn_str, NoTls).await { + Ok((client, connection)) => { + info!("Capture: successfully connected to PostgreSQL."); + update_status(&status_worker, |status| { + status.state = "database".to_string(); + status.last_error = None; + }); + + // Drive the connection in its own task. + let status_connection = status_worker.clone(); + tokio::spawn(async move { + if let Err(err) = connection.await { + error!("Capture PostgreSQL connection error: {err}"); + update_status(&status_connection, |status| { + status.state = "fallback".to_string(); + status.last_error = Some(format!( + "PostgreSQL connection error: {err}" + )); + }); + } + }); + + // Main event loop: pull events off the channel and insert + // them into the database. + while let Some(event) = rx.recv().await { + debug!( + "Capture: inserting event: type={}, user_id={}, file_path={:?}", + event.event_type, event.user_id, event.file_path + ); + + if let Err(err) = insert_event(&client, &event).await { + error!( + "Capture: FAILED to insert event (type={}, user_id={}): {err}", + event.event_type, event.user_id + ); + update_status(&status_worker, |status| { + status.state = "fallback".to_string(); + status.last_error = Some(format!( + "PostgreSQL insert failed: {err}" + )); + }); + write_event_to_fallback( + &fallback_path, + &event, + &status_worker, + Some(format!("PostgreSQL insert failed: {err}")), + ); + } else { + update_status(&status_worker, |status| { + status.persisted_events += 1; + if status.state != "database" { + status.state = "database".to_string(); + } + }); + debug!("Capture: event insert successful."); + } + } + + info!("Capture: event channel closed; background worker exiting."); + } + + Err(err) => { + let ctx = format!( + "Capture: FAILED to connect to PostgreSQL (host={}, dbname={}, user={})", + config.host, config.dbname, config.user + ); + + log_pg_connect_error(&ctx, &err); + + update_status(&status_worker, |status| { + status.state = "fallback".to_string(); + status.last_error = Some(format!( + "PostgreSQL connection failed: {err}" + )); + }); + + warn!( + "Capture: writing pending events to fallback JSONL at {:?}.", + fallback_path + ); + while let Some(event) = rx.recv().await { + write_event_to_fallback( + &fallback_path, + &event, + &status_worker, + Some("PostgreSQL connection unavailable".to_string()), + ); + } + warn!("Capture: event channel closed; fallback worker exiting."); + } + } + }); + }) + .map_err(|err| { + io::Error::other(format!("Capture: failed to start worker thread: {err}")) + })?; + + Ok(Self { tx, status }) + } + + /// Enqueue an event for insertion. This is non-blocking. 
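What gets enqueued is a `CaptureEvent` whose `data` payload carries the analysis metadata that the exporter later flattens into columns. Two representative payloads are sketched below; every value is an invented placeholder, and only a subset of possible `data` keys is shown.

```python
# Representative capture events as the worker receives them. All values are
# invented placeholders; the data keys shown are ones the exporter's
# normalize_event_rows understands.
switch_pane_event = {
    "user_id": "3f6e9d2c-0000-0000-0000-000000000000",  # pseudonymous UUID
    "file_path": None,  # omitted when path hashing is enabled
    "event_type": "switch_pane",
    "timestamp": "2025-03-04T16:21:09Z",
    "data": {
        "session_id": "session-placeholder",
        "event_id": "event-placeholder-1",
        "sequence_number": 17,
        "schema_version": 1,
        "event_source": "vscode",
        "language_id": "rust",
        "file_hash": "sha256-placeholder",
        "path_privacy": "hashed",
        "client_timestamp_ms": 1741105269000,
        "client_tz_offset_min": -360,
        "from": "doc",
        "to": "code",
    },
}

doc_session_event = {
    "user_id": switch_pane_event["user_id"],
    "file_path": None,
    "event_type": "doc_session",
    "timestamp": "2025-03-04T16:24:40Z",
    "data": {
        "session_id": "session-placeholder",
        "event_id": "event-placeholder-2",
        "sequence_number": 18,
        "schema_version": 1,
        "event_source": "vscode",
        "duration_seconds": 211.0,
    },
}
```

The exporter maps `from`/`to` onto `activity_from`/`activity_to` and sums `duration_seconds` into `doc_session_seconds`, so these are the keys worth getting right in new clients.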
+ pub fn log(&self, event: CaptureEvent) { + debug!( + "Capture: queueing event: type={}, user_id={}, file_path={:?}", + event.event_type, event.user_id, event.file_path + ); + + if let Err(err) = self.tx.send(event) { + error!("Capture: FAILED to enqueue capture event: {err}"); + update_status(&self.status, |status| { + status.failed_events += 1; + status.last_error = Some(format!("Failed to enqueue capture event: {err}")); + }); + } else { + update_status(&self.status, |status| { + status.queued_events += 1; + }); + } + } + + pub fn status(&self) -> CaptureStatus { + self.status + .lock() + .map(|status| status.clone()) + .unwrap_or_else(|_| { + let mut status = CaptureStatus::disabled(); + status.last_error = Some("Capture status lock is poisoned".to_string()); + status + }) + } } -/* - ## The Config Structure: +fn update_status(status: &Arc>, f: impl FnOnce(&mut CaptureStatus)) { + match status.lock() { + Ok(mut guard) => f(&mut guard), + Err(err) => error!("Capture: unable to update status: {err}"), + } +} - The `Config` struct represents the database connection parameters read from - `config.json`. +fn write_event_to_fallback( + fallback_path: &Path, + event: &CaptureEvent, + status: &Arc>, + last_error: Option, +) { + match append_fallback_event(fallback_path, event) { + Ok(()) => update_status(status, |status| { + status.fallback_events += 1; + status.last_error = last_error; + }), + Err(err) => { + error!( + "Capture: FAILED to write fallback event to {:?}: {err}", + fallback_path + ); + update_status(status, |status| { + status.failed_events += 1; + status.last_error = Some(format!("Fallback write failed: {err}")); + }); + } + } +} - Fields: - `db_host`: The hostname or IP address of the database server. - - `db_user`: The username for the database connection. - `db_password`: The - password for the database connection. - `db_name`: The name of the database. +fn append_fallback_event(fallback_path: &Path, event: &CaptureEvent) -> io::Result<()> { + if let Some(parent) = fallback_path.parent() + && !parent.as_os_str().is_empty() + { + fs::create_dir_all(parent)?; + } - let config = Config { db_host: "localhost".to_string(), db_user: - "your_db_user".to_string(), db_password: "your_db_password".to_string(), - db_name: "your_db_name".to_string(), }; -*/ + let mut file = OpenOptions::new() + .create(true) + .append(true) + .open(fallback_path)?; + let record = serde_json::json!({ + "fallback_timestamp": Utc::now().to_rfc3339(), + "event": { + "user_id": event.user_id, + "file_path": event.file_path, + "event_type": event.event_type.as_str(), + "timestamp": event.timestamp.to_rfc3339(), + "data": event.data, + } + }); + writeln!(file, "{record}")?; + Ok(()) +} + +fn log_pg_connect_error(context: &str, err: &tokio_postgres::Error) { + // If Postgres returned a structured DbError, log it ONCE and bail. + if let Some(db) = err.as_db_error() { + // Example: 28P01 = invalid\_password + error!( + "{context}: PostgreSQL {} (SQLSTATE {})", + db.message(), + db.code().code() + ); + + if let Some(detail) = db.detail() { + error!("{context}: detail: {detail}"); + } + if let Some(hint) = db.hint() { + error!("{context}: hint: {hint}"); + } + return; + } + + // Otherwise, try to find an underlying std::io::Error (refused, timed out, + // DNS, etc.) 
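On the reporting side, the counters maintained through `update_status` end up in the `CaptureStatus` snapshot returned by `status()` and surfaced via `/capture/status`. A sketch of how a client such as the VS Code status item might render that snapshot, using the field names defined above with invented sample values:

```python
# Sketch of rendering a CaptureStatus snapshot. The JSON keys match the Rust
# struct; the sample values are invented.
def summarize_capture_status(status: dict) -> str:
    if not status.get("enabled", False):
        return "Capture disabled"
    line = (f"Capture {status['state']}: "
            f"{status['queued_events']} queued, "
            f"{status['persisted_events']} persisted, "
            f"{status['fallback_events']} fallback, "
            f"{status['failed_events']} failed")
    if status.get("last_error"):
        line += f" (last error: {status['last_error']})"
    return line


print(summarize_capture_status({
    "enabled": True, "state": "fallback",
    "queued_events": 42, "persisted_events": 0,
    "fallback_events": 42, "failed_events": 0,
    "last_error": "PostgreSQL connection failed: connection refused",
    "fallback_path": "capture-events-fallback.jsonl",
}))
```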
+ let mut current: &(dyn Error + 'static) = err; + while let Some(source) = current.source() { + if let Some(ioe) = source.downcast_ref::() { + error!( + "{context}: I/O error kind={:?} raw_os_error={:?} msg={}", + ioe.kind(), + ioe.raw_os_error(), + ioe + ); + return; + } + current = source; + } + + // Fallback: log once (Display) + error!("{context}: {err}"); +} -#[derive(Deserialize, Serialize, Debug)] -pub struct Config { - pub db_ip: String, - pub db_user: String, - pub db_password: String, - pub db_name: String, +fn capture_data_str(data: &serde_json::Value, names: &[&str]) -> Option { + names.iter().find_map(|name| { + data.get(*name) + .and_then(serde_json::Value::as_str) + .map(str::trim) + .filter(|value| !value.is_empty()) + .map(str::to_string) + }) } -/* +fn capture_data_i64(data: &serde_json::Value, name: &str) -> Option { + let value = data.get(name)?; + value + .as_i64() + .or_else(|| value.as_str()?.trim().parse::().ok()) +} - ## The EventCapture Structure: +fn capture_data_i32(data: &serde_json::Value, name: &str) -> Option { + capture_data_i64(data, name).and_then(|value| i32::try_from(value).ok()) +} - The `EventCapture` struct provides methods to interact with the database. It -holds a `tokio_postgres::Client` for database operations. +fn should_retry_legacy_insert(err: &tokio_postgres::Error) -> bool { + matches!( + err.code().map(|code| code.code()), + Some("42703" | "42P01" | "42804") + ) +} -### Usage Example +/// Insert a single event into the `events` table. +async fn insert_event(client: &Client, event: &CaptureEvent) -> Result { + match insert_rich_event(client, event).await { + Ok(rows) => Ok(rows), + Err(err) if should_retry_legacy_insert(&err) => { + warn!( + "Capture: rich events insert failed against the current schema; retrying legacy insert: {err}" + ); + insert_legacy_event(client, event).await + } + Err(err) => Err(err), + } +} -#\[tokio::main\] async fn main() -> Result<(), Box> { +async fn insert_rich_event( + client: &Client, + event: &CaptureEvent, +) -> Result { + let timestamp = event.timestamp.to_rfc3339(); + let event_id = capture_data_str(&event.data, &["event_id"]); + let sequence_number = capture_data_i64(&event.data, "sequence_number"); + let schema_version = capture_data_i32(&event.data, "schema_version"); + let session_id = capture_data_str(&event.data, &["session_id"]); + let event_source = capture_data_str(&event.data, &["event_source"]); + let language_id = capture_data_str(&event.data, &["language_id", "languageId"]); + let file_hash = capture_data_str(&event.data, &["file_hash"]); + let path_privacy = capture_data_str(&event.data, &["path_privacy"]); + let client_timestamp_ms = capture_data_i64(&event.data, "client_timestamp_ms"); + let client_tz_offset_min = capture_data_i32(&event.data, "client_tz_offset_min"); + let server_timestamp_ms = capture_data_i64(&event.data, "server_timestamp_ms") + .unwrap_or_else(|| event.timestamp.timestamp_millis()); + let data_text = event.data.to_string(); + let event_type = event.event_type.as_str(); -``` - // Create an instance of EventCapture using the configuration file - let event_capture = EventCapture::new("config.json").await?; + debug!( + "Capture: executing rich INSERT for user_id={}, event_type={}, timestamp={}", + event.user_id, event_type, timestamp + ); - // Create an event - let event = Event { - user_id: "user123".to_string(), - event_type: "keystroke".to_string(), - data: Some("Pressed key A".to_string()), - }; + client + .execute( + "INSERT INTO events \ + (event_id, 
sequence_number, schema_version, \ + user_id, session_id, \ + event_source, language_id, file_hash, file_path, path_privacy, \ + event_type, timestamp, client_timestamp_ms, client_tz_offset_min, \ + server_timestamp_ms, data) \ + VALUES \ + ($1, $2, $3, \ + $4, $5, \ + $6, $7, $8, $9, $10, \ + $11, $12::text::timestamptz, $13, $14, \ + $15, $16::text::jsonb)", + &[ + &event_id, + &sequence_number, + &schema_version, + &event.user_id, + &session_id, + &event_source, + &language_id, + &file_hash, + &event.file_path, + &path_privacy, + &event_type, + ×tamp, + &client_timestamp_ms, + &client_tz_offset_min, + &server_timestamp_ms, + &data_text, + ], + ) + .await +} - // Insert the event into the database - event_capture.insert_event(event).await?; +async fn insert_legacy_event( + client: &Client, + event: &CaptureEvent, +) -> Result { + let timestamp = event.timestamp.to_rfc3339(); + let data_text = event.data.to_string(); + let event_type = event.event_type.as_str(); - Ok(()) -``` -} */ + debug!( + "Capture: executing legacy INSERT for user_id={}, event_type={}, timestamp={}", + event.user_id, event_type, timestamp + ); -pub struct EventCapture { - db_client: Arc>, + client + .execute( + "INSERT INTO events \ + (user_id, file_path, event_type, timestamp, data) \ + VALUES ($1, $2, $3, $4::text::timestamptz, $5::text::jsonb)", + &[ + &event.user_id, + &event.file_path, + &event_type, + ×tamp, + &data_text, + ], + ) + .await } -/* - ## The EventCapture Implementation -*/ +#[cfg(test)] +mod tests { + use super::*; + use serde_json::json; -impl EventCapture { - /* - Creates a new `EventCapture` instance by reading the database connection parameters from the `config.json` file and connecting to the PostgreSQL database. - # Arguments - - config_path: The file path to the config.json file. - - # Returns - - A `Result` containing an `EventCapture` instance - */ - - pub async fn new>(config_path: P) -> Result { - // Read the configuration file - let config_content = fs::read_to_string(config_path).map_err(io::Error::other)?; - let config: Config = serde_json::from_str(&config_content) - .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?; - - // Build the connection string for the PostgreSQL database - let conn_str = format!( - "host={} user={} password={} dbname={}", - config.db_ip, config.db_user, config.db_password, config.db_name + #[test] + fn capture_config_to_conn_str_is_well_formed() { + let cfg = CaptureConfig { + host: "localhost".to_string(), + port: Some(5432), + user: "alice".to_string(), + password: "secret".to_string(), + dbname: "codechat_capture".to_string(), + app_id: Some("spring25-study".to_string()), + fallback_path: Some(PathBuf::from("capture-events-fallback.jsonl")), + }; + + let conn = cfg.to_conn_str(); + // Very simple checks: we don't care about ordering beyond what we + // format. + assert!(conn.contains("host=localhost")); + assert!(conn.contains("user=alice")); + assert!(conn.contains("password=secret")); + assert!(conn.contains("dbname=codechat_capture")); + assert!(conn.contains("port=5432")); + assert!(!cfg.redacted_summary().contains("secret")); + } + + #[test] + fn capture_event_type_uses_stable_serialized_strings() { + assert_eq!( + serde_json::to_value(event_types::WRITE_DOC).unwrap(), + json!("write_doc") ); + assert_eq!( + serde_json::from_value::(json!("compile_end")).unwrap(), + event_types::COMPILE_END + ); + assert!(serde_json::from_value::(json!("random")).is_err()); + } - info!( - "Attempting Capture Database Connection. 
IP:[{}] Username:[{}] Database Name:[{}]", - config.db_ip, config.db_user, config.db_name + #[test] + fn capture_event_new_sets_all_fields() { + let ts = Utc::now(); + + let ev = CaptureEvent::new( + "user123".to_string(), + Some("/path/to/file.rs".to_string()), + event_types::WRITE_DOC, + ts, + json!({ "chars_typed": 42 }), ); - // Connect to the database asynchronously - let (client, connection) = tokio_postgres::connect(&conn_str, NoTls) - .await - .map_err(|e| io::Error::new(io::ErrorKind::ConnectionRefused, e))?; + assert_eq!(ev.user_id, "user123"); + assert_eq!(ev.file_path.as_deref(), Some("/path/to/file.rs")); + assert_eq!(ev.event_type, event_types::WRITE_DOC); + assert_eq!(ev.timestamp, ts); + assert_eq!(ev.data, json!({ "chars_typed": 42 })); + } - // Spawn a task to manage the database connection in the background + #[test] + fn capture_event_now_uses_current_time_and_fields() { + let before = Utc::now(); + let ev = CaptureEvent::now( + "user123".to_string(), + None, + event_types::SAVE, + json!({ "reason": "manual" }), + ); + let after = Utc::now(); + + assert_eq!(ev.user_id, "user123"); + assert!(ev.file_path.is_none()); + assert_eq!(ev.event_type, event_types::SAVE); + assert_eq!(ev.data, json!({ "reason": "manual" })); + + // Timestamp sanity check: it should be between before and after + assert!(ev.timestamp >= before); + assert!(ev.timestamp <= after); + } + + #[test] + fn capture_metadata_helpers_extract_typed_values() { + let data = json!({ + "event_id": "abc-123", + "sequence_number": "42", + "schema_version": 2, + "languageId": "rust", + "client_tz_offset_min": "-360" + }); + + assert_eq!( + capture_data_str(&data, &["language_id", "languageId"]).as_deref(), + Some("rust") + ); + assert_eq!( + capture_data_str(&data, &["event_id"]).as_deref(), + Some("abc-123") + ); + assert_eq!(capture_data_i64(&data, "sequence_number"), Some(42)); + assert_eq!(capture_data_i32(&data, "schema_version"), Some(2)); + assert_eq!(capture_data_i32(&data, "client_tz_offset_min"), Some(-360)); + } + + #[test] + fn capture_config_json_round_trip() { + let json_text = r#" + { + "host": "db.example.com", + "user": "bob", + "port": 5433, + "password": "hunter2", + "dbname": "cc_events", + "app_id": "fall25", + "fallback_path": "capture-events-fallback.jsonl" + } + "#; + + let cfg: CaptureConfig = serde_json::from_str(json_text).expect("JSON should parse"); + assert_eq!(cfg.host, "db.example.com"); + assert_eq!(cfg.port, Some(5433)); + assert_eq!(cfg.user, "bob"); + assert_eq!(cfg.password, "hunter2"); + assert_eq!(cfg.dbname, "cc_events"); + assert_eq!(cfg.app_id.as_deref(), Some("fall25")); + assert_eq!( + cfg.fallback_path.as_deref(), + Some(std::path::Path::new("capture-events-fallback.jsonl")) + ); + + // And it should serialize back to JSON without error + let _back = serde_json::to_string(&cfg).expect("Should serialize"); + } + + use std::fs; + //use tokio::time::{sleep, Duration}; + + /// Integration-style test: verify that EventCapture inserts into the rich + /// capture schema used by dissertation analysis. + /// + /// Reads connection parameters from `capture_config.json` in the current + /// working directory. Logs the config and connection details via log4rs so + /// you can confirm what is used. 
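+    /// If `capture_config.json` is not found in the working directory, the
+    /// test also looks for `../capture_config.json` at the repository root.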
+ /// + /// Run this test with: + /// cargo test event\_capture\_inserts\_rich_schema\_event\_into\_db + /// -- --ignored --nocapture + /// + /// You must have a PostgreSQL database and a `capture_config.json` file + /// such as: { "host": "localhost", "user": "codechat\_test\_user", + /// "password": "codechat\_test\_password", "dbname": + /// "codechat\_capture\_test", "app\_id": "integration-test" } + #[tokio::test] + #[ignore] + async fn event_capture_inserts_rich_schema_event_into_db() + -> Result<(), Box> { + // Initialize logging for this test, using the same log4rs.yml as the + // server. If logging is already initialized, this will just return an + // error which we ignore. + let _ = log4rs::init_file("log4rs.yml", Default::default()); + + // 1. Load the capture configuration from file. + let cfg_text = fs::read_to_string("capture_config.json") + .or_else(|_| fs::read_to_string("../capture_config.json")) + .expect( + "capture_config.json must exist in the server directory or repo root for this test", + ); + let cfg: CaptureConfig = + serde_json::from_str(&cfg_text).expect("capture_config.json must be valid JSON"); + + log::info!( + "TEST: Loaded DB config from capture_config.json: host={}, user={}, dbname={}, app_id={:?}", + cfg.host, + cfg.user, + cfg.dbname, + cfg.app_id + ); + + // 2. Connect directly for setup + verification. + let conn_str = cfg.to_conn_str(); + log::info!("TEST: Attempting direct tokio_postgres connection for verification."); + + let (client, connection) = tokio_postgres::connect(&conn_str, NoTls).await?; tokio::spawn(async move { if let Err(e) = connection.await { - error!("Database connection error: [{e}]"); + log::error!("TEST: direct connection error: {e}"); } }); - info!( - "Connected to Database [{}] as User [{}]", - config.db_name, config.db_user + let required_columns = [ + "event_id", + "sequence_number", + "schema_version", + "session_id", + "event_source", + "language_id", + "file_hash", + "path_privacy", + "client_timestamp_ms", + "client_tz_offset_min", + "server_timestamp_ms", + ]; + for column in required_columns { + let row = client + .query_one( + r#" + SELECT data_type + FROM information_schema.columns + WHERE table_schema = 'public' + AND table_name = 'events' + AND column_name = $1 + "#, + &[&column], + ) + .await + .map_err(|err| { + format!( + "TEST SETUP ERROR: missing public.events.{column}; \ + run server/scripts/capture_events_schema.sql first: {err}" + ) + })?; + let data_type: String = row.get(0); + info!("TEST: public.events.{column} type={data_type}"); + } + + // 4. Start the EventCapture worker using the loaded config. + let capture = EventCapture::new(cfg.clone())?; + log::info!("TEST: EventCapture worker started."); + + // 5. Log a schema-v2 test event with all typed analysis metadata. 
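+        // All of the metadata below rides inside the event's `data` payload;
+        // the capture worker's `insert_rich_event` then promotes the typed
+        // fields (event_id, sequence_number, schema_version, timestamps, ...)
+        // into their dedicated columns via the `capture_data_*` helpers.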
+ let test_suffix = Utc::now().timestamp_millis().to_string(); + let expected_event_id = format!("TEST_EVENT_{test_suffix}"); + let expected_user_id = format!("TEST_USER_{test_suffix}"); + let expected_session_id = format!("TEST_SESSION_{test_suffix}"); + let expected_file_hash = format!("TEST_FILE_HASH_{test_suffix}"); + let event_timestamp = Utc::now(); + let expected_server_timestamp_ms = event_timestamp.timestamp_millis(); + let expected_client_timestamp_ms = expected_server_timestamp_ms - 50; + let expected_data = json!({ + "event_id": expected_event_id, + "sequence_number": 42, + "schema_version": 2, + "session_id": expected_session_id, + "event_source": "integration_test", + "language_id": "rust", + "file_hash": expected_file_hash, + "path_privacy": "sha256", + "client_timestamp_ms": expected_client_timestamp_ms, + "client_tz_offset_min": 360, + "server_timestamp_ms": expected_server_timestamp_ms, + "chars_typed": 123, + "classification_basis": "integration_test" + }); + let event = CaptureEvent::now( + expected_user_id.clone(), + None, + event_types::WRITE_DOC, + expected_data.clone(), ); - Ok(EventCapture { - db_client: Arc::new(Mutex::new(client)), - }) - } - - /* - Inserts an event into the database. - - # Arguments - - `event`: An `Event` instance containing the event data to insert. - - # Returns - A `Result` indicating success or containing a `tokio_postgres::Error`. - - # Example - #[tokio::main] - async fn main() -> Result<(), Box> { - let event_capture = EventCapture::new("config.json").await?; - - let event = Event { - user_id: "user123".to_string(), - event_type: "keystroke".to_string(), - data: Some("Pressed key A".to_string()), - }; - - event_capture.insert_event(event).await?; - Ok(()) - } - */ - - pub async fn insert_event(&self, event: Event) -> Result<(), io::Error> { - let current_time = Local::now(); - let formatted_time = current_time.to_rfc3339(); - - // SQL statement to insert the event into the 'events' table - let stmt = indoc! {" - INSERT INTO events (user_id, event_type, timestamp, data) - VALUES ($1, $2, $3, $4) - "}; - - // Acquire a lock on the database client for thread-safe access - let client = self.db_client.lock().await; - - // Execute the SQL statement with the event data - client - .execute( - stmt, - &[ - &event.user_id, - &event.event_type, - &formatted_time, - &event.data, - ], - ) - .await - .map_err(io::Error::other)?; - - info!("Event inserted into database: {event:?}"); + log::info!("TEST: logging a test capture event."); + capture.log(event); - Ok(()) - } -} + // 6. Wait (deterministically) for the background worker to insert the event, + // then fetch THAT row (instead of "latest row in the table"). 
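+        // The loop below polls for up to 2 seconds in 50 ms steps: the worker
+        // normally commits within a few milliseconds, and a bounded wait turns
+        // a missing insert into a clear timeout error instead of a hang.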
+        use tokio::time::{Duration, Instant, sleep};
+
+        let deadline = Instant::now() + Duration::from_secs(2);
-/* Database Schema (SQL DDL)
+        let row = loop {
+            match client
+                .query_one(
+                    r#"
+                    SELECT user_id, file_path, event_type,
+                           event_id, sequence_number, schema_version,
+                           session_id, event_source, language_id, file_hash,
+                           path_privacy, client_timestamp_ms,
+                           client_tz_offset_min, server_timestamp_ms, data::text
+                    FROM events
+                    WHERE event_id = $1
+                    ORDER BY id DESC
+                    LIMIT 1
+                    "#,
+                    &[&expected_event_id],
+                )
+                .await
+            {
+                Ok(row) => break row, // found it
+                Err(_) => {
+                    if Instant::now() >= deadline {
+                        return Err("Timed out waiting for EventCapture insert".into());
+                    }
+                    sleep(Duration::from_millis(50)).await;
+                }
+            }
+        };
-The following SQL statement creates the `events` table used by this library:
+        let user_id: String = row.get("user_id");
+        let file_path: Option<String> = row.get(1);
+        let event_type: String = row.get(2);
+        let event_id: Option<String> = row.get(3);
+        let sequence_number: Option<i64> = row.get(4);
+        let schema_version: Option<i32> = row.get(5);
+        let session_id: Option<String> = row.get(6);
+        let event_source: Option<String> = row.get(7);
+        let language_id: Option<String> = row.get(8);
+        let file_hash: Option<String> = row.get(9);
+        let path_privacy: Option<String> = row.get(10);
+        let client_timestamp_ms: Option<i64> = row.get(11);
+        let client_tz_offset_min: Option<i32> = row.get(12);
+        let server_timestamp_ms: Option<i64> = row.get(13);
+        let data_text: String = row.get(14);
+        let data_value: serde_json::Value = serde_json::from_str(&data_text)?;
-CREATE TABLE events ( id SERIAL PRIMARY KEY, user_id TEXT NOT NULL,
-event_type TEXT NOT NULL, timestamp TEXT NOT NULL, data TEXT );
+        assert_eq!(user_id, expected_user_id);
+        assert!(file_path.is_none());
+        assert_eq!(event_type, event_types::WRITE_DOC.as_str());
+        assert_eq!(event_id.as_deref(), Some(expected_event_id.as_str()));
+        assert_eq!(sequence_number, Some(42));
+        assert_eq!(schema_version, Some(2));
+        assert_eq!(session_id.as_deref(), Some(expected_session_id.as_str()));
+        assert_eq!(event_source.as_deref(), Some("integration_test"));
+        assert_eq!(language_id.as_deref(), Some("rust"));
+        assert_eq!(file_hash.as_deref(), Some(expected_file_hash.as_str()));
+        assert_eq!(path_privacy.as_deref(), Some("sha256"));
+        assert_eq!(client_timestamp_ms, Some(expected_client_timestamp_ms));
+        assert_eq!(client_tz_offset_min, Some(360));
+        assert_eq!(server_timestamp_ms, Some(expected_server_timestamp_ms));
+        assert_eq!(data_value, expected_data);
-- **`id SERIAL PRIMARY KEY`**: Auto-incrementing primary key.
-- **`user_id TEXT NOT NULL`**: The ID of the user associated with the event.
-- **`event_type TEXT NOT NULL`**: The type of event.
-- **`timestamp TEXT NOT NULL`**: The timestamp of the event.
-- **`data TEXT`**: Optional additional data associated with the event.
- **Note:** Ensure this table exists in your PostgreSQL database before using
- the library. */
+        log::info!("✅ TEST: EventCapture integration test succeeded and wrote to database.");
+        Ok(())
+    }
+}
diff --git a/server/src/ide.rs b/server/src/ide.rs
index 67d139de..aab9e932 100644
--- a/server/src/ide.rs
+++ b/server/src/ide.rs
@@ -93,6 +93,7 @@ async fn start_server(
 // Provide a class to start and stop the server. All its fields are opaque,
 // since only Rust should use them.
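+// The added `app_state` field below keeps a handle on the shared web
+// application state so `capture_status` can read the current `CaptureStatus`
+// directly from `AppState` rather than sending a websocket message.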
pub struct CodeChatEditorServer { + app_state: WebAppState, server_handle: ServerHandle, from_ide_tx: Sender, to_ide_rx: Arc>>, @@ -141,6 +142,7 @@ impl CodeChatEditorServer { let (expired_messages_tx, expired_messages_rx) = mpsc::channel(100); Ok(CodeChatEditorServer { + app_state, server_handle, from_ide_tx: websocket_queues.from_websocket_tx, to_ide_rx: Arc::new(Mutex::new(websocket_queues.to_websocket_rx)), @@ -251,6 +253,18 @@ impl CodeChatEditorServer { .await } + pub async fn send_capture_event( + &self, + capture_event: webserver::CaptureEventWire, + ) -> std::io::Result { + self.send_message_timeout(EditorMessageContents::Capture(Box::new(capture_event))) + .await + } + + pub fn capture_status(&self) -> crate::capture::CaptureStatus { + webserver::capture_status(&self.app_state) + } + // Send a `CurrentFile` message. The other parameter (true if text/false if // binary/None if ignored) is ignored by the server, so it's always sent as // `None`. diff --git a/server/src/ide/filewatcher.rs b/server/src/ide/filewatcher.rs index 308056cc..3a524c8a 100644 --- a/server/src/ide/filewatcher.rs +++ b/server/src/ide/filewatcher.rs @@ -674,6 +674,7 @@ async fn processing_task( EditorMessageContents::Opened(_) | EditorMessageContents::OpenUrl(_) | + EditorMessageContents::Capture(_) | EditorMessageContents::ClientHtml(_) | EditorMessageContents::RequestClose => { let err = ResultErrTypes::ClientIllegalMessage; diff --git a/server/src/main.rs b/server/src/main.rs index bc443035..f98d4c96 100644 --- a/server/src/main.rs +++ b/server/src/main.rs @@ -332,10 +332,15 @@ fn port_in_range(s: &str) -> Result { fn parse_credentials(s: &str) -> Result { // For simplicity, require a username to have no colons. - let split: Vec<_> = s.splitn(2, ":").collect(); + let Some((username, password)) = s.split_once(':') else { + return Err("auth must use the form username:password".to_string()); + }; + if username.is_empty() { + return Err("auth username may not be empty".to_string()); + } Ok(Credentials { - username: split[0].to_string(), - password: split[1].to_string(), + username: username.to_string(), + password: password.to_string(), }) } diff --git a/server/src/translation.rs b/server/src/translation.rs index e87cf978..b915808f 100644 --- a/server/src/translation.rs +++ b/server/src/translation.rs @@ -222,6 +222,7 @@ use tokio::{ // ### Local use crate::{ + capture::{CaptureEventType, event_types}, lexer::{CodeDocBlock, DocBlock, supported_languages::MARKDOWN_MODE}, processing::{ CodeChatForWeb, CodeMirror, CodeMirrorDiff, CodeMirrorDiffable, CodeMirrorDocBlock, @@ -232,11 +233,11 @@ use crate::{ }, queue_send, queue_send_func, webserver::{ - CursorPosition, EditorMessage, EditorMessageContents, INITIAL_MESSAGE_ID, + CaptureEventWire, CursorPosition, EditorMessage, EditorMessageContents, INITIAL_MESSAGE_ID, MESSAGE_ID_INCREMENT, ProcessingTaskHttpRequest, ProcessingTaskHttpRequestFlags, ResultErrTypes, ResultOkTypes, SimpleHttpResponse, SimpleHttpResponseError, - UpdateMessageContents, WebAppState, WebsocketQueues, file_to_response, path_to_url, - send_response, try_canonicalize, try_read_as_text, url_to_path, + UpdateMessageContents, WebAppState, WebsocketQueues, file_to_response, log_capture_event, + path_to_url, send_response, try_canonicalize, try_read_as_text, url_to_path, }, }; @@ -387,6 +388,7 @@ pub fn create_translation_queues( /// allows factoring out lengthy contents in the loop into subfunctions. struct TranslationTask { // These parameters are passed to us. 
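+    /// Shared web application state, used to log the server-classified
+    /// `write_doc`/`write_code` capture events generated by this task.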
+ app_state: WebAppState, connection_id_raw: String, prefix: &'static [&'static str], allow_source_diffs: bool, @@ -435,6 +437,89 @@ struct TranslationTask { /// Has the full (non-diff) version of the current file been sent? Don't /// send diffs until this is sent. sent_full: bool, + /// Most recent capture metadata supplied by the IDE. Server-generated + /// capture events reuse this so translated write events retain the same + /// participant/session identity as the extension events. + capture_context: CaptureContext, +} + +/// Participant and session metadata remembered from client capture events. +/// +/// The translation layer generates `write_doc`/`write_code` events after it +/// has parsed CodeChat content. Those events should share the same pseudonymous +/// participant and capture session as the extension-side events, but the server +/// should not ask students for course/group/assignment/task setup values. +#[derive(Clone, Debug, Default)] +struct CaptureContext { + /// Pseudonymous participant UUID from the latest client capture event. + user_id: Option, + /// Origin of the client event stream, such as the VS Code extension. + event_source: Option, + /// Extension session UUID carried in the event data payload. + session_id: Option, + /// Client timezone offset in minutes, retained for generated write events. + client_tz_offset_min: Option, + /// Capture payload schema version from the extension. + schema_version: Option, +} + +impl CaptureContext { + fn update_from_wire(&mut self, wire: &CaptureEventWire) { + if !wire.user_id.trim().is_empty() { + self.user_id = Some(wire.user_id.clone()); + } + if let Some(event_source) = &wire.event_source { + self.event_source = Some(event_source.clone()); + } + if let Some(schema_version) = wire.schema_version { + self.schema_version = Some(schema_version); + } + if let Some(client_tz_offset_min) = wire.client_tz_offset_min { + self.client_tz_offset_min = Some(client_tz_offset_min); + } + if let Some(serde_json::Value::Object(data)) = &wire.data + && let Some(session_id) = data.get("session_id").and_then(serde_json::Value::as_str) + { + self.session_id = Some(session_id.to_string()); + } + } + + fn capture_event( + &self, + event_type: CaptureEventType, + file_path: Option, + data: serde_json::Value, + ) -> Option { + let mut data = match data { + serde_json::Value::Object(map) => map, + other => { + let mut map = serde_json::Map::new(); + map.insert("value".to_string(), other); + map + } + }; + if let Some(session_id) = &self.session_id { + data.entry("session_id".to_string()) + .or_insert_with(|| serde_json::json!(session_id)); + } + data.entry("source".to_string()) + .or_insert_with(|| serde_json::json!("server_translation")); + + Some(CaptureEventWire { + event_id: None, + sequence_number: None, + schema_version: self.schema_version, + user_id: self.user_id.clone()?, + event_source: self.event_source.clone(), + language_id: None, + file_hash: None, + file_path, + event_type, + client_timestamp_ms: None, + client_tz_offset_min: self.client_tz_offset_min, + data: Some(serde_json::Value::Object(data)), + }) + } } /// This is the processing task for the Visual Studio Code IDE. It handles all @@ -466,6 +551,7 @@ pub async fn translation_task( let mut continue_loop = true; let mut tt = TranslationTask { + app_state: app_state.clone(), connection_id_raw, prefix, allow_source_diffs, @@ -489,6 +575,7 @@ pub async fn translation_task( version: 0.0, // Don't send diffs until this is sent. 
sent_full: false, + capture_context: CaptureContext::default(), }; while continue_loop { select! { @@ -515,6 +602,11 @@ pub async fn translation_task( EditorMessageContents::Result(_) => continue_loop = tt.ide_result(ide_message).await, EditorMessageContents::Update(_) => continue_loop = tt.ide_update(ide_message).await, + EditorMessageContents::Capture(capture_event) => { + tt.capture_context.update_from_wire(&capture_event); + log_capture_event(&app_state, *capture_event); + send_response(&tt.to_ide_tx, ide_message.id, Ok(ResultOkTypes::Void)).await; + }, // Update the current file; translate it to a URL then // pass it to the Client. @@ -610,6 +702,11 @@ pub async fn translation_task( }, EditorMessageContents::Update(_) => continue_loop = tt.client_update(client_message).await, + EditorMessageContents::Capture(capture_event) => { + tt.capture_context.update_from_wire(&capture_event); + log_capture_event(&app_state, *capture_event); + send_response(&tt.to_client_tx, client_message.id, Ok(ResultOkTypes::Void)).await; + }, // Update the current file; translate it to a URL then // pass it to the IDE. @@ -700,6 +797,103 @@ pub async fn translation_task( // These provide translation for messages passing through the Server. impl TranslationTask { + fn capture_file_path(file_path: &std::path::Path) -> Option { + file_path.to_str().map(str::to_string) + } + + fn log_server_capture_event( + &self, + event_type: CaptureEventType, + file_path: &std::path::Path, + data: serde_json::Value, + ) { + let Some(capture_event) = self.capture_context.capture_event( + event_type, + Self::capture_file_path(file_path), + data, + ) else { + debug!("Skipping server-classified capture event; capture identity is not known yet."); + return; + }; + log_capture_event(&self.app_state, capture_event); + } + + fn log_raw_write_event(&self, file_path: &std::path::Path, before: &str, after: &str) { + if before == after { + return; + } + self.log_server_capture_event( + event_types::WRITE_CODE, + file_path, + serde_json::json!({ + "source": "server_translation", + "classification_basis": "raw_text", + "diff": diff_str(before, after), + }), + ); + } + + fn log_code_mirror_write_events( + &self, + file_path: &std::path::Path, + metadata: &SourceFileMetadata, + before_doc: &str, + before_doc_blocks: Option<&CodeMirrorDocBlockVec>, + after: &CodeMirror, + source: &str, + ) { + if metadata.mode == MARKDOWN_MODE { + if !compare_html(before_doc, &after.doc) { + self.log_server_capture_event( + event_types::WRITE_DOC, + file_path, + serde_json::json!({ + "source": source, + "classification_basis": "markdown_document", + "mode": metadata.mode, + "diff": diff_str(before_doc, &after.doc), + }), + ); + } + return; + } + + if before_doc != after.doc { + self.log_server_capture_event( + event_types::WRITE_CODE, + file_path, + serde_json::json!({ + "source": source, + "classification_basis": "codemirror_code_text", + "mode": metadata.mode, + "diff": diff_str(before_doc, &after.doc), + }), + ); + } + + let doc_blocks_changed = match before_doc_blocks { + Some(before) => !doc_blocks_compare(before, &after.doc_blocks), + None => !after.doc_blocks.is_empty(), + }; + if doc_blocks_changed { + let doc_block_diff = before_doc_blocks.map(|before| { + serde_json::json!(diff_code_mirror_doc_blocks(before, &after.doc_blocks)) + }); + self.log_server_capture_event( + event_types::WRITE_DOC, + file_path, + serde_json::json!({ + "source": source, + "classification_basis": "codemirror_doc_blocks", + "mode": metadata.mode, + "doc_block_count_before": 
before_doc_blocks.map_or(0, Vec::len), + "doc_block_count_after": after.doc_blocks.len(), + "doc_block_diff": doc_block_diff, + }), + ); + } + } + // Pass a `Result` message to the Client, unless it's a `LoadFile` result. async fn ide_result(&mut self, ide_message: EditorMessage) -> bool { let EditorMessageContents::Result(ref result) = ide_message.message else { @@ -895,6 +1089,16 @@ impl TranslationTask { else { panic!("Unexpected diff value."); }; + if self.sent_full { + self.log_code_mirror_write_events( + &clean_file_path, + &ccfw.metadata, + &self.code_mirror_doc, + self.code_mirror_doc_blocks.as_ref(), + code_mirror_translated, + "ide", + ); + } // Send a diff if possible. let client_contents = if self.sent_full { self.diff_code_mirror( @@ -940,6 +1144,13 @@ impl TranslationTask { Err(ResultErrTypes::TodoBinarySupport) } TranslationResultsString::Unknown => { + if self.sent_full { + self.log_raw_write_event( + &clean_file_path, + &self.source_code, + &code_mirror.doc, + ); + } // Send the new raw contents. debug!("Sending translated contents to Client."); queue_send_func!(self.to_client_tx.send(EditorMessage { @@ -956,13 +1167,16 @@ impl TranslationTask { mode: "".to_string(), }, source: CodeMirrorDiffable::Plain(CodeMirror { - doc: code_mirror.doc, + doc: code_mirror.doc.clone(), doc_blocks: vec![] }), version: contents.version }), }), })); + self.source_code = code_mirror.doc; + self.code_mirror_doc = self.source_code.clone(); + self.code_mirror_doc_blocks = Some(vec![]); Ok(ResultOkTypes::Void) } TranslationResultsString::Toc(_) => { @@ -1045,12 +1259,22 @@ impl TranslationTask { // what we just received. This must be updated // before we can translate back to check for changes // (the next step). - let CodeMirrorDiffable::Plain(code_mirror) = cfw.source else { + let CodeMirrorDiffable::Plain(ref code_mirror) = cfw.source else { // TODO: support diffable! panic!("Diff not supported."); }; - self.code_mirror_doc = code_mirror.doc; - self.code_mirror_doc_blocks = Some(code_mirror.doc_blocks); + if self.sent_full { + self.log_code_mirror_write_events( + &clean_file_path, + &cfw.metadata, + &self.code_mirror_doc, + self.code_mirror_doc_blocks.as_ref(), + code_mirror, + "client", + ); + } + self.code_mirror_doc = code_mirror.doc.clone(); + self.code_mirror_doc_blocks = Some(code_mirror.doc_blocks.clone()); // We may need to change this version if we send a // diff back to the Client. let mut cfw_version = cfw.version; diff --git a/server/src/webserver.rs b/server/src/webserver.rs index 59df0f54..166d0afc 100644 --- a/server/src/webserver.rs +++ b/server/src/webserver.rs @@ -38,15 +38,17 @@ use std::{ // ### Third-party use actix_files; + use actix_web::{ App, HttpRequest, HttpResponse, HttpServer, dev::{Server, ServerHandle, ServiceFactory, ServiceRequest}, error::Error, get, http::header::{ContentType, DispositionType}, - middleware, + middleware, post, web::{self, Data}, }; + use actix_web_httpauth::{extractors::basic::BasicAuth, middleware::HttpAuthentication}; use actix_ws::AggregatedMessage; use bytes::Bytes; @@ -95,6 +97,10 @@ use crate::{ }, }; +use crate::capture::{CaptureConfig, CaptureEvent, CaptureEventType, CaptureStatus, EventCapture}; + +use chrono::Utc; + // Data structures // --------------- // @@ -201,6 +207,8 @@ pub enum EditorMessageContents { // Server will determine the value if needed. Option, ), + /// Record an instrumentation event. Valid destinations: Server. + Capture(Box), // #### These messages may only be sent by the IDE. 
/// This is the first message sent when the IDE starts up. It may only be @@ -405,6 +413,8 @@ pub struct AppState { pub connection_id: Mutex>, /// The auth credentials if authentication is used. credentials: Option, + // Added to support capture - JDS - 11/2025 + pub capture: Option, } pub type WebAppState = web::Data; @@ -415,6 +425,55 @@ pub struct Credentials { pub password: String, } +/// JSON payload received from clients for capture events. +/// +/// The server supplies the authoritative timestamp. Study metadata such as +/// course, assignment, group, condition, and task is not part of this wire type: +/// those values are inferred later from researcher-managed mappings keyed by +/// the pseudonymous `user_id` and event timestamps. +#[derive(Debug, Serialize, Deserialize, PartialEq, TS)] +#[ts(export, optional_fields)] +pub struct CaptureEventWire { + /// Client-generated unique event identifier. + #[serde(skip_serializing_if = "Option::is_none")] + pub event_id: Option, + /// Client-local event order for one extension session. + #[serde(skip_serializing_if = "Option::is_none")] + pub sequence_number: Option, + /// Capture payload schema version. + #[serde(skip_serializing_if = "Option::is_none")] + pub schema_version: Option, + /// Pseudonymous participant UUID. This is not the student's real identity. + pub user_id: String, + /// Source of this event, such as the VS Code extension or server translation. + #[serde(skip_serializing_if = "Option::is_none")] + pub event_source: Option, + /// VS Code language identifier for the active file, when known. + #[serde(skip_serializing_if = "Option::is_none")] + pub language_id: Option, + /// SHA-256 hash of the local file path when path hashing is enabled. + #[serde(skip_serializing_if = "Option::is_none")] + pub file_hash: Option, + /// Raw file path only when path hashing is disabled for debugging. + #[serde(skip_serializing_if = "Option::is_none")] + pub file_path: Option, + /// Canonical capture event type. + pub event_type: CaptureEventType, + + /// Optional client-side timestamp (milliseconds since Unix epoch). + #[serde(skip_serializing_if = "Option::is_none")] + pub client_timestamp_ms: Option, + + /// Optional client timezone offset in minutes (JS Date().getTimezoneOffset()). + #[serde(skip_serializing_if = "Option::is_none")] + pub client_tz_offset_min: Option, + + /// Arbitrary event-specific data stored as JSON (optional). + #[serde(skip_serializing_if = "Option::is_none")] + #[ts(type = "unknown")] + pub data: Option, +} + // Macros // ------ /// Create a macro to report an error when enqueueing an item. @@ -449,7 +508,7 @@ macro_rules! queue_send_func { // The timeout for a reply from a websocket, in ms. Use a short timeout to speed // up unit tests. 
pub const REPLY_TIMEOUT_MS: Duration = if cfg!(test) { - Duration::from_millis(500) + Duration::from_millis(2500) } else { Duration::from_millis(15000) }; @@ -603,6 +662,99 @@ async fn stop(app_state: WebAppState) -> HttpResponse { HttpResponse::NoContent().finish() } +#[post("/capture")] +async fn capture_endpoint( + app_state: WebAppState, + payload: web::Json, +) -> HttpResponse { + let status = log_capture_event(&app_state, payload.into_inner()); + if status.enabled { + HttpResponse::Accepted().json(status) + } else { + HttpResponse::ServiceUnavailable().json(status) + } +} + +#[get("/capture/status")] +async fn capture_status_endpoint(app_state: WebAppState) -> HttpResponse { + HttpResponse::Ok().json(capture_status(&app_state)) +} + +/// Log a capture event if capture is enabled. +pub fn log_capture_event(app_state: &WebAppState, wire: CaptureEventWire) -> CaptureStatus { + if let Some(capture) = &app_state.capture { + let server_timestamp = Utc::now(); + // Default missing data to empty object + let mut data = wire.data.unwrap_or_else(|| serde_json::json!({})); + + // Ensure data is an object so we can attach fields + if !data.is_object() { + data = serde_json::json!({ "value": data }); + } + + // Add client timestamp fields if present (even if extension also sends them; + // overwriting is fine and consistent). + if let serde_json::Value::Object(map) = &mut data { + if let Some(event_id) = &wire.event_id { + map.insert("event_id".to_string(), serde_json::json!(event_id)); + } + if let Some(sequence_number) = wire.sequence_number { + map.insert( + "sequence_number".to_string(), + serde_json::json!(sequence_number), + ); + } + if let Some(schema_version) = wire.schema_version { + map.insert( + "schema_version".to_string(), + serde_json::json!(schema_version), + ); + } + if let Some(event_source) = &wire.event_source { + map.insert("event_source".to_string(), serde_json::json!(event_source)); + } + if let Some(language_id) = &wire.language_id { + map.insert("language_id".to_string(), serde_json::json!(language_id)); + } + if let Some(file_hash) = &wire.file_hash { + map.insert("file_hash".to_string(), serde_json::json!(file_hash)); + } + if let Some(ms) = wire.client_timestamp_ms { + map.insert("client_timestamp_ms".to_string(), serde_json::json!(ms)); + } + if let Some(tz) = wire.client_tz_offset_min { + map.insert("client_tz_offset_min".to_string(), serde_json::json!(tz)); + } + map.insert( + "server_timestamp_ms".to_string(), + serde_json::json!(server_timestamp.timestamp_millis()), + ); + } + + let event = CaptureEvent { + user_id: wire.user_id, + file_path: wire.file_path, + event_type: wire.event_type, + // Server decides when the event is recorded. + timestamp: server_timestamp, + data, + }; + + capture.log(event); + capture.status() + } else { + CaptureStatus::disabled() + } +} + +pub fn capture_status(app_state: &WebAppState) -> CaptureStatus { + app_state + .capture + .as_ref() + .map(EventCapture::status) + .unwrap_or_else(CaptureStatus::disabled) +} + // Get the `mode` query parameter to determine `is_test_mode`; default to // `false`. pub fn get_test_mode(req: &HttpRequest) -> bool { @@ -1451,9 +1603,6 @@ pub fn setup_server( addr: &SocketAddr, credentials: Option, ) -> std::io::Result<(Server, Data)> { - // Connect to the Capture Database - //let _event_capture = EventCapture::new("config.json").await?; - // Pre-load the bundled files before starting the webserver. 
let _ = &*BUNDLED_FILES_MAP; let app_data = make_app_data(credentials); @@ -1529,6 +1678,21 @@ pub fn configure_logger(level: LevelFilter) -> Result<(), Box) -> WebAppState { + // Initialize event capture from a config file (optional). + let capture: Option = load_capture_config().and_then(|cfg| { + let summary = cfg.redacted_summary(); + match EventCapture::new(cfg) { + Ok(ec) => { + eprintln!("Capture: enabled ({summary})"); + Some(ec) + } + Err(err) => { + eprintln!("Capture: failed to initialize ({summary}): {err}"); + None + } + } + }); + web::Data::new(AppState { server_handle: Mutex::new(None), filewatcher_next_connection_id: Mutex::new(0), @@ -1539,9 +1703,54 @@ pub fn make_app_data(credentials: Option) -> WebAppState { client_queues: Arc::new(Mutex::new(HashMap::new())), connection_id: Mutex::new(HashSet::new()), credentials, + capture, }) } +fn load_capture_config() -> Option { + match CaptureConfig::from_env() { + Ok(Some(cfg)) => return Some(with_default_capture_fallback_path(cfg)), + Ok(None) => {} + Err(err) => { + eprintln!("Capture: invalid environment configuration: {err}"); + return None; + } + } + + let mut config_path = ROOT_PATH.lock().unwrap().clone(); + config_path.push("capture_config.json"); + + match fs::read_to_string(&config_path) { + Ok(json) => match serde_json::from_str::(&json) { + Ok(cfg) => Some(with_default_capture_fallback_path(cfg)), + Err(err) => { + eprintln!("Capture: invalid JSON in {config_path:?}: {err}"); + None + } + }, + Err(err) => { + eprintln!( + "Capture: disabled (no CODECHAT_CAPTURE_* env and no readable config at {config_path:?}: {err})" + ); + None + } + } +} + +fn with_default_capture_fallback_path(mut cfg: CaptureConfig) -> CaptureConfig { + let root_path = ROOT_PATH.lock().unwrap().clone(); + match &cfg.fallback_path { + Some(path) if path.is_relative() => { + cfg.fallback_path = Some(root_path.join(path)); + } + Some(_) => {} + None => { + cfg.fallback_path = Some(root_path.join("capture-events-fallback.jsonl")); + } + } + cfg +} + // Configure the web application. I'd like to make this return an // `App`, but `AppEntry` is a private module. pub fn configure_app(app: App, app_data: &WebAppState) -> App @@ -1568,6 +1777,8 @@ where .service(vscode_client_framework) .service(ping) .service(stop) + .service(capture_endpoint) + .service(capture_status_endpoint) // Reroute to the filewatcher filesystem for typical user-requested // URLs. .route("/", web::get().to(filewatcher_root_fs_redirect)) diff --git a/server/tests/overall_common/mod.rs b/server/tests/overall_common/mod.rs index 035eeeda..ec5e602a 100644 --- a/server/tests/overall_common/mod.rs +++ b/server/tests/overall_common/mod.rs @@ -145,8 +145,14 @@ impl ExpectedMessages { } } -// Time to wait for `ExpectedMessages`. -pub const TIMEOUT: Duration = Duration::from_millis(3000); +// Time to wait for browser/WebDriver-backed client-server messages. This +// matches the client-side response window and gives CI enough room for autosave +// and loadfile acknowledgements under matrix load. +pub const TIMEOUT: Duration = Duration::from_millis(15000); + +// Browser-backed tests share a single WebDriver endpoint. Safari on macOS CI is +// unreliable with overlapping sessions, so serialize the harness. +pub(crate) static WEB_DRIVER_TEST_LOCK: tokio::sync::Mutex<()> = tokio::sync::Mutex::const_new(()); // ### Test harness // @@ -161,6 +167,7 @@ pub async fn harness< // The output from calling `prep_test_dir!()`. 
     prep_test_dir: (TempDir, PathBuf),
 ) -> Result<(), Box<dyn std::error::Error>> {
+    let _webdriver_test_lock = WEB_DRIVER_TEST_LOCK.lock().await;
     // Send log events to the tracing subscriber, since the code currently uses
     // a log-based framework. As below, ignore re-initialization errors.
     let _ = LogTracer::init();
@@ -237,11 +244,10 @@ pub async fn harness<
     // Report any errors produced when removing the temporary directory.
     temp_dir.close()?;
-    ret.unwrap_or_else(|err|
-        // Convert a panic to an error.
-        Err::<(), Box<dyn std::error::Error>>(Box::from(format!(
-            "{err:#?}"
-        ))))
+    ret.unwrap_or_else(
+        // Convert a panic to an error.
+        |err| Err::<(), Box<dyn std::error::Error>>(Box::from(format!("{err:#?}"))),
+    )
 }
 #[macro_export]
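For reference, here is a minimal, hypothetical sketch of the JSON body a client could POST to the new `/capture` endpoint, built only from the fields `CaptureEventWire` declares above. Every identifier value is a made-up placeholder, and `event_source` is an assumed label rather than one taken from the patch.

```rust
// Sketch only: builds the kind of payload `capture_endpoint` accepts.
// Placeholder values throughout; the server supplies the authoritative
// timestamp and merges the typed metadata into the stored `data` JSON
// (see `log_capture_event`).
use serde_json::json;

fn main() {
    let payload = json!({
        // Pseudonymous participant UUID (never the student's real identity).
        "user_id": "00000000-0000-4000-8000-000000000000",
        "event_type": "write_doc",
        "event_source": "vscode_extension", // assumed label, not from the patch
        "schema_version": 2,
        "sequence_number": 1,
        "language_id": "markdown",
        // SHA-256 of the local path when path hashing is enabled (placeholder).
        "file_hash": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
        "client_timestamp_ms": 1_700_000_000_000_i64,
        "client_tz_offset_min": 360,
        "data": {
            "session_id": "11111111-2222-4333-8444-555555555555",
            "chars_typed": 12
        }
    });
    // A client would POST this to `/capture`; the server replies `202 Accepted`
    // with a `CaptureStatus` body when capture is enabled, `503` otherwise.
    println!("{}", serde_json::to_string_pretty(&payload).unwrap());
}
```

Note that `log_capture_event` stamps the stored event with its own `server_timestamp_ms` regardless of any client-supplied timestamp, so clock skew on participant machines never affects event ordering in the database.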