diff --git a/.prettierignore b/.prettierignore
new file mode 100644
index 00000000..e9fa78ed
--- /dev/null
+++ b/.prettierignore
@@ -0,0 +1,15 @@
+# Generated files - do not format
+**/styled-system/**
+**/.next/**
+**/dist/**
+**/coverage/**
+**/node_modules/**
+**/storybook-static/**
+
+# Build outputs
+*.min.js
+*.min.css
+
+# Package manager
+pnpm-lock.yaml
+package-lock.json
diff --git a/apps/web/.claude/CLAUDE.md b/apps/web/.claude/CLAUDE.md
index 30e076f1..b88c8fed 100644
--- a/apps/web/.claude/CLAUDE.md
+++ b/apps/web/.claude/CLAUDE.md
@@ -605,6 +605,28 @@ css({
 className = "bg-blue-200 border-gray-300 text-brand-600";
 ```
 
+### Fixing Corrupted styled-system (Panda CSS)
+
+**If the CSS appears broken or styles aren't applying correctly**, the `styled-system/` directory may be corrupted. This can happen if prettier or other formatters modify the generated files.
+
+**Fix:**
+
+```bash
+# 1. Delete the corrupted styled-system
+rm -rf apps/web/styled-system
+
+# 2. Regenerate it
+cd apps/web && pnpm panda codegen
+
+# 3. Clear Next.js cache (if build errors persist)
+rm -rf apps/web/.next
+
+# 4. Rebuild
+pnpm build
+```
+
+**Prevention:** The repo has a `.prettierignore` at the root that excludes `**/styled-system/**`. If this file is missing or incomplete, prettier will corrupt the generated files when running `pnpm format`.
+
 See `.claude/GAME_THEMES.md` for standardized color theme usage in arcade games.
 
 ## Data Attributes for All Elements
diff --git a/apps/web/src/app/practice/[studentId]/PracticeClient.tsx b/apps/web/src/app/practice/[studentId]/PracticeClient.tsx
index c5cd99f9..f4e35795 100644
--- a/apps/web/src/app/practice/[studentId]/PracticeClient.tsx
+++ b/apps/web/src/app/practice/[studentId]/PracticeClient.tsx
@@ -1,10 +1,10 @@
-'use client'
+"use client";
 
-import { useRouter } from 'next/navigation'
-import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
-import { useToast } from '@/components/common/ToastContext'
-import { useMyAbacus } from '@/contexts/MyAbacusContext'
-import { PageWithNav } from '@/components/PageWithNav'
+import { useRouter } from "next/navigation";
+import { useCallback, useEffect, useMemo, useRef, useState } from "react";
+import { useToast } from "@/components/common/ToastContext";
+import { useMyAbacus } from "@/contexts/MyAbacusContext";
+import { PageWithNav } from "@/components/PageWithNav";
 import {
   ActiveSession,
   type AttemptTimingData,
@@ -13,18 +13,18 @@ import {
   PracticeErrorBoundary,
   PracticeSubNav,
   type SessionHudData,
-} from '@/components/practice'
-import { GameBreakScreen } from '@/components/practice/GameBreakScreen'
-import { GameBreakResultsScreen } from '@/components/practice/GameBreakResultsScreen'
-import type { GameResultsReport } from '@/lib/arcade/game-sdk/types'
-import type { Player } from '@/db/schema/players'
+} from "@/components/practice";
+import { GameBreakScreen } from "@/components/practice/GameBreakScreen";
+import { GameBreakResultsScreen } from "@/components/practice/GameBreakResultsScreen";
+import type { GameResultsReport } from "@/lib/arcade/game-sdk/types";
+import type { Player } from "@/db/schema/players";
 import type {
   GameBreakSettings,
   SessionHealth,
   SessionPart,
   SessionPlan,
   SlotResult,
-} from '@/db/schema/session-plans'
+} from "@/db/schema/session-plans";
 
 /**
  * State for redoing a previously completed problem
@@ -32,40 +32,40 @@ import type {
  */
 export interface RedoState {
   /** Whether redo mode is currently active */
-  isActive: boolean
+  isActive: boolean;
   /** Linear index
of the problem being redone (flat across all parts) */ - linearIndex: number + linearIndex: number; /** Part index containing the redo problem */ - originalPartIndex: number + originalPartIndex: number; /** Slot index within the part */ - originalSlotIndex: number + originalSlotIndex: number; /** The original result (to check if it was correct) */ - originalResult: SlotResult + originalResult: SlotResult; /** Part index to return to after redo */ - returnToPartIndex: number + returnToPartIndex: number; /** Slot index to return to after redo */ - returnToSlotIndex: number + returnToSlotIndex: number; } import { type ReceivedAbacusControl, type TeacherPauseRequest, useSessionBroadcast, -} from '@/hooks/useSessionBroadcast' +} from "@/hooks/useSessionBroadcast"; import { sessionPlanKeys, useActiveSessionPlan, useEndSessionEarly, useRecordRedoResult, useRecordSlotResult, -} from '@/hooks/useSessionPlan' -import { useQueryClient } from '@tanstack/react-query' -import { useSaveGameResult } from '@/hooks/useGameResults' -import { css } from '../../../../styled-system/css' +} from "@/hooks/useSessionPlan"; +import { useQueryClient } from "@tanstack/react-query"; +import { useSaveGameResult } from "@/hooks/useGameResults"; +import { css } from "../../../../styled-system/css"; interface PracticeClientProps { - studentId: string - player: Player - initialSession: SessionPlan + studentId: string; + player: Player; + initialSession: SessionPlan; } /** @@ -76,207 +76,238 @@ interface PracticeClientProps { * * When the session completes, it redirects to /summary. */ -export function PracticeClient({ studentId, player, initialSession }: PracticeClientProps) { - const router = useRouter() - const { showError } = useToast() - const { setVisionFrameCallback } = useMyAbacus() - const queryClient = useQueryClient() +export function PracticeClient({ + studentId, + player, + initialSession, +}: PracticeClientProps) { + const router = useRouter(); + const { showError } = useToast(); + const { setVisionFrameCallback } = useMyAbacus(); + const queryClient = useQueryClient(); // Track pause state for HUD display (ActiveSession owns the modal and actual pause logic) - const [isPaused, setIsPaused] = useState(false) + const [isPaused, setIsPaused] = useState(false); // Track timing data from ActiveSession for the sub-nav HUD - const [timingData, setTimingData] = useState(null) + const [timingData, setTimingData] = useState(null); // Track broadcast state for session observation (digit-by-digit updates from ActiveSession) - const [broadcastState, setBroadcastState] = useState(null) + const [broadcastState, setBroadcastState] = useState( + null, + ); // Browse mode state - lifted here so PracticeSubNav can trigger it - const [isBrowseMode, setIsBrowseMode] = useState(false) + const [isBrowseMode, setIsBrowseMode] = useState(false); // Browse index - lifted for navigation from SessionProgressIndicator - const [browseIndex, setBrowseIndex] = useState(0) + const [browseIndex, setBrowseIndex] = useState(0); // Teacher abacus control - receives commands from observing teacher - const [teacherControl, setTeacherControl] = useState(null) + const [teacherControl, setTeacherControl] = + useState(null); // Teacher-initiated pause/resume requests from observing teacher - const [teacherPauseRequest, setTeacherPauseRequest] = useState(null) - const [teacherResumeRequest, setTeacherResumeRequest] = useState(false) + const [teacherPauseRequest, setTeacherPauseRequest] = + useState(null); + const [teacherResumeRequest, 
setTeacherResumeRequest] = useState(false); // Manual pause request from HUD - const [manualPauseRequest, setManualPauseRequest] = useState(false) + const [manualPauseRequest, setManualPauseRequest] = useState(false); // Game break state - const [showGameBreak, setShowGameBreak] = useState(false) - const [gameBreakStartTime, setGameBreakStartTime] = useState(Date.now()) + const [showGameBreak, setShowGameBreak] = useState(false); + const [gameBreakStartTime, setGameBreakStartTime] = useState( + Date.now(), + ); // Track pending game break - set when part transition happens, triggers after transition screen - const [pendingGameBreak, setPendingGameBreak] = useState(false) + const [pendingGameBreak, setPendingGameBreak] = useState(false); // Game break results - captured when game completes to show on interstitial screen - const [gameBreakResults, setGameBreakResults] = useState(null) + const [gameBreakResults, setGameBreakResults] = + useState(null); // Show results interstitial before returning to practice - const [showGameBreakResults, setShowGameBreakResults] = useState(false) + const [showGameBreakResults, setShowGameBreakResults] = useState(false); // Track previous part index to detect part transitions - const previousPartIndexRef = useRef(initialSession.currentPartIndex) + const previousPartIndexRef = useRef(initialSession.currentPartIndex); // Ref to store stopVisionRecording - populated later when useSessionBroadcast is called // This allows early-defined callbacks to access the function - const stopRecordingRef = useRef<(() => void) | undefined>(undefined) + const stopRecordingRef = useRef<(() => void) | undefined>(undefined); // Ref to store sendProblemMarker for timeline sync const sendProblemMarkerRef = useRef< | (( problemNumber: number, partIndex: number, - eventType: 'problem-shown' | 'answer-submitted' | 'feedback-shown', - isCorrect?: boolean + eventType: "problem-shown" | "answer-submitted" | "feedback-shown", + isCorrect?: boolean, ) => void) | undefined - >(undefined) + >(undefined); // Redo state - allows students to re-attempt any completed problem - const [redoState, setRedoState] = useState(null) + const [redoState, setRedoState] = useState(null); // Dev shortcut: Ctrl+Shift+G to trigger game break for testing useEffect(() => { - if (process.env.NODE_ENV !== 'development') return + if (process.env.NODE_ENV !== "development") return; const handleKeyDown = (e: KeyboardEvent) => { - if (e.ctrlKey && e.shiftKey && e.key === 'G') { - e.preventDefault() + if (e.ctrlKey && e.shiftKey && e.key === "G") { + e.preventDefault(); setShowGameBreak((prev) => { if (!prev) { - setGameBreakStartTime(Date.now()) + setGameBreakStartTime(Date.now()); } - return !prev - }) + return !prev; + }); } - } + }; - window.addEventListener('keydown', handleKeyDown) - return () => window.removeEventListener('keydown', handleKeyDown) - }, []) + window.addEventListener("keydown", handleKeyDown); + return () => window.removeEventListener("keydown", handleKeyDown); + }, []); // Session plan mutations - const recordResult = useRecordSlotResult() - const recordRedo = useRecordRedoResult() - const endEarly = useEndSessionEarly() + const recordResult = useRecordSlotResult(); + const recordRedo = useRecordRedoResult(); + const endEarly = useEndSessionEarly(); // Game results mutation - saves to scoreboard when game break completes - const saveGameResult = useSaveGameResult() + const saveGameResult = useSaveGameResult(); // Fetch active session plan from cache or API with server data as initial - const { 
data: fetchedPlan } = useActiveSessionPlan(studentId, initialSession) + const { data: fetchedPlan } = useActiveSessionPlan(studentId, initialSession); // Current plan - mutations take priority, then fetched/cached data - const currentPlan = endEarly.data ?? recordResult.data ?? fetchedPlan ?? initialSession + const currentPlan = + endEarly.data ?? recordResult.data ?? fetchedPlan ?? initialSession; // Game break settings from the session plan - const gameBreakSettings = currentPlan.gameBreakSettings as GameBreakSettings | null + const gameBreakSettings = + currentPlan.gameBreakSettings as GameBreakSettings | null; // Build game config with skipSetupPhase merged into each game's config // This allows games to start immediately without showing their setup screen const gameBreakGameConfig = useMemo(() => { - const baseConfig = gameBreakSettings?.gameConfig ?? {} - const skipSetup = gameBreakSettings?.skipSetupPhase ?? true // Default to true for practice breaks + const baseConfig = gameBreakSettings?.gameConfig ?? {}; + const skipSetup = gameBreakSettings?.skipSetupPhase ?? true; // Default to true for practice breaks if (!skipSetup) { - return baseConfig + return baseConfig; } // Merge skipSetupPhase into each game's config - const mergedConfig: Record> = {} + const mergedConfig: Record> = {}; for (const [gameName, config] of Object.entries(baseConfig)) { - mergedConfig[gameName] = { ...config, skipSetupPhase: true } + mergedConfig[gameName] = { ...config, skipSetupPhase: true }; } // Also add skipSetupPhase for the selected game if not already in config - const selectedGame = gameBreakSettings?.selectedGame - if (selectedGame && selectedGame !== 'random' && !mergedConfig[selectedGame]) { - mergedConfig[selectedGame] = { skipSetupPhase: true } + const selectedGame = gameBreakSettings?.selectedGame; + if ( + selectedGame && + selectedGame !== "random" && + !mergedConfig[selectedGame] + ) { + mergedConfig[selectedGame] = { skipSetupPhase: true }; } - return mergedConfig + return mergedConfig; }, [ gameBreakSettings?.gameConfig, gameBreakSettings?.skipSetupPhase, gameBreakSettings?.selectedGame, - ]) + ]); // Compute HUD data from current plan - const currentPart = currentPlan.parts[currentPlan.currentPartIndex] as SessionPart | undefined - const sessionHealth = currentPlan.sessionHealth as SessionHealth | null + const currentPart = currentPlan.parts[currentPlan.currentPartIndex] as + | SessionPart + | undefined; + const sessionHealth = currentPlan.sessionHealth as SessionHealth | null; // Calculate totals const { totalProblems, completedProblems } = useMemo(() => { - const total = currentPlan.parts.reduce((sum, part) => sum + part.slots.length, 0) - let completed = 0 + const total = currentPlan.parts.reduce( + (sum, part) => sum + part.slots.length, + 0, + ); + let completed = 0; for (let i = 0; i < currentPlan.currentPartIndex; i++) { - completed += currentPlan.parts[i].slots.length + completed += currentPlan.parts[i].slots.length; } - completed += currentPlan.currentSlotIndex - return { totalProblems: total, completedProblems: completed } - }, [currentPlan.parts, currentPlan.currentPartIndex, currentPlan.currentSlotIndex]) + completed += currentPlan.currentSlotIndex; + return { totalProblems: total, completedProblems: completed }; + }, [ + currentPlan.parts, + currentPlan.currentPartIndex, + currentPlan.currentSlotIndex, + ]); // Pause handler - triggers manual pause in ActiveSession const handlePause = useCallback(() => { - setManualPauseRequest(true) - }, []) + setManualPauseRequest(true); 
+ }, []); const handleResume = useCallback(() => { - setIsPaused(false) - }, []) + setIsPaused(false); + }, []); // Handle recording an answer const handleAnswer = useCallback( - async (result: Omit): Promise => { + async ( + result: Omit, + ): Promise => { try { // Send problem marker for timeline sync (before mutation so it captures the submit moment) - const currentProblemNumber = currentPlan.currentSlotIndex + 1 + const currentProblemNumber = currentPlan.currentSlotIndex + 1; sendProblemMarkerRef.current?.( currentProblemNumber, currentPlan.currentPartIndex, - 'answer-submitted', - result.isCorrect - ) + "answer-submitted", + result.isCorrect, + ); - const previousPartIndex = previousPartIndexRef.current + const previousPartIndex = previousPartIndexRef.current; const updatedPlan = await recordResult.mutateAsync({ playerId: studentId, planId: currentPlan.id, result, - }) + }); // Update previous part index tracking - previousPartIndexRef.current = updatedPlan.currentPartIndex + previousPartIndexRef.current = updatedPlan.currentPartIndex; // If session just completed, redirect to summary with completed flag if (updatedPlan.completedAt) { // Stop vision recording if it was started - stopRecordingRef.current?.() + stopRecordingRef.current?.(); router.push(`/practice/${studentId}/summary?completed=1`, { scroll: false, - }) - return + }); + return; } // Check for part transition - queue game break to show AFTER transition screen - const partTransitioned = updatedPlan.currentPartIndex > previousPartIndex - const hasMoreParts = updatedPlan.currentPartIndex < updatedPlan.parts.length + const partTransitioned = + updatedPlan.currentPartIndex > previousPartIndex; + const hasMoreParts = + updatedPlan.currentPartIndex < updatedPlan.parts.length; const gameBreakEnabled = - (updatedPlan.gameBreakSettings as GameBreakSettings | null)?.enabled ?? false + (updatedPlan.gameBreakSettings as GameBreakSettings | null) + ?.enabled ?? false; if (partTransitioned && hasMoreParts && gameBreakEnabled) { console.log( - `[PracticeClient] Part completed (${previousPartIndex} → ${updatedPlan.currentPartIndex}), queuing game break for after transition screen` - ) + `[PracticeClient] Part completed (${previousPartIndex} → ${updatedPlan.currentPartIndex}), queuing game break for after transition screen`, + ); // Don't show game break immediately - wait for transition screen to complete // The game break will be triggered when onPartTransitionComplete is called - setPendingGameBreak(true) + setPendingGameBreak(true); } } catch (err) { - const message = err instanceof Error ? err.message : 'Unknown error' - if (message.includes('Not authorized')) { + const message = err instanceof Error ? err.message : "Unknown error"; + if (message.includes("Not authorized")) { showError( - 'Not authorized', - 'Only parents or teachers with the student present in their classroom can record answers.' 
- ) + "Not authorized", + "Only parents or teachers with the student present in their classroom can record answers.", + ); } else { - showError('Failed to record answer', message) + showError("Failed to record answer", message); } } }, - [studentId, currentPlan.id, recordResult, router, showError] - ) + [studentId, currentPlan.id, recordResult, router, showError], + ); // Handle ending session early const handleEndEarly = useCallback( @@ -286,59 +317,59 @@ export function PracticeClient({ studentId, player, initialSession }: PracticeCl playerId: studentId, planId: currentPlan.id, reason, - }) + }); // Stop vision recording if it was started - stopRecordingRef.current?.() + stopRecordingRef.current?.(); // Redirect to summary after ending early with completed flag router.push(`/practice/${studentId}/summary?completed=1`, { scroll: false, - }) + }); } catch (err) { // Check if it's an authorization error - const message = err instanceof Error ? err.message : 'Unknown error' - if (message.includes('Not authorized')) { + const message = err instanceof Error ? err.message : "Unknown error"; + if (message.includes("Not authorized")) { showError( - 'Not authorized', - 'Only parents or teachers with the student present in their classroom can end sessions.' - ) + "Not authorized", + "Only parents or teachers with the student present in their classroom can end sessions.", + ); } else { - showError('Failed to end session', message) + showError("Failed to end session", message); } } }, - [studentId, currentPlan.id, endEarly, router, showError] - ) + [studentId, currentPlan.id, endEarly, router, showError], + ); // Handle session completion (called by ActiveSession when all problems done) const handleSessionComplete = useCallback(() => { // Stop vision recording if it was started - stopRecordingRef.current?.() + stopRecordingRef.current?.(); // Redirect to summary with completed flag router.push(`/practice/${studentId}/summary?completed=1`, { scroll: false, - }) - }, [studentId, router]) + }); + }, [studentId, router]); // Handle redoing a previously completed problem // Called when student taps a completed problem dot in the progress indicator const handleRedoProblem = useCallback( (linearIndex: number, originalResult: SlotResult) => { // Find the part and slot for this linear index - let partIndex = 0 - let remaining = linearIndex + let partIndex = 0; + let remaining = linearIndex; for (let i = 0; i < currentPlan.parts.length; i++) { - const partSlotCount = currentPlan.parts[i].slots.length + const partSlotCount = currentPlan.parts[i].slots.length; if (remaining < partSlotCount) { - partIndex = i - break + partIndex = i; + break; } - remaining -= partSlotCount + remaining -= partSlotCount; } - const slotIndex = remaining + const slotIndex = remaining; // Exit browse mode if active if (isBrowseMode) { - setIsBrowseMode(false) + setIsBrowseMode(false); } // Set redo state @@ -350,46 +381,54 @@ export function PracticeClient({ studentId, player, initialSession }: PracticeCl originalResult, returnToPartIndex: currentPlan.currentPartIndex, returnToSlotIndex: currentPlan.currentSlotIndex, - }) + }); }, - [currentPlan.parts, currentPlan.currentPartIndex, currentPlan.currentSlotIndex, isBrowseMode] - ) + [ + currentPlan.parts, + currentPlan.currentPartIndex, + currentPlan.currentSlotIndex, + isBrowseMode, + ], + ); // Handle canceling a redo - exit without recording const handleCancelRedo = useCallback(() => { - setRedoState(null) - }, []) + setRedoState(null); + }, []); // Handle game break end - show 
results screen if game finished normally const handleGameBreakEnd = useCallback( - (reason: 'timeout' | 'gameFinished' | 'skipped', results?: GameResultsReport) => { - setShowGameBreak(false) + ( + reason: "timeout" | "gameFinished" | "skipped", + results?: GameResultsReport, + ) => { + setShowGameBreak(false); // If game finished normally with results, save to scoreboard and show interstitial - if (reason === 'gameFinished' && results) { + if (reason === "gameFinished" && results) { // Save result to database for scoreboard saveGameResult.mutate({ playerId: player.id, - sessionType: 'practice-break', + sessionType: "practice-break", sessionId: currentPlan.id, report: results, - }) + }); - setGameBreakResults(results) - setShowGameBreakResults(true) + setGameBreakResults(results); + setShowGameBreakResults(true); } else { // Timeout or skip - no results to show, return to practice immediately - setGameBreakResults(null) + setGameBreakResults(null); } }, - [saveGameResult, player.id, currentPlan.id] - ) + [saveGameResult, player.id, currentPlan.id], + ); // Handle results screen completion - return to practice const handleGameBreakResultsComplete = useCallback(() => { - setShowGameBreakResults(false) - setGameBreakResults(null) - }, []) + setShowGameBreakResults(false); + setGameBreakResults(null); + }, []); // Broadcast session state if student is in a classroom // broadcastState is updated by ActiveSession via the onBroadcastStateChange callback @@ -407,80 +446,121 @@ export function PracticeClient({ studentId, player, initialSession }: PracticeCl onAbacusControl: setTeacherControl, onTeacherPause: setTeacherPauseRequest, onTeacherResume: () => setTeacherResumeRequest(true), - }) + }); // Track whether we've started vision recording for this session - const hasStartedRecordingRef = useRef(false) + const hasStartedRecordingRef = useRef(false); // Track previous problem number to detect when new problems appear - const previousProblemNumberRef = useRef(null) + const previousProblemNumberRef = useRef(null); + // Track previous isRecording state to detect when recording starts + const wasRecordingRef = useRef(false); // Update the refs so callbacks defined earlier can access these functions - stopRecordingRef.current = stopVisionRecording - sendProblemMarkerRef.current = sendProblemMarker + stopRecordingRef.current = stopVisionRecording; + sendProblemMarkerRef.current = sendProblemMarker; + + // When recording starts, emit problem-shown marker for the current problem + // This handles the case where recording starts mid-session after the first problem-shown was dropped + useEffect(() => { + const wasRecording = wasRecordingRef.current; + wasRecordingRef.current = isRecording; + + // Detect transition from not-recording to recording + if (!wasRecording && isRecording && broadcastState) { + const currentProblemNumber = broadcastState.currentProblemNumber; + console.log( + `[PracticeClient] Recording just started, emitting problem-shown for current problem ${currentProblemNumber}`, + ); + sendProblemMarker( + currentProblemNumber, + broadcastState.currentPartIndex ?? 
currentPlan.currentPartIndex, + "problem-shown", + ); + } + }, [ + isRecording, + broadcastState, + currentPlan.currentPartIndex, + sendProblemMarker, + ]); // Emit 'problem-shown' marker when a new problem appears (including first problem) useEffect(() => { - if (!broadcastState) return + if (!broadcastState) return; - const currentProblemNumber = broadcastState.currentProblemNumber - const previousProblemNumber = previousProblemNumberRef.current + const currentProblemNumber = broadcastState.currentProblemNumber; + const previousProblemNumber = previousProblemNumberRef.current; // Emit if this is a new problem OR the first problem (previousProblemNumber is null) if (currentProblemNumber !== previousProblemNumber) { console.log( - `[PracticeClient] Problem shown: ${previousProblemNumber ?? 'none'} → ${currentProblemNumber}, emitting problem-shown marker` - ) + `[PracticeClient] Problem shown: ${previousProblemNumber ?? "none"} → ${currentProblemNumber}, emitting problem-shown marker`, + ); sendProblemMarker( currentProblemNumber, broadcastState.currentPartIndex ?? currentPlan.currentPartIndex, - 'problem-shown' - ) + "problem-shown", + ); } // Update ref for next comparison - previousProblemNumberRef.current = currentProblemNumber - }, [broadcastState?.currentProblemNumber, broadcastState?.currentPartIndex, currentPlan.currentPartIndex, sendProblemMarker]) + previousProblemNumberRef.current = currentProblemNumber; + }, [ + broadcastState?.currentProblemNumber, + broadcastState?.currentPartIndex, + currentPlan.currentPartIndex, + sendProblemMarker, + ]); // Handle part transition complete - called when transition screen finishes // This is where we trigger game break (after "put away abacus" message is shown) const handlePartTransitionComplete = useCallback(() => { // First, broadcast to observers - sendPartTransitionComplete() + sendPartTransitionComplete(); // Then, check if we have a pending game break if (pendingGameBreak) { - console.log('[PracticeClient] Transition screen complete, now showing game break') - setGameBreakStartTime(Date.now()) - setShowGameBreak(true) - setPendingGameBreak(false) + console.log( + "[PracticeClient] Transition screen complete, now showing game break", + ); + setGameBreakStartTime(Date.now()); + setShowGameBreak(true); + setPendingGameBreak(false); } - }, [sendPartTransitionComplete, pendingGameBreak]) + }, [sendPartTransitionComplete, pendingGameBreak]); // Wire vision frame callback to broadcast vision frames to observers // Also auto-start recording when vision frames start flowing useEffect(() => { - console.log('[PracticeClient] Setting up vision frame callback') + console.log("[PracticeClient] Setting up vision frame callback"); setVisionFrameCallback((frame) => { console.log( - '[PracticeClient] Vision frame received, hasStartedRecording:', + "[PracticeClient] Vision frame received, hasStartedRecording:", hasStartedRecordingRef.current, - 'isRecording:', - isRecording - ) + "isRecording:", + isRecording, + ); // Start recording on first vision frame (if not already recording) if (!hasStartedRecordingRef.current && !isRecording) { - console.log('[PracticeClient] First vision frame received, calling startVisionRecording()') - hasStartedRecordingRef.current = true - startVisionRecording() + console.log( + "[PracticeClient] First vision frame received, calling startVisionRecording()", + ); + hasStartedRecordingRef.current = true; + startVisionRecording(); } - sendVisionFrame(frame.imageData, frame.detectedValue, frame.confidence) - }) + 
sendVisionFrame(frame.imageData, frame.detectedValue, frame.confidence); + }); return () => { - setVisionFrameCallback(null) - } - }, [setVisionFrameCallback, sendVisionFrame, isRecording, startVisionRecording]) + setVisionFrameCallback(null); + }; + }, [ + setVisionFrameCallback, + sendVisionFrame, + isRecording, + startVisionRecording, + ]); // Build session HUD data for PracticeSubNav const sessionHud: SessionHudData | undefined = currentPart @@ -514,7 +594,7 @@ export function PracticeClient({ studentId, player, initialSession }: PracticeCl : undefined, onPause: handlePause, onResume: handleResume, - onEndEarly: () => handleEndEarly('Session ended'), + onEndEarly: () => handleEndEarly("Session ended"), isEndingSession: endEarly.isPending, isBrowseMode, onToggleBrowse: () => setIsBrowseMode((prev) => !prev), @@ -523,7 +603,7 @@ export function PracticeClient({ studentId, player, initialSession }: PracticeCl redoLinearIndex: redoState?.linearIndex, plan: currentPlan, } - : undefined + : undefined; // Build game break HUD data for PracticeSubNav (when on game break) const gameBreakHud: GameBreakHudData | undefined = showGameBreak @@ -532,7 +612,7 @@ export function PracticeClient({ studentId, player, initialSession }: PracticeCl maxDurationMs: (gameBreakSettings?.maxDurationMinutes ?? 5) * 60 * 1000, onSkip: handleGameBreakEnd, } - : undefined + : undefined; return ( @@ -548,16 +628,16 @@ export function PracticeClient({ studentId, player, initialSession }: PracticeCl data-component="practice-page" className={css({ // Fixed positioning to precisely control bounds - position: 'fixed', + position: "fixed", // Top: main nav (80px) + sub-nav height (~52px mobile, ~60px desktop) - top: { base: '132px', md: '140px' }, + top: { base: "132px", md: "140px" }, left: 0, // Right: 0 by default, landscape mobile handled via media query below right: 0, // Bottom: keypad height on mobile portrait (48px), 0 on desktop // Landscape mobile handled via media query below - bottom: { base: '48px', md: 0 }, - overflow: 'hidden', // Prevent scrolling during practice + bottom: { base: "48px", md: 0 }, + overflow: "hidden", // Prevent scrolling during practice })} > {/* Landscape mobile: keypad is on right (100px) instead of bottom */} @@ -593,7 +673,7 @@ export function PracticeClient({ studentId, player, initialSession }: PracticeCl maxDurationMinutes={gameBreakSettings?.maxDurationMinutes ?? 5} startTime={gameBreakStartTime} onComplete={handleGameBreakEnd} - selectionMode={gameBreakSettings?.selectionMode ?? 'kid-chooses'} + selectionMode={gameBreakSettings?.selectionMode ?? "kid-chooses"} selectedGame={gameBreakSettings?.selectedGame ?? 
null} gameConfig={gameBreakGameConfig} /> @@ -634,12 +714,12 @@ export function PracticeClient({ studentId, player, initialSession }: PracticeCl // Invalidate the session plan query to refetch updated results queryClient.invalidateQueries({ queryKey: sessionPlanKeys.active(studentId), - }) + }); }} /> )} - ) + ); } diff --git a/apps/web/src/components/vision/DockedVisionFeed.tsx b/apps/web/src/components/vision/DockedVisionFeed.tsx index 17306bbf..e070da64 100644 --- a/apps/web/src/components/vision/DockedVisionFeed.tsx +++ b/apps/web/src/components/vision/DockedVisionFeed.tsx @@ -1,16 +1,20 @@ -'use client' +"use client"; -import { useCallback, useEffect, useRef, useState } from 'react' -import { AbacusReact } from '@soroban/abacus-react' -import { useMyAbacus } from '@/contexts/MyAbacusContext' -import { useFrameStability } from '@/hooks/useFrameStability' -import { useMarkerDetection } from '@/hooks/useMarkerDetection' -import { useRemoteCameraDesktop } from '@/hooks/useRemoteCameraDesktop' -import { useColumnClassifier } from '@/hooks/useColumnClassifier' -import { usePassiveBoundaryCapture } from '@/hooks/usePassiveBoundaryCapture' -import { processVideoFrame, processImageFrame, digitsToNumber } from '@/lib/vision/frameProcessor' -import { VisionCameraFeed } from './VisionCameraFeed' -import { css } from '../../../styled-system/css' +import { useCallback, useEffect, useRef, useState } from "react"; +import { AbacusReact } from "@soroban/abacus-react"; +import { useMyAbacus } from "@/contexts/MyAbacusContext"; +import { useFrameStability } from "@/hooks/useFrameStability"; +import { useMarkerDetection } from "@/hooks/useMarkerDetection"; +import { useRemoteCameraDesktop } from "@/hooks/useRemoteCameraDesktop"; +import { useColumnClassifier } from "@/hooks/useColumnClassifier"; +import { usePassiveBoundaryCapture } from "@/hooks/usePassiveBoundaryCapture"; +import { + processVideoFrame, + processImageFrame, + digitsToNumber, +} from "@/lib/vision/frameProcessor"; +import { VisionCameraFeed } from "./VisionCameraFeed"; +import { css } from "../../../styled-system/css"; /** * Feature flag: Enable automatic abacus value detection from video feed. 
@@ -25,19 +29,19 @@ import { css } from '../../../styled-system/css' * - Hides the detection overlay * - Does not interfere with student's manual input */ -const ENABLE_AUTO_DETECTION = true +const ENABLE_AUTO_DETECTION = true; interface DockedVisionFeedProps { /** Called when a stable value is detected */ - onValueDetected?: (value: number) => void + onValueDetected?: (value: number) => void; /** Number of columns to detect */ - columnCount?: number + columnCount?: number; /** Called when user wants to undock the abacus */ - onUndock?: () => void + onUndock?: () => void; /** Current practice session ID (for passive training data capture) */ - practiceSessionId?: string + practiceSessionId?: string; /** Current player/student ID (for passive training data capture) */ - playerId?: string + playerId?: string; } /** @@ -65,50 +69,52 @@ export function DockedVisionFeed({ openVisionSetup, emitVisionFrame, visionSourceRef, - } = useMyAbacus() + } = useMyAbacus(); - const videoRef = useRef(null) - const remoteImageRef = useRef(null) - const rectifiedCanvasRef = useRef(null) - const animationFrameRef = useRef(null) - const lastInferenceTimeRef = useRef(0) - const lastBroadcastTimeRef = useRef(0) - const isInferringRef = useRef(false) // Prevent concurrent inference + const videoRef = useRef(null); + const remoteImageRef = useRef(null); + const rectifiedCanvasRef = useRef(null); + const animationFrameRef = useRef(null); + const lastInferenceTimeRef = useRef(0); + const lastBroadcastTimeRef = useRef(0); + const isInferringRef = useRef(false); // Prevent concurrent inference - const [videoStream, setVideoStream] = useState(null) - const [error, setError] = useState(null) - const [isLoading, setIsLoading] = useState(true) - const [connectionTimedOut, setConnectionTimedOut] = useState(false) - const [detectedValue, setDetectedValue] = useState(null) - const [confidence, setConfidence] = useState(0) - const [columnDigits, setColumnDigits] = useState([]) + const [videoStream, setVideoStream] = useState(null); + const [error, setError] = useState(null); + const [isLoading, setIsLoading] = useState(true); + const [connectionTimedOut, setConnectionTimedOut] = useState(false); + const [detectedValue, setDetectedValue] = useState(null); + const [confidence, setConfidence] = useState(0); + const [columnDigits, setColumnDigits] = useState([]); // Use persisted mirror mode from context (survives component remounts) - const showAbacusMirror = visionConfig.showMirrorMode ?? false - const setShowAbacusMirror = setVisionMirrorMode + const showAbacusMirror = visionConfig.showMirrorMode ?? 
false; + const setShowAbacusMirror = setVisionMirrorMode; // Show a subtle recommendation to try mirror mode when detection is working well - const [showMirrorHint, setShowMirrorHint] = useState(false) + const [showMirrorHint, setShowMirrorHint] = useState(false); // Track if user has dismissed the hint or engaged with mirror mode - const hintDismissedRef = useRef(false) + const hintDismissedRef = useRef(false); // Track video element in state for marker detection hook - const [videoElement, setVideoElement] = useState(null) + const [videoElement, setVideoElement] = useState( + null, + ); // ML column classifier hook - const classifier = useColumnClassifier() + const classifier = useColumnClassifier(); // Preload the ML model when component mounts useEffect(() => { if (ENABLE_AUTO_DETECTION) { - classifier.preload() + classifier.preload(); } - }, [classifier]) + }, [classifier]); // Stability tracking for detected values (hook must be called unconditionally) - const stability = useFrameStability() + const stability = useFrameStability(); // Determine camera source from explicit activeCameraSource field - const isLocalCamera = visionConfig.activeCameraSource === 'local' - const isRemoteCamera = visionConfig.activeCameraSource === 'phone' + const isLocalCamera = visionConfig.activeCameraSource === "local"; + const isRemoteCamera = visionConfig.activeCameraSource === "phone"; // ArUco marker detection using shared hook const { markersFound } = useMarkerDetection({ @@ -116,7 +122,7 @@ export function DockedVisionFeed({ videoElement, columnCount, onCalibrationChange: setVisionCalibration, - }) + }); // Passive boundary capture for training data collection // Captures raw frames with detected corners during practice @@ -124,10 +130,10 @@ export function DockedVisionFeed({ const { maybeCapture: maybeCaptureForTraining } = usePassiveBoundaryCapture({ enabled: visionConfig.enabled && isRemoteCamera, captureIntervalMs: 200, // Match phone rate - 5fps when markers visible - deviceId: 'passive-practice-remote', + deviceId: "passive-practice-remote", sessionId: practiceSessionId, playerId, - }) + }); // Remote camera hook const { @@ -135,19 +141,23 @@ export function DockedVisionFeed({ latestFrame: remoteLatestFrame, subscribe: remoteSubscribe, unsubscribe: remoteUnsubscribe, - } = useRemoteCameraDesktop() + } = useRemoteCameraDesktop(); - const INFERENCE_INTERVAL_MS = 100 // 10fps + const INFERENCE_INTERVAL_MS = 100; // 10fps // Start local camera when component mounts (only for local camera) useEffect(() => { - if (!visionConfig.enabled || !isLocalCamera || !visionConfig.cameraDeviceId) { - return + if ( + !visionConfig.enabled || + !isLocalCamera || + !visionConfig.cameraDeviceId + ) { + return; } - let cancelled = false - setIsLoading(true) - setError(null) + let cancelled = false; + setIsLoading(true); + setError(null); const startCamera = async () => { try { @@ -157,52 +167,52 @@ export function DockedVisionFeed({ width: { ideal: 1280 }, height: { ideal: 720 }, }, - }) + }); if (cancelled) { - stream.getTracks().forEach((track) => track.stop()) - return + stream.getTracks().forEach((track) => track.stop()); + return; } - setVideoStream(stream) - setIsLoading(false) + setVideoStream(stream); + setIsLoading(false); } catch (err) { - if (cancelled) return - console.error('[DockedVisionFeed] Failed to start camera:', err) - setError('Failed to access camera') - setIsLoading(false) + if (cancelled) return; + console.error("[DockedVisionFeed] Failed to start camera:", err); + setError("Failed to access 
camera"); + setIsLoading(false); } - } + }; - startCamera() + startCamera(); return () => { - cancelled = true - } - }, [visionConfig.enabled, isLocalCamera, visionConfig.cameraDeviceId]) + cancelled = true; + }; + }, [visionConfig.enabled, isLocalCamera, visionConfig.cameraDeviceId]); // Stop camera when stream changes or component unmounts useEffect(() => { return () => { if (videoStream) { - videoStream.getTracks().forEach((track) => track.stop()) + videoStream.getTracks().forEach((track) => track.stop()); } - } - }, [videoStream]) + }; + }, [videoStream]); // Attach stream to video element useEffect(() => { if (videoRef.current && videoStream) { - videoRef.current.srcObject = videoStream + videoRef.current.srcObject = videoStream; } - }, [videoStream]) + }, [videoStream]); // Register vision source for training data capture // Note: We depend on remoteLatestFrame because the element only renders when we have a frame, // so remoteImageRef.current is null until the first frame arrives useEffect(() => { if (isLocalCamera && videoRef.current && videoStream) { - visionSourceRef.current = { type: 'video', element: videoRef.current } + visionSourceRef.current = { type: "video", element: videoRef.current }; } else if ( isRemoteCamera && remoteImageRef.current && @@ -210,15 +220,15 @@ export function DockedVisionFeed({ remoteLatestFrame ) { visionSourceRef.current = { - type: 'image', + type: "image", element: remoteImageRef.current, - } + }; } return () => { // Clear the source ref when this component unmounts - visionSourceRef.current = null - } + visionSourceRef.current = null; + }; }, [ isLocalCamera, isRemoteCamera, @@ -226,144 +236,148 @@ export function DockedVisionFeed({ remoteIsPhoneConnected, remoteLatestFrame, visionSourceRef, - ]) + ]); // Subscribe to remote camera session useEffect(() => { - if (!visionConfig.enabled || !isRemoteCamera || !visionConfig.remoteCameraSessionId) { - return + if ( + !visionConfig.enabled || + !isRemoteCamera || + !visionConfig.remoteCameraSessionId + ) { + return; } - setIsLoading(true) - remoteSubscribe(visionConfig.remoteCameraSessionId) + setIsLoading(true); + remoteSubscribe(visionConfig.remoteCameraSessionId); return () => { - remoteUnsubscribe() - } + remoteUnsubscribe(); + }; }, [ visionConfig.enabled, isRemoteCamera, visionConfig.remoteCameraSessionId, remoteSubscribe, remoteUnsubscribe, - ]) + ]); // Update loading state when remote camera connects useEffect(() => { if (isRemoteCamera && remoteIsPhoneConnected) { - setIsLoading(false) - setConnectionTimedOut(false) + setIsLoading(false); + setConnectionTimedOut(false); } - }, [isRemoteCamera, remoteIsPhoneConnected]) + }, [isRemoteCamera, remoteIsPhoneConnected]); // Connection timeout for remote camera - show remediation options after 15 seconds useEffect(() => { if (!isRemoteCamera || !isLoading || remoteIsPhoneConnected) { - return + return; } const timeoutId = setTimeout(() => { - setConnectionTimedOut(true) - }, 15000) // 15 seconds + setConnectionTimedOut(true); + }, 15000); // 15 seconds - return () => clearTimeout(timeoutId) - }, [isRemoteCamera, isLoading, remoteIsPhoneConnected]) + return () => clearTimeout(timeoutId); + }, [isRemoteCamera, isLoading, remoteIsPhoneConnected]); // Process local camera frames for detection (only when enabled) const processLocalFrame = useCallback(async () => { // Skip detection when feature is disabled or model not ready - if (!ENABLE_AUTO_DETECTION) return - if (!classifier.isModelLoaded) return - if (isInferringRef.current) return // Skip if already 
inferring + if (!ENABLE_AUTO_DETECTION) return; + if (!classifier.isModelLoaded) return; + if (isInferringRef.current) return; // Skip if already inferring - const now = performance.now() + const now = performance.now(); if (now - lastInferenceTimeRef.current < INFERENCE_INTERVAL_MS) { - return + return; } - lastInferenceTimeRef.current = now + lastInferenceTimeRef.current = now; - const video = videoRef.current - if (!video || video.readyState < 2) return - if (!visionConfig.calibration) return + const video = videoRef.current; + if (!video || video.readyState < 2) return; + if (!visionConfig.calibration) return; - isInferringRef.current = true + isInferringRef.current = true; try { // Process video frame into column strips - const columnImages = processVideoFrame(video, visionConfig.calibration) - if (columnImages.length === 0) return + const columnImages = processVideoFrame(video, visionConfig.calibration); + if (columnImages.length === 0) return; // Use ML-based digit classification - const results = await classifier.classifyColumns(columnImages) - if (!results || results.digits.length === 0) return + const results = await classifier.classifyColumns(columnImages); + if (!results || results.digits.length === 0) return; // Extract digits and minimum confidence - const { digits, confidences } = results - const minConfidence = Math.min(...confidences) + const { digits, confidences } = results; + const minConfidence = Math.min(...confidences); // Store column digits for AbacusMirror display - setColumnDigits(digits) + setColumnDigits(digits); // Convert to number - const value = digitsToNumber(digits) + const value = digitsToNumber(digits); // Push to stability buffer - stability.pushFrame(value, minConfidence) + stability.pushFrame(value, minConfidence); } finally { - isInferringRef.current = false + isInferringRef.current = false; } - }, [visionConfig.calibration, stability, classifier]) + }, [visionConfig.calibration, stability, classifier]); // Process remote camera frames for detection (only when enabled) useEffect(() => { // Skip detection when feature is disabled or model not ready - if (!ENABLE_AUTO_DETECTION) return - if (!classifier.isModelLoaded) return + if (!ENABLE_AUTO_DETECTION) return; + if (!classifier.isModelLoaded) return; if (!isRemoteCamera || !remoteIsPhoneConnected || !remoteLatestFrame) { - return + return; } - const now = performance.now() + const now = performance.now(); if (now - lastInferenceTimeRef.current < INFERENCE_INTERVAL_MS) { - return + return; } - const image = remoteImageRef.current + const image = remoteImageRef.current; if (!image || !image.complete || image.naturalWidth === 0) { - return + return; } // Prevent concurrent inference - if (isInferringRef.current) return - isInferringRef.current = true - lastInferenceTimeRef.current = now + if (isInferringRef.current) return; + isInferringRef.current = true; + lastInferenceTimeRef.current = now; // Phone sends pre-cropped frames in auto mode, so no calibration needed - const columnImages = processImageFrame(image, null, columnCount) + const columnImages = processImageFrame(image, null, columnCount); if (columnImages.length === 0) { - isInferringRef.current = false - return + isInferringRef.current = false; + return; } // Use ML-based digit classification (async) classifier.classifyColumns(columnImages).then((results) => { - isInferringRef.current = false - if (!results || results.digits.length === 0) return + isInferringRef.current = false; + if (!results || results.digits.length === 0) return; // 
Extract digits and minimum confidence - const { digits, confidences } = results - const minConfidence = Math.min(...confidences) + const { digits, confidences } = results; + const minConfidence = Math.min(...confidences); // Store column digits for AbacusMirror display - setColumnDigits(digits) + setColumnDigits(digits); // Convert to number - const value = digitsToNumber(digits) + const value = digitsToNumber(digits); // Push to stability buffer - stability.pushFrame(value, minConfidence) - }) + stability.pushFrame(value, minConfidence); + }); }, [ isRemoteCamera, remoteIsPhoneConnected, @@ -371,25 +385,25 @@ export function DockedVisionFeed({ columnCount, stability, classifier, - ]) + ]); // Passive boundary capture: save raw frames with detected corners for training // This runs when the phone is in raw mode and sending marker detection data useEffect(() => { if (!isRemoteCamera || !remoteIsPhoneConnected || !remoteLatestFrame) { - return + return; } // Only capture raw mode frames with detected corners - if (remoteLatestFrame.mode !== 'raw') { - return + if (remoteLatestFrame.mode !== "raw") { + return; } - const corners = remoteLatestFrame.detectedCorners - const dimensions = remoteLatestFrame.videoDimensions + const corners = remoteLatestFrame.detectedCorners; + const dimensions = remoteLatestFrame.videoDimensions; if (!corners || !dimensions) { - return + return; } // Attempt to capture this frame for boundary detector training @@ -398,39 +412,49 @@ export function DockedVisionFeed({ remoteLatestFrame.imageData, corners, dimensions.width, - dimensions.height - ) - }, [isRemoteCamera, remoteIsPhoneConnected, remoteLatestFrame, maybeCaptureForTraining]) + dimensions.height, + ); + }, [ + isRemoteCamera, + remoteIsPhoneConnected, + remoteLatestFrame, + maybeCaptureForTraining, + ]); // Local camera detection loop (only when enabled) useEffect(() => { // Skip detection loop when feature is disabled or model not loaded - if (!ENABLE_AUTO_DETECTION) return - if (!classifier.isModelLoaded) return + if (!ENABLE_AUTO_DETECTION) return; + if (!classifier.isModelLoaded) return; - if (!visionConfig.enabled || !isLocalCamera || !videoStream || !visionConfig.calibration) { - return + if ( + !visionConfig.enabled || + !isLocalCamera || + !videoStream || + !visionConfig.calibration + ) { + return; } - let running = true + let running = true; const loop = () => { - if (!running) return + if (!running) return; // processLocalFrame is async but we don't await - it handles concurrency internally - processLocalFrame() - animationFrameRef.current = requestAnimationFrame(loop) - } + processLocalFrame(); + animationFrameRef.current = requestAnimationFrame(loop); + }; - loop() + loop(); return () => { - running = false + running = false; if (animationFrameRef.current) { - cancelAnimationFrame(animationFrameRef.current) - animationFrameRef.current = null + cancelAnimationFrame(animationFrameRef.current); + animationFrameRef.current = null; } - } + }; }, [ visionConfig.enabled, isLocalCamera, @@ -438,18 +462,21 @@ export function DockedVisionFeed({ visionConfig.calibration, processLocalFrame, classifier.isModelLoaded, - ]) + ]); // Handle stable value changes (only when auto-detection is enabled) useEffect(() => { // Skip value updates when feature is disabled - if (!ENABLE_AUTO_DETECTION) return + if (!ENABLE_AUTO_DETECTION) return; - if (stability.stableValue !== null && stability.stableValue !== detectedValue) { - setDetectedValue(stability.stableValue) - setConfidence(stability.currentConfidence) - 
setDockedValue(stability.stableValue) - onValueDetected?.(stability.stableValue) + if ( + stability.stableValue !== null && + stability.stableValue !== detectedValue + ) { + setDetectedValue(stability.stableValue); + setConfidence(stability.currentConfidence); + setDockedValue(stability.stableValue); + onValueDetected?.(stability.stableValue); } }, [ stability.stableValue, @@ -457,22 +484,22 @@ export function DockedVisionFeed({ detectedValue, setDockedValue, onValueDetected, - ]) + ]); // Show a subtle hint to try mirror mode when detection has been stable // Once shown, keep it visible until dismissed or mirror mode enabled // This prevents flashing when stability temporarily drops useEffect(() => { - if (!ENABLE_AUTO_DETECTION) return - if (!classifier.isModelLoaded) return - if (hintDismissedRef.current) return // Don't show again if dismissed this session - if (showAbacusMirror) return // Already in mirror mode, no need to suggest it - if (showMirrorHint) return // Already showing, don't re-evaluate + if (!ENABLE_AUTO_DETECTION) return; + if (!classifier.isModelLoaded) return; + if (hintDismissedRef.current) return; // Don't show again if dismissed this session + if (showAbacusMirror) return; // Already in mirror mode, no need to suggest it + if (showMirrorHint) return; // Already showing, don't re-evaluate // Only SET to true when stable, never set back to false (sticky once shown) - const isStable = stability.consecutiveFrames >= 3 + const isStable = stability.consecutiveFrames >= 3; if (isStable && columnDigits.length > 0) { - setShowMirrorHint(true) + setShowMirrorHint(true); } }, [ stability.consecutiveFrames, @@ -480,34 +507,36 @@ export function DockedVisionFeed({ showAbacusMirror, columnDigits.length, showMirrorHint, - ]) + ]); // Broadcast vision frames to observers (5fps to save bandwidth) - const BROADCAST_INTERVAL_MS = 200 + const BROADCAST_INTERVAL_MS = 200; useEffect(() => { - if (!visionConfig.enabled) return + if (!visionConfig.enabled) return; - let running = true + let running = true; const broadcastLoop = () => { - if (!running) return + if (!running) return; - const now = performance.now() + const now = performance.now(); if (now - lastBroadcastTimeRef.current >= BROADCAST_INTERVAL_MS) { - lastBroadcastTimeRef.current = now + lastBroadcastTimeRef.current = now; // Capture from rectified canvas (local camera) or remote image - let imageData: string | null = null + let imageData: string | null = null; if (isLocalCamera && rectifiedCanvasRef.current) { - const canvas = rectifiedCanvasRef.current + const canvas = rectifiedCanvasRef.current; if (canvas.width > 0 && canvas.height > 0) { // Convert canvas to JPEG (quality 0.7 for bandwidth) - imageData = canvas.toDataURL('image/jpeg', 0.7).replace('data:image/jpeg;base64,', '') + imageData = canvas + .toDataURL("image/jpeg", 0.7) + .replace("data:image/jpeg;base64,", ""); } } else if (isRemoteCamera && remoteLatestFrame) { // Remote camera already sends base64 JPEG - imageData = remoteLatestFrame.imageData + imageData = remoteLatestFrame.imageData; } if (imageData) { @@ -515,18 +544,18 @@ export function DockedVisionFeed({ imageData, detectedValue, confidence, - }) + }); } } - requestAnimationFrame(broadcastLoop) - } + requestAnimationFrame(broadcastLoop); + }; - broadcastLoop() + broadcastLoop(); return () => { - running = false - } + running = false; + }; }, [ visionConfig.enabled, isLocalCamera, @@ -535,15 +564,15 @@ export function DockedVisionFeed({ detectedValue, confidence, emitVisionFrame, - ]) + ]); const 
handleDisableVision = (e: React.MouseEvent) => { - e.stopPropagation() - setVisionEnabled(false) + e.stopPropagation(); + setVisionEnabled(false); if (videoStream) { - videoStream.getTracks().forEach((track) => track.stop()) + videoStream.getTracks().forEach((track) => track.stop()); } - } + }; if (error) { return ( @@ -551,20 +580,20 @@ export function DockedVisionFeed({ data-component="docked-vision-feed" data-status="error" className={css({ - display: 'flex', - flexDirection: 'column', - alignItems: 'center', - justifyContent: 'center', + display: "flex", + flexDirection: "column", + alignItems: "center", + justifyContent: "center", gap: 2, p: 4, - bg: 'red.900/30', - borderRadius: 'lg', - color: 'red.400', - textAlign: 'center', + bg: "red.900/30", + borderRadius: "lg", + color: "red.400", + textAlign: "center", })} > - ⚠️ - {error} + ⚠️ + {error} - ) + ); } if (isLoading) { @@ -591,101 +620,174 @@ export function DockedVisionFeed({
- 📷 - + 📷 + {isRemoteCamera ? connectionTimedOut - ? 'Phone not connecting' - : 'Connecting to phone...' - : 'Starting camera...'} + ? "Phone not connecting" + : "Connecting to phone..." + : "Starting camera..."} - {/* Remediation options when remote camera times out */} - {isRemoteCamera && connectionTimedOut && ( + + {/* Remote camera: Always show session info and controls */} + {isRemoteCamera && (
- - + + + +
)} - ) + ); } return (
@@ -693,13 +795,13 @@ export function DockedVisionFeed({
{/* AbacusReact mirror mode - shows detected values */} @@ -707,27 +809,27 @@ export function DockedVisionFeed({
{/* Main AbacusReact display - takes available space */}
{isLocalCamera && ( @@ -760,11 +862,11 @@ export function DockedVisionFeed({ calibration={visionConfig.calibration} showRectifiedView={true} videoRef={(el) => { - videoRef.current = el - setVideoElement(el) + videoRef.current = el; + setVideoElement(el); }} rectifiedCanvasRef={(el) => { - rectifiedCanvasRef.current = el + rectifiedCanvasRef.current = el; }} /> )} @@ -774,9 +876,9 @@ export function DockedVisionFeed({ src={`data:image/jpeg;base64,${remoteLatestFrame.imageData}`} alt="Phone camera view" className={css({ - width: '100%', - height: '100%', - objectFit: 'cover', + width: "100%", + height: "100%", + objectFit: "cover", })} /> )} @@ -791,11 +893,11 @@ export function DockedVisionFeed({ calibration={visionConfig.calibration} showRectifiedView={true} videoRef={(el) => { - videoRef.current = el - setVideoElement(el) // Update state so marker detection hook can react + videoRef.current = el; + setVideoElement(el); // Update state so marker detection hook can react }} rectifiedCanvasRef={(el) => { - rectifiedCanvasRef.current = el + rectifiedCanvasRef.current = el; }} /> )} @@ -807,9 +909,9 @@ export function DockedVisionFeed({ src={`data:image/jpeg;base64,${remoteLatestFrame.imageData}`} alt="Phone camera view" className={css({ - width: '100%', - height: 'auto', - objectFit: 'contain', + width: "100%", + height: "auto", + objectFit: "contain", })} /> )} @@ -818,13 +920,13 @@ export function DockedVisionFeed({ {isRemoteCamera && !remoteLatestFrame && remoteIsPhoneConnected && (
Waiting for frames... @@ -839,49 +941,53 @@ export function DockedVisionFeed({
{/* Left side: Status (detected value or loading message) */}
{classifier.isLoading ? ( - Loading... + + Loading... + ) : !classifier.isModelLoaded ? ( - No model + + No model + ) : ( <> {/* Detected value */} - {detectedValue !== null ? detectedValue : '---'} + {detectedValue !== null ? detectedValue : "---"} {/* Stability dots - show detection stability */} {stability.consecutiveFrames > 0 && (
@@ -889,10 +995,13 @@ export function DockedVisionFeed({
))} @@ -905,10 +1014,10 @@ export function DockedVisionFeed({ {/* Center: Mode toggle with text labels */}
{columnDigits.length > 0 && ( @@ -916,10 +1025,10 @@ export function DockedVisionFeed({
{/* Right side: Undock + Close buttons */} -
+
{onUndock && (
)}
-  )
+  );
 }