feat(vision): add CV-based bead detection and fix remote camera connection

- Add beadDetector.ts with intensity-profile-based bead detection (CV approach)
- Integrate CV pipeline for both local camera and remote phone camera feeds
- Add processImageFrame() to frameProcessor for remote camera image processing
- Fix React 18 Strict Mode duplicate session creation in RemoteCameraQRCode
- Add debug logging to remote camera hooks for connection troubleshooting
- Add VisionStatusIndicator for remote camera feed in AbacusVisionBridge

The duplicate session bug was caused by React 18 Strict Mode double-mounting
components and running effects twice with fresh state, which called
createSession() twice and created two different sessions: the phone joined
one while the desktop subscribed to the other.
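
A minimal sketch of the guard pattern used in the fix (see the
RemoteCameraQRCode diff below). A ref set on the first effect run survives
Strict Mode's doubled mount/effect cycle, so the second run bails out instead
of creating a second session:

    const creationInitiatedRef = useRef(false)

    useEffect(() => {
      if (creationInitiatedRef.current) return // second Strict Mode run exits here
      creationInitiatedRef.current = true
      void createSession()
    }, [createSession])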

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
Thomas Hallock committed 2026-01-01 11:29:02 -06:00
parent 5d0ac65bdd
commit 005140a1e7
12 changed files with 645 additions and 360 deletions

.claude/settings.local.json

@@ -422,7 +422,34 @@
"Bash(apps/web/src/lib/vision/perspectiveTransform.ts )",
"Bash(apps/web/src/socket-server.ts)",
"Bash(apps/web/src/components/vision/CalibrationOverlay.tsx )",
"Bash(apps/web/src/components/practice/ActiveSession.tsx )"
"Bash(apps/web/src/components/practice/ActiveSession.tsx )",
"Bash(open -a Preview:*)",
"Bash(pip3 install:*)",
"Bash(pip3 uninstall:*)",
"Bash(/opt/homebrew/bin/python3:*)",
"Bash(/usr/bin/python3:*)",
"Bash(/opt/homebrew/bin/pip3 install:*)",
"Bash(source:*)",
"Bash(pip install:*)",
"Bash(/opt/homebrew/opt/python@3.11/bin/python3.11:*)",
"Bash(tensorflowjs_converter:*)",
"Bash(public/models/abacus-column-classifier/column-classifier.keras )",
"Bash(public/models/abacus-column-classifier/)",
"Bash(public/models/abacus-column-classifier/column-classifier.h5 )",
"Bash(apps/web/scripts/train-column-classifier/train_model.py )",
"Bash(apps/web/src/app/remote-camera/[sessionId]/page.tsx )",
"Bash(apps/web/src/hooks/useColumnClassifier.ts )",
"Bash(apps/web/src/lib/vision/columnClassifier.ts )",
"Bash(\"apps/web/src/app/remote-camera/[sessionId]/page.tsx\" )",
"Bash(apps/web/drizzle/0054_new_mathemanic.sql )",
"Bash(apps/web/drizzle/meta/0054_snapshot.json )",
"Bash(apps/web/src/components/AbacusDisplayDropdown.tsx )",
"Bash(apps/web/src/db/schema/abacus-settings.ts )",
"Bash(packages/abacus-react/src/AbacusContext.tsx)",
"Bash(apps/web/src/lib/vision/frameProcessor.ts )",
"Bash(apps/web/src/lib/vision/beadDetector.ts )",
"Bash(apps/web/public/models/abacus-column-classifier/model.json )",
"Bash(.claude/settings.local.json)"
],
"deny": [],
"ask": []

apps/web/drizzle/meta/0054_snapshot.json

@@ -116,13 +116,9 @@
"abacus_settings_user_id_users_id_fk": {
"name": "abacus_settings_user_id_users_id_fk",
"tableFrom": "abacus_settings",
"columnsFrom": [
"user_id"
],
"columnsFrom": ["user_id"],
"tableTo": "users",
"columnsTo": [
"id"
],
"columnsTo": ["id"],
"onUpdate": "no action",
"onDelete": "cascade"
}
@@ -240,9 +236,7 @@
"indexes": {
"arcade_rooms_code_unique": {
"name": "arcade_rooms_code_unique",
"columns": [
"code"
],
"columns": ["code"],
"isUnique": true
}
},
@@ -339,26 +333,18 @@
"arcade_sessions_room_id_arcade_rooms_id_fk": {
"name": "arcade_sessions_room_id_arcade_rooms_id_fk",
"tableFrom": "arcade_sessions",
"columnsFrom": [
"room_id"
],
"columnsFrom": ["room_id"],
"tableTo": "arcade_rooms",
"columnsTo": [
"id"
],
"columnsTo": ["id"],
"onUpdate": "no action",
"onDelete": "cascade"
},
"arcade_sessions_user_id_users_id_fk": {
"name": "arcade_sessions_user_id_users_id_fk",
"tableFrom": "arcade_sessions",
"columnsFrom": [
"user_id"
],
"columnsFrom": ["user_id"],
"tableTo": "users",
"columnsTo": [
"id"
],
"columnsTo": ["id"],
"onUpdate": "no action",
"onDelete": "cascade"
}
@@ -424,9 +410,7 @@
"indexes": {
"players_user_id_idx": {
"name": "players_user_id_idx",
"columns": [
"user_id"
],
"columns": ["user_id"],
"isUnique": false
}
},
@@ -434,13 +418,9 @@
"players_user_id_users_id_fk": {
"name": "players_user_id_users_id_fk",
"tableFrom": "players",
"columnsFrom": [
"user_id"
],
"columnsFrom": ["user_id"],
"tableTo": "users",
"columnsTo": [
"id"
],
"columnsTo": ["id"],
"onUpdate": "no action",
"onDelete": "cascade"
}
@@ -514,9 +494,7 @@
"indexes": {
"idx_room_members_user_id_unique": {
"name": "idx_room_members_user_id_unique",
"columns": [
"user_id"
],
"columns": ["user_id"],
"isUnique": true
}
},
@@ -524,13 +502,9 @@
"room_members_room_id_arcade_rooms_id_fk": {
"name": "room_members_room_id_arcade_rooms_id_fk",
"tableFrom": "room_members",
"columnsFrom": [
"room_id"
],
"columnsFrom": ["room_id"],
"tableTo": "arcade_rooms",
"columnsTo": [
"id"
],
"columnsTo": ["id"],
"onUpdate": "no action",
"onDelete": "cascade"
}
@@ -605,13 +579,9 @@
"room_member_history_room_id_arcade_rooms_id_fk": {
"name": "room_member_history_room_id_arcade_rooms_id_fk",
"tableFrom": "room_member_history",
"columnsFrom": [
"room_id"
],
"columnsFrom": ["room_id"],
"tableTo": "arcade_rooms",
"columnsTo": [
"id"
],
"columnsTo": ["id"],
"onUpdate": "no action",
"onDelete": "cascade"
}
@@ -713,10 +683,7 @@
"indexes": {
"idx_room_invitations_user_room": {
"name": "idx_room_invitations_user_room",
"columns": [
"user_id",
"room_id"
],
"columns": ["user_id", "room_id"],
"isUnique": true
}
},
@@ -724,13 +691,9 @@
"room_invitations_room_id_arcade_rooms_id_fk": {
"name": "room_invitations_room_id_arcade_rooms_id_fk",
"tableFrom": "room_invitations",
"columnsFrom": [
"room_id"
],
"columnsFrom": ["room_id"],
"tableTo": "arcade_rooms",
"columnsTo": [
"id"
],
"columnsTo": ["id"],
"onUpdate": "no action",
"onDelete": "cascade"
}
@@ -833,13 +796,9 @@
"room_reports_room_id_arcade_rooms_id_fk": {
"name": "room_reports_room_id_arcade_rooms_id_fk",
"tableFrom": "room_reports",
"columnsFrom": [
"room_id"
],
"columnsFrom": ["room_id"],
"tableTo": "arcade_rooms",
"columnsTo": [
"id"
],
"columnsTo": ["id"],
"onUpdate": "no action",
"onDelete": "cascade"
}
@@ -918,10 +877,7 @@
"indexes": {
"idx_room_bans_user_room": {
"name": "idx_room_bans_user_room",
"columns": [
"user_id",
"room_id"
],
"columns": ["user_id", "room_id"],
"isUnique": true
}
},
@@ -929,13 +885,9 @@
"room_bans_room_id_arcade_rooms_id_fk": {
"name": "room_bans_room_id_arcade_rooms_id_fk",
"tableFrom": "room_bans",
"columnsFrom": [
"room_id"
],
"columnsFrom": ["room_id"],
"tableTo": "arcade_rooms",
"columnsTo": [
"id"
],
"columnsTo": ["id"],
"onUpdate": "no action",
"onDelete": "cascade"
}
@@ -998,13 +950,9 @@
"user_stats_user_id_users_id_fk": {
"name": "user_stats_user_id_users_id_fk",
"tableFrom": "user_stats",
"columnsFrom": [
"user_id"
],
"columnsFrom": ["user_id"],
"tableTo": "users",
"columnsTo": [
"id"
],
"columnsTo": ["id"],
"onUpdate": "no action",
"onDelete": "cascade"
}
@@ -1062,16 +1010,12 @@
"indexes": {
"users_guest_id_unique": {
"name": "users_guest_id_unique",
"columns": [
"guest_id"
],
"columns": ["guest_id"],
"isUnique": true
},
"users_email_unique": {
"name": "users_email_unique",
"columns": [
"email"
],
"columns": ["email"],
"isUnique": true
}
},
@@ -1091,4 +1035,4 @@
"internal": {
"indexes": {}
}
}
}

apps/web/drizzle/meta/_journal.json

@@ -388,4 +388,4 @@
"breakpoints": true
}
]
}
}

apps/web/public/models/abacus-column-classifier/model.json

@@ -20,12 +20,7 @@
"ragged": false,
"name": "input_layer",
"optional": false,
"batchInputShape": [
null,
128,
64,
1
]
"batchInputShape": [null, 128, 64, 1]
}
},
{
@@ -35,20 +30,11 @@
"trainable": true,
"dtype": "float32",
"filters": 32,
"kernel_size": [
3,
3
],
"strides": [
1,
1
],
"kernel_size": [3, 3],
"strides": [1, 1],
"padding": "same",
"data_format": "channels_last",
"dilation_rate": [
1,
1
],
"dilation_rate": [1, 1],
"groups": 1,
"activation": "relu",
"use_bias": true,
@@ -121,15 +107,9 @@
"name": "max_pooling2d",
"trainable": true,
"dtype": "float32",
"pool_size": [
2,
2
],
"pool_size": [2, 2],
"padding": "valid",
"strides": [
2,
2
],
"strides": [2, 2],
"data_format": "channels_last"
}
},
@@ -151,20 +131,11 @@
"trainable": true,
"dtype": "float32",
"filters": 64,
"kernel_size": [
3,
3
],
"strides": [
1,
1
],
"kernel_size": [3, 3],
"strides": [1, 1],
"padding": "same",
"data_format": "channels_last",
"dilation_rate": [
1,
1
],
"dilation_rate": [1, 1],
"groups": 1,
"activation": "relu",
"use_bias": true,
@@ -237,15 +208,9 @@
"name": "max_pooling2d_1",
"trainable": true,
"dtype": "float32",
"pool_size": [
2,
2
],
"pool_size": [2, 2],
"padding": "valid",
"strides": [
2,
2
],
"strides": [2, 2],
"data_format": "channels_last"
}
},
@@ -267,20 +232,11 @@
"trainable": true,
"dtype": "float32",
"filters": 128,
"kernel_size": [
3,
3
],
"strides": [
1,
1
],
"kernel_size": [3, 3],
"strides": [1, 1],
"padding": "same",
"data_format": "channels_last",
"dilation_rate": [
1,
1
],
"dilation_rate": [1, 1],
"groups": 1,
"activation": "relu",
"use_bias": true,
@@ -353,15 +309,9 @@
"name": "max_pooling2d_2",
"trainable": true,
"dtype": "float32",
"pool_size": [
2,
2
],
"pool_size": [2, 2],
"padding": "valid",
"strides": [
2,
2
],
"strides": [2, 2],
"data_format": "channels_last"
}
},
@@ -499,20 +449,13 @@
}
}
],
"build_input_shape": [
null,
128,
64,
1
]
"build_input_shape": [null, 128, 64, 1]
}
},
"training_config": {
"loss": "sparse_categorical_crossentropy",
"loss_weights": null,
"metrics": [
"accuracy"
],
"metrics": ["accuracy"],
"weighted_metrics": null,
"run_eagerly": false,
"steps_per_execution": 1,
@@ -541,15 +484,11 @@
},
"weightsManifest": [
{
"paths": [
"group1-shard1of1.bin"
],
"paths": ["group1-shard1of1.bin"],
"weights": [
{
"name": "batch_normalization/gamma",
"shape": [
32
],
"shape": [32],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
@@ -560,9 +499,7 @@
},
{
"name": "batch_normalization/beta",
"shape": [
32
],
"shape": [32],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
@@ -573,9 +510,7 @@
},
{
"name": "batch_normalization/moving_mean",
"shape": [
32
],
"shape": [32],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
@@ -586,9 +521,7 @@
},
{
"name": "batch_normalization/moving_variance",
"shape": [
32
],
"shape": [32],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
@@ -599,9 +532,7 @@
},
{
"name": "batch_normalization_1/gamma",
"shape": [
64
],
"shape": [64],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
@@ -612,9 +543,7 @@
},
{
"name": "batch_normalization_1/beta",
"shape": [
64
],
"shape": [64],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
@@ -625,9 +554,7 @@
},
{
"name": "batch_normalization_1/moving_mean",
"shape": [
64
],
"shape": [64],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
@@ -638,9 +565,7 @@
},
{
"name": "batch_normalization_1/moving_variance",
"shape": [
64
],
"shape": [64],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
@@ -651,9 +576,7 @@
},
{
"name": "batch_normalization_2/gamma",
"shape": [
128
],
"shape": [128],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
@@ -664,9 +587,7 @@
},
{
"name": "batch_normalization_2/beta",
"shape": [
128
],
"shape": [128],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
@@ -677,9 +598,7 @@
},
{
"name": "batch_normalization_2/moving_mean",
"shape": [
128
],
"shape": [128],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
@@ -690,9 +609,7 @@
},
{
"name": "batch_normalization_2/moving_variance",
"shape": [
128
],
"shape": [128],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
@@ -703,9 +620,7 @@
},
{
"name": "batch_normalization_3/gamma",
"shape": [
128
],
"shape": [128],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
@@ -716,9 +631,7 @@
},
{
"name": "batch_normalization_3/beta",
"shape": [
128
],
"shape": [128],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
@@ -729,9 +642,7 @@
},
{
"name": "batch_normalization_3/moving_mean",
"shape": [
128
],
"shape": [128],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
@@ -742,9 +653,7 @@
},
{
"name": "batch_normalization_3/moving_variance",
"shape": [
128
],
"shape": [128],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
@@ -755,12 +664,7 @@
},
{
"name": "conv2d/kernel",
"shape": [
3,
3,
1,
32
],
"shape": [3, 3, 1, 32],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
@@ -771,9 +675,7 @@
},
{
"name": "conv2d/bias",
"shape": [
32
],
"shape": [32],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
@@ -784,12 +686,7 @@
},
{
"name": "conv2d_1/kernel",
"shape": [
3,
3,
32,
64
],
"shape": [3, 3, 32, 64],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
@@ -800,9 +697,7 @@
},
{
"name": "conv2d_1/bias",
"shape": [
64
],
"shape": [64],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
@@ -813,12 +708,7 @@
},
{
"name": "conv2d_2/kernel",
"shape": [
3,
3,
64,
128
],
"shape": [3, 3, 64, 128],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
@@ -829,9 +719,7 @@
},
{
"name": "conv2d_2/bias",
"shape": [
128
],
"shape": [128],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
@@ -842,10 +730,7 @@
},
{
"name": "dense/kernel",
"shape": [
16384,
128
],
"shape": [16384, 128],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
@@ -856,9 +741,7 @@
},
{
"name": "dense/bias",
"shape": [
128
],
"shape": [128],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
@@ -869,10 +752,7 @@
},
{
"name": "dense_1/kernel",
"shape": [
128,
10
],
"shape": [128, 10],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
@@ -883,9 +763,7 @@
},
{
"name": "dense_1/bias",
"shape": [
10
],
"shape": [10],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
@@ -897,4 +775,4 @@
]
}
]
}
}

apps/web/src/app/remote-camera/[sessionId]/page.tsx

@@ -94,9 +94,13 @@ export default function RemoteCameraPage() {
// Validate session on mount
useEffect(() => {
async function validateSession() {
console.log('[RemoteCameraPage] Validating session:', sessionId)
try {
const response = await fetch(`/api/remote-camera?sessionId=${sessionId}`)
console.log('[RemoteCameraPage] Session validation response:', response.status)
if (response.ok) {
const data = await response.json()
console.log('[RemoteCameraPage] Session valid:', data)
setSessionStatus('connected')
} else if (response.status === 404) {
setSessionStatus('expired')
@@ -107,6 +111,7 @@ export default function RemoteCameraPage() {
setSessionError(data.error || 'Failed to validate session')
}
} catch (err) {
console.error('[RemoteCameraPage] Session validation error:', err)
setSessionStatus('error')
setSessionError('Network error')
}

AbacusVisionBridge.tsx

@@ -1,9 +1,13 @@
'use client'
import { motion } from 'framer-motion'
import type { ReactNode } from 'react'
import { useCallback, useEffect, useRef, useState } from 'react'
import { useAbacusVision } from '@/hooks/useAbacusVision'
import { useFrameStability } from '@/hooks/useFrameStability'
import { useRemoteCameraDesktop } from '@/hooks/useRemoteCameraDesktop'
import { analyzeColumns, analysesToDigits } from '@/lib/vision/beadDetector'
import { processImageFrame } from '@/lib/vision/frameProcessor'
import { isOpenCVReady, loadOpenCV, rectifyQuadrilateral } from '@/lib/vision/perspectiveTransform'
import type { CalibrationGrid, QuadCorners } from '@/types/vision'
import { DEFAULT_STABILITY_CONFIG } from '@/types/vision'
@@ -96,6 +100,16 @@
setRemoteTorch,
} = useRemoteCameraDesktop()
// Stability tracking for remote frames
const remoteStability = useFrameStability()
// Track last stable value for remote camera to avoid duplicate callbacks
const lastRemoteStableValueRef = useRef<number | null>(null)
// Throttle remote frame processing
const lastRemoteInferenceTimeRef = useRef<number>(0)
const REMOTE_INFERENCE_INTERVAL_MS = 100 // 10fps
// Handle switching to phone camera
const handleCameraSourceChange = useCallback(
(source: CameraSource) => {
@@ -194,6 +208,88 @@
}
}, [vision.cameraError, onError])
// Process remote camera frames through CV pipeline
useEffect(() => {
// Only process when using phone camera and connected
if (cameraSource !== 'phone' || !remoteIsPhoneConnected || !remoteLatestFrame) {
return
}
// Don't process during calibration
if (remoteIsCalibrating) {
return
}
// In manual mode, need calibration to process
if (remoteCalibrationMode === 'manual' && !remoteCalibration) {
return
}
// Throttle processing
const now = performance.now()
if (now - lastRemoteInferenceTimeRef.current < REMOTE_INFERENCE_INTERVAL_MS) {
return
}
lastRemoteInferenceTimeRef.current = now
// Get image element
const image = remoteImageRef.current
if (!image || !image.complete || image.naturalWidth === 0) {
return
}
// Determine calibration to use
// In auto mode (cropped frames), no calibration needed - phone already cropped
// In manual mode, use the desktop calibration
const calibration = remoteCalibrationMode === 'auto' ? null : remoteCalibration
// Process frame through CV pipeline
const columnImages = processImageFrame(image, calibration, columnCount)
if (columnImages.length === 0) return
// Run CV-based bead detection
const analyses = analyzeColumns(columnImages)
const { digits, minConfidence } = analysesToDigits(analyses)
// Convert digits to number
const detectedValue = digits.reduce((acc, d) => acc * 10 + d, 0)
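// e.g. digits [1, 0, 7] → ((1 * 10) + 0) * 10 + 7 = 107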
// Log for debugging
console.log(
'[Remote CV] Bead analysis:',
analyses.map((a) => ({
digit: a.digit,
conf: a.confidence.toFixed(2),
heaven: a.heavenActive ? '5' : '0',
earth: a.earthActiveCount,
}))
)
// Push to stability buffer
remoteStability.pushFrame(detectedValue, minConfidence)
}, [
cameraSource,
remoteIsPhoneConnected,
remoteLatestFrame,
remoteIsCalibrating,
remoteCalibrationMode,
remoteCalibration,
columnCount,
remoteStability,
])
// Notify when remote stable value changes
useEffect(() => {
if (
cameraSource === 'phone' &&
remoteStability.stableValue !== null &&
remoteStability.stableValue !== lastRemoteStableValueRef.current
) {
lastRemoteStableValueRef.current = remoteStability.stableValue
onValueDetected(remoteStability.stableValue)
}
}, [cameraSource, remoteStability.stableValue, onValueDetected])
// Load OpenCV when calibrating (local or remote)
useEffect(() => {
const isCalibrating = vision.isCalibrating || remoteIsCalibrating
@@ -302,9 +398,12 @@
)
return (
<div
<motion.div
ref={containerRef}
data-component="abacus-vision-bridge"
drag
dragMomentum={false}
dragElastic={0}
className={css({
display: 'flex',
flexDirection: 'column',
@@ -314,6 +413,8 @@
borderRadius: 'xl',
maxWidth: '400px',
width: '100%',
cursor: 'grab',
_active: { cursor: 'grabbing' },
})}
>
{/* Header */}
@@ -418,95 +519,95 @@
{/* Camera controls (local camera) - only show if there's something to display */}
{cameraSource === 'local' &&
(vision.availableDevices.length > 1 || vision.isTorchAvailable) && (
<div
data-element="camera-controls"
className={css({
display: 'flex',
alignItems: 'center',
gap: 2,
flexWrap: 'wrap',
})}
>
{/* Camera selector (if multiple cameras) */}
{vision.availableDevices.length > 1 && (
<select
data-element="camera-selector"
value={vision.selectedDeviceId ?? ''}
onChange={handleCameraSelect}
className={css({
flex: 1,
p: 2,
bg: 'gray.800',
color: 'white',
border: '1px solid',
borderColor: 'gray.600',
borderRadius: 'md',
fontSize: 'sm',
minWidth: '150px',
})}
>
{vision.availableDevices.map((device) => (
<option key={device.deviceId} value={device.deviceId}>
{device.label || `Camera ${device.deviceId.slice(0, 8)}`}
</option>
))}
</select>
)}
<div
data-element="camera-controls"
className={css({
display: 'flex',
alignItems: 'center',
gap: 2,
flexWrap: 'wrap',
})}
>
{/* Camera selector (if multiple cameras) */}
{vision.availableDevices.length > 1 && (
<select
data-element="camera-selector"
value={vision.selectedDeviceId ?? ''}
onChange={handleCameraSelect}
className={css({
flex: 1,
p: 2,
bg: 'gray.800',
color: 'white',
border: '1px solid',
borderColor: 'gray.600',
borderRadius: 'md',
fontSize: 'sm',
minWidth: '150px',
})}
>
{vision.availableDevices.map((device) => (
<option key={device.deviceId} value={device.deviceId}>
{device.label || `Camera ${device.deviceId.slice(0, 8)}`}
</option>
))}
</select>
)}
{/* Flip camera button - only show if multiple cameras available */}
{vision.availableDevices.length > 1 && (
<button
type="button"
onClick={() => vision.flipCamera()}
data-action="flip-camera"
className={css({
display: 'flex',
alignItems: 'center',
justifyContent: 'center',
width: '40px',
height: '40px',
bg: 'gray.700',
color: 'white',
border: 'none',
borderRadius: 'md',
cursor: 'pointer',
fontSize: 'lg',
_hover: { bg: 'gray.600' },
})}
title={`Switch to ${vision.facingMode === 'environment' ? 'front' : 'back'} camera`}
>
🔄
</button>
)}
{/* Flip camera button - only show if multiple cameras available */}
{vision.availableDevices.length > 1 && (
<button
type="button"
onClick={() => vision.flipCamera()}
data-action="flip-camera"
className={css({
display: 'flex',
alignItems: 'center',
justifyContent: 'center',
width: '40px',
height: '40px',
bg: 'gray.700',
color: 'white',
border: 'none',
borderRadius: 'md',
cursor: 'pointer',
fontSize: 'lg',
_hover: { bg: 'gray.600' },
})}
title={`Switch to ${vision.facingMode === 'environment' ? 'front' : 'back'} camera`}
>
🔄
</button>
)}
{/* Torch toggle button (only if available) */}
{vision.isTorchAvailable && (
<button
type="button"
onClick={() => vision.toggleTorch()}
data-action="toggle-torch"
data-status={vision.isTorchOn ? 'on' : 'off'}
className={css({
display: 'flex',
alignItems: 'center',
justifyContent: 'center',
width: '40px',
height: '40px',
bg: vision.isTorchOn ? 'yellow.600' : 'gray.700',
color: 'white',
border: 'none',
borderRadius: 'md',
cursor: 'pointer',
fontSize: 'lg',
_hover: { bg: vision.isTorchOn ? 'yellow.500' : 'gray.600' },
})}
title={vision.isTorchOn ? 'Turn off flash' : 'Turn on flash'}
>
{vision.isTorchOn ? '🔦' : '💡'}
</button>
)}
</div>
)}
{/* Torch toggle button (only if available) */}
{vision.isTorchAvailable && (
<button
type="button"
onClick={() => vision.toggleTorch()}
data-action="toggle-torch"
data-status={vision.isTorchOn ? 'on' : 'off'}
className={css({
display: 'flex',
alignItems: 'center',
justifyContent: 'center',
width: '40px',
height: '40px',
bg: vision.isTorchOn ? 'yellow.600' : 'gray.700',
color: 'white',
border: 'none',
borderRadius: 'md',
cursor: 'pointer',
fontSize: 'lg',
_hover: { bg: vision.isTorchOn ? 'yellow.500' : 'gray.600' },
})}
title={vision.isTorchOn ? 'Turn off flash' : 'Turn on flash'}
>
{vision.isTorchOn ? '🔦' : '💡'}
</button>
)}
</div>
)}
{/* Camera controls (phone camera) */}
{cameraSource === 'phone' && remoteIsPhoneConnected && remoteIsTorchAvailable && (
@@ -829,6 +930,27 @@
/>
)}
{/* Detection status indicator */}
{!remoteIsCalibrating && (
<div
className={css({
position: 'absolute',
top: 2,
left: 2,
})}
>
<VisionStatusIndicator
isCalibrated={remoteCalibrationMode === 'auto' || remoteCalibration !== null}
isDetecting={remoteLatestFrame !== null}
confidence={remoteStability.currentConfidence}
handDetected={remoteStability.isHandDetected}
detectedValue={remoteStability.stableValue}
consecutiveFrames={remoteStability.consecutiveFrames}
minFrames={DEFAULT_STABILITY_CONFIG.minConsecutiveFrames}
/>
</div>
)}
{/* Connection status */}
<div
className={css({
@@ -1074,7 +1196,7 @@
{vision.cameraError}
</div>
)}
</div>
</motion.div>
)
}

RemoteCameraQRCode.tsx

@@ -1,6 +1,6 @@
'use client'
import { useEffect, useState } from 'react'
import { useEffect, useRef, useState } from 'react'
import { AbacusQRCode } from '@/components/common/AbacusQRCode'
import { useRemoteCameraSession } from '@/hooks/useRemoteCameraSession'
import { css } from '../../../styled-system/css'
@@ -32,6 +32,10 @@
const { session, isCreating, error, createSession, setExistingSession, getPhoneUrl } =
useRemoteCameraSession()
// Ref to track if we've already initiated session creation
// This prevents React 18 Strict Mode from creating duplicate sessions
const creationInitiatedRef = useRef(false)
// If we have an existing session ID, use it instead of creating a new one
useEffect(() => {
if (existingSessionId && !session) {
@@ -40,8 +44,10 @@
}, [existingSessionId, session, setExistingSession])
// Create session on mount only if no existing session
// Use ref to prevent duplicate creation in React 18 Strict Mode
useEffect(() => {
if (!session && !isCreating && !existingSessionId) {
if (!session && !isCreating && !existingSessionId && !creationInitiatedRef.current) {
creationInitiatedRef.current = true
createSession().then((newSession) => {
if (newSession && onSessionCreated) {
onSessionCreated(newSession.sessionId)

apps/web/src/hooks/useAbacusVision.ts

@@ -8,6 +8,11 @@ import {
isArucoAvailable,
loadAruco,
} from '@/lib/vision/arucoDetection'
import {
analyzeColumns,
analysesToDigits,
digitsToNumber as cvDigitsToNumber,
} from '@/lib/vision/beadDetector'
import { digitsToNumber, getMinConfidence, processVideoFrame } from '@/lib/vision/frameProcessor'
import type {
CalibrationGrid,
@@ -83,9 +88,9 @@ export function useAbacusVision(options: UseAbacusVisionOptions = {}): UseAbacus
// Track previous stable value to avoid duplicate callbacks
const lastStableValueRef = useRef<number | null>(null)
// Throttle inference to 5fps for performance
// Throttle detection (CV is fast, 10fps is plenty)
const lastInferenceTimeRef = useRef<number>(0)
const INFERENCE_INTERVAL_MS = 200 // 5fps
const INFERENCE_INTERVAL_MS = 100 // 10fps
// Ref for calibration functions to avoid infinite loop in auto-calibration effect
const calibrationRef = useRef(calibration)
@@ -275,10 +280,10 @@ export function useAbacusVision(options: UseAbacusVisionOptions = {}): UseAbacus
}, [calibration])
/**
* Process a video frame for detection using TensorFlow.js classifier
* Process a video frame for detection using CV-based bead detection
*/
const processFrame = useCallback(async () => {
// Throttle inference for performance (5fps instead of 60fps)
// Throttle inference for performance (10fps)
const now = performance.now()
if (now - lastInferenceTimeRef.current < INFERENCE_INTERVAL_MS) {
return
@@ -305,20 +310,31 @@ export function useAbacusVision(options: UseAbacusVisionOptions = {}): UseAbacus
const columnImages = processVideoFrame(video, calibration.calibration)
if (columnImages.length === 0) return
// Run classification
const result = await classifier.classifyColumns(columnImages)
if (!result) return
// Use CV-based bead detection instead of ML
const analyses = analyzeColumns(columnImages)
const { digits, confidences, minConfidence } = analysesToDigits(analyses)
// Log analysis for debugging
console.log(
'[CV] Bead analysis:',
analyses.map((a) => ({
digit: a.digit,
conf: a.confidence.toFixed(2),
heaven: a.heavenActive ? '5' : '0',
earth: a.earthActiveCount,
bar: a.reckoningBarPosition.toFixed(2),
}))
)
// Update column confidences
setColumnConfidences(result.confidences)
setColumnConfidences(confidences)
// Convert digits to number
const detectedValue = digitsToNumber(result.digits)
const minConfidence = getMinConfidence(result.confidences)
const detectedValue = cvDigitsToNumber(digits)
// Push to stability buffer
stability.pushFrame(detectedValue, minConfidence)
}, [camera.videoStream, calibration.isCalibrated, calibration.calibration, stability, classifier])
}, [camera.videoStream, calibration.isCalibrated, calibration.calibration, stability])
/**
* Detection loop

apps/web/src/hooks/useRemoteCameraDesktop.ts

@@ -73,16 +73,23 @@ export function useRemoteCameraDesktop(): UseRemoteCameraDesktopReturn {
// Initialize socket connection
useEffect(() => {
console.log('[RemoteCameraDesktop] Initializing socket connection...')
const socketInstance = io({
path: '/api/socket',
autoConnect: true,
})
socketInstance.on('connect', () => {
console.log('[RemoteCameraDesktop] Socket connected! ID:', socketInstance.id)
setIsConnected(true)
})
socketInstance.on('disconnect', () => {
socketInstance.on('connect_error', (error) => {
console.error('[RemoteCameraDesktop] Socket connect error:', error)
})
socketInstance.on('disconnect', (reason) => {
console.log('[RemoteCameraDesktop] Socket disconnected:', reason)
setIsConnected(false)
})
@@ -105,17 +112,20 @@ export function useRemoteCameraDesktop(): UseRemoteCameraDesktopReturn {
if (!socket) return
const handleConnected = ({ phoneConnected }: { phoneConnected: boolean }) => {
console.log('[RemoteCameraDesktop] Phone connected event:', phoneConnected)
setIsPhoneConnected(phoneConnected)
setError(null)
}
const handleDisconnected = ({ phoneConnected }: { phoneConnected: boolean }) => {
console.log('[RemoteCameraDesktop] Phone disconnected event:', phoneConnected)
setIsPhoneConnected(phoneConnected)
setLatestFrame(null)
setFrameRate(0)
}
const handleStatus = ({ phoneConnected }: { phoneConnected: boolean }) => {
console.log('[RemoteCameraDesktop] Status event:', phoneConnected)
setIsPhoneConnected(phoneConnected)
}
@@ -174,13 +184,16 @@ export function useRemoteCameraDesktop(): UseRemoteCameraDesktopReturn {
const subscribe = useCallback(
(sessionId: string) => {
console.log('[RemoteCameraDesktop] Subscribing to session:', sessionId, 'socket:', !!socket, 'connected:', isConnected)
if (!socket || !isConnected) {
console.error('[RemoteCameraDesktop] Socket not connected!')
setError('Socket not connected')
return
}
currentSessionId.current = sessionId
setError(null)
console.log('[RemoteCameraDesktop] Emitting remote-camera:subscribe')
socket.emit('remote-camera:subscribe', { sessionId })
},
[socket, isConnected]

apps/web/src/hooks/useRemoteCameraPhone.ts

@@ -68,8 +68,13 @@ interface UseRemoteCameraPhoneReturn {
export function useRemoteCameraPhone(
options: UseRemoteCameraPhoneOptions = {}
): UseRemoteCameraPhoneReturn {
const { targetFps = 10, jpegQuality = 0.8, targetWidth = 300, rawWidth = 640, onTorchRequest } =
options
const {
targetFps = 10,
jpegQuality = 0.8,
targetWidth = 300,
rawWidth = 640,
onTorchRequest,
} = options
// Keep onTorchRequest in a ref to avoid stale closures
const onTorchRequestRef = useRef(onTorchRequest)
@@ -115,16 +120,23 @@
// Initialize socket connection
useEffect(() => {
console.log('[RemoteCameraPhone] Initializing socket connection...')
const socketInstance = io({
path: '/api/socket',
autoConnect: true,
})
socketInstance.on('connect', () => {
console.log('[RemoteCameraPhone] Socket connected! ID:', socketInstance.id)
setIsSocketConnected(true)
})
socketInstance.on('disconnect', () => {
socketInstance.on('connect_error', (error) => {
console.error('[RemoteCameraPhone] Socket connect error:', error)
})
socketInstance.on('disconnect', (reason) => {
console.log('[RemoteCameraPhone] Socket disconnected:', reason)
setIsSocketConnected(false)
setIsConnected(false)
isConnectedRef.current = false
@@ -314,7 +326,9 @@
const connect = useCallback(
(sessionId: string) => {
const socket = socketRef.current
console.log('[RemoteCameraPhone] Connecting to session:', sessionId, 'socket:', !!socket, 'connected:', isSocketConnected)
if (!socket || !isSocketConnected) {
console.error('[RemoteCameraPhone] Socket not connected!')
setError('Socket not connected')
return
}
@@ -322,6 +336,7 @@
sessionIdRef.current = sessionId
setError(null)
console.log('[RemoteCameraPhone] Emitting remote-camera:join')
socket.emit('remote-camera:join', { sessionId })
setIsConnected(true)
isConnectedRef.current = true

apps/web/src/lib/vision/beadDetector.ts

@@ -0,0 +1,203 @@
/**
* Traditional CV-based bead detection for abacus columns
*
* Uses vertical intensity profiles and peak detection instead of ML.
* Works by detecting the reckoning bar and analyzing bead positions
* relative to it.
*/
export interface BeadAnalysis {
/** Detected digit value (0-9) */
digit: number
/** Confidence based on detection clarity */
confidence: number
/** Position of reckoning bar (0-1, relative to column height) */
reckoningBarPosition: number
/** Number of beads detected above bar */
heavenBeadsDetected: number
/** Whether heaven bead is active (touching bar) */
heavenActive: boolean
/** Number of beads detected below bar */
earthBeadsDetected: number
/** Number of active earth beads (touching bar) */
earthActiveCount: number
}
/**
* Analyze a single column image to detect bead positions
*
* @param imageData - Grayscale image data of a single column
* @returns Analysis result with detected digit
*/
export function analyzeColumn(imageData: ImageData): BeadAnalysis {
const { width, height, data } = imageData
// Step 1: Create vertical intensity profile (average each row)
const rowIntensities = new Float32Array(height)
for (let y = 0; y < height; y++) {
let sum = 0
for (let x = 0; x < width; x++) {
const idx = (y * width + x) * 4
sum += data[idx] // Use red channel (grayscale)
}
rowIntensities[y] = sum / width
}
// Step 2: Find reckoning bar (darkest horizontal region)
// The bar is typically a dark horizontal band in the middle half of the column
const searchStart = Math.floor(height * 0.25)
const searchEnd = Math.floor(height * 0.75)
let darkestRow = searchStart
let darkestValue = 255
// Use a sliding window to find the darkest band
const windowSize = Math.max(3, Math.floor(height * 0.03))
for (let y = searchStart; y < searchEnd - windowSize; y++) {
let windowSum = 0
for (let i = 0; i < windowSize; i++) {
windowSum += rowIntensities[y + i]
}
const windowAvg = windowSum / windowSize
if (windowAvg < darkestValue) {
darkestValue = windowAvg
darkestRow = y + Math.floor(windowSize / 2)
}
}
const reckoningBarPosition = darkestRow / height
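// e.g. darkestRow 60 in a 128-row column → reckoningBarPosition ≈ 0.47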
// Step 3: Analyze heaven section (above bar)
// Find peaks in intensity (beads are darker than background)
const heavenStart = 0
const heavenEnd = darkestRow - windowSize
const heavenPeaks = findPeaks(rowIntensities, heavenStart, heavenEnd, height)
// Heaven bead is active if it's close to the reckoning bar
const heavenActiveThreshold = height * 0.15 // Within 15% of bar
const heavenActive =
heavenPeaks.length > 0 &&
darkestRow - heavenPeaks[heavenPeaks.length - 1] < heavenActiveThreshold
// Step 4: Analyze earth section (below bar)
const earthStart = darkestRow + windowSize
const earthEnd = height
const earthPeaks = findPeaks(rowIntensities, earthStart, earthEnd, height)
// Earth beads are active if they're close to the reckoning bar
const earthActiveCount = earthPeaks.filter(
(peak) => peak - darkestRow < heavenActiveThreshold
).length
// Step 5: Calculate digit value
// Heaven bead = 5, each earth bead = 1
const heavenValue = heavenActive ? 5 : 0
const earthValue = Math.min(earthActiveCount, 4) // Max 4 earth beads
const digit = heavenValue + earthValue
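// e.g. heaven bead active (5) plus two active earth beads → digit 7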
// Step 6: Calculate confidence based on detection quality
// Higher confidence if we found expected number of beads and clear bar
const expectedHeavenBeads = 1
const expectedEarthBeads = 4
const heavenConfidence = heavenPeaks.length === expectedHeavenBeads ? 1.0 : 0.5
const earthConfidence =
earthPeaks.length >= expectedEarthBeads ? 1.0 : earthPeaks.length / expectedEarthBeads
const barContrast = (255 - darkestValue) / 255 // How dark is the bar?
const confidence = (heavenConfidence + earthConfidence + barContrast) / 3
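// e.g. heavenConfidence 1.0, earthConfidence 1.0, barContrast 0.7 → confidence 0.9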
return {
digit,
confidence,
reckoningBarPosition,
heavenBeadsDetected: heavenPeaks.length,
heavenActive,
earthBeadsDetected: earthPeaks.length,
earthActiveCount,
}
}
/**
* Find peaks (local minima = dark beads) in intensity profile
*/
function findPeaks(
intensities: Float32Array,
start: number,
end: number,
totalHeight: number
): number[] {
const peaks: number[] = []
const minPeakDistance = Math.floor(totalHeight * 0.05) // Min 5% height between peaks
const threshold = calculateAdaptiveThreshold(intensities, start, end)
let lastPeak = -minPeakDistance * 2
for (let y = start + 2; y < end - 2; y++) {
const current = intensities[y]
// Local minimum (darker than neighbors)
if (
current < intensities[y - 1] &&
current < intensities[y + 1] &&
current < intensities[y - 2] &&
current < intensities[y + 2] &&
current < threshold &&
y - lastPeak >= minPeakDistance
) {
peaks.push(y)
lastPeak = y
}
}
return peaks
}
/**
* Calculate adaptive threshold for peak detection
*/
function calculateAdaptiveThreshold(intensities: Float32Array, start: number, end: number): number {
let sum = 0
let min = 255
let max = 0
for (let y = start; y < end; y++) {
sum += intensities[y]
min = Math.min(min, intensities[y])
max = Math.max(max, intensities[y])
}
const avg = sum / (end - start)
// Threshold halfway between average and minimum
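// e.g. avg 180, min 40 → threshold (180 + 40) / 2 = 110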
return (avg + min) / 2
}
/**
* Analyze multiple columns
*/
export function analyzeColumns(columnImages: ImageData[]): BeadAnalysis[] {
return columnImages.map(analyzeColumn)
}
/**
* Convert bead analyses to digits
*/
export function analysesToDigits(analyses: BeadAnalysis[]): {
digits: number[]
confidences: number[]
minConfidence: number
} {
const digits = analyses.map((a) => a.digit)
const confidences = analyses.map((a) => a.confidence)
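// the weakest column bounds the overall frame confidence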
const minConfidence = confidences.length > 0 ? Math.min(...confidences) : 0
return { digits, confidences, minConfidence }
}
/**
* Convert digits to number
*/
export function digitsToNumber(digits: number[]): number {
if (digits.length === 0) return 0
return digits.reduce((acc, d) => acc * 10 + d, 0)
}

apps/web/src/lib/vision/frameProcessor.ts

@@ -214,3 +214,59 @@ export function getMinConfidence(confidences: number[]): number {
if (confidences.length === 0) return 0
return Math.min(...confidences)
}
/**
* Process an image frame for classification (for remote camera frames)
*
* @param image - Image element with the frame
* @param calibration - Calibration grid (if null, assumes entire image is the abacus)
* @param columnCount - Number of columns to slice into
* @param columnWidth - Target column width for model input
* @param columnHeight - Target column height for model input
* @returns Array of preprocessed column ImageData ready for classification
*/
export function processImageFrame(
image: HTMLImageElement,
calibration: CalibrationGrid | null,
columnCount: number,
columnWidth: number = 64,
columnHeight: number = 128
): ImageData[] {
// Create canvas for image frame
const canvas = document.createElement('canvas')
canvas.width = image.naturalWidth || image.width
canvas.height = image.naturalHeight || image.height
const ctx = canvas.getContext('2d')!
// Draw image frame
ctx.drawImage(image, 0, 0)
let roiData: ImageData
if (calibration) {
// Extract ROI using calibration
roiData = extractROI(ctx, calibration.roi)
} else {
// No calibration - use entire image as ROI (already cropped by phone)
roiData = ctx.getImageData(0, 0, canvas.width, canvas.height)
}
// Create a synthetic calibration for slicing if none provided
const sliceCalibration: CalibrationGrid = calibration ?? {
roi: { x: 0, y: 0, width: canvas.width, height: canvas.height },
columnCount,
columnDividers: Array.from({ length: columnCount - 1 }, (_, i) => (i + 1) / columnCount),
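// e.g. columnCount 4 → dividers at [0.25, 0.5, 0.75]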
rotation: 0,
}
// Slice into columns
const columns = sliceIntoColumns(roiData, sliceCalibration)
// Preprocess each column
return columns.map((col) => {
// Convert to grayscale
const gray = toGrayscale(col)
// Resize to model input size
return resizeImageData(gray, columnWidth, columnHeight)
})
}
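
For reference, the remote-frame path in AbacusVisionBridge chains these pieces
together roughly like this (a simplified sketch: image is the <img> element
holding the latest remote frame; throttling and error handling are omitted):

    import { analyzeColumns, analysesToDigits, digitsToNumber } from '@/lib/vision/beadDetector'
    import { processImageFrame } from '@/lib/vision/frameProcessor'

    // Slice the frame into per-column grayscale images, then run CV bead detection
    const columnImages = processImageFrame(image, calibration, columnCount)
    const analyses = analyzeColumns(columnImages)
    const { digits, minConfidence } = analysesToDigits(analyses)
    const detectedValue = digitsToNumber(digits) // e.g. [4, 2] → 42
    remoteStability.pushFrame(detectedValue, minConfidence)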