feat(vision): add CV-based bead detection and fix remote camera connection

- Add beadDetector.ts with intensity-profile-based bead detection (CV approach)
- Integrate CV pipeline for both local camera and remote phone camera feeds
- Add processImageFrame() to frameProcessor for remote camera image processing
- Fix React 18 Strict Mode duplicate session creation in RemoteCameraQRCode
- Add debug logging to remote camera hooks for connection troubleshooting
- Add VisionStatusIndicator for remote camera feed in AbacusVisionBridge

The duplicate session bug was caused by React 18 Strict Mode double-mounting
components and running effects twice with fresh state, which called
createSession() twice and created two different sessions - phone joined
one, desktop subscribed to the other.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
Thomas Hallock
2026-01-01 11:29:02 -06:00
parent 5d0ac65bdd
commit 005140a1e7
12 changed files with 645 additions and 360 deletions

View File

@@ -422,7 +422,34 @@
"Bash(apps/web/src/lib/vision/perspectiveTransform.ts )", "Bash(apps/web/src/lib/vision/perspectiveTransform.ts )",
"Bash(apps/web/src/socket-server.ts)", "Bash(apps/web/src/socket-server.ts)",
"Bash(apps/web/src/components/vision/CalibrationOverlay.tsx )", "Bash(apps/web/src/components/vision/CalibrationOverlay.tsx )",
"Bash(apps/web/src/components/practice/ActiveSession.tsx )" "Bash(apps/web/src/components/practice/ActiveSession.tsx )",
"Bash(open -a Preview:*)",
"Bash(pip3 install:*)",
"Bash(pip3 uninstall:*)",
"Bash(/opt/homebrew/bin/python3:*)",
"Bash(/usr/bin/python3:*)",
"Bash(/opt/homebrew/bin/pip3 install:*)",
"Bash(source:*)",
"Bash(pip install:*)",
"Bash(/opt/homebrew/opt/python@3.11/bin/python3.11:*)",
"Bash(tensorflowjs_converter:*)",
"Bash(public/models/abacus-column-classifier/column-classifier.keras )",
"Bash(public/models/abacus-column-classifier/)",
"Bash(public/models/abacus-column-classifier/column-classifier.h5 )",
"Bash(apps/web/scripts/train-column-classifier/train_model.py )",
"Bash(apps/web/src/app/remote-camera/[sessionId]/page.tsx )",
"Bash(apps/web/src/hooks/useColumnClassifier.ts )",
"Bash(apps/web/src/lib/vision/columnClassifier.ts )",
"Bash(\"apps/web/src/app/remote-camera/[sessionId]/page.tsx\" )",
"Bash(apps/web/drizzle/0054_new_mathemanic.sql )",
"Bash(apps/web/drizzle/meta/0054_snapshot.json )",
"Bash(apps/web/src/components/AbacusDisplayDropdown.tsx )",
"Bash(apps/web/src/db/schema/abacus-settings.ts )",
"Bash(packages/abacus-react/src/AbacusContext.tsx)",
"Bash(apps/web/src/lib/vision/frameProcessor.ts )",
"Bash(apps/web/src/lib/vision/beadDetector.ts )",
"Bash(apps/web/public/models/abacus-column-classifier/model.json )",
"Bash(.claude/settings.local.json)"
], ],
"deny": [], "deny": [],
"ask": [] "ask": []

View File

@@ -116,13 +116,9 @@
"abacus_settings_user_id_users_id_fk": { "abacus_settings_user_id_users_id_fk": {
"name": "abacus_settings_user_id_users_id_fk", "name": "abacus_settings_user_id_users_id_fk",
"tableFrom": "abacus_settings", "tableFrom": "abacus_settings",
"columnsFrom": [ "columnsFrom": ["user_id"],
"user_id"
],
"tableTo": "users", "tableTo": "users",
"columnsTo": [ "columnsTo": ["id"],
"id"
],
"onUpdate": "no action", "onUpdate": "no action",
"onDelete": "cascade" "onDelete": "cascade"
} }
@@ -240,9 +236,7 @@
"indexes": { "indexes": {
"arcade_rooms_code_unique": { "arcade_rooms_code_unique": {
"name": "arcade_rooms_code_unique", "name": "arcade_rooms_code_unique",
"columns": [ "columns": ["code"],
"code"
],
"isUnique": true "isUnique": true
} }
}, },
@@ -339,26 +333,18 @@
"arcade_sessions_room_id_arcade_rooms_id_fk": { "arcade_sessions_room_id_arcade_rooms_id_fk": {
"name": "arcade_sessions_room_id_arcade_rooms_id_fk", "name": "arcade_sessions_room_id_arcade_rooms_id_fk",
"tableFrom": "arcade_sessions", "tableFrom": "arcade_sessions",
"columnsFrom": [ "columnsFrom": ["room_id"],
"room_id"
],
"tableTo": "arcade_rooms", "tableTo": "arcade_rooms",
"columnsTo": [ "columnsTo": ["id"],
"id"
],
"onUpdate": "no action", "onUpdate": "no action",
"onDelete": "cascade" "onDelete": "cascade"
}, },
"arcade_sessions_user_id_users_id_fk": { "arcade_sessions_user_id_users_id_fk": {
"name": "arcade_sessions_user_id_users_id_fk", "name": "arcade_sessions_user_id_users_id_fk",
"tableFrom": "arcade_sessions", "tableFrom": "arcade_sessions",
"columnsFrom": [ "columnsFrom": ["user_id"],
"user_id"
],
"tableTo": "users", "tableTo": "users",
"columnsTo": [ "columnsTo": ["id"],
"id"
],
"onUpdate": "no action", "onUpdate": "no action",
"onDelete": "cascade" "onDelete": "cascade"
} }
@@ -424,9 +410,7 @@
"indexes": { "indexes": {
"players_user_id_idx": { "players_user_id_idx": {
"name": "players_user_id_idx", "name": "players_user_id_idx",
"columns": [ "columns": ["user_id"],
"user_id"
],
"isUnique": false "isUnique": false
} }
}, },
@@ -434,13 +418,9 @@
"players_user_id_users_id_fk": { "players_user_id_users_id_fk": {
"name": "players_user_id_users_id_fk", "name": "players_user_id_users_id_fk",
"tableFrom": "players", "tableFrom": "players",
"columnsFrom": [ "columnsFrom": ["user_id"],
"user_id"
],
"tableTo": "users", "tableTo": "users",
"columnsTo": [ "columnsTo": ["id"],
"id"
],
"onUpdate": "no action", "onUpdate": "no action",
"onDelete": "cascade" "onDelete": "cascade"
} }
@@ -514,9 +494,7 @@
"indexes": { "indexes": {
"idx_room_members_user_id_unique": { "idx_room_members_user_id_unique": {
"name": "idx_room_members_user_id_unique", "name": "idx_room_members_user_id_unique",
"columns": [ "columns": ["user_id"],
"user_id"
],
"isUnique": true "isUnique": true
} }
}, },
@@ -524,13 +502,9 @@
"room_members_room_id_arcade_rooms_id_fk": { "room_members_room_id_arcade_rooms_id_fk": {
"name": "room_members_room_id_arcade_rooms_id_fk", "name": "room_members_room_id_arcade_rooms_id_fk",
"tableFrom": "room_members", "tableFrom": "room_members",
"columnsFrom": [ "columnsFrom": ["room_id"],
"room_id"
],
"tableTo": "arcade_rooms", "tableTo": "arcade_rooms",
"columnsTo": [ "columnsTo": ["id"],
"id"
],
"onUpdate": "no action", "onUpdate": "no action",
"onDelete": "cascade" "onDelete": "cascade"
} }
@@ -605,13 +579,9 @@
"room_member_history_room_id_arcade_rooms_id_fk": { "room_member_history_room_id_arcade_rooms_id_fk": {
"name": "room_member_history_room_id_arcade_rooms_id_fk", "name": "room_member_history_room_id_arcade_rooms_id_fk",
"tableFrom": "room_member_history", "tableFrom": "room_member_history",
"columnsFrom": [ "columnsFrom": ["room_id"],
"room_id"
],
"tableTo": "arcade_rooms", "tableTo": "arcade_rooms",
"columnsTo": [ "columnsTo": ["id"],
"id"
],
"onUpdate": "no action", "onUpdate": "no action",
"onDelete": "cascade" "onDelete": "cascade"
} }
@@ -713,10 +683,7 @@
"indexes": { "indexes": {
"idx_room_invitations_user_room": { "idx_room_invitations_user_room": {
"name": "idx_room_invitations_user_room", "name": "idx_room_invitations_user_room",
"columns": [ "columns": ["user_id", "room_id"],
"user_id",
"room_id"
],
"isUnique": true "isUnique": true
} }
}, },
@@ -724,13 +691,9 @@
"room_invitations_room_id_arcade_rooms_id_fk": { "room_invitations_room_id_arcade_rooms_id_fk": {
"name": "room_invitations_room_id_arcade_rooms_id_fk", "name": "room_invitations_room_id_arcade_rooms_id_fk",
"tableFrom": "room_invitations", "tableFrom": "room_invitations",
"columnsFrom": [ "columnsFrom": ["room_id"],
"room_id"
],
"tableTo": "arcade_rooms", "tableTo": "arcade_rooms",
"columnsTo": [ "columnsTo": ["id"],
"id"
],
"onUpdate": "no action", "onUpdate": "no action",
"onDelete": "cascade" "onDelete": "cascade"
} }
@@ -833,13 +796,9 @@
"room_reports_room_id_arcade_rooms_id_fk": { "room_reports_room_id_arcade_rooms_id_fk": {
"name": "room_reports_room_id_arcade_rooms_id_fk", "name": "room_reports_room_id_arcade_rooms_id_fk",
"tableFrom": "room_reports", "tableFrom": "room_reports",
"columnsFrom": [ "columnsFrom": ["room_id"],
"room_id"
],
"tableTo": "arcade_rooms", "tableTo": "arcade_rooms",
"columnsTo": [ "columnsTo": ["id"],
"id"
],
"onUpdate": "no action", "onUpdate": "no action",
"onDelete": "cascade" "onDelete": "cascade"
} }
@@ -918,10 +877,7 @@
"indexes": { "indexes": {
"idx_room_bans_user_room": { "idx_room_bans_user_room": {
"name": "idx_room_bans_user_room", "name": "idx_room_bans_user_room",
"columns": [ "columns": ["user_id", "room_id"],
"user_id",
"room_id"
],
"isUnique": true "isUnique": true
} }
}, },
@@ -929,13 +885,9 @@
"room_bans_room_id_arcade_rooms_id_fk": { "room_bans_room_id_arcade_rooms_id_fk": {
"name": "room_bans_room_id_arcade_rooms_id_fk", "name": "room_bans_room_id_arcade_rooms_id_fk",
"tableFrom": "room_bans", "tableFrom": "room_bans",
"columnsFrom": [ "columnsFrom": ["room_id"],
"room_id"
],
"tableTo": "arcade_rooms", "tableTo": "arcade_rooms",
"columnsTo": [ "columnsTo": ["id"],
"id"
],
"onUpdate": "no action", "onUpdate": "no action",
"onDelete": "cascade" "onDelete": "cascade"
} }
@@ -998,13 +950,9 @@
"user_stats_user_id_users_id_fk": { "user_stats_user_id_users_id_fk": {
"name": "user_stats_user_id_users_id_fk", "name": "user_stats_user_id_users_id_fk",
"tableFrom": "user_stats", "tableFrom": "user_stats",
"columnsFrom": [ "columnsFrom": ["user_id"],
"user_id"
],
"tableTo": "users", "tableTo": "users",
"columnsTo": [ "columnsTo": ["id"],
"id"
],
"onUpdate": "no action", "onUpdate": "no action",
"onDelete": "cascade" "onDelete": "cascade"
} }
@@ -1062,16 +1010,12 @@
"indexes": { "indexes": {
"users_guest_id_unique": { "users_guest_id_unique": {
"name": "users_guest_id_unique", "name": "users_guest_id_unique",
"columns": [ "columns": ["guest_id"],
"guest_id"
],
"isUnique": true "isUnique": true
}, },
"users_email_unique": { "users_email_unique": {
"name": "users_email_unique", "name": "users_email_unique",
"columns": [ "columns": ["email"],
"email"
],
"isUnique": true "isUnique": true
} }
}, },
@@ -1091,4 +1035,4 @@
"internal": { "internal": {
"indexes": {} "indexes": {}
} }
} }

View File

@@ -388,4 +388,4 @@
"breakpoints": true "breakpoints": true
} }
] ]
} }

View File

@@ -20,12 +20,7 @@
"ragged": false, "ragged": false,
"name": "input_layer", "name": "input_layer",
"optional": false, "optional": false,
"batchInputShape": [ "batchInputShape": [null, 128, 64, 1]
null,
128,
64,
1
]
} }
}, },
{ {
@@ -35,20 +30,11 @@
"trainable": true, "trainable": true,
"dtype": "float32", "dtype": "float32",
"filters": 32, "filters": 32,
"kernel_size": [ "kernel_size": [3, 3],
3, "strides": [1, 1],
3
],
"strides": [
1,
1
],
"padding": "same", "padding": "same",
"data_format": "channels_last", "data_format": "channels_last",
"dilation_rate": [ "dilation_rate": [1, 1],
1,
1
],
"groups": 1, "groups": 1,
"activation": "relu", "activation": "relu",
"use_bias": true, "use_bias": true,
@@ -121,15 +107,9 @@
"name": "max_pooling2d", "name": "max_pooling2d",
"trainable": true, "trainable": true,
"dtype": "float32", "dtype": "float32",
"pool_size": [ "pool_size": [2, 2],
2,
2
],
"padding": "valid", "padding": "valid",
"strides": [ "strides": [2, 2],
2,
2
],
"data_format": "channels_last" "data_format": "channels_last"
} }
}, },
@@ -151,20 +131,11 @@
"trainable": true, "trainable": true,
"dtype": "float32", "dtype": "float32",
"filters": 64, "filters": 64,
"kernel_size": [ "kernel_size": [3, 3],
3, "strides": [1, 1],
3
],
"strides": [
1,
1
],
"padding": "same", "padding": "same",
"data_format": "channels_last", "data_format": "channels_last",
"dilation_rate": [ "dilation_rate": [1, 1],
1,
1
],
"groups": 1, "groups": 1,
"activation": "relu", "activation": "relu",
"use_bias": true, "use_bias": true,
@@ -237,15 +208,9 @@
"name": "max_pooling2d_1", "name": "max_pooling2d_1",
"trainable": true, "trainable": true,
"dtype": "float32", "dtype": "float32",
"pool_size": [ "pool_size": [2, 2],
2,
2
],
"padding": "valid", "padding": "valid",
"strides": [ "strides": [2, 2],
2,
2
],
"data_format": "channels_last" "data_format": "channels_last"
} }
}, },
@@ -267,20 +232,11 @@
"trainable": true, "trainable": true,
"dtype": "float32", "dtype": "float32",
"filters": 128, "filters": 128,
"kernel_size": [ "kernel_size": [3, 3],
3, "strides": [1, 1],
3
],
"strides": [
1,
1
],
"padding": "same", "padding": "same",
"data_format": "channels_last", "data_format": "channels_last",
"dilation_rate": [ "dilation_rate": [1, 1],
1,
1
],
"groups": 1, "groups": 1,
"activation": "relu", "activation": "relu",
"use_bias": true, "use_bias": true,
@@ -353,15 +309,9 @@
"name": "max_pooling2d_2", "name": "max_pooling2d_2",
"trainable": true, "trainable": true,
"dtype": "float32", "dtype": "float32",
"pool_size": [ "pool_size": [2, 2],
2,
2
],
"padding": "valid", "padding": "valid",
"strides": [ "strides": [2, 2],
2,
2
],
"data_format": "channels_last" "data_format": "channels_last"
} }
}, },
@@ -499,20 +449,13 @@
} }
} }
], ],
"build_input_shape": [ "build_input_shape": [null, 128, 64, 1]
null,
128,
64,
1
]
} }
}, },
"training_config": { "training_config": {
"loss": "sparse_categorical_crossentropy", "loss": "sparse_categorical_crossentropy",
"loss_weights": null, "loss_weights": null,
"metrics": [ "metrics": ["accuracy"],
"accuracy"
],
"weighted_metrics": null, "weighted_metrics": null,
"run_eagerly": false, "run_eagerly": false,
"steps_per_execution": 1, "steps_per_execution": 1,
@@ -541,15 +484,11 @@
}, },
"weightsManifest": [ "weightsManifest": [
{ {
"paths": [ "paths": ["group1-shard1of1.bin"],
"group1-shard1of1.bin"
],
"weights": [ "weights": [
{ {
"name": "batch_normalization/gamma", "name": "batch_normalization/gamma",
"shape": [ "shape": [32],
32
],
"dtype": "float32", "dtype": "float32",
"quantization": { "quantization": {
"dtype": "uint8", "dtype": "uint8",
@@ -560,9 +499,7 @@
}, },
{ {
"name": "batch_normalization/beta", "name": "batch_normalization/beta",
"shape": [ "shape": [32],
32
],
"dtype": "float32", "dtype": "float32",
"quantization": { "quantization": {
"dtype": "uint8", "dtype": "uint8",
@@ -573,9 +510,7 @@
}, },
{ {
"name": "batch_normalization/moving_mean", "name": "batch_normalization/moving_mean",
"shape": [ "shape": [32],
32
],
"dtype": "float32", "dtype": "float32",
"quantization": { "quantization": {
"dtype": "uint8", "dtype": "uint8",
@@ -586,9 +521,7 @@
}, },
{ {
"name": "batch_normalization/moving_variance", "name": "batch_normalization/moving_variance",
"shape": [ "shape": [32],
32
],
"dtype": "float32", "dtype": "float32",
"quantization": { "quantization": {
"dtype": "uint8", "dtype": "uint8",
@@ -599,9 +532,7 @@
}, },
{ {
"name": "batch_normalization_1/gamma", "name": "batch_normalization_1/gamma",
"shape": [ "shape": [64],
64
],
"dtype": "float32", "dtype": "float32",
"quantization": { "quantization": {
"dtype": "uint8", "dtype": "uint8",
@@ -612,9 +543,7 @@
}, },
{ {
"name": "batch_normalization_1/beta", "name": "batch_normalization_1/beta",
"shape": [ "shape": [64],
64
],
"dtype": "float32", "dtype": "float32",
"quantization": { "quantization": {
"dtype": "uint8", "dtype": "uint8",
@@ -625,9 +554,7 @@
}, },
{ {
"name": "batch_normalization_1/moving_mean", "name": "batch_normalization_1/moving_mean",
"shape": [ "shape": [64],
64
],
"dtype": "float32", "dtype": "float32",
"quantization": { "quantization": {
"dtype": "uint8", "dtype": "uint8",
@@ -638,9 +565,7 @@
}, },
{ {
"name": "batch_normalization_1/moving_variance", "name": "batch_normalization_1/moving_variance",
"shape": [ "shape": [64],
64
],
"dtype": "float32", "dtype": "float32",
"quantization": { "quantization": {
"dtype": "uint8", "dtype": "uint8",
@@ -651,9 +576,7 @@
}, },
{ {
"name": "batch_normalization_2/gamma", "name": "batch_normalization_2/gamma",
"shape": [ "shape": [128],
128
],
"dtype": "float32", "dtype": "float32",
"quantization": { "quantization": {
"dtype": "uint8", "dtype": "uint8",
@@ -664,9 +587,7 @@
}, },
{ {
"name": "batch_normalization_2/beta", "name": "batch_normalization_2/beta",
"shape": [ "shape": [128],
128
],
"dtype": "float32", "dtype": "float32",
"quantization": { "quantization": {
"dtype": "uint8", "dtype": "uint8",
@@ -677,9 +598,7 @@
}, },
{ {
"name": "batch_normalization_2/moving_mean", "name": "batch_normalization_2/moving_mean",
"shape": [ "shape": [128],
128
],
"dtype": "float32", "dtype": "float32",
"quantization": { "quantization": {
"dtype": "uint8", "dtype": "uint8",
@@ -690,9 +609,7 @@
}, },
{ {
"name": "batch_normalization_2/moving_variance", "name": "batch_normalization_2/moving_variance",
"shape": [ "shape": [128],
128
],
"dtype": "float32", "dtype": "float32",
"quantization": { "quantization": {
"dtype": "uint8", "dtype": "uint8",
@@ -703,9 +620,7 @@
}, },
{ {
"name": "batch_normalization_3/gamma", "name": "batch_normalization_3/gamma",
"shape": [ "shape": [128],
128
],
"dtype": "float32", "dtype": "float32",
"quantization": { "quantization": {
"dtype": "uint8", "dtype": "uint8",
@@ -716,9 +631,7 @@
}, },
{ {
"name": "batch_normalization_3/beta", "name": "batch_normalization_3/beta",
"shape": [ "shape": [128],
128
],
"dtype": "float32", "dtype": "float32",
"quantization": { "quantization": {
"dtype": "uint8", "dtype": "uint8",
@@ -729,9 +642,7 @@
}, },
{ {
"name": "batch_normalization_3/moving_mean", "name": "batch_normalization_3/moving_mean",
"shape": [ "shape": [128],
128
],
"dtype": "float32", "dtype": "float32",
"quantization": { "quantization": {
"dtype": "uint8", "dtype": "uint8",
@@ -742,9 +653,7 @@
}, },
{ {
"name": "batch_normalization_3/moving_variance", "name": "batch_normalization_3/moving_variance",
"shape": [ "shape": [128],
128
],
"dtype": "float32", "dtype": "float32",
"quantization": { "quantization": {
"dtype": "uint8", "dtype": "uint8",
@@ -755,12 +664,7 @@
}, },
{ {
"name": "conv2d/kernel", "name": "conv2d/kernel",
"shape": [ "shape": [3, 3, 1, 32],
3,
3,
1,
32
],
"dtype": "float32", "dtype": "float32",
"quantization": { "quantization": {
"dtype": "uint8", "dtype": "uint8",
@@ -771,9 +675,7 @@
}, },
{ {
"name": "conv2d/bias", "name": "conv2d/bias",
"shape": [ "shape": [32],
32
],
"dtype": "float32", "dtype": "float32",
"quantization": { "quantization": {
"dtype": "uint8", "dtype": "uint8",
@@ -784,12 +686,7 @@
}, },
{ {
"name": "conv2d_1/kernel", "name": "conv2d_1/kernel",
"shape": [ "shape": [3, 3, 32, 64],
3,
3,
32,
64
],
"dtype": "float32", "dtype": "float32",
"quantization": { "quantization": {
"dtype": "uint8", "dtype": "uint8",
@@ -800,9 +697,7 @@
}, },
{ {
"name": "conv2d_1/bias", "name": "conv2d_1/bias",
"shape": [ "shape": [64],
64
],
"dtype": "float32", "dtype": "float32",
"quantization": { "quantization": {
"dtype": "uint8", "dtype": "uint8",
@@ -813,12 +708,7 @@
}, },
{ {
"name": "conv2d_2/kernel", "name": "conv2d_2/kernel",
"shape": [ "shape": [3, 3, 64, 128],
3,
3,
64,
128
],
"dtype": "float32", "dtype": "float32",
"quantization": { "quantization": {
"dtype": "uint8", "dtype": "uint8",
@@ -829,9 +719,7 @@
}, },
{ {
"name": "conv2d_2/bias", "name": "conv2d_2/bias",
"shape": [ "shape": [128],
128
],
"dtype": "float32", "dtype": "float32",
"quantization": { "quantization": {
"dtype": "uint8", "dtype": "uint8",
@@ -842,10 +730,7 @@
}, },
{ {
"name": "dense/kernel", "name": "dense/kernel",
"shape": [ "shape": [16384, 128],
16384,
128
],
"dtype": "float32", "dtype": "float32",
"quantization": { "quantization": {
"dtype": "uint8", "dtype": "uint8",
@@ -856,9 +741,7 @@
}, },
{ {
"name": "dense/bias", "name": "dense/bias",
"shape": [ "shape": [128],
128
],
"dtype": "float32", "dtype": "float32",
"quantization": { "quantization": {
"dtype": "uint8", "dtype": "uint8",
@@ -869,10 +752,7 @@
}, },
{ {
"name": "dense_1/kernel", "name": "dense_1/kernel",
"shape": [ "shape": [128, 10],
128,
10
],
"dtype": "float32", "dtype": "float32",
"quantization": { "quantization": {
"dtype": "uint8", "dtype": "uint8",
@@ -883,9 +763,7 @@
}, },
{ {
"name": "dense_1/bias", "name": "dense_1/bias",
"shape": [ "shape": [10],
10
],
"dtype": "float32", "dtype": "float32",
"quantization": { "quantization": {
"dtype": "uint8", "dtype": "uint8",
@@ -897,4 +775,4 @@
] ]
} }
] ]
} }

View File

@@ -94,9 +94,13 @@ export default function RemoteCameraPage() {
// Validate session on mount // Validate session on mount
useEffect(() => { useEffect(() => {
async function validateSession() { async function validateSession() {
console.log('[RemoteCameraPage] Validating session:', sessionId)
try { try {
const response = await fetch(`/api/remote-camera?sessionId=${sessionId}`) const response = await fetch(`/api/remote-camera?sessionId=${sessionId}`)
console.log('[RemoteCameraPage] Session validation response:', response.status)
if (response.ok) { if (response.ok) {
const data = await response.json()
console.log('[RemoteCameraPage] Session valid:', data)
setSessionStatus('connected') setSessionStatus('connected')
} else if (response.status === 404) { } else if (response.status === 404) {
setSessionStatus('expired') setSessionStatus('expired')
@@ -107,6 +111,7 @@ export default function RemoteCameraPage() {
setSessionError(data.error || 'Failed to validate session') setSessionError(data.error || 'Failed to validate session')
} }
} catch (err) { } catch (err) {
console.error('[RemoteCameraPage] Session validation error:', err)
setSessionStatus('error') setSessionStatus('error')
setSessionError('Network error') setSessionError('Network error')
} }

View File

@@ -1,9 +1,13 @@
'use client' 'use client'
import { motion } from 'framer-motion'
import type { ReactNode } from 'react' import type { ReactNode } from 'react'
import { useCallback, useEffect, useRef, useState } from 'react' import { useCallback, useEffect, useRef, useState } from 'react'
import { useAbacusVision } from '@/hooks/useAbacusVision' import { useAbacusVision } from '@/hooks/useAbacusVision'
import { useFrameStability } from '@/hooks/useFrameStability'
import { useRemoteCameraDesktop } from '@/hooks/useRemoteCameraDesktop' import { useRemoteCameraDesktop } from '@/hooks/useRemoteCameraDesktop'
import { analyzeColumns, analysesToDigits } from '@/lib/vision/beadDetector'
import { processImageFrame } from '@/lib/vision/frameProcessor'
import { isOpenCVReady, loadOpenCV, rectifyQuadrilateral } from '@/lib/vision/perspectiveTransform' import { isOpenCVReady, loadOpenCV, rectifyQuadrilateral } from '@/lib/vision/perspectiveTransform'
import type { CalibrationGrid, QuadCorners } from '@/types/vision' import type { CalibrationGrid, QuadCorners } from '@/types/vision'
import { DEFAULT_STABILITY_CONFIG } from '@/types/vision' import { DEFAULT_STABILITY_CONFIG } from '@/types/vision'
@@ -96,6 +100,16 @@ export function AbacusVisionBridge({
setRemoteTorch, setRemoteTorch,
} = useRemoteCameraDesktop() } = useRemoteCameraDesktop()
// Stability tracking for remote frames
const remoteStability = useFrameStability()
// Track last stable value for remote camera to avoid duplicate callbacks
const lastRemoteStableValueRef = useRef<number | null>(null)
// Throttle remote frame processing
const lastRemoteInferenceTimeRef = useRef<number>(0)
const REMOTE_INFERENCE_INTERVAL_MS = 100 // 10fps
// Handle switching to phone camera // Handle switching to phone camera
const handleCameraSourceChange = useCallback( const handleCameraSourceChange = useCallback(
(source: CameraSource) => { (source: CameraSource) => {
@@ -194,6 +208,88 @@ export function AbacusVisionBridge({
} }
}, [vision.cameraError, onError]) }, [vision.cameraError, onError])
// Process remote camera frames through CV pipeline
useEffect(() => {
// Only process when using phone camera and connected
if (cameraSource !== 'phone' || !remoteIsPhoneConnected || !remoteLatestFrame) {
return
}
// Don't process during calibration
if (remoteIsCalibrating) {
return
}
// In manual mode, need calibration to process
if (remoteCalibrationMode === 'manual' && !remoteCalibration) {
return
}
// Throttle processing
const now = performance.now()
if (now - lastRemoteInferenceTimeRef.current < REMOTE_INFERENCE_INTERVAL_MS) {
return
}
lastRemoteInferenceTimeRef.current = now
// Get image element
const image = remoteImageRef.current
if (!image || !image.complete || image.naturalWidth === 0) {
return
}
// Determine calibration to use
// In auto mode (cropped frames), no calibration needed - phone already cropped
// In manual mode, use the desktop calibration
const calibration = remoteCalibrationMode === 'auto' ? null : remoteCalibration
// Process frame through CV pipeline
const columnImages = processImageFrame(image, calibration, columnCount)
if (columnImages.length === 0) return
// Run CV-based bead detection
const analyses = analyzeColumns(columnImages)
const { digits, minConfidence } = analysesToDigits(analyses)
// Convert digits to number
const detectedValue = digits.reduce((acc, d) => acc * 10 + d, 0)
// Log for debugging
console.log(
'[Remote CV] Bead analysis:',
analyses.map((a) => ({
digit: a.digit,
conf: a.confidence.toFixed(2),
heaven: a.heavenActive ? '5' : '0',
earth: a.earthActiveCount,
}))
)
// Push to stability buffer
remoteStability.pushFrame(detectedValue, minConfidence)
}, [
cameraSource,
remoteIsPhoneConnected,
remoteLatestFrame,
remoteIsCalibrating,
remoteCalibrationMode,
remoteCalibration,
columnCount,
remoteStability,
])
// Notify when remote stable value changes
useEffect(() => {
if (
cameraSource === 'phone' &&
remoteStability.stableValue !== null &&
remoteStability.stableValue !== lastRemoteStableValueRef.current
) {
lastRemoteStableValueRef.current = remoteStability.stableValue
onValueDetected(remoteStability.stableValue)
}
}, [cameraSource, remoteStability.stableValue, onValueDetected])
// Load OpenCV when calibrating (local or remote) // Load OpenCV when calibrating (local or remote)
useEffect(() => { useEffect(() => {
const isCalibrating = vision.isCalibrating || remoteIsCalibrating const isCalibrating = vision.isCalibrating || remoteIsCalibrating
@@ -302,9 +398,12 @@ export function AbacusVisionBridge({
) )
return ( return (
<div <motion.div
ref={containerRef} ref={containerRef}
data-component="abacus-vision-bridge" data-component="abacus-vision-bridge"
drag
dragMomentum={false}
dragElastic={0}
className={css({ className={css({
display: 'flex', display: 'flex',
flexDirection: 'column', flexDirection: 'column',
@@ -314,6 +413,8 @@ export function AbacusVisionBridge({
borderRadius: 'xl', borderRadius: 'xl',
maxWidth: '400px', maxWidth: '400px',
width: '100%', width: '100%',
cursor: 'grab',
_active: { cursor: 'grabbing' },
})} })}
> >
{/* Header */} {/* Header */}
@@ -418,95 +519,95 @@ export function AbacusVisionBridge({
{/* Camera controls (local camera) - only show if there's something to display */} {/* Camera controls (local camera) - only show if there's something to display */}
{cameraSource === 'local' && {cameraSource === 'local' &&
(vision.availableDevices.length > 1 || vision.isTorchAvailable) && ( (vision.availableDevices.length > 1 || vision.isTorchAvailable) && (
<div <div
data-element="camera-controls" data-element="camera-controls"
className={css({ className={css({
display: 'flex', display: 'flex',
alignItems: 'center', alignItems: 'center',
gap: 2, gap: 2,
flexWrap: 'wrap', flexWrap: 'wrap',
})} })}
> >
{/* Camera selector (if multiple cameras) */} {/* Camera selector (if multiple cameras) */}
{vision.availableDevices.length > 1 && ( {vision.availableDevices.length > 1 && (
<select <select
data-element="camera-selector" data-element="camera-selector"
value={vision.selectedDeviceId ?? ''} value={vision.selectedDeviceId ?? ''}
onChange={handleCameraSelect} onChange={handleCameraSelect}
className={css({ className={css({
flex: 1, flex: 1,
p: 2, p: 2,
bg: 'gray.800', bg: 'gray.800',
color: 'white', color: 'white',
border: '1px solid', border: '1px solid',
borderColor: 'gray.600', borderColor: 'gray.600',
borderRadius: 'md', borderRadius: 'md',
fontSize: 'sm', fontSize: 'sm',
minWidth: '150px', minWidth: '150px',
})} })}
> >
{vision.availableDevices.map((device) => ( {vision.availableDevices.map((device) => (
<option key={device.deviceId} value={device.deviceId}> <option key={device.deviceId} value={device.deviceId}>
{device.label || `Camera ${device.deviceId.slice(0, 8)}`} {device.label || `Camera ${device.deviceId.slice(0, 8)}`}
</option> </option>
))} ))}
</select> </select>
)} )}
{/* Flip camera button - only show if multiple cameras available */} {/* Flip camera button - only show if multiple cameras available */}
{vision.availableDevices.length > 1 && ( {vision.availableDevices.length > 1 && (
<button <button
type="button" type="button"
onClick={() => vision.flipCamera()} onClick={() => vision.flipCamera()}
data-action="flip-camera" data-action="flip-camera"
className={css({ className={css({
display: 'flex', display: 'flex',
alignItems: 'center', alignItems: 'center',
justifyContent: 'center', justifyContent: 'center',
width: '40px', width: '40px',
height: '40px', height: '40px',
bg: 'gray.700', bg: 'gray.700',
color: 'white', color: 'white',
border: 'none', border: 'none',
borderRadius: 'md', borderRadius: 'md',
cursor: 'pointer', cursor: 'pointer',
fontSize: 'lg', fontSize: 'lg',
_hover: { bg: 'gray.600' }, _hover: { bg: 'gray.600' },
})} })}
title={`Switch to ${vision.facingMode === 'environment' ? 'front' : 'back'} camera`} title={`Switch to ${vision.facingMode === 'environment' ? 'front' : 'back'} camera`}
> >
🔄 🔄
</button> </button>
)} )}
{/* Torch toggle button (only if available) */} {/* Torch toggle button (only if available) */}
{vision.isTorchAvailable && ( {vision.isTorchAvailable && (
<button <button
type="button" type="button"
onClick={() => vision.toggleTorch()} onClick={() => vision.toggleTorch()}
data-action="toggle-torch" data-action="toggle-torch"
data-status={vision.isTorchOn ? 'on' : 'off'} data-status={vision.isTorchOn ? 'on' : 'off'}
className={css({ className={css({
display: 'flex', display: 'flex',
alignItems: 'center', alignItems: 'center',
justifyContent: 'center', justifyContent: 'center',
width: '40px', width: '40px',
height: '40px', height: '40px',
bg: vision.isTorchOn ? 'yellow.600' : 'gray.700', bg: vision.isTorchOn ? 'yellow.600' : 'gray.700',
color: 'white', color: 'white',
border: 'none', border: 'none',
borderRadius: 'md', borderRadius: 'md',
cursor: 'pointer', cursor: 'pointer',
fontSize: 'lg', fontSize: 'lg',
_hover: { bg: vision.isTorchOn ? 'yellow.500' : 'gray.600' }, _hover: { bg: vision.isTorchOn ? 'yellow.500' : 'gray.600' },
})} })}
title={vision.isTorchOn ? 'Turn off flash' : 'Turn on flash'} title={vision.isTorchOn ? 'Turn off flash' : 'Turn on flash'}
> >
{vision.isTorchOn ? '🔦' : '💡'} {vision.isTorchOn ? '🔦' : '💡'}
</button> </button>
)} )}
</div> </div>
)} )}
{/* Camera controls (phone camera) */} {/* Camera controls (phone camera) */}
{cameraSource === 'phone' && remoteIsPhoneConnected && remoteIsTorchAvailable && ( {cameraSource === 'phone' && remoteIsPhoneConnected && remoteIsTorchAvailable && (
@@ -829,6 +930,27 @@ export function AbacusVisionBridge({
/> />
)} )}
{/* Detection status indicator */}
{!remoteIsCalibrating && (
<div
className={css({
position: 'absolute',
top: 2,
left: 2,
})}
>
<VisionStatusIndicator
isCalibrated={remoteCalibrationMode === 'auto' || remoteCalibration !== null}
isDetecting={remoteLatestFrame !== null}
confidence={remoteStability.currentConfidence}
handDetected={remoteStability.isHandDetected}
detectedValue={remoteStability.stableValue}
consecutiveFrames={remoteStability.consecutiveFrames}
minFrames={DEFAULT_STABILITY_CONFIG.minConsecutiveFrames}
/>
</div>
)}
{/* Connection status */} {/* Connection status */}
<div <div
className={css({ className={css({
@@ -1074,7 +1196,7 @@ export function AbacusVisionBridge({
{vision.cameraError} {vision.cameraError}
</div> </div>
)} )}
</div> </motion.div>
) )
} }

View File

@@ -1,6 +1,6 @@
'use client' 'use client'
import { useEffect, useState } from 'react' import { useEffect, useRef, useState } from 'react'
import { AbacusQRCode } from '@/components/common/AbacusQRCode' import { AbacusQRCode } from '@/components/common/AbacusQRCode'
import { useRemoteCameraSession } from '@/hooks/useRemoteCameraSession' import { useRemoteCameraSession } from '@/hooks/useRemoteCameraSession'
import { css } from '../../../styled-system/css' import { css } from '../../../styled-system/css'
@@ -32,6 +32,10 @@ export function RemoteCameraQRCode({
const { session, isCreating, error, createSession, setExistingSession, getPhoneUrl } = const { session, isCreating, error, createSession, setExistingSession, getPhoneUrl } =
useRemoteCameraSession() useRemoteCameraSession()
// Ref to track if we've already initiated session creation
// This prevents React 18 Strict Mode from creating duplicate sessions
const creationInitiatedRef = useRef(false)
// If we have an existing session ID, use it instead of creating a new one // If we have an existing session ID, use it instead of creating a new one
useEffect(() => { useEffect(() => {
if (existingSessionId && !session) { if (existingSessionId && !session) {
@@ -40,8 +44,10 @@ export function RemoteCameraQRCode({
}, [existingSessionId, session, setExistingSession]) }, [existingSessionId, session, setExistingSession])
// Create session on mount only if no existing session // Create session on mount only if no existing session
// Use ref to prevent duplicate creation in React 18 Strict Mode
useEffect(() => { useEffect(() => {
if (!session && !isCreating && !existingSessionId) { if (!session && !isCreating && !existingSessionId && !creationInitiatedRef.current) {
creationInitiatedRef.current = true
createSession().then((newSession) => { createSession().then((newSession) => {
if (newSession && onSessionCreated) { if (newSession && onSessionCreated) {
onSessionCreated(newSession.sessionId) onSessionCreated(newSession.sessionId)

View File

@@ -8,6 +8,11 @@ import {
isArucoAvailable, isArucoAvailable,
loadAruco, loadAruco,
} from '@/lib/vision/arucoDetection' } from '@/lib/vision/arucoDetection'
import {
analyzeColumns,
analysesToDigits,
digitsToNumber as cvDigitsToNumber,
} from '@/lib/vision/beadDetector'
import { digitsToNumber, getMinConfidence, processVideoFrame } from '@/lib/vision/frameProcessor' import { digitsToNumber, getMinConfidence, processVideoFrame } from '@/lib/vision/frameProcessor'
import type { import type {
CalibrationGrid, CalibrationGrid,
@@ -83,9 +88,9 @@ export function useAbacusVision(options: UseAbacusVisionOptions = {}): UseAbacus
// Track previous stable value to avoid duplicate callbacks // Track previous stable value to avoid duplicate callbacks
const lastStableValueRef = useRef<number | null>(null) const lastStableValueRef = useRef<number | null>(null)
// Throttle inference to 5fps for performance // Throttle detection (CV is fast, 10fps is plenty)
const lastInferenceTimeRef = useRef<number>(0) const lastInferenceTimeRef = useRef<number>(0)
const INFERENCE_INTERVAL_MS = 200 // 5fps const INFERENCE_INTERVAL_MS = 100 // 10fps
// Ref for calibration functions to avoid infinite loop in auto-calibration effect // Ref for calibration functions to avoid infinite loop in auto-calibration effect
const calibrationRef = useRef(calibration) const calibrationRef = useRef(calibration)
@@ -275,10 +280,10 @@ export function useAbacusVision(options: UseAbacusVisionOptions = {}): UseAbacus
}, [calibration]) }, [calibration])
/** /**
* Process a video frame for detection using TensorFlow.js classifier * Process a video frame for detection using CV-based bead detection
*/ */
const processFrame = useCallback(async () => { const processFrame = useCallback(async () => {
// Throttle inference for performance (5fps instead of 60fps) // Throttle inference for performance (10fps)
const now = performance.now() const now = performance.now()
if (now - lastInferenceTimeRef.current < INFERENCE_INTERVAL_MS) { if (now - lastInferenceTimeRef.current < INFERENCE_INTERVAL_MS) {
return return
@@ -305,20 +310,31 @@ export function useAbacusVision(options: UseAbacusVisionOptions = {}): UseAbacus
const columnImages = processVideoFrame(video, calibration.calibration) const columnImages = processVideoFrame(video, calibration.calibration)
if (columnImages.length === 0) return if (columnImages.length === 0) return
// Run classification // Use CV-based bead detection instead of ML
const result = await classifier.classifyColumns(columnImages) const analyses = analyzeColumns(columnImages)
if (!result) return const { digits, confidences, minConfidence } = analysesToDigits(analyses)
// Log analysis for debugging
console.log(
'[CV] Bead analysis:',
analyses.map((a) => ({
digit: a.digit,
conf: a.confidence.toFixed(2),
heaven: a.heavenActive ? '5' : '0',
earth: a.earthActiveCount,
bar: a.reckoningBarPosition.toFixed(2),
}))
)
// Update column confidences // Update column confidences
setColumnConfidences(result.confidences) setColumnConfidences(confidences)
// Convert digits to number // Convert digits to number
const detectedValue = digitsToNumber(result.digits) const detectedValue = cvDigitsToNumber(digits)
const minConfidence = getMinConfidence(result.confidences)
// Push to stability buffer // Push to stability buffer
stability.pushFrame(detectedValue, minConfidence) stability.pushFrame(detectedValue, minConfidence)
}, [camera.videoStream, calibration.isCalibrated, calibration.calibration, stability, classifier]) }, [camera.videoStream, calibration.isCalibrated, calibration.calibration, stability])
/** /**
* Detection loop * Detection loop

View File

@@ -73,16 +73,23 @@ export function useRemoteCameraDesktop(): UseRemoteCameraDesktopReturn {
// Initialize socket connection // Initialize socket connection
useEffect(() => { useEffect(() => {
console.log('[RemoteCameraDesktop] Initializing socket connection...')
const socketInstance = io({ const socketInstance = io({
path: '/api/socket', path: '/api/socket',
autoConnect: true, autoConnect: true,
}) })
socketInstance.on('connect', () => { socketInstance.on('connect', () => {
console.log('[RemoteCameraDesktop] Socket connected! ID:', socketInstance.id)
setIsConnected(true) setIsConnected(true)
}) })
socketInstance.on('disconnect', () => { socketInstance.on('connect_error', (error) => {
console.error('[RemoteCameraDesktop] Socket connect error:', error)
})
socketInstance.on('disconnect', (reason) => {
console.log('[RemoteCameraDesktop] Socket disconnected:', reason)
setIsConnected(false) setIsConnected(false)
}) })
@@ -105,17 +112,20 @@ export function useRemoteCameraDesktop(): UseRemoteCameraDesktopReturn {
if (!socket) return if (!socket) return
const handleConnected = ({ phoneConnected }: { phoneConnected: boolean }) => { const handleConnected = ({ phoneConnected }: { phoneConnected: boolean }) => {
console.log('[RemoteCameraDesktop] Phone connected event:', phoneConnected)
setIsPhoneConnected(phoneConnected) setIsPhoneConnected(phoneConnected)
setError(null) setError(null)
} }
const handleDisconnected = ({ phoneConnected }: { phoneConnected: boolean }) => { const handleDisconnected = ({ phoneConnected }: { phoneConnected: boolean }) => {
console.log('[RemoteCameraDesktop] Phone disconnected event:', phoneConnected)
setIsPhoneConnected(phoneConnected) setIsPhoneConnected(phoneConnected)
setLatestFrame(null) setLatestFrame(null)
setFrameRate(0) setFrameRate(0)
} }
const handleStatus = ({ phoneConnected }: { phoneConnected: boolean }) => { const handleStatus = ({ phoneConnected }: { phoneConnected: boolean }) => {
console.log('[RemoteCameraDesktop] Status event:', phoneConnected)
setIsPhoneConnected(phoneConnected) setIsPhoneConnected(phoneConnected)
} }
@@ -174,13 +184,16 @@ export function useRemoteCameraDesktop(): UseRemoteCameraDesktopReturn {
const subscribe = useCallback( const subscribe = useCallback(
(sessionId: string) => { (sessionId: string) => {
console.log('[RemoteCameraDesktop] Subscribing to session:', sessionId, 'socket:', !!socket, 'connected:', isConnected)
if (!socket || !isConnected) { if (!socket || !isConnected) {
console.error('[RemoteCameraDesktop] Socket not connected!')
setError('Socket not connected') setError('Socket not connected')
return return
} }
currentSessionId.current = sessionId currentSessionId.current = sessionId
setError(null) setError(null)
console.log('[RemoteCameraDesktop] Emitting remote-camera:subscribe')
socket.emit('remote-camera:subscribe', { sessionId }) socket.emit('remote-camera:subscribe', { sessionId })
}, },
[socket, isConnected] [socket, isConnected]

View File

@@ -68,8 +68,13 @@ interface UseRemoteCameraPhoneReturn {
export function useRemoteCameraPhone( export function useRemoteCameraPhone(
options: UseRemoteCameraPhoneOptions = {} options: UseRemoteCameraPhoneOptions = {}
): UseRemoteCameraPhoneReturn { ): UseRemoteCameraPhoneReturn {
const { targetFps = 10, jpegQuality = 0.8, targetWidth = 300, rawWidth = 640, onTorchRequest } = const {
options targetFps = 10,
jpegQuality = 0.8,
targetWidth = 300,
rawWidth = 640,
onTorchRequest,
} = options
// Keep onTorchRequest in a ref to avoid stale closures // Keep onTorchRequest in a ref to avoid stale closures
const onTorchRequestRef = useRef(onTorchRequest) const onTorchRequestRef = useRef(onTorchRequest)
@@ -115,16 +120,23 @@ export function useRemoteCameraPhone(
// Initialize socket connection // Initialize socket connection
useEffect(() => { useEffect(() => {
console.log('[RemoteCameraPhone] Initializing socket connection...')
const socketInstance = io({ const socketInstance = io({
path: '/api/socket', path: '/api/socket',
autoConnect: true, autoConnect: true,
}) })
socketInstance.on('connect', () => { socketInstance.on('connect', () => {
console.log('[RemoteCameraPhone] Socket connected! ID:', socketInstance.id)
setIsSocketConnected(true) setIsSocketConnected(true)
}) })
socketInstance.on('disconnect', () => { socketInstance.on('connect_error', (error) => {
console.error('[RemoteCameraPhone] Socket connect error:', error)
})
socketInstance.on('disconnect', (reason) => {
console.log('[RemoteCameraPhone] Socket disconnected:', reason)
setIsSocketConnected(false) setIsSocketConnected(false)
setIsConnected(false) setIsConnected(false)
isConnectedRef.current = false isConnectedRef.current = false
@@ -314,7 +326,9 @@ export function useRemoteCameraPhone(
const connect = useCallback( const connect = useCallback(
(sessionId: string) => { (sessionId: string) => {
const socket = socketRef.current const socket = socketRef.current
console.log('[RemoteCameraPhone] Connecting to session:', sessionId, 'socket:', !!socket, 'connected:', isSocketConnected)
if (!socket || !isSocketConnected) { if (!socket || !isSocketConnected) {
console.error('[RemoteCameraPhone] Socket not connected!')
setError('Socket not connected') setError('Socket not connected')
return return
} }
@@ -322,6 +336,7 @@ export function useRemoteCameraPhone(
sessionIdRef.current = sessionId sessionIdRef.current = sessionId
setError(null) setError(null)
console.log('[RemoteCameraPhone] Emitting remote-camera:join')
socket.emit('remote-camera:join', { sessionId }) socket.emit('remote-camera:join', { sessionId })
setIsConnected(true) setIsConnected(true)
isConnectedRef.current = true isConnectedRef.current = true

View File

@@ -0,0 +1,203 @@
/**
* Traditional CV-based bead detection for abacus columns
*
* Uses edge detection and contour analysis instead of ML.
* Works by detecting the reckoning bar and analyzing bead positions
* relative to it.
*/
export interface BeadAnalysis {
  /** Detected digit value (0-9): active heaven bead contributes 5, each active earth bead 1 */
  digit: number
  /** Confidence in [0, 1] based on detection clarity (expected bead counts + bar contrast) */
  confidence: number
  /** Position of reckoning bar (0-1, relative to column height; 0 = top of column) */
  reckoningBarPosition: number
  /** Number of beads detected above bar (expected: 1 on a standard soroban column) */
  heavenBeadsDetected: number
  /** Whether heaven bead is active (within 15% of column height of the bar) */
  heavenActive: boolean
  /** Number of beads detected below bar (expected: 4) */
  earthBeadsDetected: number
  /** Number of active earth beads (within 15% of column height of the bar) */
  earthActiveCount: number
}
/**
 * Analyze a single column image to detect bead positions.
 *
 * Builds a vertical intensity profile (one averaged brightness per row),
 * locates the reckoning bar as the darkest horizontal band in the middle
 * half of the column, then counts dark intensity peaks (beads) above and
 * below the bar to derive the digit.
 *
 * @param imageData - RGBA image data of a single column; input is assumed
 *   grayscale, so only the red channel is sampled
 * @returns Analysis result with detected digit and diagnostic fields
 */
export function analyzeColumn(imageData: ImageData): BeadAnalysis {
  const { width, height, data } = imageData

  // Guard: a degenerate image would otherwise produce NaN (division by
  // zero in the row averaging and bar-position math below). Report a
  // zero-confidence "blank" result instead.
  if (width === 0 || height === 0) {
    return {
      digit: 0,
      confidence: 0,
      reckoningBarPosition: 0,
      heavenBeadsDetected: 0,
      heavenActive: false,
      earthBeadsDetected: 0,
      earthActiveCount: 0,
    }
  }

  // Step 1: Vertical intensity profile — average each row.
  const rowIntensities = new Float32Array(height)
  for (let y = 0; y < height; y++) {
    let sum = 0
    for (let x = 0; x < width; x++) {
      const idx = (y * width + x) * 4
      sum += data[idx] // Red channel (grayscale input)
    }
    rowIntensities[y] = sum / width
  }

  // Step 2: Find the reckoning bar — the darkest horizontal band in the
  // middle half of the column (the bar separates heaven from earth deck).
  const searchStart = Math.floor(height * 0.25)
  const searchEnd = Math.floor(height * 0.75)
  let darkestRow = searchStart
  let darkestValue = 255
  // Sliding window so a multi-pixel-tall bar beats single-row noise.
  const windowSize = Math.max(3, Math.floor(height * 0.03))
  for (let y = searchStart; y < searchEnd - windowSize; y++) {
    let windowSum = 0
    for (let i = 0; i < windowSize; i++) {
      windowSum += rowIntensities[y + i]
    }
    const windowAvg = windowSum / windowSize
    if (windowAvg < darkestValue) {
      darkestValue = windowAvg
      darkestRow = y + Math.floor(windowSize / 2)
    }
  }
  const reckoningBarPosition = darkestRow / height

  // Step 3: Heaven section (above the bar) — dark peaks are beads.
  const heavenStart = 0
  const heavenEnd = darkestRow - windowSize
  const heavenPeaks = findPeaks(rowIntensities, heavenStart, heavenEnd, height)

  // Heaven bead counts as active when it sits close to the bar.
  const heavenActiveThreshold = height * 0.15 // Within 15% of bar
  const heavenActive =
    heavenPeaks.length > 0 &&
    darkestRow - heavenPeaks[heavenPeaks.length - 1] < heavenActiveThreshold

  // Step 4: Earth section (below the bar).
  const earthStart = darkestRow + windowSize
  const earthEnd = height
  const earthPeaks = findPeaks(rowIntensities, earthStart, earthEnd, height)

  // Earth beads are active when close to the bar (same distance threshold).
  const earthActiveCount = earthPeaks.filter(
    (peak) => peak - darkestRow < heavenActiveThreshold
  ).length

  // Step 5: Digit value — active heaven bead = 5, each active earth bead = 1.
  const heavenValue = heavenActive ? 5 : 0
  const earthValue = Math.min(earthActiveCount, 4) // Max 4 earth beads
  const digit = heavenValue + earthValue

  // Step 6: Confidence — rewards finding the expected bead counts and a
  // high-contrast (dark) reckoning bar.
  const expectedHeavenBeads = 1
  const expectedEarthBeads = 4
  const heavenConfidence = heavenPeaks.length === expectedHeavenBeads ? 1.0 : 0.5
  const earthConfidence =
    earthPeaks.length >= expectedEarthBeads ? 1.0 : earthPeaks.length / expectedEarthBeads
  const barContrast = (255 - darkestValue) / 255 // How dark is the bar?
  const confidence = (heavenConfidence + earthConfidence + barContrast) / 3

  return {
    digit,
    confidence,
    reckoningBarPosition,
    heavenBeadsDetected: heavenPeaks.length,
    heavenActive,
    earthBeadsDetected: earthPeaks.length,
    earthActiveCount,
  }
}

/**
 * Find peaks (local minima = dark beads) in an intensity profile.
 *
 * A row qualifies as a peak when it is strictly darker than its two
 * neighbors on each side, below the adaptive threshold, and at least
 * 5% of the total height away from the previous accepted peak.
 *
 * @param intensities - Per-row average brightness
 * @param start - First row (inclusive) of the search span
 * @param end - Last row (exclusive) of the search span
 * @param totalHeight - Full column height, used to scale the minimum
 *   inter-peak distance
 * @returns Row indices of detected peaks, in ascending order
 */
function findPeaks(
  intensities: Float32Array,
  start: number,
  end: number,
  totalHeight: number
): number[] {
  const peaks: number[] = []
  const minPeakDistance = Math.floor(totalHeight * 0.05) // Min 5% height between peaks
  const threshold = calculateAdaptiveThreshold(intensities, start, end)

  let lastPeak = -minPeakDistance * 2
  for (let y = start + 2; y < end - 2; y++) {
    const current = intensities[y]
    // Local minimum (darker than the four surrounding rows)
    if (
      current < intensities[y - 1] &&
      current < intensities[y + 1] &&
      current < intensities[y - 2] &&
      current < intensities[y + 2] &&
      current < threshold &&
      y - lastPeak >= minPeakDistance
    ) {
      peaks.push(y)
      lastPeak = y
    }
  }
  return peaks
}

/**
 * Calculate adaptive threshold for peak detection: halfway between the
 * span's average brightness and its minimum, so only rows noticeably
 * darker than the local background count as bead candidates.
 */
function calculateAdaptiveThreshold(intensities: Float32Array, start: number, end: number): number {
  let sum = 0
  let min = 255
  for (let y = start; y < end; y++) {
    sum += intensities[y]
    min = Math.min(min, intensities[y])
  }
  const avg = sum / (end - start)
  return (avg + min) / 2
}
/**
 * Run bead analysis over every column image, preserving column order.
 */
export function analyzeColumns(columnImages: ImageData[]): BeadAnalysis[] {
  const results: BeadAnalysis[] = []
  for (const columnImage of columnImages) {
    results.push(analyzeColumn(columnImage))
  }
  return results
}
/**
 * Flatten per-column bead analyses into parallel digit and confidence
 * arrays, plus the lowest confidence across all columns (0 when empty).
 */
export function analysesToDigits(analyses: BeadAnalysis[]): {
  digits: number[]
  confidences: number[]
  minConfidence: number
} {
  const digits: number[] = []
  const confidences: number[] = []
  let lowest = Infinity
  for (const analysis of analyses) {
    digits.push(analysis.digit)
    confidences.push(analysis.confidence)
    if (analysis.confidence < lowest) {
      lowest = analysis.confidence
    }
  }
  return {
    digits,
    confidences,
    minConfidence: analyses.length > 0 ? lowest : 0,
  }
}
/**
 * Fold an array of digits (most significant first) into a single number.
 * An empty array yields 0.
 */
export function digitsToNumber(digits: number[]): number {
  let value = 0
  for (const digit of digits) {
    value = value * 10 + digit
  }
  return value
}

View File

@@ -214,3 +214,59 @@ export function getMinConfidence(confidences: number[]): number {
if (confidences.length === 0) return 0 if (confidences.length === 0) return 0
return Math.min(...confidences) return Math.min(...confidences)
} }
/**
 * Process an image frame for classification (for remote camera frames).
 *
 * Renders the image onto an offscreen canvas, extracts the abacus region
 * (from the calibration grid, or the full frame when uncalibrated), slices
 * it into columns, and preprocesses each column for the detector.
 *
 * @param image - Image element with the frame
 * @param calibration - Calibration grid (if null, assumes entire image is the abacus)
 * @param columnCount - Number of columns to slice into
 * @param columnWidth - Target column width for model input
 * @param columnHeight - Target column height for model input
 * @returns Array of preprocessed column ImageData ready for classification
 */
export function processImageFrame(
  image: HTMLImageElement,
  calibration: CalibrationGrid | null,
  columnCount: number,
  columnWidth: number = 64,
  columnHeight: number = 128
): ImageData[] {
  const frameWidth = image.naturalWidth || image.width
  const frameHeight = image.naturalHeight || image.height

  // Offscreen canvas so we can read back pixel data from the frame.
  const canvas = document.createElement('canvas')
  canvas.width = frameWidth
  canvas.height = frameHeight
  const ctx = canvas.getContext('2d')!
  ctx.drawImage(image, 0, 0)

  // ROI: calibrated region, or the whole frame when uncalibrated
  // (the phone side has already cropped to the abacus).
  const roiData = calibration
    ? extractROI(ctx, calibration.roi)
    : ctx.getImageData(0, 0, frameWidth, frameHeight)

  // Synthesize an evenly-divided grid when no calibration was provided,
  // so column slicing always has dividers to work from.
  const sliceCalibration: CalibrationGrid =
    calibration ?? {
      roi: { x: 0, y: 0, width: frameWidth, height: frameHeight },
      columnCount,
      columnDividers: Array.from({ length: columnCount - 1 }, (_, i) => (i + 1) / columnCount),
      rotation: 0,
    }

  // Slice into columns, then grayscale + resize each to the model input size.
  return sliceIntoColumns(roiData, sliceCalibration).map((column) =>
    resizeImageData(toGrayscale(column), columnWidth, columnHeight)
  )
}