feat(vision): improve remote camera calibration and UX
- Fix remote camera autocrop rotation by swapping ArUco corners for phone camera (detectMarkers assumes Desk View orientation which is 180° rotated)
- Add rotate left/right buttons to CalibrationOverlay for manual calibration
- Fix mode switching bug: switching to auto mode now clears desktop calibration on phone via new 'remote-camera:clear-calibration' socket event
- Add copy button to QR code URL with visual feedback
- Fix text selection spanning into video feed with userSelect: none
- Add flip camera and torch controls to local camera UI
- Add session persistence for remote camera reconnection
- Fix 4:3 aspect ratio for cropped abacus output

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
parent 937223e318
commit 8846cece93
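Note: the autocrop fix below boils down to relabelling the quad corners that detectMarkers() returns before the phone uses them. A minimal sketch of that remapping, using the corner shape visible in the RemoteCameraPage hunk (the standalone function and the Point/QuadCorners declarations here are illustrative, not part of the codebase):

// Sketch only - mirrors the phoneCorners mapping added in RemoteCameraPage below.
// detectMarkers() labels corners for the 180°-rotated Desk View camera, so the
// phone camera needs the opposite assignment.
interface Point { x: number; y: number }
interface QuadCorners { topLeft: Point; topRight: Point; bottomRight: Point; bottomLeft: Point }

function deskViewCornersToPhoneCorners(deskView: QuadCorners): QuadCorners {
  return {
    topLeft: deskView.bottomRight, // marker 0 (physical TL)
    topRight: deskView.bottomLeft, // marker 1 (physical TR)
    bottomRight: deskView.topLeft, // marker 2 (physical BR)
    bottomLeft: deskView.topRight, // marker 3 (physical BL)
  }
}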
@@ -305,14 +305,16 @@ def main():
         print("Install with: pip install tensorflow")
         sys.exit(1)

-    # Check tensorflowjs is available
+    # Check tensorflowjs is available (optional - can convert later)
+    tfjs_available = False
     try:
         import tensorflowjs
         print(f"TensorFlow.js converter version: {tensorflowjs.__version__}")
-    except ImportError:
-        print("Error: tensorflowjs not installed")
-        print("Install with: pip install tensorflowjs")
-        sys.exit(1)
+        tfjs_available = True
+    except (ImportError, AttributeError) as e:
+        print(f"Note: tensorflowjs not available ({type(e).__name__})")
+        print("Model will be saved as Keras format. Convert later with:")
+        print(" tensorflowjs_converter --input_format=keras model.keras output_dir/")

     print()

@@ -352,9 +354,14 @@ def main():
     # Save Keras model
     save_keras_model(model, args.output_dir)

-    # Export to TensorFlow.js
-    print("\nExporting to TensorFlow.js format...")
-    export_to_tfjs(model, args.output_dir)
+    # Export to TensorFlow.js (if available)
+    if tfjs_available:
+        print("\nExporting to TensorFlow.js format...")
+        export_to_tfjs(model, args.output_dir)
+    else:
+        print("\nSkipping TensorFlow.js export (tensorflowjs not available)")
+        print("Convert later with:")
+        print(f" tensorflowjs_converter --input_format=keras {args.output_dir}/column-classifier.keras {args.output_dir}")

     print("\nTraining complete!")
     print(f"Model files saved to: {args.output_dir}")

@@ -182,8 +182,12 @@ export default function RemoteCameraPage() {
       if (isSending) {
         updateCalibration(desktopCalibration)
       }
+    } else if (usingDesktopCalibration) {
+      // Desktop cleared calibration - go back to auto-detection
+      setUsingDesktopCalibration(false)
+      setCalibration(null)
     }
-  }, [desktopCalibration, isSending, updateCalibration])
+  }, [desktopCalibration, isSending, updateCalibration, usingDesktopCalibration])

   // Auto-detect markers (always runs unless using desktop calibration)
   useEffect(() => {
@@ -201,18 +205,28 @@ export default function RemoteCameraPage() {

       if (result.allMarkersFound && result.quadCorners) {
         // Auto-calibration successful!
+        // NOTE: detectMarkers() returns corners swapped for Desk View camera (180° rotated).
+        // Phone camera is NOT Desk View, so we need to swap corners back to get correct orientation.
+        // detectMarkers maps: marker 2 (physical BR) → topLeft, marker 0 (physical TL) → bottomRight
+        // For phone camera we need: marker 0 (physical TL) → topLeft, marker 2 (physical BR) → bottomRight
+        const phoneCorners = {
+          topLeft: result.quadCorners.bottomRight, // marker 0 (physical TL)
+          topRight: result.quadCorners.bottomLeft, // marker 1 (physical TR)
+          bottomRight: result.quadCorners.topLeft, // marker 2 (physical BR)
+          bottomLeft: result.quadCorners.topRight, // marker 3 (physical BL)
+        }
         const grid: CalibrationGrid = {
           roi: {
-            x: Math.min(result.quadCorners.topLeft.x, result.quadCorners.bottomLeft.x),
-            y: Math.min(result.quadCorners.topLeft.y, result.quadCorners.topRight.y),
+            x: Math.min(phoneCorners.topLeft.x, phoneCorners.bottomLeft.x),
+            y: Math.min(phoneCorners.topLeft.y, phoneCorners.topRight.y),
             width:
-              Math.max(result.quadCorners.topRight.x, result.quadCorners.bottomRight.x) -
-              Math.min(result.quadCorners.topLeft.x, result.quadCorners.bottomLeft.x),
+              Math.max(phoneCorners.topRight.x, phoneCorners.bottomRight.x) -
+              Math.min(phoneCorners.topLeft.x, phoneCorners.bottomLeft.x),
             height:
-              Math.max(result.quadCorners.bottomLeft.y, result.quadCorners.bottomRight.y) -
-              Math.min(result.quadCorners.topLeft.y, result.quadCorners.topRight.y),
+              Math.max(phoneCorners.bottomLeft.y, phoneCorners.bottomRight.y) -
+              Math.min(phoneCorners.topLeft.y, phoneCorners.topRight.y),
           },
-          corners: result.quadCorners,
+          corners: phoneCorners,
           columnCount: 13,
           columnDividers: Array.from({ length: 12 }, (_, i) => (i + 1) / 13),
           rotation: 0,
@@ -221,7 +235,7 @@ export default function RemoteCameraPage() {
       // Update the calibration for the sending loop and switch to cropped mode
       // BUT: don't switch to cropped if desktop is actively calibrating (they need raw frames)
       if (isSending && !desktopIsCalibrating) {
-        updateCalibration(result.quadCorners)
+        updateCalibration(phoneCorners)
         setFrameMode('cropped')
       }
     }

@@ -90,6 +90,7 @@ export function AbacusVisionBridge({
     unsubscribe: remoteUnsubscribe,
     setPhoneFrameMode: remoteSetPhoneFrameMode,
     sendCalibration: remoteSendCalibration,
+    clearCalibration: remoteClearCalibration,
   } = useRemoteCameraDesktop()

   // Handle switching to phone camera
@@ -129,12 +130,15 @@ export function AbacusVisionBridge({
         // Tell phone to use its auto-calibration (cropped frames)
         remoteSetPhoneFrameMode('cropped')
         setRemoteIsCalibrating(false)
+        // Clear desktop calibration on phone so it goes back to auto-detection
+        remoteClearCalibration()
+        setRemoteCalibration(null)
       } else {
         // Tell phone to send raw frames for desktop calibration
         remoteSetPhoneFrameMode('raw')
       }
     },
-    [remoteSetPhoneFrameMode]
+    [remoteSetPhoneFrameMode, remoteClearCalibration]
   )

   // Start remote camera calibration
@@ -408,28 +412,94 @@ export function AbacusVisionBridge({
        </button>
      </div>

-      {/* Camera selector (if multiple cameras and using local) */}
-      {cameraSource === 'local' && vision.availableDevices.length > 1 && (
-        <select
-          data-element="camera-selector"
-          value={vision.selectedDeviceId ?? ''}
-          onChange={handleCameraSelect}
+      {/* Camera controls (local camera only) */}
+      {cameraSource === 'local' && (
+        <div
+          data-element="camera-controls"
           className={css({
             p: 2,
             bg: 'gray.800',
             color: 'white',
             border: '1px solid',
             borderColor: 'gray.600',
             borderRadius: 'md',
             fontSize: 'sm',
+            display: 'flex',
+            alignItems: 'center',
+            gap: 2,
+            flexWrap: 'wrap',
           })}
         >
-          {vision.availableDevices.map((device) => (
-            <option key={device.deviceId} value={device.deviceId}>
-              {device.label || `Camera ${device.deviceId.slice(0, 8)}`}
-            </option>
-          ))}
-        </select>
+          {/* Camera selector (if multiple cameras) */}
+          {vision.availableDevices.length > 1 && (
+            <select
+              data-element="camera-selector"
+              value={vision.selectedDeviceId ?? ''}
+              onChange={handleCameraSelect}
+              className={css({
+                flex: 1,
+                p: 2,
+                bg: 'gray.800',
+                color: 'white',
+                border: '1px solid',
+                borderColor: 'gray.600',
+                borderRadius: 'md',
+                fontSize: 'sm',
+                minWidth: '150px',
+              })}
+            >
+              {vision.availableDevices.map((device) => (
+                <option key={device.deviceId} value={device.deviceId}>
+                  {device.label || `Camera ${device.deviceId.slice(0, 8)}`}
+                </option>
+              ))}
+            </select>
+          )}
+
+          {/* Flip camera button */}
+          <button
+            type="button"
+            onClick={() => vision.flipCamera()}
+            data-action="flip-camera"
+            className={css({
+              display: 'flex',
+              alignItems: 'center',
+              justifyContent: 'center',
+              width: '40px',
+              height: '40px',
+              bg: 'gray.700',
+              color: 'white',
+              border: 'none',
+              borderRadius: 'md',
+              cursor: 'pointer',
+              fontSize: 'lg',
+              _hover: { bg: 'gray.600' },
+            })}
+            title={`Switch to ${vision.facingMode === 'environment' ? 'front' : 'back'} camera`}
+          >
+            🔄
+          </button>
+
+          {/* Torch toggle button (only if available) */}
+          {vision.isTorchAvailable && (
+            <button
+              type="button"
+              onClick={() => vision.toggleTorch()}
+              data-action="toggle-torch"
+              data-status={vision.isTorchOn ? 'on' : 'off'}
+              className={css({
+                display: 'flex',
+                alignItems: 'center',
+                justifyContent: 'center',
+                width: '40px',
+                height: '40px',
+                bg: vision.isTorchOn ? 'yellow.600' : 'gray.700',
+                color: 'white',
+                border: 'none',
+                borderRadius: 'md',
+                cursor: 'pointer',
+                fontSize: 'lg',
+                _hover: { bg: vision.isTorchOn ? 'yellow.500' : 'gray.600' },
+              })}
+              title={vision.isTorchOn ? 'Turn off flash' : 'Turn on flash'}
+            >
+              {vision.isTorchOn ? '🔦' : '💡'}
+            </button>
+          )}
+        </div>
       )}

       {/* Calibration mode toggle (both local and phone camera) */}
@@ -636,6 +706,7 @@ export function AbacusVisionBridge({
           borderRadius: 'lg',
           overflow: 'hidden',
           minHeight: '200px',
+          userSelect: 'none', // Prevent text selection from spanning into video feed
         })}
       >
         {!remoteCameraSessionId ? (
@@ -652,7 +723,7 @@ export function AbacusVisionBridge({
             <RemoteCameraQRCode onSessionCreated={handleRemoteSessionCreated} size={180} />
           </div>
         ) : !remoteIsPhoneConnected ? (
-          /* Waiting for phone to connect */
+          /* Waiting for phone to connect/reconnect - reuse existing session */
           <div
             className={css({
               display: 'flex',
@@ -664,7 +735,11 @@ export function AbacusVisionBridge({
             })}
           >
             <p className={css({ mb: 4 })}>Waiting for phone to connect...</p>
-            <RemoteCameraQRCode onSessionCreated={handleRemoteSessionCreated} size={150} />
+            <RemoteCameraQRCode
+              onSessionCreated={handleRemoteSessionCreated}
+              existingSessionId={remoteCameraSessionId}
+              size={150}
+            />
           </div>
         ) : (
           /* Show camera frames */

@@ -284,6 +284,32 @@ export function CalibrationOverlay({
     dragStartRef.current = null
   }, [])

+  /**
+   * Rotate corners 90° clockwise or counter-clockwise around the quad center
+   * This reassigns corner labels, not their positions
+   */
+  const handleRotate = useCallback((direction: 'left' | 'right') => {
+    setCorners((prev) => {
+      if (direction === 'right') {
+        // Rotate 90° clockwise: TL→TR, TR→BR, BR→BL, BL→TL
+        return {
+          topLeft: prev.bottomLeft,
+          topRight: prev.topLeft,
+          bottomRight: prev.topRight,
+          bottomLeft: prev.bottomRight,
+        }
+      } else {
+        // Rotate 90° counter-clockwise: TL→BL, BL→BR, BR→TR, TR→TL
+        return {
+          topLeft: prev.topRight,
+          topRight: prev.bottomRight,
+          bottomRight: prev.bottomLeft,
+          bottomLeft: prev.topLeft,
+        }
+      }
+    })
+  }, [])
+
   // Handle complete
   const handleComplete = useCallback(() => {
     const grid: CalibrationGrid = {
@@ -547,6 +573,51 @@ export function CalibrationOverlay({
           gap: 2,
         })}
       >
+        {/* Rotation buttons */}
+        <button
+          type="button"
+          onClick={() => handleRotate('left')}
+          data-action="rotate-left"
+          className={css({
+            px: 2,
+            py: 1.5,
+            bg: 'blue.600',
+            color: 'white',
+            borderRadius: 'md',
+            fontSize: 'sm',
+            border: 'none',
+            cursor: 'pointer',
+            display: 'flex',
+            alignItems: 'center',
+            justifyContent: 'center',
+            _hover: { bg: 'blue.500' },
+          })}
+          title="Rotate 90° left"
+        >
+          ↺
+        </button>
+        <button
+          type="button"
+          onClick={() => handleRotate('right')}
+          data-action="rotate-right"
+          className={css({
+            px: 2,
+            py: 1.5,
+            bg: 'blue.600',
+            color: 'white',
+            borderRadius: 'md',
+            fontSize: 'sm',
+            border: 'none',
+            cursor: 'pointer',
+            display: 'flex',
+            alignItems: 'center',
+            justifyContent: 'center',
+            _hover: { bg: 'blue.500' },
+          })}
+          title="Rotate 90° right"
+        >
+          ↻
+        </button>
         <button
           type="button"
           onClick={onCancel}

@@ -1,6 +1,6 @@
 'use client'

-import { useCallback, useEffect, useState } from 'react'
+import { useEffect, useState } from 'react'
 import { AbacusQRCode } from '@/components/common/AbacusQRCode'
 import { useRemoteCameraSession } from '@/hooks/useRemoteCameraSession'
 import { css } from '../../../styled-system/css'
@@ -10,6 +10,8 @@ export interface RemoteCameraQRCodeProps {
   onSessionCreated?: (sessionId: string) => void
   /** Size of the QR code in pixels */
   size?: number
+  /** Existing session ID to reuse (for reconnection scenarios) */
+  existingSessionId?: string | null
 }

 /**
@@ -17,20 +19,36 @@ export interface RemoteCameraQRCodeProps {
  *
  * Automatically creates a remote camera session and shows a QR code
  * that phones can scan to connect as a remote camera source.
+ *
+ * If an existing session ID is provided, it will reuse that session
+ * instead of creating a new one. This allows the phone to reconnect
+ * after a page reload.
  */
-export function RemoteCameraQRCode({ onSessionCreated, size = 200 }: RemoteCameraQRCodeProps) {
-  const { session, isCreating, error, createSession, getPhoneUrl } = useRemoteCameraSession()
+export function RemoteCameraQRCode({
+  onSessionCreated,
+  size = 200,
+  existingSessionId,
+}: RemoteCameraQRCodeProps) {
+  const { session, isCreating, error, createSession, setExistingSession, getPhoneUrl } =
+    useRemoteCameraSession()

-  // Create session on mount
+  // If we have an existing session ID, use it instead of creating a new one
   useEffect(() => {
-    if (!session && !isCreating) {
+    if (existingSessionId && !session) {
+      setExistingSession(existingSessionId)
+    }
+  }, [existingSessionId, session, setExistingSession])
+
+  // Create session on mount only if no existing session
+  useEffect(() => {
+    if (!session && !isCreating && !existingSessionId) {
       createSession().then((newSession) => {
         if (newSession && onSessionCreated) {
           onSessionCreated(newSession.sessionId)
         }
       })
     }
-  }, [session, isCreating, createSession, onSessionCreated])
+  }, [session, isCreating, existingSessionId, createSession, onSessionCreated])

   const phoneUrl = getPhoneUrl()

@@ -149,23 +167,75 @@ export function RemoteCameraQRCode({ onSessionCreated, size = 200 }: RemoteCamer
         <p className={css({ fontSize: 'xs', color: 'gray.400' })}>Session expires in 10 minutes</p>
       </div>

-      {/* URL for manual entry */}
-      <div
-        className={css({
-          fontSize: 'xs',
-          color: 'gray.500',
-          bg: 'gray.100',
-          px: 3,
-          py: 2,
-          borderRadius: 'md',
-          fontFamily: 'mono',
-          wordBreak: 'break-all',
-          maxWidth: '280px',
-          textAlign: 'center',
-        })}
-      >
-        {phoneUrl}
-      </div>
+      {/* URL for manual entry with copy button */}
+      <UrlWithCopyButton url={phoneUrl} />
     </div>
   )
 }
+
+/**
+ * URL display with copy button
+ */
+function UrlWithCopyButton({ url }: { url: string }) {
+  const [copied, setCopied] = useState(false)
+
+  const handleCopy = async () => {
+    try {
+      await navigator.clipboard.writeText(url)
+      setCopied(true)
+      setTimeout(() => setCopied(false), 2000)
+    } catch (err) {
+      console.error('Failed to copy URL:', err)
+    }
+  }
+
+  return (
+    <div
+      data-element="url-copy-container"
+      className={css({
+        display: 'flex',
+        alignItems: 'center',
+        gap: 2,
+        bg: 'gray.100',
+        px: 3,
+        py: 2,
+        borderRadius: 'md',
+        maxWidth: '280px',
+      })}
+    >
+      <span
+        className={css({
+          fontSize: 'xs',
+          color: 'gray.500',
+          fontFamily: 'mono',
+          wordBreak: 'break-all',
+          flex: 1,
+          userSelect: 'text',
+        })}
+      >
+        {url}
+      </span>
+      <button
+        type="button"
+        onClick={handleCopy}
+        data-action="copy-url"
+        className={css({
+          flexShrink: 0,
+          px: 2,
+          py: 1,
+          bg: copied ? 'green.600' : 'gray.600',
+          color: 'white',
+          border: 'none',
+          borderRadius: 'md',
+          fontSize: 'xs',
+          cursor: 'pointer',
+          transition: 'background-color 0.2s',
+          _hover: { bg: copied ? 'green.700' : 'gray.700' },
+        })}
+        title="Copy URL to clipboard"
+      >
+        {copied ? '✓' : '📋'}
+      </button>
+    </div>
+  )
+}

@@ -431,6 +431,9 @@ export function useAbacusVision(options: UseAbacusVisionOptions = {}): UseAbacus
     selectedDeviceId: camera.currentDevice?.deviceId ?? null,
     availableDevices: camera.availableDevices,
     isDeskViewDetected: camera.isDeskViewDetected,
+    facingMode: camera.facingMode,
+    isTorchOn: camera.isTorchOn,
+    isTorchAvailable: camera.isTorchAvailable,

     // Calibration state
     calibrationGrid: calibration.calibration,
@@ -451,5 +454,7 @@ export function useAbacusVision(options: UseAbacusVisionOptions = {}): UseAbacus
     selectCamera,
     resetCalibration,
     setCalibrationMode,
+    flipCamera: camera.flipCamera,
+    toggleTorch: camera.toggleTorch,
   }
 }

@@ -144,6 +144,9 @@ export function useColumnClassifier(): UseColumnClassifierReturn {

     const results = await classifier.classifyColumns(columnImages)

+    // Model unavailable
+    if (!results) return null
+
     return {
       digits: results.map((r) => r.digit),
       confidences: results.map((r) => r.confidence),

@@ -16,6 +16,12 @@ export interface UseDeskViewCameraReturn {
   availableDevices: MediaDeviceInfo[]
   /** Whether Desk View camera was auto-detected */
   isDeskViewDetected: boolean
+  /** Current facing mode */
+  facingMode: 'user' | 'environment'
+  /** Whether torch is currently on */
+  isTorchOn: boolean
+  /** Whether torch is available on current device */
+  isTorchAvailable: boolean

   /** Request camera access, optionally specifying device ID */
   requestCamera: (deviceId?: string) => Promise<void>
@@ -23,6 +29,10 @@ export interface UseDeskViewCameraReturn {
   stopCamera: () => void
   /** Refresh device list */
   enumerateDevices: () => Promise<MediaDeviceInfo[]>
+  /** Flip between front and back camera */
+  flipCamera: () => Promise<void>
+  /** Toggle torch on/off */
+  toggleTorch: () => Promise<void>
 }

 /**
@@ -38,9 +48,13 @@ export function useDeskViewCamera(): UseDeskViewCameraReturn {
   const [currentDevice, setCurrentDevice] = useState<MediaDeviceInfo | null>(null)
   const [availableDevices, setAvailableDevices] = useState<MediaDeviceInfo[]>([])
   const [isDeskViewDetected, setIsDeskViewDetected] = useState(false)
+  const [facingMode, setFacingMode] = useState<'user' | 'environment'>('environment')
+  const [isTorchOn, setIsTorchOn] = useState(false)
+  const [isTorchAvailable, setIsTorchAvailable] = useState(false)

   const streamRef = useRef<MediaStream | null>(null)
   const requestIdRef = useRef(0) // Track request ID to ignore stale completions
+  const facingModeRef = useRef<'user' | 'environment'>('environment')

   /**
    * Enumerate available video input devices
@@ -80,6 +94,35 @@ export function useDeskViewCamera(): UseDeskViewCameraReturn {
     [isDeskViewDevice]
   )

+  /**
+   * Check if torch is available on a video track
+   */
+  const checkTorchAvailability = useCallback((track: MediaStreamTrack): boolean => {
+    try {
+      const capabilities = track.getCapabilities() as MediaTrackCapabilities & {
+        torch?: boolean
+      }
+      return capabilities.torch === true
+    } catch {
+      return false
+    }
+  }, [])
+
+  /**
+   * Apply torch setting to track
+   */
+  const applyTorch = useCallback(async (track: MediaStreamTrack, on: boolean): Promise<boolean> => {
+    try {
+      await track.applyConstraints({
+        advanced: [{ torch: on } as MediaTrackConstraintSet],
+      })
+      return true
+    } catch (err) {
+      console.warn('[DeskViewCamera] Failed to apply torch:', err)
+      return false
+    }
+  }, [])
+
   /**
    * Request camera access
    */
@@ -122,7 +165,9 @@ export function useDeskViewCamera(): UseDeskViewCameraReturn {
         // Try to disable face-tracking auto-focus (not all cameras support this)
         // @ts-expect-error - focusMode is valid but not in TS types
         focusMode: 'continuous',
-        ...(targetDeviceId ? { deviceId: { exact: targetDeviceId } } : {}),
+        ...(targetDeviceId
+          ? { deviceId: { exact: targetDeviceId } }
+          : { facingMode: { ideal: facingModeRef.current } }),
       },
       audio: false,
     }
@@ -140,7 +185,7 @@ export function useDeskViewCamera(): UseDeskViewCameraReturn {
       streamRef.current = stream
       setVideoStream(stream)

-      // Find which device we got
+      // Find which device we got and check torch availability
       const videoTrack = stream.getVideoTracks()[0]
       if (videoTrack) {
         const settings = videoTrack.getSettings()
@@ -149,6 +194,11 @@ export function useDeskViewCamera(): UseDeskViewCameraReturn {
           setCurrentDevice(matchingDevice)
           setIsDeskViewDetected(isDeskViewDevice(matchingDevice))
         }
+
+        // Check torch availability
+        const torchAvailable = checkTorchAvailability(videoTrack)
+        setIsTorchAvailable(torchAvailable)
+        setIsTorchOn(false)
       }

       setIsLoading(false)
@@ -158,7 +208,7 @@ export function useDeskViewCamera(): UseDeskViewCameraReturn {
        setIsLoading(false)
       }
     },
-    [enumerateDevices, findDeskViewCamera, isDeskViewDevice]
+    [enumerateDevices, findDeskViewCamera, isDeskViewDevice, checkTorchAvailability]
   )

   /**
@@ -176,6 +226,8 @@ export function useDeskViewCamera(): UseDeskViewCameraReturn {
     setVideoStream(null)
     setCurrentDevice(null)
     setError(null)
+    setIsTorchOn(false)
+    setIsTorchAvailable(false)
   }, [])

   // Cleanup on unmount
@@ -206,6 +258,33 @@ export function useDeskViewCamera(): UseDeskViewCameraReturn {
     }
   }, [enumerateDevices])

+  /**
+   * Flip between front and back camera
+   */
+  const flipCamera = useCallback(async () => {
+    const newFacingMode = facingMode === 'user' ? 'environment' : 'user'
+    facingModeRef.current = newFacingMode
+    setFacingMode(newFacingMode)
+    // Re-request camera with new facing mode (don't pass device ID to use facingMode)
+    await requestCamera()
+  }, [facingMode, requestCamera])
+
+  /**
+   * Toggle torch on/off
+   */
+  const toggleTorch = useCallback(async () => {
+    if (!streamRef.current || !isTorchAvailable) return
+
+    const videoTrack = streamRef.current.getVideoTracks()[0]
+    if (!videoTrack) return
+
+    const newState = !isTorchOn
+    const success = await applyTorch(videoTrack, newState)
+    if (success) {
+      setIsTorchOn(newState)
+    }
+  }, [isTorchAvailable, isTorchOn, applyTorch])
+
   return {
     isLoading,
     error,
@@ -213,8 +292,13 @@ export function useDeskViewCamera(): UseDeskViewCameraReturn {
     currentDevice,
     availableDevices,
     isDeskViewDetected,
+    facingMode,
+    isTorchOn,
+    isTorchAvailable,
     requestCamera,
     stopCamera,
     enumerateDevices,
+    flipCamera,
+    toggleTorch,
   }
 }

@@ -35,6 +35,8 @@ interface UseRemoteCameraDesktopReturn {
   setPhoneFrameMode: (mode: FrameMode) => void
   /** Send calibration to the phone */
   sendCalibration: (corners: QuadCorners) => void
+  /** Clear desktop calibration on phone (go back to auto-detection) */
+  clearCalibration: () => void
 }

 /**
@@ -212,6 +214,18 @@ export function useRemoteCameraDesktop(): UseRemoteCameraDesktopReturn {
     [socket]
   )

+  /**
+   * Clear desktop calibration on phone
+   * This tells the phone to forget the desktop calibration and go back to auto-detection
+   */
+  const clearCalibration = useCallback(() => {
+    if (!socket || !currentSessionId.current) return
+
+    socket.emit('remote-camera:clear-calibration', {
+      sessionId: currentSessionId.current,
+    })
+  }, [socket])
+
   // Cleanup on unmount
   useEffect(() => {
     return () => {
@@ -234,5 +248,6 @@ export function useRemoteCameraDesktop(): UseRemoteCameraDesktopReturn {
     unsubscribe,
     setPhoneFrameMode,
     sendCalibration,
+    clearCalibration,
   }
 }

@@ -17,12 +17,18 @@ interface UseRemoteCameraPhoneOptions {
   targetFps?: number
   /** JPEG quality (0-1, default 0.8) */
   jpegQuality?: number
-  /** Target width for cropped image (default 400) */
+  /** Target width for cropped image (default 300) */
   targetWidth?: number
   /** Target width for raw frames (default 640) */
   rawWidth?: number
 }

+/**
+ * Fixed aspect ratio for cropped abacus images.
+ * Abacus is 4 units high by 3 units wide, giving a 4:3 height:width ratio.
+ */
+const ABACUS_ASPECT_RATIO = 4 / 3
+
 interface UseRemoteCameraPhoneReturn {
   /** Whether connected to the session */
   isConnected: boolean
@@ -58,7 +64,10 @@ interface UseRemoteCameraPhoneReturn {
 export function useRemoteCameraPhone(
   options: UseRemoteCameraPhoneOptions = {}
 ): UseRemoteCameraPhoneReturn {
-  const { targetFps = 10, jpegQuality = 0.8, targetWidth = 400, rawWidth = 640 } = options
+  const { targetFps = 10, jpegQuality = 0.8, targetWidth = 300, rawWidth = 640 } = options
+
+  // Calculate fixed output height based on aspect ratio (4 units tall by 3 units wide)
+  const targetHeight = Math.round(targetWidth * ABACUS_ASPECT_RATIO)

   const [isSocketConnected, setIsSocketConnected] = useState(false)
   const [isConnected, setIsConnected] = useState(false)
@@ -157,20 +166,31 @@ export function useRemoteCameraPhone(
       frameModeRef.current = 'cropped'
     }

+    // Handle clear calibration from desktop (go back to auto-detection)
+    const handleClearCalibration = () => {
+      console.log('[RemoteCameraPhone] Desktop cleared calibration - returning to auto-detection')
+      setDesktopCalibration(null)
+      calibrationRef.current = null
+    }
+
     socket.on('remote-camera:error', handleError)
     socket.on('remote-camera:set-mode', handleSetMode)
     socket.on('remote-camera:set-calibration', handleSetCalibration)
+    socket.on('remote-camera:clear-calibration', handleClearCalibration)

     return () => {
       socket.off('remote-camera:error', handleError)
       socket.off('remote-camera:set-mode', handleSetMode)
       socket.off('remote-camera:set-calibration', handleSetCalibration)
+      socket.off('remote-camera:clear-calibration', handleClearCalibration)
     }
   }, [isSocketConnected]) // Re-run when socket connects

   /**
    * Apply perspective transform and extract the quadrilateral region
    * Uses OpenCV for proper perspective correction
+   *
+   * Output is fixed at 4:3 height:width aspect ratio (abacus is 4 units tall by 3 units wide)
    */
   const cropToQuad = useCallback(
     (video: HTMLVideoElement, quad: QuadCorners): string | null => {
@@ -181,11 +201,12 @@ export function useRemoteCameraPhone(

       return rectifyQuadrilateralToBase64(video, quad, {
         outputWidth: targetWidth,
+        outputHeight: targetHeight, // Fixed 4:3 aspect ratio
         jpegQuality,
         rotate180: false, // Phone camera: no rotation needed, direct mapping
       })
     },
-    [targetWidth, jpegQuality]
+    [targetWidth, targetHeight, jpegQuality]
   )

   /**

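The cropped-frame sizing in useRemoteCameraPhone is now fully determined by the constants above; a quick worked check of the defaults (values taken from the hunks, the snippet itself is just the arithmetic):

// With the new default targetWidth of 300 and the fixed 4:3 (height:width) ratio,
// every cropped abacus frame comes out 300×400 regardless of the detected quad.
const ABACUS_ASPECT_RATIO = 4 / 3
const targetWidth = 300
const targetHeight = Math.round(targetWidth * ABACUS_ASPECT_RATIO) // = 400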
@@ -17,6 +17,8 @@ interface UseRemoteCameraSessionReturn {
   error: string | null
   /** Create a new remote camera session */
   createSession: () => Promise<RemoteCameraSession | null>
+  /** Set an existing session ID (for reconnection scenarios) */
+  setExistingSession: (sessionId: string) => void
   /** Clear the current session */
   clearSession: () => void
   /** Get the URL for the phone to scan */
@@ -67,6 +69,17 @@ export function useRemoteCameraSession(): UseRemoteCameraSessionReturn {
     }
   }, [])

+  const setExistingSession = useCallback((sessionId: string) => {
+    // Use existing session ID without creating a new one on the server
+    // This is used for reconnection when the phone reloads
+    setSession({
+      sessionId,
+      expiresAt: new Date(Date.now() + 10 * 60 * 1000).toISOString(), // Assume 10 min remaining
+      phoneConnected: false,
+    })
+    setError(null)
+  }, [])
+
   const clearSession = useCallback(() => {
     setSession(null)
     setError(null)
@@ -112,6 +125,7 @@ export function useRemoteCameraSession(): UseRemoteCameraSessionReturn {
     isCreating,
     error,
     createSession,
+    setExistingSession,
     clearSession,
     getPhoneUrl,
   }

@@ -7,7 +7,7 @@

 // TensorFlow.js types (dynamically imported)
 type TFLite = typeof import('@tensorflow/tfjs')
-type GraphModel = import('@tensorflow/tfjs').GraphModel
+type LayersModel = import('@tensorflow/tfjs').LayersModel

 // Model configuration
 const MODEL_PATH = '/models/abacus-column-classifier/model.json'
@@ -17,8 +17,8 @@ const NUM_CLASSES = 10

 // Cached model and TensorFlow instance
 let tfInstance: TFLite | null = null
-let modelInstance: GraphModel | null = null
-let modelLoadPromise: Promise<GraphModel | null> | null = null
+let modelInstance: LayersModel | null = null
+let modelLoadPromise: Promise<LayersModel | null> | null = null
 let modelCheckFailed = false // Track if model doesn't exist

 /**
@@ -61,7 +61,7 @@ async function checkModelExists(): Promise<boolean> {
  * Lazy load the classification model
  * Returns null if model doesn't exist (not yet trained)
  */
-async function loadModel(): Promise<GraphModel | null> {
+async function loadModel(): Promise<LayersModel | null> {
   if (modelInstance) return modelInstance
   if (modelCheckFailed) return null

@@ -86,8 +86,8 @@ async function loadModel(): Promise<GraphModel | null> {
   const startTime = performance.now()

   try {
-    // Load as GraphModel for optimized inference
-    const model = await tf.loadGraphModel(MODEL_PATH)
+    // Load as LayersModel (exported from Keras)
+    const model = await tf.loadLayersModel(MODEL_PATH)

     const loadTime = performance.now() - startTime
     console.log(`[ColumnClassifier] Model loaded in ${loadTime.toFixed(0)}ms`)

@@ -1228,6 +1228,13 @@ export function initializeSocketServer(httpServer: HTTPServer) {
       }
     )

+    // Remote Camera: Desktop clears calibration (tell phone to go back to auto-detection)
+    socket.on('remote-camera:clear-calibration', ({ sessionId }: { sessionId: string }) => {
+      // Forward clear calibration to phone
+      socket.to(`remote-camera:${sessionId}`).emit('remote-camera:clear-calibration', {})
+      console.log(`🖥️ Desktop cleared remote camera calibration`)
+    })
+
     // Remote Camera: Leave session
     socket.on('remote-camera:leave', async ({ sessionId }: { sessionId: string }) => {
       try {

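Taken together, the clear-calibration hunks form a single round trip: the desktop hook emits the event, the socket server relays it to the session room, and the phone drops the desktop calibration and falls back to auto-detection. A condensed sketch of that flow (event names and payloads match the hunks above; the surrounding wiring and the function names here are illustrative):

import { Server } from 'socket.io'
import type { Socket } from 'socket.io-client'

// Desktop side (cf. useRemoteCameraDesktop.clearCalibration)
function clearCalibration(desktopSocket: Socket, sessionId: string) {
  desktopSocket.emit('remote-camera:clear-calibration', { sessionId })
}

// Server side (cf. initializeSocketServer): relay to the other members of the session room
function registerClearCalibrationRelay(ioServer: Server) {
  ioServer.on('connection', (socket) => {
    socket.on('remote-camera:clear-calibration', ({ sessionId }: { sessionId: string }) => {
      socket.to(`remote-camera:${sessionId}`).emit('remote-camera:clear-calibration', {})
    })
  })
}

// Phone side (cf. useRemoteCameraPhone): forget the desktop calibration, resume auto-detection
function registerPhoneClearHandler(phoneSocket: Socket, resetToAutoDetect: () => void) {
  phoneSocket.on('remote-camera:clear-calibration', () => resetToAutoDetect())
}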
@@ -125,6 +125,9 @@ export interface AbacusVisionState {
   selectedDeviceId: string | null
   availableDevices: MediaDeviceInfo[]
   isDeskViewDetected: boolean
+  facingMode: 'user' | 'environment'
+  isTorchOn: boolean
+  isTorchAvailable: boolean

   // Calibration state
   calibrationGrid: CalibrationGrid | null
@@ -157,6 +160,10 @@ export interface AbacusVisionActions {
   resetCalibration: () => void
   /** Set calibration mode (auto uses ArUco markers, manual uses drag handles) */
   setCalibrationMode: (mode: CalibrationMode) => void
+  /** Flip between front and back camera */
+  flipCamera: () => Promise<void>
+  /** Toggle torch on/off */
+  toggleTorch: () => Promise<void>
 }

 /**