feat(webui): live camera viewer — multi-stream + detection overlays (Issue #177)
UI (src/hooks/useCamera.js, src/components/CameraViewer.jsx):
- 7 camera sources: front/left/rear/right CSI, D435i RGB/depth, panoramic
- Compressed image subscription via rosbridge (sensor_msgs/CompressedImage)
- Client-side 15fps gate (drops excess frames, reduces JS pressure)
- Per-camera FPS indicator with quality badge (FULL/GOOD/LOW/NO SIGNAL)
- Detection overlays: face boxes + names (/social/faces/detections),
gesture icons (/social/gestures), scene object labels + hazard colours
(/social/scene/objects); overlay mode selector (off/faces/gestures/objects/all)
- 360° panoramic equirect viewer with mouse/touch drag azimuth pan
- Picture-in-picture: up to 3 pinned cameras via ⊕ button
- One-click recording (MediaRecorder → MP4/WebM download)
- Snapshot to PNG with detection overlay composite + timestamp watermark
- Cameras tab added to TELEMETRY group in App.jsx
Jetson (rosbridge bringup):
- rosbridge_params.yaml: whitelist + /camera/depth/image_rect_raw/compressed,
/camera/panoramic/compressed, /social/faces/detections,
/social/gestures, /social/scene/objects
- rosbridge.launch.py: D435i colour republisher (JPEG 75%) +
depth republisher (compressedDepth/PNG16 preserving uint16 values)
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
parent
54668536c1
commit
57420807ca
@ -40,6 +40,11 @@ rosbridge_websocket:
|
||||
"/person/target",
|
||||
"/person/detections",
|
||||
"/camera/*/image_raw/compressed",
|
||||
"/camera/depth/image_rect_raw/compressed",
|
||||
"/camera/panoramic/compressed",
|
||||
"/social/faces/detections",
|
||||
"/social/gestures",
|
||||
"/social/scene/objects",
|
||||
"/scan",
|
||||
"/cmd_vel",
|
||||
"/saltybot/imu",
|
||||
|
||||
@ -94,4 +94,33 @@ def generate_launch_description():
|
||||
for name in _CAMERAS
|
||||
]
|
||||
|
||||
return LaunchDescription([rosbridge] + republishers)
|
||||
# ── D435i colour republisher (Issue #177) ────────────────────────────────
|
||||
d435i_color = Node(
|
||||
package='image_transport',
|
||||
executable='republish',
|
||||
name='compress_d435i_color',
|
||||
arguments=['raw', 'compressed'],
|
||||
remappings=[
|
||||
('in', '/camera/color/image_raw'),
|
||||
('out/compressed', '/camera/color/image_raw/compressed'),
|
||||
],
|
||||
parameters=[{'compressed.jpeg_quality': _JPEG_QUALITY}],
|
||||
output='screen',
|
||||
)
|
||||
|
||||
# ── D435i depth republisher (Issue #177) ─────────────────────────────────
|
||||
# Depth stream as compressedDepth (PNG16) — preserves uint16 depth values.
|
||||
# Browser displays as greyscale PNG (darker = closer).
|
||||
d435i_depth = Node(
|
||||
package='image_transport',
|
||||
executable='republish',
|
||||
name='compress_d435i_depth',
|
||||
arguments=['raw', 'compressedDepth'],
|
||||
remappings=[
|
||||
('in', '/camera/depth/image_rect_raw'),
|
||||
('out/compressedDepth', '/camera/depth/image_rect_raw/compressed'),
|
||||
],
|
||||
output='screen',
|
||||
)
|
||||
|
||||
return LaunchDescription([rosbridge] + republishers + [d435i_color, d435i_depth])
|
||||
|
||||
@ -5,13 +5,16 @@
|
||||
* Status | Faces | Conversation | Personality | Navigation
|
||||
*
|
||||
* Telemetry tabs (issue #126):
|
||||
* IMU | Battery | Motors | Map | Control | Health
|
||||
* IMU | Battery | Motors | Map | Control | Health | Cameras
|
||||
*
|
||||
* Fleet tabs (issue #139):
|
||||
* Fleet (self-contained via useFleet)
|
||||
*
|
||||
* Mission tabs (issue #145):
|
||||
* Missions (waypoint editor, route builder, geofence, schedule, execute)
|
||||
*
|
||||
* Camera viewer (issue #177):
|
||||
* CSI × 4 + D435i RGB/depth + panoramic, detection overlays, recording
|
||||
*/
|
||||
|
||||
import { useState, useCallback } from 'react';
|
||||
@ -41,6 +44,9 @@ import { MissionPlanner } from './components/MissionPlanner.jsx';
|
||||
// Settings panel (issue #160)
|
||||
import { SettingsPanel } from './components/SettingsPanel.jsx';
|
||||
|
||||
// Camera viewer (issue #177)
|
||||
import { CameraViewer } from './components/CameraViewer.jsx';
|
||||
|
||||
const TAB_GROUPS = [
|
||||
{
|
||||
label: 'SOCIAL',
|
||||
@ -63,6 +69,7 @@ const TAB_GROUPS = [
|
||||
{ id: 'map', label: 'Map', },
|
||||
{ id: 'control', label: 'Control', },
|
||||
{ id: 'health', label: 'Health', },
|
||||
{ id: 'cameras', label: 'Cameras', },
|
||||
],
|
||||
},
|
||||
{
|
||||
@ -206,6 +213,7 @@ export default function App() {
|
||||
{activeTab === 'map' && <MapViewer subscribe={subscribe} />}
|
||||
{activeTab === 'control' && <ControlMode subscribe={subscribe} />}
|
||||
{activeTab === 'health' && <SystemHealth subscribe={subscribe} />}
|
||||
{activeTab === 'cameras' && <CameraViewer subscribe={subscribe} />}
|
||||
|
||||
{activeTab === 'fleet' && <FleetPanel />}
|
||||
{activeTab === 'missions' && <MissionPlanner />}
|
||||
|
||||
671
ui/social-bot/src/components/CameraViewer.jsx
Normal file
671
ui/social-bot/src/components/CameraViewer.jsx
Normal file
@ -0,0 +1,671 @@
|
||||
/**
|
||||
* CameraViewer.jsx — Live camera stream viewer (Issue #177).
|
||||
*
|
||||
* Features:
|
||||
* - 7 cameras: front/left/rear/right (CSI), D435i RGB/depth, panoramic
|
||||
* - Detection overlays: face boxes + names, gesture icons, scene object labels
|
||||
* - 360° panoramic equirect viewer with mouse drag pan
|
||||
* - One-click recording (MP4/WebM) + download
|
||||
* - Snapshot to PNG with annotations + timestamp
|
||||
* - Picture-in-picture (up to 3 pinned cameras)
|
||||
* - Per-camera FPS indicator + adaptive quality badge
|
||||
*
|
||||
* Topics consumed:
|
||||
* /camera/<name>/image_raw/compressed sensor_msgs/CompressedImage
|
||||
* /camera/color/image_raw/compressed sensor_msgs/CompressedImage (D435i)
|
||||
* /camera/depth/image_rect_raw/compressed sensor_msgs/CompressedImage (D435i)
|
||||
* /camera/panoramic/compressed sensor_msgs/CompressedImage
|
||||
* /social/faces/detections saltybot_social_msgs/FaceDetectionArray
|
||||
* /social/gestures saltybot_social_msgs/GestureArray
|
||||
* /social/scene/objects saltybot_scene_msgs/SceneObjectArray
|
||||
*/
|
||||
|
||||
import { forwardRef, useCallback, useEffect, useRef, useState } from 'react';
|
||||
import { useCamera, CAMERAS, CAMERA_BY_ID, CAMERA_BY_ROS_ID } from '../hooks/useCamera.js';
|
||||
|
||||
// ── Constants ─────────────────────────────────────────────────────────────────
|
||||
|
||||
// Emoji glyph shown for each gesture_type published on /social/gestures.
const GESTURE_ICONS = {
  wave: '👋',
  point: '👆',
  stop_palm: '✋',
  thumbs_up: '👍',
  thumbs_down: '👎',
  come_here: '🤏',
  follow: '☞',
  arms_up: '🙌',
  crouch: '⬇',
  arms_spread: '↔',
};

// Outline colour per hazard_type on /social/scene/objects.
// Anything unlisted (incl. hazard_type 0 = benign) falls back to green
// inside drawSceneObjects.
const HAZARD_COLORS = {
  1: '#f59e0b', // stairs — amber
  2: '#ef4444', // drop — red
  3: '#60a5fa', // wet floor — blue
  4: '#a855f7', // glass door — purple
  5: '#f97316', // pet — orange
};
|
||||
|
||||
// ── Detection overlay drawing helpers ─────────────────────────────────────────
|
||||
|
||||
/**
 * Paint face bounding boxes onto a 2D canvas context.
 *
 * Recognised faces (person_name set and not 'unknown') are drawn cyan with
 * a "name NN%" label; unrecognised ones amber with a "face #id" label.
 * Detections arrive in source-image pixels; scaleX/scaleY map them to
 * canvas pixels.
 */
function drawFaceBoxes(ctx, faces, scaleX, scaleY) {
  const CORNER = 8; // length of the L-shaped corner accents, px

  for (const face of faces) {
    const x = face.bbox_x * scaleX;
    const y = face.bbox_y * scaleY;
    const w = face.bbox_w * scaleX;
    const h = face.bbox_h * scaleY;

    const known = Boolean(face.person_name) && face.person_name !== 'unknown';
    const color = known ? '#06b6d4' : '#f59e0b';

    // Glowing outline
    ctx.strokeStyle = color;
    ctx.lineWidth = 2;
    ctx.shadowBlur = 6;
    ctx.shadowColor = color;
    ctx.strokeRect(x, y, w, h);
    ctx.shadowBlur = 0;

    // L-shaped accent at each of the four corners
    ctx.lineWidth = 3;
    const corners = [
      [x, y, 1, 1],
      [x + w, y, -1, 1],
      [x, y + h, 1, -1],
      [x + w, y + h, -1, -1],
    ];
    for (const [px, py, dx, dy] of corners) {
      ctx.beginPath();
      ctx.moveTo(px, py + dy * CORNER);
      ctx.lineTo(px, py);
      ctx.lineTo(px + dx * CORNER, py);
      ctx.stroke();
    }

    // Label above the box: name + confidence, or the numeric face id
    const label = known
      ? `${face.person_name} ${(face.recognition_score * 100).toFixed(0)}%`
      : `face #${face.face_id}`;
    ctx.font = 'bold 11px monospace';
    const labelW = ctx.measureText(label).width;
    ctx.fillStyle = known ? 'rgba(6,182,212,0.8)' : 'rgba(245,158,11,0.8)';
    ctx.fillRect(x, y - 16, labelW + 6, 16);
    ctx.fillStyle = '#000';
    ctx.fillText(label, x + 3, y - 4);
  }
}
|
||||
|
||||
/**
 * Draw gesture glyphs + text labels at normalised hand positions.
 *
 * Gestures carry the source camera's ROS id; only those originating from
 * the camera currently on screen (activeCamId) are drawn.  hand_x/hand_y
 * are normalised 0–1, so positions use the canvas dimensions directly —
 * scaleX/scaleY are accepted for signature parity with the other helpers.
 */
function drawGestureIcons(ctx, gestures, activeCamId, scaleX, scaleY) {
  const { width, height } = ctx.canvas;

  for (const gesture of gestures) {
    const sourceCam = CAMERA_BY_ROS_ID[gesture.camera_id];
    if (!sourceCam || sourceCam.cameraId !== activeCamId) continue;

    const px = gesture.hand_x * width;
    const py = gesture.hand_y * height;

    // Emoji glyph with an orange glow
    ctx.font = '24px serif';
    ctx.shadowBlur = 8;
    ctx.shadowColor = '#f97316';
    ctx.fillText(GESTURE_ICONS[gesture.gesture_type] ?? '?', px - 12, py + 8);
    ctx.shadowBlur = 0;

    // Centred gesture name underneath the glyph
    ctx.font = 'bold 10px monospace';
    ctx.fillStyle = '#f97316';
    const text = gesture.gesture_type;
    ctx.fillText(text, px - ctx.measureText(text).width / 2, py + 22);
  }
}
|
||||
|
||||
/**
 * Draw scene-object boxes (dashed outline) with class + distance labels.
 *
 * @param {CanvasRenderingContext2D} ctx
 * @param {Array} objects  Scene objects from /social/scene/objects.  Each
 *   carries a `bbox` in either vision_msgs style ({center:{x,y}, size_x,
 *   size_y}) or flat style ({center_x, center_y, size_x, size_y}).
 *   NOTE(review): assumed to be in source-image pixels — confirm upstream.
 * @param {number} scaleX  Image-px → canvas-px horizontal scale.
 * @param {number} scaleY  Image-px → canvas-px vertical scale.
 *
 * Hazardous objects (hazard_type > 0) use HAZARD_COLORS; others are green.
 */
function drawSceneObjects(ctx, objects, scaleX, scaleY) {
  for (const obj of objects) {
    const bb = obj.bbox;
    const cx = bb?.center?.x ?? bb?.center_x;
    const cy = bb?.center?.y ?? bb?.center_y;
    const sw = bb?.size_x ?? 0;
    const sh = bb?.size_y ?? 0;
    // Skip malformed detections: BOTH centre coordinates must be present.
    // (Previously only cx was checked, so a missing cy produced NaN
    // geometry and an invisible, corrupt draw.)
    if (cx == null || cy == null) continue;

    const x = (cx - sw / 2) * scaleX;
    const y = (cy - sh / 2) * scaleY;
    const w = sw * scaleX;
    const h = sh * scaleY;

    // Dashed outline, colour-coded by hazard class (green = benign/unknown)
    const color = HAZARD_COLORS[obj.hazard_type] ?? '#22c55e';
    ctx.strokeStyle = color;
    ctx.lineWidth = 1.5;
    ctx.setLineDash([4, 3]);
    ctx.strokeRect(x, y, w, h);
    ctx.setLineDash([]);

    // Label below the box: "class 1.2m" (distance appended only when known)
    const dist = obj.distance_m > 0 ? ` ${obj.distance_m.toFixed(1)}m` : '';
    const label = `${obj.class_name}${dist}`;
    ctx.font = '10px monospace';
    const tw = ctx.measureText(label).width;
    ctx.fillStyle = `${color}cc`; // same hue at 80% alpha
    ctx.fillRect(x, y + h, tw + 4, 14);
    ctx.fillStyle = '#000';
    ctx.fillText(label, x + 2, y + h + 11);
  }
}
|
||||
|
||||
// ── Overlay canvas ─────────────────────────────────────────────────────────────
|
||||
|
||||
function OverlayCanvas({ faces, gestures, sceneObjects, activeCam, containerW, containerH }) {
|
||||
const canvasRef = useRef(null);
|
||||
|
||||
useEffect(() => {
|
||||
const canvas = canvasRef.current;
|
||||
if (!canvas) return;
|
||||
const ctx = canvas.getContext('2d');
|
||||
ctx.clearRect(0, 0, canvas.width, canvas.height);
|
||||
|
||||
if (!activeCam) return;
|
||||
|
||||
const scaleX = canvas.width / (activeCam.width || 640);
|
||||
const scaleY = canvas.height / (activeCam.height || 480);
|
||||
|
||||
// Draw overlays: only for front camera (face + gesture source)
|
||||
if (activeCam.id === 'front') {
|
||||
drawFaceBoxes(ctx, faces, scaleX, scaleY);
|
||||
}
|
||||
if (!activeCam.isPanoramic) {
|
||||
drawGestureIcons(ctx, gestures, activeCam.cameraId, scaleX, scaleY);
|
||||
}
|
||||
if (activeCam.id === 'color') {
|
||||
drawSceneObjects(ctx, sceneObjects, scaleX, scaleY);
|
||||
}
|
||||
}, [faces, gestures, sceneObjects, activeCam]);
|
||||
|
||||
return (
|
||||
<canvas
|
||||
ref={canvasRef}
|
||||
width={containerW || 640}
|
||||
height={containerH || 480}
|
||||
className="absolute inset-0 w-full h-full pointer-events-none"
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
// ── Panoramic equirect viewer ──────────────────────────────────────────────────
|
||||
|
||||
function PanoViewer({ frameUrl }) {
|
||||
const canvasRef = useRef(null);
|
||||
const azRef = useRef(0); // 0–1920px offset
|
||||
const dragRef = useRef(null);
|
||||
const imgRef = useRef(null);
|
||||
|
||||
const draw = useCallback(() => {
|
||||
const canvas = canvasRef.current;
|
||||
const img = imgRef.current;
|
||||
if (!canvas || !img || !img.complete) return;
|
||||
|
||||
const ctx = canvas.getContext('2d');
|
||||
const W = canvas.width;
|
||||
const H = canvas.height;
|
||||
const iW = img.naturalWidth; // 1920
|
||||
const iH = img.naturalHeight; // 960
|
||||
const vW = iW / 2; // viewport = 50% of equirect width
|
||||
const vH = Math.round((H / W) * vW);
|
||||
const vY = Math.round((iH - vH) / 2);
|
||||
const off = Math.round(azRef.current) % iW;
|
||||
|
||||
ctx.clearRect(0, 0, W, H);
|
||||
|
||||
// Draw left segment
|
||||
const srcX1 = off;
|
||||
const srcW1 = Math.min(vW, iW - off);
|
||||
const dstW1 = Math.round((srcW1 / vW) * W);
|
||||
if (dstW1 > 0) {
|
||||
ctx.drawImage(img, srcX1, vY, srcW1, vH, 0, 0, dstW1, H);
|
||||
}
|
||||
|
||||
// Draw wrapped right segment (if viewport crosses 0°)
|
||||
if (srcW1 < vW) {
|
||||
const srcX2 = 0;
|
||||
const srcW2 = vW - srcW1;
|
||||
const dstX2 = dstW1;
|
||||
const dstW2 = W - dstW1;
|
||||
ctx.drawImage(img, srcX2, vY, srcW2, vH, dstX2, 0, dstW2, H);
|
||||
}
|
||||
|
||||
// Compass badge
|
||||
const azDeg = Math.round((azRef.current / iW) * 360);
|
||||
ctx.fillStyle = 'rgba(0,0,0,0.5)';
|
||||
ctx.fillRect(W - 58, 6, 52, 18);
|
||||
ctx.fillStyle = '#06b6d4';
|
||||
ctx.font = 'bold 11px monospace';
|
||||
ctx.fillText(`${azDeg}°`, W - 52, 19);
|
||||
}, []);
|
||||
|
||||
// Load image when URL changes
|
||||
useEffect(() => {
|
||||
if (!frameUrl) return;
|
||||
const img = new Image();
|
||||
img.onload = draw;
|
||||
img.src = frameUrl;
|
||||
imgRef.current = img;
|
||||
}, [frameUrl, draw]);
|
||||
|
||||
// Re-draw when azimuth changes
|
||||
const onMouseDown = e => { dragRef.current = e.clientX; };
|
||||
const onMouseMove = e => {
|
||||
if (dragRef.current == null) return;
|
||||
const dx = e.clientX - dragRef.current;
|
||||
dragRef.current = e.clientX;
|
||||
azRef.current = ((azRef.current - dx * 2) % 1920 + 1920) % 1920;
|
||||
draw();
|
||||
};
|
||||
const onMouseUp = () => { dragRef.current = null; };
|
||||
|
||||
const onTouchStart = e => { dragRef.current = e.touches[0].clientX; };
|
||||
const onTouchMove = e => {
|
||||
if (dragRef.current == null) return;
|
||||
const dx = e.touches[0].clientX - dragRef.current;
|
||||
dragRef.current = e.touches[0].clientX;
|
||||
azRef.current = ((azRef.current - dx * 2) % 1920 + 1920) % 1920;
|
||||
draw();
|
||||
};
|
||||
|
||||
return (
|
||||
<canvas
|
||||
ref={canvasRef}
|
||||
width={960}
|
||||
height={240}
|
||||
className="w-full object-contain bg-black cursor-ew-resize rounded"
|
||||
onMouseDown={onMouseDown}
|
||||
onMouseMove={onMouseMove}
|
||||
onMouseUp={onMouseUp}
|
||||
onMouseLeave={onMouseUp}
|
||||
onTouchStart={onTouchStart}
|
||||
onTouchMove={onTouchMove}
|
||||
onTouchEnd={() => { dragRef.current = null; }}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
// ── PiP mini window ────────────────────────────────────────────────────────────
|
||||
|
||||
function PiPWindow({ cam, frameUrl, fps, onClose, index }) {
|
||||
const positions = [
|
||||
'bottom-2 left-2',
|
||||
'bottom-2 left-40',
|
||||
'bottom-2 left-[18rem]',
|
||||
];
|
||||
return (
|
||||
<div className={`absolute ${positions[index] ?? 'bottom-2 left-2'} w-36 rounded border border-cyan-900 overflow-hidden bg-black shadow-lg shadow-black z-10`}>
|
||||
<div className="flex items-center justify-between px-1.5 py-0.5 bg-gray-950 text-xs">
|
||||
<span className="text-cyan-700 font-bold">{cam.label}</span>
|
||||
<div className="flex items-center gap-1">
|
||||
<span className="text-gray-700">{fps}fps</span>
|
||||
<button onClick={onClose} className="text-gray-600 hover:text-red-400">✕</button>
|
||||
</div>
|
||||
</div>
|
||||
{frameUrl ? (
|
||||
<img src={frameUrl} alt={cam.label} className="w-full aspect-video object-cover block" />
|
||||
) : (
|
||||
<div className="w-full aspect-video flex items-center justify-center text-gray-800 text-xs">
|
||||
no signal
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// ── Camera selector strip ──────────────────────────────────────────────────────
|
||||
|
||||
function CameraStrip({ cameras, activeId, pipList, frames, fps, onSelect, onTogglePip }) {
|
||||
return (
|
||||
<div className="flex gap-1.5 flex-wrap">
|
||||
{cameras.map(cam => {
|
||||
const hasFrame = !!frames[cam.id];
|
||||
const camFps = fps[cam.id] ?? 0;
|
||||
const isActive = activeId === cam.id;
|
||||
const isPip = pipList.includes(cam.id);
|
||||
|
||||
return (
|
||||
<div key={cam.id} className="relative">
|
||||
<button
|
||||
onClick={() => onSelect(cam.id)}
|
||||
className={`flex flex-col items-start rounded border px-2.5 py-1.5 text-xs font-bold transition-colors ${
|
||||
isActive
|
||||
? 'border-cyan-500 bg-cyan-950 bg-opacity-50 text-cyan-300'
|
||||
: hasFrame
|
||||
? 'border-gray-700 bg-gray-900 text-gray-400 hover:border-cyan-800 hover:text-gray-200'
|
||||
: 'border-gray-800 bg-gray-950 text-gray-700 hover:border-gray-700'
|
||||
}`}
|
||||
>
|
||||
<span>{cam.label.toUpperCase()}</span>
|
||||
<span className={`text-xs font-normal mt-0.5 ${
|
||||
camFps >= 12 ? 'text-green-600' :
|
||||
camFps > 0 ? 'text-amber-600' :
|
||||
'text-gray-700'
|
||||
}`}>
|
||||
{camFps > 0 ? `${camFps}fps` : 'no signal'}
|
||||
</span>
|
||||
</button>
|
||||
|
||||
{/* PiP pin button — only when NOT the active camera */}
|
||||
{!isActive && (
|
||||
<button
|
||||
onClick={() => onTogglePip(cam.id)}
|
||||
title={isPip ? 'Unpin PiP' : 'Pin PiP'}
|
||||
className={`absolute -top-1.5 -right-1.5 w-4 h-4 rounded-full text-[9px] flex items-center justify-center border transition-colors ${
|
||||
isPip
|
||||
? 'bg-cyan-600 border-cyan-400 text-white'
|
||||
: 'bg-gray-800 border-gray-700 text-gray-600 hover:border-cyan-700 hover:text-cyan-500'
|
||||
}`}
|
||||
>
|
||||
{isPip ? '×' : '⊕'}
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// ── Recording bar ──────────────────────────────────────────────────────────────
|
||||
|
||||
function RecordingBar({ recording, recSeconds, onStart, onStop, onSnapshot, overlayRef }) {
|
||||
const fmtTime = s => `${String(Math.floor(s / 60)).padStart(2, '0')}:${String(s % 60).padStart(2, '0')}`;
|
||||
|
||||
return (
|
||||
<div className="flex items-center gap-2 flex-wrap">
|
||||
{!recording ? (
|
||||
<button
|
||||
onClick={onStart}
|
||||
className="flex items-center gap-1.5 px-3 py-1.5 rounded border border-red-900 bg-red-950 text-red-400 hover:bg-red-900 text-xs font-bold transition-colors"
|
||||
>
|
||||
<span className="w-2 h-2 rounded-full bg-red-500" />
|
||||
REC
|
||||
</button>
|
||||
) : (
|
||||
<button
|
||||
onClick={onStop}
|
||||
className="flex items-center gap-1.5 px-3 py-1.5 rounded border border-red-600 bg-red-900 text-red-300 hover:bg-red-800 text-xs font-bold animate-pulse"
|
||||
>
|
||||
<span className="w-2 h-2 rounded bg-red-400" />
|
||||
STOP {fmtTime(recSeconds)}
|
||||
</button>
|
||||
)}
|
||||
|
||||
<button
|
||||
onClick={() => onSnapshot(overlayRef?.current)}
|
||||
className="flex items-center gap-1 px-3 py-1.5 rounded border border-gray-700 bg-gray-900 text-gray-400 hover:border-cyan-700 hover:text-cyan-400 text-xs font-bold transition-colors"
|
||||
>
|
||||
📷 SNAP
|
||||
</button>
|
||||
|
||||
{recording && (
|
||||
<span className="text-xs text-red-500 animate-pulse font-mono">
|
||||
● RECORDING {fmtTime(recSeconds)}
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// ── Main component ─────────────────────────────────────────────────────────────
|
||||
|
||||
export function CameraViewer({ subscribe }) {
|
||||
const {
|
||||
cameras, frames, fps,
|
||||
activeId, setActiveId,
|
||||
pipList, togglePip,
|
||||
recording, recSeconds,
|
||||
startRecording, stopRecording,
|
||||
takeSnapshot,
|
||||
} = useCamera({ subscribe });
|
||||
|
||||
// ── Detection state ─────────────────────────────────────────────────────────
|
||||
const [faces, setFaces] = useState([]);
|
||||
const [gestures, setGestures] = useState([]);
|
||||
const [sceneObjects, setSceneObjects] = useState([]);
|
||||
|
||||
const [showOverlay, setShowOverlay] = useState(true);
|
||||
const [overlayMode, setOverlayMode] = useState('all'); // 'all' | 'faces' | 'gestures' | 'objects' | 'off'
|
||||
|
||||
const overlayCanvasRef = useRef(null);
|
||||
|
||||
// Subscribe to detection topics
|
||||
useEffect(() => {
|
||||
if (!subscribe) return;
|
||||
const u1 = subscribe('/social/faces/detections', 'saltybot_social_msgs/FaceDetectionArray', msg => {
|
||||
setFaces(msg.faces ?? []);
|
||||
});
|
||||
const u2 = subscribe('/social/gestures', 'saltybot_social_msgs/GestureArray', msg => {
|
||||
setGestures(msg.gestures ?? []);
|
||||
});
|
||||
const u3 = subscribe('/social/scene/objects', 'saltybot_scene_msgs/SceneObjectArray', msg => {
|
||||
setSceneObjects(msg.objects ?? []);
|
||||
});
|
||||
return () => { u1?.(); u2?.(); u3?.(); };
|
||||
}, [subscribe]);
|
||||
|
||||
const activeCam = CAMERA_BY_ID[activeId];
|
||||
const activeFrame = frames[activeId];
|
||||
|
||||
// Filter overlay data based on mode
|
||||
const visibleFaces = (overlayMode === 'all' || overlayMode === 'faces') ? faces : [];
|
||||
const visibleGestures = (overlayMode === 'all' || overlayMode === 'gestures') ? gestures : [];
|
||||
const visibleObjects = (overlayMode === 'all' || overlayMode === 'objects') ? sceneObjects : [];
|
||||
|
||||
// ── Container size tracking (for overlay canvas sizing) ────────────────────
|
||||
const containerRef = useRef(null);
|
||||
const [containerSize, setContainerSize] = useState({ w: 640, h: 480 });
|
||||
|
||||
useEffect(() => {
|
||||
if (!containerRef.current) return;
|
||||
const ro = new ResizeObserver(entries => {
|
||||
const e = entries[0];
|
||||
setContainerSize({ w: Math.round(e.contentRect.width), h: Math.round(e.contentRect.height) });
|
||||
});
|
||||
ro.observe(containerRef.current);
|
||||
return () => ro.disconnect();
|
||||
}, []);
|
||||
|
||||
// ── Quality badge ──────────────────────────────────────────────────────────
|
||||
const camFps = fps[activeId] ?? 0;
|
||||
const quality = camFps >= 13 ? 'FULL' : camFps >= 8 ? 'GOOD' : camFps > 0 ? 'LOW' : 'NO SIGNAL';
|
||||
const qualColor = camFps >= 13 ? 'text-green-500' : camFps >= 8 ? 'text-amber-500' : camFps > 0 ? 'text-red-500' : 'text-gray-700';
|
||||
|
||||
return (
|
||||
<div className="space-y-3">
|
||||
|
||||
{/* ── Camera strip ── */}
|
||||
<div className="bg-gray-950 rounded-lg border border-cyan-950 p-3 space-y-2">
|
||||
<div className="flex items-center justify-between">
|
||||
<div className="text-cyan-700 text-xs font-bold tracking-widest">CAMERA SELECT</div>
|
||||
<span className={`text-xs font-bold ${qualColor}`}>{quality} {camFps > 0 ? `${camFps}fps` : ''}</span>
|
||||
</div>
|
||||
<CameraStrip
|
||||
cameras={cameras}
|
||||
activeId={activeId}
|
||||
pipList={pipList}
|
||||
frames={frames}
|
||||
fps={fps}
|
||||
onSelect={setActiveId}
|
||||
onTogglePip={togglePip}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* ── Main viewer ── */}
|
||||
<div className="bg-gray-950 rounded-lg border border-cyan-950 overflow-hidden">
|
||||
|
||||
{/* Viewer toolbar */}
|
||||
<div className="flex items-center justify-between px-3 py-2 border-b border-cyan-950">
|
||||
<div className="flex items-center gap-2">
|
||||
<span className="text-cyan-400 text-xs font-bold">{activeCam?.label ?? '—'}</span>
|
||||
{activeCam?.isDepth && (
|
||||
<span className="text-xs text-gray-600 border border-gray-800 rounded px-1">DEPTH · greyscale</span>
|
||||
)}
|
||||
{activeCam?.isPanoramic && (
|
||||
<span className="text-xs text-gray-600 border border-gray-800 rounded px-1">360° · drag to pan</span>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Overlay mode selector */}
|
||||
<div className="flex items-center gap-1">
|
||||
{['off','faces','gestures','objects','all'].map(mode => (
|
||||
<button
|
||||
key={mode}
|
||||
onClick={() => setOverlayMode(mode)}
|
||||
className={`px-2 py-0.5 rounded text-xs border transition-colors ${
|
||||
overlayMode === mode
|
||||
? 'border-cyan-600 bg-cyan-950 text-cyan-400'
|
||||
: 'border-gray-800 text-gray-600 hover:border-gray-700 hover:text-gray-400'
|
||||
}`}
|
||||
>
|
||||
{mode === 'all' ? 'ALL' : mode === 'off' ? 'OFF' : mode.slice(0,3).toUpperCase()}
|
||||
</button>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Image + overlay */}
|
||||
<div className="relative" ref={containerRef}>
|
||||
{activeCam?.isPanoramic ? (
|
||||
<PanoViewer frameUrl={activeFrame} />
|
||||
) : activeFrame ? (
|
||||
<img
|
||||
src={activeFrame}
|
||||
alt={activeCam?.label ?? 'camera'}
|
||||
className="w-full object-contain block bg-black"
|
||||
style={{ maxHeight: '480px' }}
|
||||
/>
|
||||
) : (
|
||||
<div className="w-full bg-black flex items-center justify-center text-gray-800 text-sm font-mono"
|
||||
style={{ height: '360px' }}>
|
||||
<div className="text-center space-y-2">
|
||||
<div className="text-2xl">📷</div>
|
||||
<div>Waiting for {activeCam?.label ?? '—'}…</div>
|
||||
<div className="text-xs text-gray-700">{activeCam?.topic}</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Detection overlay canvas */}
|
||||
{overlayMode !== 'off' && !activeCam?.isPanoramic && (
|
||||
<OverlayCanvas
|
||||
ref={overlayCanvasRef}
|
||||
faces={visibleFaces}
|
||||
gestures={visibleGestures}
|
||||
sceneObjects={visibleObjects}
|
||||
activeCam={activeCam}
|
||||
containerW={containerSize.w}
|
||||
containerH={containerSize.h}
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* PiP windows */}
|
||||
{pipList.map((id, idx) => {
|
||||
const cam = CAMERA_BY_ID[id];
|
||||
if (!cam) return null;
|
||||
return (
|
||||
<PiPWindow
|
||||
key={id}
|
||||
cam={cam}
|
||||
frameUrl={frames[id]}
|
||||
fps={fps[id] ?? 0}
|
||||
index={idx}
|
||||
onClose={() => togglePip(id)}
|
||||
/>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* ── Recording controls ── */}
|
||||
<div className="bg-gray-950 rounded-lg border border-cyan-950 p-3">
|
||||
<div className="flex items-center justify-between mb-2">
|
||||
<div className="text-cyan-700 text-xs font-bold tracking-widest">CAPTURE</div>
|
||||
</div>
|
||||
<RecordingBar
|
||||
recording={recording}
|
||||
recSeconds={recSeconds}
|
||||
onStart={startRecording}
|
||||
onStop={stopRecording}
|
||||
onSnapshot={takeSnapshot}
|
||||
overlayRef={overlayCanvasRef}
|
||||
/>
|
||||
<div className="mt-2 text-xs text-gray-700">
|
||||
Recording saves as MP4/WebM to your Downloads.
|
||||
Snapshot includes detection overlay + timestamp.
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* ── Detection status ── */}
|
||||
<div className="grid grid-cols-3 gap-2 text-xs">
|
||||
<div className="bg-gray-950 rounded border border-gray-800 p-2">
|
||||
<div className="text-gray-600 mb-1">FACES</div>
|
||||
<div className={`font-bold ${faces.length > 0 ? 'text-cyan-400' : 'text-gray-700'}`}>
|
||||
{faces.length > 0 ? `${faces.length} detected` : 'none'}
|
||||
</div>
|
||||
{faces.slice(0, 2).map((f, i) => (
|
||||
<div key={i} className="text-gray-600 truncate">
|
||||
{f.person_name && f.person_name !== 'unknown'
|
||||
? `↳ ${f.person_name}`
|
||||
: `↳ unknown #${f.face_id}`}
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
|
||||
<div className="bg-gray-950 rounded border border-gray-800 p-2">
|
||||
<div className="text-gray-600 mb-1">GESTURES</div>
|
||||
<div className={`font-bold ${gestures.length > 0 ? 'text-amber-400' : 'text-gray-700'}`}>
|
||||
{gestures.length > 0 ? `${gestures.length} active` : 'none'}
|
||||
</div>
|
||||
{gestures.slice(0, 2).map((g, i) => {
|
||||
const icon = GESTURE_ICONS[g.gesture_type] ?? '?';
|
||||
return (
|
||||
<div key={i} className="text-gray-600 truncate">
|
||||
{icon} {g.gesture_type} cam{g.camera_id}
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
|
||||
<div className="bg-gray-950 rounded border border-gray-800 p-2">
|
||||
<div className="text-gray-600 mb-1">OBJECTS</div>
|
||||
<div className={`font-bold ${sceneObjects.length > 0 ? 'text-green-400' : 'text-gray-700'}`}>
|
||||
{sceneObjects.length > 0 ? `${sceneObjects.length} objects` : 'none'}
|
||||
</div>
|
||||
{sceneObjects
|
||||
.filter(o => o.hazard_type > 0)
|
||||
.slice(0, 2)
|
||||
.map((o, i) => (
|
||||
<div key={i} className="text-amber-700 truncate">⚠ {o.class_name}</div>
|
||||
))
|
||||
}
|
||||
{sceneObjects.filter(o => o.hazard_type === 0).slice(0, 2).map((o, i) => (
|
||||
<div key={`ok${i}`} className="text-gray-600 truncate">
|
||||
{o.class_name} {o.distance_m > 0 ? `${o.distance_m.toFixed(1)}m` : ''}
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* ── Legend ── */}
|
||||
<div className="flex gap-4 text-xs text-gray-700 flex-wrap">
|
||||
<div className="flex items-center gap-1">
|
||||
<div className="w-3 h-3 rounded-sm border border-cyan-600" />
|
||||
Known face
|
||||
</div>
|
||||
<div className="flex items-center gap-1">
|
||||
<div className="w-3 h-3 rounded-sm border border-amber-600" />
|
||||
Unknown face
|
||||
</div>
|
||||
<div className="flex items-center gap-1">
|
||||
<span>👆</span> Gesture
|
||||
</div>
|
||||
<div className="flex items-center gap-1">
|
||||
<div className="w-3 h-3 rounded-sm border border-green-700 border-dashed" />
|
||||
Object
|
||||
</div>
|
||||
<div className="flex items-center gap-1">
|
||||
<div className="w-3 h-3 rounded-sm border border-amber-600 border-dashed" />
|
||||
Hazard
|
||||
</div>
|
||||
<div className="ml-auto text-gray-800 italic">
|
||||
⊕ pin = PiP · overlay: {overlayMode}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
325
ui/social-bot/src/hooks/useCamera.js
Normal file
325
ui/social-bot/src/hooks/useCamera.js
Normal file
@ -0,0 +1,325 @@
|
||||
/**
|
||||
* useCamera.js — Multi-camera stream manager (Issue #177).
|
||||
*
|
||||
* Subscribes to sensor_msgs/CompressedImage topics via rosbridge.
|
||||
* Decodes base64 JPEG/PNG → data URL for <img>/<canvas> display.
|
||||
* Tracks per-camera FPS. Manages MediaRecorder for recording + snapshots.
|
||||
*
|
||||
* Camera sources:
|
||||
* front / left / rear / right — 4× CSI IMX219, 640×480
|
||||
* topic: /camera/<name>/image_raw/compressed
|
||||
* color — D435i RGB, 640×480
|
||||
* topic: /camera/color/image_raw/compressed
|
||||
* depth — D435i depth, 640×480 greyscale (PNG16)
|
||||
* topic: /camera/depth/image_rect_raw/compressed
|
||||
* panoramic — equirect stitch 1920×960
|
||||
* topic: /camera/panoramic/compressed
|
||||
*/
|
||||
|
||||
import { useState, useEffect, useRef, useCallback } from 'react';
|
||||
|
||||
// ── Camera catalogue ──────────────────────────────────────────────────────────
|
||||
|
||||
// The four CSI cameras differ only in id/label/short label/ROS camera_id;
// generate them from a spec table.  cameraId matches gesture_node's
// camera_id field.
const _CSI_SPECS = [
  ['front', 'Front', 'F', 0],
  ['left', 'Left', 'L', 1],
  ['rear', 'Rear', 'R', 2],
  ['right', 'Right', 'Rt', 3],
];

export const CAMERAS = [
  ..._CSI_SPECS.map(([id, label, shortLabel, cameraId]) => ({
    id,
    label,
    shortLabel,
    topic: `/camera/${id}/image_raw/compressed`,
    msgType: 'sensor_msgs/CompressedImage',
    cameraId,
    width: 640, height: 480,
  })),
  {
    id: 'color',
    label: 'D435i RGB',
    shortLabel: 'D',
    topic: '/camera/color/image_raw/compressed',
    msgType: 'sensor_msgs/CompressedImage',
    cameraId: 4,
    width: 640, height: 480,
  },
  {
    id: 'depth',
    label: 'Depth',
    shortLabel: '≋',
    topic: '/camera/depth/image_rect_raw/compressed',
    msgType: 'sensor_msgs/CompressedImage',
    cameraId: 5,
    width: 640, height: 480,
    isDepth: true, // rendered as greyscale PNG in the viewer
  },
  {
    id: 'panoramic',
    label: 'Panoramic',
    shortLabel: '360',
    topic: '/camera/panoramic/compressed',
    msgType: 'sensor_msgs/CompressedImage',
    cameraId: -1, // sentinel: the stitched view has no gesture camera_id
    width: 1920, height: 960,
    isPanoramic: true,
  },
];

// Lookup tables: by UI id, and by the ROS camera_id carried in gesture
// messages (the panoramic sentinel -1 is excluded).
export const CAMERA_BY_ID = Object.fromEntries(CAMERAS.map(c => [c.id, c]));
export const CAMERA_BY_ROS_ID = Object.fromEntries(
  CAMERAS.filter(c => c.cameraId >= 0).map(c => [c.cameraId, c])
);
|
||||
|
||||
// Client-side frame-rate cap: frames arriving faster than this are dropped
// before base64 decode/paint to keep JS pressure down (see commit note).
const TARGET_FPS = 15;
// ms between FPS counter resets — per-camera counts are published once per
// interval, so fps values are effectively frames-per-second.
const FPS_INTERVAL = 1000; // ms between FPS counter resets
|
||||
|
||||
// ── Hook ──────────────────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Multi-camera stream hook: subscribes every catalogue camera over rosbridge,
 * gates frames to TARGET_FPS, tracks per-camera FPS, and provides active/PiP
 * selection, MediaRecorder-based recording, and PNG snapshots.
 *
 * @param {Object}   [opts]
 * @param {Function} [opts.subscribe] - (topic, msgType, cb) => unsubscribeFn;
 *   when absent, no topics are subscribed (frames stay null).
 * @returns {Object} cameras/frames/fps state plus control callbacks.
 */
export function useCamera({ subscribe } = {}) {
  // ── State ──────────────────────────────────────────────────────────────────
  const [frames, setFrames] = useState(() =>
    Object.fromEntries(CAMERAS.map(c => [c.id, null]))
  );
  const [fps, setFps] = useState(() =>
    Object.fromEntries(CAMERAS.map(c => [c.id, 0]))
  );
  const [activeId, setActiveId] = useState('front');
  const [pipList, setPipList] = useState([]); // up to 3 extra camera ids
  const [recording, setRecording] = useState(false);
  const [recSeconds, setRecSeconds] = useState(0);

  // ── Refs (not state — no re-render needed) ─────────────────────────────────
  const countRef = useRef(Object.fromEntries(CAMERAS.map(c => [c.id, 0])));
  const mediaRecRef = useRef(null);
  const chunksRef = useRef([]);
  const recTimerRef = useRef(null);
  const recordCanvas = useRef(null); // hidden canvas used for recording
  const recAnimRef = useRef(null);   // setTimeout handle for record-canvas loop
  const latestFrameRef = useRef(Object.fromEntries(CAMERAS.map(c => [c.id, null])));
  const latestTsRef = useRef(Object.fromEntries(CAMERAS.map(c => [c.id, 0])));
  // Mirror of activeId readable from inside the long-lived record loop.
  // Fixes a stale-closure bug: the original captured activeId when recording
  // started, so switching cameras mid-recording kept recording the old feed
  // while the download filename (built at stop time) named the new one.
  const activeIdRef = useRef('front');

  useEffect(() => { activeIdRef.current = activeId; }, [activeId]);

  // ── FPS counter ────────────────────────────────────────────────────────────
  // Publish per-camera frame counts once per FPS_INTERVAL, then reset, so the
  // `fps` state is frames-received-per-second for each camera.
  useEffect(() => {
    const timer = setInterval(() => {
      setFps({ ...countRef.current });
      countRef.current = Object.fromEntries(CAMERAS.map(c => [c.id, 0]));
    }, FPS_INTERVAL);
    return () => clearInterval(timer);
  }, []);

  // ── Subscribe all camera topics ────────────────────────────────────────────
  useEffect(() => {
    if (!subscribe) return;

    const unsubs = CAMERAS.map(cam => {
      let lastTs = 0;
      const interval = Math.floor(1000 / TARGET_FPS); // client-side fps gate

      return subscribe(cam.topic, cam.msgType, (msg) => {
        const now = Date.now();
        if (now - lastTs < interval) return; // drop frames above TARGET_FPS
        lastTs = now;

        // Depth republisher sends PNG (compressedDepth/16UC1); others JPEG.
        const fmt = msg.format || 'jpeg';
        const mime = fmt.includes('png') || fmt.includes('16UC') ? 'image/png' : 'image/jpeg';
        const dataUrl = `data:${mime};base64,${msg.data}`;

        latestFrameRef.current[cam.id] = dataUrl;
        latestTsRef.current[cam.id] = now;
        countRef.current[cam.id] = (countRef.current[cam.id] ?? 0) + 1;

        setFrames(prev => ({ ...prev, [cam.id]: dataUrl }));
      });
    });

    return () => unsubs.forEach(fn => fn?.());
  }, [subscribe]);

  // ── Create hidden record canvas; tear down recording machinery on unmount ─
  useEffect(() => {
    const c = document.createElement('canvas');
    c.width = 640;
    c.height = 480;
    c.style.display = 'none';
    document.body.appendChild(c);
    recordCanvas.current = c;
    return () => {
      // Stop any in-flight recording so the draw loop, the seconds timer and
      // the encoder do not outlive the hook (the original leaked all three).
      if (recAnimRef.current != null) clearTimeout(recAnimRef.current);
      recAnimRef.current = null;
      if (recTimerRef.current != null) clearInterval(recTimerRef.current);
      recTimerRef.current = null;
      if (mediaRecRef.current && mediaRecRef.current.state !== 'inactive') {
        mediaRecRef.current.stop();
      }
      c.remove();
    };
  }, []);

  // ── Draw loop for record canvas ────────────────────────────────────────────
  // Runs at TARGET_FPS while recording — draws the active frame to the hidden
  // canvas. Uses setTimeout exclusively: the original mixed a
  // requestAnimationFrame id and setTimeout ids in the same ref, then called
  // both clearTimeout and cancelAnimationFrame on it — the two id spaces
  // overlap, so that could cancel an unrelated timer and/or leak the loop.
  const startRecordLoop = useCallback(() => {
    const canvas = recordCanvas.current;
    if (!canvas) return;
    const ctx = canvas.getContext('2d'); // hoisted — same context every tick

    const step = () => {
      const id = activeIdRef.current; // follow camera switches mid-recording
      const cam = CAMERA_BY_ID[id];
      const src = latestFrameRef.current[id];

      if (cam && src) {
        // Resize canvas to match source dimensions.
        if (canvas.width !== cam.width || canvas.height !== cam.height) {
          canvas.width = cam.width;
          canvas.height = cam.height;
        }
        const img = new Image();
        // Decode is async; at 15fps a late decode paints one tick late, which
        // is imperceptible and keeps the main thread unblocked.
        img.onload = () => {
          ctx.drawImage(img, 0, 0, canvas.width, canvas.height);
        };
        img.src = src;
      }

      recAnimRef.current = setTimeout(step, Math.floor(1000 / TARGET_FPS));
    };

    recAnimRef.current = setTimeout(step, 0);
  }, []);

  const stopRecordLoop = useCallback(() => {
    if (recAnimRef.current != null) {
      clearTimeout(recAnimRef.current);
      recAnimRef.current = null;
    }
  }, []);

  // ── Recording ──────────────────────────────────────────────────────────────

  const startRecording = useCallback(() => {
    const canvas = recordCanvas.current;
    if (!canvas || recording) return;
    // No-op (instead of throwing) on browsers without capture/record support.
    if (typeof MediaRecorder === 'undefined' || typeof canvas.captureStream !== 'function') return;

    startRecordLoop();

    const stream = canvas.captureStream(TARGET_FPS);
    // Prefer MP4 where supported (Safari), else best available WebM codec.
    const mimeType =
      MediaRecorder.isTypeSupported('video/mp4') ? 'video/mp4' :
      MediaRecorder.isTypeSupported('video/webm;codecs=vp9') ? 'video/webm;codecs=vp9' :
      MediaRecorder.isTypeSupported('video/webm;codecs=vp8') ? 'video/webm;codecs=vp8' :
      'video/webm';

    chunksRef.current = [];
    const mr = new MediaRecorder(stream, { mimeType, videoBitsPerSecond: 2_500_000 });
    mr.ondataavailable = e => { if (e.data?.size > 0) chunksRef.current.push(e.data); };
    mr.start(200); // 200ms timeslice: frequent chunks limit loss on crash
    mediaRecRef.current = mr;
    setRecording(true);
    setRecSeconds(0);
    recTimerRef.current = setInterval(() => setRecSeconds(s => s + 1), 1000);
  }, [recording, startRecordLoop]);

  const stopRecording = useCallback(() => {
    const mr = mediaRecRef.current;
    if (!mr || mr.state === 'inactive') return;

    // onstop fires after the final ondataavailable, so chunksRef is complete.
    mr.onstop = () => {
      const ext = mr.mimeType.includes('mp4') ? 'mp4' : 'webm';
      const blob = new Blob(chunksRef.current, { type: mr.mimeType });
      chunksRef.current = []; // release chunk memory once blobbed
      const url = URL.createObjectURL(blob);
      const a = document.createElement('a');
      a.href = url;
      a.download = `saltybot-${activeId}-${Date.now()}.${ext}`;
      a.click();
      URL.revokeObjectURL(url);
    };

    mr.stop();
    stopRecordLoop();
    mediaRecRef.current = null;
    clearInterval(recTimerRef.current);
    recTimerRef.current = null;
    setRecording(false);
  }, [activeId, stopRecordLoop]);

  // ── Snapshot ───────────────────────────────────────────────────────────────

  /**
   * Save the active camera's latest frame as a PNG download, compositing an
   * optional detection-overlay canvas and a timestamp watermark.
   * @param {HTMLCanvasElement} [overlayCanvasEl] - overlay to composite on top.
   */
  const takeSnapshot = useCallback((overlayCanvasEl) => {
    const src = latestFrameRef.current[activeId];
    if (!src) return; // no frame received yet

    const cam = CAMERA_BY_ID[activeId];
    const canvas = document.createElement('canvas');
    canvas.width = cam.width;
    canvas.height = cam.height;
    const ctx = canvas.getContext('2d');

    const img = new Image();
    img.onload = () => {
      ctx.drawImage(img, 0, 0, canvas.width, canvas.height);

      // Composite detection overlay if provided (stretched to frame size).
      if (overlayCanvasEl) {
        ctx.drawImage(overlayCanvasEl, 0, 0, canvas.width, canvas.height);
      }

      // Timestamp watermark strip along the bottom edge.
      ctx.fillStyle = 'rgba(0,0,0,0.5)';
      ctx.fillRect(0, canvas.height - 20, canvas.width, 20);
      ctx.fillStyle = '#06b6d4';
      ctx.font = '11px monospace';
      ctx.fillText(`SALTYBOT ${cam.label} ${new Date().toISOString()}`, 8, canvas.height - 6);

      canvas.toBlob(blob => {
        if (!blob) return; // toBlob may yield null (encode failure)
        const url = URL.createObjectURL(blob);
        const a = document.createElement('a');
        a.href = url;
        a.download = `saltybot-snap-${activeId}-${Date.now()}.png`;
        a.click();
        URL.revokeObjectURL(url);
      }, 'image/png');
    };
    img.src = src;
  }, [activeId]);

  // ── PiP management ─────────────────────────────────────────────────────────

  // Toggle a camera in the picture-in-picture list; the active camera is
  // never a PiP, and at most the 3 most recently added PiPs are kept.
  const togglePip = useCallback(id => {
    setPipList(prev => {
      if (prev.includes(id)) return prev.filter(x => x !== id);
      const next = [...prev, id].filter(x => x !== activeId);
      return next.slice(-3); // max 3 PiPs
    });
  }, [activeId]);

  // Remove a PiP if it becomes the active camera.
  useEffect(() => {
    setPipList(prev => prev.filter(id => id !== activeId));
  }, [activeId]);

  return {
    cameras: CAMERAS,
    frames,
    fps,
    activeId, setActiveId,
    pipList, togglePip,
    recording, recSeconds,
    startRecording, stopRecording,
    takeSnapshot,
  };
}
|
||||
Loading…
x
Reference in New Issue
Block a user