UI (src/hooks/useCamera.js, src/components/CameraViewer.jsx):
- 7 camera sources: front/left/rear/right CSI, D435i RGB/depth, panoramic
- Compressed image subscription via rosbridge (sensor_msgs/CompressedImage)
- Client-side 15fps gate (drops excess frames, reduces JS pressure)
- Per-camera FPS indicator with quality badge (FULL/GOOD/LOW/NO SIGNAL)
- Detection overlays: face boxes + names (/social/faces/detections),
gesture icons (/social/gestures), scene object labels + hazard colours
(/social/scene/objects); overlay mode selector (off/faces/gestures/objects/all)
- 360° panoramic equirect viewer with mouse/touch drag azimuth pan
- Picture-in-picture: up to 3 pinned cameras via ⊕ button
- One-click recording (MediaRecorder → MP4/WebM download)
- Snapshot to PNG with detection overlay composite + timestamp watermark
- Cameras tab added to TELEMETRY group in App.jsx
Jetson (rosbridge bringup):
- rosbridge_params.yaml: whitelist + /camera/depth/image_rect_raw/compressed,
/camera/panoramic/compressed, /social/faces/detections,
/social/gestures, /social/scene/objects
- rosbridge.launch.py: D435i colour republisher (JPEG 75%) +
depth republisher (compressedDepth/PNG16 preserving uint16 values)
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
326 lines
12 KiB
JavaScript
/**
 * useCamera.js — Multi-camera stream manager (Issue #177).
 *
 * Subscribes to sensor_msgs/CompressedImage topics via rosbridge.
 * Decodes base64 JPEG/PNG → data URL for <img>/<canvas> display.
 * Tracks per-camera FPS. Manages MediaRecorder for recording + snapshots.
 *
 * Camera sources:
 *   front / left / rear / right — 4× CSI IMX219, 640×480
 *     topic: /camera/<name>/image_raw/compressed
 *   color — D435i RGB, 640×480
 *     topic: /camera/color/image_raw/compressed
 *   depth — D435i depth, 640×480 greyscale (PNG16)
 *     topic: /camera/depth/image_rect_raw/compressed
 *   panoramic — equirect stitch 1920×960
 *     topic: /camera/panoramic/compressed
 */
import { useState, useEffect, useRef, useCallback } from 'react';
|
||
|
||
// ── Camera catalogue ──────────────────────────────────────────────────────────

/**
 * Descriptor for a camera whose topic follows the standard
 * /camera/<id>/image_raw/compressed naming (four CSI cams + D435i RGB).
 * `cameraId` matches gesture_node's camera_id.
 */
const standardCamera = (id, label, shortLabel, cameraId) => ({
  id,
  label,
  shortLabel,
  topic: `/camera/${id}/image_raw/compressed`,
  msgType: 'sensor_msgs/CompressedImage',
  cameraId,
  width: 640, height: 480,
});

export const CAMERAS = [
  standardCamera('front', 'Front', 'F', 0),
  standardCamera('left', 'Left', 'L', 1),
  standardCamera('rear', 'Rear', 'R', 2),
  standardCamera('right', 'Right', 'Rt', 3),
  standardCamera('color', 'D435i RGB', 'D', 4),
  {
    // D435i depth stream — rectified topic, PNG16-compressed greyscale.
    id: 'depth',
    label: 'Depth',
    shortLabel: '≋',
    topic: '/camera/depth/image_rect_raw/compressed',
    msgType: 'sensor_msgs/CompressedImage',
    cameraId: 5,
    width: 640, height: 480,
    isDepth: true,
  },
  {
    // 360° equirectangular stitch; -1 = no gesture_node camera_id.
    id: 'panoramic',
    label: 'Panoramic',
    shortLabel: '360',
    topic: '/camera/panoramic/compressed',
    msgType: 'sensor_msgs/CompressedImage',
    cameraId: -1,
    width: 1920, height: 960,
    isPanoramic: true,
  },
];
||
|
||
// ── Derived lookups ───────────────────────────────────────────────────────────

// Camera descriptor keyed by string id ('front', 'depth', …).
const byId = {};
// Camera descriptor keyed by numeric ROS camera_id (panoramic's -1 excluded).
const byRosId = {};
for (const cam of CAMERAS) {
  byId[cam.id] = cam;
  if (cam.cameraId >= 0) byRosId[cam.cameraId] = cam;
}

export const CAMERA_BY_ID = byId;
export const CAMERA_BY_ROS_ID = byRosId;

// Client-side frame-rate cap: incoming frames above this rate are dropped.
const TARGET_FPS = 15;
const FPS_INTERVAL = 1000; // ms between FPS counter resets
// ── Hook ──────────────────────────────────────────────────────────────────────

/**
 * Multi-camera stream manager hook.
 *
 * @param {Object}   [options]
 * @param {Function} [options.subscribe] - rosbridge subscription helper:
 *   subscribe(topic, msgType, callback) → unsubscribe function. When omitted,
 *   no topics are subscribed and all frames stay null.
 * @returns {Object} { cameras, frames, fps, activeId, setActiveId, pipList,
 *   togglePip, recording, recSeconds, startRecording, stopRecording,
 *   takeSnapshot }
 */
export function useCamera({ subscribe } = {}) {
  // Latest decoded frame (data URL) per camera id — drives <img>/<canvas>.
  const [frames, setFrames] = useState(() =>
    Object.fromEntries(CAMERAS.map(c => [c.id, null]))
  );
  // Frames/second measured over the previous FPS_INTERVAL window, per camera.
  const [fps, setFps] = useState(() =>
    Object.fromEntries(CAMERAS.map(c => [c.id, 0]))
  );
  const [activeId, setActiveId] = useState('front');
  const [pipList, setPipList] = useState([]); // up to 3 extra camera ids
  const [recording, setRecording] = useState(false);
  const [recSeconds, setRecSeconds] = useState(0);

  // ── Refs (not state — no re-render needed) ─────────────────────────────────
  const countRef = useRef(Object.fromEntries(CAMERAS.map(c => [c.id, 0])));
  const mediaRecRef = useRef(null);
  const chunksRef = useRef([]);
  const recTimerRef = useRef(null);  // 1 Hz interval driving recSeconds
  const recordCanvas = useRef(null); // hidden canvas used for recording
  const recLoopRef = useRef(null);   // setTimeout handle for record-canvas loop
  const latestFrameRef = useRef(Object.fromEntries(CAMERAS.map(c => [c.id, null])));
  const latestTsRef = useRef(Object.fromEntries(CAMERAS.map(c => [c.id, 0])));

  // Mirror of activeId readable from inside the long-lived record loop.
  // Without it the loop would close over the activeId captured when recording
  // started, so switching cameras mid-recording kept recording the old one.
  const activeIdRef = useRef(activeId);
  useEffect(() => { activeIdRef.current = activeId; }, [activeId]);

  // ── FPS counter ────────────────────────────────────────────────────────────
  // Once per window: publish the per-camera message counts, then zero them.
  useEffect(() => {
    const timer = setInterval(() => {
      setFps({ ...countRef.current });
      countRef.current = Object.fromEntries(CAMERAS.map(c => [c.id, 0]));
    }, FPS_INTERVAL);
    return () => clearInterval(timer);
  }, []);

  // ── Subscribe all camera topics ────────────────────────────────────────────
  useEffect(() => {
    if (!subscribe) return;

    const unsubs = CAMERAS.map(cam => {
      let lastTs = 0;
      const interval = Math.floor(1000 / TARGET_FPS); // client-side fps gate

      return subscribe(cam.topic, cam.msgType, (msg) => {
        const now = Date.now();
        if (now - lastTs < interval) return; // drop frames above TARGET_FPS
        lastTs = now;

        // compressedDepth frames carry formats like "16UC1; compressedDepth png",
        // so both the 'png' and '16UC' checks route them to image/png.
        const fmt = msg.format || 'jpeg';
        const mime = fmt.includes('png') || fmt.includes('16UC') ? 'image/png' : 'image/jpeg';
        const dataUrl = `data:${mime};base64,${msg.data}`;

        latestFrameRef.current[cam.id] = dataUrl;
        latestTsRef.current[cam.id] = now;
        countRef.current[cam.id] = (countRef.current[cam.id] ?? 0) + 1;

        setFrames(prev => ({ ...prev, [cam.id]: dataUrl }));
      });
    });

    return () => unsubs.forEach(fn => fn?.());
  }, [subscribe]);

  // ── Create hidden record canvas ────────────────────────────────────────────
  useEffect(() => {
    const c = document.createElement('canvas');
    c.width = 640;
    c.height = 480;
    c.style.display = 'none';
    document.body.appendChild(c);
    recordCanvas.current = c;
    return () => { c.remove(); };
  }, []);

  // Safety net: if the component unmounts mid-recording, stop the draw loop,
  // the recSeconds interval, and the MediaRecorder so nothing keeps firing
  // (and calling setState) against an unmounted tree.
  useEffect(() => () => {
    if (recLoopRef.current !== null) clearTimeout(recLoopRef.current);
    if (recTimerRef.current !== null) clearInterval(recTimerRef.current);
    const mr = mediaRecRef.current;
    if (mr && mr.state !== 'inactive') mr.stop();
  }, []);

  // ── Draw loop for record canvas ────────────────────────────────────────────
  // Runs at TARGET_FPS while recording — draws the active camera's latest
  // frame onto the hidden canvas that feeds MediaRecorder's captureStream.
  const startRecordLoop = useCallback(() => {
    const canvas = recordCanvas.current;
    if (!canvas) return;

    const ctx = canvas.getContext('2d'); // stable per canvas; hoisted out of the loop
    const frameMs = Math.floor(1000 / TARGET_FPS);

    const step = () => {
      const id = activeIdRef.current;
      const cam = CAMERA_BY_ID[id];
      const src = latestFrameRef.current[id];

      if (cam && src) {
        // Resize canvas to match source (e.g. after switching to panoramic).
        if (canvas.width !== cam.width || canvas.height !== cam.height) {
          canvas.width = cam.width;
          canvas.height = cam.height;
        }
        const img = new Image();
        img.onload = () => {
          ctx.drawImage(img, 0, 0, canvas.width, canvas.height);
        };
        img.src = src;
      }

      // Single timer type throughout. (Previously this mixed
      // requestAnimationFrame and setTimeout ids in one ref; the two id pools
      // are independent, so cancellation could miss and leak the loop.)
      recLoopRef.current = setTimeout(step, frameMs);
    };

    recLoopRef.current = setTimeout(step, 0);
  }, []);

  const stopRecordLoop = useCallback(() => {
    if (recLoopRef.current !== null) {
      clearTimeout(recLoopRef.current);
      recLoopRef.current = null;
    }
  }, []);

  // ── Recording ──────────────────────────────────────────────────────────────

  /** Start recording the active camera; no-op if already recording. */
  const startRecording = useCallback(() => {
    const canvas = recordCanvas.current;
    if (!canvas || recording) return;

    startRecordLoop();

    const stream = canvas.captureStream(TARGET_FPS);
    // Container preference: mp4 (Safari) → webm/vp9 → webm/vp8 → generic webm.
    const mimeType =
      MediaRecorder.isTypeSupported('video/mp4') ? 'video/mp4' :
      MediaRecorder.isTypeSupported('video/webm;codecs=vp9') ? 'video/webm;codecs=vp9' :
      MediaRecorder.isTypeSupported('video/webm;codecs=vp8') ? 'video/webm;codecs=vp8' :
      'video/webm';

    chunksRef.current = [];
    const mr = new MediaRecorder(stream, { mimeType, videoBitsPerSecond: 2_500_000 });
    mr.ondataavailable = e => { if (e.data?.size > 0) chunksRef.current.push(e.data); };
    mr.start(200); // flush a chunk every 200 ms so data survives abrupt stops
    mediaRecRef.current = mr;
    setRecording(true);
    setRecSeconds(0);
    recTimerRef.current = setInterval(() => setRecSeconds(s => s + 1), 1000);
  }, [recording, startRecordLoop]);

  /** Stop recording and trigger a browser download of the captured clip. */
  const stopRecording = useCallback(() => {
    const mr = mediaRecRef.current;
    if (!mr || mr.state === 'inactive') return;

    // onstop fires after the final dataavailable, so chunksRef is complete here.
    mr.onstop = () => {
      const ext = mr.mimeType.includes('mp4') ? 'mp4' : 'webm';
      const blob = new Blob(chunksRef.current, { type: mr.mimeType });
      const url = URL.createObjectURL(blob);
      const a = document.createElement('a');
      a.href = url;
      a.download = `saltybot-${activeId}-${Date.now()}.${ext}`;
      a.click();
      URL.revokeObjectURL(url);
    };

    mr.stop();
    mediaRecRef.current = null;
    stopRecordLoop();
    clearInterval(recTimerRef.current);
    recTimerRef.current = null;
    setRecording(false);
  }, [activeId, stopRecordLoop]);

  // ── Snapshot ───────────────────────────────────────────────────────────────

  /**
   * Capture the active camera's latest frame to a PNG download.
   * @param {HTMLCanvasElement} [overlayCanvasEl] - optional detection-overlay
   *   canvas composited on top of the frame before the watermark is drawn.
   */
  const takeSnapshot = useCallback((overlayCanvasEl) => {
    const src = latestFrameRef.current[activeId];
    if (!src) return;

    const cam = CAMERA_BY_ID[activeId];
    const canvas = document.createElement('canvas');
    canvas.width = cam.width;
    canvas.height = cam.height;
    const ctx = canvas.getContext('2d');

    const img = new Image();
    img.onload = () => {
      ctx.drawImage(img, 0, 0, canvas.width, canvas.height);

      // Composite detection overlay if provided
      if (overlayCanvasEl) {
        ctx.drawImage(overlayCanvasEl, 0, 0, canvas.width, canvas.height);
      }

      // Timestamp watermark
      ctx.fillStyle = 'rgba(0,0,0,0.5)';
      ctx.fillRect(0, canvas.height - 20, canvas.width, 20);
      ctx.fillStyle = '#06b6d4';
      ctx.font = '11px monospace';
      ctx.fillText(`SALTYBOT ${cam.label} ${new Date().toISOString()}`, 8, canvas.height - 6);

      canvas.toBlob(blob => {
        if (!blob) return; // PNG encoding failed — nothing to download
        const url = URL.createObjectURL(blob);
        const a = document.createElement('a');
        a.href = url;
        a.download = `saltybot-snap-${activeId}-${Date.now()}.png`;
        a.click();
        URL.revokeObjectURL(url);
      }, 'image/png');
    };
    img.src = src;
  }, [activeId]);

  // ── PiP management ─────────────────────────────────────────────────────────

  /** Toggle a camera in/out of the picture-in-picture strip (max 3 pins). */
  const togglePip = useCallback(id => {
    setPipList(prev => {
      if (prev.includes(id)) return prev.filter(x => x !== id);
      const next = [...prev, id].filter(x => x !== activeId);
      return next.slice(-3); // max 3 PIPs — oldest pin is evicted
    });
  }, [activeId]);

  // Remove PiP if it becomes the active camera
  useEffect(() => {
    setPipList(prev => prev.filter(id => id !== activeId));
  }, [activeId]);

  return {
    cameras: CAMERAS,
    frames,
    fps,
    activeId, setActiveId,
    pipList, togglePip,
    recording, recSeconds,
    startRecording, stopRecording,
    takeSnapshot,
  };
}