sl-controls 5604670646 feat: Implement deaf/accessibility mode with STT, touch keyboard, TTS (Issue #371)
Accessibility mode for hearing-impaired users:
- Speech-to-text display: Integrates with saltybot_social speech_pipeline_node
- Touch keyboard overlay: 1024×600 layout optimized for the MageDok 7-inch display
- TTS output: Routes to MageDok speakers via PulseAudio
- Web UI server: Responsive keyboard interface with real-time display updates
- Auto-confirm: Optional TTS feedback for spoken input
- Physical keyboard support: accepts input from both the touch overlay and a physical keyboard

Features:
- Keyboard buffer with backspace/clear/send controls
- Transcript history display (max 10 entries)
- Status indicators for STT/TTS ready state
- Number/symbol support (1-5, punctuation)
- HTML/CSS responsive design optimized for touch
- ROS2 integration via /social/speech/transcript and /social/conversation/request

Launch: ros2 launch saltybot_accessibility_mode accessibility_mode.launch.py
UI Port: 8080 (MageDok display access)
Config: config/accessibility_params.yaml

Co-Authored-By: Claude Haiku 4.5 <noreply@anthropic.com>
2026-03-03 18:17:41 -05:00

174 lines
5.6 KiB
JavaScript

/**
 * Accessibility Mode UI - Touch Keyboard & STT Display
 *
 * Drives the on-screen touch keyboard, mirrors speech-to-text transcripts,
 * and relays keyboard input to the UI server via POST /api/keyboard.
 * Display state (transcript history) is refreshed by polling GET /api/display
 * every 500 ms. Supports both touch and physical keyboard input.
 */
class AccessibilityUI {
constructor() {
// Text the user has typed but not yet sent.
this.keyboardBuffer = '';
// Last display payload received from the server; `history` entries are
// objects with { type: 'stt' | <other>, text, final } — assumed from usage
// in renderTranscriptHistory/updateStatusIndicators; confirm against server.
this.displayData = { history: [], keyboard_buffer: '' };
// Reserved for a future WebSocket transport; polling is used for now.
this.wsConnected = false;
this.initializeElements();
this.attachEventListeners();
this.startPolling();
}
/** Cache DOM references for the display panels and keyboard controls. */
initializeElements() {
this.keyboardBufferEl = document.getElementById('keyboard-buffer');
this.transcriptDisplay = document.getElementById('transcript-display');
this.sttStatusEl = document.getElementById('stt-status');
this.ttsStatusEl = document.getElementById('tts-status');
this.modeIndicatorEl = document.getElementById('mode-indicator');
// Keyboard buttons
this.keyButtons = document.querySelectorAll('.key[data-char]');
this.backspaceBtn = document.getElementById('backspace-btn');
this.spaceBtn = document.getElementById('space-btn');
this.clearBtn = document.getElementById('clear-btn');
this.sendBtn = document.getElementById('send-btn');
}
/** Wire up touch, click, and physical-keyboard input handlers. */
attachEventListeners() {
// Character keys: handle touch directly on touchstart and suppress the
// browser's emulated click so a tap does not enter the character twice.
// (BUG FIX: the original registered 'touch start' — an invalid event
// name that never fired, leaving touch handled only via synthetic click.)
this.keyButtons.forEach(btn => {
btn.addEventListener('click', () => this.inputCharacter(btn.dataset.char));
btn.addEventListener('touchstart', (e) => {
e.preventDefault();
this.inputCharacter(btn.dataset.char);
}, { passive: false });
});
// Special keys
this.backspaceBtn.addEventListener('click', () => this.backspace());
this.spaceBtn.addEventListener('click', () => this.inputCharacter(' '));
this.clearBtn.addEventListener('click', () => this.clearBuffer());
this.sendBtn.addEventListener('click', () => this.sendInput());
// Physical keyboard support
document.addEventListener('keydown', (e) => this.handlePhysicalKey(e));
}
/**
 * Append one character (upper-cased) to the buffer and notify the server.
 * @param {string} char - Single character from a key's data-char attribute.
 */
inputCharacter(char) {
this.keyboardBuffer += char.toUpperCase();
this.updateDisplay();
this.sendToROS('', false); // Update display on ROS
}
/** Remove the last buffered character and notify the server. */
backspace() {
this.keyboardBuffer = this.keyboardBuffer.slice(0, -1);
this.updateDisplay();
this.sendToROS('', false);
}
/** Empty the buffer and send the [CLEAR] command to the server. */
clearBuffer() {
this.keyboardBuffer = '';
this.updateDisplay();
this.sendToROS('[SEND]' === '[CLEAR]' ? '' : '[CLEAR]', false);
}
/** Submit the buffer (if non-blank) as a final [SEND], then reset it. */
sendInput() {
if (this.keyboardBuffer.trim()) {
// Send BEFORE clearing so sendToROS still sees the typed text.
this.sendToROS('[SEND]', true);
this.keyboardBuffer = '';
this.updateDisplay();
}
}
/**
 * Route physical-keyboard keystrokes into the same input path as touch.
 * Ignores events targeted at focusable elements (only body-level keys).
 * @param {KeyboardEvent} e
 */
handlePhysicalKey(e) {
if (e.target !== document.body) return;
const char = e.key.toUpperCase();
if (e.key === 'Backspace') {
e.preventDefault();
this.backspace();
} else if (e.key === 'Enter') {
e.preventDefault();
this.sendInput();
} else if (char.match(/^[A-Z0-9 .,!?]$/)) {
// BUG FIX: prevent default so the space bar no longer scrolls the page.
e.preventDefault();
this.inputCharacter(char);
}
}
/** Refresh the buffer readout and the transcript panel. */
updateDisplay() {
this.keyboardBufferEl.textContent = this.keyboardBuffer || '(empty)';
this.renderTranscriptHistory();
}
/** Re-render the transcript list from displayData.history (HTML-escaped). */
renderTranscriptHistory() {
if (!this.displayData.history) return;
let html = '';
this.displayData.history.forEach(entry => {
const cls = entry.type === 'stt' ? 'transcript-stt' : 'transcript-keyboard';
const icon = entry.type === 'stt' ? '🎤' : '⌨️';
const text = entry.text || '';
html += `<div class="transcript-entry ${cls}">${icon} ${this.escapeHtml(text)}</div>`;
});
if (!html) {
html = '<div class="placeholder">Waiting for speech...</div>';
}
this.transcriptDisplay.innerHTML = html;
// Auto-scroll to bottom
this.transcriptDisplay.scrollTop = this.transcriptDisplay.scrollHeight;
}
/**
 * POST the current keyboard state to the UI server's /api/keyboard endpoint.
 * @param {string} command - '' (buffer update), '[CLEAR]', or '[SEND]'.
 * @param {boolean} isFinal - True when the buffer is being submitted.
 */
sendToROS(command, isFinal) {
// This will be called when we have a WebSocket connection to the ROS2 bridge
const data = {
command: command,
buffer: this.keyboardBuffer,
timestamp: Date.now(),
final: isFinal
};
console.log('Sending to ROS:', data);
// Send via fetch API to UI server
fetch('/api/keyboard', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(data)
}).catch(err => console.error('ROS send error:', err));
}
/** Begin polling the server for display updates every 500 ms. */
startPolling() {
// Poll for display updates from ROS2
setInterval(() => this.pollDisplayUpdate(), 500);
}
/** Fetch /api/display and, on success, re-render transcripts and status. */
pollDisplayUpdate() {
fetch('/api/display')
.then(res => res.json())
.then(data => {
if (data && data.history) {
this.displayData = data;
this.renderTranscriptHistory();
this.updateStatusIndicators();
}
})
.catch(err => console.error('Display poll error:', err));
}
/** Update the STT status label from the most recent transcript entry. */
updateStatusIndicators() {
// Update STT/TTS status based on display data
if (this.displayData.history && this.displayData.history.length > 0) {
const lastEntry = this.displayData.history[this.displayData.history.length - 1];
if (lastEntry.type === 'stt') {
this.sttStatusEl.textContent = lastEntry.final ? 'Complete' : 'Listening...';
}
}
}
/**
 * HTML-escape untrusted text by round-tripping it through a detached node.
 * @param {string} text
 * @returns {string} Escaped HTML-safe string.
 */
escapeHtml(text) {
const div = document.createElement('div');
div.textContent = text;
return div.innerHTML;
}
}
// Bootstrap: construct the UI once the document has been parsed, and expose
// the instance globally for debugging from the browser console.
document.addEventListener('DOMContentLoaded', function bootstrapAccessibilityUI() {
window.accessibilityUI = new AccessibilityUI();
});