diff --git a/include/bno055.h b/include/bno055.h index e0cae23..4ed18a3 100644 --- a/include/bno055.h +++ b/include/bno055.h @@ -97,3 +97,9 @@ bool bno055_save_offsets(void); bool bno055_restore_offsets(void); #endif /* BNO055_H */ + +/* + * bno055_calibrated() — check if IMU is sufficiently calibrated. + * Returns true if all sensors have calibration level >= 1. + */ +bool bno055_calibrated(void); diff --git a/include/crsf.h b/include/crsf.h index d333925..dbc7af2 100644 --- a/include/crsf.h +++ b/include/crsf.h @@ -67,3 +67,9 @@ void crsf_send_flight_mode(bool armed); extern volatile CRSFState crsf_state; #endif /* CRSF_H */ + +/* + * crsf_is_active(now_ms) — check if CRSF link is active. + * Returns true if a valid frame was received within the last 100ms. + */ +bool crsf_is_active(uint32_t now_ms); diff --git a/include/i2c1.h b/include/i2c1.h index c4c7859..4fb3b3e 100644 --- a/include/i2c1.h +++ b/include/i2c1.h @@ -15,3 +15,6 @@ extern I2C_HandleTypeDef hi2c1; int i2c1_init(void); #endif /* I2C1_H */ + +int i2c1_write(uint16_t addr, const uint8_t *data, uint16_t len); +int i2c1_read(uint16_t addr, uint8_t *data, uint16_t len); diff --git a/jetson/ros2_ws/src/saltybot_bringup/saltybot_bringup/_path_edges.py b/jetson/ros2_ws/src/saltybot_bringup/saltybot_bringup/_path_edges.py new file mode 100644 index 0000000..6fa0fd8 --- /dev/null +++ b/jetson/ros2_ws/src/saltybot_bringup/saltybot_bringup/_path_edges.py @@ -0,0 +1,383 @@ +""" +_path_edges.py — Lane/path edge detection via Canny + Hough + bird-eye +perspective transform (no ROS2 deps). + +Algorithm +--------- +1. Crop the bottom `roi_frac` of the BGR image to an ROI. +2. Convert ROI to grayscale → Gaussian blur → Canny edge map. +3. Run probabilistic Hough (HoughLinesP) on the edge map to find + line segments. +4. 
Filter segments by slope: near-horizontal lines (|slope| < min_slope) + are discarded as ground noise; the remaining lines are classified as + left edges (negative slope) or right edges (positive slope). +5. Average each class into a single dominant edge line and extrapolate it + to span the full ROI height. +6. Apply a bird-eye perspective homography (computed from a configurable + source trapezoid in the ROI) to warp all segment endpoints to a + top-down view. +7. Return a PathEdgesResult with all data. + +Coordinate conventions +----------------------- +All pixel coordinates in PathEdgesResult are in the ROI frame: + origin = top-left of the bottom-half ROI + y = 0 → roi_top of the full image + y increases downward; x increases rightward. + +Bird-eye homography +------------------- +The homography H is computed via cv2.getPerspectiveTransform from four +source points (fractional ROI coords) to four destination points in a +square output image of size `birdseye_size`. Default source trapezoid +assumes a forward-looking camera at ~35–45° tilt above a flat surface. 
+ +Public API +---------- + PathEdgeConfig dataclass — all tunable parameters with sensible defaults + PathEdgesResult NamedTuple — all outputs of process_frame() + build_homography(src_frac, roi_w, roi_h, birdseye_size) + → H 3×3 float64 homography (ROI px → bird-eye px) + apply_homography(H, points_xy) → np.ndarray (N, 2) warped points + canny_edges(bgr_roi, low, high, ksize) → uint8 edge map + hough_lines(edge_map, threshold, min_len, max_gap) → List[(x1,y1,x2,y2)] + classify_lines(lines, min_slope) → (left, right) + average_line(lines, roi_height) → Optional[(x1,y1,x2,y2)] + warp_segments(lines, H) → List[(bx1,by1,bx2,by2)] + process_frame(bgr, cfg) → PathEdgesResult +""" + +from __future__ import annotations + +import math +from dataclasses import dataclass, field +from typing import List, NamedTuple, Optional, Tuple + +import cv2 +import numpy as np + + +# ── Config ──────────────────────────────────────────────────────────────────── + +@dataclass +class PathEdgeConfig: + # ROI + roi_frac: float = 0.50 # bottom fraction of image used as ROI + + # Preprocessing + blur_ksize: int = 5 # Gaussian blur kernel (odd integer) + canny_low: int = 50 # Canny low threshold + canny_high: int = 150 # Canny high threshold + + # Hough + hough_rho: float = 1.0 # distance resolution (px) + hough_theta: float = math.pi / 180.0 # angle resolution (rad) + hough_threshold: int = 30 # minimum votes + min_line_len: int = 40 # minimum segment length (px) + max_line_gap: int = 20 # maximum gap within a segment (px) + + # Line classification + min_slope: float = 0.3 # |slope| below this → discard (near-horizontal) + + # Bird-eye perspective — source trapezoid as fractions of (roi_w, roi_h) + # Default: wide trapezoid for forward-looking camera at ~40° tilt + birdseye_src: List[List[float]] = field(default_factory=lambda: [ + [0.40, 0.05], # top-left (near horizon) + [0.60, 0.05], # top-right (near horizon) + [0.95, 0.95], # bottom-right (near robot) + [0.05, 0.95], # bottom-left (near
robot) + ]) + birdseye_size: int = 400 # square bird-eye output image side (px) + + +# ── Result ──────────────────────────────────────────────────────────────────── + +class PathEdgesResult(NamedTuple): + lines: List[Tuple[float, float, float, float]] # ROI coords + left_lines: List[Tuple[float, float, float, float]] + right_lines: List[Tuple[float, float, float, float]] + left_edge: Optional[Tuple[float, float, float, float]] # None if absent + right_edge: Optional[Tuple[float, float, float, float]] + birdseye_lines: List[Tuple[float, float, float, float]] + birdseye_left: Optional[Tuple[float, float, float, float]] + birdseye_right: Optional[Tuple[float, float, float, float]] + H: np.ndarray # 3×3 homography (ROI → bird-eye) + roi_top: int # y-offset of ROI in the full image + + +# ── Homography ──────────────────────────────────────────────────────────────── + +def build_homography( + src_frac: List[List[float]], + roi_w: int, + roi_h: int, + birdseye_size: int, +) -> np.ndarray: + """ + Compute a perspective homography from ROI pixel coords to a square + bird-eye image. + + Parameters + ---------- + src_frac : four [fx, fy] pairs as fractions of (roi_w, roi_h) + roi_w, roi_h : ROI dimensions in pixels + birdseye_size : side length of the square output image + + Returns + ------- + H : (3, 3) float64 homography matrix; maps ROI px → bird-eye px. + """ + src = np.array( + [[fx * roi_w, fy * roi_h] for fx, fy in src_frac], + dtype=np.float32, + ) + S = float(birdseye_size) + dst = np.array([ + [S * 0.25, 0.0], + [S * 0.75, 0.0], + [S * 0.75, S], + [S * 0.25, S], + ], dtype=np.float32) + + H = cv2.getPerspectiveTransform(src, dst) + return H.astype(np.float64) + + +def apply_homography(H: np.ndarray, points_xy: np.ndarray) -> np.ndarray: + """ + Apply a 3×3 homography to an (N, 2) float array of 2-D points. + + Returns + ------- + (N, 2) float32 array of transformed points. 
+ """ + if len(points_xy) == 0: + return np.empty((0, 2), dtype=np.float32) + + pts = np.column_stack([points_xy, np.ones(len(points_xy))]) # (N, 3) + warped = (H @ pts.T).T # (N, 3) + warped /= warped[:, 2:3] # homogenise + return warped[:, :2].astype(np.float32) + + +# ── Edge detection ──────────────────────────────────────────────────────────── + +def canny_edges(bgr_roi: np.ndarray, low: int, high: int, ksize: int) -> np.ndarray: + """ + Convert a BGR ROI to a Canny edge map. + + Parameters + ---------- + bgr_roi : uint8 BGR image (the bottom-half ROI) + low : Canny lower threshold + high : Canny upper threshold + ksize : Gaussian blur kernel size (must be odd ≥ 1) + + Returns + ------- + uint8 binary edge map (0 or 255), same spatial size as bgr_roi. + """ + grey = cv2.cvtColor(bgr_roi, cv2.COLOR_BGR2GRAY) + if ksize >= 3 and ksize % 2 == 1: + grey = cv2.GaussianBlur(grey, (ksize, ksize), 0) + return cv2.Canny(grey, low, high) + + +# ── Hough lines ─────────────────────────────────────────────────────────────── + +def hough_lines( + edge_map: np.ndarray, + threshold: int = 30, + min_len: int = 40, + max_gap: int = 20, + rho: float = 1.0, + theta: float = math.pi / 180.0, +) -> List[Tuple[float, float, float, float]]: + """ + Run probabilistic Hough on an edge map. + + Returns + ------- + List of (x1, y1, x2, y2) tuples in the edge_map pixel frame. + Empty list when no lines are found. + """ + raw = cv2.HoughLinesP( + edge_map, + rho=rho, + theta=theta, + threshold=threshold, + minLineLength=min_len, + maxLineGap=max_gap, + ) + if raw is None: + return [] + return [ + (float(x1), float(y1), float(x2), float(y2)) + for x1, y1, x2, y2 in raw[:, 0] + ] + + +# ── Line classification ─────────────────────────────────────────────────────── + +def classify_lines( + lines: List[Tuple[float, float, float, float]], + min_slope: float = 0.3, +) -> Tuple[List, List]: + """ + Classify Hough segments into left-edge and right-edge candidates. 
+ + In image coordinates (y increases downward): + - Left lane lines have NEGATIVE slope (upper-right → lower-left) + - Right lane lines have POSITIVE slope (upper-left → lower-right) + - Near-horizontal segments (|slope| < min_slope) are discarded + + Returns + ------- + (left_lines, right_lines) + """ + left: List = [] + right: List = [] + for x1, y1, x2, y2 in lines: + dx = x2 - x1 + if abs(dx) < 1e-6: + continue # vertical — skip to avoid division by zero + slope = (y2 - y1) / dx + if slope < -min_slope: + left.append((x1, y1, x2, y2)) + elif slope > min_slope: + right.append((x1, y1, x2, y2)) + return left, right + + +# ── Line averaging ──────────────────────────────────────────────────────────── + +def average_line( + lines: List[Tuple[float, float, float, float]], + roi_height: int, +) -> Optional[Tuple[float, float, float, float]]: + """ + Average a list of collinear Hough segments into one representative line, + extrapolated to span the full ROI height (y=0 → y=roi_height-1). + + Returns None if the list is empty or the averaged slope is near-zero. + """ + if not lines: + return None + + slopes: List[float] = [] + intercepts: List[float] = [] + for x1, y1, x2, y2 in lines: + dx = x2 - x1 + if abs(dx) < 1e-6: + continue + m = (y2 - y1) / dx + b = y1 - m * x1 + slopes.append(m) + intercepts.append(b) + + if not slopes: + return None + + m_avg = float(np.mean(slopes)) + b_avg = float(np.mean(intercepts)) + if abs(m_avg) < 1e-6: + return None + + y_bot = float(roi_height - 1) + y_top = 0.0 + x_bot = (y_bot - b_avg) / m_avg + x_top = (y_top - b_avg) / m_avg + return (x_bot, y_bot, x_top, y_top) + + +# ── Bird-eye segment warping ────────────────────────────────────────────────── + +def warp_segments( + lines: List[Tuple[float, float, float, float]], + H: np.ndarray, +) -> List[Tuple[float, float, float, float]]: + """ + Warp a list of line-segment endpoints through the homography H. 
+ + Returns + ------- + List of (bx1, by1, bx2, by2) in bird-eye pixel coordinates. + """ + if not lines: + return [] + pts = np.array( + [[x1, y1] for x1, y1, _, _ in lines] + + [[x2, y2] for _, _, x2, y2 in lines], + dtype=np.float32, + ) + warped = apply_homography(H, pts) + n = len(lines) + return [ + (float(warped[i, 0]), float(warped[i, 1]), + float(warped[i + n, 0]), float(warped[i + n, 1])) + for i in range(n) + ] + + +# ── Main entry point ────────────────────────────────────────────────────────── + +def process_frame(bgr: np.ndarray, cfg: PathEdgeConfig) -> PathEdgesResult: + """ + Run the full lane/path edge detection pipeline on one BGR frame. + + Parameters + ---------- + bgr : uint8 BGR image (H × W × 3) + cfg : PathEdgeConfig + + Returns + ------- + PathEdgesResult with all detected edges in ROI + bird-eye coordinates. + """ + h, w = bgr.shape[:2] + roi_top = int(h * (1.0 - cfg.roi_frac)) + roi = bgr[roi_top:, :] + roi_h, roi_w = roi.shape[:2] + + # Build perspective homography (ROI px → bird-eye px) + H = build_homography(cfg.birdseye_src, roi_w, roi_h, cfg.birdseye_size) + + # Edge detection + edges = canny_edges(roi, cfg.canny_low, cfg.canny_high, cfg.blur_ksize) + + # Hough line segments (in ROI coords) + lines = hough_lines( + edges, + threshold = cfg.hough_threshold, + min_len = cfg.min_line_len, + max_gap = cfg.max_line_gap, + rho = cfg.hough_rho, + theta = cfg.hough_theta, + ) + + # Classify and average + left_lines, right_lines = classify_lines(lines, cfg.min_slope) + left_edge = average_line(left_lines, roi_h) + right_edge = average_line(right_lines, roi_h) + + # Warp all segments to bird-eye + birdseye_lines = warp_segments(lines, H) + birdseye_left = ( + (warp_segments([left_edge], H)[0] if left_edge else None) + ) + birdseye_right = ( + (warp_segments([right_edge], H)[0] if right_edge else None) + ) + + return PathEdgesResult( + lines = lines, + left_lines = left_lines, + right_lines = right_lines, + left_edge = left_edge, + right_edge = 
right_edge, + birdseye_lines = birdseye_lines, + birdseye_left = birdseye_left, + birdseye_right = birdseye_right, + H = H, + roi_top = roi_top, + ) diff --git a/jetson/ros2_ws/src/saltybot_scene_msgs/CMakeLists.txt b/jetson/ros2_ws/src/saltybot_scene_msgs/CMakeLists.txt index 180bf09..9db8a9a 100644 --- a/jetson/ros2_ws/src/saltybot_scene_msgs/CMakeLists.txt +++ b/jetson/ros2_ws/src/saltybot_scene_msgs/CMakeLists.txt @@ -22,6 +22,11 @@ rosidl_generate_interfaces(${PROJECT_NAME} # Issue #322 — cross-camera person re-identification "msg/PersonTrack.msg" "msg/PersonTrackArray.msg" + # Issue #326 — dynamic obstacle velocity estimator + "msg/ObstacleVelocity.msg" + "msg/ObstacleVelocityArray.msg" + # Issue #339 — lane/path edge detector + "msg/PathEdges.msg" DEPENDENCIES std_msgs geometry_msgs vision_msgs builtin_interfaces ) diff --git a/src/battery.c b/src/battery.c index f50b397..d4143b4 100644 --- a/src/battery.c +++ b/src/battery.c @@ -11,6 +11,7 @@ #include "battery.h" #include "config.h" #include "stm32f7xx_hal.h" +#include static ADC_HandleTypeDef s_hadc; static bool s_ready = false; diff --git a/src/bno055.c b/src/bno055.c index a6a3d52..207004b 100644 --- a/src/bno055.c +++ b/src/bno055.c @@ -314,3 +314,15 @@ int8_t bno055_temperature(void) { return s_temp; } + +/* ---- bno055_calibrated() ---- */ +bool bno055_calibrated(void) +{ + if (!s_present) return false; + /* Check if all sensors have at least calibration level 1 */ + uint8_t sys_cal = (s_calib & BNO055_CAL_SYS_MASK) >> 6; + uint8_t acc_cal = (s_calib & BNO055_CAL_ACC_MASK) >> 2; + uint8_t gyr_cal = (s_calib & BNO055_CAL_GYR_MASK) >> 4; + uint8_t mag_cal = (s_calib & BNO055_CAL_MAG_MASK); + return (sys_cal >= 1u) && (acc_cal >= 1u) && (gyr_cal >= 1u) && (mag_cal >= 1u); +} diff --git a/src/crsf.c b/src/crsf.c index 55c10ae..1531cf0 100644 --- a/src/crsf.c +++ b/src/crsf.c @@ -352,3 +352,13 @@ void crsf_send_flight_mode(bool armed) { uint8_t flen = crsf_build_frame(frame, 0x21u, (const uint8_t *)text, 
plen); if (flen) HAL_UART_Transmit(&s_uart, frame, flen, 5u); } + +/* + * crsf_is_active() — check if CRSF link is active. + * Returns true if a valid frame was received within the last 100ms. + */ +bool crsf_is_active(uint32_t now_ms) { + if (crsf_state.last_rx_ms == 0u) return false; /* No frame received yet */ + uint32_t elapsed = now_ms - crsf_state.last_rx_ms; + return (elapsed < 100u); /* Active if frame within last 100ms */ +} diff --git a/src/fan.c b/src/fan.c index 33e15d3..0eeb13f 100644 --- a/src/fan.c +++ b/src/fan.c @@ -89,7 +89,7 @@ void fan_init(void) HAL_TIM_PWM_ConfigChannel(&htim1, &oc_init, FAN_TIM_CHANNEL); /* Start PWM generation */ - HAL_TIM_PWM_Start(FAN_TIM, FAN_TIM_CHANNEL); + HAL_TIM_PWM_Start(&htim1, FAN_TIM_CHANNEL); s_fan.current_speed = 0; s_fan.target_speed = 0; diff --git a/src/i2c1.c b/src/i2c1.c index 8e78273..24ea173 100644 --- a/src/i2c1.c +++ b/src/i2c1.c @@ -31,3 +31,17 @@ int i2c1_init(void) { return (HAL_I2C_Init(&hi2c1) == HAL_OK) ? 0 : -1; } + +int i2c1_write(uint16_t addr, const uint8_t *data, uint16_t len) { + if (HAL_I2C_Master_Transmit(&hi2c1, addr << 1, (uint8_t *)data, len, 100) != HAL_OK) { + return -1; + } + return 0; +} + +int i2c1_read(uint16_t addr, uint8_t *data, uint16_t len) { + if (HAL_I2C_Master_Receive(&hi2c1, (addr << 1) | 1, data, len, 100) != HAL_OK) { + return -1; + } + return 0; +} diff --git a/src/main.c b/src/main.c index 8aca44e..567d6b9 100644 --- a/src/main.c +++ b/src/main.c @@ -26,6 +26,7 @@ #include "ultrasonic.h" #include "power_mgmt.h" #include "battery.h" +#include "bno055.h" #include #include #include @@ -38,6 +39,11 @@ extern volatile uint32_t cdc_rx_count; /* total CDC packets received */ extern volatile uint8_t cdc_estop_request; extern volatile uint8_t cdc_estop_clear_request; +/* Wrapper for IMU calibration status (uses BNO055) */ +static inline bool imu_calibrated(void) { + return bno055_calibrated(); +} + /* * Apply a PID tuning command string from the USB terminal. 
* Format: P I D T M ? @@ -157,7 +163,7 @@ int main(void) { /* Init piezo buzzer driver (TIM4_CH3 PWM on PB2, Issue #189) */ buzzer_init(); - buzzer_play(BUZZER_PATTERN_ARM_CHIME); + buzzer_play_melody(MELODY_STARTUP); /* Init WS2812B NeoPixel LED ring (TIM3_CH1 PWM on PB4, Issue #193) */ led_init(); @@ -400,7 +406,7 @@ int main(void) { * jlink takes priority for Jetson speed offset; falls back to CDC. */ if (imu_ret == 0 && now - balance_tick >= 1) { - if (bno055_active) bno055_read(&imu); /* I2C read inside gate */ + if (bno055_is_ready()) bno055_read(&imu); /* I2C read inside gate */ balance_tick = now; float base_sp = bal.setpoint; if (jlink_is_active(now)) @@ -561,7 +567,7 @@ int main(void) { else if (bal.state == BALANCE_TILT_FAULT) es = ESTOP_TILT; else es = ESTOP_CLEAR; /* BNO055 calibration status and temperature when active */ - if (bno055_active) { + if (bno055_is_ready()) { uint8_t cs = bno055_calib_status(); n = snprintf(p, rem, ",\"bno_cs\":%d,\"bno_t\":%d", (int)cs, (int)bno055_temperature()); diff --git a/src/servo.c b/src/servo.c index 59e42df..6eca806 100644 --- a/src/servo.c +++ b/src/servo.c @@ -24,19 +24,6 @@ #define SERVO_PRESCALER 53u /* APB1 54 MHz / 54 = 1 MHz */ #define SERVO_ARR 19999u /* 1 MHz / 20000 = 50 Hz */ -typedef struct { - uint16_t current_angle_deg[SERVO_COUNT]; - uint16_t target_angle_deg[SERVO_COUNT]; - uint16_t pulse_us[SERVO_COUNT]; - - /* Sweep state */ - uint32_t sweep_start_ms[SERVO_COUNT]; - uint32_t sweep_duration_ms[SERVO_COUNT]; - uint16_t sweep_start_deg[SERVO_COUNT]; - uint16_t sweep_end_deg[SERVO_COUNT]; - bool is_sweeping[SERVO_COUNT]; -} ServoState; - static ServoState s_servo = {0}; static TIM_HandleTypeDef s_tim_handle = {0}; diff --git a/src/ultrasonic.c b/src/ultrasonic.c index 7201687..9dbfcda 100644 --- a/src/ultrasonic.c +++ b/src/ultrasonic.c @@ -48,6 +48,9 @@ static UltrasonicState_t s_ultrasonic = { .callback = NULL }; +/* Forward declaration of compatibility helper */ +static void 
HAL_TIM_IC_Init_Compat(TIM_HandleTypeDef *htim, uint32_t Channel, TIM_IC_InitTypeDef *sConfig); + /* ================================================================ * Hardware Initialization * ================================================================ */ diff --git a/src/watchdog.c b/src/watchdog.c index 38ca63e..95f8a86 100644 --- a/src/watchdog.c +++ b/src/watchdog.c @@ -42,6 +42,8 @@ static WatchdogState s_watchdog = { .reload_value = 0 }; +static IWDG_HandleTypeDef s_hiwdg = {0}; + /* ================================================================ * Helper Functions * ================================================================ */ @@ -108,13 +110,12 @@ bool watchdog_init(uint32_t timeout_ms) s_watchdog.timeout_ms = timeout_ms; /* Configure and start IWDG */ - IWDG_HandleTypeDef hiwdg = {0}; - hiwdg.Instance = IWDG; - hiwdg.Init.Prescaler = prescaler; - hiwdg.Init.Reload = reload; - hiwdg.Init.Window = reload; /* Window == Reload means full timeout */ + s_hiwdg.Instance = IWDG; + s_hiwdg.Init.Prescaler = prescaler; + s_hiwdg.Init.Reload = reload; + s_hiwdg.Init.Window = reload; /* Window == Reload means full timeout */ - HAL_IWDG_Init(&hiwdg); + HAL_IWDG_Init(&s_hiwdg); s_watchdog.is_initialized = true; s_watchdog.is_running = true; @@ -125,7 +126,7 @@ bool watchdog_init(uint32_t timeout_ms) void watchdog_kick(void) { if (s_watchdog.is_running) { - HAL_IWDG_Refresh(&IWDG); /* Reset IWDG counter */ + HAL_IWDG_Refresh(&s_hiwdg); /* Reset IWDG counter */ } }