From 8c6defc093d6e9bbed648deda92b2589bad3309f Mon Sep 17 00:00:00 2001 From: Andrew Straw Date: Mon, 22 Jan 2024 09:07:02 +0100 Subject: [PATCH] networking overhaul - convert to axum - rework strand/braid connections - remove bui-backend --- Cargo.toml | 6 +- ads-webasm/Cargo.toml | 3 +- ads-webasm/src/components/video_field.rs | 4 +- braid-http-session/Cargo.toml | 12 +- braid-http-session/src/lib.rs | 50 +- braid-offline/src/lib.rs | 39 +- braid-process-video/Cargo.toml | 1 - braid-process-video/src/lib.rs | 38 +- braid-process-video/src/output_braidz.rs | 30 +- braid/Cargo.toml | 2 + braid/braid-run/Cargo.toml | 42 +- braid/braid-run/braid_frontend/src/lib.rs | 34 +- braid/braid-run/build.rs | 4 +- braid/braid-run/src/callback_handling.rs | 165 +++ braid/braid-run/src/main.rs | 78 +- braid/braid-run/src/mainbrain.rs | 1036 +++++-------- .../src/multicam_http_session_handler.rs | 105 +- braid/src/lib.rs | 7 +- bui-backend-session/Cargo.toml | 6 +- bui-backend-session/demo/Cargo.toml | 8 +- bui-backend-session/demo/src/main.rs | 2 +- bui-backend-session/src/lib.rs | 25 +- bui-backend-session/types/Cargo.toml | 13 + bui-backend-session/types/src/lib.rs | 32 + build-util/Cargo.toml | 7 - build-util/src/lib.rs | 25 +- event-stream-types/Cargo.toml | 19 + event-stream-types/src/lib.rs | 241 +++ flydra-types/Cargo.toml | 12 +- flydra-types/src/lib.rs | 246 +++- flydra2/Cargo.toml | 19 +- flydra2/build.rs | 83 +- flydra2/src/bin/send_pose.rs | 29 +- flydra2/src/bundled_data.rs | 8 +- flydra2/src/connected_camera_manager.rs | 151 +- flydra2/src/error.rs | 14 +- flydra2/src/flydra2.rs | 15 +- flydra2/src/frame_bundler.rs | 4 +- flydra2/src/model_server.rs | 451 ++---- flydra2/src/new_object_test_2d.rs | 6 +- flydra2/src/new_object_test_3d.rs | 12 +- flydra2/src/tracking_core.rs | 16 +- flydra2/src/write_data.rs | 2 +- flytrax-csv-to-braidz/Cargo.toml | 2 - http-video-streaming/Cargo.toml | 7 +- .../http-video-streaming-types/Cargo.toml | 3 +- .../http-video-streaming-types/src/lib.rs | 5 +- http-video-streaming/src/lib.rs | 12 +- rust-cam-bui-types/Cargo.toml | 7 +- rust-cam-bui-types/src/lib.rs | 5 +- strand-cam/Cargo.toml | 29 +- strand-cam/build.rs | 26 +- strand-cam/src/cli_app.rs | 199 +-- strand-cam/src/flydratrax_handle_msg.rs | 1 + strand-cam/src/strand-cam.rs | 1302 ++++++++++------- strand-cam/yew_frontend/src/lib.rs | 41 +- 56 files changed, 2385 insertions(+), 2356 deletions(-) create mode 100644 braid/braid-run/src/callback_handling.rs create mode 100644 bui-backend-session/types/Cargo.toml create mode 100644 bui-backend-session/types/src/lib.rs create mode 100644 event-stream-types/Cargo.toml create mode 100644 event-stream-types/src/lib.rs diff --git a/Cargo.toml b/Cargo.toml index 4097a3412..cb1b80778 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -27,6 +27,7 @@ members = [ "braidz-viewer", "bui-backend-session", "bui-backend-session/demo", + "bui-backend-session/types", "build-util", "camcal", "led-box", @@ -49,6 +50,7 @@ members = [ "enum-iter", "env-tracing-logger", "env-tracing-logger/env-tracing-logger-sample", + "event-stream-types", "fastimage", "fastfreeimage", "fly-eye", @@ -120,9 +122,7 @@ members = [ "zip-or-dir/dir2zip", ] -exclude = [ - "led-box-firmware", -] +exclude = ["led-box-firmware"] [profile.release] debug = true diff --git a/ads-webasm/Cargo.toml b/ads-webasm/Cargo.toml index 551791f06..c918486e9 100644 --- a/ads-webasm/Cargo.toml +++ b/ads-webasm/Cargo.toml @@ -12,7 +12,6 @@ js-sys = "0.3" gloo = "0.8.0" gloo-file = "0.2" wasm-bindgen = { version = "0.2.58" } 
-http = "0.2" serde = "1.0" serde_yaml = "0.9" serde_derive = "1.0" @@ -27,7 +26,6 @@ uuid = { version = "1.2.2", default-features = false, features = [ ] } # add feature flag required for uuid crate csv = { version = "1.1", optional = true } -bui-backend-types = "0.8" yew-tincture = "0.1" simple-obj-parse = { path = "../simple-obj-parse", optional = true } @@ -35,6 +33,7 @@ textured-tri-mesh = { path = "../textured-tri-mesh", optional = true } http-video-streaming-types = { path = "../http-video-streaming/http-video-streaming-types" } enum-iter = { path = "../enum-iter" } rust-cam-bui-types = { path = "../rust-cam-bui-types" } +bui-backend-session-types = { path = "../bui-backend-session/types" } [dependencies.web-sys] version = "0.3" diff --git a/ads-webasm/src/components/video_field.rs b/ads-webasm/src/components/video_field.rs index cc93bfc37..dba88be94 100644 --- a/ads-webasm/src/components/video_field.rs +++ b/ads-webasm/src/components/video_field.rs @@ -1,7 +1,7 @@ use std::{cell::RefCell, rc::Rc}; use crate::video_data::VideoData; -use bui_backend_types; +use bui_backend_session_types; use gloo::timers::callback::Timeout; use serde::{Deserialize, Serialize}; use wasm_bindgen::prelude::*; @@ -29,7 +29,7 @@ pub struct ImData2 { pub draw_shapes: Vec, pub fno: u64, pub ts_rfc3339: String, // timestamp in RFC3339 format - pub ck: bui_backend_types::ConnectionKey, + pub ck: bui_backend_session_types::ConnectionKey, pub name: Option, } diff --git a/braid-http-session/Cargo.toml b/braid-http-session/Cargo.toml index 7d6f4973d..c22f5521e 100644 --- a/braid-http-session/Cargo.toml +++ b/braid-http-session/Cargo.toml @@ -2,19 +2,17 @@ name = "braid-http-session" version = "0.1.0" edition = "2021" -rust-version="1.60" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +rust-version = "1.60" [dependencies] -log = "0.4" +tracing = "0.1" thiserror = "1" hyper = "1.1" -serde = {version="1.0",features=["derive"]} +serde = { version = "1.0", features = ["derive"] } serde_json = "1" futures = "0.3" -bui-backend-session = {path="../bui-backend-session"} -flydra-types = { path = "../flydra-types", features=["with-dns"] } +bui-backend-session = { path = "../bui-backend-session" } +flydra-types = { path = "../flydra-types" } http-body-util = "0.1.0" bytes = "1.5.0" diff --git a/braid-http-session/src/lib.rs b/braid-http-session/src/lib.rs index 25dc2ea57..eeecbb186 100644 --- a/braid-http-session/src/lib.rs +++ b/braid-http-session/src/lib.rs @@ -1,5 +1,5 @@ use ::bui_backend_session::{future_session, InsecureSession}; -use log::{debug, error}; +use tracing::{debug, error}; #[derive(thiserror::Error, Debug)] pub enum Error { @@ -47,33 +47,24 @@ impl MainbrainSession { async fn do_post(&mut self, bytes: Vec) -> Result<(), Error> { let body = body_from_buf(&bytes); - let resp = self.inner.post("callback", body).await?; - - debug!("called do_post and got response: {:?}", resp); - if !resp.status().is_success() { - error!( - "error: POST response was not a success {}:{}", - file!(), - line!() - ); - // TODO: return Err(_)? 
- }; + debug!("calling mainbrain callback handler"); + let _resp = self.inner.post("callback", body).await?; Ok(()) } pub async fn get_remote_info( &mut self, - orig_cam_name: &flydra_types::RawCamName, + raw_cam_name: &flydra_types::RawCamName, ) -> Result { let path = format!( - "{}?camera={}", - flydra_types::REMOTE_CAMERA_INFO_PATH, - orig_cam_name.as_str() + "{}/{}", + flydra_types::braid_http::REMOTE_CAMERA_INFO_PATH, + flydra_types::braid_http::encode_cam_name(&raw_cam_name) ); debug!( "Getting remote camera info for camera \"{}\".", - orig_cam_name.as_str() + raw_cam_name.as_str() ); let resp = self.inner.get(&path).await?; @@ -97,31 +88,10 @@ impl MainbrainSession { >(&data)?) } - pub async fn register_flydra_camnode( + pub async fn post_callback_message( &mut self, - msg: &flydra_types::RegisterNewCamera, + msg: flydra_types::BraidHttpApiCallback, ) -> Result<(), Error> { - debug!("register_flydra_camnode with message {:?}", msg); - let msg = flydra_types::HttpApiCallback::NewCamera(msg.clone()); - Ok(self.send_message(msg).await?) - } - - pub async fn update_image( - &mut self, - ros_cam_name: flydra_types::RosCamName, - current_image_png: flydra_types::PngImageData, - ) -> Result<(), Error> { - let msg = flydra_types::PerCam { - ros_cam_name, - inner: flydra_types::UpdateImage { current_image_png }, - }; - - debug!("update_image with message {:?}", msg); - let msg = flydra_types::HttpApiCallback::UpdateCurrentImage(msg); - Ok(self.send_message(msg).await?) - } - - pub async fn send_message(&mut self, msg: flydra_types::HttpApiCallback) -> Result<(), Error> { let bytes = serde_json::to_vec(&msg).unwrap(); Ok(self.do_post(bytes).await?) } diff --git a/braid-offline/src/lib.rs b/braid-offline/src/lib.rs index 7cdb22bfc..0299dec8d 100644 --- a/braid-offline/src/lib.rs +++ b/braid-offline/src/lib.rs @@ -19,7 +19,7 @@ use flydra2::{ NumberedRawUdpPoint, StreamItem, }; use flydra_types::{ - CamInfoRow, PerCamSaveData, RawCamName, RosCamName, SyncFno, TrackingParams, + CamInfoRow, PerCamSaveData, RawCamName, SyncFno, TrackingParams, FEATURE_DETECT_SETTINGS_DIRNAME, IMAGES_DIRNAME, }; use groupby::{AscendingGroupIter, BufferedSortIter}; @@ -334,7 +334,7 @@ where let all_expected_cameras = recon .cam_names() - .map(|x| RosCamName::new(x.to_string())) + .map(|x| RawCamName::new(x.to_string())) .collect(); let signal_all_cams_present = std::sync::Arc::new(std::sync::atomic::AtomicBool::new(false)); @@ -347,12 +347,6 @@ where signal_all_cams_synced, ); - // Create `stream_cancel::Valve` for shutting everything down. Note this is - // `Clone`, so we can (and should) shut down everything with it. Here we let - // _quit_trigger drop when it goes out of scope. This is due to use in this - // offline context. 
- let (_quit_trigger, valve) = stream_cancel::Valve::new(); - let (frame_data_tx, frame_data_rx) = tokio::sync::mpsc::channel(10); let frame_data_rx = tokio_stream::wrappers::ReceiverStream::new(frame_data_rx); let save_empty_data2d = true; @@ -368,7 +362,6 @@ where cam_manager.clone(), Some(recon.clone()), metadata_builder.clone(), - valve, )?; let images_dirname = data_src.path_starter().join(IMAGES_DIRNAME); @@ -420,13 +413,13 @@ where let images_dirname = data_src.path_starter().join(IMAGES_DIRNAME); - let per_cam_data: BTreeMap = match images_dirname.list_paths() { + let per_cam_data: BTreeMap = match images_dirname.list_paths() { Ok(relnames) => relnames .iter() .map(|relname| { assert_eq!(relname.extension(), Some(std::ffi::OsStr::new("png"))); - let ros_cam_name = - RosCamName::new(relname.file_stem().unwrap().to_str().unwrap().to_string()); + let raw_cam_name = + RawCamName::new(relname.file_stem().unwrap().to_str().unwrap().to_string()); let png_fname = data_src.path_starter().join(IMAGES_DIRNAME).join(relname); let current_image_png = { @@ -439,7 +432,7 @@ where let mut current_feature_detect_settings_fname = data_src .path_starter() .join(FEATURE_DETECT_SETTINGS_DIRNAME) - .join(format!("{}.toml", ros_cam_name.as_str())); + .join(format!("{}.toml", raw_cam_name.as_str())); let current_feature_detect_settings = if current_feature_detect_settings_fname.exists() { @@ -452,7 +445,7 @@ where }; ( - ros_cam_name, + raw_cam_name, PerCamSaveData { current_image_png: current_image_png.into(), cam_settings_data: None, @@ -471,18 +464,17 @@ where let mut cam_info_fname = data_src.path_starter(); cam_info_fname.push(flydra_types::CAM_INFO_CSV_FNAME); let cam_info_file = open_maybe_gzipped(cam_info_fname)?; - let mut orig_camn_to_cam_name: BTreeMap = BTreeMap::new(); + let mut orig_camn_to_cam_name: BTreeMap = BTreeMap::new(); let rdr = csv::Reader::from_reader(cam_info_file); for row in rdr.into_deserialize::() { let row = row?; let orig_cam_name = RawCamName::new(row.cam_id.to_string()); - let ros_cam_name = RosCamName::new(row.cam_id.to_string()); - let no_server = flydra_types::StrandCamHttpServerInfo::NoServer; + let no_server = flydra_types::BuiServerInfo::NoServer; - orig_camn_to_cam_name.insert(row.camn, ros_cam_name.clone()); + orig_camn_to_cam_name.insert(row.camn, orig_cam_name.clone()); - cam_manager.register_new_camera(&orig_cam_name, &no_server, &ros_cam_name); + cam_manager.register_new_camera(&orig_cam_name, &no_server); } { @@ -672,21 +664,14 @@ where let expected_framerate = Some(fps as f32); - // let model_server_addr = opt.model_server_addr.clone(); - - let (_quit_trigger, valve) = stream_cancel::Valve::new(); let (data_tx, data_rx) = tokio::sync::mpsc::channel(50); let _model_server = match &opt2.model_server_addr { Some(ref addr) => { let addr = addr.parse().unwrap(); info!("send_pose server at {}", addr); - let info = flydra_types::StaticMainbrainInfo { - name: env!("CARGO_PKG_NAME").into(), - version: env!("CARGO_PKG_VERSION").into(), - }; coord_processor.add_listener(data_tx); - Some(flydra2::new_model_server(data_rx, valve, &addr, info, rt_handle).await?) + Some(flydra2::new_model_server(data_rx, addr).await?) 
} None => None, }; diff --git a/braid-process-video/Cargo.toml b/braid-process-video/Cargo.toml index b0c3a1077..40ed0bb68 100644 --- a/braid-process-video/Cargo.toml +++ b/braid-process-video/Cargo.toml @@ -11,7 +11,6 @@ log = "0.4" env_logger = "0.10" tokio = {version="1.17", features=["macros","rt","tracing"]} tokio-stream = "0.1.8" -stream-cancel = "0.8" anyhow = "1" chrono = {version="0.4.23", default-features=false, features=["serde", "clock", "std", "wasmbind"]} serde = {version="1", features=["derive"]} diff --git a/braid-process-video/src/lib.rs b/braid-process-video/src/lib.rs index ba75ccff5..cdac81b14 100644 --- a/braid-process-video/src/lib.rs +++ b/braid-process-video/src/lib.rs @@ -9,7 +9,7 @@ use ordered_float::NotNan; use machine_vision_formats::ImageData; use timestamped_frame::ExtraTimeData; -use flydra_types::{Data2dDistortedRow, RawCamName, RosCamName}; +use flydra_types::{Data2dDistortedRow, RawCamName}; mod peek2; use peek2::Peek2; @@ -170,7 +170,7 @@ fn synchronize_readers_from( struct PerCamRender { best_name: String, - ros_name: Option, + raw_name: RawCamName, frame0_png_buf: flydra_types::PngImageData, width: usize, height: usize, @@ -179,7 +179,7 @@ struct PerCamRender { impl PerCamRender { fn from_reader(cam_id: &CameraIdentifier) -> Self { let best_name = cam_id.best_name(); - let ros_name = cam_id.ros_name().map(RosCamName::new); + let raw_name = RawCamName::new(best_name.clone()); let rdr = match &cam_id { CameraIdentifier::MovieOnly(m) | CameraIdentifier::Both((m, _)) => { @@ -221,7 +221,7 @@ impl PerCamRender { Self { best_name, - ros_name, + raw_name, frame0_png_buf, width, height, @@ -233,9 +233,9 @@ impl PerCamRender { braidz_cam: &BraidzCamId, ) -> Self { let image_sizes = braid_archive.image_sizes.as_ref().unwrap(); - let (width, height) = image_sizes.get(&braidz_cam.ros_cam_name).unwrap(); - let best_name = braidz_cam.ros_cam_name.clone(); // this is the best we can do - let ros_name = Some(RosCamName::new(braidz_cam.ros_cam_name.clone())); + let (width, height) = image_sizes.get(&braidz_cam.cam_id_str).unwrap(); + let best_name = braidz_cam.cam_id_str.clone(); // this is the best we can do + let raw_name = RawCamName::new(best_name.clone()); // generate blank first image of the correct size. 
let image_data: Vec = vec![0; *width * *height]; @@ -253,7 +253,7 @@ impl PerCamRender { Self { best_name, - ros_name, + raw_name, frame0_png_buf, width: *width, height: *height, @@ -337,15 +337,7 @@ impl CameraIdentifier { .unwrap_or_else(|| m.filename.clone()) }) } - CameraIdentifier::BraidzOnly(b) => b.ros_cam_name.clone(), - } - } - fn ros_name(&self) -> Option { - match self { - CameraIdentifier::MovieOnly(m) => m.ros_name(), - CameraIdentifier::BraidzOnly(b) | CameraIdentifier::Both((_, b)) => { - Some(b.ros_cam_name.clone()) - } + CameraIdentifier::BraidzOnly(b) => b.cam_id_str.clone(), } } fn frame0_time(&self) -> chrono::DateTime { @@ -390,7 +382,7 @@ impl MovieCamId { #[derive(Clone, Debug, PartialEq)] struct BraidzCamId { - ros_cam_name: String, + cam_id_str: String, camn: flydra_types::CamNum, } @@ -513,7 +505,7 @@ pub async fn run_config(cfg: &Valid) -> Result) -> Result { - if Some(braidz_cam_id.ros_cam_name.clone()) == m.ros_name() { + if Some(braidz_cam_id.cam_id_str.clone()) == m.ros_name() { CameraIdentifier::Both((m, braidz_cam_id.clone())) } else { CameraIdentifier::MovieOnly(m) @@ -589,7 +581,7 @@ pub async fn run_config(cfg: &Valid) -> Result { m.ros_name().unwrap() } - CameraIdentifier::BraidzOnly(b) => b.ros_cam_name.clone(), + CameraIdentifier::BraidzOnly(b) => b.cam_id_str.clone(), }) .collect(); @@ -701,8 +693,8 @@ pub async fn run_config(cfg: &Valid) -> Result>(); + .map(|x| RawCamName::new(x.clone())) + .collect::>(); // Initialize outputs let output_storage: Vec> = diff --git a/braid-process-video/src/output_braidz.rs b/braid-process-video/src/output_braidz.rs index 8eeec6f7b..475c8446a 100644 --- a/braid-process-video/src/output_braidz.rs +++ b/braid-process-video/src/output_braidz.rs @@ -1,7 +1,7 @@ use anyhow::Result; use std::collections::{BTreeMap, BTreeSet}; -use flydra_types::{PerCamSaveData, RawCamName, RosCamName}; +use flydra_types::{PerCamSaveData, RawCamName}; use crate::{ config::{BraidRetrackVideoConfig, CameraCalibrationSource, TrackingParametersSource}, @@ -20,7 +20,7 @@ impl BraidStorage { b: &crate::config::BraidzOutputConfig, tracking_parameters: Option, sources: &[crate::CameraSource], - all_expected_cameras: BTreeSet, + all_expected_cameras: BTreeSet, expected_framerate: Option, ) -> Result { let output_braidz_path = std::path::PathBuf::from(&b.filename); @@ -60,14 +60,14 @@ impl BraidStorage { std::sync::Arc::new(std::sync::atomic::AtomicBool::new(false)); let signal_all_cams_synced = std::sync::Arc::new(std::sync::atomic::AtomicBool::new(false)); - let braidz_per_cam_save_data: BTreeMap = sources + let braidz_per_cam_save_data: BTreeMap = sources .iter() .map(|source| { - let ros_cam_name = source.per_cam_render.ros_name.clone().unwrap(); + let raw_cam_name = source.per_cam_render.raw_name.clone(); let current_image_png = source.per_cam_render.frame0_png_buf.clone(); ( - ros_cam_name, + raw_cam_name, PerCamSaveData { current_image_png, cam_settings_data: None, @@ -84,18 +84,11 @@ impl BraidStorage { signal_all_cams_synced, ); - for ros_cam_name in all_expected_cameras.iter() { - let no_server = flydra_types::StrandCamHttpServerInfo::NoServer; - let orig_cam_name = RawCamName::new(ros_cam_name.to_string()); // this is a lie... - cam_manager.register_new_camera(&orig_cam_name, &no_server, ros_cam_name); + for raw_cam_name in all_expected_cameras.iter() { + let no_server = flydra_types::BuiServerInfo::NoServer; + cam_manager.register_new_camera(&raw_cam_name, &no_server); } - // Create `stream_cancel::Valve` for shutting everything down. 
Note this is - // `Clone`, so we can (and should) shut down everything with it. Here we let - // _quit_trigger drop when it goes out of scope. This is due to use in this - // offline context. - let (_quit_trigger, valve) = stream_cancel::Valve::new(); - let (frame_data_tx, frame_data_rx) = tokio::sync::mpsc::channel(10); let frame_data_rx = tokio_stream::wrappers::ReceiverStream::new(frame_data_rx); let save_empty_data2d = true; @@ -111,7 +104,6 @@ impl BraidStorage { cam_manager.clone(), recon.clone(), flydra2::BraidMetadataBuilder::saving_program_name("braid-process-video"), - valve, )?; let save_cfg = flydra2::StartSavingCsvConfig { @@ -146,8 +138,8 @@ impl BraidStorage { all_cam_render_data: &[PerCamRenderFrame<'_>], ) -> Result<()> { for cam_render_data in all_cam_render_data.iter() { - let ros_cam_name = cam_render_data.p.ros_name.clone().unwrap(); - let cam_num = self.cam_manager.cam_num(&ros_cam_name).unwrap(); + let raw_cam_name = cam_render_data.p.raw_name.clone(); + let cam_num = self.cam_manager.cam_num(&raw_cam_name).unwrap(); let trigger_timestamp = synced_data .braidz_info @@ -155,7 +147,7 @@ impl BraidStorage { .and_then(|bi| bi.trigger_timestamp.clone()); let frame_data = flydra2::FrameData::new( - ros_cam_name, + raw_cam_name, cam_num, flydra_types::SyncFno(out_fno.try_into().unwrap()), trigger_timestamp, diff --git a/braid/Cargo.toml b/braid/Cargo.toml index 2c90d9a66..d8bdf7d28 100644 --- a/braid/Cargo.toml +++ b/braid/Cargo.toml @@ -20,3 +20,5 @@ flydra-types = {path="../flydra-types"} flydra-feature-detector-types = {path = "../flydra-feature-detector/flydra-feature-detector-types"} flydra-pt-detect-cfg = {path = "../flydra-feature-detector/flydra-pt-detect-cfg"} braid-config-data = {path = "../braid-config-data"} +tracing-subscriber = "0.3.18" +tracing = "0.1.40" diff --git a/braid/braid-run/Cargo.toml b/braid/braid-run/Cargo.toml index 7d9843bda..437606acf 100644 --- a/braid/braid-run/Cargo.toml +++ b/braid/braid-run/Cargo.toml @@ -22,7 +22,6 @@ serde = "1.0" serde_json = "1.0" toml = "0.5" regex = "1.0" -url = "2" braid-triggerbox = "0.4.1" chrono = {version="0.4.23", default-features=false, features=["clock", "std", "wasmbind"]} futures = "0.3" @@ -36,38 +35,43 @@ qrcodegen = "1.4" image = "0.24.2" hyper = "1.1" lazy_static = "1.4" -ctrlc = { version = "3.1.3", features = ["termination"] } csv = "1.1" http-body-util = "0.1.0" http = "1.0.0" - -includedir = { version = "0.6", optional = true } -phf = { version = "0.8", optional = true } -async-change-tracker = "0.3" -bui-backend-types = "0.8" -bui-backend = {version="0.15", default-features = false} +async-change-tracker = "0.3.4" +tracing = "0.1.40" +axum = "0.7.4" +tower = "0.4.13" +cookie = "0.18.0" +tower-http = { version = "0.5.1", features = ["fs", "trace"] } +tower-serve-static = { version = "0.1", optional = true } +include_dir = { version = "0.7.3", optional = true } +axum-token-auth = "0.1.0" braid = {path=".."} braid-config-data = {path="../../braid-config-data"} -flydra-pt-detect-cfg = {path = "../../flydra-feature-detector/flydra-pt-detect-cfg"} -strand-cam-storetype = {path = "../../strand-cam-storetype"} -flydra-types = {path="../../flydra-types", features=["with-dns"]} -flydra2 = {path="../../flydra2", default-features = false, features=["braid"]} -rust-cam-bui-types = {path="../../rust-cam-bui-types"} -mvg = {path="../../mvg"} -flydra-mvg = {path="../../flydra-mvg"} +bui-backend-session-types = { path = "../../bui-backend-session/types" } bui-backend-session = {path="../../bui-backend-session"} 
-flydra-feature-detector-types = {path = "../../flydra-feature-detector/flydra-feature-detector-types", default-features = false} ci2-remote-control = {path = "../../ci2-remote-control"} datetime-conversion = {path = "../../datetime-conversion"} +event-stream-types = { path = "../../event-stream-types" } +flydra-feature-detector-types = {path = "../../flydra-feature-detector/flydra-feature-detector-types", default-features = false} +flydra-mvg = {path="../../flydra-mvg"} +flydra-pt-detect-cfg = {path = "../../flydra-feature-detector/flydra-pt-detect-cfg"} +flydra-types = { path="../../flydra-types", features = [ "start-listener", "build-urls" ] } +flydra2 = {path="../../flydra2", default-features = false, features=["braid"]} +mvg = {path="../../mvg"} +rust-cam-bui-types = {path="../../rust-cam-bui-types"} +strand-cam-storetype = {path = "../../strand-cam-storetype"} +http-body = "1.0.0" [features] default = ["bundle_files"] deadlock_detection = ["parking_lot/deadlock_detection"] -# BUI frontend -bundle_files = ["flydra2/bundle_files", "bui-backend/bundle_files", "build-util/bundle_files", "includedir", "phf"] -serve_files = ["flydra2/serve_files", "bui-backend/serve_files", "build-util/serve_files"] +# BUI frontend. must pick one of the following two: +bundle_files = ["flydra2/bundle_files", "tower-serve-static", "include_dir"] +serve_files = ["flydra2/serve_files"] backtrace = ["flydra2/backtrace", "mvg/backtrace", "flydra-mvg/backtrace"] diff --git a/braid/braid-run/braid_frontend/src/lib.rs b/braid/braid-run/braid_frontend/src/lib.rs index 026099b83..56b7764af 100644 --- a/braid/braid-run/braid_frontend/src/lib.rs +++ b/braid/braid-run/braid_frontend/src/lib.rs @@ -11,7 +11,7 @@ use wasm_bindgen::{JsCast, JsValue}; use wasm_bindgen_futures::JsFuture; use web_sys::{Event, EventSource, MessageEvent}; -use flydra_types::{CamInfo, HttpApiCallback, HttpApiShared, StrandCamHttpServerInfo}; +use flydra_types::{BraidHttpApiCallback, BraidHttpApiSharedState, BuiServerInfo, CamInfo}; use rust_cam_bui_types::{ClockModel, RecordingPath}; use yew::prelude::*; @@ -41,7 +41,7 @@ impl std::fmt::Display for MyError { // Model struct Model { - shared: Option, + shared: Option, es: EventSource, fail_msg: String, html_page_title: Option, @@ -54,7 +54,7 @@ struct Model { // ----------------------------------------------------------------------------- enum Msg { - NewServerState(HttpApiShared), + NewServerState(BraidHttpApiSharedState), FailedDecode(serde_json::Error), DoRecordCsvTables(bool), DoRecordMp4Files(bool), @@ -165,7 +165,7 @@ impl Component for Model { } Msg::DoRecordCsvTables(val) => { ctx.link().send_future(async move { - match post_callback(&HttpApiCallback::DoRecordCsvTables(val)).await { + match post_callback(&BraidHttpApiCallback::DoRecordCsvTables(val)).await { Ok(()) => Msg::SendMessageFetchState(FetchState::Success), Err(err) => Msg::SendMessageFetchState(FetchState::Failed(err)), } @@ -175,13 +175,14 @@ impl Component for Model { return false; // Don't update DOM, do that when backend notifies us of new state. 
} Msg::DoRecordMp4Files(val) => { - return self.send_to_all_cams(&ctx, HttpApiCallback::DoRecordMp4Files(val)); + return self.send_to_all_cams(&ctx, BraidHttpApiCallback::DoRecordMp4Files(val)); } Msg::SetPostTriggerBufferSize(val) => { - return self.send_to_all_cams(&ctx, HttpApiCallback::SetPostTriggerBufferSize(val)); + return self + .send_to_all_cams(&ctx, BraidHttpApiCallback::SetPostTriggerBufferSize(val)); } Msg::PostTriggerMp4Recording => { - return self.send_to_all_cams(&ctx, HttpApiCallback::PostTriggerMp4Recording); + return self.send_to_all_cams(&ctx, BraidHttpApiCallback::PostTriggerMp4Recording); } } true @@ -215,7 +216,7 @@ impl Component for Model { // View impl Model { - fn send_to_all_cams(&mut self, ctx: &Context, msg: HttpApiCallback) -> bool { + fn send_to_all_cams(&mut self, ctx: &Context, msg: BraidHttpApiCallback) -> bool { ctx.link().send_future(async move { match post_callback(&msg).await { Ok(()) => Msg::SendMessageFetchState(FetchState::Success), @@ -382,9 +383,15 @@ fn view_cam_list(cams: &Vec) -> Html { let all_rendered: Vec = cams .iter() .map(|cci| { - let cam_url = match cci.http_camserver_info { - StrandCamHttpServerInfo::NoServer => "http://127.0.0.1/notexist".to_string(), - StrandCamHttpServerInfo::Server(ref details) => details.guess_base_url_with_token(), + let cam_url = match cci.strand_cam_http_server_info { + BuiServerInfo::NoServer => "/does-not-exist".to_string(), + BuiServerInfo::Server(_) => { + format!( + "/{}/{}", + flydra_types::braid_http::CAM_PROXY_PATH, + flydra_types::braid_http::encode_cam_name(&cci.name) + ) + } }; let state = format!("{:?}", cci.state); let stats = format!("{:?}", cci.recent_stats); @@ -442,14 +449,11 @@ fn view_model_server_link(opt_addr: &Option) -> Html { // ----------------------------------------------------------------------------- -async fn post_callback(msg: &HttpApiCallback) -> Result<(), FetchError> { +async fn post_callback(msg: &BraidHttpApiCallback) -> Result<(), FetchError> { use web_sys::{Request, RequestInit, Response}; let mut opts = RequestInit::new(); opts.method("POST"); opts.cache(web_sys::RequestCache::NoStore); - // opts.mode(web_sys::RequestMode::Cors); - // opts.headers("Content-Type", "application/json;charset=UTF-8") - // set SameOrigin let buf = serde_json::to_string(&msg).unwrap_throw(); opts.body(Some(&JsValue::from_str(&buf))); diff --git a/braid/braid-run/build.rs b/braid/braid-run/build.rs index 606ac29b7..12409547d 100644 --- a/braid/braid-run/build.rs +++ b/braid/braid-run/build.rs @@ -1,7 +1,9 @@ fn main() -> Result<(), Box> { build_util::git_hash(env!("CARGO_PKG_VERSION"))?; + #[cfg(feature = "bundle_files")] let frontend_dir = std::path::PathBuf::from("braid_frontend"); + #[cfg(feature = "bundle_files")] let frontend_pkg_dir = frontend_dir.join("pkg"); #[cfg(feature = "bundle_files")] @@ -14,7 +16,5 @@ fn main() -> Result<(), Box> { .into()); } - build_util::bui_backend_generate_code(&frontend_pkg_dir, "mainbrain_frontend.rs")?; - Ok(()) } diff --git a/braid/braid-run/src/callback_handling.rs b/braid/braid-run/src/callback_handling.rs new file mode 100644 index 000000000..6cd92065a --- /dev/null +++ b/braid/braid-run/src/callback_handling.rs @@ -0,0 +1,165 @@ +use axum::response::IntoResponse; + +use event_stream_types::TolerantJson; +use flydra_types::{BraidHttpApiCallback, PerCamSaveData}; +use http::StatusCode; +use rust_cam_bui_types::RecordingPath; + +use crate::mainbrain::*; + +fn start_saving_mp4s_all_cams(app_state: &BraidAppState, start_saving: bool) { + let mut tracker = 
app_state.shared_store.write();
+    tracker.modify(|store| {
+        if start_saving {
+            store.fake_mp4_recording_path = Some(RecordingPath::new("".to_string()));
+        } else {
+            store.fake_mp4_recording_path = None;
+        }
+    });
+}
+
+pub(crate) async fn callback_handler(
+    axum::extract::State(app_state): axum::extract::State<BraidAppState>,
+    _session_key: axum_token_auth::SessionKey,
+    TolerantJson(payload): TolerantJson<BraidHttpApiCallback>,
+) -> impl IntoResponse {
+    let fut = async {
+        use BraidHttpApiCallback::*;
+        match payload {
+            NewCamera(cam_info) => {
+                debug!("got NewCamera {:?}", cam_info);
+                let http_camserver_info = cam_info.http_camserver_info.unwrap();
+                let cam_settings_data = cam_info.cam_settings_data.unwrap();
+                let mut cam_manager3 = app_state.cam_manager.clone();
+                cam_manager3.register_new_camera(&cam_info.raw_cam_name, &http_camserver_info);
+
+                let mut current_cam_data = app_state.per_cam_data_arc.write();
+                if current_cam_data
+                    .insert(
+                        cam_info.raw_cam_name.clone(),
+                        PerCamSaveData {
+                            cam_settings_data: Some(cam_settings_data),
+                            feature_detect_settings: None,
+                            current_image_png: cam_info.current_image_png,
+                        },
+                    )
+                    .is_some()
+                {
+                    panic!("camera {} already known", cam_info.raw_cam_name.as_str());
+                }
+            }
+            UpdateCurrentImage(image_info) => {
+                // new image from camera
+                debug!(
+                    "got new image for camera \"{}\"",
+                    image_info.raw_cam_name.as_str()
+                );
+                let mut current_cam_data = app_state.per_cam_data_arc.write();
+                current_cam_data
+                    .get_mut(&image_info.raw_cam_name)
+                    .unwrap()
+                    .current_image_png = image_info.inner.current_image_png;
+            }
+            UpdateCamSettings(cam_settings) => {
+                let mut current_cam_data = app_state.per_cam_data_arc.write();
+                current_cam_data
+                    .get_mut(&cam_settings.raw_cam_name)
+                    .unwrap()
+                    .cam_settings_data = Some(cam_settings.inner);
+            }
+            UpdateFeatureDetectSettings(feature_detect_settings) => {
+                let mut current_cam_data = app_state.per_cam_data_arc.write();
+                current_cam_data
+                    .get_mut(&feature_detect_settings.raw_cam_name)
+                    .unwrap()
+                    .feature_detect_settings = Some(feature_detect_settings.inner);
+            }
+            DoRecordCsvTables(value) => {
+                debug!("got DoRecordCsvTables({})", value);
+                toggle_saving_csv_tables(
+                    value,
+                    app_state.expected_framerate_arc.clone(),
+                    app_state.output_base_dirname.clone(),
+                    app_state.braidz_write_tx_weak.clone(),
+                    app_state.per_cam_data_arc.clone(),
+                    app_state.shared_store.clone(),
+                )
+                .await;
+            }
+            DoRecordMp4Files(start_saving) => {
+                debug!("got DoRecordMp4Files({start_saving})");
+
+                app_state
+                    .strand_cam_http_session_handler
+                    .toggle_saving_mp4_files_all(start_saving)
+                    .await
+                    .map_err(|_e| {
+                        (
+                            StatusCode::INTERNAL_SERVER_ERROR,
+                            "toggle_saving_mp4_files_all failed",
+                        )
+                    })?;
+
+                start_saving_mp4s_all_cams(&app_state, start_saving);
+            }
+            SetExperimentUuid(value) => {
+                debug!("got SetExperimentUuid({})", value);
+                if let Some(braidz_write_tx) = app_state.braidz_write_tx_weak.upgrade() {
+                    // `braidz_write_tx` will be dropped after this scope.
+ braidz_write_tx + .send(flydra2::SaveToDiskMsg::SetExperimentUuid(value)) + .await + .unwrap(); + } + } + SetPostTriggerBufferSize(val) => { + debug!("got SetPostTriggerBufferSize({val})"); + + app_state + .strand_cam_http_session_handler + .set_post_trigger_buffer_all(val) + .await + .map_err(|_e| { + ( + StatusCode::INTERNAL_SERVER_ERROR, + "set_post_trigger_buffer_all failed", + ) + })?; + + { + let mut tracker = app_state.shared_store.write(); + tracker.modify(|store| { + store.post_trigger_buffer_size = val; + }); + } + } + PostTriggerMp4Recording => { + debug!("got PostTriggerMp4Recording"); + + let is_saving = { + let tracker = app_state.shared_store.read(); + (*tracker).as_ref().fake_mp4_recording_path.is_some() + }; + + if !is_saving { + app_state + .strand_cam_http_session_handler + .initiate_post_trigger_mp4_all() + .await + .map_err(|_e| { + ( + StatusCode::INTERNAL_SERVER_ERROR, + "initiate_post_trigger_mp4_all failed", + ) + })?; + + start_saving_mp4s_all_cams(&app_state, true); + } else { + debug!("Already saving, not initiating again."); + } + } + } + Ok::<_, (StatusCode, &'static str)>(()) + }; + fut.await +} diff --git a/braid/braid-run/src/main.rs b/braid/braid-run/src/main.rs index f462d3db9..c7a3a88a4 100644 --- a/braid/braid-run/src/main.rs +++ b/braid/braid-run/src/main.rs @@ -13,6 +13,7 @@ use braid::braid_start; use braid_config_data::parse_config_file; use flydra_types::BraidCameraConfig; +mod callback_handling; mod mainbrain; mod multicam_http_session_handler; @@ -67,7 +68,8 @@ fn launch_strand_cam( Ok(()) } -fn main() -> Result<()> { +#[tokio::main] +async fn main() -> Result<()> { braid_start("run")?; let args = BraidRunCliArgs::parse(); @@ -76,23 +78,16 @@ fn main() -> Result<()> { let cfg = parse_config_file(&args.config_file)?; debug!("{:?}", cfg); - let n_local_cameras = cfg - .cameras - .iter() - .filter(|c| c.start_backend != StartCameraBackend::Remote) - .count(); - - let runtime = tokio::runtime::Builder::new_multi_thread() - .enable_all() - .worker_threads(4 + 4 * n_local_cameras) - .thread_name("braid-runtime") - .thread_stack_size(3 * 1024 * 1024) - .build()?; - let camera_configs = cfg .cameras .iter() - .map(|cfg| (cfg.name.clone(), cfg.clone())) + .map(|cfg| { + let raw_cam_name = RawCamName::new(cfg.name.to_string()); + ( + flydra_types::braid_http::encode_cam_name(&raw_cam_name), + cfg.clone(), + ) + }) .collect(); let trig_cfg = cfg.trigger; @@ -106,40 +101,23 @@ fn main() -> Result<()> { }; let show_tracking_params = false; - let handle = runtime.handle().clone(); + // let handle = runtime.handle().clone(); let all_expected_cameras = cfg .cameras .iter() - .map(|x| RawCamName::new(x.name.clone()).to_ros()) + .map(|x| RawCamName::new(x.name.clone())) .collect(); - let phase1 = runtime.block_on(mainbrain::pre_run( - &handle, - show_tracking_params, - // Raising the mainbrain thread priority is currently disabled. 
- // cfg.mainbrain.sched_policy_priority, - camera_configs, - trig_cfg, - &cfg.mainbrain, - cfg.mainbrain - .jwt_secret - .as_ref() - .map(|x| x.as_bytes().to_vec()), - all_expected_cameras, - force_camera_sync_mode, - software_limit_framerate.clone(), - "braid", - ))?; - let mainbrain_server_info = MainbrainBuiLocation(phase1.mainbrain_server_info.clone()); + let address_string: String = cfg.mainbrain.http_api_server_addr.clone(); + let (listener, mainbrain_server_info) = flydra_types::start_listener(&address_string).await?; + let mainbrain_internal_addr = MainbrainBuiLocation(mainbrain_server_info.clone()); let cfg_cameras = cfg.cameras; - - let _enter_guard = runtime.enter(); let _strand_cams = cfg_cameras .into_iter() .filter_map(|camera| { if camera.start_backend != StartCameraBackend::Remote { - Some(launch_strand_cam(camera, mainbrain_server_info.clone())) + Some(launch_strand_cam(camera, mainbrain_internal_addr.clone())) } else { log::info!("Not starting remote camera \"{}\"", camera.name); None @@ -149,10 +127,26 @@ fn main() -> Result<()> { debug!("done launching cameras"); - // This runs the whole thing and blocks. - runtime.block_on(mainbrain::run(phase1))?; - - // Now wait for everything to end.. + // This runs the whole thing and "blocks". Now wait for everything to end. + mainbrain::do_run_forever( + show_tracking_params, + // Raising the mainbrain thread priority is currently disabled. + // cfg.mainbrain.sched_policy_priority, + camera_configs, + trig_cfg, + &cfg.mainbrain, + cfg.mainbrain + .jwt_secret + .as_ref() + .map(|x| x.as_bytes().to_vec()), + all_expected_cameras, + force_camera_sync_mode, + software_limit_framerate.clone(), + "braid", + listener, + mainbrain_server_info, + ) + .await?; debug!("done {}:{}", file!(), line!()); diff --git a/braid/braid-run/src/mainbrain.rs b/braid/braid-run/src/mainbrain.rs index 41c0a649f..7d709c1c1 100644 --- a/braid/braid-run/src/mainbrain.rs +++ b/braid/braid-run/src/mainbrain.rs @@ -1,60 +1,59 @@ -use std::collections::BTreeMap; -use std::net::SocketAddr; -use std::sync::{ - atomic::{AtomicBool, Ordering}, - Arc, +use std::{ + collections::BTreeMap, + net::SocketAddr, + path::PathBuf, + pin::Pin, + sync::{ + atomic::{AtomicBool, Ordering}, + Arc, + }, }; -use std::{error::Error as StdError, future::Future, pin::Pin}; - +use async_change_tracker::ChangeTracker; +use axum::{ + extract::{Path, State}, + routing::get, +}; +use futures::StreamExt; +use http::StatusCode; use parking_lot::RwLock; - use tokio::net::UdpSocket; use tokio_util::udp::UdpFramed; +use tower_http::trace::TraceLayer; -use async_change_tracker::ChangeTracker; -use bui_backend_types::CallbackDataAndSession; - -use bui_backend::{ - highlevel::{create_bui_app_inner, BuiAppInner}, - AccessControl, CallbackHandler, -}; - +use bui_backend_session_types::AccessToken; +use event_stream_types::{AcceptsEventStream, EventBroadcaster}; use flydra2::{CoordProcessor, CoordProcessorConfig, FrameDataAndPoints, MyFloat, StreamItem}; use flydra_types::{ - CamInfo, CborPacketCodec, FlydraFloatTimestampLocal, HttpApiCallback, HttpApiShared, - PerCamSaveData, RosCamName, StrandCamBuiServerInfo, SyncFno, TriggerType, Triggerbox, + braid_http::{UrlEncodedCamName, CAM_PROXY_PATH, REMOTE_CAMERA_INFO_PATH}, + BraidHttpApiSharedState, BuiServerAddrInfo, CamInfo, CborPacketCodec, + FlydraFloatTimestampLocal, PerCamSaveData, RawCamName, SyncFno, TriggerType, Triggerbox, + BRAID_EVENTS_URL_PATH, BRAID_EVENT_NAME, }; - -use futures::StreamExt; -use rust_cam_bui_types::ClockModel; -use 
rust_cam_bui_types::RecordingPath; +use rust_cam_bui_types::{ClockModel, RecordingPath}; pub use crate::multicam_http_session_handler::StrandCamHttpSessionHandler; +// use crate::{callback_handling::BraidCallbackHandler, raw_request_handling::BraidRequestHandler}; -lazy_static::lazy_static! { - static ref EVENTS_PREFIX: String = format!("/{}", flydra_types::BRAID_EVENTS_URL_PATH); -} +pub(crate) type BoxedStdError = Box; -pub(crate) mod from_bui_backend { - // Include the files to be served and define `fn get_default_config()` and `Config`. - include!(concat!(env!("OUT_DIR"), "/mainbrain_frontend.rs")); // Despite slash, this works on Windows. +#[cfg(feature = "bundle_files")] +static ASSETS_DIR: include_dir::Dir<'static> = + include_dir::include_dir!("$CARGO_MANIFEST_DIR/braid_frontend/pkg"); - pub(crate) fn get_bui_backend_config() -> Config { - get_default_config() - } +lazy_static::lazy_static! { + static ref EVENTS_PREFIX: String = format!("/{}", BRAID_EVENTS_URL_PATH); } use anyhow::Result; const SYNCHRONIZE_DURATION_SEC: u8 = 3; -const JSON_TYPE: &str = "application/json"; -const EMPTY_JSON_BUF: &[u8] = b"{}"; +type SharedStore = Arc>>; #[derive(thiserror::Error, Debug)] pub(crate) enum MainbrainError { - #[error("The `jwt_secret` configuration variable must be set.")] - JwtError, + // #[error("The `jwt_secret` configuration variable must be set.")] + // JwtError, #[error("{source}")] HyperError { #[from] @@ -62,6 +61,11 @@ pub(crate) enum MainbrainError { #[cfg(feature = "backtrace")] backtrace: std::backtrace::Backtrace, }, + // #[error("session errored")] + // SessionErrored { + // #[cfg(feature = "backtrace")] + // backtrace: std::backtrace::Backtrace, + // }, #[error("{source}")] BuiBackendSessionError { #[from] @@ -73,335 +77,196 @@ pub(crate) enum MainbrainError { pub(crate) type MainbrainResult = std::result::Result; -/// When dropped, send a message. This is used to shutdown the HTTP listener. -struct DropSend(Option>); +/// The structure that holds our app data +#[derive(Clone)] +pub(crate) struct BraidAppState { + pub(crate) shared_store: SharedStore, + camdata_addr: String, + force_camera_sync_mode: bool, + software_limit_framerate: flydra_types::StartSoftwareFrameRateLimit, + event_broadcaster: EventBroadcaster, + pub(crate) per_cam_data_arc: Arc>>, + pub(crate) expected_framerate_arc: Arc>>, + camera_configs: BTreeMap, + next_connection_id: Arc>, + pub(crate) strand_cam_http_session_handler: StrandCamHttpSessionHandler, + pub(crate) cam_manager: flydra2::ConnectedCamerasManager, + pub(crate) output_base_dirname: PathBuf, + pub(crate) braidz_write_tx_weak: tokio::sync::mpsc::WeakSender, +} -impl Drop for DropSend { - fn drop(&mut self) { - if let Some(shutdown_tx) = self.0.take() { - match shutdown_tx.send(()) { - Ok(()) => {} - Err(_) => { - error!("DropSend::drop failed"); - } +async fn events_handler( + State(app_state): State, + _session_key: axum_token_auth::SessionKey, + _: AcceptsEventStream, +) -> impl axum::response::IntoResponse { + let key = { + let mut next_connection_id = app_state.next_connection_id.write(); + let key = *next_connection_id; + *next_connection_id += 1; + key + }; + let (tx, body) = app_state.event_broadcaster.new_connection(key); + + // Send an initial copy of our state. 
+ { + let current_state = app_state.shared_store.read().as_ref().clone(); + let frame_string = to_event_frame(¤t_state); + match tx + .send(Ok(http_body::Frame::data(frame_string.into()))) + .await + { + Ok(()) => {} + Err(_) => { + // The receiver was dropped because the connection closed. Should probably do more here. + tracing::debug!("initial send error"); } } } -} -/// The structure that holds our app data -struct HttpApiApp { - inner: BuiAppInner, - time_model_arc: Arc>>, - triggerbox_cmd: Option>, - sync_pulse_pause_started_arc: Arc>>, - expected_framerate_arc: Arc>>, - /// Sender which fires to shutdown the HTTP server upon drop. - _shutdown_tx: DropSend, + body } -#[derive(Clone)] -struct MyCallbackHandler { - cam_manager: flydra2::ConnectedCamerasManager, - per_cam_data_arc: Arc>>, - expected_framerate_arc: Arc>>, - braidz_write_tx_weak: tokio::sync::mpsc::WeakSender, - output_base_dirname: std::path::PathBuf, - shared_data: Arc>>, - strand_cam_http_session_handler: StrandCamHttpSessionHandler, -} - -impl MyCallbackHandler { - fn start_saving_mp4s_all_cams(&self, start_saving: bool) { - let mut tracker = self.shared_data.write(); - tracker.modify(|store| { - if start_saving { - store.fake_mp4_recording_path = Some(RecordingPath::new("".to_string())); - } else { - store.fake_mp4_recording_path = None; - } - }); +async fn handle_auth_error(err: tower::BoxError) -> (StatusCode, &'static str) { + match err.downcast::() { + Ok(err) => { + tracing::error!( + "Validation error(s): {:?}", + err.errors().collect::>() + ); + (StatusCode::UNAUTHORIZED, "Request is not authorized") + } + Err(orig_err) => { + tracing::error!("Unhandled internal error: {orig_err}"); + (StatusCode::INTERNAL_SERVER_ERROR, "internal server error") + } } } -impl CallbackHandler for MyCallbackHandler { - type Data = HttpApiCallback; - - /// HTTP request to "/callback" has been made with payload which as been - /// deserialized into `Self::Data` and session data stored in - /// [CallbackDataAndSession]. 
- fn call<'a>( - &'a self, - data_sess: CallbackDataAndSession, - ) -> Pin>> + Send + 'a>> { - let payload = data_sess.payload; - - let fut = async { - use HttpApiCallback::*; - match payload { - NewCamera(cam_info) => { - debug!("got NewCamera {:?}", cam_info); - let http_camserver_info = cam_info.http_camserver_info.unwrap(); - let cam_settings_data = cam_info.cam_settings_data.unwrap(); - let mut cam_manager3 = self.cam_manager.clone(); - cam_manager3.register_new_camera( - &cam_info.orig_cam_name, - &http_camserver_info, - &cam_info.ros_cam_name, - ); - - let mut current_cam_data = self.per_cam_data_arc.write(); - if current_cam_data - .insert( - cam_info.ros_cam_name.clone(), - PerCamSaveData { - cam_settings_data: Some(cam_settings_data), - feature_detect_settings: None, - current_image_png: cam_info.current_image_png, - }, - ) - .is_some() - { - panic!("camera {} already known", cam_info.ros_cam_name.as_str()); - } - } - UpdateCurrentImage(image_info) => { - // new image from camera - debug!( - "got new image for camera {}", - image_info.ros_cam_name.as_str() - ); - let mut current_cam_data = self.per_cam_data_arc.write(); - current_cam_data - .get_mut(&image_info.ros_cam_name) - .unwrap() - .current_image_png = image_info.inner.current_image_png; - } - UpdateCamSettings(cam_settings) => { - let mut current_cam_data = self.per_cam_data_arc.write(); - current_cam_data - .get_mut(&cam_settings.ros_cam_name) - .unwrap() - .cam_settings_data = Some(cam_settings.inner); - } - UpdateFeatureDetectSettings(feature_detect_settings) => { - let mut current_cam_data = self.per_cam_data_arc.write(); - current_cam_data - .get_mut(&feature_detect_settings.ros_cam_name) - .unwrap() - .feature_detect_settings = Some(feature_detect_settings.inner); - } - DoRecordCsvTables(value) => { - debug!("got DoRecordCsvTables({})", value); - toggle_saving_csv_tables( - value, - self.expected_framerate_arc.clone(), - self.output_base_dirname.clone(), - self.braidz_write_tx_weak.clone(), - self.per_cam_data_arc.clone(), - self.shared_data.clone(), - ) - .await; - } - DoRecordMp4Files(start_saving) => { - debug!("got DoRecordMp4Files({start_saving})"); - - self.strand_cam_http_session_handler - .toggle_saving_mp4_files_all(start_saving) - .await?; - - self.start_saving_mp4s_all_cams(start_saving); - } - SetExperimentUuid(value) => { - debug!("got SetExperimentUuid({})", value); - if let Some(braidz_write_tx) = self.braidz_write_tx_weak.upgrade() { - // `braidz_write_tx` will be dropped after this scope. 
- braidz_write_tx - .send(flydra2::SaveToDiskMsg::SetExperimentUuid(value)) - .await - .unwrap(); - } - } - SetPostTriggerBufferSize(val) => { - debug!("got SetPostTriggerBufferSize({val})"); - - self.strand_cam_http_session_handler - .set_post_trigger_buffer_all(val) - .await?; - - { - let mut tracker = self.shared_data.write(); - tracker.modify(|store| { - store.post_trigger_buffer_size = val; - }); - } - } - PostTriggerMp4Recording => { - debug!("got PostTriggerMp4Recording"); - - let is_saving = { - let tracker = self.shared_data.read(); - (*tracker).as_ref().fake_mp4_recording_path.is_some() - }; - - if !is_saving { - self.strand_cam_http_session_handler - .initiate_post_trigger_mp4_all() - .await?; - - self.start_saving_mp4s_all_cams(true); - } else { - debug!("Already saving, not initiating again."); - } - } - } - Ok::<_, MainbrainError>(()) +async fn cam_info_handler( + State(app_state): State, + _session_key: axum_token_auth::SessionKey, + Path(encoded_cam_name): Path, +) -> impl axum::response::IntoResponse { + let cam_cfg = app_state + .camera_configs + .get(&UrlEncodedCamName(encoded_cam_name.clone())); + + if let Some(config) = cam_cfg { + let camdata_addr = app_state.camdata_addr.clone(); + let software_limit_framerate = app_state.software_limit_framerate.clone(); + + let msg = flydra_types::RemoteCameraInfoResponse { + camdata_addr, + config: config.clone(), + force_camera_sync_mode: app_state.force_camera_sync_mode, + software_limit_framerate, }; - Box::pin(async { - match fut.await { - Ok(()) => Ok(()), - Err(e) => { - let e: Box = Box::new(e); - Err(e) - } - } - }) + Ok(axum::Json(msg)) + } else { + error!("HTTP camera not found: \"{encoded_cam_name:?}\""); + Err(( + StatusCode::NOT_FOUND, + format!("Camera \"{encoded_cam_name}\" not found."), + )) } } -pub(crate) type MyBody = http_body_util::combinators::BoxBody; - -pub(crate) fn body_from_buf(body_buf: &[u8]) -> MyBody { - let body = http_body_util::Full::new(bytes::Bytes::from(body_buf.to_vec())); - use http_body_util::BodyExt; - MyBody::new(body.map_err(|_: std::convert::Infallible| unreachable!())) -} - async fn launch_braid_http_backend( - auth: AccessControl, - cam_manager: flydra2::ConnectedCamerasManager, - shared: HttpApiShared, - bui_backend_config: bui_backend::lowlevel::Config, - camdata_addr: String, - camera_configs: BTreeMap, - time_model_arc: Arc>>, - triggerbox_cmd: Option>, - sync_pulse_pause_started_arc: Arc>>, - expected_framerate_arc: Arc>>, - output_base_dirname: std::path::PathBuf, - braidz_write_tx_weak: tokio::sync::mpsc::WeakSender, - per_cam_data_arc: Arc>>, - force_camera_sync_mode: bool, - software_limit_framerate: flydra_types::StartSoftwareFrameRateLimit, - strand_cam_http_session_handler: StrandCamHttpSessionHandler, -) -> Result { - // Create our shared state. - let shared_store = Arc::new(RwLock::new(ChangeTracker::new(shared))); + jwt_secret: Option>, + listener: tokio::net::TcpListener, + mainbrain_server_info: BuiServerAddrInfo, + app_state: BraidAppState, +) -> Result>> { + let persistent_secret = if let Some(_secret) = jwt_secret { + todo!(); + } else { + tracing::warn!("Using newly generated persistent secret. All previously issued session keys will be invalidated. FIXME todo!"); + cookie::Key::generate() + }; - // Create `inner`, which takes care of the browser communication details for us. - let chan_size = 10; + // Setup our auth layer. 
+ let token_config = match mainbrain_server_info.token() { + AccessToken::PreSharedToken(value) => Some(axum_token_auth::TokenConfig { + name: "token".to_string(), + value: value.clone(), + }), + AccessToken::NoToken => None, + }; - let callback_handler = Box::new(MyCallbackHandler { - shared_data: shared_store.clone(), - cam_manager: cam_manager.clone(), - expected_framerate_arc: expected_framerate_arc.clone(), - output_base_dirname: output_base_dirname.clone(), - per_cam_data_arc: per_cam_data_arc.clone(), - braidz_write_tx_weak, - strand_cam_http_session_handler: strand_cam_http_session_handler.clone(), - }); + let cfg = axum_token_auth::AuthConfig { + token_config, + persistent_secret, + cookie_name: "braid-bui-session", + ..Default::default() + }; - let raw_req_handler: bui_backend::lowlevel::RawReqHandler = Arc::new(Box::new( - move |resp: http::response::Builder, req: http::Request| { - debug!("got HTTP request {}", req.uri()); - let path = req.uri().path(); - let mut resp = resp.header(hyper::header::CONTENT_TYPE, JSON_TYPE); - let resp = if &path[..1] == "/" && &path[1..] == flydra_types::REMOTE_CAMERA_INFO_PATH { - let query = req.uri().query(); - let query_pairs = url::form_urlencoded::parse(query.unwrap_or("").as_bytes()); - let mut orig_camera_name: Option = None; - for (key, value) in query_pairs { - use std::ops::Deref; - if key.deref() == "camera" { - orig_camera_name = Some(value.to_string()); - } - } - if let Some(camera_name) = orig_camera_name { - if camera_configs.contains_key(&camera_name) { - let config = camera_configs.get(&camera_name).unwrap().clone(); - let camdata_addr = camdata_addr.clone(); - let software_limit_framerate = software_limit_framerate.clone(); - - let msg = flydra_types::RemoteCameraInfoResponse { - camdata_addr, - config, - force_camera_sync_mode, - software_limit_framerate, - }; - let body_buf = serde_json::to_vec(&msg).unwrap(); - resp.body(body_from_buf(&body_buf))? - } else { - error!("HTTP camera not found: \"{camera_name}\""); - resp = resp.status(hyper::StatusCode::NOT_FOUND); - resp.body(body_from_buf(EMPTY_JSON_BUF))? - } - } else { - error!("HTTP request for configuration but no camera specified"); - resp = resp.status(hyper::StatusCode::BAD_REQUEST); - resp.body(body_from_buf(EMPTY_JSON_BUF))? - } - } else { - error!("HTTP request unknown"); - resp = resp.status(hyper::StatusCode::BAD_REQUEST); - resp.body(body_from_buf(EMPTY_JSON_BUF))? 
- }; - let resp: http::Response> = resp; // type annotation - Ok(resp) - }, - )); - - let (rx_conn, bui_server) = bui_backend::lowlevel::launcher( - bui_backend_config.clone(), - &auth, - chan_size, - &EVENTS_PREFIX, - Some(raw_req_handler), - callback_handler, - ); + #[cfg(feature = "bundle_files")] + let serve_dir = tower_serve_static::ServeDir::new(&ASSETS_DIR); - let (shutdown_tx, shutdown_rx) = tokio::sync::oneshot::channel::<()>(); + #[cfg(feature = "serve_files")] + let serve_dir = tower_http::services::fs::ServeDir::new( + std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .join("braid_frontend") + .join("pkg"), + ); - let (_, inner) = create_bui_app_inner( - tokio::runtime::Handle::current(), - Some(shutdown_rx), - &auth, - shared_store, - Some(flydra_types::BRAID_EVENT_NAME.to_string()), - rx_conn, - bui_server, - ) - .await?; - - let mainbrain_server_info = { - let local_addr = *inner.local_addr(); - let token = inner.token(); - StrandCamBuiServerInfo::new(local_addr, token) + let auth_layer = cfg.into_layer(); + + assert_eq!(BRAID_EVENTS_URL_PATH, "braid-events"); + assert_eq!(REMOTE_CAMERA_INFO_PATH, "remote_camera_info"); + assert_eq!(CAM_PROXY_PATH, "cam_proxy"); + + // Create axum router. + let router = axum::Router::new() + .route("/braid-events", get(events_handler)) + .route( + "/remote_camera_info/:encoded_cam_name", + get(cam_info_handler), + ) + // .route( + // "/cam_proxy/:encoded_cam_name", + // get(cam_proxy_handler), + // ) + .route( + "/callback", + axum::routing::post(crate::callback_handling::callback_handler), + ) + .nest_service("/", serve_dir) + .layer( + tower::ServiceBuilder::new() + .layer(TraceLayer::new_for_http()) + // Auth layer will produce an error if the request cannot be + // authorized so we must handle that. + .layer(axum::error_handling::HandleErrorLayer::new( + handle_auth_error, + )) + .layer(auth_layer), + ) + .with_state(app_state); + + // create future for our app + let http_serve_future = { + use futures::TryFutureExt; + use std::future::IntoFuture; + axum::serve(listener, router) + .into_future() + .map_err(|e| anyhow::Error::from(e)) }; - debug!( - "initialized HttpApiApp listening at {}", - mainbrain_server_info.guess_base_url_with_token() - ); + // Display where we are listening. + println!("Braid listening at {}", mainbrain_server_info.addr()); + + println!("Predicted URL(s):"); + for url in mainbrain_server_info.build_urls()?.iter() { + println!(" * {url}"); + if !flydra_types::is_loopback(url) { + println!("This same URL as a QR code:"); + display_qr_url(&format!("{url}")); + } + } - // Return our app. 
- Ok(HttpApiApp { - inner, - time_model_arc, - triggerbox_cmd, - sync_pulse_pause_started_arc, - expected_framerate_arc, - _shutdown_tx: DropSend(Some(shutdown_tx)), - }) + Ok(http_serve_future) } fn compute_trigger_timestamp( @@ -426,7 +291,7 @@ fn addr_to_buf(local_addr: &std::net::SocketAddr) -> Result { } struct SendConnectedCamToBuiBackend { - shared_store: Arc>>, + shared_store: SharedStore, } impl flydra2::ConnectedCamCallback for SendConnectedCamToBuiBackend { @@ -460,44 +325,26 @@ fn display_qr_url(url: &str) { writeln!(stdout_handle).expect("write failed"); } -pub struct StartupPhase1 { - pub camdata_socket: UdpSocket, - my_app: HttpApiApp, - pub mainbrain_server_info: StrandCamBuiServerInfo, - cam_manager: flydra2::ConnectedCamerasManager, - strand_cam_http_session_handler: StrandCamHttpSessionHandler, - handle: tokio::runtime::Handle, - valve: stream_cancel::Valve, - trigger_cfg: TriggerType, - triggerbox_rx: Option>, - model_pose_server_addr: std::net::SocketAddr, - coord_processor: CoordProcessor, - signal_all_cams_present: Arc, - signal_all_cams_synced: Arc, - raw_packet_logger: RawPacketLogger, -} - -pub async fn pre_run( - handle: &tokio::runtime::Handle, +pub(crate) async fn do_run_forever( show_tracking_params: bool, // sched_policy_priority: Option<(libc::c_int, libc::c_int)>, - camera_configs: BTreeMap, + camera_configs: BTreeMap, trigger_cfg: TriggerType, mainbrain_config: &braid_config_data::MainbrainConfig, jwt_secret: Option>, - all_expected_cameras: std::collections::BTreeSet, + all_expected_cameras: std::collections::BTreeSet, force_camera_sync_mode: bool, software_limit_framerate: flydra_types::StartSoftwareFrameRateLimit, saving_program_name: &str, -) -> Result { + listener: tokio::net::TcpListener, + mainbrain_server_info: BuiServerAddrInfo, +) -> Result<()> { let cal_fname: Option = mainbrain_config.cal_fname.clone(); let output_base_dirname: std::path::PathBuf = mainbrain_config.output_base_dirname.clone(); let tracking_params: flydra_types::TrackingParams = mainbrain_config.tracking_params.clone(); let camdata_addr_unspecified: &str = &mainbrain_config.lowlatency_camdata_udp_addr; - let http_api_server_addr: String = mainbrain_config.http_api_server_addr.clone(); - let http_api_server_token: Option = mainbrain_config.http_api_server_token.clone(); let model_pose_server_addr: std::net::SocketAddr = mainbrain_config.model_server_addr; let save_empty_data2d: bool = mainbrain_config.save_empty_data2d; @@ -506,27 +353,7 @@ pub async fn pre_run( // Create `stream_cancel::Valve` for shutting everything down. Note this is // `Clone`, so we can (and should) shut down everything with it. let (quit_trigger, valve) = stream_cancel::Valve::new(); - let (shtdwn_q_tx, mut shtdwn_q_rx) = tokio::sync::mpsc::channel::<()>(5); - - ctrlc::set_handler(move || { - // This closure can get called multiple times, but quit_trigger - // and shutdown_tx cannot be copied or cloned and thus can only - // but fired once. So in this signal handler we fire a message - // on a queue and then on the receive side only deal with the first - // send. - info!("got Ctrl-C, shutting down"); - - let shtdwn_q_tx2 = shtdwn_q_tx.clone(); - - // Send quit message. 
- match futures::executor::block_on(shtdwn_q_tx2.send(())) { - Ok(()) => {} - Err(e) => { - error!("failed sending quit command: {}", e); - } - } - }) - .expect("Error setting Ctrl-C handler"); + let (_shtdwn_q_tx, mut shtdwn_q_rx) = tokio::sync::mpsc::channel::<()>(5); let recon = if let Some(ref cal_fname) = cal_fname { info!("using calibration: {}", cal_fname.display()); @@ -555,7 +382,7 @@ pub async fn pre_run( let signal_all_cams_present = std::sync::Arc::new(std::sync::atomic::AtomicBool::new(false)); let signal_all_cams_synced = std::sync::Arc::new(std::sync::atomic::AtomicBool::new(false)); - let cam_manager = flydra2::ConnectedCamerasManager::new( + let mut cam_manager = flydra2::ConnectedCamerasManager::new( &recon, all_expected_cameras, signal_all_cams_present.clone(), @@ -571,7 +398,7 @@ pub async fn pre_run( } let ignore_latency = false; - let coord_processor = CoordProcessor::new( + let mut coord_processor = CoordProcessor::new( CoordProcessorConfig { tracking_params, save_empty_data2d, @@ -582,7 +409,6 @@ pub async fn pre_run( cam_manager.clone(), recon.clone(), flydra2::BraidMetadataBuilder::saving_program_name(saving_program_name), - valve.clone(), )?; // Here is what we do on quit: @@ -592,7 +418,7 @@ pub async fn pre_run( let mut quit_trigger_container = Some(quit_trigger); let mut strand_cam_http_session_handler2 = strand_cam_http_session_handler.clone(); let braidz_write_tx_weak = coord_processor.braidz_write_tx.downgrade(); - handle.spawn(async move { + tokio::spawn(async move { while let Some(()) = shtdwn_q_rx.recv().await { debug!("got shutdown command {}:{}", file!(), line!()); @@ -625,11 +451,6 @@ pub async fn pre_run( debug!("shutdown handler finished {}:{}", file!(), line!()); }); - let mut bui_backend_config = from_bui_backend::get_bui_backend_config(); - bui_backend_config.cookie_name = "braid-bui-token".to_string(); - - let time_model_arc = Arc::new(RwLock::new(None)); - let (triggerbox_cmd, triggerbox_rx, fake_sync) = match &trigger_cfg { TriggerType::TriggerboxV1(_) => { let (tx, rx) = tokio::sync::mpsc::channel(20); @@ -643,7 +464,7 @@ pub async fn pre_run( let flydra_app_name = "Braid".to_string(); - let shared = HttpApiShared { + let shared = BraidHttpApiSharedState { fake_sync, csv_tables_dirname: None, fake_mp4_recording_path: None, @@ -655,30 +476,14 @@ pub async fn pre_run( flydra_app_name, all_expected_cameras_are_synced: false, }; + let shared_store = ChangeTracker::new(shared); + let mut shared_store_changes_rx = shared_store.get_changes(1); + let shared_store = Arc::new(RwLock::new(shared_store)); let expected_framerate_arc = Arc::new(RwLock::new(None)); let per_cam_data_arc = Arc::new(RwLock::new(Default::default())); - use std::net::ToSocketAddrs; - let http_api_server_addr = http_api_server_addr.to_socket_addrs()?.next().unwrap(); - - let auth = if let Some(ref secret) = jwt_secret { - if let Some(token) = http_api_server_token { - bui_backend::highlevel::generate_auth_with_token( - http_api_server_addr, - secret.to_vec(), - token, - )? - } else { - bui_backend::highlevel::generate_random_auth(http_api_server_addr, secret.to_vec())? 
- } - } else if http_api_server_addr.ip().is_loopback() { - AccessControl::Insecure(http_api_server_addr) - } else { - return Err(MainbrainError::JwtError.into()); - }; - let (camdata_addr, camdata_socket) = { // The port of the low latency UDP incoming data socket may be specified // as 0 in which case the OS will decide which port will actually be @@ -709,214 +514,52 @@ pub async fn pre_run( let braidz_write_tx_weak = coord_processor.braidz_write_tx.downgrade(); - let my_app = launch_braid_http_backend( - auth, - cam_manager.clone(), - shared, - bui_backend_config, - camdata_addr, - camera_configs, - time_model_arc, - triggerbox_cmd, - sync_pulse_pause_started_arc, - expected_framerate_arc, - output_base_dirname.clone(), - braidz_write_tx_weak, - per_cam_data_arc.clone(), - force_camera_sync_mode, - software_limit_framerate, - strand_cam_http_session_handler.clone(), - ) - .await?; - // This creates a debug logger when `packet_capture_dump_fname` is not // `None`. - let raw_packet_logger = + let mut raw_packet_logger = RawPacketLogger::new(mainbrain_config.packet_capture_dump_fname.as_deref())?; - let is_loopback = my_app.inner.local_addr().ip().is_loopback(); - let mainbrain_server_info = - flydra_types::StrandCamBuiServerInfo::new(*my_app.inner.local_addr(), my_app.inner.token()); - let url = mainbrain_server_info.guess_base_url_with_token(); - println!( - "Depending on things, you may be able to login with this url: {}", - url - ); - if !is_loopback { - println!("This same URL as a QR code:"); - display_qr_url(&url); - } - - Ok(StartupPhase1 { - camdata_socket, - my_app, - mainbrain_server_info, - cam_manager, - strand_cam_http_session_handler, - handle: handle.clone(), - trigger_cfg, - triggerbox_rx, - model_pose_server_addr, - coord_processor, - valve, - signal_all_cams_present, - signal_all_cams_synced, - raw_packet_logger, - }) -} - -use flydra_types::HostClock; -use serde::Serialize; - -/// Format for debugging raw packet data direct from Strand Cam. -#[derive(Serialize)] -struct RawPacketLogRow { - cam_name: String, - #[serde(with = "flydra_types::timestamp_opt_f64")] - timestamp: Option>, - #[serde(with = "flydra_types::timestamp_f64")] - cam_received_time: FlydraFloatTimestampLocal, - device_timestamp: Option, - block_id: Option, - framenumber: i32, - n_frames_skipped: u32, - done_camnode_processing: f64, - preprocess_stamp: f64, - cam_num: Option, - synced_frame: Option, -} - -/// Logger for debugging raw packet data direct from Strand Cam. -struct RawPacketLogger { - fd: Option>, -} + let time_model_arc = Arc::new(RwLock::new(None)); -impl RawPacketLogger { - /// Create a new logger for debugging raw packet data. - /// - /// If `fname` argument is None, this does very little. - fn new(fname: Option<&std::path::Path>) -> Result { - let fd = fname - .map(std::fs::File::create) - .transpose()? - .map(csv::Writer::from_writer); - Ok(Self { fd }) - } + // Create our app state. + let app_state = BraidAppState { + shared_store: shared_store.clone(), + camdata_addr, + force_camera_sync_mode, + software_limit_framerate, + event_broadcaster: Default::default(), + per_cam_data_arc: per_cam_data_arc.clone(), + camera_configs, + next_connection_id: Arc::new(RwLock::new(0)), + expected_framerate_arc: expected_framerate_arc.clone(), + braidz_write_tx_weak, + cam_manager: cam_manager.clone(), + output_base_dirname, + strand_cam_http_session_handler: strand_cam_http_session_handler.clone(), + }; - /// Log debug data for raw packets. 
- /// - /// If no filename was given to `Self::new`, this does very little. - fn log_raw_packets( - &mut self, - packet: &flydra_types::FlydraRawUdpPacket, - cam_num: Option, - synced_frame: Option, - ) -> Result<()> { - if let Some(ref mut fd) = self.fd { - let row = RawPacketLogRow { - cam_name: packet.cam_name.clone(), - timestamp: packet.timestamp.clone(), - cam_received_time: packet.cam_received_time.clone(), - device_timestamp: packet.device_timestamp, - block_id: packet.block_id, - framenumber: packet.framenumber, - n_frames_skipped: packet.n_frames_skipped, - done_camnode_processing: packet.done_camnode_processing, - preprocess_stamp: packet.preprocess_stamp, - cam_num, - synced_frame, - }; - fd.serialize(row)?; + // This future will send state updates to all connected event listeners. + let event_broadcaster = app_state.event_broadcaster.clone(); + let send_updates_future = async move { + while let Some((_prev_state, next_state)) = shared_store_changes_rx.next().await { + let frame_string = to_event_frame(&next_state); + event_broadcaster.broadcast_frame(frame_string).await; } - Ok(()) - } -} - -struct ValvedDebug -where - T: futures::stream::Stream, -{ - inner: T, -} - -impl ValvedDebug -where - T: futures::stream::Stream, -{ - fn new(inner: T) -> Self { - Self { inner } - } -} - -impl std::fmt::Debug for ValvedDebug -where - T: futures::stream::Stream, -{ - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { - f.debug_struct("ValvedDebug").finish_non_exhaustive() - } -} - -impl futures::stream::Stream for ValvedDebug -where - T: futures::stream::Stream, -{ - type Item = X; - fn poll_next( - self: Pin<&mut Self>, - cx: &mut std::task::Context<'_>, - ) -> std::task::Poll> { - // safe since we never move nor leak &mut - let inner = unsafe { self.map_unchecked_mut(|s| &mut s.inner) }; - inner.poll_next(cx) - } -} + }; -pub async fn run(phase1: StartupPhase1) -> Result<()> { - let camdata_socket = phase1.camdata_socket; - let my_app = phase1.my_app; - - let mainbrain_server_info = phase1.mainbrain_server_info; - let mut cam_manager = phase1.cam_manager; - let strand_cam_http_session_handler = phase1.strand_cam_http_session_handler; - let handle = phase1.handle; - let rt_handle = handle.clone(); - let rt_handle2 = rt_handle.clone(); - let rt_handle3 = rt_handle2.clone(); - let trigger_cfg = phase1.trigger_cfg; - let triggerbox_rx = phase1.triggerbox_rx; - let model_pose_server_addr = phase1.model_pose_server_addr; - let mut coord_processor = phase1.coord_processor; - let valve = phase1.valve; - let signal_all_cams_present = phase1.signal_all_cams_present; - let signal_all_cams_synced = phase1.signal_all_cams_synced; - let mut raw_packet_logger = phase1.raw_packet_logger; + let http_serve_future = + launch_braid_http_backend(jwt_secret, listener, mainbrain_server_info, app_state).await?; let signal_triggerbox_connected = Arc::new(AtomicBool::new(false)); - let triggerbox_cmd = my_app.triggerbox_cmd.clone(); - - info!( - "http api server at {}", - mainbrain_server_info.guess_base_url_with_token() - ); - - let time_model_arc = my_app.time_model_arc.clone(); - let expected_framerate_arc = my_app.expected_framerate_arc.clone(); - let sync_pulse_pause_started_arc = my_app.sync_pulse_pause_started_arc.clone(); { let sender = SendConnectedCamToBuiBackend { - shared_store: my_app.inner.shared_arc().clone(), + shared_store: shared_store.clone(), }; let old_callback = cam_manager.set_cam_changed_callback(Box::new(sender)); assert!(old_callback.is_none()); } - let info 
= flydra_types::StaticMainbrainInfo { - name: env!("CARGO_PKG_NAME").into(), - version: env!("CARGO_PKG_VERSION").into(), - }; - let (triggerbox_data_tx, triggerbox_data_rx) = tokio::sync::mpsc::channel::(20); @@ -960,7 +603,7 @@ pub async fn run(phase1: StartupPhase1) -> Result<()> { tokio::spawn(triggerbox_future); } - let tracker = my_app.inner.shared_arc().clone(); + let tracker = shared_store.clone(); let on_new_clock_model = { let time_model_arc = time_model_arc.clone(); @@ -983,7 +626,7 @@ pub async fn run(phase1: StartupPhase1) -> Result<()> { tracker_guard.modify(|shared| shared.clock_model_copy = cm.clone()); } let strand_cam_http_session_handler2 = strand_cam_http_session_handler.clone(); - handle.spawn(async move { + tokio::spawn(async move { let r = strand_cam_http_session_handler2 .send_clock_model_to_all(cm) .await; @@ -1006,7 +649,7 @@ pub async fn run(phase1: StartupPhase1) -> Result<()> { use braid_triggerbox::{make_trig_fps_cmd, Cmd}; - let tx = my_app.triggerbox_cmd.clone().unwrap(); + let tx = triggerbox_cmd.clone().unwrap(); let cmd_rx = triggerbox_rx.unwrap(); let (rate_cmd, rate_actual) = make_trig_fps_cmd(*fps as f64); @@ -1107,7 +750,7 @@ pub async fn run(phase1: StartupPhase1) -> Result<()> { let time_model_arc2 = time_model_arc.clone(); let cam_manager2 = cam_manager.clone(); let valve2 = valve.clone(); - let sync_start_jh = rt_handle3.spawn(async move { + let _sync_start_jh = tokio::spawn(async move { let interval_stream = tokio_stream::wrappers::IntervalStream::new(tokio::time::interval( std::time::Duration::from_secs(1), )); @@ -1134,9 +777,8 @@ pub async fn run(phase1: StartupPhase1) -> Result<()> { // Signal cameras are synchronized - let shared_store = my_app.inner.shared_arc().clone(); let valve2 = valve.clone(); - let sync_done_jh = rt_handle3.spawn(async move { + let _sync_done_jh = tokio::spawn(async move { let interval_stream = tokio_stream::wrappers::IntervalStream::new(tokio::time::interval( std::time::Duration::from_secs(1), )); @@ -1178,8 +820,8 @@ pub async fn run(phase1: StartupPhase1) -> Result<()> { } }; - let ros_cam_name = RosCamName::new(packet.cam_name.clone()); - live_stats_collector2.register_new_frame_data(&ros_cam_name, packet.points.len()); + let raw_cam_name = RawCamName::new(packet.cam_name.clone()); + live_stats_collector2.register_new_frame_data(&raw_cam_name, packet.points.len()); let sync_time_min = match &trigger_cfg { TriggerType::TriggerboxV1(_) => { @@ -1196,7 +838,7 @@ pub async fn run(phase1: StartupPhase1) -> Result<()> { // (which occurs upon synchronization). 
let send_new_frame_offset = |frame| { let strand_cam_http_session_handler = strand_cam_http_session_handler2.clone(); - let cam_name = ros_cam_name.clone(); + let cam_name = raw_cam_name.clone(); let fut_no_err = async move { match strand_cam_http_session_handler .send_frame_offset(&cam_name, frame) @@ -1208,7 +850,7 @@ pub async fn run(phase1: StartupPhase1) -> Result<()> { } }; }; - rt_handle.spawn(fut_no_err); + tokio::spawn(fut_no_err); }; let synced_frame = cam_manager2.got_new_frame_live( @@ -1219,7 +861,7 @@ pub async fn run(phase1: StartupPhase1) -> Result<()> { send_new_frame_offset, ); - let cam_num = cam_manager.cam_num(&ros_cam_name); + let cam_num = cam_manager.cam_num(&raw_cam_name); raw_packet_logger .log_raw_packets(&packet, cam_num, synced_frame) @@ -1228,15 +870,15 @@ pub async fn run(phase1: StartupPhase1) -> Result<()> { let cam_num = match cam_num { Some(cam_num) => cam_num, None => { - let known_ros_cam_names = cam_manager.all_ros_cam_names(); - let cam_names = known_ros_cam_names + let known_raw_cam_names = cam_manager.all_raw_cam_names(); + let cam_names = known_raw_cam_names .iter() .map(|x| format!("\"{}\"", x.as_str())) .collect::>() .join(", "); debug!( "Unknown camera name \"{}\" ({} expected cameras: [{}]).", - ros_cam_name.as_str(), + raw_cam_name.as_str(), cam_names.len(), cam_names ); @@ -1258,7 +900,7 @@ pub async fn run(phase1: StartupPhase1) -> Result<()> { }; let frame_data = flydra2::FrameData::new( - ros_cam_name, + raw_cam_name, cam_num, synced_frame, trigger_timestamp, @@ -1286,18 +928,11 @@ pub async fn run(phase1: StartupPhase1) -> Result<()> { let (data_tx, data_rx) = tokio::sync::mpsc::channel(50); - let ms = flydra2::new_model_server( - data_rx, - valve.clone(), - &model_pose_server_addr, - info, - rt_handle2, - ) - .await?; + tokio::spawn(flydra2::new_model_server(data_rx, model_pose_server_addr)); { let mut tracker = tracker2.write(); - tracker.modify(|shared| shared.model_server_addr = Some(*ms.local_addr())) + tracker.modify(|shared| shared.model_server_addr = Some(model_pose_server_addr)) } let expected_framerate: Option = *expected_framerate_arc9.read(); @@ -1309,30 +944,141 @@ pub async fn run(phase1: StartupPhase1) -> Result<()> { expected_framerate, ); + tokio::spawn(async { send_updates_future.await }); + tokio::spawn(async { http_serve_future.await }); + // We "block" (in an async way) here for the entire runtime of the program. let writer_jh = consume_future.await; - // If these tasks are still running, cancel them. - debug!("Runtime ending. Aborting any remaining tasks."); - sync_start_jh.abort(); - sync_done_jh.abort(); - // Allow writer task time to finish writing. debug!("Runtime ending. Joining coord_processor.consume_stream future."); + writer_jh .join() .expect("join writer task 1") .expect("join writer task 2"); + // If these tasks are still running, cancel them. + debug!("Runtime ending. Aborting any remaining tasks."); + // sync_start_jh.abort(); + // sync_done_jh.abort(); + debug!("done {}:{}", file!(), line!()); Ok(()) } +use flydra_types::HostClock; +use serde::Serialize; + +/// Format for debugging raw packet data direct from Strand Cam. 
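+///
+/// One such row is serialized to CSV for each received packet when raw-packet
+/// logging is enabled, i.e. when `packet_capture_dump_fname` is set in the
+/// mainbrain configuration and handed to `RawPacketLogger::new` (see above).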
+#[derive(Serialize)] +struct RawPacketLogRow { + cam_name: String, + #[serde(with = "flydra_types::timestamp_opt_f64")] + timestamp: Option>, + #[serde(with = "flydra_types::timestamp_f64")] + cam_received_time: FlydraFloatTimestampLocal, + device_timestamp: Option, + block_id: Option, + framenumber: i32, + n_frames_skipped: u32, + done_camnode_processing: f64, + preprocess_stamp: f64, + cam_num: Option, + synced_frame: Option, +} + +/// Logger for debugging raw packet data direct from Strand Cam. +struct RawPacketLogger { + fd: Option>, +} + +impl RawPacketLogger { + /// Create a new logger for debugging raw packet data. + /// + /// If `fname` argument is None, this does very little. + fn new(fname: Option<&std::path::Path>) -> Result { + let fd = fname + .map(std::fs::File::create) + .transpose()? + .map(csv::Writer::from_writer); + Ok(Self { fd }) + } + + /// Log debug data for raw packets. + /// + /// If no filename was given to `Self::new`, this does very little. + fn log_raw_packets( + &mut self, + packet: &flydra_types::FlydraRawUdpPacket, + cam_num: Option, + synced_frame: Option, + ) -> Result<()> { + if let Some(ref mut fd) = self.fd { + let row = RawPacketLogRow { + cam_name: packet.cam_name.clone(), + timestamp: packet.timestamp.clone(), + cam_received_time: packet.cam_received_time.clone(), + device_timestamp: packet.device_timestamp, + block_id: packet.block_id, + framenumber: packet.framenumber, + n_frames_skipped: packet.n_frames_skipped, + done_camnode_processing: packet.done_camnode_processing, + preprocess_stamp: packet.preprocess_stamp, + cam_num, + synced_frame, + }; + fd.serialize(row)?; + } + Ok(()) + } +} + +struct ValvedDebug +where + T: futures::stream::Stream, +{ + inner: T, +} + +impl ValvedDebug +where + T: futures::stream::Stream, +{ + fn new(inner: T) -> Self { + Self { inner } + } +} + +impl std::fmt::Debug for ValvedDebug +where + T: futures::stream::Stream, +{ + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { + f.debug_struct("ValvedDebug").finish_non_exhaustive() + } +} + +impl futures::stream::Stream for ValvedDebug +where + T: futures::stream::Stream, +{ + type Item = X; + fn poll_next( + self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll> { + // safe since we never move nor leak &mut + let inner = unsafe { self.map_unchecked_mut(|s| &mut s.inner) }; + inner.poll_next(cx) + } +} + #[derive(Clone)] struct LiveStatsCollector { - shared: Arc>>, - collected: Arc>>, + shared: SharedStore, + collected: Arc>>, } #[derive(Debug)] @@ -1368,12 +1114,12 @@ impl LiveStatsAccum { } impl LiveStatsCollector { - fn new(shared: Arc>>) -> Self { + fn new(shared: SharedStore) -> Self { let collected = Arc::new(RwLock::new(BTreeMap::new())); Self { shared, collected } } - fn register_new_frame_data(&self, name: &RosCamName, n_points: usize) { + fn register_new_frame_data(&self, name: &RawCamName, n_points: usize) { let to_send = { // scope for lock on self.collected let mut collected = self.collected.write(); @@ -1406,13 +1152,13 @@ impl LiveStatsCollector { } } -async fn toggle_saving_csv_tables( +pub(crate) async fn toggle_saving_csv_tables( start_saving: bool, expected_framerate_arc: Arc>>, output_base_dirname: std::path::PathBuf, braidz_write_tx_weak: tokio::sync::mpsc::WeakSender, - per_cam_data_arc: Arc>>, - shared_data: Arc>>, + per_cam_data_arc: Arc>>, + shared_data: SharedStore, ) { if start_saving { let expected_framerate: Option = *expected_framerate_arc.read(); @@ -1520,3 +1266,9 @@ async fn 
begin_cam_sync_triggerbox_in_process( info!("requesting triggerbox to start sending pulses again"); Ok(()) } + +fn to_event_frame(state: &BraidHttpApiSharedState) -> String { + let buf = serde_json::to_string(&state).unwrap(); + let frame_string = format!("event: {BRAID_EVENT_NAME}\ndata: {buf}\n\n"); + frame_string +} diff --git a/braid/braid-run/src/multicam_http_session_handler.rs b/braid/braid-run/src/multicam_http_session_handler.rs index 1078a7ffd..a6b369019 100644 --- a/braid/braid-run/src/multicam_http_session_handler.rs +++ b/braid/braid-run/src/multicam_http_session_handler.rs @@ -1,15 +1,16 @@ +use http_body_util::combinators::BoxBody; use parking_lot::RwLock; use std::{collections::BTreeMap, sync::Arc}; use bui_backend_session::{self, InsecureSession}; -use flydra_types::{RosCamName, StrandCamHttpServerInfo}; +use flydra_types::{BuiServerInfo, RawCamName}; use strand_cam_storetype::CallbackType; /// Keeps HTTP sessions for all connected cameras. #[derive(Clone)] pub struct StrandCamHttpSessionHandler { cam_manager: flydra2::ConnectedCamerasManager, - name_to_session: Arc>>, + name_to_session: Arc>>, } #[derive(Clone)] @@ -18,34 +19,46 @@ enum MaybeSession { Errored, } -use crate::mainbrain::{MainbrainError, MainbrainResult}; - -type MyBody = http_body_util::combinators::BoxBody; +// impl MaybeSession { +// fn as_mut(&mut self) -> Option<&mut InsecureSession> { +// match self { +// MaybeSession::Alive(session) => Some(session), +// MaybeSession::Errored => None, +// } +// } +// } + +trait MyErrorTrait { + fn boxerr(self) -> std::result::Result; +} -fn body_from_buf(body_buf: &[u8]) -> MyBody { - let body = http_body_util::Full::new(bytes::Bytes::from(body_buf.to_vec())); - use http_body_util::BodyExt; - MyBody::new(body.map_err(|_: std::convert::Infallible| unreachable!())) +impl MyErrorTrait for std::result::Result { + fn boxerr(self) -> std::result::Result { + match self { + Ok(v) => Ok(v), + Err(e) => Err(Box::new(e)), + } + } } +use crate::mainbrain::{BoxedStdError, MainbrainError, MainbrainResult}; + impl StrandCamHttpSessionHandler { - pub fn new(cam_manager: flydra2::ConnectedCamerasManager) -> Self { + pub(crate) fn new(cam_manager: flydra2::ConnectedCamerasManager) -> Self { Self { cam_manager, name_to_session: Arc::new(RwLock::new(BTreeMap::new())), } } - async fn open_session(&self, cam_name: &RosCamName) -> Result { + async fn open_session(&self, cam_name: &RawCamName) -> Result { // Create a new session if it doesn't exist. let (base_url, token) = { if let Some(cam_addr) = self.cam_manager.http_camserver_info(cam_name) { match cam_addr { - StrandCamHttpServerInfo::NoServer => { + BuiServerInfo::NoServer => { panic!("cannot connect to camera with no server"); } - StrandCamHttpServerInfo::Server(details) => { - (details.base_url(), details.token().clone()) - } + BuiServerInfo::Server(details) => (details.base_url(), details.token().clone()), } } else { panic!("attempting post to unknown camera") @@ -75,7 +88,7 @@ impl StrandCamHttpSessionHandler { async fn get_or_open_session( &self, - cam_name: &RosCamName, + cam_name: &RawCamName, ) -> Result { // Get session if it already exists. let opt_session = { self.name_to_session.read().get(cam_name).cloned() }; @@ -87,9 +100,25 @@ impl StrandCamHttpSessionHandler { } } + // async fn get( + // &self, + // cam_name: &RawCamName, + // path: &str, + // ) -> Result, MainbrainError> { + // let mut session = self.get_or_open_session(cam_name).await?; + // if let Some(session) = session.as_mut() { + // Ok(session.get(path).await?) 
+ // } else { + // Err(MainbrainError::SessionErrored { + // #[cfg(feature = "backtrace")] + // backtrace: std::backtrace::Backtrace::capture(), + // }) + // } + // } + async fn post( &self, - cam_name: &RosCamName, + cam_name: &RawCamName, args: ci2_remote_control::CamArg, ) -> Result<(), MainbrainError> { let session = self.get_or_open_session(cam_name).await?; @@ -97,8 +126,11 @@ impl StrandCamHttpSessionHandler { // Post to session match session { MaybeSession::Alive(mut session) => { - let body = - body_from_buf(&serde_json::to_vec(&CallbackType::ToCamera(args)).unwrap()); + let body = http_body_util::Full::new(bytes::Bytes::from( + serde_json::to_vec(&CallbackType::ToCamera(args)).unwrap(), + )); + use http_body_util::BodyExt; + let body = BoxBody::new(body.map_err(|_: std::convert::Infallible| unreachable!())); let result = session.post("callback", body).await; match result { @@ -110,7 +142,8 @@ impl StrandCamHttpSessionHandler { } Err(err) => { error!( - "For {cam_name}: StrandCamHttpSessionHandler::post() got error {err:?}" + "For \"{}\": StrandCamHttpSessionHandler::post() got error {err:?}", + cam_name.as_str(), ); let mut name_to_session = self.name_to_session.write(); name_to_session.insert(cam_name.clone(), MaybeSession::Errored); @@ -124,7 +157,7 @@ impl StrandCamHttpSessionHandler { pub async fn send_frame_offset( &self, - cam_name: &RosCamName, + cam_name: &RawCamName, frame_offset: u64, ) -> Result<(), MainbrainError> { info!( @@ -136,7 +169,7 @@ impl StrandCamHttpSessionHandler { self.post(cam_name, args).await } - async fn send_quit(&mut self, cam_name: &RosCamName) -> Result<(), MainbrainError> { + async fn send_quit(&mut self, cam_name: &RawCamName) -> Result<(), MainbrainError> { info!("for cam {}, sending quit", cam_name.as_str()); let args = ci2_remote_control::CamArg::DoQuit; @@ -154,8 +187,9 @@ impl StrandCamHttpSessionHandler { Ok(_) => Ok(()), Err(e) => { warn!( - "Ignoring error while sending quit command to {}: {}", - cam_name, e + "Ignoring error while sending quit command to \"{}\": {}", + cam_name.as_str(), + e ); Err(e.into()) } @@ -166,7 +200,7 @@ impl StrandCamHttpSessionHandler { use futures::{stream, StreamExt}; // Based on https://stackoverflow.com/a/51047786 const CONCURRENT_REQUESTS: usize = 5; - let results = stream::iter(self.cam_manager.all_ros_cam_names()) + let results = stream::iter(self.cam_manager.all_raw_cam_names()) .map(|cam_name| { let mut session = self.clone(); let cam_name = cam_name.clone(); @@ -184,8 +218,9 @@ impl StrandCamHttpSessionHandler { match r { Ok(()) => {} Err((cam_name, e)) => warn!( - "Ignoring error When sending quit command to camera {}: {}", - cam_name, e + "Ignoring error When sending quit command to camera \"{}\": {}", + cam_name.as_str(), + e ), } }) @@ -193,7 +228,7 @@ impl StrandCamHttpSessionHandler { } pub async fn toggle_saving_mp4_files_all(&self, start_saving: bool) -> MainbrainResult<()> { - let cam_names = self.cam_manager.all_ros_cam_names(); + let cam_names = self.cam_manager.all_raw_cam_names(); for cam_name in cam_names.iter() { self.toggle_saving_mp4_files(cam_name, start_saving).await?; } @@ -202,7 +237,7 @@ impl StrandCamHttpSessionHandler { pub async fn toggle_saving_mp4_files( &self, - cam_name: &RosCamName, + cam_name: &RawCamName, start_saving: bool, ) -> MainbrainResult<()> { debug!( @@ -221,7 +256,7 @@ impl StrandCamHttpSessionHandler { &self, clock_model: Option, ) -> MainbrainResult<()> { - let cam_names = self.cam_manager.all_ros_cam_names(); + let cam_names = 
self.cam_manager.all_raw_cam_names(); for cam_name in cam_names.iter() { self.send_clock_model(cam_name, clock_model.clone()).await?; } @@ -230,7 +265,7 @@ impl StrandCamHttpSessionHandler { pub async fn send_clock_model( &self, - cam_name: &RosCamName, + cam_name: &RawCamName, clock_model: Option, ) -> MainbrainResult<()> { debug!( @@ -245,7 +280,7 @@ impl StrandCamHttpSessionHandler { } pub async fn set_post_trigger_buffer_all(&self, num_frames: usize) -> MainbrainResult<()> { - let cam_names = self.cam_manager.all_ros_cam_names(); + let cam_names = self.cam_manager.all_raw_cam_names(); for cam_name in cam_names.iter() { self.set_post_trigger_buffer(cam_name, num_frames).await?; } @@ -254,7 +289,7 @@ impl StrandCamHttpSessionHandler { pub async fn set_post_trigger_buffer( &self, - cam_name: &RosCamName, + cam_name: &RawCamName, num_frames: usize, ) -> MainbrainResult<()> { debug!( @@ -270,14 +305,14 @@ impl StrandCamHttpSessionHandler { } pub async fn initiate_post_trigger_mp4_all(&self) -> MainbrainResult<()> { - let cam_names = self.cam_manager.all_ros_cam_names(); + let cam_names = self.cam_manager.all_raw_cam_names(); for cam_name in cam_names.iter() { self.initiate_post_trigger_mp4(cam_name).await?; } Ok(()) } - pub async fn initiate_post_trigger_mp4(&self, cam_name: &RosCamName) -> MainbrainResult<()> { + pub async fn initiate_post_trigger_mp4(&self, cam_name: &RawCamName) -> MainbrainResult<()> { debug!( "for cam {}, initiating post trigger recording", cam_name.as_str(), diff --git a/braid/src/lib.rs b/braid/src/lib.rs index 44f327bb3..abc7377ec 100644 --- a/braid/src/lib.rs +++ b/braid/src/lib.rs @@ -9,7 +9,12 @@ pub fn braid_start(name: &str) -> Result<()> { std::env::set_var("RUST_LOG", "braid=info,flydra2=info,braid_run=info,strand_cam=info,flydra_feature_detector=info,rt_image_viewer=info,flydra1_triggerbox=info,error"); } - env_tracing_logger::init(); + // env_tracing_logger::init(); + + // construct a subscriber that prints formatted traces to stdout + let subscriber = tracing_subscriber::FmtSubscriber::new(); + // use that subscriber to process traces emitted after this point + tracing::subscriber::set_global_default(subscriber)?; let version = format!("{} (git {})", env!("CARGO_PKG_VERSION"), env!("GIT_HASH")); log::info!("{} {}", name, version); diff --git a/bui-backend-session/Cargo.toml b/bui-backend-session/Cargo.toml index 0ee7b0806..8036fece3 100644 --- a/bui-backend-session/Cargo.toml +++ b/bui-backend-session/Cargo.toml @@ -6,7 +6,7 @@ edition = "2021" rust-version="1.60" [dependencies] -log = "0.4" +tracing = "0.1.40" futures = "0.3" hyper = {version="1.1", default-features = false, features=["client","http1"]} parking_lot = "0.12" @@ -17,4 +17,6 @@ cookie = "0.18" hyper-util = { version = "0.1.1", features = ["client-legacy", "tokio", "client", "http1"] } http-body-util = "0.1.0" thiserror = "1.0.51" -bui-backend-types = "0.8" + +bui-backend-session-types = { path = "../bui-backend-session/types" } +rust-cam-bui-types = {path="../rust-cam-bui-types"} diff --git a/bui-backend-session/demo/Cargo.toml b/bui-backend-session/demo/Cargo.toml index 29f32ee01..1e2959c67 100644 --- a/bui-backend-session/demo/Cargo.toml +++ b/bui-backend-session/demo/Cargo.toml @@ -6,13 +6,11 @@ edition = "2021" rust-version="1.60" [dependencies] -log = "0.4" futures = "0.3" hyper = "1.1" tokio = {version="1.0.1", features=["full"]} -env_logger = "0.10" -bui-backend-types = "0.8" - -bui-backend-session = {path=".."} http-body-util = "0.1.0" bytes = "1.5.0" + +bui-backend-session = 
{path=".."} +bui-backend-session-types = { path = "../types" } diff --git a/bui-backend-session/demo/src/main.rs b/bui-backend-session/demo/src/main.rs index bad70262e..10489c2b3 100644 --- a/bui-backend-session/demo/src/main.rs +++ b/bui-backend-session/demo/src/main.rs @@ -1,7 +1,7 @@ use http_body_util::BodyExt; use bui_backend_session::future_session; -use bui_backend_types::AccessToken; +use bui_backend_session_types::AccessToken; #[tokio::main] async fn main() -> Result<(), bui_backend_session::Error> { diff --git a/bui-backend-session/src/lib.rs b/bui-backend-session/src/lib.rs index 652e25d8b..8741211f3 100644 --- a/bui-backend-session/src/lib.rs +++ b/bui-backend-session/src/lib.rs @@ -1,10 +1,8 @@ -#[macro_use] -extern crate log; - -use bui_backend_types::AccessToken; +use bui_backend_session_types::AccessToken; use parking_lot::RwLock; use std::sync::Arc; use thiserror::Error; +use tracing::debug; const SET_COOKIE: &str = "set-cookie"; const COOKIE: &str = "cookie"; @@ -26,6 +24,9 @@ pub enum Error { /// A wrapped error from the hyper-util crate #[error("hyper-util error `{0}`")] HyperUtil(#[from] hyper_util::client::legacy::Error), + /// The request was not successful. + #[error("request not successful. status code: `{0}`")] + RequestFailed(http::StatusCode), } /// A session for a single server. @@ -95,7 +96,8 @@ impl InsecureSession { let mut req = hyper::Request::new(body_from_buf(b"")); *req.method_mut() = hyper::Method::GET; *req.uri_mut() = uri; - self.make_request(req).await + let body = self.make_request(req).await?; + Ok(body) } pub async fn get( &mut self, @@ -144,12 +146,23 @@ impl InsecureSession { } } + req.headers_mut().insert( + http::header::CONTENT_TYPE, + hyper::header::HeaderValue::from_str("application/json").unwrap(), + ); + let jar2 = self.jar.clone(); debug!("making request {:?}", req); let response = client.request(req).await?; debug!("handling response {:?}", response); - handle_response(jar2, response) + let body = handle_response(jar2, response)?; + let status_code = body.status(); + if !status_code.is_success() { + tracing::error!("response status code {status_code:?}"); + return Err(Error::RequestFailed(status_code)); + } + Ok(body) } } diff --git a/bui-backend-session/types/Cargo.toml b/bui-backend-session/types/Cargo.toml new file mode 100644 index 000000000..2fdfcf382 --- /dev/null +++ b/bui-backend-session/types/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "bui-backend-session-types" +version = "0.1.0" +edition = "2021" + +[dependencies] +uuid = { version = "1.0", features = ["serde"] } +serde = {version="1.0", features=["derive"]} + +[features] +default = [] + +uuid-v4 = ["uuid/v4"] diff --git a/bui-backend-session/types/src/lib.rs b/bui-backend-session/types/src/lib.rs new file mode 100644 index 000000000..8a62bcbc4 --- /dev/null +++ b/bui-backend-session/types/src/lib.rs @@ -0,0 +1,32 @@ +use serde::{Deserialize, Serialize}; + +/// Identifier for each session (one per client browser). +#[derive(Serialize, Deserialize, Debug, Clone, Copy, Eq, PartialEq, Hash)] +pub struct SessionKey(pub uuid::Uuid); + +#[cfg(feature = "uuid-v4")] +impl SessionKey { + /// Create a new SessionKey + #[cfg_attr(docsrs, doc(cfg(feature = "uuid-v4")))] + pub fn new() -> Self { + SessionKey(uuid::Uuid::new_v4()) + } +} + +/// Identifier for each connected event stream listener (one per client tab). 
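+///
+/// The key is the client's socket address as observed by the server.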
+#[derive(Serialize, Deserialize, Debug, Clone, Copy, Eq, PartialEq, Hash)] +pub struct ConnectionKey { + pub addr: std::net::SocketAddr, +} + +/// A token which can be required to gain access to HTTP API +/// +/// If the server receives a valid token, it will respond with a cookie carrying +/// a session key. +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] +pub enum AccessToken { + /// No token needed (access must be controlled via other means). + NoToken, + /// A pre-shared token to gain access. + PreSharedToken(String), +} diff --git a/build-util/Cargo.toml b/build-util/Cargo.toml index 0c3d4fbe3..312309527 100644 --- a/build-util/Cargo.toml +++ b/build-util/Cargo.toml @@ -3,10 +3,3 @@ name = "build-util" version = "0.1.0" edition = "2021" rust-version = "1.60" - -[dependencies] -bui-backend-codegen = {version="0.9.1", default-features = false} - -[features] -bundle_files = ["bui-backend-codegen/bundle_files"] -serve_files = ["bui-backend-codegen/serve_files"] diff --git a/build-util/src/lib.rs b/build-util/src/lib.rs index 723a5b7cb..46af1ebd5 100644 --- a/build-util/src/lib.rs +++ b/build-util/src/lib.rs @@ -11,19 +11,12 @@ pub fn git_hash(orig_version: &str) -> Result<(), Box<(dyn std::error::Error)>> Ok(()) } -pub fn bui_backend_generate_code
<P>
( - files_dir: P, - generated_path: &str, -) -> Result<(), Box<(dyn std::error::Error)>> -where - P: AsRef, -{ - match bui_backend_codegen::codegen(&files_dir, generated_path) { - Ok(()) => Ok(()), - Err(e) => Err(format!( - "Error in the process of generating '{generated_path}' when attempting to read {} : {e}", - files_dir.as_ref().display() - ) - .into()), - } -} +// pub fn bui_backend_generate_code
<P>
( +// _files_dir: P, +// _generated_path: &str, +// ) -> Result<(), Box<(dyn std::error::Error)>> +// where +// P: AsRef, +// { +// todo!(); +// } \ No newline at end of file diff --git a/event-stream-types/Cargo.toml b/event-stream-types/Cargo.toml new file mode 100644 index 000000000..b0d7b757f --- /dev/null +++ b/event-stream-types/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "event-stream-types" +version = "0.1.0" +edition = "2021" + +[dependencies] +tokio = "1" +uuid = { version = "1.0", features = ["serde"] } +serde = { version = "1.0", features = ["derive"] } +http = "1" +axum = "0.7.4" +http-body = "1.0.0" +bytes = "1.5.0" +tokio-stream = "0.1.14" +futures = "0.3.30" +tracing = "0.1.37" +mime = "0.3.17" + +bui-backend-session-types = { path = "../bui-backend-session/types" } diff --git a/event-stream-types/src/lib.rs b/event-stream-types/src/lib.rs new file mode 100644 index 000000000..2b264adf9 --- /dev/null +++ b/event-stream-types/src/lib.rs @@ -0,0 +1,241 @@ +use bui_backend_session_types::{ConnectionKey, SessionKey}; +use bytes::Bytes; +use futures::StreamExt; +use http::{header::ACCEPT, request::Parts, StatusCode}; +use http_body::Frame; +use std::{ + collections::HashMap, + convert::Infallible, + pin::Pin, + sync::{Arc, RwLock}, +}; +use tokio::sync::mpsc::Sender; +use tokio_stream::wrappers::ReceiverStream; + +pub type EventChunkSender = Sender, Infallible>>; +type EventReceiver = ReceiverStream, Infallible>>; + +/// The type of possible connect event, either connect or disconnect. +#[derive(Debug)] +pub enum ConnectionEventType { + /// A connection event with sink for event stream messages to the connected client. + Connect(EventChunkSender), + /// A disconnection event. + Disconnect, +} + +/// State associated with connection or disconnection. +#[derive(Debug)] +pub struct ConnectionEvent { + /// The type of connection for this event. + pub typ: ConnectionEventType, + /// Identifier for the connecting session (one per browser). + pub session_key: SessionKey, + /// Identifier for the connection (one per tab). + pub connection_key: ConnectionKey, + /// The path being requested (starts with `BuiService::events_prefix`). + pub path: String, +} + +// header extractor for "Accept: text/event-stream" -------------------------- + +pub struct AcceptsEventStream; + +#[axum::async_trait] +impl axum::extract::FromRequestParts for AcceptsEventStream { + type Rejection = (StatusCode, &'static str); + async fn from_request_parts(p: &mut Parts, _: &S) -> Result { + const ES: &[u8] = b"text/event-stream"; + if p.headers.get_all(ACCEPT).iter().any(|v| v.as_bytes() == ES) { + Ok(AcceptsEventStream) + } else { + Err(( + StatusCode::BAD_REQUEST, + "Bad request: It is required that you have an \ + HTTP Header \"Accept: text/event-stream\"", + )) + } + } +} + +// TolerantJson extractor -------------------------- + +/// This is much like `axum::Json` but does not fail if the request does not set +/// the 'Content-Type' header. +/// +/// This is purely for backwards-compatibility and can be removed sometime. 
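+///
+/// A minimal usage sketch (the handler and payload type here are illustrative,
+/// not taken from this crate); it destructures just like `axum::Json`:
+///
+/// ```ignore
+/// async fn callback_handler(
+///     TolerantJson(payload): TolerantJson<MyPayload>,
+/// ) -> axum::http::StatusCode {
+///     // ... use `payload` ...
+///     axum::http::StatusCode::OK
+/// }
+/// ```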
+pub struct TolerantJson(pub T); + +#[axum::async_trait] +impl axum::extract::FromRequest for TolerantJson +where + T: serde::de::DeserializeOwned, + S: Send + Sync, +{ + type Rejection = axum::extract::rejection::JsonRejection; + + async fn from_request( + mut req: axum::extract::Request, + state: &S, + ) -> Result { + if !json_content_type(req.headers()) { + tracing::error!("request should indicate \"Content-Type: application/json\""); + req.headers_mut().insert( + http::header::CONTENT_TYPE, + http::HeaderValue::from_static("application/json"), + ); + } + match axum::Json::from_request(req, state).await { + Ok(payload) => Ok(TolerantJson(payload.0)), + Err(e) => Err(e), + } + } +} + +// events body --------------------------- + +pub struct EventsBody { + events: EventReceiver, +} + +impl EventsBody { + fn new(events: EventReceiver) -> Self { + Self { events } + } +} + +impl http_body::Body for EventsBody { + type Data = Bytes; + type Error = Infallible; + + fn poll_frame( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll, Self::Error>>> { + self.events.poll_next_unpin(cx) + } +} + +impl axum::response::IntoResponse for EventsBody { + fn into_response(self) -> axum::response::Response { + let mut response = axum::response::Response::new(axum::body::Body::new(self)); + response.headers_mut().insert( + "content-type", + http::header::HeaderValue::from_static("text/event-stream"), + ); + response + } +} + +// ----- + +#[derive(Debug, Clone, Eq, PartialEq, Hash)] +pub struct ConnectionSessionKey { + session_key: uuid::Uuid, + connection_key: std::net::SocketAddr, +} + +impl ConnectionSessionKey { + pub fn new(session_key: uuid::Uuid, connection_key: std::net::SocketAddr) -> Self { + Self { + session_key, + connection_key, + } + } +} + +/// broadcasts events to many listeners. +/// +/// This is generic over the key type. +#[derive(Debug, Clone)] +pub struct EventBroadcaster { + txers: Arc>>, +} + +impl Default for EventBroadcaster { + fn default() -> Self { + Self { + txers: Default::default(), + } + } +} + +impl EventBroadcaster +where + KEY: std::cmp::Eq + std::hash::Hash, +{ + /// Add a new connection indexed by a key. + /// + /// This returns an [EventsBody]. + pub fn new_connection(&self, key: KEY) -> (EventChunkSender, EventsBody) { + let (tx, rx) = tokio::sync::mpsc::channel(10); + let mut txers = self.txers.write().unwrap(); + txers.insert(key, tx.clone()); + let rx = tokio_stream::wrappers::ReceiverStream::new(rx); + let body = EventsBody::new(rx); + + (tx, body) + } + /// Transmit bytes as frame + /// + /// This will drop connections which have errored. + pub async fn broadcast_frame(&self, frame_string: String) { + let txers: Vec<_> = { + // Keep lock in this scope. + // Move all listeners out of shared map. + self.txers.write().unwrap().drain().collect() + }; + + // now we have released the lock and can await without holding the lock. + let mut keep_event_listeners = Vec::with_capacity(txers.len()); + for (key, tx) in txers.into_iter() { + match tx.send(Ok(Frame::data(frame_string.clone().into()))).await { + Ok(()) => { + keep_event_listeners.push((key, tx)); + } + Err(tokio::sync::mpsc::error::SendError(_frame)) => { + // The receiver was dropped because the connection closed. + tracing::debug!("send error"); + } + } + } + + { + // Keep lock in this scope. + // Move all listeners back into shared map. 
+ let mut event_listeners = self.txers.write().unwrap(); + for (key, value) in keep_event_listeners.into_iter() { + event_listeners.insert(key, value); + } + }; + } +} + +// ---- + +// This does not really belong here... + +fn json_content_type(headers: &http::HeaderMap) -> bool { + let content_type = if let Some(content_type) = headers.get(http::header::CONTENT_TYPE) { + content_type + } else { + return false; + }; + + let content_type = if let Ok(content_type) = content_type.to_str() { + content_type + } else { + return false; + }; + + let mime = if let Ok(mime) = content_type.parse::() { + mime + } else { + return false; + }; + + let is_json_content_type = mime.type_() == "application" + && (mime.subtype() == "json" || mime.suffix().map_or(false, |name| name == "json")); + + is_json_content_type +} diff --git a/flydra-types/Cargo.toml b/flydra-types/Cargo.toml index d6dff1435..6edd5f497 100644 --- a/flydra-types/Cargo.toml +++ b/flydra-types/Cargo.toml @@ -16,24 +16,30 @@ serde_cbor = {version="0.11.2", optional=true} tokio-util = {version="0.7.3", features=["codec"], optional=true} bytes = {version="1.0", optional=true} bitflags = "1.0" -dns-lookup = {version="1", optional=true} ordered-float = {version="3.0.0", features=["serde"]} static_assertions = "1.1.0" -bui-backend-types = "0.8" nalgebra = {version="0.32", features=["serde-serialize"]} num-integer = "0.1" +http = "1" +if-addrs = {version="0.11.0", optional=true} + +anyhow = { version = "1", optional = true } +axum-token-auth = { version = "0.1.0", optional = true } +tokio = {version="1", optional=true} withkey = {path="../withkey"} datetime-conversion = {path="../datetime-conversion"} rust-cam-bui-types = {path="../rust-cam-bui-types"} flydra-pt-detect-cfg = {path="../flydra-feature-detector/flydra-pt-detect-cfg"} flydra-feature-detector-types = {path="../flydra-feature-detector/flydra-feature-detector-types"} +bui-backend-session-types = { path = "../bui-backend-session/types" } [features] default=["with-tokio-codec"] -with-dns=["dns-lookup"] with-tokio-codec=["tokio-util", "bytes", "serde_cbor"] +start-listener = [ "anyhow", "axum-token-auth", "tokio" ] +build-urls = ["if-addrs"] [dev-dependencies] anyhow = "1" diff --git a/flydra-types/src/lib.rs b/flydra-types/src/lib.rs index a2de77818..43c433d9f 100644 --- a/flydra-types/src/lib.rs +++ b/flydra-types/src/lib.rs @@ -12,10 +12,11 @@ extern crate static_assertions; use ordered_float::NotNan; use rust_cam_bui_types::{ClockModel, RecordingPath}; +use std::net::{IpAddr, SocketAddr}; use serde::{Deserialize, Deserializer, Serialize}; -use bui_backend_types::AccessToken; +use bui_backend_session_types::AccessToken; use withkey::WithKey; pub const DEFAULT_MODEL_SERVER_ADDR: &str = "0.0.0.0:8397"; @@ -71,18 +72,23 @@ pub const REPROJECTION_DIST_HLOG_FNAME: &str = "reprojection_distance_100x_pixel // this approach is common with a scale factor of 10. // -------------------------------------------------------------------- +// Changes to this struct should update BraidMetadataSchemaTag. #[derive(Debug, Serialize, Deserialize, Clone)] pub struct CamInfoRow { - // changes to this should update BraidMetadataSchemaTag + /// The index of the camera. This changes from invocation to invocation of Braid. pub camn: CamNum, + /// The name of the camera. This is stable across invocations of Braid. + /// + /// Any valid UTF-8 string is possible. (Previously, this was the "ROS name" + /// of the camera in which, e.g. '-' was replaced with '_'. This is no + /// longer the case.) 
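+ /// For example, a camera named "Basler-22005677" is now recorded here as-is,
+ /// whereas previously it would have appeared as "Basler_22005677".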
pub cam_id: String, - // pub hostname: String, } +// Changes to this struct should update BraidMetadataSchemaTag. #[allow(non_snake_case)] #[derive(Debug, Serialize, Deserialize, Clone)] pub struct KalmanEstimatesRow { - // changes to this struct should update BraidMetadataSchemaTag pub obj_id: u32, pub frame: SyncFno, /// The timestamp when the trigger pulse fired. @@ -150,6 +156,7 @@ impl RawCamName { pub fn to_ros(&self) -> RosCamName { let ros_name: String = self.0.replace('-', "_"); let ros_name: String = ros_name.replace(' ', "_"); + let ros_name: String = ros_name.replace('/', "_"); RosCamName::new(ros_name) } pub fn as_str(&self) -> &str { @@ -162,9 +169,21 @@ impl RawCamName { pub struct RosCamName(String); impl RosCamName { + /// Create new `RosCamName` assuming input `s` is already valid ROS name. pub fn new(s: String) -> Self { RosCamName(s) } + /// Create new `RosCamName` if `s` is valid ROS name. + pub fn new_checked(s: String) -> Option { + let raw = RawCamName::new(s); + let ros_name = raw.to_ros(); + if ros_name.0 == raw.0 { + // No replacement was used, therefore `s` is ROS already. + Some(ros_name) + } else { + None + } + } pub fn as_str(&self) -> &str { &self.0 } @@ -176,15 +195,36 @@ impl std::fmt::Display for RosCamName { } } -pub const REMOTE_CAMERA_INFO_PATH: &str = "remote_camera_info/"; +pub mod braid_http { + // URL paths on Braid HTTP server. + pub const REMOTE_CAMERA_INFO_PATH: &str = "remote_camera_info"; + pub const CAM_PROXY_PATH: &str = "cam_proxy"; -#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] + /// Encode camera name, potentially with slashes or spaces, to be a single + /// URL path component. + pub fn encode_cam_name(cam_name: &crate::RawCamName) -> UrlEncodedCamName { + // Maybe URL encoding would be better? + UrlEncodedCamName(cam_name.0.replace(' ', "_").replace('/', "_")) + } + + #[derive(Ord, PartialOrd, Debug, Clone, Eq, PartialEq)] + pub struct UrlEncodedCamName(pub String); + + impl std::fmt::Display for UrlEncodedCamName { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { + write!(f, "{}", self.0) + } + } +} + +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, Default)] pub enum StartSoftwareFrameRateLimit { /// Set the frame_rate limit at a given frame rate. Enable(f64), /// Disable the frame_rate limit. Disabled, /// Do not change the frame rate limit. + #[default] NoChange, } @@ -206,6 +246,9 @@ fn return_false() -> bool { #[serde(deny_unknown_fields)] pub struct BraidCameraConfig { /// The name of the camera (e.g. "Basler-22005677") + /// + /// (This is the original UTF-8 camera name, not the ROS-encoded camera name + /// in which certain characters are not allowed.) pub name: String, /// Filename of vendor-specific camera settings file. pub camera_settings_filename: Option, @@ -275,11 +318,11 @@ pub struct PerCamSaveData { #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] pub struct RegisterNewCamera { /// The name of the camera as returned by the camera - pub orig_cam_name: RawCamName, + pub raw_cam_name: RawCamName, /// The name of the camera used in ROS (e.g. with '-' converted to '_'). pub ros_cam_name: RosCamName, /// Location of the camera control HTTP server. - pub http_camserver_info: Option, + pub http_camserver_info: Option, /// The camera settings. pub cam_settings_data: Option, /// The current image. 
@@ -347,7 +390,7 @@ impl ConnectedCameraSyncState { } #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] -pub struct HttpApiShared { +pub struct BraidHttpApiSharedState { pub fake_sync: bool, pub clock_model_copy: Option, pub csv_tables_dirname: Option, @@ -357,7 +400,7 @@ pub struct HttpApiShared { pub post_trigger_buffer_size: usize, pub calibration_filename: Option, pub connected_cameras: Vec, // TODO: make this a BTreeMap? - pub model_server_addr: Option, + pub model_server_addr: Option, pub flydra_app_name: String, pub all_expected_cameras_are_synced: bool, } @@ -369,40 +412,65 @@ pub struct RecentStats { pub points_detected: usize, } +/// Generic HTTP API server information +/// +/// This is used for both the Strand Camera BUI and the Braid BUI. #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] -pub enum StrandCamHttpServerInfo { +pub enum BuiServerInfo { /// No server is present (e.g. prerecorded data). NoServer, /// A server is available. - Server(StrandCamBuiServerInfo), + Server(BuiServerAddrInfo), } +/// HTTP API server access information +/// +/// This contains the address and access token. +/// +/// This is used for both the Strand Camera BUI and the Braid BUI. #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] -pub struct StrandCamBuiServerInfo { - /// The address of the camera control HTTP server. - addr: std::net::SocketAddr, - /// The token for initial connection to the camera control HTTP server. +pub struct BuiServerAddrInfo { + /// The address of the HTTP server. + addr: SocketAddr, + /// The token for initial connection to the HTTP server. token: AccessToken, - resolved_addr: String, } -impl StrandCamBuiServerInfo { - #[cfg(feature = "with-dns")] - pub fn new(addr: std::net::SocketAddr, token: AccessToken) -> Self { - let resolved_addr = if addr.ip().is_unspecified() { - format!("{}:{}", dns_lookup::get_hostname().unwrap(), addr.port()) - } else { - format!("{}", addr) +impl BuiServerAddrInfo { + pub fn new(addr: SocketAddr, token: AccessToken) -> Self { + Self { addr, token } + } + + pub fn addr(&self) -> &SocketAddr { + &self.addr + } + + pub fn token(&self) -> &AccessToken { + &self.token + } + + #[cfg(feature = "build-urls")] + pub fn build_urls(&self) -> std::io::Result> { + let query = match &self.token { + AccessToken::NoToken => "".to_string(), + AccessToken::PreSharedToken(tok) => format!("token={tok}"), }; - Self { - addr, - token, - resolved_addr, - } + + Ok(expand_unspecified_ip(self.addr.ip())? + .into_iter() + .map(|ip| { + http::uri::Builder::new() + .scheme("http") + .authority(format!("{ip}:{}", self.addr.port())) + .path_and_query(format!("/?{query}")) + .build() + .unwrap() + }) + .collect()) } - #[cfg(feature = "with-dns")] pub fn parse_url_with_token(url: &str) -> Result { + // TODO: replace this ugly implementation... 
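+ // Illustrative example (hypothetical values): an input such as
+ // "http://127.0.0.1:44444/?token=abc123" should parse to the socket address
+ // 127.0.0.1:44444 with `AccessToken::PreSharedToken("abc123")`, i.e. the URL
+ // format produced by `build_urls` above.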
let stripped = url .strip_prefix("http://") .ok_or(FlydraTypesError::UrlParseError)?; @@ -429,42 +497,69 @@ impl StrandCamBuiServerInfo { Ok(Self::new(addr, token)) } - pub fn guess_base_url_with_token(&self) -> String { - match self.token { - AccessToken::NoToken => format!("http://{}/", self.resolved_addr), - AccessToken::PreSharedToken(ref tok) => { - format!("http://{}/?token={}", self.resolved_addr, tok) - } - } - } - pub fn base_url(&self) -> String { format!("http://{}", self.addr) } +} - pub fn token(&self) -> &AccessToken { - &self.token +pub fn is_loopback(url: &http::Uri) -> bool { + let authority = match url.authority() { + None => return false, + Some(authority) => authority, + }; + match authority.host() { + "127.0.0.1" | "[::1]" => true, + // should we include "localhost"? only if it actually resolves? + _ => false, } } -#[cfg(feature = "with-dns")] -#[test] -fn test_bui_server_info() { - for addr_str in &[ - "127.0.0.1:1234", - // Ideally, we would also test unspecified addresses here. - // "0.0.0.0:222" - ] { - let addr1 = std::net::ToSocketAddrs::to_socket_addrs(addr_str) - .unwrap() - .next() - .unwrap(); - let bsi1 = StrandCamBuiServerInfo::new(addr1, AccessToken::PreSharedToken("token1".into())); - - let url1 = bsi1.guess_base_url_with_token(); - let test1 = StrandCamBuiServerInfo::parse_url_with_token(&url1).unwrap(); - let url2 = test1.guess_base_url_with_token(); - assert_eq!(url1, url2); +// ----- + +#[cfg(feature = "start-listener")] +pub async fn start_listener( + address_string: &str, +) -> anyhow::Result<(tokio::net::TcpListener, BuiServerAddrInfo)> { + let socket_addr = std::net::ToSocketAddrs::to_socket_addrs(&address_string)? + .next() + .ok_or_else(|| anyhow::anyhow!("no address found for HTTP server"))?; + + let listener = tokio::net::TcpListener::bind(socket_addr).await?; + let listener_local_addr = listener.local_addr()?; + let token_config = if !listener_local_addr.ip().is_loopback() { + Some(axum_token_auth::TokenConfig::new_token("token")) + } else { + None + }; + let token = match token_config { + None => bui_backend_session_types::AccessToken::NoToken, + Some(cfg) => bui_backend_session_types::AccessToken::PreSharedToken(cfg.value.clone()), + }; + let http_camserver_info = BuiServerAddrInfo::new(listener_local_addr, token); + + Ok((listener, http_camserver_info)) +} + +// ----- + +#[cfg(feature = "build-urls")] +fn expand_unspecified_ip(ip: IpAddr) -> std::io::Result> { + if ip.is_unspecified() { + // Get all interfaces if IP is unspecified. + Ok(if_addrs::get_if_addrs()? + .iter() + .filter_map(|x| { + let this_ip = x.addr.ip(); + // Take only IP addresses from correct family. 
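+ // For example, an unspecified IPv4 address (0.0.0.0) expands to the IPv4
+ // address of each local interface; IPv6 interface addresses are skipped
+ // (and vice versa for an unspecified IPv6 address such as [::]).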
+ if ip.is_ipv4() == this_ip.is_ipv4() { + Some(this_ip) + } else { + None + } + }) + .collect()) + } else { + Ok(vec![ip]) } } @@ -785,15 +880,15 @@ pub fn make_hypothesis_test_full3d_default() -> HypothesisTestParams { #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] pub struct CamInfo { - pub name: RosCamName, + pub name: RawCamName, pub state: ConnectedCameraSyncState, - pub http_camserver_info: StrandCamHttpServerInfo, + pub strand_cam_http_server_info: BuiServerInfo, pub recent_stats: RecentStats, } -/// Messages to the mainbrain +/// Messages to Braid #[derive(Clone, Debug, Serialize, Deserialize)] -pub enum HttpApiCallback { +pub enum BraidHttpApiCallback { /// Called from strand-cam to register a camera NewCamera(RegisterNewCamera), /// Called from strand-cam to update the current image @@ -819,8 +914,7 @@ pub enum HttpApiCallback { #[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)] pub struct PerCam { - /// The name of the camera used in ROS (e.g. with '-' converted to '_'). - pub ros_cam_name: RosCamName, + pub raw_cam_name: RawCamName, pub inner: T, } @@ -829,7 +923,7 @@ pub struct FlydraRawUdpPacket { /// The name of the camera /// /// Traditionally this was the ROS camera name (e.g. with '-' converted to - /// '_'), but we should transition to allowing any valid UTF-8 string. + /// '_'), but have transitioned to allowing any valid UTF-8 string. pub cam_name: String, /// frame timestamp of trigger pulse start (or None if cannot be determined) #[serde(with = "crate::timestamp_opt_f64")] @@ -891,24 +985,24 @@ pub enum FlydraTypesError { UrlParseError, } -#[derive(Deserialize, Serialize, Debug)] +#[derive(Deserialize, Serialize, Debug, Clone)] pub struct AddrInfoUnixDomainSocket { pub filename: String, } #[derive(Deserialize, Serialize, Debug, Clone)] pub struct AddrInfoIP { - inner: std::net::SocketAddr, + inner: SocketAddr, } impl AddrInfoIP { - pub fn from_socket_addr(src: &std::net::SocketAddr) -> Self { + pub fn from_socket_addr(src: &SocketAddr) -> Self { Self { inner: *src } } - pub fn to_socket_addr(&self) -> std::net::SocketAddr { + pub fn to_socket_addr(&self) -> SocketAddr { self.inner } - pub fn ip(&self) -> std::net::IpAddr { + pub fn ip(&self) -> IpAddr { self.inner.ip() } pub fn port(&self) -> u16 { @@ -916,7 +1010,7 @@ impl AddrInfoIP { } } -#[derive(Debug)] +#[derive(Debug, Clone)] pub enum RealtimePointsDestAddr { UnixDomainSocket(AddrInfoUnixDomainSocket), IpAddr(AddrInfoIP), @@ -932,7 +1026,7 @@ impl RealtimePointsDestAddr { } #[derive(Debug, Clone)] -pub struct MainbrainBuiLocation(pub StrandCamBuiServerInfo); +pub struct MainbrainBuiLocation(pub BuiServerAddrInfo); #[derive(Debug, Serialize, Deserialize, PartialEq, Eq)] pub struct TriggerClockInfoRow { @@ -946,12 +1040,6 @@ pub struct TriggerClockInfoRow { pub stop_timestamp: FlydraFloatTimestampLocal, } -#[derive(Deserialize, Serialize, Debug, Clone)] -pub struct StaticMainbrainInfo { - pub name: String, - pub version: String, -} - bitflags! 
{ #[derive(Serialize, Deserialize)] pub struct ImageProcessingSteps: u8 { diff --git a/flydra2/Cargo.toml b/flydra2/Cargo.toml index 290945dbb..6b5d9b82e 100644 --- a/flydra2/Cargo.toml +++ b/flydra2/Cargo.toml @@ -9,8 +9,7 @@ rust-version = "1.59" path = "src/flydra2.rs" [build-dependencies] -walkdir = { version = "2.2.5", optional = true } -includedir_codegen = { version = "0.5", optional = true } +build-util = { path = "../build-util" } [dependencies] thiserror = "1.0.33" @@ -34,19 +33,18 @@ parry3d-f64 = "0.13.5" alga = "0.9" configure = "0.1.1" itertools = "0.8" +axum = "0.7.4" http = "1.0" -hyper = { version = "1.1", features = ["server", "http1"] } +# hyper = { version = "1.1", features = ["server", "http1"] } tokio = { version = "1.0.1", default-features = false, features = [ "macros", + "net", "rt", "rt-multi-thread", "sync", "time", ] } tokio-stream = { version = "0.1.8" } -stream-cancel = "0.8" -includedir = { version = "0.5", optional = true } -phf = { version = "0.7.23", optional = true } libflate = "0.1" zip = { version = "0.6.3", default-features = false, features = ["time"] } machine-vision-formats = "0.1" @@ -60,6 +58,10 @@ adskalman = "0.15" pretty-print-nalgebra = "0.1.0" nalgebra-mvn = "0.14" iana-time-zone = "0.1" +tower-http = { version = "0.5.0", features = ["fs"], optional = true } +tower-serve-static = { version = "0.1", optional = true } +include_dir = { version = "0.7.3", optional = true } +const_format = "0.2.32" braidz-types = { path = "../braidz-types" } braidz-writer = { path = "../braid/braidz-writer" } @@ -75,6 +77,7 @@ withkey = { path = "../withkey" } simple-frame = { path = "../simple-frame" } convert-image = { path = "../convert-image" } strand-cam-csv-config-types = { path = "../strand-cam-csv-config-types" } +event-stream-types = { path = "../event-stream-types" } [dev-dependencies] tempfile = "3.4.0" @@ -85,8 +88,8 @@ download-verify = { path = "../download-verify" } default = ["bundle_files"] # must pick one of the following two: -bundle_files = ["walkdir", "includedir_codegen", "includedir", "phf"] -serve_files = [] +bundle_files = ["tower-serve-static", "include_dir"] +serve_files = ["tower-http"] braid = [] diff --git a/flydra2/build.rs b/flydra2/build.rs index 9d1ad9ee9..68b87f456 100644 --- a/flydra2/build.rs +++ b/flydra2/build.rs @@ -1,84 +1,7 @@ -#[cfg(any(feature = "bundle_files", feature = "serve_files"))] -use std::error::Error; -#[cfg(any(feature = "bundle_files", feature = "serve_files"))] -use std::path::Path; - -use std::process::Command; - -fn git_hash() { - let output = Command::new("git") - .args(["rev-parse", "HEAD"]) - .output() - .expect("git"); - let git_hash = String::from_utf8(output.stdout).expect("from_utf8"); - println!("cargo:rustc-env=GIT_HASH={}", git_hash); -} - -/// Do codegen to write a file (`codegen_fname`) which includes -/// the contents of all entries in `files_dir`. -#[cfg(feature = "bundle_files")] -fn create_codegen_file(files_dir: P, codegen_fname: Q) -> Result<(), std::io::Error> -where - P: AsRef, - Q: AsRef, -{ - // Collect list of files to include - let entries = walkdir::WalkDir::new(files_dir.as_ref()) - .into_iter() - .map(|entry| entry.expect("DirEntry error").path().into()) - .collect::>(); - - // Make sure we recompile if these files change - println!("cargo:rerun-if-changed={}", files_dir.as_ref().display()); - for entry in entries.iter() { - println!("cargo:rerun-if-changed={}", entry.display()); - } - - // Check that at least one of the needed files is there. 
- let required: std::path::PathBuf = files_dir.as_ref().join("index.html"); - if !entries.contains(&required) { - return Err(std::io::Error::new( - std::io::ErrorKind::Other, - format!("no {:?} file (hint: run make in elm_frontend)", required), - )); - } - - let codegen_fname_str = format!("{}", codegen_fname.as_ref().display()); - // Write the contents of the files. - includedir_codegen::start("PUBLIC") - .dir(files_dir, includedir_codegen::Compression::Gzip) - .build(&codegen_fname_str)?; - Ok(()) -} - -/// Create an empty file (`codegen_fname`). -#[cfg(feature = "serve_files")] -fn create_codegen_file(_: P, codegen_fname: Q) -> Result<(), Box> -where - P: AsRef, - Q: AsRef, -{ - let out_dir = std::env::var("OUT_DIR")?; - let dest_path = std::path::Path::new(&out_dir).join(codegen_fname); - std::fs::File::create(dest_path)?; - Ok(()) -} - -#[cfg(any(feature = "bundle_files", feature = "serve_files"))] -pub fn codegen(files_dir: P, generated_path: Q) -> Result<(), Box> -where - P: AsRef, - Q: AsRef, -{ - create_codegen_file(&files_dir, &generated_path)?; - Ok(()) -} - #[cfg(not(any(feature = "bundle_files", feature = "serve_files")))] compile_error!("Need cargo feature \"bundle_files\" or \"serve_files\""); -fn main() { - #[cfg(any(feature = "bundle_files", feature = "serve_files"))] - codegen("static", "public.rs").expect("codegen failed"); - git_hash(); +fn main() -> Result<(), Box<(dyn std::error::Error)>> { + build_util::git_hash(env!("CARGO_PKG_VERSION"))?; + Ok(()) } diff --git a/flydra2/src/bin/send_pose.rs b/flydra2/src/bin/send_pose.rs index 800e204d6..a8f45efb6 100644 --- a/flydra2/src/bin/send_pose.rs +++ b/flydra2/src/bin/send_pose.rs @@ -1,34 +1,21 @@ use chrono::Local; -use std::{sync::Arc, time::Instant}; -use tracing::info; +use std::time::Instant; use flydra2::{new_model_server, Result, SendType, TimeDataPassthrough}; use flydra_types::{FlydraFloatTimestampLocal, KalmanEstimatesRow, SyncFno, Triggerbox}; -fn main() -> Result<()> { +#[tokio::main] +async fn main() -> Result<()> { let _tracing_guard = env_tracing_logger::init(); - let runtime = tokio::runtime::Builder::new_multi_thread() - .enable_all() - .build()?; - - let runtime = Arc::new(runtime); - runtime.block_on(inner(runtime.handle().clone())) -} - -async fn inner(rt_handle: tokio::runtime::Handle) -> Result<()> { - let addr = flydra_types::DEFAULT_MODEL_SERVER_ADDR.parse().unwrap(); - info!("send_pose server at {}", addr); - let info = flydra_types::StaticMainbrainInfo { - name: env!("CARGO_PKG_NAME").into(), - version: env!("CARGO_PKG_VERSION").into(), - }; - - let (_quit_trigger, valve) = stream_cancel::Valve::new(); + let addr: std::net::SocketAddr = flydra_types::DEFAULT_MODEL_SERVER_ADDR.parse().unwrap(); + println!("send_pose server at {}", &addr); let (data_tx, data_rx) = tokio::sync::mpsc::channel(50); - new_model_server(data_rx, valve, &addr, info, rt_handle).await?; + let model_server_future = new_model_server(data_rx, addr); + + tokio::spawn(async { model_server_future.await }); let starti = Instant::now(); diff --git a/flydra2/src/bundled_data.rs b/flydra2/src/bundled_data.rs index 434c4c6be..9f0a20487 100644 --- a/flydra2/src/bundled_data.rs +++ b/flydra2/src/bundled_data.rs @@ -1,6 +1,6 @@ use std::collections::BTreeMap; -use flydra_types::{MiniArenaConfig, RosCamName}; +use flydra_types::{MiniArenaConfig, RawCamName}; use nalgebra::Point2; use crate::connected_camera_manager::CameraList; @@ -39,7 +39,7 @@ pub(crate) struct MiniArenaPointPerCam { #[derive(Debug, Default)] pub(crate) struct 
PerMiniArenaAllCamsOneFrameUndistorted { - pub(crate) per_cam: BTreeMap>, + pub(crate) per_cam: BTreeMap>, } // impl PerMiniArenaAllCamsOneFrameUndistorted { @@ -176,9 +176,9 @@ impl BundledAllCamsOneFrameDistorted { let is_new = self.cameras.inner.insert(fdp.frame_data.cam_num.0); assert!( is_new, - "Received data twice: camera={}, orig frame={}. \ + "Received data twice: camera=\"{}\", orig frame={}. \ new frame={}", - fdp.frame_data.cam_name, + fdp.frame_data.cam_name.as_str(), self.frame().0, fdp.frame_data.synced_frame.0 ); diff --git a/flydra2/src/connected_camera_manager.rs b/flydra2/src/connected_camera_manager.rs index ff479f386..d889882c0 100644 --- a/flydra2/src/connected_camera_manager.rs +++ b/flydra2/src/connected_camera_manager.rs @@ -6,8 +6,7 @@ use tracing::{debug, error, info}; use crate::{safe_u8, CamInfoRow, MyFloat}; use flydra_types::{ - CamInfo, CamNum, ConnectedCameraSyncState, RawCamName, RecentStats, RosCamName, - StrandCamHttpServerInfo, SyncFno, + BuiServerInfo, CamInfo, CamNum, ConnectedCameraSyncState, RawCamName, RecentStats, SyncFno, }; pub(crate) trait HasCameraList { @@ -39,10 +38,9 @@ impl HasCameraList for CameraList { #[derive(Debug)] pub struct ConnectedCameraInfo { cam_num: CamNum, - orig_cam_name: RawCamName, - ros_cam_name: RosCamName, + raw_cam_name: RawCamName, sync_state: ConnectedCameraSyncState, - http_camserver_info: StrandCamHttpServerInfo, + http_camserver_info: BuiServerInfo, frames_during_sync: u64, } @@ -50,20 +48,20 @@ impl ConnectedCameraInfo { fn copy_to_caminfo(&self) -> CamInfoRow { CamInfoRow { camn: self.cam_num, - cam_id: self.ros_cam_name.as_str().to_string(), + cam_id: self.raw_cam_name.as_str().to_string(), } } } #[derive(Debug)] struct ConnectedCamerasManagerInner { - all_expected_cameras: BTreeSet, + all_expected_cameras: BTreeSet, next_cam_num: CamNum, - ccis: BTreeMap, - not_yet_connected: BTreeMap, + ccis: BTreeMap, + not_yet_connected: BTreeMap, all_expected_cameras_are_present: bool, all_expected_cameras_are_synced: bool, - first_frame_arrived: BTreeSet, + first_frame_arrived: BTreeSet, } pub trait ConnectedCamCallback: Send { @@ -100,7 +98,7 @@ impl HasCameraList for ConnectedCamerasManager { impl ConnectedCamerasManager { pub fn new( recon: &Option>, - all_expected_cameras: BTreeSet, + all_expected_cameras: BTreeSet, signal_all_cams_present: Arc, signal_all_cams_synced: Arc, ) -> Self { @@ -109,9 +107,9 @@ impl ConnectedCamerasManager { // pre-reserve cam numbers for cameras in calibration let next_cam_num = if let Some(ref recon) = recon { for (base_num, cam_name) in recon.cam_names().enumerate() { - let ros_cam_name = RosCamName::new(cam_name.to_string()); + let raw_cam_name = RawCamName::new(cam_name.to_string()); let cam_num: CamNum = safe_u8(base_num).into(); - not_yet_connected.insert(ros_cam_name, cam_num); + not_yet_connected.insert(raw_cam_name, cam_num); } safe_u8(recon.len()) } else { @@ -147,9 +145,9 @@ impl ConnectedCamerasManager { for cam_name in recon.cam_names() { let cam_num = next_cam_num; next_cam_num = safe_u8(next_cam_num as usize + 1); - let ros_cam_name = RosCamName::new(cam_name.to_string()); + let raw_cam_name = RawCamName::new(cam_name.to_string()); let cam_num: CamNum = cam_num.into(); - not_yet_connected.insert(ros_cam_name, cam_num); + not_yet_connected.insert(raw_cam_name, cam_num); } } @@ -163,11 +161,7 @@ impl ConnectedCamerasManager { for cam_info in old_ccis.values() { // This calls self.notify_cam_changed_listeners(): - self.register_new_camera( - &cam_info.orig_cam_name, - 
&cam_info.http_camserver_info, - &cam_info.ros_cam_name, - ); + self.register_new_camera(&cam_info.raw_cam_name, &cam_info.http_camserver_info); } } @@ -198,9 +192,9 @@ impl ConnectedCamerasManager { .ccis .values() .map(|cci| CamInfo { - name: cci.ros_cam_name.clone(), + name: cci.raw_cam_name.clone(), state: cci.sync_state.clone(), - http_camserver_info: cci.http_camserver_info.clone(), + strand_cam_http_server_info: cci.http_camserver_info.clone(), recent_stats: RecentStats::default(), }) .collect() @@ -214,17 +208,15 @@ impl ConnectedCamerasManager { /// See `new` and `register_new_camera` for the case when multiple cameras /// will be added. pub fn new_single_cam( - orig_cam_name: &RawCamName, - http_camserver_info: &StrandCamHttpServerInfo, + raw_cam_name: &RawCamName, + http_camserver_info: &BuiServerInfo, recon: &Option>, ) -> Self { - let ros_cam_name = orig_cam_name.to_ros(); - let signal_all_cams_present = Arc::new(AtomicBool::new(false)); let signal_all_cams_synced = Arc::new(AtomicBool::new(false)); let mut all_expected_cameras = BTreeSet::new(); - all_expected_cameras.insert(ros_cam_name.clone()); + all_expected_cameras.insert(raw_cam_name.clone()); let this = Self::new( recon, @@ -233,26 +225,26 @@ impl ConnectedCamerasManager { signal_all_cams_synced, ); { - let orig_cam_name = orig_cam_name.clone(); + let raw_cam_name = raw_cam_name.clone(); let mut inner = this.inner.write(); assert!( - !inner.ccis.contains_key(&ros_cam_name), + !inner.ccis.contains_key(&raw_cam_name), "camera connecting again?" ); - let cam_num = if let Some(pre_existing) = inner.not_yet_connected.remove(&ros_cam_name) + let cam_num = if let Some(pre_existing) = inner.not_yet_connected.remove(&raw_cam_name) { debug!( "registering camera {}, which is in existing calibration", - ros_cam_name.as_str() + raw_cam_name.as_str() ); pre_existing } else { debug!( "registering camera {}, which is not in existing calibration", - ros_cam_name.as_str() + raw_cam_name.as_str() ); // unknown (and thus un-calibrated) camera let cam_num = inner.next_cam_num; @@ -261,11 +253,10 @@ impl ConnectedCamerasManager { }; inner.ccis.insert( - ros_cam_name.clone(), + raw_cam_name.clone(), ConnectedCameraInfo { cam_num, - orig_cam_name, - ros_cam_name, + raw_cam_name, sync_state: ConnectedCameraSyncState::Unsynchronized, http_camserver_info: http_camserver_info.clone(), frames_during_sync: 0, @@ -275,8 +266,8 @@ impl ConnectedCamerasManager { this } - pub fn remove(&mut self, ros_cam_name: &RosCamName) { - self.inner.write().ccis.remove(ros_cam_name); + pub fn remove(&mut self, raw_cam_name: &RawCamName) { + self.inner.write().ccis.remove(raw_cam_name); self.notify_cam_changed_listeners(); } @@ -286,34 +277,32 @@ impl ConnectedCamerasManager { /// added. pub fn register_new_camera( &mut self, - orig_cam_name: &RawCamName, - http_camserver_info: &StrandCamHttpServerInfo, - ros_cam_name: &RosCamName, + raw_cam_name: &RawCamName, + http_camserver_info: &BuiServerInfo, ) { - let orig_cam_name = orig_cam_name.clone(); - let ros_cam_name = ros_cam_name.clone(); + let raw_cam_name = raw_cam_name.clone(); let cam_num = { // This scope is for the write lock on self.inner. Keep it minimal. 
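            // Readers of the camera table are blocked only while the new entry is inserted below.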
let mut inner = self.inner.write(); assert!( - !inner.ccis.contains_key(&ros_cam_name), - "camera {} already connected", - ros_cam_name + !inner.ccis.contains_key(&raw_cam_name), + "camera \"{}\" already connected", + raw_cam_name.as_str() ); - let cam_num = if let Some(pre_existing) = inner.not_yet_connected.remove(&ros_cam_name) + let cam_num = if let Some(pre_existing) = inner.not_yet_connected.remove(&raw_cam_name) { debug!( "registering camera {}, which is in existing calibration", - ros_cam_name.as_str() + raw_cam_name.as_str() ); pre_existing } else { if self.recon.is_some() { tracing::warn!( "Camera {} connected, but this is not in existing calibration.", - ros_cam_name.as_str() + raw_cam_name.as_str() ); } // unknown (and thus un-calibrated) camera @@ -323,11 +312,10 @@ impl ConnectedCamerasManager { }; inner.ccis.insert( - ros_cam_name.clone(), + raw_cam_name.clone(), ConnectedCameraInfo { cam_num: cam_num.clone(), - orig_cam_name: orig_cam_name.clone(), - ros_cam_name: ros_cam_name.clone(), + raw_cam_name: raw_cam_name.clone(), sync_state: ConnectedCameraSyncState::Unsynchronized, http_camserver_info: http_camserver_info.clone(), frames_during_sync: 0, @@ -336,10 +324,9 @@ impl ConnectedCamerasManager { cam_num }; info!( - "register_new_camera got original camera name \"{}\", \ - ROS camera name \"{}\", assigned camera number {}", - orig_cam_name.as_str(), - ros_cam_name.as_str(), + "register_new_camera got camera name \"{}\", \ + assigned camera number {}", + raw_cam_name.as_str(), cam_num ); self.notify_cam_changed_listeners(); @@ -361,7 +348,7 @@ impl ConnectedCamerasManager { { assert!(packet.framenumber >= 0); - let ros_cam_name = RosCamName::new(packet.cam_name.clone()); + let raw_cam_name = RawCamName::new(packet.cam_name.clone()); let cam_frame = packet.framenumber as u64; let mut synced_frame = None; @@ -371,7 +358,7 @@ impl ConnectedCamerasManager { let mut do_check_if_all_cameras_synchronized = false; { let inner = self.inner.read(); - if let Some(cci) = inner.ccis.get(&ros_cam_name) { + if let Some(cci) = inner.ccis.get(&raw_cam_name) { // We know this camera already. use crate::ConnectedCameraSyncState::*; match cci.sync_state { @@ -412,7 +399,7 @@ impl ConnectedCamerasManager { // raises the issue slightly earlier. panic!( "Impossible frame number. cam_name: {}, cam_frame: {}, frame0: {}", - ros_cam_name.as_str(), + raw_cam_name.as_str(), cam_frame, frame0, ); @@ -434,7 +421,7 @@ impl ConnectedCamerasManager { let frames_during_sync = { // This scope is for the write lock on self.inner. Keep it minimal. let mut inner = self.inner.write(); - let frames_during_sync = match inner.ccis.get_mut(&ros_cam_name) { + let frames_during_sync = match inner.ccis.get_mut(&raw_cam_name) { Some(cci) => { cci.frames_during_sync += 1; cci.frames_during_sync @@ -448,9 +435,9 @@ impl ConnectedCamerasManager { if frames_during_sync > 10 { error!( - "Many frames during sync period. Camera {} not \ + "Many frames during sync period. Camera \"{}\" not \ being externally triggered?", - ros_cam_name.as_str() + raw_cam_name.as_str() ); } } @@ -460,7 +447,7 @@ impl ConnectedCamerasManager { { // This scope is for the write lock on self.inner. Keep it minimal. let mut inner = self.inner.write(); - match inner.ccis.get_mut(&ros_cam_name) { + match inner.ccis.get_mut(&raw_cam_name) { Some(cci) => { cci.sync_state = ConnectedCameraSyncState::Synchronized(frame0); } @@ -475,8 +462,8 @@ impl ConnectedCamerasManager { // Do notifications associated with synchronization. 
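            // Send the newly determined frame offset, then log that this camera is now synchronized.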
send_new_frame_offset(frame0); info!( - "cam {} synchronized with frame offset: {}", - ros_cam_name.as_str(), + "cam \"{}\" synchronized with frame offset: {}", + raw_cam_name.as_str(), frame0, ); do_check_if_all_cameras_synchronized = true; @@ -485,8 +472,11 @@ impl ConnectedCamerasManager { if do_check_if_all_cameras_present && !self.inner.read().all_expected_cameras_are_present { let mut inner = self.inner.write(); let i2: &mut ConnectedCamerasManagerInner = &mut inner; - if i2.first_frame_arrived.insert(ros_cam_name.clone()) { - info!("first frame from camera {} arrived.", ros_cam_name); + if i2.first_frame_arrived.insert(raw_cam_name.clone()) { + info!( + "first frame from camera \"{}\" arrived.", + raw_cam_name.as_str() + ); if i2.first_frame_arrived == i2.all_expected_cameras { inner.all_expected_cameras_are_present = true; self.signal_all_cams_present.store(true, Ordering::SeqCst); @@ -502,13 +492,13 @@ impl ConnectedCamerasManager { { let mut inner = self.inner.write(); let i2: &mut ConnectedCamerasManagerInner = &mut inner; - // if i2.first_frame_arrived.insert(ros_cam_name.clone()) { - // info!("first frame from camera {} arrived.", ros_cam_name); + // if i2.first_frame_arrived.insert(raw_cam_name.clone()) { + // info!("first frame from camera {} arrived.", raw_cam_name); let mut all_synced = true; - for ros_cam_name in i2.all_expected_cameras.iter() { + for raw_cam_name in i2.all_expected_cameras.iter() { let this_sync = i2 .ccis - .get(ros_cam_name) + .get(raw_cam_name) .map(|cci| cci.sync_state.is_synchronized()) .unwrap_or(false); if !this_sync { @@ -528,40 +518,37 @@ impl ConnectedCamerasManager { synced_frame.map(SyncFno) } - pub fn get_ros_cam_name(&self, cam_num: CamNum) -> Option { + pub fn get_raw_cam_name(&self, cam_num: CamNum) -> Option { for cci in self.inner.read().ccis.values() { if cci.cam_num == cam_num { - return Some(cci.ros_cam_name.clone()); + return Some(cci.raw_cam_name.clone()); } } None } - pub fn all_ros_cam_names(&self) -> Vec { + pub fn all_raw_cam_names(&self) -> Vec { self.inner .read() .ccis .values() - .map(|cci| cci.ros_cam_name.clone()) + .map(|cci| cci.raw_cam_name.clone()) .collect() } - pub fn http_camserver_info( - &self, - ros_cam_name: &RosCamName, - ) -> Option { + pub fn http_camserver_info(&self, raw_cam_name: &RawCamName) -> Option { self.inner .read() .ccis - .get(ros_cam_name) + .get(raw_cam_name) .map(|cci| cci.http_camserver_info.clone()) } - pub fn cam_num(&self, ros_cam_name: &RosCamName) -> Option { + pub fn cam_num(&self, raw_cam_name: &RawCamName) -> Option { let inner = self.inner.read(); - match inner.ccis.get(ros_cam_name) { + match inner.ccis.get(raw_cam_name) { Some(cci) => Some(cci.cam_num), - None => inner.not_yet_connected.get(ros_cam_name).copied(), + None => inner.not_yet_connected.get(raw_cam_name).copied(), } } diff --git a/flydra2/src/error.rs b/flydra2/src/error.rs index cd832a2e9..ae767474d 100644 --- a/flydra2/src/error.rs +++ b/flydra2/src/error.rs @@ -49,13 +49,13 @@ pub enum Error { #[cfg(feature = "backtrace")] backtrace: Backtrace, }, - #[error("{source}")] - HyperError { - #[from] - source: hyper::Error, - #[cfg(feature = "backtrace")] - backtrace: Backtrace, - }, + // #[error("{source}")] + // HyperError { + // #[from] + // source: hyper::Error, + // #[cfg(feature = "backtrace")] + // backtrace: Backtrace, + // }, #[error("{source}")] TomlSerError { #[from] diff --git a/flydra2/src/flydra2.rs b/flydra2/src/flydra2.rs index 912cebecd..a4eb31d19 100644 --- a/flydra2/src/flydra2.rs +++ 
b/flydra2/src/flydra2.rs @@ -34,7 +34,7 @@ pub use braidz_types::BraidMetadata; use flydra_types::{ CamInfoRow, CamNum, ConnectedCameraSyncState, DataAssocRow, FlydraFloatTimestampLocal, - HostClock, KalmanEstimatesRow, RosCamName, SyncFno, TextlogRow, TrackingParams, + HostClock, KalmanEstimatesRow, RawCamName, SyncFno, TextlogRow, TrackingParams, TriggerClockInfoRow, Triggerbox, RECONSTRUCT_LATENCY_HLOG_FNAME, REPROJECTION_DIST_HLOG_FNAME, }; pub use flydra_types::{Data2dDistortedRow, Data2dDistortedRowF32}; @@ -59,7 +59,7 @@ mod tracking_core; mod mini_arenas; mod model_server; -pub use crate::model_server::{new_model_server, ModelServer, SendKalmanEstimatesRow, SendType}; +pub use crate::model_server::{new_model_server, SendKalmanEstimatesRow, SendType}; use crate::contiguous_stream::make_contiguous; use crate::frame_bundler::bundle_frames; @@ -307,7 +307,9 @@ fn to_world_point(vec6: &OVector) -> PointWorldFrame #[derive(Clone, Debug, PartialEq)] pub struct FrameData { /// camera name as kept by mvg::MultiCamSystem - pub cam_name: RosCamName, + /// + /// This can be any UTF-8 string. + pub cam_name: RawCamName, /// camera identification number pub cam_num: CamNum, /// framenumber after synchronization @@ -327,7 +329,7 @@ pub struct FrameData { impl FrameData { #[inline] pub fn new( - cam_name: RosCamName, + cam_name: RawCamName, cam_num: CamNum, synced_frame: SyncFno, trigger_timestamp: Option>, @@ -701,7 +703,7 @@ pub struct StartSavingCsvConfig { pub local: Option>, pub git_rev: String, pub fps: Option, - pub per_cam_data: BTreeMap, + pub per_cam_data: BTreeMap, pub print_stats: bool, pub save_performance_histograms: bool, } @@ -775,7 +777,6 @@ impl CoordProcessor { cam_manager: ConnectedCamerasManager, recon: Option>, metadata_builder: BraidMetadataBuilder, - valve: stream_cancel::Valve, ) -> Result { let CoordProcessorConfig { tracking_params, @@ -1058,7 +1059,7 @@ impl CoordProcessor { #[derive(Debug, Clone)] pub(crate) struct CamAndDist { - pub(crate) ros_cam_name: RosCamName, + pub(crate) raw_cam_name: RawCamName, /// The reprojection distance of the undistorted pixels. 
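    /// (The distance between an observed, undistorted image point and the reprojection of the estimated 3D point.)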
pub(crate) reproj_dist: MyFloat, } diff --git a/flydra2/src/frame_bundler.rs b/flydra2/src/frame_bundler.rs index 25d168b86..a3f1018d0 100644 --- a/flydra2/src/frame_bundler.rs +++ b/flydra2/src/frame_bundler.rs @@ -149,9 +149,9 @@ fn test_frame_bundler() { use crate::{FlydraFloatTimestampLocal, FrameData, SyncFno}; - let cam_name_1 = crate::RosCamName::new("cam1".into()); + let cam_name_1 = crate::RawCamName::new("cam1".into()); let cam_num_1 = crate::CamNum(1); - let cam_name_2 = crate::RosCamName::new("cam2".into()); + let cam_name_2 = crate::RawCamName::new("cam2".into()); let cam_num_2 = crate::CamNum(2); let trigger_timestamp = None; diff --git a/flydra2/src/model_server.rs b/flydra2/src/model_server.rs index 830706290..701b561a4 100644 --- a/flydra2/src/model_server.rs +++ b/flydra2/src/model_server.rs @@ -1,231 +1,77 @@ -use tracing::{debug, error, info}; +use tracing::{debug, info}; -use std::{future::Future, pin::Pin}; +use std::sync::{Arc, RwLock}; -use futures::sink::SinkExt; +use http_body::Frame; use serde::{Deserialize, Serialize}; -use futures::stream::StreamExt; -use http_body_util::BodyExt; -use hyper::header::ACCEPT; -use hyper::{Method, Response, StatusCode}; +use event_stream_types::{AcceptsEventStream, EventBroadcaster}; use crate::{Result, TimeDataPassthrough}; -use flydra_types::{FlydraFloatTimestampLocal, StaticMainbrainInfo, SyncFno, Triggerbox}; +use flydra_types::{FlydraFloatTimestampLocal, SyncFno, Triggerbox}; -#[cfg(any(feature = "bundle_files", feature = "serve_files"))] -include!(concat!(env!("OUT_DIR"), "/public.rs")); // Despite slash, this does work on Windows. +const EVENTS_PATH: &str = "/events"; -pub type EventChunkSender = tokio::sync::mpsc::Sender; +#[cfg(feature = "bundle_files")] +static ASSETS_DIR: include_dir::Dir<'static> = + include_dir::include_dir!("$CARGO_MANIFEST_DIR/static"); -#[derive(Debug)] -pub struct NewEventStreamConnection { - /// A sink for messages send to each connection (one per client tab). - pub chunk_sender: EventChunkSender, -} - -#[derive(Clone)] -struct ModelService { - events_path: String, - config_serve_filepath: String, - config_channel_size: usize, - tx_new_connection: futures::channel::mpsc::Sender, - info: StaticMainbrainInfo, - valve: stream_cancel::Valve, - rt_handle: tokio::runtime::Handle, -} - -impl ModelService { - fn new( - valve: stream_cancel::Valve, - tx_new_connection: futures::channel::mpsc::Sender, - info: StaticMainbrainInfo, - rt_handle: tokio::runtime::Handle, - ) -> Self { - Self { - valve, - events_path: "/events".to_string(), - config_serve_filepath: "static".to_string(), - config_channel_size: 100, - tx_new_connection, - info, - rt_handle, +async fn events_handler( + axum::extract::State(app_state): axum::extract::State, + _: AcceptsEventStream, +) -> impl axum::response::IntoResponse { + let key = { + let mut next_connection_id = app_state.next_connection_id.write().unwrap(); + let key = *next_connection_id; + *next_connection_id += 1; + key + }; + let (tx, body) = app_state.event_broadcaster.new_connection(key); + + // If we have a calibration, extract it. 
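+    // The most recently broadcast calibration is cached in app_state so that a newly connected client receives it immediately.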
+ let cal_data = { + // scope for read lock on app_state.current_calibration + let current_calibration = app_state.current_calibration.read().unwrap(); + if let Some((cal_data, tdpt)) = &*current_calibration { + let data = ( + SendType::CalibrationFlydraXml(cal_data.clone()), + tdpt.clone(), + ); + Some(data) + } else { + None } - } - - #[allow(dead_code)] - fn fullpath(&self, path: &str) -> String { - assert!(path.starts_with('/')); // security check - let path = std::path::PathBuf::from(path) - .strip_prefix("/") - .unwrap() - .to_path_buf(); - assert!(!path.starts_with("..")); // security check - - let base = std::path::PathBuf::from(self.config_serve_filepath.clone()); - let result = base.join(path); - result.into_os_string().into_string().unwrap() - } - - #[cfg(not(any(feature = "bundle_files", feature = "serve_files")))] - fn get_file_content(&self, _file_path: &str) -> Option> { - None - } + }; - #[cfg(feature = "bundle_files")] - fn get_file_content(&self, file_path: &str) -> Option> { - let fullpath = self.fullpath(file_path); - let r = PUBLIC.get(&fullpath); - match r { - Ok(s) => Some(s.into_owned()), - Err(_) => None, - } + // If we extracted a calibration above, send it already now. + if let Some(cal_data) = cal_data { + let cal_body = get_body(&cal_data); + tx.send(Ok(Frame::data(cal_body.into()))).await.unwrap(); } - #[cfg(feature = "serve_files")] - fn get_file_content(&self, file_path: &str) -> Option> { - let fullpath = self.fullpath(file_path); - let contents = match std::fs::read(&fullpath) { - Ok(contents) => contents, - Err(e) => { - error!("requested path {:?}, but got error {:?}", file_path, e); - return None; - } - }; - Some(contents) - } + body } -type MyBody = http_body_util::combinators::BoxBody; - -fn body_from_buf(body_buf: &[u8]) -> MyBody { - let body = http_body_util::Full::new(bytes::Bytes::from(body_buf.to_vec())); - MyBody::new(body.map_err(|_: std::convert::Infallible| unreachable!())) +async fn info_handler() -> impl axum::response::IntoResponse { + tracing::trace!("info_handler"); + "info_handler" } -impl hyper::service::Service> for ModelService { - type Response = hyper::Response; - type Error = hyper::Error; - type Future = - Pin> + Send>>; - - fn call(&self, req: http::Request) -> Self::Future { - let resp = Response::builder(); - debug!("got request {:?}", req); - let resp_final = match (req.method(), req.uri().path()) { - (&Method::GET, path) => { - let path = if path == "/" { "/index.html" } else { path }; - - if path == "/info" { - let buf = serde_json::to_string_pretty(&self.info).unwrap(); - let len = buf.len(); - let body = body_from_buf(buf.as_bytes()); - resp.header(hyper::header::CONTENT_LENGTH, format!("{}", len).as_bytes()) - .header( - hyper::header::CONTENT_TYPE, - hyper::header::HeaderValue::from_str("application/json") - .expect("from_str"), - ) - .body(body) - .expect("response") // todo map err - } else if path == self.events_path { - let mut accepts_event_stream = false; - for value in req.headers().get_all(ACCEPT).iter() { - if value - .to_str() - .expect("to_str()") - .contains("text/event-stream") - { - accepts_event_stream = true; - } - } - - if accepts_event_stream { - let (tx_event_stream, rx_event_stream) = - tokio::sync::mpsc::channel(self.config_channel_size); - let tx_event_stream: EventChunkSender = tx_event_stream; // type annotation only - - let rx_event_stream = self - .valve - .wrap(tokio_stream::wrappers::ReceiverStream::new(rx_event_stream)); - - let rx_event_stream = rx_event_stream - .map(|data: bytes::Bytes| 
Ok::<_, _>(http_body::Frame::data(data))); - - { - let conn_info = NewEventStreamConnection { - chunk_sender: tx_event_stream, - }; - - let mut tx_new_connection2 = self.tx_new_connection.clone(); - let fut = async move { - match tx_new_connection2.send(conn_info).await { - Ok(()) => {} - Err(e) => error!("sending new connection info failed: {}", e), - } - }; - - self.rt_handle.spawn(fut); - } +#[derive(Clone)] +struct ModelServerAppState { + current_calibration: Arc>>, + event_broadcaster: EventBroadcaster, + next_connection_id: Arc>, +} - let body1 = http_body_util::StreamBody::new(rx_event_stream); - let body2 = http_body_util::BodyExt::boxed(body1); - - resp.header( - hyper::header::CONTENT_TYPE, - hyper::header::HeaderValue::from_str("text/event-stream") - .expect("from_str"), - ) - .body(body2) - .expect("response") // todo map err - } else { - let msg = r#" - - - - Error - bad request - - -

Error - bad request

- Event request does not specify 'Accept' HTTP Header or does not accept - the required 'text/event-stream'. (View event stream live in browser - here.) - -"# - .to_string(); - resp.status(StatusCode::BAD_REQUEST) - .body(body_from_buf(msg.as_bytes())) - .expect("response") // todo map err - } - } else { - // TODO read file asynchronously - match self.get_file_content(path) { - Some(buf) => { - let len = buf.len(); - let body = body_from_buf(&buf); - resp.header( - hyper::header::CONTENT_LENGTH, - format!("{}", len).as_bytes(), - ) - .body(body) - .expect("response") // todo map err - } - None => { - resp.status(StatusCode::NOT_FOUND) - .body(body_from_buf(b"")) - .expect("response") // todo map err - } - } - } - } - _ => { - resp.status(StatusCode::NOT_FOUND) - .body(body_from_buf(b"")) - .expect("response") // todo map err - } - }; - Box::pin(futures::future::ok(resp_final)) +impl Default for ModelServerAppState { + fn default() -> Self { + Self { + current_calibration: Arc::new(RwLock::new(None)), + event_broadcaster: Default::default(), + next_connection_id: Arc::new(RwLock::new(0)), + } } } @@ -301,73 +147,37 @@ pub struct ToListener { trigger_timestamp: Option>, } -#[derive(Clone)] -pub struct ModelServer { - local_addr: std::net::SocketAddr, -} - pub async fn new_model_server( - data_rx: tokio::sync::mpsc::Receiver<(SendType, TimeDataPassthrough)>, - valve: stream_cancel::Valve, - addr: &std::net::SocketAddr, - info: StaticMainbrainInfo, - rt_handle: tokio::runtime::Handle, -) -> Result { + mut data_rx: tokio::sync::mpsc::Receiver<(SendType, TimeDataPassthrough)>, + addr: std::net::SocketAddr, +) -> Result<()> { { - let channel_size = 2; - let (tx_new_connection, rx_new_connection) = futures::channel::mpsc::channel(channel_size); - - let service = ModelService::new( - valve.clone(), - tx_new_connection, - info.clone(), - rt_handle.clone(), - ); - - let service2 = service.clone(); + let app_state = ModelServerAppState::default(); let listener = tokio::net::TcpListener::bind(addr).await?; let local_addr = listener.local_addr()?; - let handle2 = rt_handle.clone(); - rt_handle.spawn(async move { - loop { - let (socket, _remote_addr) = listener.accept().await.unwrap(); - let model_service = service2.clone(); - - // Spawn a task to handle the connection. That way we can multiple connections - // concurrently. - handle2.spawn(async move { - // Hyper has its own `AsyncRead` and `AsyncWrite` traits and doesn't use tokio. - // `TokioIo` converts between them. - let socket = hyper_util::rt::TokioIo::new(socket); - let model_server = model_service.clone(); - - let hyper_service = hyper::service::service_fn( - move |request: hyper::Request| { - // Do we need to call `poll_ready`???? - use hyper::service::Service; - model_server.call(request) - }, - ); - - // `server::conn::auto::Builder` supports both http1 and http2. - // - // `TokioExecutor` tells hyper to use `tokio::spawn` to spawn tasks. - if let Err(err) = hyper_util::server::conn::auto::Builder::new( - hyper_util::rt::TokioExecutor::new(), - ) - // `serve_connection_with_upgrades` is required for websockets. If you don't need - // that you can use `serve_connection` instead. 
- .serve_connection_with_upgrades(socket, hyper_service) - .await - { - eprintln!("failed to serve connection: {err:#}"); - } - }); - } - }); + #[cfg(feature = "bundle_files")] + let serve_dir = tower_serve_static::ServeDir::new(&ASSETS_DIR); + + #[cfg(feature = "serve_files")] + let serve_dir = tower_http::services::fs::ServeDir::new( + std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("static"), + ); + + // Create axum router. + let router = axum::Router::new() + .route(EVENTS_PATH, axum::routing::get(events_handler)) + .route("/info", axum::routing::get(info_handler)) + .nest_service("/", serve_dir) + .with_state(app_state.clone()); + + // create future for our app + let http_serve_future = { + use std::future::IntoFuture; + axum::serve(listener, router).into_future() + }; info!( "ModelServer at http://{}:{}/", @@ -379,68 +189,46 @@ pub async fn new_model_server( "ModelServer events at http://{}:{}{}", local_addr.ip(), local_addr.port(), - service.events_path + EVENTS_PATH, ); - let result = ModelServer { local_addr }; - - let mut rx_new_connection_valved = valve.wrap(rx_new_connection); - let mut data_rx = tokio_stream::wrappers::ReceiverStream::new(data_rx); - - let main_task = async move { - let mut connections: Vec = vec![]; - let mut current_calibration: Option<(SendType, TimeDataPassthrough)> = None; + // Infinite loop to process and forward data. + let app_state2 = app_state.clone(); + let new_data_processor_future = async move { + let app_state = app_state2; + // Wait for the next update time to arrive ... loop { - tokio::select! { - opt_new_connection = rx_new_connection_valved.next() => { - match opt_new_connection { - Some(new_connection) => { - - if let Some(data) = ¤t_calibration { - let bytes = get_body(data)?; - new_connection.chunk_sender.send(bytes.clone()).await.unwrap(); - } - - connections.push(new_connection); - } - None => { - // All senders done. (So the server has quit and so should we.) - break; - } + let opt_new_data = data_rx.recv().await; + match &opt_new_data { + Some(data) => { + if let (SendType::CalibrationFlydraXml(calib), tdpt) = &data { + let mut current_calibration = + app_state.current_calibration.write().unwrap(); + *current_calibration = Some((calib.clone(), tdpt.clone())); } + send_msg(data, &app_state).await?; } - opt_new_data = data_rx.next() => { - match &opt_new_data { - Some(data) => { - if let (SendType::CalibrationFlydraXml(_),_) = &data { - current_calibration = Some(data.clone()); - } - send_msg(data, &mut connections).await?; - } - None => { - // All senders done. No new data will be coming, so quit. - break; - } - } - - + None => { + // All senders done. No new data will be coming, so quit. + break; } } } Ok::<_, crate::Error>(()) }; - rt_handle.spawn(main_task); - Ok(result) - } -} -impl ModelServer { - pub fn local_addr(&self) -> &std::net::SocketAddr { - &self.local_addr + // Wait for one of our futures to finish... + tokio::select! { + result = new_data_processor_future => {result?} + _ = http_serve_future => {} + } + // ...then exit. 
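+    // (tokio::select! drops whichever future did not complete, cancelling it.)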
+ + Ok(()) } } -fn get_body(data: &(SendType, TimeDataPassthrough)) -> Result { +fn get_body(data: &(SendType, TimeDataPassthrough)) -> String { let (msg, tdpt) = data; let latency: f64 = if let Some(ref tt) = tdpt.trigger_timestamp() { let now_f64 = datetime_conversion::datetime_to_f64(&chrono::Local::now()); @@ -460,36 +248,17 @@ fn get_body(data: &(SendType, TimeDataPassthrough)) -> Result, + app_state: &ModelServerAppState, ) -> Result<()> { - let bytes = get_body(data)?; - - // Send to all listening connections. - let keep: Vec = futures::future::join_all( - connections - .iter_mut() - .map(|conn| async { conn.chunk_sender.send(bytes.clone()).await.is_ok() }), - ) - .await; - - assert_eq!(keep.len(), connections.len()); - - // Remove connections which resulted in error. - let mut index = 0; - connections.retain(|_| { - index += 1; - keep[index - 1] - }); - + let buf = get_body(data); + app_state.event_broadcaster.broadcast_frame(buf).await; Ok(()) } diff --git a/flydra2/src/new_object_test_2d.rs b/flydra2/src/new_object_test_2d.rs index 28a44c6b2..f79e2e526 100644 --- a/flydra2/src/new_object_test_2d.rs +++ b/flydra2/src/new_object_test_2d.rs @@ -1,7 +1,7 @@ use std::{collections::BTreeMap, sync::Arc}; use crate::{tracking_core::HypothesisTest, CamAndDist, HypothesisTestResult}; -use flydra_types::{MyFloat, RosCamName, TrackingParams}; +use flydra_types::{MyFloat, RawCamName, TrackingParams}; #[derive(Clone)] pub(crate) struct NewObjectTestFlat3D { @@ -22,7 +22,7 @@ impl NewObjectTestFlat3D { impl HypothesisTest for NewObjectTestFlat3D { fn hypothesis_test( &self, - good_points: &BTreeMap>, + good_points: &BTreeMap>, ) -> Option { let recon_ref = &self.recon; assert!(good_points.len() < 2, "cannot have >1 camera"); @@ -30,7 +30,7 @@ impl HypothesisTest for NewObjectTestFlat3D { let cam = recon_ref.cam_by_name(cam_name.as_str()).unwrap(); if let Some(surface_pt) = crate::flat_2d::distorted_2d_to_flat_3d(&cam, xy) { let cams_and_reproj_dist = vec![CamAndDist { - ros_cam_name: cam_name.clone(), + raw_cam_name: cam_name.clone(), reproj_dist: 0.0, }]; return Some(HypothesisTestResult { diff --git a/flydra2/src/new_object_test_3d.rs b/flydra2/src/new_object_test_3d.rs index a89ae0687..e49053f09 100644 --- a/flydra2/src/new_object_test_3d.rs +++ b/flydra2/src/new_object_test_3d.rs @@ -1,7 +1,7 @@ use std::{collections::BTreeMap, sync::Arc}; use tracing::error; -use flydra_types::{RosCamName, TrackingParams}; +use flydra_types::{RawCamName, TrackingParams}; use mvg::PointWorldFrameWithSumReprojError; @@ -12,8 +12,8 @@ use crate::{ const HTEST_MAX_N_CAMS: u8 = 3; -type CamComboKey = RosCamName; -type CamComboList = Vec>; +type CamComboKey = RawCamName; +type CamComboList = Vec>; #[derive(Clone)] pub(crate) struct NewObjectTestFull3D { @@ -33,7 +33,7 @@ impl NewObjectTestFull3D { { let mut useful_cams = BTreeMap::new(); for raw_cam_name in recon.cam_names() { - let name = RosCamName::new(raw_cam_name.to_string()); + let name = RawCamName::new(raw_cam_name.to_string()); let k: CamComboKey = name; useful_cams.insert(k, ()); } @@ -76,7 +76,7 @@ impl HypothesisTest for NewObjectTestFull3D { /// framenumber and timestamp. fn hypothesis_test( &self, - good_points: &BTreeMap>, + good_points: &BTreeMap>, ) -> Option { // TODO: convert this to use undistorted points and then remove // orig_distorted, also from the structure it is in. 
@@ -188,7 +188,7 @@ impl HypothesisTest for NewObjectTestFull3D { .iter() .zip(bssf.reproj_dists.iter()) .map(|(ros_cam_name, reproj_dist)| CamAndDist { - ros_cam_name: ros_cam_name.clone(), + raw_cam_name: ros_cam_name.clone(), reproj_dist: *reproj_dist, }) .collect(); diff --git a/flydra2/src/tracking_core.rs b/flydra2/src/tracking_core.rs index 49281d1e9..30f1478ff 100644 --- a/flydra2/src/tracking_core.rs +++ b/flydra2/src/tracking_core.rs @@ -18,7 +18,7 @@ use adskalman::{StateAndCovariance, TransitionModelLinearNoControl}; use flydra_types::{ CamNum, DataAssocRow, FlydraFloatTimestampLocal, FlydraRawUdpPoint, KalmanEstimatesRow, - RosCamName, SyncFno, TrackingParams, Triggerbox, + RawCamName, SyncFno, TrackingParams, Triggerbox, }; use crate::bundled_data::{MiniArenaPointPerCam, PerMiniArenaAllCamsOneFrameUndistorted}; @@ -229,9 +229,9 @@ impl LivingModel { let likes: Vec = if let Some(expected_observation) = eo { trace!( - "object {} {} expects ({},{})", + "object {} \"{}\" expects ({},{})", self.lmi.obj_id, - cam_name, + cam_name.as_str(), expected_observation.mean()[0], expected_observation.mean()[1] ); @@ -512,7 +512,7 @@ where pub(crate) trait HypothesisTest: Send + dyn_clone::DynClone { fn hypothesis_test( &self, - good_points: &BTreeMap>, + good_points: &BTreeMap>, ) -> Option; } @@ -721,8 +721,8 @@ impl ModelCollection { let cam_num = self.mcinner.cam_manager.cam_num(&cam_name).unwrap(); trace!( - "camera {} ({}): {} points", - cam_name, + "camera \"{}\" ({}): {} points", + cam_name.as_str(), cam_num, arena_data.len() ); @@ -1004,7 +1004,7 @@ impl ModelCollection { .iter() .map(|ci| { let pt_idx = 0; - let cam_num = self.mcinner.cam_manager.cam_num(&ci.ros_cam_name).unwrap(); + let cam_num = self.mcinner.cam_manager.cam_num(&ci.raw_cam_name).unwrap(); DataAssocInfo { pt_idx, cam_num, @@ -1090,7 +1090,7 @@ fn filter_points_and_take_first( // fdp_vec: &[FrameDataAndPoints], fdp_vec: &UnusedDataPerArena, minimum_pixel_abs_zscore: f64, -) -> BTreeMap> { +) -> BTreeMap> { fdp_vec .0 .per_cam diff --git a/flydra2/src/write_data.rs b/flydra2/src/write_data.rs index a982b8542..6d9a30520 100644 --- a/flydra2/src/write_data.rs +++ b/flydra2/src/write_data.rs @@ -719,7 +719,7 @@ mod test { FrameDataAndPoints { frame_data: FrameData { block_id: None, - cam_name: RosCamName::new("cam".to_string()), + cam_name: RawCamName::new("cam".to_string()), cam_num: CamNum(0), cam_received_timestamp: FlydraFloatTimestampLocal::from_f64(i as f64 + 0.123), device_timestamp: None, diff --git a/flytrax-csv-to-braidz/Cargo.toml b/flytrax-csv-to-braidz/Cargo.toml index ee4e98699..95dfc982b 100644 --- a/flytrax-csv-to-braidz/Cargo.toml +++ b/flytrax-csv-to-braidz/Cargo.toml @@ -23,9 +23,7 @@ itertools = "0.8" lazy_static = "1.4.0" futures = "0.3" tokio = {version="1.0.1", default-features=false, features=["macros"]} -includedir = { version = "0.5", optional = true } tempfile = "3.4.0" -phf = { version = "0.7.23", optional = true } anyhow = "1.0" image = { version = "0.24.2", default-features = false, features = ["jpeg", "png"] } opencv-ros-camera = { version = "0.14", features = ["serde-serialize"] } diff --git a/http-video-streaming/Cargo.toml b/http-video-streaming/Cargo.toml index f35be1963..9eec12001 100644 --- a/http-video-streaming/Cargo.toml +++ b/http-video-streaming/Cargo.toml @@ -7,8 +7,6 @@ rust-version="1.60" [dependencies] thiserror = "1.0.33" -bui-backend = {version="0.15", default-features = false} -bui-backend-types = "0.8" chrono = {version="0.4.23", default-features=false, 
features=["serde", "clock", "std", "wasmbind"]} base64 = "0.6" tokio = {version="1", features=["sync","time","macros"]} @@ -22,6 +20,11 @@ machine-vision-formats = "0.1" http-video-streaming-types = {path = "http-video-streaming-types"} basic-frame = {path="../basic-frame"} +rust-cam-bui-types = { path = "../rust-cam-bui-types" } +event-stream-types = { path = "../event-stream-types" } +bui-backend-session-types = { path = "../bui-backend-session/types" } +bytes = "1.5.0" +http-body = "1.0.0" [features] backtrace = ["convert-image/backtrace"] diff --git a/http-video-streaming/http-video-streaming-types/Cargo.toml b/http-video-streaming/http-video-streaming-types/Cargo.toml index 95a601f52..f02483ba1 100644 --- a/http-video-streaming/http-video-streaming-types/Cargo.toml +++ b/http-video-streaming/http-video-streaming-types/Cargo.toml @@ -7,7 +7,8 @@ license = "MIT/Apache-2.0" [dependencies] serde = {version="1.0", features=["derive"]} -bui-backend-types = "0.8" + +bui-backend-session-types = { path = "../../bui-backend-session/types" } [dev-dependencies] serde_yaml = "0.9" diff --git a/http-video-streaming/http-video-streaming-types/src/lib.rs b/http-video-streaming/http-video-streaming-types/src/lib.rs index ea8d37262..5d611f941 100644 --- a/http-video-streaming/http-video-streaming-types/src/lib.rs +++ b/http-video-streaming/http-video-streaming-types/src/lib.rs @@ -6,6 +6,7 @@ // copied, modified, or distributed except according to those terms. use serde::{Deserialize, Serialize}; +use bui_backend_session_types::ConnectionKey; #[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] pub struct Point { @@ -29,7 +30,7 @@ pub struct ToClient { pub annotations: Vec, pub fno: u64, pub ts_rfc3339: String, // timestamp in RFC3339 format - pub ck: bui_backend_types::ConnectionKey, + pub ck: ConnectionKey, pub name: Option, } @@ -75,7 +76,7 @@ pub enum Shape { // from client to server #[derive(Debug, Serialize, Deserialize, Clone)] pub struct FirehoseCallbackInner { - pub ck: bui_backend_types::ConnectionKey, + pub ck: ConnectionKey, pub fno: usize, pub ts_rfc3339: String, pub name: Option, diff --git a/http-video-streaming/src/lib.rs b/http-video-streaming/src/lib.rs index c5adca5e0..2bd613454 100644 --- a/http-video-streaming/src/lib.rs +++ b/http-video-streaming/src/lib.rs @@ -8,13 +8,9 @@ use std::{collections::HashMap, sync::Arc}; use tokio_stream::StreamExt; -use bui_backend::{ - highlevel::{ConnectionEvent, ConnectionEventType}, - lowlevel::EventChunkSender, -}; -use bui_backend_types::ConnectionKey; - use basic_frame::DynamicFrame; +use bui_backend_session_types::ConnectionKey; +use event_stream_types::{ConnectionEvent, ConnectionEventType, EventChunkSender}; pub use http_video_streaming_types::{ CircleParams, DrawableShape, FirehoseCallbackInner, Point, Shape, ToClient, @@ -168,9 +164,9 @@ impl PerSender { http_video_streaming_types::VIDEO_STREAM_EVENT_NAME, buf ); - let hc = buf.into(); + let hc = http_body::Frame::data(bytes::Bytes::from(buf)); - match self.out.send(hc).await { + match self.out.send(Ok(hc)).await { Ok(()) => {} Err(_) => { info!("failed to send data to connection. 
dropping."); diff --git a/rust-cam-bui-types/Cargo.toml b/rust-cam-bui-types/Cargo.toml index 6ffaeeed3..11aeb9f06 100644 --- a/rust-cam-bui-types/Cargo.toml +++ b/rust-cam-bui-types/Cargo.toml @@ -3,8 +3,11 @@ name = "rust-cam-bui-types" version = "0.1.0" authors = ["Andrew Straw "] license = "MIT/Apache-2.0" +edition = "2021" [dependencies] -serde = "1.0" -serde_derive = "1.0" +serde = {version = "1.0", features = [ "derive" ] } chrono = {version="0.4.23", default-features=false, features=["serde", "clock", "std", "wasmbind"]} +tokio = "1" + +bui-backend-session-types = { path = "../bui-backend-session/types" } diff --git a/rust-cam-bui-types/src/lib.rs b/rust-cam-bui-types/src/lib.rs index a9c38bf76..5cb974306 100644 --- a/rust-cam-bui-types/src/lib.rs +++ b/rust-cam-bui-types/src/lib.rs @@ -5,10 +5,7 @@ // or http://opensource.org/licenses/MIT>, at your option. This file may not be // copied, modified, or distributed except according to those terms. -extern crate serde; -#[macro_use] -extern crate serde_derive; -extern crate chrono; +use serde::{Serialize, Deserialize}; #[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)] pub struct RecordingPath { diff --git a/strand-cam/Cargo.toml b/strand-cam/Cargo.toml index 4bbd773f4..e4786f6c6 100644 --- a/strand-cam/Cargo.toml +++ b/strand-cam/Cargo.toml @@ -12,19 +12,14 @@ path = "src/strand-cam.rs" [dependencies] jemallocator = {version="0.3", optional=true} -async-change-tracker = "0.3" -bui-backend-types = "0.8" -bui-backend = {version="0.15", default-features = false} +async-change-tracker = "0.3.4" qrcodegen = "1.4" log = { version = "0.4.5", features = ["release_max_level_debug"] } -ctrlc = { version = "3.1.3", features = ["termination"] } stream-cancel = "0.8" csv = {version="1.1", optional=true} libflate = {version="1.0", optional=true} env-tracing-logger = {path="../env-tracing-logger"} -includedir = { version = "0.6", optional = true } -phf = { version = "0.8", optional = true } serde = {version="1.0.79",features=["derive"]} serde_json = "1.0.29" serde_yaml = "0.9" @@ -69,7 +64,7 @@ shellexpand = "2" imops = {path="../imops"} led-box = {path="../led-box"} led-box-comms = {path="../led-box-comms"} -flydra-types = {path="../flydra-types", features=["with-dns"]} +flydra-types = {path="../flydra-types", features = [ "start-listener", "build-urls" ] } flydra2 = {path="../flydra2", default-features = false, optional=true} mvg = {path="../mvg", optional=true} flydra-mvg = {path="../flydra-mvg", optional=true} @@ -83,7 +78,18 @@ byteorder = "1.4" target = "2.0.0" hyper-util = { version = "0.1.1", features = ["full"] } http-body-util = "0.1.0" - +tower = "0.4.13" +axum = "0.7.4" +tracing = "0.1.40" +axum-token-auth = "0.1.0" +tower-http = { version = "0.5.0", features = ["fs"], optional = true } +tower-serve-static = { version = "0.1", optional = true } +include_dir = { version = "0.7.3", optional = true } +http-body = "1.0.0" +http = "1.0.0" +cookie = "0.18.0" + +bui-backend-session-types = { path = "../bui-backend-session/types" } braid-config-data = {path="../braid-config-data"} opencv-calibrate = {path="../opencv-calibrate", optional=true} camcal = {path="../camcal", optional=true} @@ -99,6 +105,7 @@ ads-apriltag = {path="../apriltag", optional=true} channellib = {path="../channellib", optional=true} braid-http-session = {path="../braid-http-session"} bui-backend-session = { path = "../bui-backend-session" } +event-stream-types = { path = "../event-stream-types" } [build-dependencies] build-util = {path="../build-util"} @@ -121,8 
+128,10 @@ flydra-uds = ["flydra-feature-detector?/flydra-uds"] posix_sched_fifo = ["posix-scheduler", "posix-scheduler/linux"] # Serve style -bundle_files = ["bui-backend/bundle_files", "build-util/bundle_files", "includedir", "phf", "flydra2?/bundle_files" ] -serve_files = ["bui-backend/serve_files", "build-util/serve_files", "flydra2?/serve_files"] +## Bundle files into executable +bundle_files = ["flydra2?/bundle_files", "tower-serve-static", "include_dir" ] +## Serve files from disk at runtime +serve_files = ["flydra2?/serve_files", "tower-http"] imtrack-dark-circle = [] imtrack-absdiff = [] diff --git a/strand-cam/build.rs b/strand-cam/build.rs index 136b6146e..27317c3c1 100644 --- a/strand-cam/build.rs +++ b/strand-cam/build.rs @@ -1,19 +1,23 @@ +#[cfg(not(any(feature = "bundle_files", feature = "serve_files")))] +compile_error!("Need cargo feature \"bundle_files\" or \"serve_files\""); + fn main() -> Result<(), Box<(dyn std::error::Error)>> { build_util::git_hash(env!("CARGO_PKG_VERSION"))?; - let frontend_dir = std::path::PathBuf::from("yew_frontend"); - let frontend_pkg_dir = frontend_dir.join("pkg"); - #[cfg(feature = "bundle_files")] - if !frontend_pkg_dir.join("strand_cam_frontend_yew.js").exists() { - return Err(format!( - "The frontend is required but not built. Hint: go to {} and \ - run `build.sh` (or on Windows, `build.bat`).", - frontend_dir.display() - ) - .into()); + { + let frontend_dir = std::path::PathBuf::from("yew_frontend"); + let frontend_pkg_dir = frontend_dir.join("pkg"); + + if !frontend_pkg_dir.join("strand_cam_frontend_yew.js").exists() { + return Err(format!( + "The frontend is required but not built. Hint: go to {} and \ + run `build.sh` (or on Windows, `build.bat`).", + frontend_dir.display() + ) + .into()); + } } - build_util::bui_backend_generate_code(&frontend_pkg_dir, "frontend.rs")?; Ok(()) } diff --git a/strand-cam/src/cli_app.rs b/strand-cam/src/cli_app.rs index dab9ae97c..96172fed1 100644 --- a/strand-cam/src/cli_app.rs +++ b/strand-cam/src/cli_app.rs @@ -6,7 +6,7 @@ use std::path::PathBuf; use clap::{Arg, ArgAction}; -use crate::{run_app, StrandCamArgs}; +use crate::{run_app, BraidArgs, StandaloneArgs, StandaloneOrBraid, StrandCamArgs}; use crate::APP_INFO; @@ -25,7 +25,7 @@ pub fn cli_main( app_name: &'static str, ) -> Result<()> where - M: ci2::CameraModule, + M: ci2::CameraModule + 'static, C: 'static + ci2::Camera + Send, { dotenv::dotenv().ok(); @@ -39,33 +39,8 @@ where env_tracing_logger::init(); - let runtime = tokio::runtime::Builder::new_multi_thread() - .enable_all() - .worker_threads(4) - .thread_name("strand-cam-runtime") - .thread_stack_size(3 * 1024 * 1024) - .build()?; - - let handle = runtime.handle(); - - let args = parse_args(handle, app_name)?; - - // run_app(mymod, args, app_name).map_err(|e| { - // #[cfg(feature = "backtrace")] - // match std::error::Error::backtrace(&e) { - // None => log::error!("no backtrace in upcoming error {}", e), - // Some(bt) => log::error!("backtrace in upcoming error {}: {}", e, bt), - // } - // #[cfg(not(feature = "backtrace"))] - // { - // log::error!( - // "compiled without backtrace support. 
No backtrace in upcoming error {}", - // e - // ); - // } - // anyhow::Error::new(e) - // }) - run_app(mymod, args, app_name).map_err(anyhow::Error::new) + let args = parse_args(app_name)?; + run_app(mymod, args, app_name) } fn get_cli_args() -> Vec { @@ -110,10 +85,7 @@ fn get_tracker_cfg(_matches: &clap::ArgMatches) -> Result std::result::Result { +fn parse_args(app_name: &str) -> anyhow::Result { let cli_args = get_cli_args(); let arg_default_box: Box = Default::default(); @@ -123,7 +95,6 @@ fn parse_args( let app_name: &'static clap::builder::Str = Box::leak(app_name_box); let matches = { - #[allow(unused_mut)] let mut parser = clap::Command::new(app_name) .version(env!("CARGO_PKG_VERSION")) .arg( @@ -181,9 +152,7 @@ fn parse_args( .default_value("~/DATA"), ); - // #[cfg(not(feature = "braid-config"))] - { - parser = parser + parser = parser .arg( Arg::new("pixel_format") .long("pixel-format") @@ -206,16 +175,12 @@ fn parse_args( .action(clap::ArgAction::Count) .help("Force the camera to synchronize to external trigger. (incompatible with braid)."), ); - } - // #[cfg(feature = "braid-config")] - { - parser = parser.arg( - Arg::new("braid_addr") - .long("braid_addr") - .help("Braid HTTP API address (e.g. 'http://host:port/')"), - ); - } + parser = parser.arg( + Arg::new("braid_url") + .long("braid-url") + .help("Braid HTTP URL address (e.g. 'http://host:port/')"), + ); #[cfg(feature = "posix_sched_fifo")] { @@ -227,13 +192,11 @@ fn parse_args( .help("The scheduler priority (integer, e.g. 99). Requires also sched-policy.")) } - { - parser = parser.arg( - Arg::new("led_box_device") - .long("led-box") - .help("The filename of the LED box device"), - ) - } + parser = parser.arg( + Arg::new("led_box_device") + .long("led-box") + .help("The filename of the LED box device"), + ); #[cfg(feature = "flydratrax")] { @@ -348,27 +311,16 @@ fn parse_args( let led_box_device_path = parse_led_box_device(&matches); - let braid_addr: Option = matches.get_one::("braid_addr").map(Into::into); + let braid_url: Option = matches.get_one::("braid_url").map(Into::into); - let ( - mainbrain_internal_addr, - camdata_addr, - pixel_format, - force_camera_sync_mode, - software_limit_framerate, - tracker_cfg_src, - acquisition_duration_allowed_imprecision_msec, - http_server_addr, - no_browser, - show_url, - ) = if let Some(braid_addr) = braid_addr { + let standalone_or_braid = if let Some(braid_url) = braid_url { for argname in &[ "pixel_format", "JWT_SECRET", "camera_settings_filename", "http_server_addr", ] { - // Typically these values are not relevant or are set via + // These values are not relevant or are set via // [flydra_types::RemoteCameraInfoResponse]. 
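            // Reject any of these arguments if they were passed explicitly, since Braid supplies them.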
if matches.contains_id(argname) { anyhow::bail!( @@ -385,90 +337,40 @@ fn parse_args( ); } - let (mainbrain_internal_addr, camdata_addr, tracker_cfg_src, config_from_braid) = { - log::info!("Will connect to braid at \"{}\"", braid_addr); - let mainbrain_internal_addr = flydra_types::MainbrainBuiLocation( - flydra_types::StrandCamBuiServerInfo::parse_url_with_token(&braid_addr)?, - ); - - let mut mainbrain_session = handle.block_on( - braid_http_session::mainbrain_future_session(mainbrain_internal_addr.clone()), - )?; - - let camera_name = camera_name - .as_ref() - .ok_or(crate::StrandCamError::CameraNameRequired)?; - - let camera_name = flydra_types::RawCamName::new(camera_name.to_string()); - - let config_from_braid: flydra_types::RemoteCameraInfoResponse = - handle.block_on(mainbrain_session.get_remote_info(&camera_name))?; - - let camdata_addr = { - let camdata_addr = config_from_braid - .camdata_addr - .parse::()?; - let addr_info_ip = flydra_types::AddrInfoIP::from_socket_addr(&camdata_addr); - - Some(flydra_types::RealtimePointsDestAddr::IpAddr(addr_info_ip)) - }; - - let tracker_cfg_src = crate::ImPtDetectCfgSource::ChangesNotSavedToDisk( - config_from_braid.config.point_detection_config.clone(), - ); - - ( - Some(mainbrain_internal_addr), - camdata_addr, - tracker_cfg_src, - config_from_braid, + let camera_name = camera_name.ok_or_else(|| { + anyhow::anyhow!( + "camera name must be set using command-line argument when running with braid" ) - }; - - let pixel_format = config_from_braid.config.pixel_format; - let force_camera_sync_mode = config_from_braid.force_camera_sync_mode; - let software_limit_framerate = config_from_braid.software_limit_framerate; - let acquisition_duration_allowed_imprecision_msec = config_from_braid - .config - .acquisition_duration_allowed_imprecision_msec; - - ( - mainbrain_internal_addr, - camdata_addr, - pixel_format, - force_camera_sync_mode, - software_limit_framerate, - tracker_cfg_src, - acquisition_duration_allowed_imprecision_msec, - Some("127.0.0.1:0".to_string()), - true, - false, - ) + })?; + + StandaloneOrBraid::Braid(BraidArgs { + braid_url: braid_url, + camera_name, + }) } else { // not braid - - let mainbrain_internal_addr = None; - let camdata_addr = None; let pixel_format = matches.get_one::("pixel_format").map(Into::into); let force_camera_sync_mode = !matches!(matches.get_count("force_camera_sync_mode"), 0); let software_limit_framerate = flydra_types::StartSoftwareFrameRateLimit::NoChange; - let tracker_cfg_src = get_tracker_cfg(&matches)?; - let acquisition_duration_allowed_imprecision_msec = flydra_types::DEFAULT_ACQUISITION_DURATION_ALLOWED_IMPRECISION_MSEC; - ( - mainbrain_internal_addr, - camdata_addr, + + let tracker_cfg_src = get_tracker_cfg(&matches)?; + + #[cfg(not(feature = "flydra_feat_detect"))] + let _ = tracker_cfg_src; // This is unused without `flydra_feat_detect` feature. 
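+        // Assemble the standalone (non-Braid) configuration from the values parsed above.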
+ + StandaloneOrBraid::Standalone(StandaloneArgs { + camera_name, pixel_format, force_camera_sync_mode, software_limit_framerate, - tracker_cfg_src, acquisition_duration_allowed_imprecision_msec, - http_server_addr, - no_browser, - true, - ) + camera_settings_filename, + #[cfg(feature = "flydra_feat_detect")] + tracker_cfg_src, + }) }; let raise_grab_thread_priority = process_frame_priority.is_some(); @@ -477,31 +379,20 @@ fn parse_args( let apriltag_csv_filename_template = strand_cam_storetype::APRILTAG_CSV_TEMPLATE_DEFAULT.to_string(); - #[cfg(not(feature = "flydra_feat_detect"))] - std::mem::drop(tracker_cfg_src); // prevent compiler warning of unused variable - - let defaults = StrandCamArgs::default(); - Ok(StrandCamArgs { - handle: Some(handle.clone()), + standalone_or_braid, secret, - camera_name, - pixel_format, http_server_addr, no_browser, mp4_filename_template: mkv_filename_template, fmf_filename_template, ufmf_filename_template, - #[cfg(feature = "flydra_feat_detect")] - tracker_cfg_src, + csv_save_dir, raise_grab_thread_priority, led_box_device_path, #[cfg(feature = "posix_sched_fifo")] process_frame_priority, - mainbrain_internal_addr, - camdata_addr, - show_url, #[cfg(feature = "flydratrax")] flydratrax_calibration_source, #[cfg(feature = "flydratrax")] @@ -510,10 +401,6 @@ fn parse_args( model_server_addr, #[cfg(feature = "fiducial")] apriltag_csv_filename_template, - force_camera_sync_mode, - software_limit_framerate, - camera_settings_filename, - acquisition_duration_allowed_imprecision_msec, - ..defaults + ..Default::default() }) } diff --git a/strand-cam/src/flydratrax_handle_msg.rs b/strand-cam/src/flydratrax_handle_msg.rs index f773f85a8..be76fd5e2 100644 --- a/strand-cam/src/flydratrax_handle_msg.rs +++ b/strand-cam/src/flydratrax_handle_msg.rs @@ -1,4 +1,5 @@ use crate::*; +use parking_lot::RwLock; use flydra2::{SendKalmanEstimatesRow, SendType}; diff --git a/strand-cam/src/strand-cam.rs b/strand-cam/src/strand-cam.rs index 07602d398..6ec238c42 100644 --- a/strand-cam/src/strand-cam.rs +++ b/strand-cam/src/strand-cam.rs @@ -18,9 +18,16 @@ use anyhow::Context; #[cfg(feature = "fiducial")] use ads_apriltag as apriltag; +use event_stream_types::{ + AcceptsEventStream, ConnectionEvent, ConnectionEventType, ConnectionSessionKey, + EventBroadcaster, TolerantJson, +}; +use http::StatusCode; use http_video_streaming as video_streaming; use machine_vision_formats as formats; +use bui_backend_session_types::AccessToken; + #[cfg(feature = "flydratrax")] use nalgebra as na; @@ -29,7 +36,7 @@ use libflate::finish::AutoFinishUnchecked; #[cfg(feature = "fiducial")] use libflate::gzip::Encoder; -use futures::{channel::mpsc, sink::SinkExt, stream::StreamExt}; +use futures::{sink::SinkExt, stream::StreamExt}; use serde::{Deserialize, Serialize}; use hyper_tls::HttpsConnector; @@ -47,25 +54,26 @@ use basic_frame::{match_all_dynamic_fmts, DynamicFrame}; use formats::PixFmt; use timestamped_frame::ExtraTimeData; -use bui_backend::highlevel::{create_bui_app_inner, BuiAppInner}; -use bui_backend::{AccessControl, CallbackHandler}; -use bui_backend_types::CallbackDataAndSession; +// use bui_backend::highlevel::{create_bui_app_inner, BuiAppInner}; +// use bui_backend::{AccessControl, CallbackHandler}; +use bui_backend_session_types::{ConnectionKey, SessionKey}; #[cfg(feature = "flydratrax")] use http_video_streaming_types::{DrawableShape, StrokeStyle}; use video_streaming::{AnnotatedFrame, FirehoseCallback}; -use std::{error::Error as StdError, future::Future, path::Path, pin::Pin}; +use 
std::{path::Path, result::Result as StdResult}; #[cfg(feature = "flydra_feat_detect")] use ci2_remote_control::CsvSaveConfig; use ci2_remote_control::{ CamArg, CodecSelection, Mp4Codec, Mp4RecordingConfig, NvidiaH264Options, RecordingFrameRate, }; +#[cfg(feature = "flydratrax")] +use flydra_types::BuiServerAddrInfo; use flydra_types::{ - MainbrainBuiLocation, RawCamName, RealtimePointsDestAddr, RosCamName, - StartSoftwareFrameRateLimit, StrandCamBuiServerInfo, StrandCamHttpServerInfo, + BuiServerInfo, RawCamName, RealtimePointsDestAddr, StartSoftwareFrameRateLimit, }; use flydra_feature_detector_types::ImPtDetectCfg; @@ -80,8 +88,9 @@ use strand_cam_csv_config_types::{FullCfgFview2_0_26, SaveCfgFview2_0_25}; #[cfg(feature = "fiducial")] use strand_cam_storetype::ApriltagState; -use strand_cam_storetype::ToLedBoxDevice; -use strand_cam_storetype::{CallbackType, ImOpsState, RangedValue, StoreType}; +use strand_cam_storetype::{ + CallbackType, ImOpsState, RangedValue, StoreType, ToLedBoxDevice, STRAND_CAM_EVENT_NAME, +}; use strand_cam_storetype::{KalmanTrackingConfig, LedProgramConfig}; @@ -93,7 +102,6 @@ use strand_cam_pseudo_cal::PseudoCameraCalibrationData; use rust_cam_bui_types::RecordingPath; -use parking_lot::RwLock; use std::fs::File; use std::io::Write; use std::net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr, ToSocketAddrs, UdpSocket}; @@ -121,7 +129,9 @@ pub use flydra_pt_detect_cfg::default_absdiff as default_im_pt_detect; #[cfg(feature = "imtrack-dark-circle")] pub use flydra_pt_detect_cfg::default_dark_circle as default_im_pt_detect; -include!(concat!(env!("OUT_DIR"), "/frontend.rs")); // Despite slash, this does work on Windows. +#[cfg(feature = "bundle_files")] +static ASSETS_DIR: include_dir::Dir<'static> = + include_dir::include_dir!("$CARGO_MANIFEST_DIR/yew_frontend/pkg"); #[cfg(feature = "flydratrax")] const KALMAN_TRACKING_PREFS_KEY: &'static str = "kalman-tracking"; @@ -185,8 +195,8 @@ pub enum StrandCamError { }, #[error("try send error")] TrySendError, - #[error("BUI backend error: {0}")] - BuiBackendError(#[from] bui_backend::Error), + // #[error("BUI backend error: {0}")] + // BuiBackendError(#[from] bui_backend::Error), #[error("BUI backend session error: {0}")] BuiBackendSessionError(#[from] bui_backend_session::Error), #[error("Braid HTTP session error: {0}")] @@ -203,11 +213,11 @@ pub enum StrandCamError { PluginDisconnected, #[error("video streaming error")] VideoStreamingError(#[from] video_streaming::Error), - #[error( - "The --jwt-secret argument must be passed or the JWT_SECRET environment \ - variable must be set." - )] - JwtError, + // #[error( + // "The --jwt-secret argument must be passed or the JWT_SECRET environment \ + // variable must be set." 
+ // )] + // JwtError, #[cfg(feature = "flydratrax")] #[error("MVG error: {0}")] MvgError( @@ -313,7 +323,7 @@ impl CloseAppOnThreadExit { } } - fn check(&self, result: std::result::Result) -> T + fn check(&self, result: StdResult) -> T where E: std::convert::Into, { @@ -399,7 +409,7 @@ pub(crate) enum Msg { SetIsSavingObjDetectionCsv(CsvSaveConfig), #[cfg(feature = "flydra_feat_detect")] SetExpConfig(ImPtDetectCfg), - Store(Arc>>), + Store(Arc>>), #[cfg(feature = "flydra_feat_detect")] TakeCurrentImageAsBackground, #[cfg(feature = "flydra_feat_detect")] @@ -412,7 +422,7 @@ pub(crate) enum Msg { } impl std::fmt::Debug for Msg { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> StdResult<(), std::fmt::Error> { write!(f, "strand_cam::Msg{{..}}") } } @@ -636,33 +646,19 @@ struct FlydraConfigState { } #[cfg(feature = "checkercal")] -type CollectedCornersArc = Arc>>>; - -async fn register_node_and_update_image( - api_http_address: flydra_types::MainbrainBuiLocation, - msg: flydra_types::RegisterNewCamera, - mut transmit_msg_rx: mpsc::Receiver, -) -> Result<()> { - let mut mainbrain_session = - braid_http_session::mainbrain_future_session(api_http_address).await?; - mainbrain_session.register_flydra_camnode(&msg).await?; - while let Some(msg) = transmit_msg_rx.next().await { - mainbrain_session.send_message(msg).await?; - } - Ok(()) -} +type CollectedCornersArc = Arc>>>; async fn convert_stream( - ros_cam_name: RosCamName, + raw_cam_name: RawCamName, mut transmit_feature_detect_settings_rx: tokio::sync::mpsc::Receiver< flydra_feature_detector_types::ImPtDetectCfg, >, - mut transmit_msg_tx: mpsc::Sender, + transmit_msg_tx: tokio::sync::mpsc::Sender, ) -> Result<()> { while let Some(val) = transmit_feature_detect_settings_rx.recv().await { let msg = - flydra_types::HttpApiCallback::UpdateFeatureDetectSettings(flydra_types::PerCam { - ros_cam_name: ros_cam_name.clone(), + flydra_types::BraidHttpApiCallback::UpdateFeatureDetectSettings(flydra_types::PerCam { + raw_cam_name: raw_cam_name.clone(), inner: flydra_types::UpdateFeatureDetectSettings { current_feature_detect_settings: val, }, @@ -672,19 +668,18 @@ async fn convert_stream( Ok(()) } -struct MainbrainInfo { - mainbrain_internal_addr: MainbrainBuiLocation, - transmit_msg_rx: mpsc::Receiver, - transmit_msg_tx: mpsc::Sender, -} +// struct MainbrainInfo { +// mainbrain_internal_addr: MainbrainBuiLocation, +// transmit_msg_rx: mpsc::Receiver, +// transmit_msg_tx: mpsc::Sender, +// } // We perform image analysis in its own task. 
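// Editorial sketch, not part of the patch: convert_stream() above is a drain-and-forward
// loop over tokio mpsc channels; each received settings value is wrapped into an outgoing
// BraidHttpApiCallback and re-sent. The same shape, reduced to a generic helper
// (illustrative only):
async fn forward<T, U>(
    mut rx: tokio::sync::mpsc::Receiver<T>,
    tx: tokio::sync::mpsc::Sender<U>,
    wrap: impl Fn(T) -> U,
) -> Result<(), tokio::sync::mpsc::error::SendError<U>> {
    while let Some(item) = rx.recv().await {
        tx.send(wrap(item)).await?;
    }
    Ok(())
}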
async fn frame_process_task( - my_runtime: tokio::runtime::Handle, - #[cfg(feature = "flydratrax")] flydratrax_model_server: ( - tokio::sync::mpsc::Sender<(flydra2::SendType, flydra2::TimeDataPassthrough)>, - flydra2::ModelServer, - ), + #[cfg(feature = "flydratrax")] model_server_data_tx: tokio::sync::mpsc::Sender<( + flydra2::SendType, + flydra2::TimeDataPassthrough, + )>, #[cfg(feature = "flydratrax")] flydratrax_calibration_source: CalSource, cam_name: RawCamName, #[cfg(feature = "flydra_feat_detect")] camera_cfg: CameraCfgFview2_0_26, @@ -704,11 +699,11 @@ async fn frame_process_task( #[cfg(feature = "flydratrax")] led_box_tx_std: tokio::sync::mpsc::Sender, mut quit_rx: tokio::sync::oneshot::Receiver<()>, is_starting_tx: tokio::sync::oneshot::Sender<()>, - #[cfg(feature = "flydratrax")] http_camserver_info: StrandCamBuiServerInfo, + #[cfg(feature = "flydratrax")] http_camserver_info: BuiServerAddrInfo, process_frame_priority: Option<(i32, i32)>, - mainbrain_info: Option, + transmit_msg_tx: Option>, camdata_addr: Option, - led_box_heartbeat_update_arc: Arc>>, + led_box_heartbeat_update_arc: Arc>>, #[cfg(feature = "plugin-process-frame")] do_process_frame_callback: bool, #[cfg(feature = "checkercal")] collected_corners_arc: CollectedCornersArc, #[cfg(feature = "flydratrax")] save_empty_data2d: SaveEmptyData2dType, @@ -720,9 +715,11 @@ async fn frame_process_task( frame_info_extractor: &dyn ci2::ExtractFrameInfo, #[cfg(feature = "flydra_feat_detect")] app_name: &'static str, ) -> anyhow::Result<()> { + let my_runtime: tokio::runtime::Handle = tokio::runtime::Handle::current(); + let is_braid = camdata_addr.is_some(); - let ros_cam_name: RosCamName = new_cam_data.ros_cam_name.clone(); + let raw_cam_name: RawCamName = new_cam_data.raw_cam_name.clone(); #[cfg(feature = "posix_sched_fifo")] { @@ -792,35 +789,28 @@ async fn frame_process_task( Some(0) }; - let (transmit_feature_detect_settings_tx, transmit_msg_tx) = if let Some(info) = mainbrain_info - { - let addr = info.mainbrain_internal_addr; - let transmit_msg_tx = info.transmit_msg_tx.clone(); - - let (transmit_feature_detect_settings_tx, transmit_feature_detect_settings_rx) = - tokio::sync::mpsc::channel::(10); - - my_runtime.spawn(convert_stream( - ros_cam_name.clone(), - transmit_feature_detect_settings_rx, - transmit_msg_tx, - )); - - let transmit_msg_rx = info.transmit_msg_rx; - my_runtime.spawn(register_node_and_update_image( - addr, - new_cam_data, - // current_image_png, - transmit_msg_rx, - )); - - ( - Some(transmit_feature_detect_settings_tx), - Some(info.transmit_msg_tx), - ) - } else { - (None, None) - }; + let (transmit_feature_detect_settings_tx, mut transmit_msg_tx) = + if let Some(transmit_msg_tx) = transmit_msg_tx.as_ref() { + let (transmit_feature_detect_settings_tx, transmit_feature_detect_settings_rx) = + tokio::sync::mpsc::channel::(10); + + my_runtime.spawn(convert_stream( + raw_cam_name.clone(), + transmit_feature_detect_settings_rx, + transmit_msg_tx.clone(), + )); + + transmit_msg_tx + .send(flydra_types::BraidHttpApiCallback::NewCamera(new_cam_data)) + .await?; + + ( + Some(transmit_feature_detect_settings_tx), + Some(transmit_msg_tx), + ) + } else { + (None, None) + }; #[cfg(not(feature = "flydra_feat_detect"))] std::mem::drop(transmit_feature_detect_settings_tx); @@ -850,7 +840,7 @@ async fn frame_process_task( )?; #[cfg(feature = "flydra_feat_detect")] let mut csv_save_state = SavingState::NotSaving; - let mut shared_store_arc: Option>>> = None; + let mut shared_store_arc: Option>>> = None; let mut fps_calc 
= FpsCalc::new(100); // average 100 frames to get mean fps #[cfg(feature = "flydratrax")] let mut kalman_tracking_config = KalmanTrackingConfig::default(); // this is replaced below @@ -870,7 +860,7 @@ async fn frame_process_task( #[cfg(feature = "flydratrax")] let red_style = StrokeStyle::from_rgb(255, 100, 100); - let expected_framerate_arc = Arc::new(RwLock::new(None)); + let expected_framerate_arc = Arc::new(parking_lot::RwLock::new(None)); is_starting_tx.send(()).ok(); // signal that we are we are no longer starting @@ -902,7 +892,7 @@ async fn frame_process_task( #[cfg(feature = "checkercal")] let mut checkerboard_loop_dur = std::time::Duration::from_millis(500); - let current_image_timer_arc = Arc::new(RwLock::new(std::time::Instant::now())); + let current_image_timer_arc = Arc::new(parking_lot::RwLock::new(std::time::Instant::now())); let mut im_ops_socket: Option = None; @@ -1020,9 +1010,9 @@ async fn frame_process_task( let expected_framerate_arc2 = expected_framerate_arc.clone(); let cam_name2 = cam_name.clone(); let http_camserver = - StrandCamHttpServerInfo::Server(http_camserver_info.clone()); + BuiServerInfo::Server(http_camserver_info.clone()); let recon2 = recon.clone(); - let flydratrax_model_server2 = flydratrax_model_server.clone(); + let model_server_data_tx2 = model_server_data_tx.clone(); let valve2 = valve.clone(); let cam_manager = flydra2::ConnectedCamerasManager::new_single_cam( @@ -1046,7 +1036,6 @@ async fn frame_process_task( flydra2::BraidMetadataBuilder::saving_program_name( "strand-cam", ), - valve.clone(), ) .expect("create CoordProcessor"); @@ -1055,8 +1044,7 @@ async fn frame_process_task( opt_braidz_write_tx_weak = Some(braidz_write_tx_weak); - let (model_server_data_tx, _model_server) = - flydratrax_model_server2; + let model_server_data_tx = model_server_data_tx2; coord_processor.add_listener(model_sender); // the local LED control thing coord_processor.add_listener(model_server_data_tx); // the HTTP thing @@ -1583,7 +1571,7 @@ async fn frame_process_task( datetime_conversion::datetime_to_f64(&process_new_frame_start); let tracker_annotation = flydra_types::FlydraRawUdpPacket { - cam_name: ros_cam_name.as_str().to_string(), + cam_name: raw_cam_name.as_str().to_string(), timestamp: opt_trigger_stamp, cam_received_time: acquire_stamp, device_timestamp, @@ -1675,7 +1663,7 @@ async fn frame_process_task( let cam_num = 0.into(); // Only one camera, so this must be correct. 
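// Editorial sketch, not part of the patch: the shared state in this task is held in
// parking_lot::RwLock (the patch drops the blanket `use parking_lot::RwLock;` import and
// spells out the path instead). parking_lot guards do not poison, so .read()/.write()
// return the guard directly and no .unwrap() is needed. Minimal illustration:
fn parking_lot_lock_demo() {
    let shared = std::sync::Arc::new(parking_lot::RwLock::new(0u32));
    *shared.write() += 1; // write() yields the guard directly, not a Result
    assert_eq!(*shared.read(), 1); // likewise for read()
}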
let frame_data = flydra2::FrameData::new( - ros_cam_name.clone(), + raw_cam_name.clone(), cam_num, SyncFno( frame.extra().host_framenumber().try_into().unwrap(), @@ -1945,30 +1933,43 @@ async fn frame_process_task( { // send current image every 2 seconds - let mut timer = current_image_timer_arc.write(); - let elapsed = timer.elapsed(); - if elapsed > std::time::Duration::from_millis(2000) { - *timer = std::time::Instant::now(); + let send_msg = { + let mut timer = current_image_timer_arc.write(); + let elapsed = timer.elapsed(); + let mut send_msg = false; + if elapsed > std::time::Duration::from_millis(2000) { + *timer = std::time::Instant::now(); + send_msg = true; + } + send_msg + }; + + if send_msg { // encode frame to png buf - if let Some(mut transmit_msg_tx) = transmit_msg_tx.clone() { - let ros_cam_name = ros_cam_name.clone(); + if let Some(cb_sender) = transmit_msg_tx.as_ref() { let current_image_png = match_all_dynamic_fmts!(&frame, x, { convert_image::frame_to_image(x, convert_image::ImageOptions::Png) .unwrap() }); - my_runtime.spawn(async move { - let msg = flydra_types::HttpApiCallback::UpdateCurrentImage( - flydra_types::PerCam { - ros_cam_name, - inner: flydra_types::UpdateImage { - current_image_png: current_image_png.into(), - }, + let raw_cam_name = raw_cam_name.clone(); + + let msg = flydra_types::BraidHttpApiCallback::UpdateCurrentImage( + flydra_types::PerCam { + raw_cam_name, + inner: flydra_types::UpdateImage { + current_image_png: current_image_png.into(), }, - ); - transmit_msg_tx.send(msg).await.unwrap(); - }); + }, + ); + match cb_sender.send(msg).await { + Ok(()) => {} + Err(e) => { + tracing::error!("While sending current image: {e}"); + transmit_msg_tx = None; + } + }; } } } @@ -2242,9 +2243,7 @@ trait IgnoreSendError { fn ignore_send_error(self); } -impl IgnoreSendError - for std::result::Result<(), tokio::sync::mpsc::error::SendError> -{ +impl IgnoreSendError for StdResult<(), tokio::sync::mpsc::error::SendError> { fn ignore_send_error(self) { match self { Ok(()) => {} @@ -2256,7 +2255,7 @@ impl IgnoreSendError } #[derive(Clone)] -struct MyCallbackHandler { +struct StrandCamCallbackSenders { firehose_callback_tx: tokio::sync::mpsc::Sender, cam_args_tx: tokio::sync::mpsc::Sender, led_box_tx_std: tokio::sync::mpsc::Sender, @@ -2264,169 +2263,12 @@ struct MyCallbackHandler { tx_frame: tokio::sync::mpsc::Sender, } -impl CallbackHandler for MyCallbackHandler { - type Data = CallbackType; - - /// HTTP request to "/callback" has been made with payload which as been - /// deserialized into `Self::Data` and session data stored in - /// [CallbackDataAndSession]. 
- fn call<'a>( - &'a self, - data_sess: CallbackDataAndSession, - ) -> Pin>> + Send + 'a>> - { - let payload = data_sess.payload; - let fut = async { - match payload { - CallbackType::ToCamera(cam_arg) => { - debug!("in cb: {:?}", cam_arg); - self.cam_args_tx.send(cam_arg).await.ignore_send_error(); - } - CallbackType::FirehoseNotify(inner) => { - let arrival_time = chrono::Utc::now(); - let fc = FirehoseCallback { - arrival_time, - inner, - }; - self.firehose_callback_tx.send(fc).await.ignore_send_error(); - } - CallbackType::TakeCurrentImageAsBackground => { - #[cfg(feature = "flydra_feat_detect")] - self.tx_frame - .send(Msg::TakeCurrentImageAsBackground) - .await - .ignore_send_error(); - } - CallbackType::ClearBackground(value) => { - #[cfg(feature = "flydra_feat_detect")] - self.tx_frame - .send(Msg::ClearBackground(value)) - .await - .ignore_send_error(); - #[cfg(not(feature = "flydra_feat_detect"))] - let _ = value; - } - CallbackType::ToLedBox(led_box_arg) => futures::executor::block_on(async { - // todo: make this whole block async and remove the `futures::executor::block_on` aspect here. - info!("in led_box callback: {:?}", led_box_arg); - self.led_box_tx_std - .send(led_box_arg) - .await - .ignore_send_error(); - }), - } - }; - Box::pin(async { - fut.await; - Ok(()) - }) - } -} - -pub struct StrandCamApp { - inner: BuiAppInner, -} - -impl StrandCamApp { - async fn new( - rt_handle: tokio::runtime::Handle, - shared_store_arc: Arc>>, - secret: Option>, - http_server_addr: &str, - config: Config, - cam_args_tx: tokio::sync::mpsc::Sender, - led_box_tx_std: tokio::sync::mpsc::Sender, - tx_frame: tokio::sync::mpsc::Sender, - shutdown_rx: tokio::sync::oneshot::Receiver<()>, - ) -> std::result::Result< - ( - tokio::sync::mpsc::Receiver, - Self, - tokio::sync::mpsc::Receiver, - ), - StrandCamError, - > { - let chan_size = 10; - - let addr: std::net::SocketAddr = http_server_addr.parse().unwrap(); - let auth = if let Some(ref secret) = secret { - bui_backend::highlevel::generate_random_auth(addr, secret.clone())? - } else if addr.ip().is_loopback() { - AccessControl::Insecure(addr) - } else { - return Err(StrandCamError::JwtError); - }; - - // A channel for the data sent from the client browser. 
- let (firehose_callback_tx, firehose_callback_rx) = tokio::sync::mpsc::channel(10); - - let callback_handler = Box::new(MyCallbackHandler { - cam_args_tx, - firehose_callback_tx, - led_box_tx_std, - tx_frame, - }); - - let (rx_conn, bui_server) = bui_backend::lowlevel::launcher( - config.clone(), - &auth, - chan_size, - strand_cam_storetype::STRAND_CAM_EVENTS_URL_PATH, - None, - callback_handler, - ); - - let (new_conn_rx, inner) = create_bui_app_inner( - rt_handle.clone(), - Some(shutdown_rx), - &auth, - shared_store_arc, - Some(strand_cam_storetype::STRAND_CAM_EVENT_NAME.to_string()), - rx_conn, - bui_server, - ) - .await?; - - // let mut new_conn_rx_valved = valve.wrap(new_conn_rx); - // let new_conn_future = async move { - // while let Some(msg) = new_conn_rx_valved.next().await { - // connection_callback_tx.send(msg).await.unwrap(); - // } - // debug!("new_conn_future closing {}:{}", file!(), line!()); - // }; - // let txers = Arc::new(RwLock::new(HashMap::new())); - // let txers2 = txers.clone(); - // let mut new_conn_rx_valved = valve.wrap(new_conn_rx); - // let new_conn_future = async move { - // while let Some(msg) = new_conn_rx_valved.next().await { - // let mut txers = txers2.write(); - // match msg.typ { - // ConnectionEventType::Connect(chunk_sender) => { - // txers.insert( - // msg.connection_key, - // (msg.session_key, chunk_sender, msg.path), - // ); - // } - // ConnectionEventType::Disconnect => { - // txers.remove(&msg.connection_key); - // } - // } - // } - // debug!("new_conn_future closing {}:{}", file!(), line!()); - // }; - // let _task_join_handle = rt_handle.spawn(new_conn_future); - - let my_app = StrandCamApp { inner }; - - Ok((firehose_callback_rx, my_app, new_conn_rx)) - } - - fn inner(&self) -> &BuiAppInner { - &self.inner - } - // fn inner_mut(&mut self) -> &mut BuiAppInner { - // &mut self.inner - // } +#[derive(Clone)] +struct StrandCamAppState { + event_broadcaster: EventBroadcaster, + callback_senders: StrandCamCallbackSenders, + tx_new_connection: tokio::sync::mpsc::Sender, + shared_store_arc: Arc>>, } #[cfg(feature = "fiducial")] @@ -2462,7 +2304,7 @@ async fn check_version( MyBody, // http_body_util::Empty, >, - known_version: Arc>, + known_version: Arc>, app_name: &'static str, ) -> Result<()> { let url = format!("https://version-check.strawlab.org/{app_name}"); @@ -2493,7 +2335,7 @@ async fn check_version( let known_version3 = known_version2.clone(); let body = res.into_body(); - let chunks: std::result::Result, _> = { + let chunks: StdResult, _> = { use http_body_util::BodyExt; body.collect().await }; @@ -2542,7 +2384,7 @@ fn display_qr_url(url: &str) { writeln!(stdout_handle).expect("write failed"); } -#[derive(Debug)] +#[derive(Debug, Clone)] /// Defines whether runtime changes from the user are persisted to disk. /// /// If they are persisted to disk, upon program re-start, the disk @@ -2553,6 +2395,13 @@ pub enum ImPtDetectCfgSource { ChangedSavedToDisk((&'static AppInfo, String)), } +#[cfg(feature = "flydra_feat_detect")] +impl Default for ImPtDetectCfgSource { + fn default() -> Self { + ImPtDetectCfgSource::ChangesNotSavedToDisk(default_im_pt_detect()) + } +} + #[cfg(feature = "plugin-process-frame")] pub struct ProcessFrameCbData { pub func_ptr: plugin_defs::ProcessFrameFunc, @@ -2603,31 +2452,71 @@ enum ToDevice { Centroid(MomentCentroid), } -// #[derive(Debug, Serialize, Deserialize)] +/// CLI args for the case when we will connect to Braid. +/// +/// Prior to the connection, we don't know much about what our configuration +/// should be. 
+#[derive(Debug, Default, Clone)] +pub struct BraidArgs { + pub braid_url: String, + pub camera_name: String, +} + +/// CLI args for the case when we run standalone. +#[derive(Debug, Clone, Default)] +pub struct StandaloneArgs { + pub camera_name: Option, + pub pixel_format: Option, + /// If set, camera acquisition will external trigger. + pub force_camera_sync_mode: bool, + /// If enabled, limit framerate (FPS) at startup. + /// + /// Despite the name ("software"), this actually sets the hardware + /// acquisition rate via the `AcquisitionFrameRate` camera parameter. + pub software_limit_framerate: StartSoftwareFrameRateLimit, + /// Threshold duration before logging error (msec). + /// + /// If the image acquisition timestamp precedes the computed trigger + /// timestamp, clearly an error has happened. This error must lie in the + /// computation of the trigger timestamp. This specifies the threshold error + /// at which an error is logged. (The underlying source of such errors + /// remains unknown.) + pub acquisition_duration_allowed_imprecision_msec: Option, + /// Filename of vendor-specific camera settings file. + pub camera_settings_filename: Option, + #[cfg(feature = "flydra_feat_detect")] + pub tracker_cfg_src: ImPtDetectCfgSource, +} + +#[derive(Debug)] +pub enum StandaloneOrBraid { + Standalone(StandaloneArgs), + Braid(BraidArgs), +} + +impl Default for StandaloneOrBraid { + fn default() -> Self { + Self::Standalone(Default::default()) + } +} + #[derive(Debug)] pub struct StrandCamArgs { - /// A handle to the tokio runtime. - pub handle: Option, /// Is Strand Cam running inside Braid context? - pub is_braid: bool, + pub standalone_or_braid: StandaloneOrBraid, pub secret: Option>, - pub camera_name: Option, - pub pixel_format: Option, pub http_server_addr: Option, pub no_browser: bool, pub mp4_filename_template: String, pub fmf_filename_template: String, pub ufmf_filename_template: String, - #[cfg(feature = "flydra_feat_detect")] - pub tracker_cfg_src: ImPtDetectCfgSource, pub csv_save_dir: String, pub raise_grab_thread_priority: bool, #[cfg(feature = "posix_sched_fifo")] pub process_frame_priority: Option<(i32, i32)>, pub led_box_device_path: Option, - pub mainbrain_internal_addr: Option, - pub camdata_addr: Option, - pub show_url: bool, + // pub mainbrain_internal_addr: Option, + // pub camdata_addr: Option, #[cfg(feature = "plugin-process-frame")] pub process_frame_callback: Option, #[cfg(feature = "plugin-process-frame")] @@ -2640,27 +2529,6 @@ pub struct StrandCamArgs { pub flydratrax_calibration_source: CalSource, #[cfg(feature = "fiducial")] pub apriltag_csv_filename_template: String, - - /// If set, camera acquisition will external trigger. - pub force_camera_sync_mode: bool, - - /// If enabled, limit framerate (FPS) at startup. - /// - /// Despite the name ("software"), this actually sets the hardware - /// acquisition rate via the `AcquisitionFrameRate` camera parameter. - pub software_limit_framerate: StartSoftwareFrameRateLimit, - - /// Filename of vendor-specific camera settings file. - pub camera_settings_filename: Option, - - /// Threshold duration before logging error (msec). - /// - /// If the image acquisition timestamp precedes the computed trigger - /// timestamp, clearly an error has happened. This error must lie in the - /// computation of the trigger timestamp. This specifies the threshold error - /// at which an error is logged. (The underlying source of such errors - /// remains unknown.) 
- pub acquisition_duration_allowed_imprecision_msec: Option, } pub type SaveEmptyData2dType = bool; @@ -2678,11 +2546,8 @@ pub enum CalSource { impl Default for StrandCamArgs { fn default() -> Self { Self { - handle: None, - is_braid: false, + standalone_or_braid: Default::default(), secret: None, - camera_name: None, - pixel_format: None, http_server_addr: None, no_browser: true, mp4_filename_template: "movie%Y%m%d_%H%M%S.%f_{CAMNAME}.mp4".to_string(), @@ -2691,31 +2556,21 @@ impl Default for StrandCamArgs { #[cfg(feature = "fiducial")] apriltag_csv_filename_template: strand_cam_storetype::APRILTAG_CSV_TEMPLATE_DEFAULT .to_string(), - #[cfg(feature = "flydra_feat_detect")] - tracker_cfg_src: ImPtDetectCfgSource::ChangesNotSavedToDisk(default_im_pt_detect()), csv_save_dir: "/dev/null".to_string(), raise_grab_thread_priority: false, #[cfg(feature = "posix_sched_fifo")] process_frame_priority: None, led_box_device_path: None, - mainbrain_internal_addr: None, - camdata_addr: None, - show_url: true, #[cfg(feature = "plugin-process-frame")] process_frame_callback: None, #[cfg(feature = "plugin-process-frame")] plugin_wait_dur: std::time::Duration::from_millis(5), - force_camera_sync_mode: false, - software_limit_framerate: StartSoftwareFrameRateLimit::NoChange, - camera_settings_filename: None, #[cfg(feature = "flydratrax")] flydratrax_calibration_source: CalSource::PseudoCal, #[cfg(feature = "flydratrax")] save_empty_data2d: true, #[cfg(feature = "flydratrax")] model_server_addr: flydra_types::DEFAULT_MODEL_SERVER_ADDR.parse().unwrap(), - acquisition_duration_allowed_imprecision_msec: - flydra_types::DEFAULT_ACQUISITION_DURATION_ALLOWED_IMPRECISION_MSEC, } } } @@ -2768,57 +2623,244 @@ fn test_nvenc_save(frame: DynamicFrame) -> Result { Ok(true) } +fn to_event_frame(state: &StoreType) -> String { + let buf = serde_json::to_string(&state).unwrap(); + let frame_string = format!("event: {STRAND_CAM_EVENT_NAME}\ndata: {buf}\n\n"); + frame_string +} + +async fn events_handler( + axum::extract::State(app_state): axum::extract::State, + session_key: axum_token_auth::SessionKey, + axum::extract::ConnectInfo(addr): axum::extract::ConnectInfo, + _: AcceptsEventStream, + req: axum::extract::Request, +) -> impl axum::response::IntoResponse { + tracing::trace!("events"); + // Connection wants to subscribe to event stream. + + let key = ConnectionSessionKey::new(session_key.0, addr); + let (tx, body) = app_state.event_broadcaster.new_connection(key); + + // Send an initial copy of our state. + let shared_store = app_state.shared_store_arc.read().as_ref().clone(); + let frame_string = to_event_frame(&shared_store); + match tx + .send(Ok(http_body::Frame::data(frame_string.into()))) + .await + { + Ok(()) => {} + Err(tokio::sync::mpsc::error::SendError(_)) => { + // The receiver was dropped because the connection closed. Should probably do more here. + tracing::debug!("initial send error"); + } + } + + // Create a new channel in which the receiver is used to send responses to + // the new connection. The sender receives changes from a global change + // receiver. 
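    // [Editorial note, not part of the patch] The string pushed to the new connection
    // above is a server-sent-events (SSE) frame as built by to_event_frame(), i.e.
    //
    //     event: <value of STRAND_CAM_EVENT_NAME>
    //     data: <the StoreType state serialized as JSON>
    //     <blank line terminating the event>
    //
    // A browser can subscribe with the standard EventSource API against the
    // "/strand-cam-events" route registered below and listen for that named event; the
    // actual client wiring lives in the yew frontend and is not shown here.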
+ let typ = ConnectionEventType::Connect(tx); + let path = req.uri().path().to_string(); + let connection_key = ConnectionKey { addr }; + let session_key = SessionKey(session_key.0); + + match app_state + .tx_new_connection + .send(ConnectionEvent { + typ, + session_key, + connection_key, + path, + }) + .await + { + Ok(()) => Ok(body), + Err(_) => Err(( + StatusCode::INTERNAL_SERVER_ERROR, + "sending new connection failed", + )), + } +} + +async fn callback_handler( + axum::extract::State(app_state): axum::extract::State, + _session_key: axum_token_auth::SessionKey, + TolerantJson(payload): TolerantJson, +) -> impl axum::response::IntoResponse { + tracing::trace!("callback"); + match payload { + CallbackType::ToCamera(cam_arg) => { + debug!("in cb: {:?}", cam_arg); + app_state + .callback_senders + .cam_args_tx + .send(cam_arg) + .await + .ignore_send_error(); + } + CallbackType::FirehoseNotify(inner) => { + let arrival_time = chrono::Utc::now(); + let fc = FirehoseCallback { + arrival_time, + inner, + }; + app_state + .callback_senders + .firehose_callback_tx + .send(fc) + .await + .ignore_send_error(); + } + CallbackType::TakeCurrentImageAsBackground => { + #[cfg(feature = "flydra_feat_detect")] + app_state + .callback_senders + .tx_frame + .send(Msg::TakeCurrentImageAsBackground) + .await + .ignore_send_error(); + } + CallbackType::ClearBackground(value) => { + #[cfg(feature = "flydra_feat_detect")] + app_state + .callback_senders + .tx_frame + .send(Msg::ClearBackground(value)) + .await + .ignore_send_error(); + #[cfg(not(feature = "flydra_feat_detect"))] + let _ = value; + } + CallbackType::ToLedBox(led_box_arg) => futures::executor::block_on(async { + info!("in led_box callback: {:?}", led_box_arg); + app_state + .callback_senders + .led_box_tx_std + .send(led_box_arg) + .await + .ignore_send_error(); + }), + } + Ok::<_, axum::extract::rejection::JsonRejection>(axum::Json(())) +} + +async fn handle_auth_error(err: tower::BoxError) -> (StatusCode, &'static str) { + match err.downcast::() { + Ok(err) => { + tracing::error!( + "Validation error(s): {:?}", + err.errors().collect::>() + ); + (StatusCode::UNAUTHORIZED, "Request is not authorized") + } + Err(orig_err) => { + tracing::error!("Unhandled internal error: {orig_err}"); + (StatusCode::INTERNAL_SERVER_ERROR, "internal server error") + } + } +} + +struct BraidInfo { + mainbrain_session: braid_http_session::MainbrainSession, + camdata_addr: flydra_types::RealtimePointsDestAddr, + tracker_cfg_src: ImPtDetectCfgSource, + config_from_braid: flydra_types::RemoteCameraInfoResponse, +} + +// ----------- + +/// top-level function once args are parsed from CLI. pub fn run_app( mymod: ci2_async::ThreadedAsyncCameraModule, args: StrandCamArgs, app_name: &'static str, -) -> Result<()> +) -> anyhow::Result<()> where - M: ci2::CameraModule, + M: ci2::CameraModule + 'static, + C: 'static + ci2::Camera + Send, +{ + // Start tokio runtime here. + let runtime = tokio::runtime::Builder::new_multi_thread() + .enable_all() + .worker_threads(4) + .thread_name("strand-cam-runtime") + .thread_stack_size(3 * 1024 * 1024) + .build()?; + + runtime.block_on(run_after_maybe_connecting_to_braid(mymod, args, app_name))?; + + info!("done"); + Ok(()) +} + +/// First, connect to Braid if requested, then run. 
+async fn run_after_maybe_connecting_to_braid( + mymod: ci2_async::ThreadedAsyncCameraModule, + args: StrandCamArgs, + app_name: &'static str, +) -> anyhow::Result<()> +where + M: ci2::CameraModule + 'static, C: 'static + ci2::Camera + Send, { - let handle = args - .handle - .clone() - .ok_or_else(|| anyhow::anyhow!("no tokio runtime handle"))?; + // If connecting to braid, do it here. + let res_braid: std::result::Result = { + match &args.standalone_or_braid { + StandaloneOrBraid::Braid(braid_args) => { + log::info!("Will connect to braid at \"{}\"", braid_args.braid_url); + let mainbrain_internal_addr = flydra_types::MainbrainBuiLocation( + flydra_types::BuiServerAddrInfo::parse_url_with_token(&braid_args.braid_url)?, + ); - let my_handle = handle.clone(); + let mut mainbrain_session = + braid_http_session::mainbrain_future_session(mainbrain_internal_addr.clone()) + .await?; - let (_bui_server_info, tx_cam_arg2, fut, _my_app) = - handle.block_on(setup_app(mymod, my_handle, args, app_name))?; + let camera_name = flydra_types::RawCamName::new(braid_args.camera_name.clone()); - ctrlc::set_handler(move || { - info!("got Ctrl-C, shutting down"); + let config_from_braid: flydra_types::RemoteCameraInfoResponse = + mainbrain_session.get_remote_info(&camera_name).await?; - // Send quit message. - debug!("starting to send quit message {}:{}", file!(), line!()); - match tx_cam_arg2.blocking_send(CamArg::DoQuit) { - Ok(()) => {} - Err(e) => { - error!("failed sending quit command: {}", e); + let camdata_addr = { + let camdata_addr = config_from_braid + .camdata_addr + .parse::()?; + let addr_info_ip = flydra_types::AddrInfoIP::from_socket_addr(&camdata_addr); + + flydra_types::RealtimePointsDestAddr::IpAddr(addr_info_ip) + }; + + let tracker_cfg_src = crate::ImPtDetectCfgSource::ChangesNotSavedToDisk( + config_from_braid.config.point_detection_config.clone(), + ); + + Ok(BraidInfo { + mainbrain_session, + config_from_braid, + camdata_addr, + tracker_cfg_src, + }) } + StandaloneOrBraid::Standalone(standalog_args) => Err(standalog_args.clone()), } - debug!("done sending quit message {}:{}", file!(), line!()); - }) - .expect("Error setting Ctrl-C handler"); - - handle.block_on(fut)?; + }; - info!("done"); - Ok(()) + run_until_done(mymod, args, app_name, res_braid).await } -pub async fn setup_app( +// ----------- + +/// This is the main function where we spend all time after parsing startup args +/// and, in case of connecting to braid, getting the inital connection +/// information. +/// +/// This function is way too huge and should be refactored. +async fn run_until_done( mut mymod: ci2_async::ThreadedAsyncCameraModule, - rt_handle: tokio::runtime::Handle, args: StrandCamArgs, app_name: &'static str, -) -> anyhow::Result<( - StrandCamBuiServerInfo, - tokio::sync::mpsc::Sender, - impl futures::Future>, - StrandCamApp, -)> + res_braid: std::result::Result, +) -> anyhow::Result<()> where M: ci2::CameraModule, C: 'static + ci2::Camera + Send, @@ -2830,7 +2872,12 @@ where warn!("Package 'imops' was not compiled with simd support. 
Image processing with imops will be slow."); } - debug!("CLI request for camera {:?}", args.camera_name); + let requested_camera_name = match &args.standalone_or_braid { + StandaloneOrBraid::Standalone(args) => args.camera_name.clone(), + StandaloneOrBraid::Braid(args) => Some(args.camera_name.clone()), + }; + + debug!("Request for camera \"{requested_camera_name:?}\""); // ----------------------------------------------- @@ -2845,7 +2892,7 @@ where info!(" camera {:?} detected", cam_info.name()); } - let name = match args.camera_name { + let use_camera_name = match requested_camera_name { Some(ref name) => name, None => cam_infos[0].name(), }; @@ -2853,7 +2900,7 @@ where let frame_info_extractor = mymod.frame_info_extractor(); let settings_file_ext = mymod.settings_file_extension().to_string(); - let mut cam = match mymod.threaded_async_camera(name) { + let mut cam = match mymod.threaded_async_camera(use_camera_name) { Ok(cam) => cam, Err(e) => { let msg = format!("{e}"); @@ -2864,8 +2911,8 @@ where let raw_name = cam.name().to_string(); info!(" got camera {}", raw_name); - let cam_name = RawCamName::new(raw_name); - let ros_cam_name = cam_name.to_ros(); + let raw_cam_name = RawCamName::new(raw_name); + let ros_cam_name = raw_cam_name.to_ros(); let camera_gamma = cam .feature_float("Gamma") @@ -2873,15 +2920,35 @@ where .ok() .map(|x: f64| x as f32); + let camera_settings_filename = match &res_braid { + Ok(bi) => bi.config_from_braid.config.camera_settings_filename.clone(), + Err(a) => a.camera_settings_filename.clone(), + }; + + let pixel_format = match &res_braid { + Ok(bi) => bi.config_from_braid.config.pixel_format.clone(), + Err(a) => a.pixel_format.clone(), + }; + + let acquisition_duration_allowed_imprecision_msec = match &res_braid { + Ok(bi) => bi + .config_from_braid + .config + .acquisition_duration_allowed_imprecision_msec + .clone(), + Err(a) => a.acquisition_duration_allowed_imprecision_msec.clone(), + }; + #[cfg(not(feature = "flydra_feat_detect"))] + let _ = acquisition_duration_allowed_imprecision_msec; + let (frame_rate_limit_supported, mut frame_rate_limit_enabled) = - if let Some(camera_settings_filename) = &args.camera_settings_filename { - let settings = - std::fs::read_to_string(camera_settings_filename).with_context(|| { - format!( - "Failed to read camera settings from file \"{}\"", - camera_settings_filename.display() - ) - })?; + if let Some(fname) = &camera_settings_filename { + let settings = std::fs::read_to_string(&fname).with_context(|| { + format!( + "Failed to read camera settings from file \"{}\"", + fname.display() + ) + })?; cam.node_map_load(&settings)?; (false, false) @@ -2890,7 +2957,7 @@ where debug!(" possible pixel format: {}", pixfmt); } - if let Some(ref pixfmt_str) = args.pixel_format { + if let Some(ref pixfmt_str) = pixel_format { use std::str::FromStr; let pixfmt = PixFmt::from_str(pixfmt_str) .map_err(|e: &str| StrandCamError::StringError(e.to_string()))?; @@ -2954,7 +3021,6 @@ where // Buffer 20 frames to be processed before dropping them. let (tx_frame, rx_frame) = tokio::sync::mpsc::channel::(20); let tx_frame2 = tx_frame.clone(); - let tx_frame3 = tx_frame.clone(); // Get initial frame to determine width, height and pixel_format. 
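// Editorial sketch, not part of the patch: camera_settings_filename, pixel_format and
// acquisition_duration_allowed_imprecision_msec above are each resolved with the same
// `match &res_braid { Ok(bi) => ..., Err(a) => ... }` shape, taking the value from
// Braid's RemoteCameraInfoResponse when connected and from the standalone CLI args
// otherwise. The same idea as a generic helper (`pick` is not in the patch):
fn pick<T>(
    res_braid: &Result<BraidInfo, StandaloneArgs>,
    from_braid: impl Fn(&BraidInfo) -> T,
    from_args: impl Fn(&StandaloneArgs) -> T,
) -> T {
    match res_braid {
        Ok(bi) => from_braid(bi),
        Err(args) => from_args(args),
    }
}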
debug!(" started acquisition, waiting for first frame"); @@ -2991,14 +3057,30 @@ where let raise_grab_thread_priority = args.raise_grab_thread_priority; - #[cfg(feature = "flydra_feat_detect")] - let tracker_cfg_src = args.tracker_cfg_src; - #[cfg(feature = "flydratrax")] let save_empty_data2d = args.save_empty_data2d; #[cfg(feature = "flydra_feat_detect")] - let tracker_cfg = match &tracker_cfg_src { + let tracker_cfg_src = match &res_braid { + Ok(bi) => bi.tracker_cfg_src.clone(), + Err(a) => a.tracker_cfg_src.clone(), + }; + + #[cfg(not(feature = "flydra_feat_detect"))] + match &res_braid { + Ok(bi) => { + let _ = bi.tracker_cfg_src.clone(); // silence unused field warning. + } + Err(_) => {} + }; + + // Here we just create some default, it does not matter what, because it + // will not be used for anything. + #[cfg(not(feature = "flydra_feat_detect"))] + let im_pt_detect_cfg = flydra_pt_detect_cfg::default_absdiff(); + + #[cfg(feature = "flydra_feat_detect")] + let im_pt_detect_cfg = match &tracker_cfg_src { ImPtDetectCfgSource::ChangedSavedToDisk(src) => { // Retrieve the saved preferences let (app_info, ref prefs_key) = src; @@ -3016,25 +3098,44 @@ where ImPtDetectCfgSource::ChangesNotSavedToDisk(cfg) => cfg.clone(), }; - #[cfg(feature = "flydra_feat_detect")] - let im_pt_detect_cfg = tracker_cfg.clone(); + let force_camera_sync_mode = match &res_braid { + Ok(bi) => bi.config_from_braid.force_camera_sync_mode, + Err(a) => a.force_camera_sync_mode, + }; - let mainbrain_info = args.mainbrain_internal_addr.map(|addr| { - let (transmit_msg_tx, transmit_msg_rx) = mpsc::channel::(10); + let camdata_addr = match &res_braid { + Ok(bi) => Some(bi.camdata_addr.clone()), + Err(_a) => None, + }; - MainbrainInfo { - mainbrain_internal_addr: addr, - transmit_msg_rx, - transmit_msg_tx, - } - }); + let software_limit_framerate = match &res_braid { + Ok(bi) => bi.config_from_braid.software_limit_framerate.clone(), + Err(a) => a.software_limit_framerate.clone(), + }; + + let mainbrain_session = match res_braid { + Ok(bi) => Some(bi.mainbrain_session), + Err(_a) => None, + }; - let transmit_msg_tx = mainbrain_info.as_ref().map(|i| i.transmit_msg_tx.clone()); + // spawn channel to send data to mainbrain + let (transmit_msg_tx, mainbrain_transmitter_fut) = + if let Some(mut mainbrain_session) = mainbrain_session { + let (transmit_msg_tx, mut transmit_msg_rx) = tokio::sync::mpsc::channel(10); + let mainbrain_transmitter_fut = async move { + while let Some(msg) = transmit_msg_rx.recv().await { + mainbrain_session.post_callback_message(msg).await.unwrap(); + } + }; + (Some(transmit_msg_tx), Some(mainbrain_transmitter_fut)) + } else { + (None, None) + }; let (cam_args_tx, cam_args_rx) = tokio::sync::mpsc::channel(100); let (led_box_tx_std, mut led_box_rx) = tokio::sync::mpsc::channel(20); - let led_box_heartbeat_update_arc = Arc::new(RwLock::new(None)); + let led_box_heartbeat_update_arc = Arc::new(parking_lot::RwLock::new(None)); let gain_ranged = RangedValue { name: "gain".into(), @@ -3068,19 +3169,21 @@ where let current_cam_settings_extension = settings_file_ext.to_string(); - if args.force_camera_sync_mode { + if force_camera_sync_mode { cam.start_default_external_triggering().unwrap(); - send_cam_settings_to_braid( - &cam.node_map_save()?, - transmit_msg_tx.as_ref(), - ¤t_cam_settings_extension, - &ros_cam_name, - ) - .map(|fut| rt_handle.spawn(fut)); + if let Some(transmit_msg_tx) = transmit_msg_tx.as_ref() { + send_cam_settings_to_braid( + &cam.node_map_save()?, + transmit_msg_tx, + 
¤t_cam_settings_extension, + &raw_cam_name, + ) + .await?; + } } - if args.camera_settings_filename.is_none() { - if let StartSoftwareFrameRateLimit::Enable(fps_limit) = &args.software_limit_framerate { + if camera_settings_filename.is_none() { + if let StartSoftwareFrameRateLimit::Enable(fps_limit) = &software_limit_framerate { // Set the camera. cam.set_software_frame_rate_limit(*fps_limit).unwrap(); // Store the values we set. @@ -3194,18 +3297,16 @@ where let im_ops_state = ImOpsState::default(); - // Here we just create some default, it does not matter what, because it - // will not be used for anything. - #[cfg(not(feature = "flydra_feat_detect"))] - let im_pt_detect_cfg = flydra_pt_detect_cfg::default_absdiff(); - #[cfg(feature = "flydra_feat_detect")] let has_image_tracker_compiled = true; #[cfg(not(feature = "flydra_feat_detect"))] let has_image_tracker_compiled = false; - let is_braid = args.is_braid; + let is_braid = match &args.standalone_or_braid { + StandaloneOrBraid::Braid(_) => true, + StandaloneOrBraid::Standalone(_) => false, + }; // ----------------------------------------------- // Check if we can use nv h264 and, if so, set that as default. @@ -3220,18 +3321,18 @@ where let mp4_filename_template = args .mp4_filename_template - .replace("{CAMNAME}", cam_name.as_str()); + .replace("{CAMNAME}", raw_cam_name.as_str()); let fmf_filename_template = args .fmf_filename_template - .replace("{CAMNAME}", cam_name.as_str()); + .replace("{CAMNAME}", raw_cam_name.as_str()); let ufmf_filename_template = args .ufmf_filename_template - .replace("{CAMNAME}", cam_name.as_str()); + .replace("{CAMNAME}", raw_cam_name.as_str()); #[cfg(feature = "fiducial")] let format_str_apriltag_csv = args .apriltag_csv_filename_template - .replace("{CAMNAME}", cam_name.as_str()); + .replace("{CAMNAME}", use_camera_name); #[cfg(not(feature = "fiducial"))] let format_str_apriltag_csv = "".into(); @@ -3273,7 +3374,7 @@ where measured_fps: 0.0, is_saving_im_pt_detect_csv: None, has_image_tracker_compiled, - im_pt_detect_cfg, + im_pt_detect_cfg: im_pt_detect_cfg.clone(), has_flydratrax_compiled, kalman_tracking_config, led_program_config, @@ -3294,63 +3395,135 @@ where camera_calibration: None, }); - let frame_processing_error_state = Arc::new(RwLock::new(FrameProcessingErrorState::default())); + let frame_processing_error_state = Arc::new(parking_lot::RwLock::new( + FrameProcessingErrorState::default(), + )); - let camdata_addr = args.camdata_addr; + // let mut config = get_default_config(); + // config.cookie_name = "strand-camclient".to_string(); - let mut config = get_default_config(); - config.cookie_name = "strand-camclient".to_string(); + let mut shared_store_changes_rx = shared_store.get_changes(1); - let shared_store_arc = Arc::new(RwLock::new(shared_store)); + // A channel for the data sent from the client browser. + let (firehose_callback_tx, firehose_callback_rx) = tokio::sync::mpsc::channel(10); - let cam_args_tx2 = cam_args_tx.clone(); - let secret = args.secret.clone(); + let callback_senders = StrandCamCallbackSenders { + cam_args_tx: cam_args_tx.clone(), + firehose_callback_tx, + led_box_tx_std: led_box_tx_std.clone(), + tx_frame: tx_frame.clone(), + }; - // todo: integrate with quit_channel and quit_rx elsewhere. 
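// Editorial sketch, not part of the patch: StrandCamCallbackSenders, built just above, is
// a bundle of cloneable tokio mpsc senders. Because every field is a Sender, the struct
// can be cloned cheaply and stored in the axum application state constructed below, so
// each HTTP callback handler can hand work off to the long-running tasks. Reduced
// illustration with made-up names:
#[derive(Clone)]
struct SenderBundle {
    work_tx: tokio::sync::mpsc::Sender<String>,
}

async fn forward_from_handler(bundle: SenderBundle) {
    // send() needs only &self; an Err means the receiving task has shut down.
    if bundle.work_tx.send("do-something".to_string()).await.is_err() {
        tracing::warn!("receiver gone; dropping request");
    }
}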
- let (quit_trigger, valve) = stream_cancel::Valve::new(); + let (tx_new_connection, rx_new_connection) = tokio::sync::mpsc::channel(10); - let (shutdown_tx, shutdown_rx) = tokio::sync::oneshot::channel::<()>(); + let shared_state = Arc::new(parking_lot::RwLock::new(shared_store)); + let shared_store_arc = shared_state.clone(); + + // Create our app state. + let app_state = StrandCamAppState { + event_broadcaster: Default::default(), + callback_senders, + tx_new_connection, + shared_store_arc, + }; - let http_server_addr = if let Some(http_server_addr) = args.http_server_addr.as_ref() { - // In braid, this will be `127.0.0.1:0` to get a free port. - http_server_addr.clone() + let shared_store_arc = shared_state.clone(); + + // This future will send state updates to all connected event listeners. + let event_broadcaster = app_state.event_broadcaster.clone(); + let send_updates_future = async move { + while let Some((_prev_state, next_state)) = shared_store_changes_rx.next().await { + let frame_string = to_event_frame(&next_state); + event_broadcaster.broadcast_frame(frame_string).await; + } + }; + + #[cfg(feature = "bundle_files")] + let serve_dir = tower_serve_static::ServeDir::new(&ASSETS_DIR); + + #[cfg(feature = "serve_files")] + let serve_dir = tower_http::services::fs::ServeDir::new( + std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .join("yew_frontend") + .join("pkg"), + ); + + let address_string = if let Some(address) = args.http_server_addr.as_ref() { + // In braid, the port will be 0 to get a free port. + address.clone() } else { - // This will be `127.0.0.1:3440` to get a free port. DEFAULT_HTTP_ADDR.to_string() }; - let (firehose_callback_rx, my_app, connection_callback_rx) = StrandCamApp::new( - rt_handle.clone(), - shared_store_arc.clone(), - secret, - &http_server_addr, - config, - cam_args_tx2.clone(), - led_box_tx_std.clone(), - tx_frame3, - shutdown_rx, - ) - .await?; - - // The value `args.http_server_addr` is transformed to - // `local_addr` by doing things like replacing port 0 - // with the actual open port number. - - let (is_loopback, http_camserver_info) = { - let local_addr = *my_app.inner().local_addr(); - let is_loopback = local_addr.ip().is_loopback(); - let token = my_app.inner().token(); - (is_loopback, StrandCamBuiServerInfo::new(local_addr, token)) + let (listener, http_camserver_info) = flydra_types::start_listener(&address_string).await?; + + let persistent_secret = if let Some(_secret) = args.secret { + todo!(); + } else { + tracing::warn!("Using newly generated persistent secret. All previously issued session keys will be invalidated. FIXME todo!"); + cookie::Key::generate() }; - let url = http_camserver_info.guess_base_url_with_token(); + // Setup our auth layer. + let token_config = match http_camserver_info.token() { + AccessToken::PreSharedToken(value) => Some(axum_token_auth::TokenConfig { + name: "token".to_string(), + value: value.clone(), + }), + AccessToken::NoToken => None, + }; + let cfg = axum_token_auth::AuthConfig { + token_config: token_config, + persistent_secret, + cookie_name: "strand-cam-session", + ..Default::default() + }; + + let auth_layer = cfg.into_layer(); + // Create axum router. + let router = axum::Router::new() + .route("/strand-cam-events", axum::routing::get(events_handler)) + .route("/callback", axum::routing::post(callback_handler)) + .nest_service("/", serve_dir) + .layer( + tower::ServiceBuilder::new() + // Auth layer will produce an error if the request cannot be + // authorized so we must handle that. 
+ .layer(axum::error_handling::HandleErrorLayer::new( + handle_auth_error, + )) + .layer(auth_layer), + ) + .with_state(app_state); + + // create future for our app + let http_serve_future = { + use std::future::IntoFuture; + axum::serve( + listener, + router.into_make_service_with_connect_info::(), + ) + .into_future() + }; + + // todo: integrate with quit_channel and quit_rx elsewhere. + let (quit_trigger, valve) = stream_cancel::Valve::new(); + + let (shutdown_tx, shutdown_rx) = tokio::sync::oneshot::channel::<()>(); + + let url = http_camserver_info.build_urls()?.pop().unwrap(); - if args.show_url { - println!("Depending on things, you may be able to login with this url: {url}",); + if !is_braid { + // Display where we are listening. + println!("Listening at {}", http_camserver_info.addr()); - if !is_loopback { - println!("This same URL as a QR code:"); - display_qr_url(&url); + println!("Predicted URL(s):"); + for url in http_camserver_info.build_urls()?.iter() { + println!(" * {url}"); + if !flydra_types::is_loopback(url) { + println!("This same URL as a QR code:"); + display_qr_url(&format!("{url}")); + } } } @@ -3361,14 +3534,14 @@ where let process_frame_callback = args.process_frame_callback; #[cfg(feature = "checkercal")] - let collected_corners_arc: CollectedCornersArc = Arc::new(RwLock::new(Vec::new())); + let collected_corners_arc: CollectedCornersArc = Arc::new(parking_lot::RwLock::new(Vec::new())); let frame_process_cjh = { let (is_starting_tx, is_starting_rx) = tokio::sync::oneshot::channel(); #[cfg(feature = "flydra_feat_detect")] let acquisition_duration_allowed_imprecision_msec = - args.acquisition_duration_allowed_imprecision_msec; + acquisition_duration_allowed_imprecision_msec; #[cfg(feature = "flydra_feat_detect")] let csv_save_dir = args.csv_save_dir.clone(); #[cfg(feature = "flydratrax")] @@ -3379,38 +3552,22 @@ where let http_camserver_info2 = http_camserver_info.clone(); let led_box_heartbeat_update_arc2 = led_box_heartbeat_update_arc.clone(); - let handle2 = rt_handle.clone(); #[cfg(feature = "flydratrax")] - let (model_server_data_tx, model_server, flydratrax_calibration_source) = { + let (model_server_data_tx, flydratrax_calibration_source) = { info!("send_pose server at {}", model_server_addr); - let info = flydra_types::StaticMainbrainInfo { - name: env!("CARGO_PKG_NAME").into(), - version: env!("CARGO_PKG_VERSION").into(), - }; let (model_server_data_tx, data_rx) = tokio::sync::mpsc::channel(50); // we need the tokio reactor already by here - let model_server = flydra2::new_model_server( - data_rx, - valve.clone(), - &model_server_addr, - info, - handle2.clone(), - ) - .await?; + flydra2::new_model_server(data_rx, model_server_addr).await?; let flydratrax_calibration_source = args.flydratrax_calibration_source; - ( - model_server_data_tx, - model_server, - flydratrax_calibration_source, - ) + (model_server_data_tx, flydratrax_calibration_source) }; let new_cam_data = flydra_types::RegisterNewCamera { - orig_cam_name: cam_name.clone(), + raw_cam_name: raw_cam_name.clone(), ros_cam_name: ros_cam_name.clone(), - http_camserver_info: Some(StrandCamHttpServerInfo::Server(http_camserver_info.clone())), + http_camserver_info: Some(BuiServerInfo::Server(http_camserver_info.clone())), cam_settings_data: Some(flydra_types::UpdateCamSettings { current_cam_settings_buf: settings_on_start, current_cam_settings_extension: settings_file_ext, @@ -3420,14 +3577,13 @@ where #[cfg(feature = "flydratrax")] let valve2 = valve.clone(); - let cam_name2 = cam_name.clone(); + let 
cam_name2 = raw_cam_name.clone(); let (quit_channel, quit_rx) = tokio::sync::oneshot::channel(); let frame_process_task_fut = { { frame_process_task( - handle2, #[cfg(feature = "flydratrax")] - (model_server_data_tx, model_server), + model_server_data_tx, #[cfg(feature = "flydratrax")] flydratrax_calibration_source, cam_name2, @@ -3439,7 +3595,7 @@ where image_height, rx_frame, #[cfg(feature = "flydra_feat_detect")] - tracker_cfg, + im_pt_detect_cfg, #[cfg(feature = "flydra_feat_detect")] std::path::Path::new(&csv_save_dir).to_path_buf(), firehose_tx, @@ -3456,7 +3612,7 @@ where #[cfg(feature = "flydratrax")] http_camserver_info2, process_frame_priority, - mainbrain_info, + transmit_msg_tx.clone(), camdata_addr, led_box_heartbeat_update_arc2, #[cfg(feature = "plugin-process-frame")] @@ -3602,7 +3758,7 @@ where // TODO I just used Arc and RwLock to code this quickly. Convert to single-threaded // versions later. - let known_version = Arc::new(RwLock::new(app_version)); + let known_version = Arc::new(parking_lot::RwLock::new(app_version)); // Create a stream to call our closure now and every 30 minutes. let interval_stream = tokio::time::interval(std::time::Duration::from_secs(1800)); @@ -3627,18 +3783,18 @@ where } debug!("version check future done {}:{}", file!(), line!()); }; - rt_handle.spawn(Box::pin(stream_future)); // confirmed: valved and finishes + tokio::spawn(Box::pin(stream_future)); // confirmed: valved and finishes debug!("version check future spawned {}:{}", file!(), line!()); } - rt_handle.spawn(Box::pin(cam_stream_future)); // confirmed: valved and finishes + tokio::spawn(Box::pin(cam_stream_future)); // confirmed: valved and finishes debug!("cam_stream_future future spawned {}:{}", file!(), line!()); let cam_arg_future = { let shared_store_arc = shared_store_arc.clone(); #[cfg(feature = "checkercal")] - let cam_name2 = cam_name.clone(); + let cam_name2 = raw_cam_name.clone(); let mut cam_args_rx = tokio_stream::wrappers::ReceiverStream::new(cam_args_rx); @@ -3677,13 +3833,16 @@ where } CamArg::SetExposureTime(v) => match cam.set_exposure_time(v) { Ok(()) => { - send_cam_settings_to_braid( - &cam.node_map_save().unwrap(), - transmit_msg_tx.as_ref(), - ¤t_cam_settings_extension, - &ros_cam_name, - ) - .map(|fut| rt_handle.spawn(fut)); + if let Some(transmit_msg_tx) = &transmit_msg_tx { + send_cam_settings_to_braid( + &cam.node_map_save().unwrap(), + transmit_msg_tx, + ¤t_cam_settings_extension, + &raw_cam_name, + ) + .await + .unwrap(); + } let mut tracker = shared_store_arc.write(); tracker.modify(|tracker| tracker.exposure_time.current = v); } @@ -3693,13 +3852,16 @@ where }, CamArg::SetGain(v) => match cam.set_gain(v) { Ok(()) => { - send_cam_settings_to_braid( - &cam.node_map_save().unwrap(), - transmit_msg_tx.as_ref(), - ¤t_cam_settings_extension, - &ros_cam_name, - ) - .map(|fut| rt_handle.spawn(fut)); + if let Some(transmit_msg_tx) = &transmit_msg_tx { + send_cam_settings_to_braid( + &cam.node_map_save().unwrap(), + transmit_msg_tx, + ¤t_cam_settings_extension, + &raw_cam_name, + ) + .await + .unwrap(); + } let mut tracker = shared_store_arc.write(); tracker.modify(|tracker| tracker.gain.current = v); } @@ -3709,13 +3871,16 @@ where }, CamArg::SetGainAuto(v) => match cam.set_gain_auto(v) { Ok(()) => { - send_cam_settings_to_braid( - &cam.node_map_save().unwrap(), - transmit_msg_tx.as_ref(), - ¤t_cam_settings_extension, - &ros_cam_name, - ) - .map(|fut| rt_handle.spawn(fut)); + if let Some(transmit_msg_tx) = &transmit_msg_tx { + send_cam_settings_to_braid( + 
&cam.node_map_save().unwrap(), + transmit_msg_tx, + ¤t_cam_settings_extension, + &raw_cam_name, + ) + .await + .unwrap(); + } let mut tracker = shared_store_arc.write(); tracker.modify(|shared| match cam.gain_auto() { Ok(latest) => { @@ -3753,13 +3918,16 @@ where } CamArg::SetExposureAuto(v) => match cam.set_exposure_auto(v) { Ok(()) => { - send_cam_settings_to_braid( - &cam.node_map_save().unwrap(), - transmit_msg_tx.as_ref(), - ¤t_cam_settings_extension, - &ros_cam_name, - ) - .map(|fut| rt_handle.spawn(fut)); + if let Some(transmit_msg_tx) = &transmit_msg_tx { + send_cam_settings_to_braid( + &cam.node_map_save().unwrap(), + transmit_msg_tx, + ¤t_cam_settings_extension, + &raw_cam_name, + ) + .await + .unwrap(); + } let mut tracker = shared_store_arc.write(); tracker.modify(|shared| match cam.exposure_auto() { Ok(latest) => { @@ -3778,13 +3946,16 @@ where CamArg::SetFrameRateLimitEnabled(v) => { match cam.set_acquisition_frame_rate_enable(v) { Ok(()) => { - send_cam_settings_to_braid( - &cam.node_map_save().unwrap(), - transmit_msg_tx.as_ref(), - ¤t_cam_settings_extension, - &ros_cam_name, - ) - .map(|fut| rt_handle.spawn(fut)); + if let Some(transmit_msg_tx) = &transmit_msg_tx { + send_cam_settings_to_braid( + &cam.node_map_save().unwrap(), + transmit_msg_tx, + ¤t_cam_settings_extension, + &raw_cam_name, + ) + .await + .unwrap(); + } let mut tracker = shared_store_arc.write(); tracker.modify(|shared| { match cam.acquisition_frame_rate_enable() { @@ -3804,13 +3975,16 @@ where } CamArg::SetFrameRateLimit(v) => match cam.set_acquisition_frame_rate(v) { Ok(()) => { - send_cam_settings_to_braid( - &cam.node_map_save().unwrap(), - transmit_msg_tx.as_ref(), - ¤t_cam_settings_extension, - &ros_cam_name, - ) - .map(|fut| rt_handle.spawn(fut)); + if let Some(transmit_msg_tx) = &transmit_msg_tx { + send_cam_settings_to_braid( + &cam.node_map_save().unwrap(), + transmit_msg_tx, + ¤t_cam_settings_extension, + &raw_cam_name, + ) + .await + .unwrap(); + } let mut tracker = shared_store_arc.write(); tracker.modify(|shared| match cam.acquisition_frame_rate() { Ok(latest) => { @@ -4359,7 +4533,7 @@ where intrinsics, width: image_width as usize, height: image_height as usize, - name: ros_cam_name.as_str().to_string(), + name: raw_cam_name.as_str().to_string(), } .into(); @@ -4456,13 +4630,14 @@ where // sleep to let the webserver start before opening browser std::thread::sleep(std::time::Duration::from_millis(100)); - open_browser(url)?; + open_browser(format!("{url}"))?; } else { info!("listening at {}", url); } let (quit_channel, quit_rx) = tokio::sync::oneshot::channel(); + let connection_callback_rx = rx_new_connection; let join_handle = tokio::spawn(video_streaming::firehose_task( connection_callback_rx, firehose_rx, @@ -4479,6 +4654,7 @@ where #[cfg(feature = "plugin-process-frame")] let plugin_streaming_cjh = { + let cam_args_tx2 = cam_args_tx.clone(); let (flag, control) = thread_control::make_pair(); let join_handle = std::thread::Builder::new() .name("plugin_streaming".to_string()) @@ -4684,7 +4860,33 @@ where result }; - Ok((http_camserver_info, cam_args_tx, cam_arg_future2, my_app)) + // Ok(( + // http_camserver_info, + // cam_arg_future2, + // send_updates_future, + // http_serve_future, + // shutdown_rx, + // )) + + // Now run until first future returns, then exit. + if let Some(mainbrain_transmitter_fut) = mainbrain_transmitter_fut { + tokio::select! 
{
+            res = http_serve_future => {res?},
+            res = cam_arg_future2 => {res?},
+            _ = mainbrain_transmitter_fut => {},
+            _ = send_updates_future => {},
+            _ = shutdown_rx => {},
+        }
+    } else {
+        tokio::select! {
+            res = http_serve_future => {res?},
+            res = cam_arg_future2 => {res?},
+            _ = send_updates_future => {},
+            _ = shutdown_rx => {},
+        }
+    }
+
+    Ok(())
 }
 
 #[cfg(feature = "plugin-process-frame")]
@@ -4727,7 +4929,7 @@ pub struct ControlledTaskJoinHandle {
 }
 
 impl ControlledTaskJoinHandle {
-    async fn close_and_join(self) -> std::result::Result {
+    async fn close_and_join(self) -> StdResult {
         debug!("sending stop");
 
         // debug!(
@@ -4751,7 +4953,7 @@ impl ControlledTaskJoinHandle {
 
 pub struct AllJoinHandles {
     frame_process_cjh: ControlledTaskJoinHandle>,
-    video_streaming_cjh: ControlledTaskJoinHandle>,
+    video_streaming_cjh: ControlledTaskJoinHandle>,
     #[cfg(feature = "plugin-process-frame")]
     plugin_streaming_cjh: ControlledThreadJoinHandle<()>,
 }
@@ -4844,31 +5046,25 @@ fn make_family(family: &ci2_remote_control::TagFamily) -> apriltag::Family {
     }
 }
 
-fn send_cam_settings_to_braid(
+async fn send_cam_settings_to_braid(
     cam_settings: &str,
-    transmit_msg_tx: Option<&mpsc::Sender>,
+    transmit_msg_tx: &tokio::sync::mpsc::Sender<flydra_types::BraidHttpApiCallback>,
     current_cam_settings_extension: &str,
-    ros_cam_name: &RosCamName,
-) -> Option> {
-    if let Some(transmit_msg_tx) = transmit_msg_tx {
-        let current_cam_settings_buf = cam_settings.to_string();
-        let current_cam_settings_extension = current_cam_settings_extension.to_string();
-        let ros_cam_name = ros_cam_name.clone();
-        let mut transmit_msg_tx = transmit_msg_tx.clone();
-        let fut = async move {
-            let msg = flydra_types::HttpApiCallback::UpdateCamSettings(flydra_types::PerCam {
-                ros_cam_name,
-                inner: flydra_types::UpdateCamSettings {
-                    current_cam_settings_buf,
-                    current_cam_settings_extension,
-                },
-            });
-            transmit_msg_tx.send(msg).await.unwrap();
-        };
-        Some(fut)
-    } else {
-        None
-    }
+    raw_cam_name: &RawCamName,
+) -> StdResult<(), tokio::sync::mpsc::error::SendError<flydra_types::BraidHttpApiCallback>> {
+    let current_cam_settings_buf = cam_settings.to_string();
+    let current_cam_settings_extension = current_cam_settings_extension.to_string();
+    let raw_cam_name = raw_cam_name.clone();
+    let transmit_msg_tx = transmit_msg_tx.clone();
+
+    let msg = flydra_types::BraidHttpApiCallback::UpdateCamSettings(flydra_types::PerCam {
+        raw_cam_name: raw_cam_name,
+        inner: flydra_types::UpdateCamSettings {
+            current_cam_settings_buf,
+            current_cam_settings_extension,
+        },
+    });
+    transmit_msg_tx.send(msg).await
 }
 
 fn bitrate_to_u32(br: &ci2_remote_control::BitrateSelection) -> u32 {
diff --git a/strand-cam/yew_frontend/src/lib.rs b/strand-cam/yew_frontend/src/lib.rs
index 2eedcbfcf..789a5d0d9 100644
--- a/strand-cam/yew_frontend/src/lib.rs
+++ b/strand-cam/yew_frontend/src/lib.rs
@@ -1314,39 +1314,6 @@ fn to_rate(rate_enum: &RecordingFrameRate) -> Option {
     }
 }
 
-// impl Model {
-//     fn send_message(&mut self, args: &CallbackType) -> Option {
-//         let post_request = Request::post("callback")
-//             .header("Content-Type", "application/json;charset=UTF-8")
-//             .body(Json(&args))
-//             .expect("Failed to build request.");
-
-//         let callback =
-//             self.link
-//                 .callback(move |response: Response>>| {
-//                     if let (meta, Json(Ok(_body))) = response.into_parts() {
-//                         if meta.status.is_success() {
-//                             return Msg::Ignore;
-//                         }
-//                     }
-//                     log::error!("failed sending message");
-//                     Msg::Ignore
-//                 });
-//         let options = FetchOptions {
-//             credentials: Some(Credentials::SameOrigin),
-//             ..Default::default()
-//         };
-
-//         match 
FetchService::fetch_with_options(post_request, options, callback) { -// Ok(task) => Some(task), -// Err(err) => { -// log::error!("sending message failed with error: {}", err); -// None -// } -// } -// } -// } - // ----------------------------------------------------------------------------- async fn post_message(msg: &CallbackType) -> Result<(), FetchError> { @@ -1354,11 +1321,13 @@ async fn post_message(msg: &CallbackType) -> Result<(), FetchError> { let mut opts = RequestInit::new(); opts.method("POST"); opts.cache(web_sys::RequestCache::NoStore); - // opts.mode(web_sys::RequestMode::Cors); - // opts.headers("Content-Type", "application/json;charset=UTF-8") - // set SameOrigin let buf = serde_json::to_string(&msg).unwrap_throw(); opts.body(Some(&JsValue::from_str(&buf))); + let headers = web_sys::Headers::new().unwrap_throw(); + headers + .append("Content-Type", "application/json") + .unwrap_throw(); + opts.headers(&headers); let url = "callback"; let request = Request::new_with_str_and_init(url, &opts)?;