From 46c21d05376db4eb437846a37207d1c04232453c Mon Sep 17 00:00:00 2001 From: Andrew Straw Date: Sun, 28 Jan 2024 15:59:07 +0100 Subject: [PATCH] networking overhaul - convert to axum - rework strand/braid connections - remove bui-backend - logging cleanup, including tracing spans do not include all args - simplify async code - switch logging to tracing from log crate --- Cargo.toml | 6 +- ads-webasm/Cargo.toml | 3 +- ads-webasm/src/components/video_field.rs | 7 +- braid-config-data/src/lib.rs | 20 +- braid-http-session/Cargo.toml | 13 +- braid-http-session/src/lib.rs | 67 +- braid-offline/src/lib.rs | 68 +- braid-offline/tests/test-covariance.rs | 3 - .../tests/test-new-kalmanize-vs-flydra1.rs | 2 - braid-offline/tests/test-offline-retrack.rs | 3 - braid-process-video/Cargo.toml | 1 - braid-process-video/src/lib.rs | 38 +- braid-process-video/src/output_braidz.rs | 33 +- braid/Cargo.toml | 2 + braid/braid-run/Cargo.toml | 46 +- braid/braid-run/braid_frontend/src/lib.rs | 39 +- braid/braid-run/build.rs | 22 +- braid/braid-run/src/callback_handling.rs | 168 ++ braid/braid-run/src/main.rs | 133 +- braid/braid-run/src/mainbrain.rs | 1372 +++++++-------- .../src/multicam_http_session_handler.rs | 117 +- braidz-parser/src/lib.rs | 8 +- bui-backend-session/Cargo.toml | 7 +- bui-backend-session/demo/Cargo.toml | 8 +- bui-backend-session/demo/src/main.rs | 2 +- bui-backend-session/src/lib.rs | 96 +- bui-backend-session/types/Cargo.toml | 13 + bui-backend-session/types/src/lib.rs | 32 + build-util/Cargo.toml | 7 - build-util/src/lib.rs | 25 +- env-tracing-logger/Cargo.toml | 2 - env-tracing-logger/src/lib.rs | 61 +- event-stream-types/Cargo.toml | 19 + event-stream-types/src/lib.rs | 241 +++ flydra-types/Cargo.toml | 13 +- flydra-types/src/lib.rs | 254 ++- flydra2/Cargo.toml | 19 +- flydra2/build.rs | 83 +- flydra2/src/bin/send_pose.rs | 29 +- flydra2/src/bundled_data.rs | 10 +- flydra2/src/connected_camera_manager.rs | 158 +- flydra2/src/error.rs | 14 +- 
flydra2/src/flydra2.rs | 22 +- flydra2/src/frame_bundler.rs | 4 +- flydra2/src/model_server.rs | 451 ++--- flydra2/src/new_object_test_2d.rs | 6 +- flydra2/src/new_object_test_3d.rs | 12 +- flydra2/src/tracking_core.rs | 28 +- flydra2/src/write_data.rs | 4 +- flytrax-csv-to-braidz/Cargo.toml | 2 - flytrax-csv-to-braidz/src/lib.rs | 1 - http-video-streaming/Cargo.toml | 13 +- .../http-video-streaming-types/Cargo.toml | 3 +- .../http-video-streaming-types/src/lib.rs | 9 +- http-video-streaming/src/lib.rs | 99 +- media-utils/frame-source/src/h264_source.rs | 8 +- .../mkv-parser-kit/examples/simple-parser.rs | 2 +- media-utils/mkv-parser-kit/src/de.rs | 4 +- media-utils/mkv-strand-reader/src/lib.rs | 6 +- rust-cam-bui-types/Cargo.toml | 7 +- rust-cam-bui-types/src/lib.rs | 5 +- .../users-guide/src/braid_remote_cameras.md | 4 +- strand-cam-storetype/src/lib.rs | 6 +- strand-cam/Cargo.toml | 35 +- strand-cam/build.rs | 26 +- strand-cam/src/cli_app.rs | 260 +-- strand-cam/src/datagram_socket.rs | 1 + strand-cam/src/flydratrax_handle_msg.rs | 1 + strand-cam/src/strand-cam.rs | 1514 +++++++++-------- strand-cam/strand-cam-pylon/Cargo.toml | 1 + strand-cam/yew_frontend/src/lib.rs | 57 +- 71 files changed, 2888 insertions(+), 2967 deletions(-) create mode 100644 braid/braid-run/src/callback_handling.rs create mode 100644 bui-backend-session/types/Cargo.toml create mode 100644 bui-backend-session/types/src/lib.rs create mode 100644 event-stream-types/Cargo.toml create mode 100644 event-stream-types/src/lib.rs diff --git a/Cargo.toml b/Cargo.toml index 4097a3412..cb1b80778 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -27,6 +27,7 @@ members = [ "braidz-viewer", "bui-backend-session", "bui-backend-session/demo", + "bui-backend-session/types", "build-util", "camcal", "led-box", @@ -49,6 +50,7 @@ members = [ "enum-iter", "env-tracing-logger", "env-tracing-logger/env-tracing-logger-sample", + "event-stream-types", "fastimage", "fastfreeimage", "fly-eye", @@ -120,9 +122,7 @@ members = 
[ "zip-or-dir/dir2zip", ] -exclude = [ - "led-box-firmware", -] +exclude = ["led-box-firmware"] [profile.release] debug = true diff --git a/ads-webasm/Cargo.toml b/ads-webasm/Cargo.toml index 551791f06..c918486e9 100644 --- a/ads-webasm/Cargo.toml +++ b/ads-webasm/Cargo.toml @@ -12,7 +12,6 @@ js-sys = "0.3" gloo = "0.8.0" gloo-file = "0.2" wasm-bindgen = { version = "0.2.58" } -http = "0.2" serde = "1.0" serde_yaml = "0.9" serde_derive = "1.0" @@ -27,7 +26,6 @@ uuid = { version = "1.2.2", default-features = false, features = [ ] } # add feature flag required for uuid crate csv = { version = "1.1", optional = true } -bui-backend-types = "0.8" yew-tincture = "0.1" simple-obj-parse = { path = "../simple-obj-parse", optional = true } @@ -35,6 +33,7 @@ textured-tri-mesh = { path = "../textured-tri-mesh", optional = true } http-video-streaming-types = { path = "../http-video-streaming/http-video-streaming-types" } enum-iter = { path = "../enum-iter" } rust-cam-bui-types = { path = "../rust-cam-bui-types" } +bui-backend-session-types = { path = "../bui-backend-session/types" } [dependencies.web-sys] version = "0.3" diff --git a/ads-webasm/src/components/video_field.rs b/ads-webasm/src/components/video_field.rs index cc93bfc37..666749ada 100644 --- a/ads-webasm/src/components/video_field.rs +++ b/ads-webasm/src/components/video_field.rs @@ -1,7 +1,7 @@ use std::{cell::RefCell, rc::Rc}; use crate::video_data::VideoData; -use bui_backend_types; +use bui_backend_session_types; use gloo::timers::callback::Timeout; use serde::{Deserialize, Serialize}; use wasm_bindgen::prelude::*; @@ -29,8 +29,7 @@ pub struct ImData2 { pub draw_shapes: Vec, pub fno: u64, pub ts_rfc3339: String, // timestamp in RFC3339 format - pub ck: bui_backend_types::ConnectionKey, - pub name: Option, + pub ck: bui_backend_session_types::ConnectionKey, } #[derive(Debug, PartialEq, Clone)] @@ -134,7 +133,6 @@ impl Component for VideoField { let fci = FirehoseCallbackInner { ck: im_data.ck, fno: im_data.fno as 
usize, - name: im_data.name.clone(), ts_rfc3339: im_data.ts_rfc3339, }; @@ -181,7 +179,6 @@ impl Component for VideoField { ck: in_msg.ck, fno: in_msg.fno, found_points: in_msg.found_points.clone(), - name: in_msg.name.clone(), ts_rfc3339: in_msg.ts_rfc3339, draw_shapes: draw_shapes.into_iter().map(|s| s.into()).collect(), }; diff --git a/braid-config-data/src/lib.rs b/braid-config-data/src/lib.rs index 97f7b1bb9..7c793d333 100644 --- a/braid-config-data/src/lib.rs +++ b/braid-config-data/src/lib.rs @@ -1,7 +1,4 @@ -#![cfg_attr( - feature = "backtrace", - feature(error_generic_member_access) -)] +#![cfg_attr(feature = "backtrace", feature(error_generic_member_access))] use serde::{Deserialize, Serialize}; @@ -34,10 +31,6 @@ pub enum Error { type Result = std::result::Result; -fn default_lowlatency_camdata_udp_addr() -> String { - "127.0.0.1:0".to_string() -} - fn default_http_api_server_addr() -> String { "127.0.0.1:0".to_string() } @@ -95,8 +88,7 @@ pub struct MainbrainConfig { // /// Parameters to potentially raise the mainbrain thread priority. // sched_policy_priority: Option<(i32, i32)>, /// Address of UDP port to send low-latency detection data - #[serde(default = "default_lowlatency_camdata_udp_addr")] - pub lowlatency_camdata_udp_addr: String, + pub lowlatency_camdata_udp_addr: Option, /// Address of HTTP port for control API #[serde(default = "default_http_api_server_addr")] pub http_api_server_addr: String, @@ -108,8 +100,8 @@ pub struct MainbrainConfig { /// Save rows to data2d_distorted where nothing detected (saves timestamps) #[serde(default = "default_true")] pub save_empty_data2d: bool, - /// Secret to use for JWT auth on HTTP port for control API - pub jwt_secret: Option, + /// Secret to use for signing HTTP cookies (base64 encoded) + pub secret_base64: Option, /// For debugging: filename to store captured packet data. pub packet_capture_dump_fname: Option, /// Threshold duration before logging error (msec). 
@@ -130,12 +122,12 @@ impl std::default::Default for MainbrainConfig { tracking_params: flydra_types::default_tracking_params_full_3d(), // Raising the mainbrain thread priority is currently disabled. // sched_policy_priority: None, - lowlatency_camdata_udp_addr: default_lowlatency_camdata_udp_addr(), + lowlatency_camdata_udp_addr: None, http_api_server_addr: default_http_api_server_addr(), http_api_server_token: None, model_server_addr: default_model_server_addr(), save_empty_data2d: true, - jwt_secret: None, + secret_base64: None, packet_capture_dump_fname: None, acquisition_duration_allowed_imprecision_msec: flydra_types::DEFAULT_ACQUISITION_DURATION_ALLOWED_IMPRECISION_MSEC, diff --git a/braid-http-session/Cargo.toml b/braid-http-session/Cargo.toml index 7d6f4973d..9feaa2151 100644 --- a/braid-http-session/Cargo.toml +++ b/braid-http-session/Cargo.toml @@ -2,19 +2,18 @@ name = "braid-http-session" version = "0.1.0" edition = "2021" -rust-version="1.60" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +rust-version = "1.60" [dependencies] -log = "0.4" +tracing = "0.1" thiserror = "1" hyper = "1.1" -serde = {version="1.0",features=["derive"]} +serde = { version = "1.0", features = ["derive"] } serde_json = "1" futures = "0.3" -bui-backend-session = {path="../bui-backend-session"} -flydra-types = { path = "../flydra-types", features=["with-dns"] } +bui-backend-session = { path = "../bui-backend-session" } +flydra-types = { path = "../flydra-types" } http-body-util = "0.1.0" bytes = "1.5.0" +axum = "0.7.4" diff --git a/braid-http-session/src/lib.rs b/braid-http-session/src/lib.rs index 25dc2ea57..072a1dc40 100644 --- a/braid-http-session/src/lib.rs +++ b/braid-http-session/src/lib.rs @@ -1,5 +1,5 @@ use ::bui_backend_session::{future_session, InsecureSession}; -use log::{debug, error}; +use tracing::{debug, error}; #[derive(thiserror::Error, Debug)] pub enum Error { @@ -16,6 +16,7 @@ pub enum Error { } /// Create a 
`MainbrainSession` which has already made a request +#[tracing::instrument(level = "info")] pub async fn mainbrain_future_session( dest: flydra_types::MainbrainBuiLocation, ) -> Result { @@ -26,54 +27,44 @@ pub async fn mainbrain_future_session( Ok(MainbrainSession { inner }) } -type MyBody = http_body_util::combinators::BoxBody; - -fn body_from_buf(body_buf: &[u8]) -> MyBody { - let body = http_body_util::Full::new(bytes::Bytes::from(body_buf.to_vec())); - use http_body_util::BodyExt; - MyBody::new(body.map_err(|_: std::convert::Infallible| unreachable!())) +fn body_from_buf(body_buf: &[u8]) -> axum::body::Body { + axum::body::Body::new(http_body_util::Full::new(bytes::Bytes::from( + body_buf.to_vec(), + ))) } /// This allows communicating with the Mainbrain over HTTP RPC. /// /// This replaced the old ROS layer for camera -> mainbrain command and control /// communication from flydra. -#[derive(Clone)] +#[derive(Clone, Debug)] pub struct MainbrainSession { inner: InsecureSession, } impl MainbrainSession { + #[tracing::instrument(skip_all)] async fn do_post(&mut self, bytes: Vec) -> Result<(), Error> { let body = body_from_buf(&bytes); - let resp = self.inner.post("callback", body).await?; - - debug!("called do_post and got response: {:?}", resp); - if !resp.status().is_success() { - error!( - "error: POST response was not a success {}:{}", - file!(), - line!() - ); - // TODO: return Err(_)? 
- }; + debug!("calling mainbrain callback handler"); + let _resp = self.inner.post("callback", body).await?; Ok(()) } pub async fn get_remote_info( &mut self, - orig_cam_name: &flydra_types::RawCamName, + raw_cam_name: &flydra_types::RawCamName, ) -> Result { let path = format!( - "{}?camera={}", - flydra_types::REMOTE_CAMERA_INFO_PATH, - orig_cam_name.as_str() + "{}/{}", + flydra_types::braid_http::REMOTE_CAMERA_INFO_PATH, + flydra_types::braid_http::encode_cam_name(raw_cam_name) ); debug!( "Getting remote camera info for camera \"{}\".", - orig_cam_name.as_str() + raw_cam_name.as_str() ); let resp = self.inner.get(&path).await?; @@ -97,32 +88,12 @@ impl MainbrainSession { >(&data)?) } - pub async fn register_flydra_camnode( - &mut self, - msg: &flydra_types::RegisterNewCamera, - ) -> Result<(), Error> { - debug!("register_flydra_camnode with message {:?}", msg); - let msg = flydra_types::HttpApiCallback::NewCamera(msg.clone()); - Ok(self.send_message(msg).await?) - } - - pub async fn update_image( + #[tracing::instrument(skip_all)] + pub async fn post_callback_message( &mut self, - ros_cam_name: flydra_types::RosCamName, - current_image_png: flydra_types::PngImageData, + msg: flydra_types::BraidHttpApiCallback, ) -> Result<(), Error> { - let msg = flydra_types::PerCam { - ros_cam_name, - inner: flydra_types::UpdateImage { current_image_png }, - }; - - debug!("update_image with message {:?}", msg); - let msg = flydra_types::HttpApiCallback::UpdateCurrentImage(msg); - Ok(self.send_message(msg).await?) - } - - pub async fn send_message(&mut self, msg: flydra_types::HttpApiCallback) -> Result<(), Error> { let bytes = serde_json::to_vec(&msg).unwrap(); - Ok(self.do_post(bytes).await?) 
+ self.do_post(bytes).await } } diff --git a/braid-offline/src/lib.rs b/braid-offline/src/lib.rs index 7cdb22bfc..53578e7ca 100644 --- a/braid-offline/src/lib.rs +++ b/braid-offline/src/lib.rs @@ -15,11 +15,11 @@ use tracing::{debug, info, warn}; use braidz_parser::open_maybe_gzipped; use flydra2::{ - CoordProcessor, CoordProcessorConfig, Data2dDistortedRow, FrameData, FrameDataAndPoints, - NumberedRawUdpPoint, StreamItem, + new_model_server, CoordProcessor, CoordProcessorConfig, Data2dDistortedRow, FrameData, + FrameDataAndPoints, NumberedRawUdpPoint, StreamItem, }; use flydra_types::{ - CamInfoRow, PerCamSaveData, RawCamName, RosCamName, SyncFno, TrackingParams, + CamInfoRow, PerCamSaveData, RawCamName, SyncFno, TrackingParams, FEATURE_DETECT_SETTINGS_DIRNAME, IMAGES_DIRNAME, }; use groupby::{AscendingGroupIter, BufferedSortIter}; @@ -78,6 +78,12 @@ pub enum Error { #[cfg(feature = "backtrace")] backtrace: Backtrace, }, + #[error("error registering camera: {msg}")] + RegisterCameraError { + msg: &'static str, + #[cfg(feature = "backtrace")] + backtrace: Backtrace, + }, } fn to_point_info(row: &Data2dDistortedRow, idx: u8) -> NumberedRawUdpPoint { @@ -115,7 +121,7 @@ fn split_by_cam(invec: Vec) -> Vec> by_cam.into_values().collect() } -#[tracing::instrument] +#[tracing::instrument(level = "debug", skip_all)] fn calc_fps_from_data(data_file: R) -> flydra2::Result { let rdr = csv::Reader::from_reader(data_file); let mut data_iter = rdr.into_deserialize(); @@ -209,7 +215,7 @@ pub struct KalmanizeOptions { /// Note that a temporary directly ending with `.braid` is initially created and /// only on upon completed tracking is this converted to the output .braidz /// file. 
-#[tracing::instrument] +#[tracing::instrument(level = "debug", skip_all)] #[allow(clippy::too_many_arguments)] pub async fn kalmanize( mut data_src: braidz_parser::incremental_parser::IncrementalParser< @@ -220,7 +226,6 @@ pub async fn kalmanize( forced_fps: Option>, tracking_params: TrackingParams, opt2: KalmanizeOptions, - rt_handle: tokio::runtime::Handle, save_performance_histograms: bool, saving_program_name: &str, no_progress: bool, @@ -334,7 +339,7 @@ where let all_expected_cameras = recon .cam_names() - .map(|x| RosCamName::new(x.to_string())) + .map(|x| RawCamName::new(x.to_string())) .collect(); let signal_all_cams_present = std::sync::Arc::new(std::sync::atomic::AtomicBool::new(false)); @@ -347,12 +352,6 @@ where signal_all_cams_synced, ); - // Create `stream_cancel::Valve` for shutting everything down. Note this is - // `Clone`, so we can (and should) shut down everything with it. Here we let - // _quit_trigger drop when it goes out of scope. This is due to use in this - // offline context. 
- let (_quit_trigger, valve) = stream_cancel::Valve::new(); - let (frame_data_tx, frame_data_rx) = tokio::sync::mpsc::channel(10); let frame_data_rx = tokio_stream::wrappers::ReceiverStream::new(frame_data_rx); let save_empty_data2d = true; @@ -364,11 +363,9 @@ where ignore_latency, mini_arena_debug_image_dir, }, - rt_handle.clone(), cam_manager.clone(), Some(recon.clone()), metadata_builder.clone(), - valve, )?; let images_dirname = data_src.path_starter().join(IMAGES_DIRNAME); @@ -420,13 +417,13 @@ where let images_dirname = data_src.path_starter().join(IMAGES_DIRNAME); - let per_cam_data: BTreeMap = match images_dirname.list_paths() { + let per_cam_data: BTreeMap = match images_dirname.list_paths() { Ok(relnames) => relnames .iter() .map(|relname| { assert_eq!(relname.extension(), Some(std::ffi::OsStr::new("png"))); - let ros_cam_name = - RosCamName::new(relname.file_stem().unwrap().to_str().unwrap().to_string()); + let raw_cam_name = + RawCamName::new(relname.file_stem().unwrap().to_str().unwrap().to_string()); let png_fname = data_src.path_starter().join(IMAGES_DIRNAME).join(relname); let current_image_png = { @@ -439,7 +436,7 @@ where let mut current_feature_detect_settings_fname = data_src .path_starter() .join(FEATURE_DETECT_SETTINGS_DIRNAME) - .join(format!("{}.toml", ros_cam_name.as_str())); + .join(format!("{}.toml", raw_cam_name.as_str())); let current_feature_detect_settings = if current_feature_detect_settings_fname.exists() { @@ -452,7 +449,7 @@ where }; ( - ros_cam_name, + raw_cam_name, PerCamSaveData { current_image_png: current_image_png.into(), cam_settings_data: None, @@ -471,18 +468,23 @@ where let mut cam_info_fname = data_src.path_starter(); cam_info_fname.push(flydra_types::CAM_INFO_CSV_FNAME); let cam_info_file = open_maybe_gzipped(cam_info_fname)?; - let mut orig_camn_to_cam_name: BTreeMap = BTreeMap::new(); + let mut orig_camn_to_cam_name: BTreeMap = BTreeMap::new(); let rdr = csv::Reader::from_reader(cam_info_file); for row in 
rdr.into_deserialize::() { let row = row?; let orig_cam_name = RawCamName::new(row.cam_id.to_string()); - let ros_cam_name = RosCamName::new(row.cam_id.to_string()); - let no_server = flydra_types::StrandCamHttpServerInfo::NoServer; + let no_server = flydra_types::BuiServerInfo::NoServer; - orig_camn_to_cam_name.insert(row.camn, ros_cam_name.clone()); + orig_camn_to_cam_name.insert(row.camn, orig_cam_name.clone()); - cam_manager.register_new_camera(&orig_cam_name, &no_server, &ros_cam_name); + cam_manager + .register_new_camera(&orig_cam_name, &no_server) + .map_err(|msg| Error::RegisterCameraError { + msg, + #[cfg(feature = "backtrace")] + backtrace: std::backtrace::Backtrace::capture(), + })?; } { @@ -672,21 +674,16 @@ where let expected_framerate = Some(fps as f32); - // let model_server_addr = opt.model_server_addr.clone(); - - let (_quit_trigger, valve) = stream_cancel::Valve::new(); let (data_tx, data_rx) = tokio::sync::mpsc::channel(50); let _model_server = match &opt2.model_server_addr { Some(ref addr) => { let addr = addr.parse().unwrap(); info!("send_pose server at {}", addr); - let info = flydra_types::StaticMainbrainInfo { - name: env!("CARGO_PKG_NAME").into(), - version: env!("CARGO_PKG_VERSION").into(), - }; coord_processor.add_listener(data_tx); - Some(flydra2::new_model_server(data_rx, valve, &addr, info, rt_handle).await?) + + let model_server_future = new_model_server(data_rx, addr); + Some(tokio::spawn(async { model_server_future.await })) } None => None, }; @@ -761,7 +758,7 @@ pub fn pick_csvgz_or_csv(csv_path: &Path) -> flydra2::Result> { /// This is our "real" main top-level function but we have some decoration we /// need to do in [main], so we name this differently. 
-#[tracing::instrument] +#[tracing::instrument(level = "debug", skip_all)] pub async fn braid_offline_retrack(opt: Cli) -> anyhow::Result<()> { let data_src = braidz_parser::incremental_parser::IncrementalParser::open(opt.data_src.as_path()) @@ -826,8 +823,6 @@ pub async fn braid_offline_retrack(opt: Cli) -> anyhow::Result<()> { )); } - let rt_handle = tokio::runtime::Handle::current(); - let save_performance_histograms = true; kalmanize( @@ -836,7 +831,6 @@ pub async fn braid_offline_retrack(opt: Cli) -> anyhow::Result<()> { opt.fps.map(|v| NotNan::new(v).unwrap()), tracking_params, opts, - rt_handle, save_performance_histograms, "braid-offline-retrack", opt.no_progress, diff --git a/braid-offline/tests/test-covariance.rs b/braid-offline/tests/test-covariance.rs index 330b0385a..fe8d5f628 100644 --- a/braid-offline/tests/test-covariance.rs +++ b/braid-offline/tests/test-covariance.rs @@ -33,8 +33,6 @@ async fn test_covariance() { let opts = braid_offline::KalmanizeOptions::default(); - let rt_handle = tokio::runtime::Handle::try_current().unwrap(); - let save_performance_histograms = false; braid_offline::kalmanize( @@ -43,7 +41,6 @@ async fn test_covariance() { None, tracking_params, opts, - rt_handle, save_performance_histograms, &format!("{}:{}", file!(), line!()), true, diff --git a/braid-offline/tests/test-new-kalmanize-vs-flydra1.rs b/braid-offline/tests/test-new-kalmanize-vs-flydra1.rs index aff3e7ae5..a458769c9 100644 --- a/braid-offline/tests/test-new-kalmanize-vs-flydra1.rs +++ b/braid-offline/tests/test-new-kalmanize-vs-flydra1.rs @@ -330,7 +330,6 @@ async fn run_test(src: &str, untracked_dir: PathBuf) -> anyhow::Result<()> { let tracking_params = flydra_types::default_tracking_params_full_3d(); println!("tracking with default 3D tracking parameters"); - let rt_handle = tokio::runtime::Handle::current(); let data_src = braidz_parser::incremental_parser::IncrementalParser::open_dir(&untracked_dir) .unwrap_or_else(|_| panic!("While opening dir {}", 
untracked_dir.display())); let data_src = data_src @@ -345,7 +344,6 @@ async fn run_test(src: &str, untracked_dir: PathBuf) -> anyhow::Result<()> { expected_fps, tracking_params, braid_offline::KalmanizeOptions::default(), - rt_handle, save_performance_histograms, &format!("{}:{}", file!(), line!()), true, diff --git a/braid-offline/tests/test-offline-retrack.rs b/braid-offline/tests/test-offline-retrack.rs index cf325a6f5..d57ffd01d 100644 --- a/braid-offline/tests/test-offline-retrack.rs +++ b/braid-offline/tests/test-offline-retrack.rs @@ -33,8 +33,6 @@ async fn test_min_two_rays_needed() { let opts = braid_offline::KalmanizeOptions::default(); - let rt_handle = tokio::runtime::Handle::try_current().unwrap(); - let save_performance_histograms = true; braid_offline::kalmanize( @@ -43,7 +41,6 @@ async fn test_min_two_rays_needed() { None, tracking_params, opts, - rt_handle, save_performance_histograms, &format!("{}:{}", file!(), line!()), true, diff --git a/braid-process-video/Cargo.toml b/braid-process-video/Cargo.toml index b0c3a1077..40ed0bb68 100644 --- a/braid-process-video/Cargo.toml +++ b/braid-process-video/Cargo.toml @@ -11,7 +11,6 @@ log = "0.4" env_logger = "0.10" tokio = {version="1.17", features=["macros","rt","tracing"]} tokio-stream = "0.1.8" -stream-cancel = "0.8" anyhow = "1" chrono = {version="0.4.23", default-features=false, features=["serde", "clock", "std", "wasmbind"]} serde = {version="1", features=["derive"]} diff --git a/braid-process-video/src/lib.rs b/braid-process-video/src/lib.rs index ba75ccff5..cdac81b14 100644 --- a/braid-process-video/src/lib.rs +++ b/braid-process-video/src/lib.rs @@ -9,7 +9,7 @@ use ordered_float::NotNan; use machine_vision_formats::ImageData; use timestamped_frame::ExtraTimeData; -use flydra_types::{Data2dDistortedRow, RawCamName, RosCamName}; +use flydra_types::{Data2dDistortedRow, RawCamName}; mod peek2; use peek2::Peek2; @@ -170,7 +170,7 @@ fn synchronize_readers_from( struct PerCamRender { best_name: String, 
- ros_name: Option, + raw_name: RawCamName, frame0_png_buf: flydra_types::PngImageData, width: usize, height: usize, @@ -179,7 +179,7 @@ struct PerCamRender { impl PerCamRender { fn from_reader(cam_id: &CameraIdentifier) -> Self { let best_name = cam_id.best_name(); - let ros_name = cam_id.ros_name().map(RosCamName::new); + let raw_name = RawCamName::new(best_name.clone()); let rdr = match &cam_id { CameraIdentifier::MovieOnly(m) | CameraIdentifier::Both((m, _)) => { @@ -221,7 +221,7 @@ impl PerCamRender { Self { best_name, - ros_name, + raw_name, frame0_png_buf, width, height, @@ -233,9 +233,9 @@ impl PerCamRender { braidz_cam: &BraidzCamId, ) -> Self { let image_sizes = braid_archive.image_sizes.as_ref().unwrap(); - let (width, height) = image_sizes.get(&braidz_cam.ros_cam_name).unwrap(); - let best_name = braidz_cam.ros_cam_name.clone(); // this is the best we can do - let ros_name = Some(RosCamName::new(braidz_cam.ros_cam_name.clone())); + let (width, height) = image_sizes.get(&braidz_cam.cam_id_str).unwrap(); + let best_name = braidz_cam.cam_id_str.clone(); // this is the best we can do + let raw_name = RawCamName::new(best_name.clone()); // generate blank first image of the correct size. 
let image_data: Vec = vec![0; *width * *height]; @@ -253,7 +253,7 @@ impl PerCamRender { Self { best_name, - ros_name, + raw_name, frame0_png_buf, width: *width, height: *height, @@ -337,15 +337,7 @@ impl CameraIdentifier { .unwrap_or_else(|| m.filename.clone()) }) } - CameraIdentifier::BraidzOnly(b) => b.ros_cam_name.clone(), - } - } - fn ros_name(&self) -> Option { - match self { - CameraIdentifier::MovieOnly(m) => m.ros_name(), - CameraIdentifier::BraidzOnly(b) | CameraIdentifier::Both((_, b)) => { - Some(b.ros_cam_name.clone()) - } + CameraIdentifier::BraidzOnly(b) => b.cam_id_str.clone(), } } fn frame0_time(&self) -> chrono::DateTime { @@ -390,7 +382,7 @@ impl MovieCamId { #[derive(Clone, Debug, PartialEq)] struct BraidzCamId { - ros_cam_name: String, + cam_id_str: String, camn: flydra_types::CamNum, } @@ -513,7 +505,7 @@ pub async fn run_config(cfg: &Valid) -> Result) -> Result { - if Some(braidz_cam_id.ros_cam_name.clone()) == m.ros_name() { + if Some(braidz_cam_id.cam_id_str.clone()) == m.ros_name() { CameraIdentifier::Both((m, braidz_cam_id.clone())) } else { CameraIdentifier::MovieOnly(m) @@ -589,7 +581,7 @@ pub async fn run_config(cfg: &Valid) -> Result { m.ros_name().unwrap() } - CameraIdentifier::BraidzOnly(b) => b.ros_cam_name.clone(), + CameraIdentifier::BraidzOnly(b) => b.cam_id_str.clone(), }) .collect(); @@ -701,8 +693,8 @@ pub async fn run_config(cfg: &Valid) -> Result>(); + .map(|x| RawCamName::new(x.clone())) + .collect::>(); // Initialize outputs let output_storage: Vec> = diff --git a/braid-process-video/src/output_braidz.rs b/braid-process-video/src/output_braidz.rs index 8eeec6f7b..c0fb95fb6 100644 --- a/braid-process-video/src/output_braidz.rs +++ b/braid-process-video/src/output_braidz.rs @@ -1,7 +1,7 @@ use anyhow::Result; use std::collections::{BTreeMap, BTreeSet}; -use flydra_types::{PerCamSaveData, RawCamName, RosCamName}; +use flydra_types::{PerCamSaveData, RawCamName}; use crate::{ config::{BraidRetrackVideoConfig, 
CameraCalibrationSource, TrackingParametersSource}, @@ -20,7 +20,7 @@ impl BraidStorage { b: &crate::config::BraidzOutputConfig, tracking_parameters: Option, sources: &[crate::CameraSource], - all_expected_cameras: BTreeSet, + all_expected_cameras: BTreeSet, expected_framerate: Option, ) -> Result { let output_braidz_path = std::path::PathBuf::from(&b.filename); @@ -60,14 +60,14 @@ impl BraidStorage { std::sync::Arc::new(std::sync::atomic::AtomicBool::new(false)); let signal_all_cams_synced = std::sync::Arc::new(std::sync::atomic::AtomicBool::new(false)); - let braidz_per_cam_save_data: BTreeMap = sources + let braidz_per_cam_save_data: BTreeMap = sources .iter() .map(|source| { - let ros_cam_name = source.per_cam_render.ros_name.clone().unwrap(); + let raw_cam_name = source.per_cam_render.raw_name.clone(); let current_image_png = source.per_cam_render.frame0_png_buf.clone(); ( - ros_cam_name, + raw_cam_name, PerCamSaveData { current_image_png, cam_settings_data: None, @@ -84,18 +84,13 @@ impl BraidStorage { signal_all_cams_synced, ); - for ros_cam_name in all_expected_cameras.iter() { - let no_server = flydra_types::StrandCamHttpServerInfo::NoServer; - let orig_cam_name = RawCamName::new(ros_cam_name.to_string()); // this is a lie... - cam_manager.register_new_camera(&orig_cam_name, &no_server, ros_cam_name); + for raw_cam_name in all_expected_cameras.iter() { + let no_server = flydra_types::BuiServerInfo::NoServer; + cam_manager + .register_new_camera(&raw_cam_name, &no_server) + .map_err(|msg| anyhow::anyhow!("Error registering new camera: {msg}"))?; } - // Create `stream_cancel::Valve` for shutting everything down. Note this is - // `Clone`, so we can (and should) shut down everything with it. Here we let - // _quit_trigger drop when it goes out of scope. This is due to use in this - // offline context. 
- let (_quit_trigger, valve) = stream_cancel::Valve::new(); - let (frame_data_tx, frame_data_rx) = tokio::sync::mpsc::channel(10); let frame_data_rx = tokio_stream::wrappers::ReceiverStream::new(frame_data_rx); let save_empty_data2d = true; @@ -107,11 +102,9 @@ impl BraidStorage { ignore_latency, mini_arena_debug_image_dir: None, }, - tokio::runtime::Handle::current(), cam_manager.clone(), recon.clone(), flydra2::BraidMetadataBuilder::saving_program_name("braid-process-video"), - valve, )?; let save_cfg = flydra2::StartSavingCsvConfig { @@ -146,8 +139,8 @@ impl BraidStorage { all_cam_render_data: &[PerCamRenderFrame<'_>], ) -> Result<()> { for cam_render_data in all_cam_render_data.iter() { - let ros_cam_name = cam_render_data.p.ros_name.clone().unwrap(); - let cam_num = self.cam_manager.cam_num(&ros_cam_name).unwrap(); + let raw_cam_name = cam_render_data.p.raw_name.clone(); + let cam_num = self.cam_manager.cam_num(&raw_cam_name).unwrap(); let trigger_timestamp = synced_data .braidz_info @@ -155,7 +148,7 @@ impl BraidStorage { .and_then(|bi| bi.trigger_timestamp.clone()); let frame_data = flydra2::FrameData::new( - ros_cam_name, + raw_cam_name, cam_num, flydra_types::SyncFno(out_fno.try_into().unwrap()), trigger_timestamp, diff --git a/braid/Cargo.toml b/braid/Cargo.toml index 2c90d9a66..d8bdf7d28 100644 --- a/braid/Cargo.toml +++ b/braid/Cargo.toml @@ -20,3 +20,5 @@ flydra-types = {path="../flydra-types"} flydra-feature-detector-types = {path = "../flydra-feature-detector/flydra-feature-detector-types"} flydra-pt-detect-cfg = {path = "../flydra-feature-detector/flydra-pt-detect-cfg"} braid-config-data = {path = "../braid-config-data"} +tracing-subscriber = "0.3.18" +tracing = "0.1.40" diff --git a/braid/braid-run/Cargo.toml b/braid/braid-run/Cargo.toml index 7d9843bda..159e4fcc2 100644 --- a/braid/braid-run/Cargo.toml +++ b/braid/braid-run/Cargo.toml @@ -12,8 +12,6 @@ build-util = {path="../../build-util"} [dependencies] -log = { version = "0.4", features = 
["release_max_level_debug"] } -env_logger = "0.10" configure = "0.1.1" thiserror = "1.0.33" anyhow = "1.0" @@ -22,7 +20,6 @@ serde = "1.0" serde_json = "1.0" toml = "0.5" regex = "1.0" -url = "2" braid-triggerbox = "0.4.1" chrono = {version="0.4.23", default-features=false, features=["clock", "std", "wasmbind"]} futures = "0.3" @@ -32,42 +29,49 @@ tokio-stream = "0.1.9" stream-cancel = "0.8" bytes = "1.0" clap = { version = "4.3.4", features = ["derive"] } +preferences-serde1 = "2.0.0" qrcodegen = "1.4" image = "0.24.2" hyper = "1.1" lazy_static = "1.4" -ctrlc = { version = "3.1.3", features = ["termination"] } csv = "1.1" http-body-util = "0.1.0" http = "1.0.0" - -includedir = { version = "0.6", optional = true } -phf = { version = "0.8", optional = true } -async-change-tracker = "0.3" -bui-backend-types = "0.8" -bui-backend = {version="0.15", default-features = false} +async-change-tracker = "0.3.4" +tracing = "0.1.40" +axum = "0.7.4" +tower = "0.4.13" +cookie = "0.18.0" +tower-http = { version = "0.5.1", features = ["fs", "trace"] } +tower-serve-static = { version = "0.1", optional = true } +include_dir = { version = "0.7.3", optional = true } +axum-token-auth = "0.1.0" +http-body = "1.0.0" +base64 = "0.12" braid = {path=".."} braid-config-data = {path="../../braid-config-data"} -flydra-pt-detect-cfg = {path = "../../flydra-feature-detector/flydra-pt-detect-cfg"} -strand-cam-storetype = {path = "../../strand-cam-storetype"} -flydra-types = {path="../../flydra-types", features=["with-dns"]} -flydra2 = {path="../../flydra2", default-features = false, features=["braid"]} -rust-cam-bui-types = {path="../../rust-cam-bui-types"} -mvg = {path="../../mvg"} -flydra-mvg = {path="../../flydra-mvg"} +bui-backend-session-types = { path = "../../bui-backend-session/types" } bui-backend-session = {path="../../bui-backend-session"} -flydra-feature-detector-types = {path = "../../flydra-feature-detector/flydra-feature-detector-types", default-features = false} ci2-remote-control 
= {path = "../../ci2-remote-control"} datetime-conversion = {path = "../../datetime-conversion"} +event-stream-types = { path = "../../event-stream-types" } +flydra-feature-detector-types = {path = "../../flydra-feature-detector/flydra-feature-detector-types", default-features = false} +flydra-mvg = {path="../../flydra-mvg"} +flydra-pt-detect-cfg = {path = "../../flydra-feature-detector/flydra-pt-detect-cfg"} +flydra-types = { path="../../flydra-types", features = [ "start-listener", "build-urls" ] } +flydra2 = {path="../../flydra2", default-features = false, features=["braid"]} +mvg = {path="../../mvg"} +rust-cam-bui-types = {path="../../rust-cam-bui-types"} +strand-cam-storetype = {path = "../../strand-cam-storetype"} [features] default = ["bundle_files"] deadlock_detection = ["parking_lot/deadlock_detection"] -# BUI frontend -bundle_files = ["flydra2/bundle_files", "bui-backend/bundle_files", "build-util/bundle_files", "includedir", "phf"] -serve_files = ["flydra2/serve_files", "bui-backend/serve_files", "build-util/serve_files"] +# BUI frontend. 
must pick one of the following two: +bundle_files = ["flydra2/bundle_files", "tower-serve-static", "include_dir"] +serve_files = ["flydra2/serve_files"] backtrace = ["flydra2/backtrace", "mvg/backtrace", "flydra-mvg/backtrace"] diff --git a/braid/braid-run/braid_frontend/src/lib.rs b/braid/braid-run/braid_frontend/src/lib.rs index 026099b83..154fb9ebc 100644 --- a/braid/braid-run/braid_frontend/src/lib.rs +++ b/braid/braid-run/braid_frontend/src/lib.rs @@ -11,7 +11,7 @@ use wasm_bindgen::{JsCast, JsValue}; use wasm_bindgen_futures::JsFuture; use web_sys::{Event, EventSource, MessageEvent}; -use flydra_types::{CamInfo, HttpApiCallback, HttpApiShared, StrandCamHttpServerInfo}; +use flydra_types::{BraidHttpApiCallback, BraidHttpApiSharedState, BuiServerInfo, CamInfo}; use rust_cam_bui_types::{ClockModel, RecordingPath}; use yew::prelude::*; @@ -41,7 +41,7 @@ impl std::fmt::Display for MyError { // Model struct Model { - shared: Option, + shared: Option, es: EventSource, fail_msg: String, html_page_title: Option, @@ -54,7 +54,7 @@ struct Model { // ----------------------------------------------------------------------------- enum Msg { - NewServerState(HttpApiShared), + NewServerState(BraidHttpApiSharedState), FailedDecode(serde_json::Error), DoRecordCsvTables(bool), DoRecordMp4Files(bool), @@ -165,7 +165,7 @@ impl Component for Model { } Msg::DoRecordCsvTables(val) => { ctx.link().send_future(async move { - match post_callback(&HttpApiCallback::DoRecordCsvTables(val)).await { + match post_callback(&BraidHttpApiCallback::DoRecordCsvTables(val)).await { Ok(()) => Msg::SendMessageFetchState(FetchState::Success), Err(err) => Msg::SendMessageFetchState(FetchState::Failed(err)), } @@ -175,13 +175,14 @@ impl Component for Model { return false; // Don't update DOM, do that when backend notifies us of new state. 
} Msg::DoRecordMp4Files(val) => { - return self.send_to_all_cams(&ctx, HttpApiCallback::DoRecordMp4Files(val)); + return self.send_to_all_cams(&ctx, BraidHttpApiCallback::DoRecordMp4Files(val)); } Msg::SetPostTriggerBufferSize(val) => { - return self.send_to_all_cams(&ctx, HttpApiCallback::SetPostTriggerBufferSize(val)); + return self + .send_to_all_cams(&ctx, BraidHttpApiCallback::SetPostTriggerBufferSize(val)); } Msg::PostTriggerMp4Recording => { - return self.send_to_all_cams(&ctx, HttpApiCallback::PostTriggerMp4Recording); + return self.send_to_all_cams(&ctx, BraidHttpApiCallback::PostTriggerMp4Recording); } } true @@ -215,7 +216,7 @@ impl Component for Model { // View impl Model { - fn send_to_all_cams(&mut self, ctx: &Context, msg: HttpApiCallback) -> bool { + fn send_to_all_cams(&mut self, ctx: &Context, msg: BraidHttpApiCallback) -> bool { ctx.link().send_future(async move { match post_callback(&msg).await { Ok(()) => Msg::SendMessageFetchState(FetchState::Success), @@ -382,9 +383,15 @@ fn view_cam_list(cams: &Vec) -> Html { let all_rendered: Vec = cams .iter() .map(|cci| { - let cam_url = match cci.http_camserver_info { - StrandCamHttpServerInfo::NoServer => "http://127.0.0.1/notexist".to_string(), - StrandCamHttpServerInfo::Server(ref details) => details.guess_base_url_with_token(), + let cam_url = match cci.strand_cam_http_server_info { + BuiServerInfo::NoServer => "/does-not-exist".to_string(), + BuiServerInfo::Server(_) => { + format!( + "/{}/{}/", + flydra_types::braid_http::CAM_PROXY_PATH, + flydra_types::braid_http::encode_cam_name(&cci.name) + ) + } }; let state = format!("{:?}", cci.state); let stats = format!("{:?}", cci.recent_stats); @@ -442,16 +449,18 @@ fn view_model_server_link(opt_addr: &Option) -> Html { // ----------------------------------------------------------------------------- -async fn post_callback(msg: &HttpApiCallback) -> Result<(), FetchError> { +async fn post_callback(msg: &BraidHttpApiCallback) -> Result<(), FetchError> { use 
web_sys::{Request, RequestInit, Response}; let mut opts = RequestInit::new(); opts.method("POST"); opts.cache(web_sys::RequestCache::NoStore); - // opts.mode(web_sys::RequestMode::Cors); - // opts.headers("Content-Type", "application/json;charset=UTF-8") - // set SameOrigin let buf = serde_json::to_string(&msg).unwrap_throw(); opts.body(Some(&JsValue::from_str(&buf))); + let headers = web_sys::Headers::new().unwrap_throw(); + headers + .append("Content-Type", "application/json") + .unwrap_throw(); + opts.headers(&headers); let url = "callback"; let request = Request::new_with_str_and_init(url, &opts)?; diff --git a/braid/braid-run/build.rs b/braid/braid-run/build.rs index 606ac29b7..6d2c3078b 100644 --- a/braid/braid-run/build.rs +++ b/braid/braid-run/build.rs @@ -1,20 +1,24 @@ fn main() -> Result<(), Box> { build_util::git_hash(env!("CARGO_PKG_VERSION"))?; + #[cfg(feature = "bundle_files")] let frontend_dir = std::path::PathBuf::from("braid_frontend"); + #[cfg(feature = "bundle_files")] let frontend_pkg_dir = frontend_dir.join("pkg"); #[cfg(feature = "bundle_files")] - if !frontend_pkg_dir.join("braid_frontend.js").exists() { - return Err(format!( - "The frontend is required but not built. Hint: go to {} and \ - run `build.sh` (or on Windows, `build.bat`).", - frontend_dir.display() - ) - .into()); + { + for path in ["braid_frontend.js", "index.html"] { + if !frontend_pkg_dir.join(path).exists() { + return Err(format!( + "The frontend is required but not built. 
Hint: go to {} and \ + run `build.sh` (or on Windows, `build.bat`).", + frontend_dir.display() + ) + .into()); + } + } } - build_util::bui_backend_generate_code(&frontend_pkg_dir, "mainbrain_frontend.rs")?; - Ok(()) } diff --git a/braid/braid-run/src/callback_handling.rs b/braid/braid-run/src/callback_handling.rs new file mode 100644 index 000000000..1e030fbfc --- /dev/null +++ b/braid/braid-run/src/callback_handling.rs @@ -0,0 +1,168 @@ +use axum::response::IntoResponse; +use tracing::debug; + +use event_stream_types::TolerantJson; +use flydra_types::{BraidHttpApiCallback, PerCamSaveData}; +use http::StatusCode; +use rust_cam_bui_types::RecordingPath; + +use crate::mainbrain::*; + +fn start_saving_mp4s_all_cams(app_state: &BraidAppState, start_saving: bool) { + let mut tracker = app_state.shared_store.write(); + tracker.modify(|store| { + if start_saving { + store.fake_mp4_recording_path = Some(RecordingPath::new("".to_string())); + } else { + store.fake_mp4_recording_path = None; + } + }); +} + +pub(crate) async fn callback_handler( + axum::extract::State(app_state): axum::extract::State, + _session_key: axum_token_auth::SessionKey, + TolerantJson(payload): TolerantJson, +) -> impl IntoResponse { + let fut = async { + use BraidHttpApiCallback::*; + match payload { + NewCamera(cam_info) => { + debug!("got NewCamera {:?}", cam_info.raw_cam_name.as_str()); + let http_camserver_info = cam_info.http_camserver_info.unwrap(); + let cam_settings_data = cam_info.cam_settings_data.unwrap(); + let mut cam_manager3 = app_state.cam_manager.clone(); + cam_manager3 + .register_new_camera(&cam_info.raw_cam_name, &http_camserver_info) + .map_err(|msg| (StatusCode::BAD_REQUEST, msg))?; + + let mut current_cam_data = app_state.per_cam_data_arc.write(); + if current_cam_data + .insert( + cam_info.raw_cam_name.clone(), + PerCamSaveData { + cam_settings_data: Some(cam_settings_data), + feature_detect_settings: None, + current_image_png: cam_info.current_image_png, + }, + ) + 
.is_some() + { + panic!("camera {} already known", cam_info.ros_cam_name.as_str()); + } + } + UpdateCurrentImage(image_info) => { + // new image from camera + debug!( + "got new image for camera \"{}\"", + image_info.raw_cam_name.as_str() + ); + let mut current_cam_data = app_state.per_cam_data_arc.write(); + current_cam_data + .get_mut(&image_info.raw_cam_name) + .unwrap() + .current_image_png = image_info.inner.current_image_png; + } + UpdateCamSettings(cam_settings) => { + let mut current_cam_data = app_state.per_cam_data_arc.write(); + current_cam_data + .get_mut(&cam_settings.raw_cam_name) + .unwrap() + .cam_settings_data = Some(cam_settings.inner); + } + UpdateFeatureDetectSettings(feature_detect_settings) => { + let mut current_cam_data = app_state.per_cam_data_arc.write(); + current_cam_data + .get_mut(&feature_detect_settings.raw_cam_name) + .unwrap() + .feature_detect_settings = Some(feature_detect_settings.inner); + } + DoRecordCsvTables(value) => { + debug!("got DoRecordCsvTables({})", value); + toggle_saving_csv_tables( + value, + app_state.expected_framerate_arc.clone(), + app_state.output_base_dirname.clone(), + app_state.braidz_write_tx_weak.clone(), + app_state.per_cam_data_arc.clone(), + app_state.shared_store.clone(), + ) + .await; + } + DoRecordMp4Files(start_saving) => { + debug!("got DoRecordMp4Files({start_saving})"); + + app_state + .strand_cam_http_session_handler + .toggle_saving_mp4_files_all(start_saving) + .await + .map_err(|_e| { + ( + StatusCode::INTERNAL_SERVER_ERROR, + "toggle_saving_mp4_files_all failed", + ) + })?; + + start_saving_mp4s_all_cams(&app_state, start_saving); + } + SetExperimentUuid(value) => { + debug!("got SetExperimentUuid({})", value); + if let Some(braidz_write_tx) = app_state.braidz_write_tx_weak.upgrade() { + // `braidz_write_tx` will be dropped after this scope. 
+ braidz_write_tx + .send(flydra2::SaveToDiskMsg::SetExperimentUuid(value)) + .await + .unwrap(); + } + } + SetPostTriggerBufferSize(val) => { + debug!("got SetPostTriggerBufferSize({val})"); + + app_state + .strand_cam_http_session_handler + .set_post_trigger_buffer_all(val) + .await + .map_err(|_e| { + ( + StatusCode::INTERNAL_SERVER_ERROR, + "set_post_trigger_buffer_all failed", + ) + })?; + + { + let mut tracker = app_state.shared_store.write(); + tracker.modify(|store| { + store.post_trigger_buffer_size = val; + }); + } + } + PostTriggerMp4Recording => { + debug!("got PostTriggerMp4Recording"); + + let is_saving = { + let tracker = app_state.shared_store.read(); + (*tracker).as_ref().fake_mp4_recording_path.is_some() + }; + + if !is_saving { + app_state + .strand_cam_http_session_handler + .initiate_post_trigger_mp4_all() + .await + .map_err(|_e| { + ( + StatusCode::INTERNAL_SERVER_ERROR, + "initiate_post_trigger_mp4_all failed", + ) + })?; + + start_saving_mp4s_all_cams(&app_state, true); + } else { + debug!("Already saving, not initiating again."); + } + } + } + Ok::<_, (StatusCode, &'static str)>(()) + }; + fut.await +} diff --git a/braid/braid-run/src/main.rs b/braid/braid-run/src/main.rs index f462d3db9..0caf7303c 100644 --- a/braid/braid-run/src/main.rs +++ b/braid/braid-run/src/main.rs @@ -1,18 +1,16 @@ #![cfg_attr(feature = "backtrace", feature(error_generic_member_access))] -use clap::Parser; - -#[macro_use] -extern crate log; - use anyhow::Result; - -use flydra_types::{MainbrainBuiLocation, RawCamName, StartCameraBackend, TriggerType}; +use clap::Parser; +use tracing::debug; use braid::braid_start; use braid_config_data::parse_config_file; -use flydra_types::BraidCameraConfig; +use flydra_types::{ + BraidCameraConfig, MainbrainBuiLocation, RawCamName, StartCameraBackend, TriggerType, +}; +mod callback_handling; mod mainbrain; mod multicam_http_session_handler; @@ -23,9 +21,40 @@ struct BraidRunCliArgs { config_file: std::path::PathBuf, } +fn 
compute_strand_cam_args( + camera: &BraidCameraConfig, + mainbrain_internal_addr: &MainbrainBuiLocation, +) -> Result> { + let urls = mainbrain_internal_addr.0.build_urls()?; + let mut best_url = None; + for url in urls.iter() { + if !flydra_types::is_loopback(url) { + best_url = Some(url); + break; + } + } + if best_url.is_none() { + if urls.is_empty() { + anyhow::bail!( + "could not find a single URL {:?}", + mainbrain_internal_addr.0 + ); + } + best_url = Some(&urls[0]); + } + let url = best_url.unwrap(); + let url_string = format!("{url}"); + Ok(vec![ + "--camera-name".into(), + camera.name.clone(), + "--braid-url".into(), + url_string, + ]) +} + fn launch_strand_cam( - camera: BraidCameraConfig, - mainbrain_internal_addr: MainbrainBuiLocation, + camera: &BraidCameraConfig, + mainbrain_internal_addr: &MainbrainBuiLocation, ) -> Result<()> { use anyhow::Context; @@ -33,8 +62,6 @@ fn launch_strand_cam( // [flydra_types::RemoteCameraInfoResponse] and thus we do not need to // provide much info. 
- let base_url = mainbrain_internal_addr.0.base_url(); - let braid_run_exe = std::env::current_exe().unwrap(); let exe_dir = braid_run_exe .parent() @@ -51,7 +78,8 @@ fn launch_strand_cam( debug!("strand cam executable name: \"{}\"", exe.display()); let mut exec = std::process::Command::new(&exe); - exec.args(["--camera-name", &camera.name, "--braid_addr", &base_url]); + let args = compute_strand_cam_args(camera, mainbrain_internal_addr)?; + exec.args(&args); debug!("exec: {:?}", exec); let mut obj = exec.spawn().context(format!( "Starting Strand Cam executable \"{}\"", @@ -67,7 +95,8 @@ fn launch_strand_cam( Ok(()) } -fn main() -> Result<()> { +#[tokio::main] +async fn main() -> Result<()> { braid_start("run")?; let args = BraidRunCliArgs::parse(); @@ -76,23 +105,13 @@ fn main() -> Result<()> { let cfg = parse_config_file(&args.config_file)?; debug!("{:?}", cfg); - let n_local_cameras = cfg - .cameras - .iter() - .filter(|c| c.start_backend != StartCameraBackend::Remote) - .count(); - - let runtime = tokio::runtime::Builder::new_multi_thread() - .enable_all() - .worker_threads(4 + 4 * n_local_cameras) - .thread_name("braid-runtime") - .thread_stack_size(3 * 1024 * 1024) - .build()?; - let camera_configs = cfg .cameras .iter() - .map(|cfg| (cfg.name.clone(), cfg.clone())) + .map(|cfg| { + let raw_cam_name = RawCamName::new(cfg.name.to_string()); + (raw_cam_name, cfg.clone()) + }) .collect(); let trig_cfg = cfg.trigger; @@ -106,42 +125,31 @@ fn main() -> Result<()> { }; let show_tracking_params = false; - let handle = runtime.handle().clone(); + // let handle = runtime.handle().clone(); let all_expected_cameras = cfg .cameras .iter() - .map(|x| RawCamName::new(x.name.clone()).to_ros()) + .map(|x| RawCamName::new(x.name.clone())) .collect(); - let phase1 = runtime.block_on(mainbrain::pre_run( - &handle, - show_tracking_params, - // Raising the mainbrain thread priority is currently disabled. 
- // cfg.mainbrain.sched_policy_priority, - camera_configs, - trig_cfg, - &cfg.mainbrain, - cfg.mainbrain - .jwt_secret - .as_ref() - .map(|x| x.as_bytes().to_vec()), - all_expected_cameras, - force_camera_sync_mode, - software_limit_framerate.clone(), - "braid", - ))?; - let mainbrain_server_info = MainbrainBuiLocation(phase1.mainbrain_server_info.clone()); + let address_string: String = cfg.mainbrain.http_api_server_addr.clone(); + let (listener, mainbrain_server_info) = flydra_types::start_listener(&address_string).await?; + let mainbrain_internal_addr = MainbrainBuiLocation(mainbrain_server_info.clone()); let cfg_cameras = cfg.cameras; - - let _enter_guard = runtime.enter(); let _strand_cams = cfg_cameras .into_iter() .filter_map(|camera| { if camera.start_backend != StartCameraBackend::Remote { - Some(launch_strand_cam(camera, mainbrain_server_info.clone())) + Some(launch_strand_cam(&camera, &mainbrain_internal_addr)) } else { - log::info!("Not starting remote camera \"{}\"", camera.name); + tracing::info!( + "Not starting remote camera \"{}\". Use args: {}", + camera.name, + compute_strand_cam_args(&camera, &mainbrain_internal_addr) + .unwrap() + .join(" ") + ); None } }) @@ -149,10 +157,25 @@ fn main() -> Result<()> { debug!("done launching cameras"); - // This runs the whole thing and blocks. - runtime.block_on(mainbrain::run(phase1))?; + let secret_base64 = cfg.mainbrain.secret_base64.as_ref().map(Clone::clone); - // Now wait for everything to end.. + // This runs the whole thing and "blocks". Now wait for everything to end. + mainbrain::do_run_forever( + show_tracking_params, + // Raising the mainbrain thread priority is currently disabled. 
+ // cfg.mainbrain.sched_policy_priority, + camera_configs, + trig_cfg, + cfg.mainbrain, + secret_base64, + all_expected_cameras, + force_camera_sync_mode, + software_limit_framerate.clone(), + "braid", + listener, + mainbrain_server_info, + ) + .await?; debug!("done {}:{}", file!(), line!()); diff --git a/braid/braid-run/src/mainbrain.rs b/braid/braid-run/src/mainbrain.rs index 41c0a649f..c0111e2d2 100644 --- a/braid/braid-run/src/mainbrain.rs +++ b/braid/braid-run/src/mainbrain.rs @@ -1,60 +1,63 @@ -use std::collections::BTreeMap; -use std::net::SocketAddr; -use std::sync::{ - atomic::{AtomicBool, Ordering}, - Arc, +use std::{ + collections::BTreeMap, + net::SocketAddr, + path::PathBuf, + sync::{ + atomic::{AtomicBool, Ordering}, + Arc, + }, }; -use std::{error::Error as StdError, future::Future, pin::Pin}; - -use parking_lot::RwLock; - -use tokio::net::UdpSocket; -use tokio_util::udp::UdpFramed; - use async_change_tracker::ChangeTracker; -use bui_backend_types::CallbackDataAndSession; - -use bui_backend::{ - highlevel::{create_bui_app_inner, BuiAppInner}, - AccessControl, CallbackHandler, +use axum::{ + extract::{Path, State}, + routing::get, }; +use futures::StreamExt; +use http::{HeaderValue, StatusCode}; +use parking_lot::RwLock; +use preferences_serde1::{AppInfo, Preferences}; +use serde::Serialize; +use tokio::net::UdpSocket; +use tower_http::trace::TraceLayer; +use tracing::{debug, error, info}; +use bui_backend_session_types::AccessToken; +use event_stream_types::{AcceptsEventStream, EventBroadcaster}; use flydra2::{CoordProcessor, CoordProcessorConfig, FrameDataAndPoints, MyFloat, StreamItem}; use flydra_types::{ - CamInfo, CborPacketCodec, FlydraFloatTimestampLocal, HttpApiCallback, HttpApiShared, - PerCamSaveData, RosCamName, StrandCamBuiServerInfo, SyncFno, TriggerType, Triggerbox, + braid_http::{CAM_PROXY_PATH, REMOTE_CAMERA_INFO_PATH}, + BraidHttpApiSharedState, BuiServerAddrInfo, CamInfo, CborPacketCodec, + FlydraFloatTimestampLocal, HostClock, 
PerCamSaveData, RawCamName, SyncFno, TriggerType, + Triggerbox, BRAID_EVENTS_URL_PATH, BRAID_EVENT_NAME, }; +use rust_cam_bui_types::{ClockModel, RecordingPath}; -use futures::StreamExt; -use rust_cam_bui_types::ClockModel; -use rust_cam_bui_types::RecordingPath; +use anyhow::Result; -pub use crate::multicam_http_session_handler::StrandCamHttpSessionHandler; +use crate::multicam_http_session_handler::{MaybeSession, StrandCamHttpSessionHandler}; -lazy_static::lazy_static! { - static ref EVENTS_PREFIX: String = format!("/{}", flydra_types::BRAID_EVENTS_URL_PATH); -} +pub(crate) type BoxedStdError = Box; -pub(crate) mod from_bui_backend { - // Include the files to be served and define `fn get_default_config()` and `Config`. - include!(concat!(env!("OUT_DIR"), "/mainbrain_frontend.rs")); // Despite slash, this works on Windows. +#[cfg(feature = "bundle_files")] +static ASSETS_DIR: include_dir::Dir<'static> = + include_dir::include_dir!("$CARGO_MANIFEST_DIR/braid_frontend/pkg"); - pub(crate) fn get_bui_backend_config() -> Config { - get_default_config() - } +lazy_static::lazy_static! { + static ref EVENTS_PREFIX: String = format!("/{}", BRAID_EVENTS_URL_PATH); } -use anyhow::Result; +pub const APP_INFO: AppInfo = AppInfo { + name: "braid", + author: "AndrewStraw", +}; +const COOKIE_SECRET_KEY: &str = "cookie-secret-base64"; const SYNCHRONIZE_DURATION_SEC: u8 = 3; -const JSON_TYPE: &str = "application/json"; -const EMPTY_JSON_BUF: &[u8] = b"{}"; +type SharedStore = Arc>>; #[derive(thiserror::Error, Debug)] pub(crate) enum MainbrainError { - #[error("The `jwt_secret` configuration variable must be set.")] - JwtError, #[error("{source}")] HyperError { #[from] @@ -73,335 +76,302 @@ pub(crate) enum MainbrainError { pub(crate) type MainbrainResult = std::result::Result; -/// When dropped, send a message. This is used to shutdown the HTTP listener. 
-struct DropSend(Option>); +/// The structure that holds our app data +#[derive(Clone)] +pub(crate) struct BraidAppState { + pub(crate) shared_store: SharedStore, + public_camdata_addr: String, + force_camera_sync_mode: bool, + software_limit_framerate: flydra_types::StartSoftwareFrameRateLimit, + event_broadcaster: EventBroadcaster, + pub(crate) per_cam_data_arc: Arc>>, + pub(crate) expected_framerate_arc: Arc>>, + camera_configs: BTreeMap, + next_connection_id: Arc>, + pub(crate) strand_cam_http_session_handler: StrandCamHttpSessionHandler, + pub(crate) cam_manager: flydra2::ConnectedCamerasManager, + pub(crate) output_base_dirname: PathBuf, + pub(crate) braidz_write_tx_weak: tokio::sync::mpsc::WeakSender, +} + +async fn events_handler( + State(app_state): State, + _session_key: axum_token_auth::SessionKey, + _: AcceptsEventStream, +) -> impl axum::response::IntoResponse { + let key = { + let mut next_connection_id = app_state.next_connection_id.write(); + let key = *next_connection_id; + *next_connection_id += 1; + key + }; + let (tx, body) = app_state.event_broadcaster.new_connection(key); -impl Drop for DropSend { - fn drop(&mut self) { - if let Some(shutdown_tx) = self.0.take() { - match shutdown_tx.send(()) { - Ok(()) => {} - Err(_) => { - error!("DropSend::drop failed"); - } + // Send an initial copy of our state. + { + let current_state = app_state.shared_store.read().as_ref().clone(); + let frame_string = to_event_frame(¤t_state); + match tx + .send(Ok(http_body::Frame::data(frame_string.into()))) + .await + { + Ok(()) => {} + Err(_) => { + // The receiver was dropped because the connection closed. Should probably do more here. + tracing::debug!("initial send error"); } } } -} -/// The structure that holds our app data -struct HttpApiApp { - inner: BuiAppInner, - time_model_arc: Arc>>, - triggerbox_cmd: Option>, - sync_pulse_pause_started_arc: Arc>>, - expected_framerate_arc: Arc>>, - /// Sender which fires to shutdown the HTTP server upon drop. 
- _shutdown_tx: DropSend, + body } -#[derive(Clone)] -struct MyCallbackHandler { - cam_manager: flydra2::ConnectedCamerasManager, - per_cam_data_arc: Arc>>, - expected_framerate_arc: Arc>>, - braidz_write_tx_weak: tokio::sync::mpsc::WeakSender, - output_base_dirname: std::path::PathBuf, - shared_data: Arc>>, - strand_cam_http_session_handler: StrandCamHttpSessionHandler, +async fn handle_auth_error(err: tower::BoxError) -> (StatusCode, &'static str) { + match err.downcast::() { + Ok(err) => { + tracing::error!( + "Validation error(s): {:?}", + err.errors().collect::>() + ); + (StatusCode::UNAUTHORIZED, "Request is not authorized") + } + Err(orig_err) => { + tracing::error!("Unhandled internal error: {orig_err}"); + (StatusCode::INTERNAL_SERVER_ERROR, "internal server error") + } + } } -impl MyCallbackHandler { - fn start_saving_mp4s_all_cams(&self, start_saving: bool) { - let mut tracker = self.shared_data.write(); - tracker.modify(|store| { - if start_saving { - store.fake_mp4_recording_path = Some(RecordingPath::new("".to_string())); - } else { - store.fake_mp4_recording_path = None; - } - }); +/// Compute public UDP IP:port given the socket address which was opened. +fn compute_public_camdata_addr(camdata_addr: SocketAddr) -> String { + let all_addrs = flydra_types::expand_unspecified_addr(camdata_addr).unwrap(); + let non_loopback_addrs: Vec<_> = all_addrs + .iter() + .filter(|addr| !addr.ip().is_loopback()) + .collect(); + if !non_loopback_addrs.is_empty() { + // Take first non-loopback address if available. + non_loopback_addrs.first().unwrap().to_string() + } else { + // Otherwise we are serving only on loopback, so serve that. + all_addrs.first().unwrap().to_string() } } -impl CallbackHandler for MyCallbackHandler { - type Data = HttpApiCallback; - - /// HTTP request to "/callback" has been made with payload which as been - /// deserialized into `Self::Data` and session data stored in - /// [CallbackDataAndSession]. 
- fn call<'a>( - &'a self, - data_sess: CallbackDataAndSession, - ) -> Pin>> + Send + 'a>> { - let payload = data_sess.payload; - - let fut = async { - use HttpApiCallback::*; - match payload { - NewCamera(cam_info) => { - debug!("got NewCamera {:?}", cam_info); - let http_camserver_info = cam_info.http_camserver_info.unwrap(); - let cam_settings_data = cam_info.cam_settings_data.unwrap(); - let mut cam_manager3 = self.cam_manager.clone(); - cam_manager3.register_new_camera( - &cam_info.orig_cam_name, - &http_camserver_info, - &cam_info.ros_cam_name, - ); - - let mut current_cam_data = self.per_cam_data_arc.write(); - if current_cam_data - .insert( - cam_info.ros_cam_name.clone(), - PerCamSaveData { - cam_settings_data: Some(cam_settings_data), - feature_detect_settings: None, - current_image_png: cam_info.current_image_png, - }, - ) - .is_some() - { - panic!("camera {} already known", cam_info.ros_cam_name.as_str()); - } - } - UpdateCurrentImage(image_info) => { - // new image from camera - debug!( - "got new image for camera {}", - image_info.ros_cam_name.as_str() - ); - let mut current_cam_data = self.per_cam_data_arc.write(); - current_cam_data - .get_mut(&image_info.ros_cam_name) - .unwrap() - .current_image_png = image_info.inner.current_image_png; - } - UpdateCamSettings(cam_settings) => { - let mut current_cam_data = self.per_cam_data_arc.write(); - current_cam_data - .get_mut(&cam_settings.ros_cam_name) - .unwrap() - .cam_settings_data = Some(cam_settings.inner); - } - UpdateFeatureDetectSettings(feature_detect_settings) => { - let mut current_cam_data = self.per_cam_data_arc.write(); - current_cam_data - .get_mut(&feature_detect_settings.ros_cam_name) - .unwrap() - .feature_detect_settings = Some(feature_detect_settings.inner); - } - DoRecordCsvTables(value) => { - debug!("got DoRecordCsvTables({})", value); - toggle_saving_csv_tables( - value, - self.expected_framerate_arc.clone(), - self.output_base_dirname.clone(), - self.braidz_write_tx_weak.clone(), 
- self.per_cam_data_arc.clone(), - self.shared_data.clone(), - ) - .await; - } - DoRecordMp4Files(start_saving) => { - debug!("got DoRecordMp4Files({start_saving})"); - - self.strand_cam_http_session_handler - .toggle_saving_mp4_files_all(start_saving) - .await?; - - self.start_saving_mp4s_all_cams(start_saving); - } - SetExperimentUuid(value) => { - debug!("got SetExperimentUuid({})", value); - if let Some(braidz_write_tx) = self.braidz_write_tx_weak.upgrade() { - // `braidz_write_tx` will be dropped after this scope. - braidz_write_tx - .send(flydra2::SaveToDiskMsg::SetExperimentUuid(value)) - .await - .unwrap(); - } - } - SetPostTriggerBufferSize(val) => { - debug!("got SetPostTriggerBufferSize({val})"); - - self.strand_cam_http_session_handler - .set_post_trigger_buffer_all(val) - .await?; - - { - let mut tracker = self.shared_data.write(); - tracker.modify(|store| { - store.post_trigger_buffer_size = val; - }); - } - } - PostTriggerMp4Recording => { - debug!("got PostTriggerMp4Recording"); - - let is_saving = { - let tracker = self.shared_data.read(); - (*tracker).as_ref().fake_mp4_recording_path.is_some() - }; - - if !is_saving { - self.strand_cam_http_session_handler - .initiate_post_trigger_mp4_all() - .await?; - - self.start_saving_mp4s_all_cams(true); - } else { - debug!("Already saving, not initiating again."); - } - } - } - Ok::<_, MainbrainError>(()) +/// Query the mainbrain configuration to get data required for camera settings. +/// +/// Note that this does not change the state of the mainbrain to register +/// anything about the camera but only queries for its configuration. +/// Registration of a new camera is done by +/// [flydra_types::BraidHttpApiCallback::NewCamera]. 
+async fn remote_camera_info_handler( + State(app_state): State, + _session_key: axum_token_auth::SessionKey, + Path(raw_cam_name): Path, +) -> impl axum::response::IntoResponse { + let cam_cfg = app_state + .camera_configs + .get(&RawCamName::new(raw_cam_name.clone())); + + if let Some(config) = cam_cfg { + let software_limit_framerate = app_state.software_limit_framerate.clone(); + + let msg = flydra_types::RemoteCameraInfoResponse { + camdata_addr: app_state.public_camdata_addr, + config: config.clone(), + force_camera_sync_mode: app_state.force_camera_sync_mode, + software_limit_framerate, }; - Box::pin(async { - match fut.await { - Ok(()) => Ok(()), - Err(e) => { - let e: Box = Box::new(e); - Err(e) - } + Ok(axum::Json(msg)) + } else { + error!("HTTP camera not found: \"{raw_cam_name:?}\""); + Err(( + StatusCode::NOT_FOUND, + format!("Camera \"{raw_cam_name}\" not found."), + )) + } +} + +async fn cam_proxy_handler_inner( + app_state: BraidAppState, + raw_cam_name: String, + cam_path: String, + req: axum::extract::Request, +) -> impl axum::response::IntoResponse { + tracing::debug!("raw_cam_name: {raw_cam_name}, cam_path: \"{cam_path}\", req: {req:?}"); + let accepts: Vec = req + .headers() + .get_all(http::header::ACCEPT) + .iter() + .map(Clone::clone) + .collect(); + let cam_name = RawCamName::new(raw_cam_name); + let session = { + app_state + .strand_cam_http_session_handler + .name_to_session + .read() + .get(&cam_name) + .and_then(|maybe_session| match maybe_session { + MaybeSession::Alive(inner) => Some(inner.clone()), + MaybeSession::Errored => None, + }) + }; + tracing::debug!("Will request path \"{cam_path}\". 
Got session {session:?}."); + match session { + Some(mut session) => match session + .req_accepts(&cam_path, &accepts, req.method().clone(), req.into_body()) + .await + { + Ok(response) => Ok(response), + Err(e) => { + tracing::error!("error calling Strand Camera HTTP server: {e} {e:?}"); + Err(( + StatusCode::BAD_REQUEST, + "something went wrong. (Check the server logs.)".to_string(), + )) } - }) + }, + None => Err(( + StatusCode::NOT_FOUND, + format!("camera name \"{}\" not found.", cam_name.as_str()), + )), } } -pub(crate) type MyBody = http_body_util::combinators::BoxBody; +async fn cam_proxy_handler_root( + State(app_state): State, + _session_key: axum_token_auth::SessionKey, + Path(raw_cam_name): Path, + req: axum::extract::Request, +) -> impl axum::response::IntoResponse { + cam_proxy_handler_inner(app_state, raw_cam_name, "".into(), req).await +} -pub(crate) fn body_from_buf(body_buf: &[u8]) -> MyBody { - let body = http_body_util::Full::new(bytes::Bytes::from(body_buf.to_vec())); - use http_body_util::BodyExt; - MyBody::new(body.map_err(|_: std::convert::Infallible| unreachable!())) +async fn cam_proxy_handler( + State(app_state): State, + _session_key: axum_token_auth::SessionKey, + Path((raw_cam_name, cam_path)): Path<(String, String)>, + req: axum::extract::Request, +) -> impl axum::response::IntoResponse { + cam_proxy_handler_inner(app_state, raw_cam_name, cam_path, req).await } async fn launch_braid_http_backend( - auth: AccessControl, - cam_manager: flydra2::ConnectedCamerasManager, - shared: HttpApiShared, - bui_backend_config: bui_backend::lowlevel::Config, - camdata_addr: String, - camera_configs: BTreeMap, - time_model_arc: Arc>>, - triggerbox_cmd: Option>, - sync_pulse_pause_started_arc: Arc>>, - expected_framerate_arc: Arc>>, - output_base_dirname: std::path::PathBuf, - braidz_write_tx_weak: tokio::sync::mpsc::WeakSender, - per_cam_data_arc: Arc>>, - force_camera_sync_mode: bool, - software_limit_framerate: 
flydra_types::StartSoftwareFrameRateLimit, - strand_cam_http_session_handler: StrandCamHttpSessionHandler, -) -> Result { - // Create our shared state. - let shared_store = Arc::new(RwLock::new(ChangeTracker::new(shared))); + secret_base64: Option, + listener: tokio::net::TcpListener, + mainbrain_server_info: BuiServerAddrInfo, + app_state: BraidAppState, +) -> Result>> { + let persistent_secret_base64 = if let Some(secret) = secret_base64 { + secret + } else { + match String::load(&APP_INFO, COOKIE_SECRET_KEY) { + Ok(secret_base64) => secret_base64, + Err(_) => { + tracing::debug!("No secret loaded from preferences file, generating new."); + let persistent_secret = cookie::Key::generate(); + let persistent_secret_base64 = base64::encode(persistent_secret.master()); + persistent_secret_base64.save(&APP_INFO, COOKIE_SECRET_KEY)?; + persistent_secret_base64 + } + } + }; - // Create `inner`, which takes care of the browser communication details for us. - let chan_size = 10; + let persistent_secret = base64::decode(persistent_secret_base64)?; + let persistent_secret = cookie::Key::try_from(persistent_secret.as_slice())?; - let callback_handler = Box::new(MyCallbackHandler { - shared_data: shared_store.clone(), - cam_manager: cam_manager.clone(), - expected_framerate_arc: expected_framerate_arc.clone(), - output_base_dirname: output_base_dirname.clone(), - per_cam_data_arc: per_cam_data_arc.clone(), - braidz_write_tx_weak, - strand_cam_http_session_handler: strand_cam_http_session_handler.clone(), - }); + // Setup our auth layer. 
+ let token_config = match mainbrain_server_info.token() { + AccessToken::PreSharedToken(value) => Some(axum_token_auth::TokenConfig { + name: "token".to_string(), + value: value.clone(), + }), + AccessToken::NoToken => None, + }; - let raw_req_handler: bui_backend::lowlevel::RawReqHandler = Arc::new(Box::new( - move |resp: http::response::Builder, req: http::Request| { - debug!("got HTTP request {}", req.uri()); - let path = req.uri().path(); - let mut resp = resp.header(hyper::header::CONTENT_TYPE, JSON_TYPE); - let resp = if &path[..1] == "/" && &path[1..] == flydra_types::REMOTE_CAMERA_INFO_PATH { - let query = req.uri().query(); - let query_pairs = url::form_urlencoded::parse(query.unwrap_or("").as_bytes()); - let mut orig_camera_name: Option = None; - for (key, value) in query_pairs { - use std::ops::Deref; - if key.deref() == "camera" { - orig_camera_name = Some(value.to_string()); - } - } - if let Some(camera_name) = orig_camera_name { - if camera_configs.contains_key(&camera_name) { - let config = camera_configs.get(&camera_name).unwrap().clone(); - let camdata_addr = camdata_addr.clone(); - let software_limit_framerate = software_limit_framerate.clone(); - - let msg = flydra_types::RemoteCameraInfoResponse { - camdata_addr, - config, - force_camera_sync_mode, - software_limit_framerate, - }; - let body_buf = serde_json::to_vec(&msg).unwrap(); - resp.body(body_from_buf(&body_buf))? - } else { - error!("HTTP camera not found: \"{camera_name}\""); - resp = resp.status(hyper::StatusCode::NOT_FOUND); - resp.body(body_from_buf(EMPTY_JSON_BUF))? - } - } else { - error!("HTTP request for configuration but no camera specified"); - resp = resp.status(hyper::StatusCode::BAD_REQUEST); - resp.body(body_from_buf(EMPTY_JSON_BUF))? - } - } else { - error!("HTTP request unknown"); - resp = resp.status(hyper::StatusCode::BAD_REQUEST); - resp.body(body_from_buf(EMPTY_JSON_BUF))? 
- }; - let resp: http::Response> = resp; // type annotation - Ok(resp) - }, - )); - - let (rx_conn, bui_server) = bui_backend::lowlevel::launcher( - bui_backend_config.clone(), - &auth, - chan_size, - &EVENTS_PREFIX, - Some(raw_req_handler), - callback_handler, + let cfg = axum_token_auth::AuthConfig { + token_config, + persistent_secret, + cookie_name: "braid-bui-session", + ..Default::default() + }; + + #[cfg(feature = "bundle_files")] + let serve_dir = tower_serve_static::ServeDir::new(&ASSETS_DIR); + + #[cfg(feature = "serve_files")] + let serve_dir = tower_http::services::fs::ServeDir::new( + std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .join("braid_frontend") + .join("pkg"), ); - let (shutdown_tx, shutdown_rx) = tokio::sync::oneshot::channel::<()>(); - - let (_, inner) = create_bui_app_inner( - tokio::runtime::Handle::current(), - Some(shutdown_rx), - &auth, - shared_store, - Some(flydra_types::BRAID_EVENT_NAME.to_string()), - rx_conn, - bui_server, - ) - .await?; - - let mainbrain_server_info = { - let local_addr = *inner.local_addr(); - let token = inner.token(); - StrandCamBuiServerInfo::new(local_addr, token) + let auth_layer = cfg.into_layer(); + + assert_eq!(BRAID_EVENTS_URL_PATH, "braid-events"); + assert_eq!(REMOTE_CAMERA_INFO_PATH, "remote-camera-info"); + assert_eq!(CAM_PROXY_PATH, "cam-proxy"); + + // Create axum router. 
+ let router = axum::Router::new() + .route("/braid-events", get(events_handler)) + .route( + "/remote-camera-info/:encoded_cam_name", + get(remote_camera_info_handler), + ) + // .route("/cam-proxy/:encoded_cam_name", get(slash_redirect_handler)) + .route( + "/cam-proxy/:encoded_cam_name/", + axum::routing::method_routing::any(cam_proxy_handler_root), + ) + .route( + "/cam-proxy/:encoded_cam_name/*path", + axum::routing::method_routing::any(cam_proxy_handler), + ) + .route( + "/callback", + axum::routing::post(crate::callback_handling::callback_handler) + .layer(axum::extract::DefaultBodyLimit::max(10_000_000)), + ) + .nest_service("/", serve_dir) + .layer( + tower::ServiceBuilder::new() + .layer(TraceLayer::new_for_http()) + // Auth layer will produce an error if the request cannot be + // authorized so we must handle that. + .layer(axum::error_handling::HandleErrorLayer::new( + handle_auth_error, + )) + .layer(auth_layer), + ) + .with_state(app_state); + + // create future for our app + let http_serve_future = { + use futures::TryFutureExt; + use std::future::IntoFuture; + axum::serve(listener, router) + .into_future() + .map_err(anyhow::Error::from) }; - debug!( - "initialized HttpApiApp listening at {}", - mainbrain_server_info.guess_base_url_with_token() + // Display where we are listening. + info!( + "Braid HTTP server listening at {}", + mainbrain_server_info.addr() ); - // Return our app. - Ok(HttpApiApp { - inner, - time_model_arc, - triggerbox_cmd, - sync_pulse_pause_started_arc, - expected_framerate_arc, - _shutdown_tx: DropSend(Some(shutdown_tx)), - }) + info!("Predicted URL(s):"); + for url in mainbrain_server_info.build_urls()?.iter() { + info!(" * {url}"); + if !flydra_types::is_loopback(url) { + println!("QR code for {url}"); + display_qr_url(&format!("{url}")); + } + } + + Ok(http_serve_future) } fn compute_trigger_timestamp( @@ -416,17 +386,8 @@ fn compute_trigger_timestamp( } } -/// Convert the address we are listening on to a string. 
-/// -/// We can strings over the network, but not binary representations of -/// socket addresses. -fn addr_to_buf(local_addr: &std::net::SocketAddr) -> Result { - let addr_ip = flydra_types::AddrInfoIP::from_socket_addr(local_addr); - Ok(serde_json::to_string(&addr_ip)?) -} - struct SendConnectedCamToBuiBackend { - shared_store: Arc>>, + shared_store: SharedStore, } impl flydra2::ConnectedCamCallback for SendConnectedCamToBuiBackend { @@ -460,44 +421,90 @@ fn display_qr_url(url: &str) { writeln!(stdout_handle).expect("write failed"); } -pub struct StartupPhase1 { - pub camdata_socket: UdpSocket, - my_app: HttpApiApp, - pub mainbrain_server_info: StrandCamBuiServerInfo, - cam_manager: flydra2::ConnectedCamerasManager, - strand_cam_http_session_handler: StrandCamHttpSessionHandler, - handle: tokio::runtime::Handle, - valve: stream_cancel::Valve, - trigger_cfg: TriggerType, - triggerbox_rx: Option>, - model_pose_server_addr: std::net::SocketAddr, - coord_processor: CoordProcessor, - signal_all_cams_present: Arc, - signal_all_cams_synced: Arc, - raw_packet_logger: RawPacketLogger, +/// Format for debugging raw packet data direct from Strand Cam. +#[derive(Serialize)] +struct RawPacketLogRow { + cam_name: String, + #[serde(with = "flydra_types::timestamp_opt_f64")] + timestamp: Option>, + #[serde(with = "flydra_types::timestamp_f64")] + cam_received_time: FlydraFloatTimestampLocal, + device_timestamp: Option, + block_id: Option, + framenumber: i32, + n_frames_skipped: u32, + done_camnode_processing: f64, + preprocess_stamp: f64, + cam_num: Option, + synced_frame: Option, +} + +/// Logger for debugging raw packet data direct from Strand Cam. +struct RawPacketLogger { + fd: Option>, +} + +impl RawPacketLogger { + /// Create a new logger for debugging raw packet data. + /// + /// If `fname` argument is None, this does very little. + fn new(fname: Option<&std::path::Path>) -> Result { + let fd = fname + .map(std::fs::File::create) + .transpose()? 
+ .map(csv::Writer::from_writer); + Ok(Self { fd }) + } + + /// Log debug data for raw packets. + /// + /// If no filename was given to `Self::new`, this does very little. + fn log_raw_packets( + &mut self, + packet: &flydra_types::FlydraRawUdpPacket, + cam_num: Option, + synced_frame: Option, + ) -> Result<()> { + if let Some(ref mut fd) = self.fd { + let row = RawPacketLogRow { + cam_name: packet.cam_name.clone(), + timestamp: packet.timestamp.clone(), + cam_received_time: packet.cam_received_time.clone(), + device_timestamp: packet.device_timestamp, + block_id: packet.block_id, + framenumber: packet.framenumber, + n_frames_skipped: packet.n_frames_skipped, + done_camnode_processing: packet.done_camnode_processing, + preprocess_stamp: packet.preprocess_stamp, + cam_num, + synced_frame, + }; + fd.serialize(row)?; + } + Ok(()) + } } -pub async fn pre_run( - handle: &tokio::runtime::Handle, +pub(crate) async fn do_run_forever( show_tracking_params: bool, // sched_policy_priority: Option<(libc::c_int, libc::c_int)>, - camera_configs: BTreeMap, + camera_configs: BTreeMap, trigger_cfg: TriggerType, - mainbrain_config: &braid_config_data::MainbrainConfig, - jwt_secret: Option>, - all_expected_cameras: std::collections::BTreeSet, + mainbrain_config: braid_config_data::MainbrainConfig, + secret_base64: Option, + all_expected_cameras: std::collections::BTreeSet, force_camera_sync_mode: bool, software_limit_framerate: flydra_types::StartSoftwareFrameRateLimit, saving_program_name: &str, -) -> Result { + listener: tokio::net::TcpListener, + mainbrain_server_info: BuiServerAddrInfo, +) -> Result<()> { let cal_fname: Option = mainbrain_config.cal_fname.clone(); let output_base_dirname: std::path::PathBuf = mainbrain_config.output_base_dirname.clone(); let tracking_params: flydra_types::TrackingParams = mainbrain_config.tracking_params.clone(); - let camdata_addr_unspecified: &str = &mainbrain_config.lowlatency_camdata_udp_addr; + let lowlatency_camdata_udp_addr = 
&mainbrain_config.lowlatency_camdata_udp_addr; - let http_api_server_addr: String = mainbrain_config.http_api_server_addr.clone(); - let http_api_server_token: Option = mainbrain_config.http_api_server_token.clone(); let model_pose_server_addr: std::net::SocketAddr = mainbrain_config.model_server_addr; let save_empty_data2d: bool = mainbrain_config.save_empty_data2d; @@ -506,27 +513,7 @@ pub async fn pre_run( // Create `stream_cancel::Valve` for shutting everything down. Note this is // `Clone`, so we can (and should) shut down everything with it. let (quit_trigger, valve) = stream_cancel::Valve::new(); - let (shtdwn_q_tx, mut shtdwn_q_rx) = tokio::sync::mpsc::channel::<()>(5); - - ctrlc::set_handler(move || { - // This closure can get called multiple times, but quit_trigger - // and shutdown_tx cannot be copied or cloned and thus can only - // but fired once. So in this signal handler we fire a message - // on a queue and then on the receive side only deal with the first - // send. - info!("got Ctrl-C, shutting down"); - - let shtdwn_q_tx2 = shtdwn_q_tx.clone(); - - // Send quit message. 
- match futures::executor::block_on(shtdwn_q_tx2.send(())) { - Ok(()) => {} - Err(e) => { - error!("failed sending quit command: {}", e); - } - } - }) - .expect("Error setting Ctrl-C handler"); + let (_shtdwn_q_tx, mut shtdwn_q_rx) = tokio::sync::mpsc::channel::<()>(5); let recon = if let Some(ref cal_fname) = cal_fname { info!("using calibration: {}", cal_fname.display()); @@ -555,7 +542,7 @@ pub async fn pre_run( let signal_all_cams_present = std::sync::Arc::new(std::sync::atomic::AtomicBool::new(false)); let signal_all_cams_synced = std::sync::Arc::new(std::sync::atomic::AtomicBool::new(false)); - let cam_manager = flydra2::ConnectedCamerasManager::new( + let mut cam_manager = flydra2::ConnectedCamerasManager::new( &recon, all_expected_cameras, signal_all_cams_present.clone(), @@ -571,18 +558,16 @@ pub async fn pre_run( } let ignore_latency = false; - let coord_processor = CoordProcessor::new( + let mut coord_processor = CoordProcessor::new( CoordProcessorConfig { tracking_params, save_empty_data2d, ignore_latency, mini_arena_debug_image_dir: None, }, - tokio::runtime::Handle::current(), cam_manager.clone(), recon.clone(), flydra2::BraidMetadataBuilder::saving_program_name(saving_program_name), - valve.clone(), )?; // Here is what we do on quit: @@ -592,7 +577,7 @@ pub async fn pre_run( let mut quit_trigger_container = Some(quit_trigger); let mut strand_cam_http_session_handler2 = strand_cam_http_session_handler.clone(); let braidz_write_tx_weak = coord_processor.braidz_write_tx.downgrade(); - handle.spawn(async move { + tokio::spawn(async move { while let Some(()) = shtdwn_q_rx.recv().await { debug!("got shutdown command {}:{}", file!(), line!()); @@ -625,11 +610,6 @@ pub async fn pre_run( debug!("shutdown handler finished {}:{}", file!(), line!()); }); - let mut bui_backend_config = from_bui_backend::get_bui_backend_config(); - bui_backend_config.cookie_name = "braid-bui-token".to_string(); - - let time_model_arc = Arc::new(RwLock::new(None)); - let 
(triggerbox_cmd, triggerbox_rx, fake_sync) = match &trigger_cfg { TriggerType::TriggerboxV1(_) => { let (tx, rx) = tokio::sync::mpsc::channel(20); @@ -643,7 +623,7 @@ pub async fn pre_run( let flydra_app_name = "Braid".to_string(); - let shared = HttpApiShared { + let shared = BraidHttpApiSharedState { fake_sync, csv_tables_dirname: None, fake_mp4_recording_path: None, @@ -655,43 +635,39 @@ pub async fn pre_run( flydra_app_name, all_expected_cameras_are_synced: false, }; + let shared_store = ChangeTracker::new(shared); + let mut shared_store_changes_rx = shared_store.get_changes(1); + let shared_store = Arc::new(RwLock::new(shared_store)); let expected_framerate_arc = Arc::new(RwLock::new(None)); let per_cam_data_arc = Arc::new(RwLock::new(Default::default())); - use std::net::ToSocketAddrs; - let http_api_server_addr = http_api_server_addr.to_socket_addrs()?.next().unwrap(); - - let auth = if let Some(ref secret) = jwt_secret { - if let Some(token) = http_api_server_token { - bui_backend::highlevel::generate_auth_with_token( - http_api_server_addr, - secret.to_vec(), - token, - )? - } else { - bui_backend::highlevel::generate_random_auth(http_api_server_addr, secret.to_vec())? - } - } else if http_api_server_addr.ip().is_loopback() { - AccessControl::Insecure(http_api_server_addr) - } else { - return Err(MainbrainError::JwtError.into()); - }; - - let (camdata_addr, camdata_socket) = { + let (public_camdata_addr, camdata_socket) = { // The port of the low latency UDP incoming data socket may be specified // as 0 in which case the OS will decide which port will actually be // bound. So here we create the socket and get its port. 
- let camdata_addr_unspecified = camdata_addr_unspecified.parse::().unwrap(); - let camdata_addr_unspecified_buf = addr_to_buf(&camdata_addr_unspecified)?; + let camdata_addr_unspecified_port = + if let Some(lowlatency_camdata_udp_addr) = lowlatency_camdata_udp_addr { + lowlatency_camdata_udp_addr.parse::().unwrap() + } else { + // No low latency UDP address specified. Default to the same IP + // as the mainbrain HTTP server (which may be unspecified) and + // let the OS assign a free port by setting the port as + // unspecified. + let mainbrain_tcp_addr = listener.local_addr()?; + let mut camdata_addr_unspecified_port = mainbrain_tcp_addr; + camdata_addr_unspecified_port.set_port(0); + camdata_addr_unspecified_port + }; + let camdata_socket = UdpSocket::bind(&camdata_addr_unspecified_port).await?; + let camdata_addr = camdata_socket.local_addr()?; + let public_camdata_addr = compute_public_camdata_addr(camdata_addr); debug!( - "flydra mainbrain camera listener at: {}", - camdata_addr_unspecified_buf + "flydra mainbrain camera UDP listener socket: internal: {camdata_addr}, public: {public_camdata_addr}" ); - let camdata_socket = UdpSocket::bind(&camdata_addr_unspecified).await?; - (camdata_socket.local_addr()?.to_string(), camdata_socket) + (public_camdata_addr, camdata_socket) }; if !output_base_dirname.exists() { @@ -709,214 +685,48 @@ pub async fn pre_run( let braidz_write_tx_weak = coord_processor.braidz_write_tx.downgrade(); - let my_app = launch_braid_http_backend( - auth, - cam_manager.clone(), - shared, - bui_backend_config, - camdata_addr, - camera_configs, - time_model_arc, - triggerbox_cmd, - sync_pulse_pause_started_arc, - expected_framerate_arc, - output_base_dirname.clone(), - braidz_write_tx_weak, - per_cam_data_arc.clone(), + let time_model_arc = Arc::new(RwLock::new(None)); + + // Create our app state. 
+ let app_state = BraidAppState { + shared_store: shared_store.clone(), + public_camdata_addr, force_camera_sync_mode, software_limit_framerate, - strand_cam_http_session_handler.clone(), - ) - .await?; - - // This creates a debug logger when `packet_capture_dump_fname` is not - // `None`. - let raw_packet_logger = - RawPacketLogger::new(mainbrain_config.packet_capture_dump_fname.as_deref())?; - - let is_loopback = my_app.inner.local_addr().ip().is_loopback(); - let mainbrain_server_info = - flydra_types::StrandCamBuiServerInfo::new(*my_app.inner.local_addr(), my_app.inner.token()); - let url = mainbrain_server_info.guess_base_url_with_token(); - println!( - "Depending on things, you may be able to login with this url: {}", - url - ); - if !is_loopback { - println!("This same URL as a QR code:"); - display_qr_url(&url); - } - - Ok(StartupPhase1 { - camdata_socket, - my_app, - mainbrain_server_info, - cam_manager, - strand_cam_http_session_handler, - handle: handle.clone(), - trigger_cfg, - triggerbox_rx, - model_pose_server_addr, - coord_processor, - valve, - signal_all_cams_present, - signal_all_cams_synced, - raw_packet_logger, - }) -} - -use flydra_types::HostClock; -use serde::Serialize; - -/// Format for debugging raw packet data direct from Strand Cam. -#[derive(Serialize)] -struct RawPacketLogRow { - cam_name: String, - #[serde(with = "flydra_types::timestamp_opt_f64")] - timestamp: Option>, - #[serde(with = "flydra_types::timestamp_f64")] - cam_received_time: FlydraFloatTimestampLocal, - device_timestamp: Option, - block_id: Option, - framenumber: i32, - n_frames_skipped: u32, - done_camnode_processing: f64, - preprocess_stamp: f64, - cam_num: Option, - synced_frame: Option, -} - -/// Logger for debugging raw packet data direct from Strand Cam. -struct RawPacketLogger { - fd: Option>, -} - -impl RawPacketLogger { - /// Create a new logger for debugging raw packet data. - /// - /// If `fname` argument is None, this does very little. 
- fn new(fname: Option<&std::path::Path>) -> Result { - let fd = fname - .map(std::fs::File::create) - .transpose()? - .map(csv::Writer::from_writer); - Ok(Self { fd }) - } + event_broadcaster: Default::default(), + per_cam_data_arc: per_cam_data_arc.clone(), + camera_configs, + next_connection_id: Arc::new(RwLock::new(0)), + expected_framerate_arc: expected_framerate_arc.clone(), + braidz_write_tx_weak, + cam_manager: cam_manager.clone(), + output_base_dirname, + strand_cam_http_session_handler: strand_cam_http_session_handler.clone(), + }; - /// Log debug data for raw packets. - /// - /// If no filename was given to `Self::new`, this does very little. - fn log_raw_packets( - &mut self, - packet: &flydra_types::FlydraRawUdpPacket, - cam_num: Option, - synced_frame: Option, - ) -> Result<()> { - if let Some(ref mut fd) = self.fd { - let row = RawPacketLogRow { - cam_name: packet.cam_name.clone(), - timestamp: packet.timestamp.clone(), - cam_received_time: packet.cam_received_time.clone(), - device_timestamp: packet.device_timestamp, - block_id: packet.block_id, - framenumber: packet.framenumber, - n_frames_skipped: packet.n_frames_skipped, - done_camnode_processing: packet.done_camnode_processing, - preprocess_stamp: packet.preprocess_stamp, - cam_num, - synced_frame, - }; - fd.serialize(row)?; + // This future will send state updates to all connected event listeners. 
+ let event_broadcaster = app_state.event_broadcaster.clone(); + let send_updates_future = async move { + while let Some((_prev_state, next_state)) = shared_store_changes_rx.next().await { + let frame_string = to_event_frame(&next_state); + event_broadcaster.broadcast_frame(frame_string).await; } - Ok(()) - } -} - -struct ValvedDebug -where - T: futures::stream::Stream, -{ - inner: T, -} - -impl ValvedDebug -where - T: futures::stream::Stream, -{ - fn new(inner: T) -> Self { - Self { inner } - } -} - -impl std::fmt::Debug for ValvedDebug -where - T: futures::stream::Stream, -{ - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { - f.debug_struct("ValvedDebug").finish_non_exhaustive() - } -} - -impl futures::stream::Stream for ValvedDebug -where - T: futures::stream::Stream, -{ - type Item = X; - fn poll_next( - self: Pin<&mut Self>, - cx: &mut std::task::Context<'_>, - ) -> std::task::Poll> { - // safe since we never move nor leak &mut - let inner = unsafe { self.map_unchecked_mut(|s| &mut s.inner) }; - inner.poll_next(cx) - } -} + }; -pub async fn run(phase1: StartupPhase1) -> Result<()> { - let camdata_socket = phase1.camdata_socket; - let my_app = phase1.my_app; - - let mainbrain_server_info = phase1.mainbrain_server_info; - let mut cam_manager = phase1.cam_manager; - let strand_cam_http_session_handler = phase1.strand_cam_http_session_handler; - let handle = phase1.handle; - let rt_handle = handle.clone(); - let rt_handle2 = rt_handle.clone(); - let rt_handle3 = rt_handle2.clone(); - let trigger_cfg = phase1.trigger_cfg; - let triggerbox_rx = phase1.triggerbox_rx; - let model_pose_server_addr = phase1.model_pose_server_addr; - let mut coord_processor = phase1.coord_processor; - let valve = phase1.valve; - let signal_all_cams_present = phase1.signal_all_cams_present; - let signal_all_cams_synced = phase1.signal_all_cams_synced; - let mut raw_packet_logger = phase1.raw_packet_logger; + let http_serve_future = + 
launch_braid_http_backend(secret_base64, listener, mainbrain_server_info, app_state) + .await?; let signal_triggerbox_connected = Arc::new(AtomicBool::new(false)); - let triggerbox_cmd = my_app.triggerbox_cmd.clone(); - - info!( - "http api server at {}", - mainbrain_server_info.guess_base_url_with_token() - ); - - let time_model_arc = my_app.time_model_arc.clone(); - let expected_framerate_arc = my_app.expected_framerate_arc.clone(); - let sync_pulse_pause_started_arc = my_app.sync_pulse_pause_started_arc.clone(); { let sender = SendConnectedCamToBuiBackend { - shared_store: my_app.inner.shared_arc().clone(), + shared_store: shared_store.clone(), }; let old_callback = cam_manager.set_cam_changed_callback(Box::new(sender)); assert!(old_callback.is_none()); } - let info = flydra_types::StaticMainbrainInfo { - name: env!("CARGO_PKG_NAME").into(), - version: env!("CARGO_PKG_VERSION").into(), - }; - let (triggerbox_data_tx, triggerbox_data_rx) = tokio::sync::mpsc::channel::(20); @@ -960,7 +770,7 @@ pub async fn run(phase1: StartupPhase1) -> Result<()> { tokio::spawn(triggerbox_future); } - let tracker = my_app.inner.shared_arc().clone(); + let tracker = shared_store.clone(); let on_new_clock_model = { let time_model_arc = time_model_arc.clone(); @@ -983,7 +793,7 @@ pub async fn run(phase1: StartupPhase1) -> Result<()> { tracker_guard.modify(|shared| shared.clock_model_copy = cm.clone()); } let strand_cam_http_session_handler2 = strand_cam_http_session_handler.clone(); - handle.spawn(async move { + tokio::spawn(async move { let r = strand_cam_http_session_handler2 .send_clock_model_to_all(cm) .await; @@ -1006,7 +816,7 @@ pub async fn run(phase1: StartupPhase1) -> Result<()> { use braid_triggerbox::{make_trig_fps_cmd, Cmd}; - let tx = my_app.triggerbox_cmd.clone().unwrap(); + let tx = triggerbox_cmd.clone().unwrap(); let cmd_rx = triggerbox_rx.unwrap(); let (rate_cmd, rate_actual) = make_trig_fps_cmd(*fps as f64); @@ -1087,27 +897,16 @@ pub async fn run(phase1: 
StartupPhase1) -> Result<()> { let live_stats_collector = LiveStatsCollector::new(tracker.clone()); let tracker2 = tracker.clone(); - let raw_cam_data_stream: Box< - dyn futures::stream::Stream< - Item = std::result::Result< - (flydra_types::FlydraRawUdpPacket, std::net::SocketAddr), - std::io::Error, - >, - > + Send - + Unpin, - > = { - let codec = CborPacketCodec::default(); - let stream = UdpFramed::new(camdata_socket, codec); - - Box::new(stream) - }; + // decode UDP frames + let raw_cam_data_stream = + tokio_util::udp::UdpFramed::new(camdata_socket, CborPacketCodec::default()); // Initiate camera synchronization on startup let sync_pulse_pause_started_arc2 = sync_pulse_pause_started_arc.clone(); let time_model_arc2 = time_model_arc.clone(); let cam_manager2 = cam_manager.clone(); let valve2 = valve.clone(); - let sync_start_jh = rt_handle3.spawn(async move { + let _sync_start_jh = tokio::spawn(async move { let interval_stream = tokio_stream::wrappers::IntervalStream::new(tokio::time::interval( std::time::Duration::from_secs(1), )); @@ -1134,9 +933,8 @@ pub async fn run(phase1: StartupPhase1) -> Result<()> { // Signal cameras are synchronized - let shared_store = my_app.inner.shared_arc().clone(); let valve2 = valve.clone(); - let sync_done_jh = rt_handle3.spawn(async move { + let _sync_done_jh = tokio::spawn(async move { let interval_stream = tokio_stream::wrappers::IntervalStream::new(tokio::time::interval( std::time::Duration::from_secs(1), )); @@ -1158,172 +956,182 @@ pub async fn run(phase1: StartupPhase1) -> Result<()> { let cam_manager2 = cam_manager.clone(); let live_stats_collector2 = live_stats_collector.clone(); - let flydra2_stream = futures::stream::StreamExt::filter_map(raw_cam_data_stream, move |r| { - // vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv - // Start of closure for on each incoming packet. 
+ let packet_filter = move |r| { + let live_stats_collector2 = live_stats_collector2.clone(); + let trigger_cfg = trigger_cfg.clone(); + let strand_cam_http_session_handler2 = strand_cam_http_session_handler2.clone(); + let cam_manager2 = cam_manager2.clone(); + let sync_pulse_pause_started_arc = sync_pulse_pause_started_arc.clone(); + let cam_manager = cam_manager.clone(); + // This creates a debug logger when `packet_capture_dump_fname` is not + // `None`. + let mut raw_packet_logger = + RawPacketLogger::new(mainbrain_config.packet_capture_dump_fname.as_deref()).unwrap(); + let time_model_arc = time_model_arc.clone(); + async move { + // vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv + // Start of closure for on each incoming packet. - // We run this closure for each incoming packet. + // We run this closure for each incoming packet. - // Let's be sure about the type of our input. - let r: std::result::Result< - (flydra_types::FlydraRawUdpPacket, std::net::SocketAddr), - std::io::Error, - > = r; + // Let's be sure about the type of our input. 
+ let r: std::result::Result< + (flydra_types::FlydraRawUdpPacket, std::net::SocketAddr), + std::io::Error, + > = r; - let (packet, _addr) = match r { - Ok(r) => r, - Err(e) => { - error!("{}", e); - return futures::future::ready(Some(StreamItem::EOF)); - } - }; + let (packet, _addr) = match r { + Ok(r) => r, + Err(e) => { + error!("{}", e); + return Some(StreamItem::EOF); + } + }; - let ros_cam_name = RosCamName::new(packet.cam_name.clone()); - live_stats_collector2.register_new_frame_data(&ros_cam_name, packet.points.len()); + let raw_cam_name = RawCamName::new(packet.cam_name.clone()); + live_stats_collector2.register_new_frame_data(&raw_cam_name, packet.points.len()); - let sync_time_min = match &trigger_cfg { - TriggerType::TriggerboxV1(_) => { - // Using trigger box - std::time::Duration::from_secs(SYNCHRONIZE_DURATION_SEC as u64) - } - TriggerType::FakeSync(_) => { - // Using fake trigger - std::time::Duration::from_secs(0) - } - }; + let sync_time_min = match &trigger_cfg { + TriggerType::TriggerboxV1(_) => { + // Using trigger box + std::time::Duration::from_secs(SYNCHRONIZE_DURATION_SEC as u64) + } + TriggerType::FakeSync(_) => { + // Using fake trigger + std::time::Duration::from_secs(0) + } + }; - // Create closure which is called only if there is a new frame offset - // (which occurs upon synchronization). - let send_new_frame_offset = |frame| { - let strand_cam_http_session_handler = strand_cam_http_session_handler2.clone(); - let cam_name = ros_cam_name.clone(); - let fut_no_err = async move { - match strand_cam_http_session_handler - .send_frame_offset(&cam_name, frame) - .await - { - Ok(_) => {} - Err(e) => { - error!("Error sending frame offset: {}", e); - } + // Create closure which is called only if there is a new frame offset + // (which occurs upon synchronization). 
+ let send_new_frame_offset = |frame| { + let strand_cam_http_session_handler = strand_cam_http_session_handler2.clone(); + let cam_name = raw_cam_name.clone(); + let fut_no_err = async move { + match strand_cam_http_session_handler + .send_frame_offset(&cam_name, frame) + .await + { + Ok(_) => {} + Err(e) => { + error!("Error sending frame offset: {}", e); + } + }; }; + tokio::spawn(fut_no_err); }; - rt_handle.spawn(fut_no_err); - }; - let synced_frame = cam_manager2.got_new_frame_live( - &packet, - &sync_pulse_pause_started_arc, - sync_time_min, - std::time::Duration::from_secs(SYNCHRONIZE_DURATION_SEC as u64 + 2), - send_new_frame_offset, - ); + let synced_frame = cam_manager2.got_new_frame_live( + &packet, + &sync_pulse_pause_started_arc, + sync_time_min, + std::time::Duration::from_secs(SYNCHRONIZE_DURATION_SEC as u64 + 2), + send_new_frame_offset, + ); - let cam_num = cam_manager.cam_num(&ros_cam_name); - - raw_packet_logger - .log_raw_packets(&packet, cam_num, synced_frame) - .unwrap(); - - let cam_num = match cam_num { - Some(cam_num) => cam_num, - None => { - let known_ros_cam_names = cam_manager.all_ros_cam_names(); - let cam_names = known_ros_cam_names - .iter() - .map(|x| format!("\"{}\"", x.as_str())) - .collect::>() - .join(", "); - debug!( - "Unknown camera name \"{}\" ({} expected cameras: [{}]).", - ros_cam_name.as_str(), - cam_names.len(), - cam_names - ); - // Cannot compute cam_num, drop this data. 
- return futures::future::ready(None); - } - }; + let cam_num = cam_manager.cam_num(&raw_cam_name); - let (synced_frame, trigger_timestamp) = match synced_frame { - Some(synced_frame) => { - let time_model = time_model_arc.read(); - let trigger_timestamp = compute_trigger_timestamp(&time_model, synced_frame); - (synced_frame, trigger_timestamp) - } - None => { - // cannot compute synced_frame number, drop this data - return futures::future::ready(None); - } - }; + raw_packet_logger + .log_raw_packets(&packet, cam_num, synced_frame) + .unwrap(); - let frame_data = flydra2::FrameData::new( - ros_cam_name, - cam_num, - synced_frame, - trigger_timestamp, - packet.cam_received_time, - packet.device_timestamp, - packet.block_id, - ); + let cam_num = match cam_num { + Some(cam_num) => cam_num, + None => { + let known_raw_cam_names = cam_manager.all_raw_cam_names(); + let cam_names = known_raw_cam_names + .iter() + .map(|x| format!("\"{}\"", x.as_str())) + .collect::>() + .join(", "); + debug!( + "Unknown camera name \"{}\" ({} expected cameras: [{}]).", + raw_cam_name.as_str(), + cam_names.len(), + cam_names + ); + // Cannot compute cam_num, drop this data. + return None; + } + }; - assert!(packet.points.len() < u8::max_value() as usize); - let points = packet - .points - .into_iter() - .enumerate() - .map(|(idx, pt)| { - assert!(idx <= 255); - flydra2::NumberedRawUdpPoint { idx: idx as u8, pt } - }) - .collect(); + let (synced_frame, trigger_timestamp) = match synced_frame { + Some(synced_frame) => { + let time_model = time_model_arc.read(); + let trigger_timestamp = compute_trigger_timestamp(&time_model, synced_frame); + (synced_frame, trigger_timestamp) + } + None => { + // cannot compute synced_frame number, drop this data + return None; + } + }; - let fdp = FrameDataAndPoints { frame_data, points }; - futures::future::ready(Some(StreamItem::Packet(fdp))) - // This is the end of closure for each incoming packet. 
- // ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - }); + let frame_data = flydra2::FrameData::new( + raw_cam_name, + cam_num, + synced_frame, + trigger_timestamp, + packet.cam_received_time, + packet.device_timestamp, + packet.block_id, + ); + + assert!(packet.points.len() < u8::max_value() as usize); + let points = packet + .points + .into_iter() + .enumerate() + .map(|(idx, pt)| { + assert!(idx <= 255); + flydra2::NumberedRawUdpPoint { idx: idx as u8, pt } + }) + .collect(); + + let fdp = FrameDataAndPoints { frame_data, points }; + Some(StreamItem::Packet(fdp)) + // This is the end of closure for each incoming packet. + // ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + } + }; + + let flydra2_stream = raw_cam_data_stream.filter_map(packet_filter); let (data_tx, data_rx) = tokio::sync::mpsc::channel(50); - let ms = flydra2::new_model_server( - data_rx, - valve.clone(), - &model_pose_server_addr, - info, - rt_handle2, - ) - .await?; + tokio::spawn(flydra2::new_model_server(data_rx, model_pose_server_addr)); { let mut tracker = tracker2.write(); - tracker.modify(|shared| shared.model_server_addr = Some(*ms.local_addr())) + tracker.modify(|shared| shared.model_server_addr = Some(model_pose_server_addr)) } let expected_framerate: Option = *expected_framerate_arc9.read(); info!("expected_framerate: {:?}", expected_framerate); + tokio::spawn(send_updates_future); + tokio::spawn(http_serve_future); + coord_processor.add_listener(data_tx); - let consume_future = coord_processor.consume_stream( - ValvedDebug::new(valve.wrap(flydra2_stream)), - expected_framerate, - ); // We "block" (in an async way) here for the entire runtime of the program. - let writer_jh = consume_future.await; - - // If these tasks are still running, cancel them. - debug!("Runtime ending. 
Aborting any remaining tasks."); - sync_start_jh.abort(); - sync_done_jh.abort(); + let writer_jh = coord_processor + .consume_stream(flydra2_stream, expected_framerate) + .await; // Allow writer task time to finish writing. debug!("Runtime ending. Joining coord_processor.consume_stream future."); + writer_jh .join() .expect("join writer task 1") .expect("join writer task 2"); + // If these tasks are still running, cancel them. + debug!("Runtime ending. Aborting any remaining tasks."); + // sync_start_jh.abort(); + // sync_done_jh.abort(); + debug!("done {}:{}", file!(), line!()); Ok(()) @@ -1331,8 +1139,8 @@ pub async fn run(phase1: StartupPhase1) -> Result<()> { #[derive(Clone)] struct LiveStatsCollector { - shared: Arc>>, - collected: Arc>>, + shared: SharedStore, + collected: Arc>>, } #[derive(Debug)] @@ -1368,12 +1176,12 @@ impl LiveStatsAccum { } impl LiveStatsCollector { - fn new(shared: Arc>>) -> Self { + fn new(shared: SharedStore) -> Self { let collected = Arc::new(RwLock::new(BTreeMap::new())); Self { shared, collected } } - fn register_new_frame_data(&self, name: &RosCamName, n_points: usize) { + fn register_new_frame_data(&self, name: &RawCamName, n_points: usize) { let to_send = { // scope for lock on self.collected let mut collected = self.collected.write(); @@ -1406,13 +1214,13 @@ impl LiveStatsCollector { } } -async fn toggle_saving_csv_tables( +pub(crate) async fn toggle_saving_csv_tables( start_saving: bool, expected_framerate_arc: Arc>>, output_base_dirname: std::path::PathBuf, braidz_write_tx_weak: tokio::sync::mpsc::WeakSender, - per_cam_data_arc: Arc>>, - shared_data: Arc>>, + per_cam_data_arc: Arc>>, + shared_data: SharedStore, ) { if start_saving { let expected_framerate: Option = *expected_framerate_arc.read(); @@ -1520,3 +1328,9 @@ async fn begin_cam_sync_triggerbox_in_process( info!("requesting triggerbox to start sending pulses again"); Ok(()) } + +fn to_event_frame(state: &BraidHttpApiSharedState) -> String { + let buf = 
serde_json::to_string(&state).unwrap(); + let frame_string = format!("event: {BRAID_EVENT_NAME}\ndata: {buf}\n\n"); + frame_string +} diff --git a/braid/braid-run/src/multicam_http_session_handler.rs b/braid/braid-run/src/multicam_http_session_handler.rs index 1078a7ffd..885468bc4 100644 --- a/braid/braid-run/src/multicam_http_session_handler.rs +++ b/braid/braid-run/src/multicam_http_session_handler.rs @@ -1,51 +1,55 @@ use parking_lot::RwLock; use std::{collections::BTreeMap, sync::Arc}; +use tracing::{debug, error, info, warn}; use bui_backend_session::{self, InsecureSession}; -use flydra_types::{RosCamName, StrandCamHttpServerInfo}; +use flydra_types::{BuiServerInfo, RawCamName}; use strand_cam_storetype::CallbackType; /// Keeps HTTP sessions for all connected cameras. #[derive(Clone)] -pub struct StrandCamHttpSessionHandler { +pub(crate) struct StrandCamHttpSessionHandler { cam_manager: flydra2::ConnectedCamerasManager, - name_to_session: Arc>>, + pub(crate) name_to_session: Arc>>, } #[derive(Clone)] -enum MaybeSession { +pub(crate) enum MaybeSession { Alive(InsecureSession), Errored, } -use crate::mainbrain::{MainbrainError, MainbrainResult}; - -type MyBody = http_body_util::combinators::BoxBody; +trait MyErrorTrait { + fn boxerr(self) -> std::result::Result; +} -fn body_from_buf(body_buf: &[u8]) -> MyBody { - let body = http_body_util::Full::new(bytes::Bytes::from(body_buf.to_vec())); - use http_body_util::BodyExt; - MyBody::new(body.map_err(|_: std::convert::Infallible| unreachable!())) +impl MyErrorTrait for std::result::Result { + fn boxerr(self) -> std::result::Result { + match self { + Ok(v) => Ok(v), + Err(e) => Err(Box::new(e)), + } + } } +use crate::mainbrain::{BoxedStdError, MainbrainError, MainbrainResult}; + impl StrandCamHttpSessionHandler { - pub fn new(cam_manager: flydra2::ConnectedCamerasManager) -> Self { + pub(crate) fn new(cam_manager: flydra2::ConnectedCamerasManager) -> Self { Self { cam_manager, name_to_session: 
Arc::new(RwLock::new(BTreeMap::new())), } } - async fn open_session(&self, cam_name: &RosCamName) -> Result { + async fn open_session(&self, cam_name: &RawCamName) -> Result { // Create a new session if it doesn't exist. let (base_url, token) = { if let Some(cam_addr) = self.cam_manager.http_camserver_info(cam_name) { match cam_addr { - StrandCamHttpServerInfo::NoServer => { + BuiServerInfo::NoServer => { panic!("cannot connect to camera with no server"); } - StrandCamHttpServerInfo::Server(details) => { - (details.base_url(), details.token().clone()) - } + BuiServerInfo::Server(details) => (details.base_url(), details.token().clone()), } } else { panic!("attempting post to unknown camera") @@ -75,7 +79,7 @@ impl StrandCamHttpSessionHandler { async fn get_or_open_session( &self, - cam_name: &RosCamName, + cam_name: &RawCamName, ) -> Result { // Get session if it already exists. let opt_session = { self.name_to_session.read().get(cam_name).cloned() }; @@ -89,7 +93,7 @@ impl StrandCamHttpSessionHandler { async fn post( &self, - cam_name: &RosCamName, + cam_name: &RawCamName, args: ci2_remote_control::CamArg, ) -> Result<(), MainbrainError> { let session = self.get_or_open_session(cam_name).await?; @@ -97,8 +101,9 @@ impl StrandCamHttpSessionHandler { // Post to session match session { MaybeSession::Alive(mut session) => { - let body = - body_from_buf(&serde_json::to_vec(&CallbackType::ToCamera(args)).unwrap()); + let body = axum::body::Body::new(http_body_util::Full::new(bytes::Bytes::from( + serde_json::to_vec(&CallbackType::ToCamera(args)).unwrap(), + ))); let result = session.post("callback", body).await; match result { @@ -110,21 +115,26 @@ impl StrandCamHttpSessionHandler { } Err(err) => { error!( - "For {cam_name}: StrandCamHttpSessionHandler::post() got error {err:?}" + "For \"{}\": StrandCamHttpSessionHandler::post() got error {err:?}", + cam_name.as_str(), ); let mut name_to_session = self.name_to_session.write(); name_to_session.insert(cam_name.clone(), 
MaybeSession::Errored); + // return Err(MainbrainError::blarg); } } } - MaybeSession::Errored => {} + MaybeSession::Errored => { + // TODO: should an error be raised here? + // return Err(MainbrainError::blarg); + } }; Ok(()) } - pub async fn send_frame_offset( + pub(crate) async fn send_frame_offset( &self, - cam_name: &RosCamName, + cam_name: &RawCamName, frame_offset: u64, ) -> Result<(), MainbrainError> { info!( @@ -136,7 +146,7 @@ impl StrandCamHttpSessionHandler { self.post(cam_name, args).await } - async fn send_quit(&mut self, cam_name: &RosCamName) -> Result<(), MainbrainError> { + async fn send_quit(&mut self, cam_name: &RawCamName) -> Result<(), MainbrainError> { info!("for cam {}, sending quit", cam_name.as_str()); let args = ci2_remote_control::CamArg::DoQuit; @@ -154,19 +164,20 @@ impl StrandCamHttpSessionHandler { Ok(_) => Ok(()), Err(e) => { warn!( - "Ignoring error while sending quit command to {}: {}", - cam_name, e + "Ignoring error while sending quit command to \"{}\": {}", + cam_name.as_str(), + e ); - Err(e.into()) + Err(e) } } } - pub async fn send_quit_all(&mut self) { + pub(crate) async fn send_quit_all(&mut self) { use futures::{stream, StreamExt}; // Based on https://stackoverflow.com/a/51047786 const CONCURRENT_REQUESTS: usize = 5; - let results = stream::iter(self.cam_manager.all_ros_cam_names()) + let results = stream::iter(self.cam_manager.all_raw_cam_names()) .map(|cam_name| { let mut session = self.clone(); let cam_name = cam_name.clone(); @@ -184,25 +195,29 @@ impl StrandCamHttpSessionHandler { match r { Ok(()) => {} Err((cam_name, e)) => warn!( - "Ignoring error When sending quit command to camera {}: {}", - cam_name, e + "Ignoring error When sending quit command to camera \"{}\": {}", + cam_name.as_str(), + e ), } }) .await; } - pub async fn toggle_saving_mp4_files_all(&self, start_saving: bool) -> MainbrainResult<()> { - let cam_names = self.cam_manager.all_ros_cam_names(); + pub(crate) async fn toggle_saving_mp4_files_all( + 
&self, + start_saving: bool, + ) -> MainbrainResult<()> { + let cam_names = self.cam_manager.all_raw_cam_names(); for cam_name in cam_names.iter() { self.toggle_saving_mp4_files(cam_name, start_saving).await?; } Ok(()) } - pub async fn toggle_saving_mp4_files( + pub(crate) async fn toggle_saving_mp4_files( &self, - cam_name: &RosCamName, + cam_name: &RawCamName, start_saving: bool, ) -> MainbrainResult<()> { debug!( @@ -217,20 +232,20 @@ impl StrandCamHttpSessionHandler { Ok(()) } - pub async fn send_clock_model_to_all( + pub(crate) async fn send_clock_model_to_all( &self, clock_model: Option, ) -> MainbrainResult<()> { - let cam_names = self.cam_manager.all_ros_cam_names(); + let cam_names = self.cam_manager.all_raw_cam_names(); for cam_name in cam_names.iter() { self.send_clock_model(cam_name, clock_model.clone()).await?; } Ok(()) } - pub async fn send_clock_model( + pub(crate) async fn send_clock_model( &self, - cam_name: &RosCamName, + cam_name: &RawCamName, clock_model: Option, ) -> MainbrainResult<()> { debug!( @@ -244,17 +259,20 @@ impl StrandCamHttpSessionHandler { self.post(&cam_name, args).await } - pub async fn set_post_trigger_buffer_all(&self, num_frames: usize) -> MainbrainResult<()> { - let cam_names = self.cam_manager.all_ros_cam_names(); + pub(crate) async fn set_post_trigger_buffer_all( + &self, + num_frames: usize, + ) -> MainbrainResult<()> { + let cam_names = self.cam_manager.all_raw_cam_names(); for cam_name in cam_names.iter() { self.set_post_trigger_buffer(cam_name, num_frames).await?; } Ok(()) } - pub async fn set_post_trigger_buffer( + pub(crate) async fn set_post_trigger_buffer( &self, - cam_name: &RosCamName, + cam_name: &RawCamName, num_frames: usize, ) -> MainbrainResult<()> { debug!( @@ -269,15 +287,18 @@ impl StrandCamHttpSessionHandler { Ok(()) } - pub async fn initiate_post_trigger_mp4_all(&self) -> MainbrainResult<()> { - let cam_names = self.cam_manager.all_ros_cam_names(); + pub(crate) async fn 
initiate_post_trigger_mp4_all(&self) -> MainbrainResult<()> { + let cam_names = self.cam_manager.all_raw_cam_names(); for cam_name in cam_names.iter() { self.initiate_post_trigger_mp4(cam_name).await?; } Ok(()) } - pub async fn initiate_post_trigger_mp4(&self, cam_name: &RosCamName) -> MainbrainResult<()> { + pub(crate) async fn initiate_post_trigger_mp4( + &self, + cam_name: &RawCamName, + ) -> MainbrainResult<()> { debug!( "for cam {}, initiating post trigger recording", cam_name.as_str(), diff --git a/braidz-parser/src/lib.rs b/braidz-parser/src/lib.rs index f54ce237a..638748510 100644 --- a/braidz-parser/src/lib.rs +++ b/braidz-parser/src/lib.rs @@ -1,7 +1,4 @@ -#![cfg_attr( - feature = "backtrace", - feature(error_generic_member_access) -)] +#![cfg_attr(feature = "backtrace", feature(error_generic_member_access))] #[cfg(feature = "backtrace")] use std::backtrace::Backtrace; @@ -445,8 +442,7 @@ fn get_hlog(mut rdr: R) -> Result, ()> { match interval { LogEntry::Interval(ilh) => { let serialized_histogram = - base64::decode_config(ilh.encoded_histogram(), base64::STANDARD) - .map_err(|_| ())?; + base64::decode(ilh.encoded_histogram()).map_err(|_| ())?; let decoded_hist: Histogram = deserializer .deserialize(&mut std::io::Cursor::new(&serialized_histogram)) .map_err(|_| ())?; diff --git a/bui-backend-session/Cargo.toml b/bui-backend-session/Cargo.toml index 0ee7b0806..c81212eaf 100644 --- a/bui-backend-session/Cargo.toml +++ b/bui-backend-session/Cargo.toml @@ -6,7 +6,7 @@ edition = "2021" rust-version="1.60" [dependencies] -log = "0.4" +tracing = "0.1.40" futures = "0.3" hyper = {version="1.1", default-features = false, features=["client","http1"]} parking_lot = "0.12" @@ -17,4 +17,7 @@ cookie = "0.18" hyper-util = { version = "0.1.1", features = ["client-legacy", "tokio", "client", "http1"] } http-body-util = "0.1.0" thiserror = "1.0.51" -bui-backend-types = "0.8" + +bui-backend-session-types = { path = "../bui-backend-session/types" } +rust-cam-bui-types = 
{path="../rust-cam-bui-types"} +axum = "0.7.4" diff --git a/bui-backend-session/demo/Cargo.toml b/bui-backend-session/demo/Cargo.toml index 29f32ee01..1e2959c67 100644 --- a/bui-backend-session/demo/Cargo.toml +++ b/bui-backend-session/demo/Cargo.toml @@ -6,13 +6,11 @@ edition = "2021" rust-version="1.60" [dependencies] -log = "0.4" futures = "0.3" hyper = "1.1" tokio = {version="1.0.1", features=["full"]} -env_logger = "0.10" -bui-backend-types = "0.8" - -bui-backend-session = {path=".."} http-body-util = "0.1.0" bytes = "1.5.0" + +bui-backend-session = {path=".."} +bui-backend-session-types = { path = "../types" } diff --git a/bui-backend-session/demo/src/main.rs b/bui-backend-session/demo/src/main.rs index bad70262e..10489c2b3 100644 --- a/bui-backend-session/demo/src/main.rs +++ b/bui-backend-session/demo/src/main.rs @@ -1,7 +1,7 @@ use http_body_util::BodyExt; use bui_backend_session::future_session; -use bui_backend_types::AccessToken; +use bui_backend_session_types::AccessToken; #[tokio::main] async fn main() -> Result<(), bui_backend_session::Error> { diff --git a/bui-backend-session/src/lib.rs b/bui-backend-session/src/lib.rs index 652e25d8b..33a807ef1 100644 --- a/bui-backend-session/src/lib.rs +++ b/bui-backend-session/src/lib.rs @@ -1,7 +1,5 @@ -#[macro_use] -extern crate log; - -use bui_backend_types::AccessToken; +use bui_backend_session_types::AccessToken; +use http::{header::ACCEPT, HeaderValue}; use parking_lot::RwLock; use std::sync::Arc; use thiserror::Error; @@ -9,13 +7,8 @@ use thiserror::Error; const SET_COOKIE: &str = "set-cookie"; const COOKIE: &str = "cookie"; -pub type MyBody = http_body_util::combinators::BoxBody; - -fn body_from_buf(body_buf: &[u8]) -> MyBody { - let body = http_body_util::Full::new(bytes::Bytes::from(body_buf.to_vec())); - use http_body_util::BodyExt; - MyBody::new(body.map_err(|_: std::convert::Infallible| unreachable!())) -} +// pub type MyBody = http_body_util::combinators::BoxBody; +pub type MyBody = 
axum::body::Body; /// Possible errors #[derive(Error, Debug)] @@ -26,6 +19,9 @@ pub enum Error { /// A wrapped error from the hyper-util crate #[error("hyper-util error `{0}`")] HyperUtil(#[from] hyper_util::client::legacy::Error), + /// The request was not successful. + #[error("request not successful. status code: `{0}`")] + RequestFailed(http::StatusCode), } /// A session for a single server. @@ -33,13 +29,14 @@ pub enum Error { /// Warning: this does not attempt to store cookies associated with a single /// server and thus could subject you to cross-signing attacks. Therefore, the /// name `InsecureSession`. -#[derive(Clone)] +#[derive(Clone, Debug)] pub struct InsecureSession { base_uri: hyper::Uri, jar: Arc>, } /// Create an `InsecureSession` which has already made a request +#[tracing::instrument(level = "debug", skip(token))] pub async fn future_session(base_uri: &str, token: AccessToken) -> Result { let mut base = InsecureSession::new(base_uri); base.get_with_token("", token).await?; @@ -85,32 +82,57 @@ impl InsecureSession { .build() .expect("build url") } - async fn inner_get( + async fn inner_req( &mut self, rel: &str, token: Option, + accepts: &[HeaderValue], + method: http::Method, + body: axum::body::Body, ) -> Result, Error> { let uri = self.get_rel_uri(rel, token); - let mut req = hyper::Request::new(body_from_buf(b"")); - *req.method_mut() = hyper::Method::GET; + let mut req = hyper::Request::new(body); + *req.method_mut() = method; *req.uri_mut() = uri; - self.make_request(req).await + for accept in accepts.iter() { + req.headers_mut().insert(ACCEPT, (*accept).clone()); + } + let response = self.make_request(req).await?; + Ok(response) } pub async fn get( &mut self, rel: &str, ) -> Result, Error> { - self.inner_get(rel, None).await + self.inner_req(rel, None, &[], http::Method::GET, axum::body::Body::empty()) + .await + } + pub async fn req_accepts( + &mut self, + rel: &str, + accepts: &[HeaderValue], + method: http::Method, + body: 
axum::body::Body, + ) -> Result, Error> { + self.inner_req(rel, None, accepts, method, body).await } async fn get_with_token( &mut self, rel: &str, token: AccessToken, ) -> Result, Error> { - self.inner_get(rel, Some(token)).await + self.inner_req( + rel, + Some(token), + &[], + http::Method::GET, + axum::body::Body::empty(), + ) + .await } + #[tracing::instrument(skip_all)] pub async fn post( &mut self, rel: &str, @@ -124,6 +146,7 @@ impl InsecureSession { self.make_request(req).await } + #[tracing::instrument(skip_all)] async fn make_request( &mut self, mut req: hyper::Request, @@ -132,11 +155,11 @@ impl InsecureSession { hyper_util::client::legacy::Client::builder(hyper_util::rt::TokioExecutor::new()) .build_http(); - debug!("building request"); + tracing::trace!("building request"); { let jar = self.jar.read(); for cookie in jar.iter() { - debug!("adding cookie {}", cookie); + tracing::trace!("adding cookie {}", cookie); req.headers_mut().insert( COOKIE, hyper::header::HeaderValue::from_str(&cookie.to_string()).unwrap(), @@ -144,12 +167,29 @@ impl InsecureSession { } } - let jar2 = self.jar.clone(); - debug!("making request {:?}", req); - let response = client.request(req).await?; + req.headers_mut().insert( + http::header::CONTENT_TYPE, + hyper::header::HeaderValue::from_str("application/json").unwrap(), + ); - debug!("handling response {:?}", response); - handle_response(jar2, response) + let jar2 = self.jar.clone(); + tracing::debug!("making request {:?}", req); + let response = client.request(req).await.map_err(|e| { + tracing::error!("encountered error {e}: {e:?}"); + Error::from(e) + })?; + + tracing::debug!("handling response {:?}", response); + let response = handle_response(jar2, response)?; + let status_code = response.status(); + if !status_code.is_success() { + use http_body_util::BodyExt; + let body_bytes = response.into_body().collect().await.unwrap().to_bytes(); + let body_str = std::string::String::from_utf8_lossy(body_bytes.as_ref()); + 
tracing::error!("response {status_code:?}: \"{body_str}\""); + return Err(Error::RequestFailed(status_code)); + } + Ok(response) } } @@ -157,7 +197,7 @@ fn handle_response( jar2: Arc>, mut response: hyper::Response, ) -> Result, Error> { - debug!("starting to handle cookies in response {:?}", response); + tracing::trace!("starting to handle cookies in response {:?}", response); use hyper::header::Entry::*; match response.headers_mut().entry(SET_COOKIE) { @@ -168,12 +208,12 @@ fn handle_response( let c = cookie::Cookie::parse(cookie_raw.to_str().unwrap().to_string()).unwrap(); jar.add(c); // TODO FIXME do not reinsert same cookie again and again - debug!("stored cookie {:?}", cookie_raw); + tracing::trace!("stored cookie {:?}", cookie_raw); } } Vacant(_) => {} } - debug!("done handling cookies in response {:?}", response); + tracing::trace!("done handling cookies in response {:?}", response); Ok(response) } diff --git a/bui-backend-session/types/Cargo.toml b/bui-backend-session/types/Cargo.toml new file mode 100644 index 000000000..2fdfcf382 --- /dev/null +++ b/bui-backend-session/types/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "bui-backend-session-types" +version = "0.1.0" +edition = "2021" + +[dependencies] +uuid = { version = "1.0", features = ["serde"] } +serde = {version="1.0", features=["derive"]} + +[features] +default = [] + +uuid-v4 = ["uuid/v4"] diff --git a/bui-backend-session/types/src/lib.rs b/bui-backend-session/types/src/lib.rs new file mode 100644 index 000000000..8a62bcbc4 --- /dev/null +++ b/bui-backend-session/types/src/lib.rs @@ -0,0 +1,32 @@ +use serde::{Deserialize, Serialize}; + +/// Identifier for each session (one per client browser). 
+#[derive(Serialize, Deserialize, Debug, Clone, Copy, Eq, PartialEq, Hash)] +pub struct SessionKey(pub uuid::Uuid); + +#[cfg(feature = "uuid-v4")] +impl SessionKey { + /// Create a new SessionKey + #[cfg_attr(docsrs, doc(cfg(feature = "uuid-v4")))] + pub fn new() -> Self { + SessionKey(uuid::Uuid::new_v4()) + } +} + +/// Identifier for each connected event stream listener (one per client tab). +#[derive(Serialize, Deserialize, Debug, Clone, Copy, Eq, PartialEq, Hash)] +pub struct ConnectionKey { + pub addr: std::net::SocketAddr, +} + +/// A token which can be required to gain access to HTTP API +/// +/// If the server receives a valid token, it will respond with a cookie carrying +/// a session key. +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] +pub enum AccessToken { + /// No token needed (access must be controlled via other means). + NoToken, + /// A pre-shared token to gain access. + PreSharedToken(String), +} diff --git a/build-util/Cargo.toml b/build-util/Cargo.toml index 0c3d4fbe3..312309527 100644 --- a/build-util/Cargo.toml +++ b/build-util/Cargo.toml @@ -3,10 +3,3 @@ name = "build-util" version = "0.1.0" edition = "2021" rust-version = "1.60" - -[dependencies] -bui-backend-codegen = {version="0.9.1", default-features = false} - -[features] -bundle_files = ["bui-backend-codegen/bundle_files"] -serve_files = ["bui-backend-codegen/serve_files"] diff --git a/build-util/src/lib.rs b/build-util/src/lib.rs index 723a5b7cb..46af1ebd5 100644 --- a/build-util/src/lib.rs +++ b/build-util/src/lib.rs @@ -11,19 +11,12 @@ pub fn git_hash(orig_version: &str) -> Result<(), Box<(dyn std::error::Error)>> Ok(()) } -pub fn bui_backend_generate_code

( - files_dir: P, - generated_path: &str, -) -> Result<(), Box<(dyn std::error::Error)>> -where - P: AsRef, -{ - match bui_backend_codegen::codegen(&files_dir, generated_path) { - Ok(()) => Ok(()), - Err(e) => Err(format!( - "Error in the process of generating '{generated_path}' when attempting to read {} : {e}", - files_dir.as_ref().display() - ) - .into()), - } -} +// pub fn bui_backend_generate_code

( +// _files_dir: P, +// _generated_path: &str, +// ) -> Result<(), Box<(dyn std::error::Error)>> +// where +// P: AsRef, +// { +// todo!(); +// } \ No newline at end of file diff --git a/env-tracing-logger/Cargo.toml b/env-tracing-logger/Cargo.toml index 403ea5439..03895918d 100644 --- a/env-tracing-logger/Cargo.toml +++ b/env-tracing-logger/Cargo.toml @@ -7,6 +7,4 @@ rust-version = "1.60" [dependencies] tracing = "0.1.37" -tracing-flame = "0.2.0" -tracing-log = "0.1.3" tracing-subscriber = { version = "0.3.16", features = ["env-filter", "time"] } diff --git a/env-tracing-logger/src/lib.rs b/env-tracing-logger/src/lib.rs index a1512ac1c..0951d3519 100644 --- a/env-tracing-logger/src/lib.rs +++ b/env-tracing-logger/src/lib.rs @@ -1,60 +1,10 @@ -use tracing::{event::Event, subscriber::SetGlobalDefaultError, Level, Subscriber}; -use tracing_log::NormalizeEvent; +use tracing::subscriber::SetGlobalDefaultError; use tracing_subscriber::{ - fmt::{ - self, - format::Writer, - time::{self, FormatTime}, - FmtContext, FormatEvent, FormatFields, - }, + fmt::{self, time}, prelude::*, - registry::LookupSpan, EnvFilter, }; -struct MyEventFormat { - timer: time::Uptime, -} - -// This is mostly from the tracing_subscriber Compact formatter. As there was no -// way to suppress printing the context with Compact, I reimplemented this here. -// ANSI codes have been removed for simplicity but would be nice to add back. 
-impl FormatEvent for MyEventFormat -where - S: Subscriber + for<'a> LookupSpan<'a>, - N: for<'a> FormatFields<'a> + 'static, -{ - fn format_event( - &self, - ctx: &FmtContext<'_, S, N>, - mut writer: Writer<'_>, - event: &Event<'_>, - ) -> std::fmt::Result { - let normalized_meta = event.normalized_metadata(); - - let meta = normalized_meta.as_ref().unwrap_or_else(|| event.metadata()); - - if self.timer.format_time(&mut writer).is_err() { - writer.write_str("")?; - } - - let fmt_level = match *meta.level() { - Level::ERROR => "ERROR", - Level::WARN => " WARN", - Level::INFO => " INFO", - Level::DEBUG => "DEBUG", - Level::TRACE => "TRACE", - }; - write!(writer, " {} ", fmt_level)?; - - write!(writer, "{}: ", meta.target())?; - - ctx.format_fields(writer.by_ref(), event)?; - - writeln!(writer) - } -} - struct Guard {} impl Drop for Guard { @@ -68,10 +18,9 @@ pub fn init() -> impl Drop { } fn init_result() -> Result { - // let evt_fmt = format().with_timer(time::Uptime::default()).compact(); - let evt_fmt = MyEventFormat { - timer: time::Uptime::default(), - }; + let evt_fmt = tracing_subscriber::fmt::format() + .with_timer(time::Uptime::default()) + .compact(); let fmt_layer = fmt::layer().event_format(evt_fmt); tracing_subscriber::registry() diff --git a/event-stream-types/Cargo.toml b/event-stream-types/Cargo.toml new file mode 100644 index 000000000..b0d7b757f --- /dev/null +++ b/event-stream-types/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "event-stream-types" +version = "0.1.0" +edition = "2021" + +[dependencies] +tokio = "1" +uuid = { version = "1.0", features = ["serde"] } +serde = { version = "1.0", features = ["derive"] } +http = "1" +axum = "0.7.4" +http-body = "1.0.0" +bytes = "1.5.0" +tokio-stream = "0.1.14" +futures = "0.3.30" +tracing = "0.1.37" +mime = "0.3.17" + +bui-backend-session-types = { path = "../bui-backend-session/types" } diff --git a/event-stream-types/src/lib.rs b/event-stream-types/src/lib.rs new file mode 100644 index 
000000000..2b264adf9 --- /dev/null +++ b/event-stream-types/src/lib.rs @@ -0,0 +1,241 @@ +use bui_backend_session_types::{ConnectionKey, SessionKey}; +use bytes::Bytes; +use futures::StreamExt; +use http::{header::ACCEPT, request::Parts, StatusCode}; +use http_body::Frame; +use std::{ + collections::HashMap, + convert::Infallible, + pin::Pin, + sync::{Arc, RwLock}, +}; +use tokio::sync::mpsc::Sender; +use tokio_stream::wrappers::ReceiverStream; + +pub type EventChunkSender = Sender, Infallible>>; +type EventReceiver = ReceiverStream, Infallible>>; + +/// The type of possible connect event, either connect or disconnect. +#[derive(Debug)] +pub enum ConnectionEventType { + /// A connection event with sink for event stream messages to the connected client. + Connect(EventChunkSender), + /// A disconnection event. + Disconnect, +} + +/// State associated with connection or disconnection. +#[derive(Debug)] +pub struct ConnectionEvent { + /// The type of connection for this event. + pub typ: ConnectionEventType, + /// Identifier for the connecting session (one per browser). + pub session_key: SessionKey, + /// Identifier for the connection (one per tab). + pub connection_key: ConnectionKey, + /// The path being requested (starts with `BuiService::events_prefix`). 
+ pub path: String, +} + +// header extractor for "Accept: text/event-stream" -------------------------- + +pub struct AcceptsEventStream; + +#[axum::async_trait] +impl axum::extract::FromRequestParts for AcceptsEventStream { + type Rejection = (StatusCode, &'static str); + async fn from_request_parts(p: &mut Parts, _: &S) -> Result { + const ES: &[u8] = b"text/event-stream"; + if p.headers.get_all(ACCEPT).iter().any(|v| v.as_bytes() == ES) { + Ok(AcceptsEventStream) + } else { + Err(( + StatusCode::BAD_REQUEST, + "Bad request: It is required that you have an \ + HTTP Header \"Accept: text/event-stream\"", + )) + } + } +} + +// TolerantJson extractor -------------------------- + +/// This is much like `axum::Json` but does not fail if the request does not set +/// the 'Content-Type' header. +/// +/// This is purely for backwards-compatibility and can be removed sometime. +pub struct TolerantJson(pub T); + +#[axum::async_trait] +impl axum::extract::FromRequest for TolerantJson +where + T: serde::de::DeserializeOwned, + S: Send + Sync, +{ + type Rejection = axum::extract::rejection::JsonRejection; + + async fn from_request( + mut req: axum::extract::Request, + state: &S, + ) -> Result { + if !json_content_type(req.headers()) { + tracing::error!("request should indicate \"Content-Type: application/json\""); + req.headers_mut().insert( + http::header::CONTENT_TYPE, + http::HeaderValue::from_static("application/json"), + ); + } + match axum::Json::from_request(req, state).await { + Ok(payload) => Ok(TolerantJson(payload.0)), + Err(e) => Err(e), + } + } +} + +// events body --------------------------- + +pub struct EventsBody { + events: EventReceiver, +} + +impl EventsBody { + fn new(events: EventReceiver) -> Self { + Self { events } + } +} + +impl http_body::Body for EventsBody { + type Data = Bytes; + type Error = Infallible; + + fn poll_frame( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll, Self::Error>>> { + 
self.events.poll_next_unpin(cx) + } +} + +impl axum::response::IntoResponse for EventsBody { + fn into_response(self) -> axum::response::Response { + let mut response = axum::response::Response::new(axum::body::Body::new(self)); + response.headers_mut().insert( + "content-type", + http::header::HeaderValue::from_static("text/event-stream"), + ); + response + } +} + +// ----- + +#[derive(Debug, Clone, Eq, PartialEq, Hash)] +pub struct ConnectionSessionKey { + session_key: uuid::Uuid, + connection_key: std::net::SocketAddr, +} + +impl ConnectionSessionKey { + pub fn new(session_key: uuid::Uuid, connection_key: std::net::SocketAddr) -> Self { + Self { + session_key, + connection_key, + } + } +} + +/// broadcasts events to many listeners. +/// +/// This is generic over the key type. +#[derive(Debug, Clone)] +pub struct EventBroadcaster { + txers: Arc>>, +} + +impl Default for EventBroadcaster { + fn default() -> Self { + Self { + txers: Default::default(), + } + } +} + +impl EventBroadcaster +where + KEY: std::cmp::Eq + std::hash::Hash, +{ + /// Add a new connection indexed by a key. + /// + /// This returns an [EventsBody]. + pub fn new_connection(&self, key: KEY) -> (EventChunkSender, EventsBody) { + let (tx, rx) = tokio::sync::mpsc::channel(10); + let mut txers = self.txers.write().unwrap(); + txers.insert(key, tx.clone()); + let rx = tokio_stream::wrappers::ReceiverStream::new(rx); + let body = EventsBody::new(rx); + + (tx, body) + } + /// Transmit bytes as frame + /// + /// This will drop connections which have errored. + pub async fn broadcast_frame(&self, frame_string: String) { + let txers: Vec<_> = { + // Keep lock in this scope. + // Move all listeners out of shared map. + self.txers.write().unwrap().drain().collect() + }; + + // now we have released the lock and can await without holding the lock. 
+ let mut keep_event_listeners = Vec::with_capacity(txers.len()); + for (key, tx) in txers.into_iter() { + match tx.send(Ok(Frame::data(frame_string.clone().into()))).await { + Ok(()) => { + keep_event_listeners.push((key, tx)); + } + Err(tokio::sync::mpsc::error::SendError(_frame)) => { + // The receiver was dropped because the connection closed. + tracing::debug!("send error"); + } + } + } + + { + // Keep lock in this scope. + // Move all listeners back into shared map. + let mut event_listeners = self.txers.write().unwrap(); + for (key, value) in keep_event_listeners.into_iter() { + event_listeners.insert(key, value); + } + }; + } +} + +// ---- + +// This does not really belong here... + +fn json_content_type(headers: &http::HeaderMap) -> bool { + let content_type = if let Some(content_type) = headers.get(http::header::CONTENT_TYPE) { + content_type + } else { + return false; + }; + + let content_type = if let Ok(content_type) = content_type.to_str() { + content_type + } else { + return false; + }; + + let mime = if let Ok(mime) = content_type.parse::() { + mime + } else { + return false; + }; + + let is_json_content_type = mime.type_() == "application" + && (mime.subtype() == "json" || mime.suffix().map_or(false, |name| name == "json")); + + is_json_content_type +} diff --git a/flydra-types/Cargo.toml b/flydra-types/Cargo.toml index d6dff1435..4f9360410 100644 --- a/flydra-types/Cargo.toml +++ b/flydra-types/Cargo.toml @@ -16,24 +16,31 @@ serde_cbor = {version="0.11.2", optional=true} tokio-util = {version="0.7.3", features=["codec"], optional=true} bytes = {version="1.0", optional=true} bitflags = "1.0" -dns-lookup = {version="1", optional=true} ordered-float = {version="3.0.0", features=["serde"]} static_assertions = "1.1.0" -bui-backend-types = "0.8" nalgebra = {version="0.32", features=["serde-serialize"]} num-integer = "0.1" +http = "1" +if-addrs = {version="0.11.0", optional=true} +percent-encoding = "2.3.1" + +anyhow = { version = "1", optional = true } 
+axum-token-auth = { version = "0.1.0", optional = true } +tokio = {version="1", optional=true} withkey = {path="../withkey"} datetime-conversion = {path="../datetime-conversion"} rust-cam-bui-types = {path="../rust-cam-bui-types"} flydra-pt-detect-cfg = {path="../flydra-feature-detector/flydra-pt-detect-cfg"} flydra-feature-detector-types = {path="../flydra-feature-detector/flydra-feature-detector-types"} +bui-backend-session-types = { path = "../bui-backend-session/types" } [features] default=["with-tokio-codec"] -with-dns=["dns-lookup"] with-tokio-codec=["tokio-util", "bytes", "serde_cbor"] +start-listener = [ "anyhow", "axum-token-auth", "tokio" ] +build-urls = ["if-addrs"] [dev-dependencies] anyhow = "1" diff --git a/flydra-types/src/lib.rs b/flydra-types/src/lib.rs index a2de77818..411f2cb6c 100644 --- a/flydra-types/src/lib.rs +++ b/flydra-types/src/lib.rs @@ -12,10 +12,11 @@ extern crate static_assertions; use ordered_float::NotNan; use rust_cam_bui_types::{ClockModel, RecordingPath}; +use std::net::{IpAddr, SocketAddr}; use serde::{Deserialize, Deserializer, Serialize}; -use bui_backend_types::AccessToken; +use bui_backend_session_types::AccessToken; use withkey::WithKey; pub const DEFAULT_MODEL_SERVER_ADDR: &str = "0.0.0.0:8397"; @@ -71,18 +72,23 @@ pub const REPROJECTION_DIST_HLOG_FNAME: &str = "reprojection_distance_100x_pixel // this approach is common with a scale factor of 10. // -------------------------------------------------------------------- +// Changes to this struct should update BraidMetadataSchemaTag. #[derive(Debug, Serialize, Deserialize, Clone)] pub struct CamInfoRow { - // changes to this should update BraidMetadataSchemaTag + /// The index of the camera. This changes from invocation to invocation of Braid. pub camn: CamNum, + /// The name of the camera. This is stable across invocations of Braid. + /// + /// Any valid UTF-8 string is possible. (Previously, this was the "ROS name" + /// of the camera in which, e.g. 
'-' was replaced with '_'. This is no + /// longer the case.) pub cam_id: String, - // pub hostname: String, } +// Changes to this struct should update BraidMetadataSchemaTag. #[allow(non_snake_case)] #[derive(Debug, Serialize, Deserialize, Clone)] pub struct KalmanEstimatesRow { - // changes to this struct should update BraidMetadataSchemaTag pub obj_id: u32, pub frame: SyncFno, /// The timestamp when the trigger pulse fired. @@ -150,6 +156,7 @@ impl RawCamName { pub fn to_ros(&self) -> RosCamName { let ros_name: String = self.0.replace('-', "_"); let ros_name: String = ros_name.replace(' ', "_"); + let ros_name: String = ros_name.replace('/', "_"); RosCamName::new(ros_name) } pub fn as_str(&self) -> &str { @@ -162,9 +169,21 @@ impl RawCamName { pub struct RosCamName(String); impl RosCamName { + /// Create new `RosCamName` assuming input `s` is already valid ROS name. pub fn new(s: String) -> Self { RosCamName(s) } + /// Create new `RosCamName` if `s` is valid ROS name. + pub fn new_checked(s: String) -> Option { + let raw = RawCamName::new(s); + let ros_name = raw.to_ros(); + if ros_name.0 == raw.0 { + // No replacement was used, therefore `s` is ROS already. + Some(ros_name) + } else { + None + } + } pub fn as_str(&self) -> &str { &self.0 } @@ -176,15 +195,29 @@ impl std::fmt::Display for RosCamName { } } -pub const REMOTE_CAMERA_INFO_PATH: &str = "remote_camera_info/"; +pub mod braid_http { + // URL paths on Braid HTTP server. + pub const REMOTE_CAMERA_INFO_PATH: &str = "remote-camera-info"; + pub const CAM_PROXY_PATH: &str = "cam-proxy"; -#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] + /// Encode camera name, potentially with slashes or spaces, to be a single + /// URL path component. + /// + /// Use percent-encoding, which `axum::extract::Path` automatically decodes. 
+ pub fn encode_cam_name(cam_name: &crate::RawCamName) -> String { + percent_encoding::utf8_percent_encode(&cam_name.0, percent_encoding::NON_ALPHANUMERIC) + .to_string() + } +} + +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, Default)] pub enum StartSoftwareFrameRateLimit { /// Set the frame_rate limit at a given frame rate. Enable(f64), /// Disable the frame_rate limit. Disabled, /// Do not change the frame rate limit. + #[default] NoChange, } @@ -206,6 +239,9 @@ fn return_false() -> bool { #[serde(deny_unknown_fields)] pub struct BraidCameraConfig { /// The name of the camera (e.g. "Basler-22005677") + /// + /// (This is the original UTF-8 camera name, not the ROS-encoded camera name + /// in which certain characters are not allowed.) pub name: String, /// Filename of vendor-specific camera settings file. pub camera_settings_filename: Option, @@ -275,11 +311,11 @@ pub struct PerCamSaveData { #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] pub struct RegisterNewCamera { /// The name of the camera as returned by the camera - pub orig_cam_name: RawCamName, + pub raw_cam_name: RawCamName, /// The name of the camera used in ROS (e.g. with '-' converted to '_'). pub ros_cam_name: RosCamName, /// Location of the camera control HTTP server. - pub http_camserver_info: Option, + pub http_camserver_info: Option, /// The camera settings. pub cam_settings_data: Option, /// The current image. @@ -347,7 +383,7 @@ impl ConnectedCameraSyncState { } #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] -pub struct HttpApiShared { +pub struct BraidHttpApiSharedState { pub fake_sync: bool, pub clock_model_copy: Option, pub csv_tables_dirname: Option, @@ -357,7 +393,7 @@ pub struct HttpApiShared { pub post_trigger_buffer_size: usize, pub calibration_filename: Option, pub connected_cameras: Vec, // TODO: make this a BTreeMap? 
- pub model_server_addr: Option, + pub model_server_addr: Option, pub flydra_app_name: String, pub all_expected_cameras_are_synced: bool, } @@ -369,47 +405,72 @@ pub struct RecentStats { pub points_detected: usize, } +/// Generic HTTP API server information +/// +/// This is used for both the Strand Camera BUI and the Braid BUI. #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] -pub enum StrandCamHttpServerInfo { +pub enum BuiServerInfo { /// No server is present (e.g. prerecorded data). NoServer, /// A server is available. - Server(StrandCamBuiServerInfo), + Server(BuiServerAddrInfo), } +/// HTTP API server access information +/// +/// This contains the address and access token. +/// +/// This is used for both the Strand Camera BUI and the Braid BUI. #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] -pub struct StrandCamBuiServerInfo { - /// The address of the camera control HTTP server. - addr: std::net::SocketAddr, - /// The token for initial connection to the camera control HTTP server. +pub struct BuiServerAddrInfo { + /// The address of the HTTP server. + addr: SocketAddr, + /// The token for initial connection to the HTTP server. 
     token: AccessToken,
-    resolved_addr: String,
 }
 
-impl StrandCamBuiServerInfo {
-    #[cfg(feature = "with-dns")]
-    pub fn new(addr: std::net::SocketAddr, token: AccessToken) -> Self {
-        let resolved_addr = if addr.ip().is_unspecified() {
-            format!("{}:{}", dns_lookup::get_hostname().unwrap(), addr.port())
-        } else {
-            format!("{}", addr)
+impl BuiServerAddrInfo {
+    pub fn new(addr: SocketAddr, token: AccessToken) -> Self {
+        Self { addr, token }
+    }
+
+    pub fn addr(&self) -> &SocketAddr {
+        &self.addr
+    }
+
+    pub fn token(&self) -> &AccessToken {
+        &self.token
+    }
+
+    #[cfg(feature = "build-urls")]
+    pub fn build_urls(&self) -> std::io::Result<Vec<http::Uri>> {
+        let query = match &self.token {
+            AccessToken::NoToken => "".to_string(),
+            AccessToken::PreSharedToken(tok) => format!("token={tok}"),
         };
-        Self {
-            addr,
-            token,
-            resolved_addr,
-        }
+
+        Ok(expand_unspecified_ip(self.addr.ip())?
+            .into_iter()
+            .map(|ip| {
+                http::uri::Builder::new()
+                    .scheme("http")
+                    .authority(format!("{ip}:{}", self.addr.port()))
+                    .path_and_query(format!("/?{query}"))
+                    .build()
+                    .unwrap()
+            })
+            .collect())
     }
 
-    #[cfg(feature = "with-dns")]
     pub fn parse_url_with_token(url: &str) -> Result<Self, FlydraTypesError> {
+        // TODO: replace this ugly implementation...
         let stripped = url
             .strip_prefix("http://")
             .ok_or(FlydraTypesError::UrlParseError)?;
         let first_slash = stripped.find('/');
         let (addr_str, token) = if let Some(slash_idx) = first_slash {
             let path = &stripped[slash_idx..];
-            if path.len() == 1 {
+            if path == "/" || path == "/?" {
                 (&stripped[..slash_idx], AccessToken::NoToken)
             } else {
                 let token_str = path[1..]
@@ -429,42 +490,81 @@ impl StrandCamBuiServerInfo { Ok(Self::new(addr, token)) } - pub fn guess_base_url_with_token(&self) -> String { - match self.token { - AccessToken::NoToken => format!("http://{}/", self.resolved_addr), - AccessToken::PreSharedToken(ref tok) => { - format!("http://{}/?token={}", self.resolved_addr, tok) - } - } - } - pub fn base_url(&self) -> String { format!("http://{}", self.addr) } +} - pub fn token(&self) -> &AccessToken { - &self.token +pub fn is_loopback(url: &http::Uri) -> bool { + let authority = match url.authority() { + None => return false, + Some(authority) => authority, + }; + match authority.host() { + "127.0.0.1" | "[::1]" => true, + // should we include "localhost"? only if it actually resolves? + _ => false, } } -#[cfg(feature = "with-dns")] -#[test] -fn test_bui_server_info() { - for addr_str in &[ - "127.0.0.1:1234", - // Ideally, we would also test unspecified addresses here. - // "0.0.0.0:222" - ] { - let addr1 = std::net::ToSocketAddrs::to_socket_addrs(addr_str) - .unwrap() - .next() - .unwrap(); - let bsi1 = StrandCamBuiServerInfo::new(addr1, AccessToken::PreSharedToken("token1".into())); +// ----- + +#[cfg(feature = "start-listener")] +pub async fn start_listener( + address_string: &str, +) -> anyhow::Result<(tokio::net::TcpListener, BuiServerAddrInfo)> { + let socket_addr = std::net::ToSocketAddrs::to_socket_addrs(&address_string)? 
+        .next()
+        .ok_or_else(|| anyhow::anyhow!("no address found for HTTP server"))?;
+
+    let listener = tokio::net::TcpListener::bind(socket_addr).await?;
+    let listener_local_addr = listener.local_addr()?;
+    let token_config = if !listener_local_addr.ip().is_loopback() {
+        Some(axum_token_auth::TokenConfig::new_token("token"))
+    } else {
+        None
+    };
+    let token = match token_config {
+        None => bui_backend_session_types::AccessToken::NoToken,
+        Some(cfg) => bui_backend_session_types::AccessToken::PreSharedToken(cfg.value.clone()),
+    };
+    let http_camserver_info = BuiServerAddrInfo::new(listener_local_addr, token);
+
+    Ok((listener, http_camserver_info))
+}
+
+// -----
+
+#[cfg(feature = "build-urls")]
+fn expand_unspecified_ip(ip: IpAddr) -> std::io::Result<Vec<IpAddr>> {
+    if ip.is_unspecified() {
+        // Get all interfaces if IP is unspecified.
+        Ok(if_addrs::get_if_addrs()?
+            .iter()
+            .filter_map(|x| {
+                let this_ip = x.addr.ip();
+                // Take only IP addresses from correct family.
+                if ip.is_ipv4() == this_ip.is_ipv4() {
+                    Some(this_ip)
+                } else {
+                    None
+                }
+            })
+            .collect())
+    } else {
+        Ok(vec![ip])
+    }
+}
 
-    let url1 = bsi1.guess_base_url_with_token();
-    let test1 = StrandCamBuiServerInfo::parse_url_with_token(&url1).unwrap();
-    let url2 = test1.guess_base_url_with_token();
-    assert_eq!(url1, url2);
+#[cfg(feature = "build-urls")]
+pub fn expand_unspecified_addr(addr: SocketAddr) -> std::io::Result<Vec<SocketAddr>> {
+    if addr.ip().is_unspecified() {
+        Ok(expand_unspecified_ip(addr.ip())?
+            .into_iter()
+            .map(|ip| SocketAddr::new(ip, addr.port()))
+            .collect())
+    } else {
+        Ok(vec![addr])
     }
 }
 
@@ -785,16 +885,19 @@ pub fn make_hypothesis_test_full3d_default() -> HypothesisTestParams {
 
 #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
 pub struct CamInfo {
-    pub name: RosCamName,
+    pub name: RawCamName,
     pub state: ConnectedCameraSyncState,
-    pub http_camserver_info: StrandCamHttpServerInfo,
+    pub strand_cam_http_server_info: BuiServerInfo,
     pub recent_stats: RecentStats,
 }
 
-/// Messages to the mainbrain
+/// Messages to Braid
 #[derive(Clone, Debug, Serialize, Deserialize)]
-pub enum HttpApiCallback {
+pub enum BraidHttpApiCallback {
     /// Called from strand-cam to register a camera
+    ///
+    /// Note this is different than the `cam_info_handler` which only queries
+    /// for the appropriate camera configuration.
     NewCamera(RegisterNewCamera),
     /// Called from strand-cam to update the current image
     UpdateCurrentImage(PerCam),
@@ -819,8 +922,7 @@ pub enum HttpApiCallback {
 
 #[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
 pub struct PerCam<T> {
-    /// The name of the camera used in ROS (e.g. with '-' converted to '_').
-    pub ros_cam_name: RosCamName,
+    pub raw_cam_name: RawCamName,
     pub inner: T,
 }
 
@@ -829,7 +931,7 @@ pub struct FlydraRawUdpPacket {
     /// The name of the camera
     ///
     /// Traditionally this was the ROS camera name (e.g. with '-' converted to
-    /// '_'), but we should transition to allowing any valid UTF-8 string.
pub cam_name: String, /// frame timestamp of trigger pulse start (or None if cannot be determined) #[serde(with = "crate::timestamp_opt_f64")] @@ -891,24 +993,24 @@ pub enum FlydraTypesError { UrlParseError, } -#[derive(Deserialize, Serialize, Debug)] +#[derive(Deserialize, Serialize, Debug, Clone)] pub struct AddrInfoUnixDomainSocket { pub filename: String, } #[derive(Deserialize, Serialize, Debug, Clone)] pub struct AddrInfoIP { - inner: std::net::SocketAddr, + inner: SocketAddr, } impl AddrInfoIP { - pub fn from_socket_addr(src: &std::net::SocketAddr) -> Self { + pub fn from_socket_addr(src: &SocketAddr) -> Self { Self { inner: *src } } - pub fn to_socket_addr(&self) -> std::net::SocketAddr { + pub fn to_socket_addr(&self) -> SocketAddr { self.inner } - pub fn ip(&self) -> std::net::IpAddr { + pub fn ip(&self) -> IpAddr { self.inner.ip() } pub fn port(&self) -> u16 { @@ -916,7 +1018,7 @@ impl AddrInfoIP { } } -#[derive(Debug)] +#[derive(Debug, Clone)] pub enum RealtimePointsDestAddr { UnixDomainSocket(AddrInfoUnixDomainSocket), IpAddr(AddrInfoIP), @@ -932,7 +1034,7 @@ impl RealtimePointsDestAddr { } #[derive(Debug, Clone)] -pub struct MainbrainBuiLocation(pub StrandCamBuiServerInfo); +pub struct MainbrainBuiLocation(pub BuiServerAddrInfo); #[derive(Debug, Serialize, Deserialize, PartialEq, Eq)] pub struct TriggerClockInfoRow { @@ -946,12 +1048,6 @@ pub struct TriggerClockInfoRow { pub stop_timestamp: FlydraFloatTimestampLocal, } -#[derive(Deserialize, Serialize, Debug, Clone)] -pub struct StaticMainbrainInfo { - pub name: String, - pub version: String, -} - bitflags! 
{ #[derive(Serialize, Deserialize)] pub struct ImageProcessingSteps: u8 { diff --git a/flydra2/Cargo.toml b/flydra2/Cargo.toml index 290945dbb..6b5d9b82e 100644 --- a/flydra2/Cargo.toml +++ b/flydra2/Cargo.toml @@ -9,8 +9,7 @@ rust-version = "1.59" path = "src/flydra2.rs" [build-dependencies] -walkdir = { version = "2.2.5", optional = true } -includedir_codegen = { version = "0.5", optional = true } +build-util = { path = "../build-util" } [dependencies] thiserror = "1.0.33" @@ -34,19 +33,18 @@ parry3d-f64 = "0.13.5" alga = "0.9" configure = "0.1.1" itertools = "0.8" +axum = "0.7.4" http = "1.0" -hyper = { version = "1.1", features = ["server", "http1"] } +# hyper = { version = "1.1", features = ["server", "http1"] } tokio = { version = "1.0.1", default-features = false, features = [ "macros", + "net", "rt", "rt-multi-thread", "sync", "time", ] } tokio-stream = { version = "0.1.8" } -stream-cancel = "0.8" -includedir = { version = "0.5", optional = true } -phf = { version = "0.7.23", optional = true } libflate = "0.1" zip = { version = "0.6.3", default-features = false, features = ["time"] } machine-vision-formats = "0.1" @@ -60,6 +58,10 @@ adskalman = "0.15" pretty-print-nalgebra = "0.1.0" nalgebra-mvn = "0.14" iana-time-zone = "0.1" +tower-http = { version = "0.5.0", features = ["fs"], optional = true } +tower-serve-static = { version = "0.1", optional = true } +include_dir = { version = "0.7.3", optional = true } +const_format = "0.2.32" braidz-types = { path = "../braidz-types" } braidz-writer = { path = "../braid/braidz-writer" } @@ -75,6 +77,7 @@ withkey = { path = "../withkey" } simple-frame = { path = "../simple-frame" } convert-image = { path = "../convert-image" } strand-cam-csv-config-types = { path = "../strand-cam-csv-config-types" } +event-stream-types = { path = "../event-stream-types" } [dev-dependencies] tempfile = "3.4.0" @@ -85,8 +88,8 @@ download-verify = { path = "../download-verify" } default = ["bundle_files"] # must pick one of the following 
two: -bundle_files = ["walkdir", "includedir_codegen", "includedir", "phf"] -serve_files = [] +bundle_files = ["tower-serve-static", "include_dir"] +serve_files = ["tower-http"] braid = [] diff --git a/flydra2/build.rs b/flydra2/build.rs index 9d1ad9ee9..68b87f456 100644 --- a/flydra2/build.rs +++ b/flydra2/build.rs @@ -1,84 +1,7 @@ -#[cfg(any(feature = "bundle_files", feature = "serve_files"))] -use std::error::Error; -#[cfg(any(feature = "bundle_files", feature = "serve_files"))] -use std::path::Path; - -use std::process::Command; - -fn git_hash() { - let output = Command::new("git") - .args(["rev-parse", "HEAD"]) - .output() - .expect("git"); - let git_hash = String::from_utf8(output.stdout).expect("from_utf8"); - println!("cargo:rustc-env=GIT_HASH={}", git_hash); -} - -/// Do codegen to write a file (`codegen_fname`) which includes -/// the contents of all entries in `files_dir`. -#[cfg(feature = "bundle_files")] -fn create_codegen_file(files_dir: P, codegen_fname: Q) -> Result<(), std::io::Error> -where - P: AsRef, - Q: AsRef, -{ - // Collect list of files to include - let entries = walkdir::WalkDir::new(files_dir.as_ref()) - .into_iter() - .map(|entry| entry.expect("DirEntry error").path().into()) - .collect::>(); - - // Make sure we recompile if these files change - println!("cargo:rerun-if-changed={}", files_dir.as_ref().display()); - for entry in entries.iter() { - println!("cargo:rerun-if-changed={}", entry.display()); - } - - // Check that at least one of the needed files is there. - let required: std::path::PathBuf = files_dir.as_ref().join("index.html"); - if !entries.contains(&required) { - return Err(std::io::Error::new( - std::io::ErrorKind::Other, - format!("no {:?} file (hint: run make in elm_frontend)", required), - )); - } - - let codegen_fname_str = format!("{}", codegen_fname.as_ref().display()); - // Write the contents of the files. 
- includedir_codegen::start("PUBLIC") - .dir(files_dir, includedir_codegen::Compression::Gzip) - .build(&codegen_fname_str)?; - Ok(()) -} - -/// Create an empty file (`codegen_fname`). -#[cfg(feature = "serve_files")] -fn create_codegen_file(_: P, codegen_fname: Q) -> Result<(), Box> -where - P: AsRef, - Q: AsRef, -{ - let out_dir = std::env::var("OUT_DIR")?; - let dest_path = std::path::Path::new(&out_dir).join(codegen_fname); - std::fs::File::create(dest_path)?; - Ok(()) -} - -#[cfg(any(feature = "bundle_files", feature = "serve_files"))] -pub fn codegen(files_dir: P, generated_path: Q) -> Result<(), Box> -where - P: AsRef, - Q: AsRef, -{ - create_codegen_file(&files_dir, &generated_path)?; - Ok(()) -} - #[cfg(not(any(feature = "bundle_files", feature = "serve_files")))] compile_error!("Need cargo feature \"bundle_files\" or \"serve_files\""); -fn main() { - #[cfg(any(feature = "bundle_files", feature = "serve_files"))] - codegen("static", "public.rs").expect("codegen failed"); - git_hash(); +fn main() -> Result<(), Box<(dyn std::error::Error)>> { + build_util::git_hash(env!("CARGO_PKG_VERSION"))?; + Ok(()) } diff --git a/flydra2/src/bin/send_pose.rs b/flydra2/src/bin/send_pose.rs index 800e204d6..a8f45efb6 100644 --- a/flydra2/src/bin/send_pose.rs +++ b/flydra2/src/bin/send_pose.rs @@ -1,34 +1,21 @@ use chrono::Local; -use std::{sync::Arc, time::Instant}; -use tracing::info; +use std::time::Instant; use flydra2::{new_model_server, Result, SendType, TimeDataPassthrough}; use flydra_types::{FlydraFloatTimestampLocal, KalmanEstimatesRow, SyncFno, Triggerbox}; -fn main() -> Result<()> { +#[tokio::main] +async fn main() -> Result<()> { let _tracing_guard = env_tracing_logger::init(); - let runtime = tokio::runtime::Builder::new_multi_thread() - .enable_all() - .build()?; - - let runtime = Arc::new(runtime); - runtime.block_on(inner(runtime.handle().clone())) -} - -async fn inner(rt_handle: tokio::runtime::Handle) -> Result<()> { - let addr = 
flydra_types::DEFAULT_MODEL_SERVER_ADDR.parse().unwrap(); - info!("send_pose server at {}", addr); - let info = flydra_types::StaticMainbrainInfo { - name: env!("CARGO_PKG_NAME").into(), - version: env!("CARGO_PKG_VERSION").into(), - }; - - let (_quit_trigger, valve) = stream_cancel::Valve::new(); + let addr: std::net::SocketAddr = flydra_types::DEFAULT_MODEL_SERVER_ADDR.parse().unwrap(); + println!("send_pose server at {}", &addr); let (data_tx, data_rx) = tokio::sync::mpsc::channel(50); - new_model_server(data_rx, valve, &addr, info, rt_handle).await?; + let model_server_future = new_model_server(data_rx, addr); + + tokio::spawn(async { model_server_future.await }); let starti = Instant::now(); diff --git a/flydra2/src/bundled_data.rs b/flydra2/src/bundled_data.rs index 434c4c6be..c0f215307 100644 --- a/flydra2/src/bundled_data.rs +++ b/flydra2/src/bundled_data.rs @@ -1,6 +1,6 @@ use std::collections::BTreeMap; -use flydra_types::{MiniArenaConfig, RosCamName}; +use flydra_types::{MiniArenaConfig, RawCamName}; use nalgebra::Point2; use crate::connected_camera_manager::CameraList; @@ -39,7 +39,7 @@ pub(crate) struct MiniArenaPointPerCam { #[derive(Debug, Default)] pub(crate) struct PerMiniArenaAllCamsOneFrameUndistorted { - pub(crate) per_cam: BTreeMap>, + pub(crate) per_cam: BTreeMap>, } // impl PerMiniArenaAllCamsOneFrameUndistorted { @@ -176,9 +176,9 @@ impl BundledAllCamsOneFrameDistorted { let is_new = self.cameras.inner.insert(fdp.frame_data.cam_num.0); assert!( is_new, - "Received data twice: camera={}, orig frame={}. \ + "Received data twice: camera=\"{}\", orig frame={}. 
\ new frame={}", - fdp.frame_data.cam_name, + fdp.frame_data.cam_name.as_str(), self.frame().0, fdp.frame_data.synced_frame.0 ); @@ -192,7 +192,7 @@ impl BundledAllCamsOneFrameDistorted { &self.cameras } - #[tracing::instrument] + #[tracing::instrument(level = "debug", skip_all)] pub(crate) fn undistort_and_split_to_mini_arenas( self, recon: &flydra_mvg::FlydraMultiCameraSystem, diff --git a/flydra2/src/connected_camera_manager.rs b/flydra2/src/connected_camera_manager.rs index ff479f386..08fc3382a 100644 --- a/flydra2/src/connected_camera_manager.rs +++ b/flydra2/src/connected_camera_manager.rs @@ -6,8 +6,7 @@ use tracing::{debug, error, info}; use crate::{safe_u8, CamInfoRow, MyFloat}; use flydra_types::{ - CamInfo, CamNum, ConnectedCameraSyncState, RawCamName, RecentStats, RosCamName, - StrandCamHttpServerInfo, SyncFno, + BuiServerInfo, CamInfo, CamNum, ConnectedCameraSyncState, RawCamName, RecentStats, SyncFno, }; pub(crate) trait HasCameraList { @@ -39,10 +38,9 @@ impl HasCameraList for CameraList { #[derive(Debug)] pub struct ConnectedCameraInfo { cam_num: CamNum, - orig_cam_name: RawCamName, - ros_cam_name: RosCamName, + raw_cam_name: RawCamName, sync_state: ConnectedCameraSyncState, - http_camserver_info: StrandCamHttpServerInfo, + http_camserver_info: BuiServerInfo, frames_during_sync: u64, } @@ -50,20 +48,20 @@ impl ConnectedCameraInfo { fn copy_to_caminfo(&self) -> CamInfoRow { CamInfoRow { camn: self.cam_num, - cam_id: self.ros_cam_name.as_str().to_string(), + cam_id: self.raw_cam_name.as_str().to_string(), } } } #[derive(Debug)] struct ConnectedCamerasManagerInner { - all_expected_cameras: BTreeSet, + all_expected_cameras: BTreeSet, next_cam_num: CamNum, - ccis: BTreeMap, - not_yet_connected: BTreeMap, + ccis: BTreeMap, + not_yet_connected: BTreeMap, all_expected_cameras_are_present: bool, all_expected_cameras_are_synced: bool, - first_frame_arrived: BTreeSet, + first_frame_arrived: BTreeSet, } pub trait ConnectedCamCallback: Send { @@ -100,7 +98,7 @@ 
impl HasCameraList for ConnectedCamerasManager { impl ConnectedCamerasManager { pub fn new( recon: &Option>, - all_expected_cameras: BTreeSet, + all_expected_cameras: BTreeSet, signal_all_cams_present: Arc, signal_all_cams_synced: Arc, ) -> Self { @@ -109,9 +107,9 @@ impl ConnectedCamerasManager { // pre-reserve cam numbers for cameras in calibration let next_cam_num = if let Some(ref recon) = recon { for (base_num, cam_name) in recon.cam_names().enumerate() { - let ros_cam_name = RosCamName::new(cam_name.to_string()); + let raw_cam_name = RawCamName::new(cam_name.to_string()); let cam_num: CamNum = safe_u8(base_num).into(); - not_yet_connected.insert(ros_cam_name, cam_num); + not_yet_connected.insert(raw_cam_name, cam_num); } safe_u8(recon.len()) } else { @@ -147,9 +145,9 @@ impl ConnectedCamerasManager { for cam_name in recon.cam_names() { let cam_num = next_cam_num; next_cam_num = safe_u8(next_cam_num as usize + 1); - let ros_cam_name = RosCamName::new(cam_name.to_string()); + let raw_cam_name = RawCamName::new(cam_name.to_string()); let cam_num: CamNum = cam_num.into(); - not_yet_connected.insert(ros_cam_name, cam_num); + not_yet_connected.insert(raw_cam_name, cam_num); } } @@ -163,11 +161,8 @@ impl ConnectedCamerasManager { for cam_info in old_ccis.values() { // This calls self.notify_cam_changed_listeners(): - self.register_new_camera( - &cam_info.orig_cam_name, - &cam_info.http_camserver_info, - &cam_info.ros_cam_name, - ); + self.register_new_camera(&cam_info.raw_cam_name, &cam_info.http_camserver_info) + .unwrap(); } } @@ -176,7 +171,6 @@ impl ConnectedCamerasManager { &mut self, f: Box, ) -> Option> { - info!("setting listener for new cameras info"); let old = { let mut mutex_guard = self.on_cam_change_func.lock(); mutex_guard.replace(f) @@ -198,9 +192,9 @@ impl ConnectedCamerasManager { .ccis .values() .map(|cci| CamInfo { - name: cci.ros_cam_name.clone(), + name: cci.raw_cam_name.clone(), state: cci.sync_state.clone(), - http_camserver_info: 
cci.http_camserver_info.clone(), + strand_cam_http_server_info: cci.http_camserver_info.clone(), recent_stats: RecentStats::default(), }) .collect() @@ -214,17 +208,15 @@ impl ConnectedCamerasManager { /// See `new` and `register_new_camera` for the case when multiple cameras /// will be added. pub fn new_single_cam( - orig_cam_name: &RawCamName, - http_camserver_info: &StrandCamHttpServerInfo, + raw_cam_name: &RawCamName, + http_camserver_info: &BuiServerInfo, recon: &Option>, ) -> Self { - let ros_cam_name = orig_cam_name.to_ros(); - let signal_all_cams_present = Arc::new(AtomicBool::new(false)); let signal_all_cams_synced = Arc::new(AtomicBool::new(false)); let mut all_expected_cameras = BTreeSet::new(); - all_expected_cameras.insert(ros_cam_name.clone()); + all_expected_cameras.insert(raw_cam_name.clone()); let this = Self::new( recon, @@ -233,26 +225,26 @@ impl ConnectedCamerasManager { signal_all_cams_synced, ); { - let orig_cam_name = orig_cam_name.clone(); + let raw_cam_name = raw_cam_name.clone(); let mut inner = this.inner.write(); assert!( - !inner.ccis.contains_key(&ros_cam_name), + !inner.ccis.contains_key(&raw_cam_name), "camera connecting again?" 
); - let cam_num = if let Some(pre_existing) = inner.not_yet_connected.remove(&ros_cam_name) + let cam_num = if let Some(pre_existing) = inner.not_yet_connected.remove(&raw_cam_name) { debug!( "registering camera {}, which is in existing calibration", - ros_cam_name.as_str() + raw_cam_name.as_str() ); pre_existing } else { debug!( "registering camera {}, which is not in existing calibration", - ros_cam_name.as_str() + raw_cam_name.as_str() ); // unknown (and thus un-calibrated) camera let cam_num = inner.next_cam_num; @@ -261,11 +253,10 @@ impl ConnectedCamerasManager { }; inner.ccis.insert( - ros_cam_name.clone(), + raw_cam_name.clone(), ConnectedCameraInfo { cam_num, - orig_cam_name, - ros_cam_name, + raw_cam_name, sync_state: ConnectedCameraSyncState::Unsynchronized, http_camserver_info: http_camserver_info.clone(), frames_during_sync: 0, @@ -275,8 +266,8 @@ impl ConnectedCamerasManager { this } - pub fn remove(&mut self, ros_cam_name: &RosCamName) { - self.inner.write().ccis.remove(ros_cam_name); + pub fn remove(&mut self, raw_cam_name: &RawCamName) { + self.inner.write().ccis.remove(raw_cam_name); self.notify_cam_changed_listeners(); } @@ -286,34 +277,30 @@ impl ConnectedCamerasManager { /// added. pub fn register_new_camera( &mut self, - orig_cam_name: &RawCamName, - http_camserver_info: &StrandCamHttpServerInfo, - ros_cam_name: &RosCamName, - ) { - let orig_cam_name = orig_cam_name.clone(); - let ros_cam_name = ros_cam_name.clone(); + raw_cam_name: &RawCamName, + http_camserver_info: &BuiServerInfo, + ) -> Result<(), &'static str> { + let raw_cam_name = raw_cam_name.clone(); let cam_num = { // This scope is for the write lock on self.inner. Keep it minimal. 
let mut inner = self.inner.write(); - assert!( - !inner.ccis.contains_key(&ros_cam_name), - "camera {} already connected", - ros_cam_name - ); + if inner.ccis.contains_key(&raw_cam_name) { + return Err("camera already connected"); + } - let cam_num = if let Some(pre_existing) = inner.not_yet_connected.remove(&ros_cam_name) + let cam_num = if let Some(pre_existing) = inner.not_yet_connected.remove(&raw_cam_name) { debug!( "registering camera {}, which is in existing calibration", - ros_cam_name.as_str() + raw_cam_name.as_str() ); pre_existing } else { if self.recon.is_some() { tracing::warn!( "Camera {} connected, but this is not in existing calibration.", - ros_cam_name.as_str() + raw_cam_name.as_str() ); } // unknown (and thus un-calibrated) camera @@ -323,11 +310,10 @@ impl ConnectedCamerasManager { }; inner.ccis.insert( - ros_cam_name.clone(), + raw_cam_name.clone(), ConnectedCameraInfo { cam_num: cam_num.clone(), - orig_cam_name: orig_cam_name.clone(), - ros_cam_name: ros_cam_name.clone(), + raw_cam_name: raw_cam_name.clone(), sync_state: ConnectedCameraSyncState::Unsynchronized, http_camserver_info: http_camserver_info.clone(), frames_during_sync: 0, @@ -336,13 +322,13 @@ impl ConnectedCamerasManager { cam_num }; info!( - "register_new_camera got original camera name \"{}\", \ - ROS camera name \"{}\", assigned camera number {}", - orig_cam_name.as_str(), - ros_cam_name.as_str(), + "register_new_camera got camera name \"{}\", \ + assigned camera number {}", + raw_cam_name.as_str(), cam_num ); self.notify_cam_changed_listeners(); + Ok(()) } /// Register that a new frame was received @@ -361,7 +347,7 @@ impl ConnectedCamerasManager { { assert!(packet.framenumber >= 0); - let ros_cam_name = RosCamName::new(packet.cam_name.clone()); + let raw_cam_name = RawCamName::new(packet.cam_name.clone()); let cam_frame = packet.framenumber as u64; let mut synced_frame = None; @@ -371,7 +357,7 @@ impl ConnectedCamerasManager { let mut do_check_if_all_cameras_synchronized = 
false; { let inner = self.inner.read(); - if let Some(cci) = inner.ccis.get(&ros_cam_name) { + if let Some(cci) = inner.ccis.get(&raw_cam_name) { // We know this camera already. use crate::ConnectedCameraSyncState::*; match cci.sync_state { @@ -412,7 +398,7 @@ impl ConnectedCamerasManager { // raises the issue slightly earlier. panic!( "Impossible frame number. cam_name: {}, cam_frame: {}, frame0: {}", - ros_cam_name.as_str(), + raw_cam_name.as_str(), cam_frame, frame0, ); @@ -434,7 +420,7 @@ impl ConnectedCamerasManager { let frames_during_sync = { // This scope is for the write lock on self.inner. Keep it minimal. let mut inner = self.inner.write(); - let frames_during_sync = match inner.ccis.get_mut(&ros_cam_name) { + let frames_during_sync = match inner.ccis.get_mut(&raw_cam_name) { Some(cci) => { cci.frames_during_sync += 1; cci.frames_during_sync @@ -448,9 +434,9 @@ impl ConnectedCamerasManager { if frames_during_sync > 10 { error!( - "Many frames during sync period. Camera {} not \ + "Many frames during sync period. Camera \"{}\" not \ being externally triggered?", - ros_cam_name.as_str() + raw_cam_name.as_str() ); } } @@ -460,7 +446,7 @@ impl ConnectedCamerasManager { { // This scope is for the write lock on self.inner. Keep it minimal. let mut inner = self.inner.write(); - match inner.ccis.get_mut(&ros_cam_name) { + match inner.ccis.get_mut(&raw_cam_name) { Some(cci) => { cci.sync_state = ConnectedCameraSyncState::Synchronized(frame0); } @@ -475,8 +461,8 @@ impl ConnectedCamerasManager { // Do notifications associated with synchronization. 
send_new_frame_offset(frame0); info!( - "cam {} synchronized with frame offset: {}", - ros_cam_name.as_str(), + "cam \"{}\" synchronized with frame offset: {}", + raw_cam_name.as_str(), frame0, ); do_check_if_all_cameras_synchronized = true; @@ -485,8 +471,11 @@ impl ConnectedCamerasManager { if do_check_if_all_cameras_present && !self.inner.read().all_expected_cameras_are_present { let mut inner = self.inner.write(); let i2: &mut ConnectedCamerasManagerInner = &mut inner; - if i2.first_frame_arrived.insert(ros_cam_name.clone()) { - info!("first frame from camera {} arrived.", ros_cam_name); + if i2.first_frame_arrived.insert(raw_cam_name.clone()) { + info!( + "first frame from camera \"{}\" arrived.", + raw_cam_name.as_str() + ); if i2.first_frame_arrived == i2.all_expected_cameras { inner.all_expected_cameras_are_present = true; self.signal_all_cams_present.store(true, Ordering::SeqCst); @@ -502,13 +491,13 @@ impl ConnectedCamerasManager { { let mut inner = self.inner.write(); let i2: &mut ConnectedCamerasManagerInner = &mut inner; - // if i2.first_frame_arrived.insert(ros_cam_name.clone()) { - // info!("first frame from camera {} arrived.", ros_cam_name); + // if i2.first_frame_arrived.insert(raw_cam_name.clone()) { + // info!("first frame from camera {} arrived.", raw_cam_name); let mut all_synced = true; - for ros_cam_name in i2.all_expected_cameras.iter() { + for raw_cam_name in i2.all_expected_cameras.iter() { let this_sync = i2 .ccis - .get(ros_cam_name) + .get(raw_cam_name) .map(|cci| cci.sync_state.is_synchronized()) .unwrap_or(false); if !this_sync { @@ -528,40 +517,37 @@ impl ConnectedCamerasManager { synced_frame.map(SyncFno) } - pub fn get_ros_cam_name(&self, cam_num: CamNum) -> Option { + pub fn get_raw_cam_name(&self, cam_num: CamNum) -> Option { for cci in self.inner.read().ccis.values() { if cci.cam_num == cam_num { - return Some(cci.ros_cam_name.clone()); + return Some(cci.raw_cam_name.clone()); } } None } - pub fn all_ros_cam_names(&self) -> Vec 
{ + pub fn all_raw_cam_names(&self) -> Vec { self.inner .read() .ccis .values() - .map(|cci| cci.ros_cam_name.clone()) + .map(|cci| cci.raw_cam_name.clone()) .collect() } - pub fn http_camserver_info( - &self, - ros_cam_name: &RosCamName, - ) -> Option { + pub fn http_camserver_info(&self, raw_cam_name: &RawCamName) -> Option { self.inner .read() .ccis - .get(ros_cam_name) + .get(raw_cam_name) .map(|cci| cci.http_camserver_info.clone()) } - pub fn cam_num(&self, ros_cam_name: &RosCamName) -> Option { + pub fn cam_num(&self, raw_cam_name: &RawCamName) -> Option { let inner = self.inner.read(); - match inner.ccis.get(ros_cam_name) { + match inner.ccis.get(raw_cam_name) { Some(cci) => Some(cci.cam_num), - None => inner.not_yet_connected.get(ros_cam_name).copied(), + None => inner.not_yet_connected.get(raw_cam_name).copied(), } } diff --git a/flydra2/src/error.rs b/flydra2/src/error.rs index cd832a2e9..ae767474d 100644 --- a/flydra2/src/error.rs +++ b/flydra2/src/error.rs @@ -49,13 +49,13 @@ pub enum Error { #[cfg(feature = "backtrace")] backtrace: Backtrace, }, - #[error("{source}")] - HyperError { - #[from] - source: hyper::Error, - #[cfg(feature = "backtrace")] - backtrace: Backtrace, - }, + // #[error("{source}")] + // HyperError { + // #[from] + // source: hyper::Error, + // #[cfg(feature = "backtrace")] + // backtrace: Backtrace, + // }, #[error("{source}")] TomlSerError { #[from] diff --git a/flydra2/src/flydra2.rs b/flydra2/src/flydra2.rs index 912cebecd..e245c6e66 100644 --- a/flydra2/src/flydra2.rs +++ b/flydra2/src/flydra2.rs @@ -34,7 +34,7 @@ pub use braidz_types::BraidMetadata; use flydra_types::{ CamInfoRow, CamNum, ConnectedCameraSyncState, DataAssocRow, FlydraFloatTimestampLocal, - HostClock, KalmanEstimatesRow, RosCamName, SyncFno, TextlogRow, TrackingParams, + HostClock, KalmanEstimatesRow, RawCamName, SyncFno, TextlogRow, TrackingParams, TriggerClockInfoRow, Triggerbox, RECONSTRUCT_LATENCY_HLOG_FNAME, REPROJECTION_DIST_HLOG_FNAME, }; pub use 
flydra_types::{Data2dDistortedRow, Data2dDistortedRowF32}; @@ -59,7 +59,7 @@ mod tracking_core; mod mini_arenas; mod model_server; -pub use crate::model_server::{new_model_server, ModelServer, SendKalmanEstimatesRow, SendType}; +pub use crate::model_server::{new_model_server, SendKalmanEstimatesRow, SendType}; use crate::contiguous_stream::make_contiguous; use crate::frame_bundler::bundle_frames; @@ -307,7 +307,9 @@ fn to_world_point(vec6: &OVector) -> PointWorldFrame #[derive(Clone, Debug, PartialEq)] pub struct FrameData { /// camera name as kept by mvg::MultiCamSystem - pub cam_name: RosCamName, + /// + /// This can be any UTF-8 string. + pub cam_name: RawCamName, /// camera identification number pub cam_num: CamNum, /// framenumber after synchronization @@ -327,7 +329,7 @@ pub struct FrameData { impl FrameData { #[inline] pub fn new( - cam_name: RosCamName, + cam_name: RawCamName, cam_num: CamNum, synced_frame: SyncFno, trigger_timestamp: Option>, @@ -701,7 +703,7 @@ pub struct StartSavingCsvConfig { pub local: Option>, pub git_rev: String, pub fps: Option, - pub per_cam_data: BTreeMap, + pub per_cam_data: BTreeMap, pub print_stats: bool, pub save_performance_histograms: bool, } @@ -768,14 +770,12 @@ pub struct CoordProcessor { } impl CoordProcessor { - #[tracing::instrument] + #[tracing::instrument(level = "debug", skip_all)] pub fn new( cfg: CoordProcessorConfig, - handle: tokio::runtime::Handle, cam_manager: ConnectedCamerasManager, recon: Option>, metadata_builder: BraidMetadataBuilder, - valve: stream_cancel::Valve, ) -> Result { let CoordProcessorConfig { tracking_params, @@ -867,14 +867,14 @@ impl CoordProcessor { /// Upon completion, returns a [tokio::task::JoinHandle] from a spawned /// writing task. To ensure data is completely saved, this should be driven /// to completion before ending the process. 
- #[tracing::instrument] + #[tracing::instrument(level = "debug", skip_all)] pub async fn consume_stream( mut self, frame_data_rx: S, expected_framerate: Option, ) -> std::thread::JoinHandle> where - S: 'static + Send + futures::stream::Stream + std::fmt::Debug + Unpin, + S: 'static + Send + futures::stream::Stream, { let mut prev_frame = SyncFno(0); use futures::stream::StreamExt; @@ -1058,7 +1058,7 @@ impl CoordProcessor { #[derive(Debug, Clone)] pub(crate) struct CamAndDist { - pub(crate) ros_cam_name: RosCamName, + pub(crate) raw_cam_name: RawCamName, /// The reprojection distance of the undistorted pixels. pub(crate) reproj_dist: MyFloat, } diff --git a/flydra2/src/frame_bundler.rs b/flydra2/src/frame_bundler.rs index 25d168b86..a3f1018d0 100644 --- a/flydra2/src/frame_bundler.rs +++ b/flydra2/src/frame_bundler.rs @@ -149,9 +149,9 @@ fn test_frame_bundler() { use crate::{FlydraFloatTimestampLocal, FrameData, SyncFno}; - let cam_name_1 = crate::RosCamName::new("cam1".into()); + let cam_name_1 = crate::RawCamName::new("cam1".into()); let cam_num_1 = crate::CamNum(1); - let cam_name_2 = crate::RosCamName::new("cam2".into()); + let cam_name_2 = crate::RawCamName::new("cam2".into()); let cam_num_2 = crate::CamNum(2); let trigger_timestamp = None; diff --git a/flydra2/src/model_server.rs b/flydra2/src/model_server.rs index 830706290..701b561a4 100644 --- a/flydra2/src/model_server.rs +++ b/flydra2/src/model_server.rs @@ -1,231 +1,77 @@ -use tracing::{debug, error, info}; +use tracing::{debug, info}; -use std::{future::Future, pin::Pin}; +use std::sync::{Arc, RwLock}; -use futures::sink::SinkExt; +use http_body::Frame; use serde::{Deserialize, Serialize}; -use futures::stream::StreamExt; -use http_body_util::BodyExt; -use hyper::header::ACCEPT; -use hyper::{Method, Response, StatusCode}; +use event_stream_types::{AcceptsEventStream, EventBroadcaster}; use crate::{Result, TimeDataPassthrough}; -use flydra_types::{FlydraFloatTimestampLocal, StaticMainbrainInfo, 
SyncFno, Triggerbox}; +use flydra_types::{FlydraFloatTimestampLocal, SyncFno, Triggerbox}; -#[cfg(any(feature = "bundle_files", feature = "serve_files"))] -include!(concat!(env!("OUT_DIR"), "/public.rs")); // Despite slash, this does work on Windows. +const EVENTS_PATH: &str = "/events"; -pub type EventChunkSender = tokio::sync::mpsc::Sender; +#[cfg(feature = "bundle_files")] +static ASSETS_DIR: include_dir::Dir<'static> = + include_dir::include_dir!("$CARGO_MANIFEST_DIR/static"); -#[derive(Debug)] -pub struct NewEventStreamConnection { - /// A sink for messages send to each connection (one per client tab). - pub chunk_sender: EventChunkSender, -} - -#[derive(Clone)] -struct ModelService { - events_path: String, - config_serve_filepath: String, - config_channel_size: usize, - tx_new_connection: futures::channel::mpsc::Sender, - info: StaticMainbrainInfo, - valve: stream_cancel::Valve, - rt_handle: tokio::runtime::Handle, -} - -impl ModelService { - fn new( - valve: stream_cancel::Valve, - tx_new_connection: futures::channel::mpsc::Sender, - info: StaticMainbrainInfo, - rt_handle: tokio::runtime::Handle, - ) -> Self { - Self { - valve, - events_path: "/events".to_string(), - config_serve_filepath: "static".to_string(), - config_channel_size: 100, - tx_new_connection, - info, - rt_handle, +async fn events_handler( + axum::extract::State(app_state): axum::extract::State, + _: AcceptsEventStream, +) -> impl axum::response::IntoResponse { + let key = { + let mut next_connection_id = app_state.next_connection_id.write().unwrap(); + let key = *next_connection_id; + *next_connection_id += 1; + key + }; + let (tx, body) = app_state.event_broadcaster.new_connection(key); + + // If we have a calibration, extract it. 
+ let cal_data = { + // scope for read lock on app_state.current_calibration + let current_calibration = app_state.current_calibration.read().unwrap(); + if let Some((cal_data, tdpt)) = &*current_calibration { + let data = ( + SendType::CalibrationFlydraXml(cal_data.clone()), + tdpt.clone(), + ); + Some(data) + } else { + None } - } - - #[allow(dead_code)] - fn fullpath(&self, path: &str) -> String { - assert!(path.starts_with('/')); // security check - let path = std::path::PathBuf::from(path) - .strip_prefix("/") - .unwrap() - .to_path_buf(); - assert!(!path.starts_with("..")); // security check - - let base = std::path::PathBuf::from(self.config_serve_filepath.clone()); - let result = base.join(path); - result.into_os_string().into_string().unwrap() - } - - #[cfg(not(any(feature = "bundle_files", feature = "serve_files")))] - fn get_file_content(&self, _file_path: &str) -> Option> { - None - } + }; - #[cfg(feature = "bundle_files")] - fn get_file_content(&self, file_path: &str) -> Option> { - let fullpath = self.fullpath(file_path); - let r = PUBLIC.get(&fullpath); - match r { - Ok(s) => Some(s.into_owned()), - Err(_) => None, - } + // If we extracted a calibration above, send it already now. 
+ if let Some(cal_data) = cal_data { + let cal_body = get_body(&cal_data); + tx.send(Ok(Frame::data(cal_body.into()))).await.unwrap(); } - #[cfg(feature = "serve_files")] - fn get_file_content(&self, file_path: &str) -> Option> { - let fullpath = self.fullpath(file_path); - let contents = match std::fs::read(&fullpath) { - Ok(contents) => contents, - Err(e) => { - error!("requested path {:?}, but got error {:?}", file_path, e); - return None; - } - }; - Some(contents) - } + body } -type MyBody = http_body_util::combinators::BoxBody; - -fn body_from_buf(body_buf: &[u8]) -> MyBody { - let body = http_body_util::Full::new(bytes::Bytes::from(body_buf.to_vec())); - MyBody::new(body.map_err(|_: std::convert::Infallible| unreachable!())) +async fn info_handler() -> impl axum::response::IntoResponse { + tracing::trace!("info_handler"); + "info_handler" } -impl hyper::service::Service> for ModelService { - type Response = hyper::Response; - type Error = hyper::Error; - type Future = - Pin> + Send>>; - - fn call(&self, req: http::Request) -> Self::Future { - let resp = Response::builder(); - debug!("got request {:?}", req); - let resp_final = match (req.method(), req.uri().path()) { - (&Method::GET, path) => { - let path = if path == "/" { "/index.html" } else { path }; - - if path == "/info" { - let buf = serde_json::to_string_pretty(&self.info).unwrap(); - let len = buf.len(); - let body = body_from_buf(buf.as_bytes()); - resp.header(hyper::header::CONTENT_LENGTH, format!("{}", len).as_bytes()) - .header( - hyper::header::CONTENT_TYPE, - hyper::header::HeaderValue::from_str("application/json") - .expect("from_str"), - ) - .body(body) - .expect("response") // todo map err - } else if path == self.events_path { - let mut accepts_event_stream = false; - for value in req.headers().get_all(ACCEPT).iter() { - if value - .to_str() - .expect("to_str()") - .contains("text/event-stream") - { - accepts_event_stream = true; - } - } - - if accepts_event_stream { - let (tx_event_stream, 
rx_event_stream) = - tokio::sync::mpsc::channel(self.config_channel_size); - let tx_event_stream: EventChunkSender = tx_event_stream; // type annotation only - - let rx_event_stream = self - .valve - .wrap(tokio_stream::wrappers::ReceiverStream::new(rx_event_stream)); - - let rx_event_stream = rx_event_stream - .map(|data: bytes::Bytes| Ok::<_, _>(http_body::Frame::data(data))); - - { - let conn_info = NewEventStreamConnection { - chunk_sender: tx_event_stream, - }; - - let mut tx_new_connection2 = self.tx_new_connection.clone(); - let fut = async move { - match tx_new_connection2.send(conn_info).await { - Ok(()) => {} - Err(e) => error!("sending new connection info failed: {}", e), - } - }; - - self.rt_handle.spawn(fut); - } +#[derive(Clone)] +struct ModelServerAppState { + current_calibration: Arc>>, + event_broadcaster: EventBroadcaster, + next_connection_id: Arc>, +} - let body1 = http_body_util::StreamBody::new(rx_event_stream); - let body2 = http_body_util::BodyExt::boxed(body1); - - resp.header( - hyper::header::CONTENT_TYPE, - hyper::header::HeaderValue::from_str("text/event-stream") - .expect("from_str"), - ) - .body(body2) - .expect("response") // todo map err - } else { - let msg = r#" - - - - Error - bad request - - -

Error - bad request

- Event request does not specify 'Accept' HTTP Header or does not accept - the required 'text/event-stream'. (View event stream live in browser - here.) - -"# - .to_string(); - resp.status(StatusCode::BAD_REQUEST) - .body(body_from_buf(msg.as_bytes())) - .expect("response") // todo map err - } - } else { - // TODO read file asynchronously - match self.get_file_content(path) { - Some(buf) => { - let len = buf.len(); - let body = body_from_buf(&buf); - resp.header( - hyper::header::CONTENT_LENGTH, - format!("{}", len).as_bytes(), - ) - .body(body) - .expect("response") // todo map err - } - None => { - resp.status(StatusCode::NOT_FOUND) - .body(body_from_buf(b"")) - .expect("response") // todo map err - } - } - } - } - _ => { - resp.status(StatusCode::NOT_FOUND) - .body(body_from_buf(b"")) - .expect("response") // todo map err - } - }; - Box::pin(futures::future::ok(resp_final)) +impl Default for ModelServerAppState { + fn default() -> Self { + Self { + current_calibration: Arc::new(RwLock::new(None)), + event_broadcaster: Default::default(), + next_connection_id: Arc::new(RwLock::new(0)), + } } } @@ -301,73 +147,37 @@ pub struct ToListener { trigger_timestamp: Option>, } -#[derive(Clone)] -pub struct ModelServer { - local_addr: std::net::SocketAddr, -} - pub async fn new_model_server( - data_rx: tokio::sync::mpsc::Receiver<(SendType, TimeDataPassthrough)>, - valve: stream_cancel::Valve, - addr: &std::net::SocketAddr, - info: StaticMainbrainInfo, - rt_handle: tokio::runtime::Handle, -) -> Result { + mut data_rx: tokio::sync::mpsc::Receiver<(SendType, TimeDataPassthrough)>, + addr: std::net::SocketAddr, +) -> Result<()> { { - let channel_size = 2; - let (tx_new_connection, rx_new_connection) = futures::channel::mpsc::channel(channel_size); - - let service = ModelService::new( - valve.clone(), - tx_new_connection, - info.clone(), - rt_handle.clone(), - ); - - let service2 = service.clone(); + let app_state = ModelServerAppState::default(); let listener = 
tokio::net::TcpListener::bind(addr).await?; let local_addr = listener.local_addr()?; - let handle2 = rt_handle.clone(); - rt_handle.spawn(async move { - loop { - let (socket, _remote_addr) = listener.accept().await.unwrap(); - let model_service = service2.clone(); - - // Spawn a task to handle the connection. That way we can multiple connections - // concurrently. - handle2.spawn(async move { - // Hyper has its own `AsyncRead` and `AsyncWrite` traits and doesn't use tokio. - // `TokioIo` converts between them. - let socket = hyper_util::rt::TokioIo::new(socket); - let model_server = model_service.clone(); - - let hyper_service = hyper::service::service_fn( - move |request: hyper::Request| { - // Do we need to call `poll_ready`???? - use hyper::service::Service; - model_server.call(request) - }, - ); - - // `server::conn::auto::Builder` supports both http1 and http2. - // - // `TokioExecutor` tells hyper to use `tokio::spawn` to spawn tasks. - if let Err(err) = hyper_util::server::conn::auto::Builder::new( - hyper_util::rt::TokioExecutor::new(), - ) - // `serve_connection_with_upgrades` is required for websockets. If you don't need - // that you can use `serve_connection` instead. - .serve_connection_with_upgrades(socket, hyper_service) - .await - { - eprintln!("failed to serve connection: {err:#}"); - } - }); - } - }); + #[cfg(feature = "bundle_files")] + let serve_dir = tower_serve_static::ServeDir::new(&ASSETS_DIR); + + #[cfg(feature = "serve_files")] + let serve_dir = tower_http::services::fs::ServeDir::new( + std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("static"), + ); + + // Create axum router. 
+ let router = axum::Router::new() + .route(EVENTS_PATH, axum::routing::get(events_handler)) + .route("/info", axum::routing::get(info_handler)) + .nest_service("/", serve_dir) + .with_state(app_state.clone()); + + // create future for our app + let http_serve_future = { + use std::future::IntoFuture; + axum::serve(listener, router).into_future() + }; info!( "ModelServer at http://{}:{}/", @@ -379,68 +189,46 @@ pub async fn new_model_server( "ModelServer events at http://{}:{}{}", local_addr.ip(), local_addr.port(), - service.events_path + EVENTS_PATH, ); - let result = ModelServer { local_addr }; - - let mut rx_new_connection_valved = valve.wrap(rx_new_connection); - let mut data_rx = tokio_stream::wrappers::ReceiverStream::new(data_rx); - - let main_task = async move { - let mut connections: Vec = vec![]; - let mut current_calibration: Option<(SendType, TimeDataPassthrough)> = None; + // Infinite loop to process and forward data. + let app_state2 = app_state.clone(); + let new_data_processor_future = async move { + let app_state = app_state2; + // Wait for the next update time to arrive ... loop { - tokio::select! { - opt_new_connection = rx_new_connection_valved.next() => { - match opt_new_connection { - Some(new_connection) => { - - if let Some(data) = ¤t_calibration { - let bytes = get_body(data)?; - new_connection.chunk_sender.send(bytes.clone()).await.unwrap(); - } - - connections.push(new_connection); - } - None => { - // All senders done. (So the server has quit and so should we.) 
- break; - } + let opt_new_data = data_rx.recv().await; + match &opt_new_data { + Some(data) => { + if let (SendType::CalibrationFlydraXml(calib), tdpt) = &data { + let mut current_calibration = + app_state.current_calibration.write().unwrap(); + *current_calibration = Some((calib.clone(), tdpt.clone())); } + send_msg(data, &app_state).await?; } - opt_new_data = data_rx.next() => { - match &opt_new_data { - Some(data) => { - if let (SendType::CalibrationFlydraXml(_),_) = &data { - current_calibration = Some(data.clone()); - } - send_msg(data, &mut connections).await?; - } - None => { - // All senders done. No new data will be coming, so quit. - break; - } - } - - + None => { + // All senders done. No new data will be coming, so quit. + break; } } } Ok::<_, crate::Error>(()) }; - rt_handle.spawn(main_task); - Ok(result) - } -} -impl ModelServer { - pub fn local_addr(&self) -> &std::net::SocketAddr { - &self.local_addr + // Wait for one of our futures to finish... + tokio::select! { + result = new_data_processor_future => {result?} + _ = http_serve_future => {} + } + // ...then exit. + + Ok(()) } } -fn get_body(data: &(SendType, TimeDataPassthrough)) -> Result { +fn get_body(data: &(SendType, TimeDataPassthrough)) -> String { let (msg, tdpt) = data; let latency: f64 = if let Some(ref tt) = tdpt.trigger_timestamp() { let now_f64 = datetime_conversion::datetime_to_f64(&chrono::Local::now()); @@ -460,36 +248,17 @@ fn get_body(data: &(SendType, TimeDataPassthrough)) -> Result, + app_state: &ModelServerAppState, ) -> Result<()> { - let bytes = get_body(data)?; - - // Send to all listening connections. - let keep: Vec = futures::future::join_all( - connections - .iter_mut() - .map(|conn| async { conn.chunk_sender.send(bytes.clone()).await.is_ok() }), - ) - .await; - - assert_eq!(keep.len(), connections.len()); - - // Remove connections which resulted in error. 
- let mut index = 0; - connections.retain(|_| { - index += 1; - keep[index - 1] - }); - + let buf = get_body(data); + app_state.event_broadcaster.broadcast_frame(buf).await; Ok(()) } diff --git a/flydra2/src/new_object_test_2d.rs b/flydra2/src/new_object_test_2d.rs index 28a44c6b2..f79e2e526 100644 --- a/flydra2/src/new_object_test_2d.rs +++ b/flydra2/src/new_object_test_2d.rs @@ -1,7 +1,7 @@ use std::{collections::BTreeMap, sync::Arc}; use crate::{tracking_core::HypothesisTest, CamAndDist, HypothesisTestResult}; -use flydra_types::{MyFloat, RosCamName, TrackingParams}; +use flydra_types::{MyFloat, RawCamName, TrackingParams}; #[derive(Clone)] pub(crate) struct NewObjectTestFlat3D { @@ -22,7 +22,7 @@ impl NewObjectTestFlat3D { impl HypothesisTest for NewObjectTestFlat3D { fn hypothesis_test( &self, - good_points: &BTreeMap>, + good_points: &BTreeMap>, ) -> Option { let recon_ref = &self.recon; assert!(good_points.len() < 2, "cannot have >1 camera"); @@ -30,7 +30,7 @@ impl HypothesisTest for NewObjectTestFlat3D { let cam = recon_ref.cam_by_name(cam_name.as_str()).unwrap(); if let Some(surface_pt) = crate::flat_2d::distorted_2d_to_flat_3d(&cam, xy) { let cams_and_reproj_dist = vec![CamAndDist { - ros_cam_name: cam_name.clone(), + raw_cam_name: cam_name.clone(), reproj_dist: 0.0, }]; return Some(HypothesisTestResult { diff --git a/flydra2/src/new_object_test_3d.rs b/flydra2/src/new_object_test_3d.rs index a89ae0687..e49053f09 100644 --- a/flydra2/src/new_object_test_3d.rs +++ b/flydra2/src/new_object_test_3d.rs @@ -1,7 +1,7 @@ use std::{collections::BTreeMap, sync::Arc}; use tracing::error; -use flydra_types::{RosCamName, TrackingParams}; +use flydra_types::{RawCamName, TrackingParams}; use mvg::PointWorldFrameWithSumReprojError; @@ -12,8 +12,8 @@ use crate::{ const HTEST_MAX_N_CAMS: u8 = 3; -type CamComboKey = RosCamName; -type CamComboList = Vec>; +type CamComboKey = RawCamName; +type CamComboList = Vec>; #[derive(Clone)] pub(crate) struct NewObjectTestFull3D { @@ 
-33,7 +33,7 @@ impl NewObjectTestFull3D { { let mut useful_cams = BTreeMap::new(); for raw_cam_name in recon.cam_names() { - let name = RosCamName::new(raw_cam_name.to_string()); + let name = RawCamName::new(raw_cam_name.to_string()); let k: CamComboKey = name; useful_cams.insert(k, ()); } @@ -76,7 +76,7 @@ impl HypothesisTest for NewObjectTestFull3D { /// framenumber and timestamp. fn hypothesis_test( &self, - good_points: &BTreeMap>, + good_points: &BTreeMap>, ) -> Option { // TODO: convert this to use undistorted points and then remove // orig_distorted, also from the structure it is in. @@ -188,7 +188,7 @@ impl HypothesisTest for NewObjectTestFull3D { .iter() .zip(bssf.reproj_dists.iter()) .map(|(ros_cam_name, reproj_dist)| CamAndDist { - ros_cam_name: ros_cam_name.clone(), + raw_cam_name: ros_cam_name.clone(), reproj_dist: *reproj_dist, }) .collect(); diff --git a/flydra2/src/tracking_core.rs b/flydra2/src/tracking_core.rs index 49281d1e9..a7b1b70f3 100644 --- a/flydra2/src/tracking_core.rs +++ b/flydra2/src/tracking_core.rs @@ -18,7 +18,7 @@ use adskalman::{StateAndCovariance, TransitionModelLinearNoControl}; use flydra_types::{ CamNum, DataAssocRow, FlydraFloatTimestampLocal, FlydraRawUdpPoint, KalmanEstimatesRow, - RosCamName, SyncFno, TrackingParams, Triggerbox, + RawCamName, SyncFno, TrackingParams, Triggerbox, }; use crate::bundled_data::{MiniArenaPointPerCam, PerMiniArenaAllCamsOneFrameUndistorted}; @@ -229,9 +229,9 @@ impl LivingModel { let likes: Vec = if let Some(expected_observation) = eo { trace!( - "object {} {} expects ({},{})", + "object {} \"{}\" expects ({},{})", self.lmi.obj_id, - cam_name, + cam_name.as_str(), expected_observation.mean()[0], expected_observation.mean()[1] ); @@ -512,7 +512,7 @@ where pub(crate) trait HypothesisTest: Send + dyn_clone::DynClone { fn hypothesis_test( &self, - good_points: &BTreeMap>, + good_points: &BTreeMap>, ) -> Option; } @@ -576,7 +576,7 @@ pub(crate) struct MCInner { } impl ModelCollection { - 
#[tracing::instrument] + #[tracing::instrument(level = "debug", skip_all)] pub(crate) fn predict_motion(self) -> ModelCollection { let mcinner = self.mcinner; let models = self @@ -603,7 +603,7 @@ impl ModelCollection { } impl ModelCollection { - #[tracing::instrument] + #[tracing::instrument(level = "debug", skip_all)] pub(crate) fn compute_observation_likes( self, tdpt: &TimeDataPassthrough, @@ -637,7 +637,7 @@ impl ModelCollection { } impl ModelCollection { - #[tracing::instrument] + #[tracing::instrument(level = "debug", skip_all)] pub(crate) fn solve_data_association_and_update( self, tdpt: &TimeDataPassthrough, @@ -721,8 +721,8 @@ impl ModelCollection { let cam_num = self.mcinner.cam_manager.cam_num(&cam_name).unwrap(); trace!( - "camera {} ({}): {} points", - cam_name, + "camera \"{}\" ({}): {} points", + cam_name.as_str(), cam_num, arena_data.len() ); @@ -907,6 +907,7 @@ fn to_bayesian_estimate( } impl ModelCollection { + #[tracing::instrument(level = "debug", skip_all)] pub(crate) fn births_and_deaths( mut self, tdpt: &TimeDataPassthrough, @@ -920,11 +921,6 @@ impl ModelCollection { where F: Fn() -> u32, { - // Instead of a `#[tracing::instrument]` attribute on this method, which - // we cannot do because F does not implement Debug, here we enter a - // span. 
- let _span = tracing::span!(tracing::Level::INFO, "births_and_deaths").entered(); - let mut result_messages = Vec::new(); // Check deaths before births so we do not check if we kill a @@ -1004,7 +1000,7 @@ impl ModelCollection { .iter() .map(|ci| { let pt_idx = 0; - let cam_num = self.mcinner.cam_manager.cam_num(&ci.ros_cam_name).unwrap(); + let cam_num = self.mcinner.cam_manager.cam_num(&ci.raw_cam_name).unwrap(); DataAssocInfo { pt_idx, cam_num, @@ -1090,7 +1086,7 @@ fn filter_points_and_take_first( // fdp_vec: &[FrameDataAndPoints], fdp_vec: &UnusedDataPerArena, minimum_pixel_abs_zscore: f64, -) -> BTreeMap> { +) -> BTreeMap> { fdp_vec .0 .per_cam diff --git a/flydra2/src/write_data.rs b/flydra2/src/write_data.rs index a982b8542..2f4fee429 100644 --- a/flydra2/src/write_data.rs +++ b/flydra2/src/write_data.rs @@ -475,7 +475,7 @@ impl Drop for WritingState { } } -#[tracing::instrument] +#[tracing::instrument(level = "debug", skip_all)] pub(crate) fn writer_task_main( mut braidz_write_rx: tokio::sync::mpsc::Receiver, cam_manager: ConnectedCamerasManager, @@ -719,7 +719,7 @@ mod test { FrameDataAndPoints { frame_data: FrameData { block_id: None, - cam_name: RosCamName::new("cam".to_string()), + cam_name: RawCamName::new("cam".to_string()), cam_num: CamNum(0), cam_received_timestamp: FlydraFloatTimestampLocal::from_f64(i as f64 + 0.123), device_timestamp: None, diff --git a/flytrax-csv-to-braidz/Cargo.toml b/flytrax-csv-to-braidz/Cargo.toml index ee4e98699..95dfc982b 100644 --- a/flytrax-csv-to-braidz/Cargo.toml +++ b/flytrax-csv-to-braidz/Cargo.toml @@ -23,9 +23,7 @@ itertools = "0.8" lazy_static = "1.4.0" futures = "0.3" tokio = {version="1.0.1", default-features=false, features=["macros"]} -includedir = { version = "0.5", optional = true } tempfile = "3.4.0" -phf = { version = "0.7.23", optional = true } anyhow = "1.0" image = { version = "0.24.2", default-features = false, features = ["jpeg", "png"] } opencv-ros-camera = { version = "0.14", features = 
["serde-serialize"] } diff --git a/flytrax-csv-to-braidz/src/lib.rs b/flytrax-csv-to-braidz/src/lib.rs index b37b2fe4f..b938674e0 100644 --- a/flytrax-csv-to-braidz/src/lib.rs +++ b/flytrax-csv-to-braidz/src/lib.rs @@ -152,7 +152,6 @@ where None, tracking_params, opt2, - tokio::runtime::Handle::current(), save_performance_histograms, &env!("CARGO_PKG_NAME"), no_progress, diff --git a/http-video-streaming/Cargo.toml b/http-video-streaming/Cargo.toml index f35be1963..49143025f 100644 --- a/http-video-streaming/Cargo.toml +++ b/http-video-streaming/Cargo.toml @@ -7,8 +7,6 @@ rust-version="1.60" [dependencies] thiserror = "1.0.33" -bui-backend = {version="0.15", default-features = false} -bui-backend-types = "0.8" chrono = {version="0.4.23", default-features=false, features=["serde", "clock", "std", "wasmbind"]} base64 = "0.6" tokio = {version="1", features=["sync","time","macros"]} @@ -16,12 +14,17 @@ tokio-stream = "0.1.8" parking_lot = "0.12" serde_json = "1.0" futures = "0.3" -convert-image = {path = "../convert-image"} -log = "0.4" machine-vision-formats = "0.1" -http-video-streaming-types = {path = "http-video-streaming-types"} +bytes = "1.5.0" +http-body = "1.0.0" +tracing = "0.1.40" +convert-image = {path = "../convert-image"} +http-video-streaming-types = {path = "http-video-streaming-types"} basic-frame = {path="../basic-frame"} +rust-cam-bui-types = { path = "../rust-cam-bui-types" } +event-stream-types = { path = "../event-stream-types" } +bui-backend-session-types = { path = "../bui-backend-session/types" } [features] backtrace = ["convert-image/backtrace"] diff --git a/http-video-streaming/http-video-streaming-types/Cargo.toml b/http-video-streaming/http-video-streaming-types/Cargo.toml index 95a601f52..f02483ba1 100644 --- a/http-video-streaming/http-video-streaming-types/Cargo.toml +++ b/http-video-streaming/http-video-streaming-types/Cargo.toml @@ -7,7 +7,8 @@ license = "MIT/Apache-2.0" [dependencies] serde = {version="1.0", features=["derive"]} 
-bui-backend-types = "0.8" + +bui-backend-session-types = { path = "../../bui-backend-session/types" } [dev-dependencies] serde_yaml = "0.9" diff --git a/http-video-streaming/http-video-streaming-types/src/lib.rs b/http-video-streaming/http-video-streaming-types/src/lib.rs index ea8d37262..15dd8d1e0 100644 --- a/http-video-streaming/http-video-streaming-types/src/lib.rs +++ b/http-video-streaming/http-video-streaming-types/src/lib.rs @@ -5,6 +5,7 @@ // or http://opensource.org/licenses/MIT>, at your option. This file may not be // copied, modified, or distributed except according to those terms. +use bui_backend_session_types::ConnectionKey; use serde::{Deserialize, Serialize}; #[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] @@ -29,8 +30,7 @@ pub struct ToClient { pub annotations: Vec, pub fno: u64, pub ts_rfc3339: String, // timestamp in RFC3339 format - pub ck: bui_backend_types::ConnectionKey, - pub name: Option, + pub ck: ConnectionKey, } #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] @@ -68,17 +68,16 @@ pub enum Shape { // Rectangle(RectangleParams), // Mask(MaskImage), Polygon(PolygonParams), - /// mulitple individual circles + /// multiple individual circles MultipleCircles(Vec), } // from client to server #[derive(Debug, Serialize, Deserialize, Clone)] pub struct FirehoseCallbackInner { - pub ck: bui_backend_types::ConnectionKey, + pub ck: ConnectionKey, pub fno: usize, pub ts_rfc3339: String, - pub name: Option, } #[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] diff --git a/http-video-streaming/src/lib.rs b/http-video-streaming/src/lib.rs index c5adca5e0..c639442e1 100644 --- a/http-video-streaming/src/lib.rs +++ b/http-video-streaming/src/lib.rs @@ -1,20 +1,13 @@ #![cfg_attr(feature = "backtrace", feature(error_generic_member_access))] -#[macro_use] -extern crate log; - use parking_lot::Mutex; use std::{collections::HashMap, sync::Arc}; use tokio_stream::StreamExt; -use bui_backend::{ - highlevel::{ConnectionEvent, 
ConnectionEventType}, - lowlevel::EventChunkSender, -}; -use bui_backend_types::ConnectionKey; - use basic_frame::DynamicFrame; +use bui_backend_session_types::ConnectionKey; +use event_stream_types::{ConnectionEvent, ConnectionEventType, EventChunkSender}; pub use http_video_streaming_types::{ CircleParams, DrawableShape, FirehoseCallbackInner, Point, Shape, ToClient, @@ -41,7 +34,6 @@ pub struct AnnotatedFrame { pub frame: DynamicFrame, pub found_points: Vec, pub valid_display: Option, - pub name: Option, pub annotations: Vec, } @@ -58,7 +50,6 @@ pub struct FirehoseCallback { } struct PerSender { - name_selector: NameSelector, out: EventChunkSender, frame_lifo: Option>>, ready_to_send: bool, @@ -83,11 +74,9 @@ impl PerSender { fn new( out: EventChunkSender, conn_key: ConnectionKey, - name_selector: NameSelector, frame: Arc>, ) -> PerSender { PerSender { - name_selector, out, frame_lifo: Some(frame), ready_to_send: true, @@ -96,33 +85,18 @@ impl PerSender { } } fn push(&mut self, frame: Arc>) { - let use_frame = match self.name_selector { - NameSelector::All => true, - NameSelector::None => false, - NameSelector::Name(ref select_name) => { - let mut tmp = false; - if let Some(ref this_name) = frame.lock().name { - if this_name == select_name { - tmp = true; - } - } - tmp - } - }; - if use_frame { - self.fno += 1; - self.frame_lifo = Some(frame); - } + self.fno += 1; + self.frame_lifo = Some(frame); } fn got_callback(&mut self, msg: FirehoseCallback) { match chrono::DateTime::parse_from_rfc3339(&msg.inner.ts_rfc3339) { // match chrono::DateTime::parse_from_rfc3339(&msg.inner.ts_rfc3339) { Ok(sent_time) => { let latency = msg.arrival_time.signed_duration_since(sent_time); - trace!("latency: {:?}", latency); + tracing::trace!("latency: {:?}", latency); } Err(e) => { - error!("failed to parse timestamp in callback: {:?}", e); + tracing::error!("failed to parse timestamp in callback: {:?}", e); } } self.ready_to_send = true; @@ -159,7 +133,6 @@ impl PerSender { fno: 
self.fno, ts_rfc3339: sent_time.to_rfc3339(), ck: self.conn_key, - name: most_recent_frame_data.name.clone(), } }; let buf = serde_json::to_string(&tc).expect("encode"); @@ -168,12 +141,12 @@ impl PerSender { http_video_streaming_types::VIDEO_STREAM_EVENT_NAME, buf ); - let hc = buf.into(); + let hc = http_body::Frame::data(bytes::Bytes::from(buf)); - match self.out.send(hc).await { + match self.out.send(Ok(hc)).await { Ok(()) => {} Err(_) => { - info!("failed to send data to connection. dropping."); + tracing::info!("failed to send data to connection. dropping."); // Failed to send data to event stream key. // TODO: drop this sender. } @@ -189,8 +162,6 @@ impl PerSender { } struct TaskState { - use_frame_selector: bool, - events_prefix: String, /// cache of senders per_sender_map: HashMap, /// most recent image frame, with annotations @@ -212,32 +183,10 @@ impl TaskState { Ok(()) } fn handle_connection(&mut self, conn_evt: ConnectionEvent) -> Result<()> { - let path = conn_evt.path.as_str(); match conn_evt.typ { ConnectionEventType::Connect(chunk_sender) => { // sender was added. 
- let name_selector = if path == self.events_prefix.as_str() { - match self.use_frame_selector { - true => NameSelector::None, - false => NameSelector::All, - } - } else { - if !path.starts_with(self.events_prefix.as_str()) { - return Err(Error::UnknownPath( - #[cfg(feature = "backtrace")] - std::backtrace::Backtrace::capture(), - )); - } - let slash_idx = self.events_prefix.len() + 1; // get location of '/' separator - let use_name = path[slash_idx..].to_string(); - NameSelector::Name(use_name) - }; - let ps = PerSender::new( - chunk_sender, - conn_evt.connection_key, - name_selector, - self.frame.clone(), - ); + let ps = PerSender::new(chunk_sender, conn_evt.connection_key, self.frame.clone()); self.per_sender_map.insert(conn_evt.connection_key, ps); } ConnectionEventType::Disconnect => { @@ -259,7 +208,7 @@ impl TaskState { if let Some(ps) = self.per_sender_map.get_mut(&callback.inner.ck) { ps.got_callback(callback) } else { - warn!( + tracing::warn!( "Got firehose_callback for non-existant connection key. \ Did connection disconnect?" ); @@ -270,53 +219,43 @@ impl TaskState { pub async fn firehose_task( connection_callback_rx: tokio::sync::mpsc::Receiver, - // sender_map_arc: SenderMap, mut firehose_rx: tokio::sync::mpsc::Receiver, firehose_callback_rx: tokio::sync::mpsc::Receiver, - use_frame_selector: bool, - events_prefix: &str, - mut quit_rx: tokio::sync::oneshot::Receiver<()>, ) -> Result<()> { // Wait for the first frame so we don't need to deal with an Option<>. 
- let frame = Arc::new(Mutex::new(firehose_rx.recv().await.unwrap())); + let first_frame = firehose_rx.recv().await.unwrap(); + let frame = Arc::new(Mutex::new(first_frame)); let mut task_state = TaskState { - events_prefix: events_prefix.to_string(), - use_frame_selector, per_sender_map: HashMap::new(), frame, }; let mut connection_callback_rx = tokio_stream::wrappers::ReceiverStream::new(connection_callback_rx); - let mut firehose_rx = tokio_stream::wrappers::ReceiverStream::new(firehose_rx); let mut firehose_callback_rx = tokio_stream::wrappers::ReceiverStream::new(firehose_callback_rx); loop { tokio::select! { - _quit_val = &mut quit_rx => { - log::debug!("quitting."); - break; - } opt_new_connection = connection_callback_rx.next() => { match opt_new_connection { Some(new_connection) => { task_state.handle_connection(new_connection)?; } None => { - log::debug!("new connection senders done."); + tracing::debug!("new connection senders done."); // All senders done. break; } } } - opt_new_frame = firehose_rx.next() => { + opt_new_frame = firehose_rx.recv() => { match opt_new_frame { Some(new_frame) => { task_state.handle_frame(new_frame)?; } None => { - log::debug!("new frame senders done."); + tracing::debug!("new frame senders done."); // All senders done. break; } @@ -328,15 +267,15 @@ pub async fn firehose_task( task_state.handle_callback(callback)?; } None => { - log::debug!("new callback senders done."); + tracing::debug!("new callback senders done."); // All senders done. break; } } }, } - task_state.service().await?; + task_state.service().await?; // should use a timer for this?? 
} - log::debug!("firehose task done."); + tracing::debug!("firehose task done."); Ok(()) } diff --git a/media-utils/frame-source/src/h264_source.rs b/media-utils/frame-source/src/h264_source.rs index 98d1eafdd..327003b50 100644 --- a/media-utils/frame-source/src/h264_source.rs +++ b/media-utils/frame-source/src/h264_source.rs @@ -306,12 +306,10 @@ impl H264Source { let (timestamp_source, has_timestamps) = if frame0_precision_time.is_some() { ("MISPmicrosectime", true) + } else if mp4_pts.is_some() { + ("MP4 PTS", true) } else { - if mp4_pts.is_some() { - ("MP4 PTS", true) - } else { - ("(no timestamps)", false) - } + ("(no timestamps)", false) }; Ok(Self { diff --git a/media-utils/mkv-parser-kit/examples/simple-parser.rs b/media-utils/mkv-parser-kit/examples/simple-parser.rs index 92cf9183d..54aeb4d94 100644 --- a/media-utils/mkv-parser-kit/examples/simple-parser.rs +++ b/media-utils/mkv-parser-kit/examples/simple-parser.rs @@ -91,7 +91,7 @@ fn accumulate(element: &EbmlElement, depth: u8, accum: &mut Accum, tag_path: &[T &[Tag::Segment, Tag::Cluster, Tag::Timestamp] => { assert!(accum.segment_cluster_timestamp.is_none()); // convert to u64 because these numbers can get big. 
- let n_timesteps: u64 = get_uint(element).try_into().unwrap(); + let n_timesteps: u64 = get_uint(element).into(); let timestep_nanos: u64 = accum.timestep_nanos.unwrap().try_into().unwrap(); let pts_total_nanos = n_timesteps * timestep_nanos; diff --git a/media-utils/mkv-parser-kit/src/de.rs b/media-utils/mkv-parser-kit/src/de.rs index 124ccb37b..ac4aa3ff0 100644 --- a/media-utils/mkv-parser-kit/src/de.rs +++ b/media-utils/mkv-parser-kit/src/de.rs @@ -184,10 +184,10 @@ impl<'a, R: Read + Seek> Deserializer<'a, R> { b'u' => { assert!(buf.len() <= 4); let val = match buf.len() { - 1 => buf[0].try_into().unwrap(), + 1 => buf[0].into(), 2 => { let buf: [u8; 2] = buf.try_into().unwrap(); - u16::from_be_bytes(buf).try_into().unwrap() + u16::from_be_bytes(buf).into() } 3 => { let mut buf4 = [0u8; 4]; diff --git a/media-utils/mkv-strand-reader/src/lib.rs b/media-utils/mkv-strand-reader/src/lib.rs index 7a8e646f3..0c61fff58 100644 --- a/media-utils/mkv-strand-reader/src/lib.rs +++ b/media-utils/mkv-strand-reader/src/lib.rs @@ -185,8 +185,8 @@ fn do_parse( // assert!(accum.segment_cluster_timestamp.is_none()); assert!(accum.pts.is_none()); // convert to u64 because these numbers can get big. 
- let n_timesteps: u64 = get_uint(element).try_into().unwrap(); - let timestep_nanos: u64 = accum.timestep_nanos.unwrap().try_into().unwrap(); + let n_timesteps: u64 = get_uint(element).into(); + let timestep_nanos: u64 = accum.timestep_nanos.unwrap().into(); let pts_total_nanos = n_timesteps * timestep_nanos; let pts = std::time::Duration::from_nanos(pts_total_nanos); @@ -199,7 +199,7 @@ fn do_parse( if let Some(cluster_pts) = accum.pts { let x = if let Some(BoxData::SimpleBlockData(block_data)) = &element.box_data() { let n_timesteps: u64 = block_data.timestamp.try_into().unwrap(); - let timestep_nanos: u64 = accum.timestep_nanos.unwrap().try_into().unwrap(); + let timestep_nanos: u64 = accum.timestep_nanos.unwrap().into(); let cluster_offset_nanos = n_timesteps * timestep_nanos; let cluster_offset = std::time::Duration::from_nanos(cluster_offset_nanos); let pts = cluster_pts + cluster_offset; diff --git a/rust-cam-bui-types/Cargo.toml b/rust-cam-bui-types/Cargo.toml index 6ffaeeed3..11aeb9f06 100644 --- a/rust-cam-bui-types/Cargo.toml +++ b/rust-cam-bui-types/Cargo.toml @@ -3,8 +3,11 @@ name = "rust-cam-bui-types" version = "0.1.0" authors = ["Andrew Straw "] license = "MIT/Apache-2.0" +edition = "2021" [dependencies] -serde = "1.0" -serde_derive = "1.0" +serde = {version = "1.0", features = [ "derive" ] } chrono = {version="0.4.23", default-features=false, features=["serde", "clock", "std", "wasmbind"]} +tokio = "1" + +bui-backend-session-types = { path = "../bui-backend-session/types" } diff --git a/rust-cam-bui-types/src/lib.rs b/rust-cam-bui-types/src/lib.rs index a9c38bf76..5cb974306 100644 --- a/rust-cam-bui-types/src/lib.rs +++ b/rust-cam-bui-types/src/lib.rs @@ -5,10 +5,7 @@ // or http://opensource.org/licenses/MIT>, at your option. This file may not be // copied, modified, or distributed except according to those terms. 
-extern crate serde; -#[macro_use] -extern crate serde_derive; -extern crate chrono; +use serde::{Serialize, Deserialize}; #[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)] pub struct RecordingPath { diff --git a/strand-braid-user/users-guide/src/braid_remote_cameras.md b/strand-braid-user/users-guide/src/braid_remote_cameras.md index 72ce170ba..6084ddf1d 100644 --- a/strand-braid-user/users-guide/src/braid_remote_cameras.md +++ b/strand-braid-user/users-guide/src/braid_remote_cameras.md @@ -17,7 +17,7 @@ file have connected will Braid synchronize the cameras and allow recording of data. To start Strand Camera as a remote camera for Braid, run `strand-cam-pylon` (to -start Strand Camera) with the command line argument `--braid_addr ` +start Strand Camera) with the command line argument `--braid-url ` specifying the URL for the braid HTTP address. The camera name should also be specified on the command line, along with any other options. @@ -25,5 +25,5 @@ In the following example, the Strand Camera will open the camera named `Basler-12345` and will connect to Braid running at `http://127.0.0.1:44444`. ```ignore -strand-cam-pylon --camera-name Basler-12345 --braid_addr http://127.0.0.1:44444 +strand-cam-pylon --camera-name Basler-12345 --braid-url http://127.0.0.1:44444 ``` diff --git a/strand-cam-storetype/src/lib.rs b/strand-cam-storetype/src/lib.rs index 901f53bcd..841ac0378 100644 --- a/strand-cam-storetype/src/lib.rs +++ b/strand-cam-storetype/src/lib.rs @@ -23,7 +23,11 @@ use led_box_comms::DeviceState; pub use led_box_comms::ToDevice as ToLedBoxDevice; -pub const STRAND_CAM_EVENTS_URL_PATH: &str = "/strand-cam-events"; +// Note: this does not start with a slash because we do not want an absolute +// root path in case we are in a case where we are proxied by braid. I.e. it +// should work at `http://braid/cam-proxy/cam-name/strand-cam-events` as well as +// `http://strand-cam/strand-cam-events`. 
+pub const STRAND_CAM_EVENTS_URL_PATH: &str = "strand-cam-events"; pub const STRAND_CAM_EVENT_NAME: &str = "strand-cam"; #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] diff --git a/strand-cam/Cargo.toml b/strand-cam/Cargo.toml index 4bbd773f4..ba5bee34e 100644 --- a/strand-cam/Cargo.toml +++ b/strand-cam/Cargo.toml @@ -12,27 +12,22 @@ path = "src/strand-cam.rs" [dependencies] jemallocator = {version="0.3", optional=true} -async-change-tracker = "0.3" -bui-backend-types = "0.8" -bui-backend = {version="0.15", default-features = false} +async-change-tracker = "0.3.4" qrcodegen = "1.4" -log = { version = "0.4.5", features = ["release_max_level_debug"] } -ctrlc = { version = "3.1.3", features = ["termination"] } stream-cancel = "0.8" csv = {version="1.1", optional=true} libflate = {version="1.0", optional=true} env-tracing-logger = {path="../env-tracing-logger"} -includedir = { version = "0.6", optional = true } -phf = { version = "0.8", optional = true } serde = {version="1.0.79",features=["derive"]} serde_json = "1.0.29" serde_yaml = "0.9" serde_cbor = "0.11" webbrowser = "0.8.3" tempfile = "3.4.0" -clap = { version = "4", features = [ "string" ] } -preferences = {version="2.0.0", package = "preferences-serde1"} +clap = { version = "4", features = [ "string", "env" ] } +preferences-serde1 = "2.0.0" +base64 = "0.12" directories = "4.0.1" anyhow = "1.0" thiserror = "1.0.33" @@ -69,7 +64,7 @@ shellexpand = "2" imops = {path="../imops"} led-box = {path="../led-box"} led-box-comms = {path="../led-box-comms"} -flydra-types = {path="../flydra-types", features=["with-dns"]} +flydra-types = {path="../flydra-types", features = [ "start-listener", "build-urls" ] } flydra2 = {path="../flydra2", default-features = false, optional=true} mvg = {path="../mvg", optional=true} flydra-mvg = {path="../flydra-mvg", optional=true} @@ -83,7 +78,18 @@ byteorder = "1.4" target = "2.0.0" hyper-util = { version = "0.1.1", features = ["full"] } http-body-util = "0.1.0" - +tower = 
"0.4.13" +axum = "0.7.4" +tracing = "0.1.40" +axum-token-auth = "0.1.0" +tower-http = { version = "0.5.1", features = ["trace", "fs"] } +tower-serve-static = { version = "0.1", optional = true } +include_dir = { version = "0.7.3", optional = true } +http-body = "1.0.0" +http = "1.0.0" +cookie = "0.18.0" + +bui-backend-session-types = { path = "../bui-backend-session/types" } braid-config-data = {path="../braid-config-data"} opencv-calibrate = {path="../opencv-calibrate", optional=true} camcal = {path="../camcal", optional=true} @@ -99,6 +105,7 @@ ads-apriltag = {path="../apriltag", optional=true} channellib = {path="../channellib", optional=true} braid-http-session = {path="../braid-http-session"} bui-backend-session = { path = "../bui-backend-session" } +event-stream-types = { path = "../event-stream-types" } [build-dependencies] build-util = {path="../build-util"} @@ -121,8 +128,10 @@ flydra-uds = ["flydra-feature-detector?/flydra-uds"] posix_sched_fifo = ["posix-scheduler", "posix-scheduler/linux"] # Serve style -bundle_files = ["bui-backend/bundle_files", "build-util/bundle_files", "includedir", "phf", "flydra2?/bundle_files" ] -serve_files = ["bui-backend/serve_files", "build-util/serve_files", "flydra2?/serve_files"] +## Bundle files into executable +bundle_files = ["flydra2?/bundle_files", "tower-serve-static", "include_dir" ] +## Serve files from disk at runtime +serve_files = ["flydra2?/serve_files"] imtrack-dark-circle = [] imtrack-absdiff = [] diff --git a/strand-cam/build.rs b/strand-cam/build.rs index 136b6146e..27317c3c1 100644 --- a/strand-cam/build.rs +++ b/strand-cam/build.rs @@ -1,19 +1,23 @@ +#[cfg(not(any(feature = "bundle_files", feature = "serve_files")))] +compile_error!("Need cargo feature \"bundle_files\" or \"serve_files\""); + fn main() -> Result<(), Box<(dyn std::error::Error)>> { build_util::git_hash(env!("CARGO_PKG_VERSION"))?; - let frontend_dir = std::path::PathBuf::from("yew_frontend"); - let frontend_pkg_dir = 
frontend_dir.join("pkg"); - #[cfg(feature = "bundle_files")] - if !frontend_pkg_dir.join("strand_cam_frontend_yew.js").exists() { - return Err(format!( - "The frontend is required but not built. Hint: go to {} and \ - run `build.sh` (or on Windows, `build.bat`).", - frontend_dir.display() - ) - .into()); + { + let frontend_dir = std::path::PathBuf::from("yew_frontend"); + let frontend_pkg_dir = frontend_dir.join("pkg"); + + if !frontend_pkg_dir.join("strand_cam_frontend_yew.js").exists() { + return Err(format!( + "The frontend is required but not built. Hint: go to {} and \ + run `build.sh` (or on Windows, `build.bat`).", + frontend_dir.display() + ) + .into()); + } } - build_util::bui_backend_generate_code(&frontend_pkg_dir, "frontend.rs")?; Ok(()) } diff --git a/strand-cam/src/cli_app.rs b/strand-cam/src/cli_app.rs index dab9ae97c..1b5312fa9 100644 --- a/strand-cam/src/cli_app.rs +++ b/strand-cam/src/cli_app.rs @@ -6,26 +6,18 @@ use std::path::PathBuf; use clap::{Arg, ArgAction}; -use crate::{run_app, StrandCamArgs}; +use crate::{run_app, BraidArgs, StandaloneArgs, StandaloneOrBraid, StrandCamArgs}; use crate::APP_INFO; use anyhow::Result; -fn jwt_secret(matches: &clap::ArgMatches) -> Option> { - matches - .get_one::("JWT_SECRET") - .map(|s| s.to_string()) - .or_else(|| std::env::var("JWT_SECRET").ok().clone()) - .map(|s| s.into_bytes()) -} - pub fn cli_main( mymod: ci2_async::ThreadedAsyncCameraModule, app_name: &'static str, ) -> Result<()> where - M: ci2::CameraModule, + M: ci2::CameraModule + 'static, C: 'static + ci2::Camera + Send, { dotenv::dotenv().ok(); @@ -39,43 +31,14 @@ where env_tracing_logger::init(); - let runtime = tokio::runtime::Builder::new_multi_thread() - .enable_all() - .worker_threads(4) - .thread_name("strand-cam-runtime") - .thread_stack_size(3 * 1024 * 1024) - .build()?; - - let handle = runtime.handle(); - - let args = parse_args(handle, app_name)?; - - // run_app(mymod, args, app_name).map_err(|e| { - // #[cfg(feature = "backtrace")] - 
// match std::error::Error::backtrace(&e) { - // None => log::error!("no backtrace in upcoming error {}", e), - // Some(bt) => log::error!("backtrace in upcoming error {}: {}", e, bt), - // } - // #[cfg(not(feature = "backtrace"))] - // { - // log::error!( - // "compiled without backtrace support. No backtrace in upcoming error {}", - // e - // ); - // } - // anyhow::Error::new(e) - // }) - run_app(mymod, args, app_name).map_err(anyhow::Error::new) + let args = parse_args(app_name)?; + run_app(mymod, args, app_name) } fn get_cli_args() -> Vec { std::env::args().collect() } -fn no_browser_default() -> bool { - false -} - #[cfg(feature = "posix_sched_fifo")] fn parse_sched_policy_priority(matches: &clap::ArgMatches) -> Result> { let errstr = "Set --sched-policy if and only if --sched-priority also set."; @@ -110,10 +73,7 @@ fn get_tracker_cfg(_matches: &clap::ArgMatches) -> Result std::result::Result { +fn parse_args(app_name: &str) -> anyhow::Result { let cli_args = get_cli_args(); let arg_default_box: Box = Default::default(); @@ -123,7 +83,6 @@ fn parse_args( let app_name: &'static clap::builder::Str = Box::leak(app_name_box); let matches = { - #[allow(unused_mut)] let mut parser = clap::Command::new(app_name) .version(env!("CARGO_PKG_VERSION")) .arg( @@ -181,9 +140,7 @@ fn parse_args( .default_value("~/DATA"), ); - // #[cfg(not(feature = "braid-config"))] - { - parser = parser + parser = parser .arg( Arg::new("pixel_format") .long("pixel-format") @@ -191,14 +148,11 @@ fn parse_args( , ) .arg( - clap::Arg::new("JWT_SECRET") - .long("jwt-secret") - .help( - "Specifies the JWT secret. Falls back to the JWT_SECRET \ - environment variable if unspecified. 
(incompatible with braid).", - ) - .global(true) - , + clap::Arg::new("strand_cam_cookie_secret") + .help("The secret (base64 encoded) for signing HTTP cookies.") + .long("strand-cam-cookie-secret") + .env("STRAND_CAM_COOKIE_SECRET") + .action(ArgAction::Set), ) .arg( Arg::new("force_camera_sync_mode") @@ -206,16 +160,12 @@ fn parse_args( .action(clap::ArgAction::Count) .help("Force the camera to synchronize to external trigger. (incompatible with braid)."), ); - } - // #[cfg(feature = "braid-config")] - { - parser = parser.arg( - Arg::new("braid_addr") - .long("braid_addr") - .help("Braid HTTP API address (e.g. 'http://host:port/')"), - ); - } + parser = parser.arg( + Arg::new("braid_url") + .long("braid-url") + .help("Braid HTTP URL address (e.g. 'http://host:port/')"), + ); #[cfg(feature = "posix_sched_fifo")] { @@ -227,13 +177,11 @@ fn parse_args( .help("The scheduler priority (integer, e.g. 99). Requires also sched-policy.")) } - { - parser = parser.arg( - Arg::new("led_box_device") - .long("led-box") - .help("The filename of the LED box device"), - ) - } + parser = parser.arg( + Arg::new("led_box_device") + .long("led-box") + .help("The filename of the LED box device"), + ); #[cfg(feature = "flydratrax")] { @@ -265,7 +213,10 @@ fn parse_args( parser.get_matches_from(cli_args) }; - let secret = jwt_secret(&matches); + let secret = matches + .get_one::("strand_cam_cookie_secret") + .map(Clone::clone) + .clone(); let mkv_filename_template = matches .get_one::("mkv_filename_template") @@ -322,14 +273,6 @@ fn parse_args( .get_one::("http_server_addr") .map(Into::into); - let no_browser = match matches.get_count("no_browser") { - 0 => match matches.get_count("browser") { - 0 => no_browser_default(), - _ => false, - }, - _ => true, - }; - #[cfg(feature = "flydratrax")] let save_empty_data2d = match matches.get_count("no_save_empty_data2d") { 0 => true, @@ -348,27 +291,11 @@ fn parse_args( let led_box_device_path = parse_led_box_device(&matches); - let braid_addr: 
Option = matches.get_one::("braid_addr").map(Into::into); + let braid_url: Option = matches.get_one::("braid_url").map(Into::into); - let ( - mainbrain_internal_addr, - camdata_addr, - pixel_format, - force_camera_sync_mode, - software_limit_framerate, - tracker_cfg_src, - acquisition_duration_allowed_imprecision_msec, - http_server_addr, - no_browser, - show_url, - ) = if let Some(braid_addr) = braid_addr { - for argname in &[ - "pixel_format", - "JWT_SECRET", - "camera_settings_filename", - "http_server_addr", - ] { - // Typically these values are not relevant or are set via + let standalone_or_braid = if let Some(braid_url) = braid_url { + for argname in &["pixel_format", "JWT_SECRET", "camera_settings_filename"] { + // These values are not relevant or are set via // [flydra_types::RemoteCameraInfoResponse]. if matches.contains_id(argname) { anyhow::bail!( @@ -385,123 +312,78 @@ fn parse_args( ); } - let (mainbrain_internal_addr, camdata_addr, tracker_cfg_src, config_from_braid) = { - log::info!("Will connect to braid at \"{}\"", braid_addr); - let mainbrain_internal_addr = flydra_types::MainbrainBuiLocation( - flydra_types::StrandCamBuiServerInfo::parse_url_with_token(&braid_addr)?, - ); - - let mut mainbrain_session = handle.block_on( - braid_http_session::mainbrain_future_session(mainbrain_internal_addr.clone()), - )?; - - let camera_name = camera_name - .as_ref() - .ok_or(crate::StrandCamError::CameraNameRequired)?; - - let camera_name = flydra_types::RawCamName::new(camera_name.to_string()); - - let config_from_braid: flydra_types::RemoteCameraInfoResponse = - handle.block_on(mainbrain_session.get_remote_info(&camera_name))?; - - let camdata_addr = { - let camdata_addr = config_from_braid - .camdata_addr - .parse::()?; - let addr_info_ip = flydra_types::AddrInfoIP::from_socket_addr(&camdata_addr); - - Some(flydra_types::RealtimePointsDestAddr::IpAddr(addr_info_ip)) - }; - - let tracker_cfg_src = crate::ImPtDetectCfgSource::ChangesNotSavedToDisk( - 
config_from_braid.config.point_detection_config.clone(), - ); - - ( - Some(mainbrain_internal_addr), - camdata_addr, - tracker_cfg_src, - config_from_braid, + let camera_name = camera_name.ok_or_else(|| { + anyhow::anyhow!( + "camera name must be set using command-line argument when running with braid" ) - }; - - let pixel_format = config_from_braid.config.pixel_format; - let force_camera_sync_mode = config_from_braid.force_camera_sync_mode; - let software_limit_framerate = config_from_braid.software_limit_framerate; - let acquisition_duration_allowed_imprecision_msec = config_from_braid - .config - .acquisition_duration_allowed_imprecision_msec; - - ( - mainbrain_internal_addr, - camdata_addr, - pixel_format, - force_camera_sync_mode, - software_limit_framerate, - tracker_cfg_src, - acquisition_duration_allowed_imprecision_msec, - Some("127.0.0.1:0".to_string()), - true, - false, - ) + })?; + + StandaloneOrBraid::Braid(BraidArgs { + braid_url, + camera_name, + }) } else { // not braid - - let mainbrain_internal_addr = None; - let camdata_addr = None; let pixel_format = matches.get_one::("pixel_format").map(Into::into); let force_camera_sync_mode = !matches!(matches.get_count("force_camera_sync_mode"), 0); let software_limit_framerate = flydra_types::StartSoftwareFrameRateLimit::NoChange; - let tracker_cfg_src = get_tracker_cfg(&matches)?; - let acquisition_duration_allowed_imprecision_msec = flydra_types::DEFAULT_ACQUISITION_DURATION_ALLOWED_IMPRECISION_MSEC; - ( - mainbrain_internal_addr, - camdata_addr, + + let tracker_cfg_src = get_tracker_cfg(&matches)?; + + #[cfg(not(feature = "flydra_feat_detect"))] + let _ = tracker_cfg_src; // This is unused without `flydra_feat_detect` feature. 
+ + StandaloneOrBraid::Standalone(StandaloneArgs { + camera_name, pixel_format, force_camera_sync_mode, software_limit_framerate, - tracker_cfg_src, acquisition_duration_allowed_imprecision_msec, - http_server_addr, - no_browser, - true, - ) + camera_settings_filename, + #[cfg(feature = "flydra_feat_detect")] + tracker_cfg_src, + }) }; let raise_grab_thread_priority = process_frame_priority.is_some(); + let no_browser_default = match &standalone_or_braid { + StandaloneOrBraid::Braid(_) => true, + StandaloneOrBraid::Standalone(_) => false, + }; + + let no_browser = match matches.get_count("no_browser") { + 0 => match matches.get_count("browser") { + 0 => no_browser_default, + _ => false, + }, + _ => true, + }; + #[cfg(feature = "fiducial")] let apriltag_csv_filename_template = strand_cam_storetype::APRILTAG_CSV_TEMPLATE_DEFAULT.to_string(); - #[cfg(not(feature = "flydra_feat_detect"))] - std::mem::drop(tracker_cfg_src); // prevent compiler warning of unused variable - - let defaults = StrandCamArgs::default(); - + // There are some fields set by `Default::default()` but only when various + // cargo features are used. So turn off this clippy warning. 
+ #[allow(clippy::needless_update)] Ok(StrandCamArgs { - handle: Some(handle.clone()), + standalone_or_braid, secret, - camera_name, - pixel_format, http_server_addr, no_browser, mp4_filename_template: mkv_filename_template, fmf_filename_template, ufmf_filename_template, - #[cfg(feature = "flydra_feat_detect")] - tracker_cfg_src, + csv_save_dir, raise_grab_thread_priority, led_box_device_path, #[cfg(feature = "posix_sched_fifo")] process_frame_priority, - mainbrain_internal_addr, - camdata_addr, - show_url, #[cfg(feature = "flydratrax")] flydratrax_calibration_source, #[cfg(feature = "flydratrax")] @@ -510,10 +392,6 @@ fn parse_args( model_server_addr, #[cfg(feature = "fiducial")] apriltag_csv_filename_template, - force_camera_sync_mode, - software_limit_framerate, - camera_settings_filename, - acquisition_duration_allowed_imprecision_msec, - ..defaults + ..Default::default() }) } diff --git a/strand-cam/src/datagram_socket.rs b/strand-cam/src/datagram_socket.rs index c235cbaac..19d4f9ae5 100644 --- a/strand-cam/src/datagram_socket.rs +++ b/strand-cam/src/datagram_socket.rs @@ -2,6 +2,7 @@ use std::backtrace::Backtrace; use std::net::UdpSocket; +use tracing::{error, warn}; use crate::StrandCamError; diff --git a/strand-cam/src/flydratrax_handle_msg.rs b/strand-cam/src/flydratrax_handle_msg.rs index f773f85a8..be76fd5e2 100644 --- a/strand-cam/src/flydratrax_handle_msg.rs +++ b/strand-cam/src/flydratrax_handle_msg.rs @@ -1,4 +1,5 @@ use crate::*; +use parking_lot::RwLock; use flydra2::{SendKalmanEstimatesRow, SendType}; diff --git a/strand-cam/src/strand-cam.rs b/strand-cam/src/strand-cam.rs index 9253d5d60..476f2b7a9 100644 --- a/strand-cam/src/strand-cam.rs +++ b/strand-cam/src/strand-cam.rs @@ -10,62 +10,58 @@ #[cfg(feature = "backtrace")] use std::backtrace::Backtrace; -#[macro_use] -extern crate log; - use anyhow::Context; #[cfg(feature = "fiducial")] use ads_apriltag as apriltag; +use async_change_tracker::ChangeTracker; +use event_stream_types::{ + 
AcceptsEventStream, ConnectionEvent, ConnectionEventType, ConnectionSessionKey, + EventBroadcaster, TolerantJson, +}; +use futures::{sink::SinkExt, stream::StreamExt}; +use http::StatusCode; use http_video_streaming as video_streaming; -use machine_vision_formats as formats; - -#[cfg(feature = "flydratrax")] -use nalgebra as na; - +use hyper_tls::HttpsConnector; +use hyper_util::{client::legacy::Client, rt::TokioExecutor}; #[cfg(feature = "fiducial")] use libflate::finish::AutoFinishUnchecked; #[cfg(feature = "fiducial")] use libflate::gzip::Encoder; - -use futures::{channel::mpsc, sink::SinkExt, stream::StreamExt}; -use serde::{Deserialize, Serialize}; - -use hyper_tls::HttpsConnector; -use hyper_util::{client::legacy::Client, rt::TokioExecutor}; - +use machine_vision_formats as formats; +#[cfg(feature = "flydratrax")] +use nalgebra as na; #[allow(unused_imports)] -use preferences::{AppInfo, Preferences}; +use preferences_serde1::{AppInfo, Preferences}; +use serde::{Deserialize, Serialize}; +use tower_http::trace::TraceLayer; +use tracing::{debug, error, info, trace, warn}; +use basic_frame::{match_all_dynamic_fmts, DynamicFrame}; +use bui_backend_session_types::{AccessToken, ConnectionKey, SessionKey}; use ci2::{Camera, CameraInfo, CameraModule}; use ci2_async::AsyncCamera; use fmf::FMFWriter; - -use async_change_tracker::ChangeTracker; -use basic_frame::{match_all_dynamic_fmts, DynamicFrame}; use formats::PixFmt; use timestamped_frame::ExtraTimeData; -use bui_backend::highlevel::{create_bui_app_inner, BuiAppInner}; -use bui_backend::{AccessControl, CallbackHandler}; -use bui_backend_types::CallbackDataAndSession; - #[cfg(feature = "flydratrax")] use http_video_streaming_types::{DrawableShape, StrokeStyle}; use video_streaming::{AnnotatedFrame, FirehoseCallback}; -use std::{error::Error as StdError, future::Future, path::Path, pin::Pin}; +use std::{path::Path, result::Result as StdResult}; #[cfg(feature = "flydra_feat_detect")] use 
ci2_remote_control::CsvSaveConfig; use ci2_remote_control::{ CamArg, CodecSelection, Mp4Codec, Mp4RecordingConfig, NvidiaH264Options, RecordingFrameRate, }; +#[cfg(feature = "flydratrax")] +use flydra_types::BuiServerAddrInfo; use flydra_types::{ - MainbrainBuiLocation, RawCamName, RealtimePointsDestAddr, RosCamName, - StartSoftwareFrameRateLimit, StrandCamBuiServerInfo, StrandCamHttpServerInfo, + BuiServerInfo, RawCamName, RealtimePointsDestAddr, StartSoftwareFrameRateLimit, }; use flydra_feature_detector_types::ImPtDetectCfg; @@ -80,8 +76,9 @@ use strand_cam_csv_config_types::{FullCfgFview2_0_26, SaveCfgFview2_0_25}; #[cfg(feature = "fiducial")] use strand_cam_storetype::ApriltagState; -use strand_cam_storetype::ToLedBoxDevice; -use strand_cam_storetype::{CallbackType, ImOpsState, RangedValue, StoreType}; +use strand_cam_storetype::{ + CallbackType, ImOpsState, RangedValue, StoreType, ToLedBoxDevice, STRAND_CAM_EVENT_NAME, +}; use strand_cam_storetype::{KalmanTrackingConfig, LedProgramConfig}; @@ -93,15 +90,11 @@ use strand_cam_pseudo_cal::PseudoCameraCalibrationData; use rust_cam_bui_types::RecordingPath; -use parking_lot::RwLock; use std::fs::File; use std::io::Write; use std::net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr, ToSocketAddrs, UdpSocket}; use std::sync::Arc; -/// default strand-cam HTTP port when not running in Braid. -const DEFAULT_HTTP_ADDR: &str = "127.0.0.1:3440"; - pub const APP_INFO: AppInfo = AppInfo { name: "strand-cam", author: "AndrewStraw", @@ -121,7 +114,9 @@ pub use flydra_pt_detect_cfg::default_absdiff as default_im_pt_detect; #[cfg(feature = "imtrack-dark-circle")] pub use flydra_pt_detect_cfg::default_dark_circle as default_im_pt_detect; -include!(concat!(env!("OUT_DIR"), "/frontend.rs")); // Despite slash, this does work on Windows. 
+#[cfg(feature = "bundle_files")] +static ASSETS_DIR: include_dir::Dir<'static> = + include_dir::include_dir!("$CARGO_MANIFEST_DIR/yew_frontend/pkg"); #[cfg(feature = "flydratrax")] const KALMAN_TRACKING_PREFS_KEY: &'static str = "kalman-tracking"; @@ -129,6 +124,8 @@ const KALMAN_TRACKING_PREFS_KEY: &'static str = "kalman-tracking"; #[cfg(feature = "flydratrax")] const LED_PROGRAM_PREFS_KEY: &'static str = "led-config"; +const COOKIE_SECRET_KEY: &str = "cookie-secret-base64"; + #[cfg(feature = "flydratrax")] mod flydratrax_handle_msg; @@ -185,8 +182,8 @@ pub enum StrandCamError { }, #[error("try send error")] TrySendError, - #[error("BUI backend error: {0}")] - BuiBackendError(#[from] bui_backend::Error), + // #[error("BUI backend error: {0}")] + // BuiBackendError(#[from] bui_backend::Error), #[error("BUI backend session error: {0}")] BuiBackendSessionError(#[from] bui_backend_session::Error), #[error("Braid HTTP session error: {0}")] @@ -203,11 +200,11 @@ pub enum StrandCamError { PluginDisconnected, #[error("video streaming error")] VideoStreamingError(#[from] video_streaming::Error), - #[error( - "The --jwt-secret argument must be passed or the JWT_SECRET environment \ - variable must be set." - )] - JwtError, + // #[error( + // "The --jwt-secret argument must be passed or the JWT_SECRET environment \ + // variable must be set." 
+ // )] + // JwtError, #[cfg(feature = "flydratrax")] #[error("MVG error: {0}")] MvgError( @@ -313,7 +310,7 @@ impl CloseAppOnThreadExit { } } - fn check(&self, result: std::result::Result) -> T + fn check(&self, result: StdResult) -> T where E: std::convert::Into, { @@ -399,7 +396,7 @@ pub(crate) enum Msg { SetIsSavingObjDetectionCsv(CsvSaveConfig), #[cfg(feature = "flydra_feat_detect")] SetExpConfig(ImPtDetectCfg), - Store(Arc>>), + Store(Arc>>), #[cfg(feature = "flydra_feat_detect")] TakeCurrentImageAsBackground, #[cfg(feature = "flydra_feat_detect")] @@ -412,7 +409,7 @@ pub(crate) enum Msg { } impl std::fmt::Debug for Msg { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> StdResult<(), std::fmt::Error> { write!(f, "strand_cam::Msg{{..}}") } } @@ -636,33 +633,19 @@ struct FlydraConfigState { } #[cfg(feature = "checkercal")] -type CollectedCornersArc = Arc>>>; - -async fn register_node_and_update_image( - api_http_address: flydra_types::MainbrainBuiLocation, - msg: flydra_types::RegisterNewCamera, - mut transmit_msg_rx: mpsc::Receiver, -) -> Result<()> { - let mut mainbrain_session = - braid_http_session::mainbrain_future_session(api_http_address).await?; - mainbrain_session.register_flydra_camnode(&msg).await?; - while let Some(msg) = transmit_msg_rx.next().await { - mainbrain_session.send_message(msg).await?; - } - Ok(()) -} +type CollectedCornersArc = Arc>>>; async fn convert_stream( - ros_cam_name: RosCamName, + raw_cam_name: RawCamName, mut transmit_feature_detect_settings_rx: tokio::sync::mpsc::Receiver< flydra_feature_detector_types::ImPtDetectCfg, >, - mut transmit_msg_tx: mpsc::Sender, + transmit_msg_tx: tokio::sync::mpsc::Sender, ) -> Result<()> { while let Some(val) = transmit_feature_detect_settings_rx.recv().await { let msg = - flydra_types::HttpApiCallback::UpdateFeatureDetectSettings(flydra_types::PerCam { - ros_cam_name: ros_cam_name.clone(), + 
flydra_types::BraidHttpApiCallback::UpdateFeatureDetectSettings(flydra_types::PerCam { + raw_cam_name: raw_cam_name.clone(), inner: flydra_types::UpdateFeatureDetectSettings { current_feature_detect_settings: val, }, @@ -672,19 +655,12 @@ async fn convert_stream( Ok(()) } -struct MainbrainInfo { - mainbrain_internal_addr: MainbrainBuiLocation, - transmit_msg_rx: mpsc::Receiver, - transmit_msg_tx: mpsc::Sender, -} - // We perform image analysis in its own task. async fn frame_process_task( - my_runtime: tokio::runtime::Handle, - #[cfg(feature = "flydratrax")] flydratrax_model_server: ( - tokio::sync::mpsc::Sender<(flydra2::SendType, flydra2::TimeDataPassthrough)>, - flydra2::ModelServer, - ), + #[cfg(feature = "flydratrax")] model_server_data_tx: tokio::sync::mpsc::Sender<( + flydra2::SendType, + flydra2::TimeDataPassthrough, + )>, #[cfg(feature = "flydratrax")] flydratrax_calibration_source: CalSource, cam_name: RawCamName, #[cfg(feature = "flydra_feat_detect")] camera_cfg: CameraCfgFview2_0_26, @@ -702,13 +678,12 @@ async fn frame_process_task( >, #[cfg(feature = "plugin-process-frame")] plugin_wait_dur: std::time::Duration, #[cfg(feature = "flydratrax")] led_box_tx_std: tokio::sync::mpsc::Sender, - mut quit_rx: tokio::sync::oneshot::Receiver<()>, is_starting_tx: tokio::sync::oneshot::Sender<()>, - #[cfg(feature = "flydratrax")] http_camserver_info: StrandCamBuiServerInfo, + #[cfg(feature = "flydratrax")] http_camserver_info: BuiServerAddrInfo, process_frame_priority: Option<(i32, i32)>, - mainbrain_info: Option, + transmit_msg_tx: Option>, camdata_addr: Option, - led_box_heartbeat_update_arc: Arc>>, + led_box_heartbeat_update_arc: Arc>>, #[cfg(feature = "plugin-process-frame")] do_process_frame_callback: bool, #[cfg(feature = "checkercal")] collected_corners_arc: CollectedCornersArc, #[cfg(feature = "flydratrax")] save_empty_data2d: SaveEmptyData2dType, @@ -720,9 +695,11 @@ async fn frame_process_task( frame_info_extractor: &dyn ci2::ExtractFrameInfo, 
#[cfg(feature = "flydra_feat_detect")] app_name: &'static str, ) -> anyhow::Result<()> { + let my_runtime: tokio::runtime::Handle = tokio::runtime::Handle::current(); + let is_braid = camdata_addr.is_some(); - let ros_cam_name: RosCamName = new_cam_data.ros_cam_name.clone(); + let raw_cam_name: RawCamName = new_cam_data.raw_cam_name.clone(); #[cfg(feature = "posix_sched_fifo")] { @@ -792,31 +769,25 @@ async fn frame_process_task( Some(0) }; - let (transmit_feature_detect_settings_tx, transmit_msg_tx) = if let Some(info) = mainbrain_info - { - let addr = info.mainbrain_internal_addr; - let transmit_msg_tx = info.transmit_msg_tx.clone(); - + let (transmit_feature_detect_settings_tx, mut transmit_msg_tx) = if is_braid { let (transmit_feature_detect_settings_tx, transmit_feature_detect_settings_rx) = tokio::sync::mpsc::channel::(10); + let transmit_msg_tx = transmit_msg_tx.unwrap(); + my_runtime.spawn(convert_stream( - ros_cam_name.clone(), + raw_cam_name.clone(), transmit_feature_detect_settings_rx, - transmit_msg_tx, + transmit_msg_tx.clone(), )); - let transmit_msg_rx = info.transmit_msg_rx; - my_runtime.spawn(register_node_and_update_image( - addr, - new_cam_data, - // current_image_png, - transmit_msg_rx, - )); + transmit_msg_tx + .send(flydra_types::BraidHttpApiCallback::NewCamera(new_cam_data)) + .await?; ( Some(transmit_feature_detect_settings_tx), - Some(info.transmit_msg_tx), + Some(transmit_msg_tx), ) } else { (None, None) @@ -850,7 +821,7 @@ async fn frame_process_task( )?; #[cfg(feature = "flydra_feat_detect")] let mut csv_save_state = SavingState::NotSaving; - let mut shared_store_arc: Option>>> = None; + let mut shared_store_arc: Option>>> = None; let mut fps_calc = FpsCalc::new(100); // average 100 frames to get mean fps #[cfg(feature = "flydratrax")] let mut kalman_tracking_config = KalmanTrackingConfig::default(); // this is replaced below @@ -870,7 +841,7 @@ async fn frame_process_task( #[cfg(feature = "flydratrax")] let red_style = 
StrokeStyle::from_rgb(255, 100, 100); - let expected_framerate_arc = Arc::new(RwLock::new(None)); + let expected_framerate_arc = Arc::new(parking_lot::RwLock::new(None)); is_starting_tx.send(()).ok(); // signal that we are we are no longer starting @@ -902,14 +873,14 @@ async fn frame_process_task( #[cfg(feature = "checkercal")] let mut checkerboard_loop_dur = std::time::Duration::from_millis(500); - let current_image_timer_arc = Arc::new(RwLock::new(std::time::Instant::now())); + let current_image_timer_arc = Arc::new(parking_lot::RwLock::new(std::time::Instant::now())); let mut im_ops_socket: Option = None; let mut opt_clock_model = None; let mut opt_frame_offset = None; - while quit_rx.try_recv() == Err(tokio::sync::oneshot::error::TryRecvError::Empty) { + loop { #[cfg(feature = "flydra_feat_detect")] { if let Some(ref ssa) = shared_store_arc { @@ -1020,9 +991,9 @@ async fn frame_process_task( let expected_framerate_arc2 = expected_framerate_arc.clone(); let cam_name2 = cam_name.clone(); let http_camserver = - StrandCamHttpServerInfo::Server(http_camserver_info.clone()); + BuiServerInfo::Server(http_camserver_info.clone()); let recon2 = recon.clone(); - let flydratrax_model_server2 = flydratrax_model_server.clone(); + let model_server_data_tx2 = model_server_data_tx.clone(); let valve2 = valve.clone(); let cam_manager = flydra2::ConnectedCamerasManager::new_single_cam( @@ -1040,13 +1011,11 @@ async fn frame_process_task( ignore_latency, mini_arena_debug_image_dir: None, }, - tokio::runtime::Handle::current(), cam_manager, Some(recon), flydra2::BraidMetadataBuilder::saving_program_name( "strand-cam", ), - valve.clone(), ) .expect("create CoordProcessor"); @@ -1055,8 +1024,7 @@ async fn frame_process_task( opt_braidz_write_tx_weak = Some(braidz_write_tx_weak); - let (model_server_data_tx, _model_server) = - flydratrax_model_server2; + let model_server_data_tx = model_server_data_tx2; coord_processor.add_listener(model_sender); // the local LED control thing 
coord_processor.add_listener(model_server_data_tx); // the HTTP thing @@ -1110,7 +1078,7 @@ async fn frame_process_task( match braidz_write_tx.send(SaveToDiskMsg::StopSavingCsv).await { Ok(()) => {} Err(_) => { - log::info!("Channel to data writing task closed. Ending."); + info!("Channel to data writing task closed. Ending."); break; } } @@ -1190,7 +1158,7 @@ async fn frame_process_task( let local = chrono::Local::now(); // Get start time, either from buffered frames if present or current time. - let creation_time = if let Some(frame0) = frames.get(0) { + let creation_time = if let Some(frame0) = frames.front() { frame0.extra().host_timestamp().into() } else { local @@ -1520,10 +1488,7 @@ async fn frame_process_task( { Ok(_n_bytes) => {} Err(e) => { - log::error!( - "Unable to send image moment data. {}", - e - ); + error!("Unable to send image moment data. {}", e); } } } @@ -1583,7 +1548,7 @@ async fn frame_process_task( datetime_conversion::datetime_to_f64(&process_new_frame_start); let tracker_annotation = flydra_types::FlydraRawUdpPacket { - cam_name: ros_cam_name.as_str().to_string(), + cam_name: raw_cam_name.as_str().to_string(), timestamp: opt_trigger_stamp, cam_received_time: acquire_stamp, device_timestamp, @@ -1675,7 +1640,7 @@ async fn frame_process_task( let cam_num = 0.into(); // Only one camera, so this must be correct. 
let frame_data = flydra2::FrameData::new( - ros_cam_name.clone(), + raw_cam_name.clone(), cam_num, SyncFno( frame.extra().host_framenumber().try_into().unwrap(), @@ -1945,30 +1910,43 @@ async fn frame_process_task( { // send current image every 2 seconds - let mut timer = current_image_timer_arc.write(); - let elapsed = timer.elapsed(); - if elapsed > std::time::Duration::from_millis(2000) { - *timer = std::time::Instant::now(); + let send_msg = { + let mut timer = current_image_timer_arc.write(); + let elapsed = timer.elapsed(); + let mut send_msg = false; + if elapsed > std::time::Duration::from_millis(2000) { + *timer = std::time::Instant::now(); + send_msg = true; + } + send_msg + }; + + if send_msg { // encode frame to png buf - if let Some(mut transmit_msg_tx) = transmit_msg_tx.clone() { - let ros_cam_name = ros_cam_name.clone(); + if let Some(cb_sender) = transmit_msg_tx.as_ref() { let current_image_png = match_all_dynamic_fmts!(&frame, x, { convert_image::frame_to_image(x, convert_image::ImageOptions::Png) .unwrap() }); - my_runtime.spawn(async move { - let msg = flydra_types::HttpApiCallback::UpdateCurrentImage( - flydra_types::PerCam { - ros_cam_name, - inner: flydra_types::UpdateImage { - current_image_png: current_image_png.into(), - }, + let raw_cam_name = raw_cam_name.clone(); + + let msg = flydra_types::BraidHttpApiCallback::UpdateCurrentImage( + flydra_types::PerCam { + raw_cam_name, + inner: flydra_types::UpdateImage { + current_image_png: current_image_png.into(), }, - ); - transmit_msg_tx.send(msg).await.unwrap(); - }); + }, + ); + match cb_sender.send(msg).await { + Ok(()) => {} + Err(e) => { + tracing::error!("While sending current image: {e}"); + transmit_msg_tx = None; + } + }; } } } @@ -2003,20 +1981,25 @@ async fn frame_process_task( #[cfg(not(feature = "flydratrax"))] let annotations = vec![]; - let name = None; if firehose_tx.capacity() == 0 { - debug!("cannot transmit frame for viewing: channel full"); + trace!("cannot transmit frame for 
viewing: channel full"); } else { - firehose_tx + let result = firehose_tx .send(AnnotatedFrame { frame, found_points, valid_display, annotations, - name, }) - .await - .unwrap(); + .await; + match result { + Ok(()) => {} + Err(e) => { + tracing::error!( + "error while sending frame for display in browser: {e} {e:?}" + ); + } + } } } #[cfg(feature = "flydra_feat_detect")] @@ -2088,7 +2071,7 @@ async fn frame_process_task( match braidz_write_tx.send(SaveToDiskMsg::StopSavingCsv).await { Ok(()) => {} Err(_) => { - log::info!("Channel to data writing task closed. Ending."); + info!("Channel to data writing task closed. Ending."); break; } } @@ -2242,21 +2225,19 @@ trait IgnoreSendError { fn ignore_send_error(self); } -impl IgnoreSendError - for std::result::Result<(), tokio::sync::mpsc::error::SendError> -{ +impl IgnoreSendError for StdResult<(), tokio::sync::mpsc::error::SendError> { fn ignore_send_error(self) { match self { Ok(()) => {} Err(e) => { - log::debug!("Ignoring send error ({}:{}): {:?}", file!(), line!(), e) + debug!("Ignoring send error ({}:{}): {:?}", file!(), line!(), e) } } } } #[derive(Clone)] -struct MyCallbackHandler { +struct StrandCamCallbackSenders { firehose_callback_tx: tokio::sync::mpsc::Sender, cam_args_tx: tokio::sync::mpsc::Sender, led_box_tx_std: tokio::sync::mpsc::Sender, @@ -2264,169 +2245,12 @@ struct MyCallbackHandler { tx_frame: tokio::sync::mpsc::Sender, } -impl CallbackHandler for MyCallbackHandler { - type Data = CallbackType; - - /// HTTP request to "/callback" has been made with payload which as been - /// deserialized into `Self::Data` and session data stored in - /// [CallbackDataAndSession]. 
- fn call<'a>( - &'a self, - data_sess: CallbackDataAndSession, - ) -> Pin>> + Send + 'a>> - { - let payload = data_sess.payload; - let fut = async { - match payload { - CallbackType::ToCamera(cam_arg) => { - debug!("in cb: {:?}", cam_arg); - self.cam_args_tx.send(cam_arg).await.ignore_send_error(); - } - CallbackType::FirehoseNotify(inner) => { - let arrival_time = chrono::Utc::now(); - let fc = FirehoseCallback { - arrival_time, - inner, - }; - self.firehose_callback_tx.send(fc).await.ignore_send_error(); - } - CallbackType::TakeCurrentImageAsBackground => { - #[cfg(feature = "flydra_feat_detect")] - self.tx_frame - .send(Msg::TakeCurrentImageAsBackground) - .await - .ignore_send_error(); - } - CallbackType::ClearBackground(value) => { - #[cfg(feature = "flydra_feat_detect")] - self.tx_frame - .send(Msg::ClearBackground(value)) - .await - .ignore_send_error(); - #[cfg(not(feature = "flydra_feat_detect"))] - let _ = value; - } - CallbackType::ToLedBox(led_box_arg) => futures::executor::block_on(async { - // todo: make this whole block async and remove the `futures::executor::block_on` aspect here. 
- info!("in led_box callback: {:?}", led_box_arg); - self.led_box_tx_std - .send(led_box_arg) - .await - .ignore_send_error(); - }), - } - }; - Box::pin(async { - fut.await; - Ok(()) - }) - } -} - -pub struct StrandCamApp { - inner: BuiAppInner, -} - -impl StrandCamApp { - async fn new( - rt_handle: tokio::runtime::Handle, - shared_store_arc: Arc>>, - secret: Option>, - http_server_addr: &str, - config: Config, - cam_args_tx: tokio::sync::mpsc::Sender, - led_box_tx_std: tokio::sync::mpsc::Sender, - tx_frame: tokio::sync::mpsc::Sender, - shutdown_rx: tokio::sync::oneshot::Receiver<()>, - ) -> std::result::Result< - ( - tokio::sync::mpsc::Receiver, - Self, - tokio::sync::mpsc::Receiver, - ), - StrandCamError, - > { - let chan_size = 10; - - let addr: std::net::SocketAddr = http_server_addr.parse().unwrap(); - let auth = if let Some(ref secret) = secret { - bui_backend::highlevel::generate_random_auth(addr, secret.clone())? - } else if addr.ip().is_loopback() { - AccessControl::Insecure(addr) - } else { - return Err(StrandCamError::JwtError); - }; - - // A channel for the data sent from the client browser. 
- let (firehose_callback_tx, firehose_callback_rx) = tokio::sync::mpsc::channel(10); - - let callback_handler = Box::new(MyCallbackHandler { - cam_args_tx, - firehose_callback_tx, - led_box_tx_std, - tx_frame, - }); - - let (rx_conn, bui_server) = bui_backend::lowlevel::launcher( - config.clone(), - &auth, - chan_size, - strand_cam_storetype::STRAND_CAM_EVENTS_URL_PATH, - None, - callback_handler, - ); - - let (new_conn_rx, inner) = create_bui_app_inner( - rt_handle.clone(), - Some(shutdown_rx), - &auth, - shared_store_arc, - Some(strand_cam_storetype::STRAND_CAM_EVENT_NAME.to_string()), - rx_conn, - bui_server, - ) - .await?; - - // let mut new_conn_rx_valved = valve.wrap(new_conn_rx); - // let new_conn_future = async move { - // while let Some(msg) = new_conn_rx_valved.next().await { - // connection_callback_tx.send(msg).await.unwrap(); - // } - // debug!("new_conn_future closing {}:{}", file!(), line!()); - // }; - // let txers = Arc::new(RwLock::new(HashMap::new())); - // let txers2 = txers.clone(); - // let mut new_conn_rx_valved = valve.wrap(new_conn_rx); - // let new_conn_future = async move { - // while let Some(msg) = new_conn_rx_valved.next().await { - // let mut txers = txers2.write(); - // match msg.typ { - // ConnectionEventType::Connect(chunk_sender) => { - // txers.insert( - // msg.connection_key, - // (msg.session_key, chunk_sender, msg.path), - // ); - // } - // ConnectionEventType::Disconnect => { - // txers.remove(&msg.connection_key); - // } - // } - // } - // debug!("new_conn_future closing {}:{}", file!(), line!()); - // }; - // let _task_join_handle = rt_handle.spawn(new_conn_future); - - let my_app = StrandCamApp { inner }; - - Ok((firehose_callback_rx, my_app, new_conn_rx)) - } - - fn inner(&self) -> &BuiAppInner { - &self.inner - } - // fn inner_mut(&mut self) -> &mut BuiAppInner { - // &mut self.inner - // } +#[derive(Clone)] +struct StrandCamAppState { + event_broadcaster: EventBroadcaster, + callback_senders: StrandCamCallbackSenders, + 
tx_new_connection: tokio::sync::mpsc::Sender, + shared_store_arc: Arc>>, } #[cfg(feature = "fiducial")] @@ -2462,7 +2286,7 @@ async fn check_version( MyBody, // http_body_util::Empty, >, - known_version: Arc>, + known_version: Arc>, app_name: &'static str, ) -> Result<()> { let url = format!("https://version-check.strawlab.org/{app_name}"); @@ -2493,7 +2317,7 @@ async fn check_version( let known_version3 = known_version2.clone(); let body = res.into_body(); - let chunks: std::result::Result, _> = { + let chunks: StdResult, _> = { use http_body_util::BodyExt; body.collect().await }; @@ -2502,7 +2326,7 @@ async fn check_version( let version: VersionResponse = match serde_json::from_slice(&data) { Ok(version) => version, Err(e) => { - log::warn!("Could not parse version response JSON from {}: {}", url, e); + warn!("Could not parse version response JSON from {}: {}", url, e); return Ok(()); } }; @@ -2542,7 +2366,7 @@ fn display_qr_url(url: &str) { writeln!(stdout_handle).expect("write failed"); } -#[derive(Debug)] +#[derive(Debug, Clone)] /// Defines whether runtime changes from the user are persisted to disk. /// /// If they are persisted to disk, upon program re-start, the disk @@ -2553,6 +2377,13 @@ pub enum ImPtDetectCfgSource { ChangedSavedToDisk((&'static AppInfo, String)), } +#[cfg(feature = "flydra_feat_detect")] +impl Default for ImPtDetectCfgSource { + fn default() -> Self { + ImPtDetectCfgSource::ChangesNotSavedToDisk(default_im_pt_detect()) + } +} + #[cfg(feature = "plugin-process-frame")] pub struct ProcessFrameCbData { pub func_ptr: plugin_defs::ProcessFrameFunc, @@ -2603,31 +2434,70 @@ enum ToDevice { Centroid(MomentCentroid), } -// #[derive(Debug, Serialize, Deserialize)] +/// CLI args for the case when we will connect to Braid. +/// +/// Prior to the connection, we don't know much about what our configuration +/// should be. 
+#[derive(Debug, Default, Clone)] +pub struct BraidArgs { + pub braid_url: String, + pub camera_name: String, +} + +/// CLI args for the case when we run standalone. +#[derive(Debug, Clone, Default)] +pub struct StandaloneArgs { + pub camera_name: Option, + pub pixel_format: Option, + /// If set, camera acquisition will external trigger. + pub force_camera_sync_mode: bool, + /// If enabled, limit framerate (FPS) at startup. + /// + /// Despite the name ("software"), this actually sets the hardware + /// acquisition rate via the `AcquisitionFrameRate` camera parameter. + pub software_limit_framerate: StartSoftwareFrameRateLimit, + /// Threshold duration before logging error (msec). + /// + /// If the image acquisition timestamp precedes the computed trigger + /// timestamp, clearly an error has happened. This error must lie in the + /// computation of the trigger timestamp. This specifies the threshold error + /// at which an error is logged. (The underlying source of such errors + /// remains unknown.) + pub acquisition_duration_allowed_imprecision_msec: Option, + /// Filename of vendor-specific camera settings file. + pub camera_settings_filename: Option, + #[cfg(feature = "flydra_feat_detect")] + pub tracker_cfg_src: ImPtDetectCfgSource, +} + +#[derive(Debug)] +pub enum StandaloneOrBraid { + Standalone(StandaloneArgs), + Braid(BraidArgs), +} + +impl Default for StandaloneOrBraid { + fn default() -> Self { + Self::Standalone(Default::default()) + } +} + #[derive(Debug)] pub struct StrandCamArgs { - /// A handle to the tokio runtime. - pub handle: Option, /// Is Strand Cam running inside Braid context? - pub is_braid: bool, - pub secret: Option>, - pub camera_name: Option, - pub pixel_format: Option, + pub standalone_or_braid: StandaloneOrBraid, + /// base64 encoded secret. minimum 256 bits. 
+ pub secret: Option, pub http_server_addr: Option, pub no_browser: bool, pub mp4_filename_template: String, pub fmf_filename_template: String, pub ufmf_filename_template: String, - #[cfg(feature = "flydra_feat_detect")] - pub tracker_cfg_src: ImPtDetectCfgSource, pub csv_save_dir: String, pub raise_grab_thread_priority: bool, #[cfg(feature = "posix_sched_fifo")] pub process_frame_priority: Option<(i32, i32)>, pub led_box_device_path: Option, - pub mainbrain_internal_addr: Option, - pub camdata_addr: Option, - pub show_url: bool, #[cfg(feature = "plugin-process-frame")] pub process_frame_callback: Option, #[cfg(feature = "plugin-process-frame")] @@ -2640,27 +2510,6 @@ pub struct StrandCamArgs { pub flydratrax_calibration_source: CalSource, #[cfg(feature = "fiducial")] pub apriltag_csv_filename_template: String, - - /// If set, camera acquisition will external trigger. - pub force_camera_sync_mode: bool, - - /// If enabled, limit framerate (FPS) at startup. - /// - /// Despite the name ("software"), this actually sets the hardware - /// acquisition rate via the `AcquisitionFrameRate` camera parameter. - pub software_limit_framerate: StartSoftwareFrameRateLimit, - - /// Filename of vendor-specific camera settings file. - pub camera_settings_filename: Option, - - /// Threshold duration before logging error (msec). - /// - /// If the image acquisition timestamp precedes the computed trigger - /// timestamp, clearly an error has happened. This error must lie in the - /// computation of the trigger timestamp. This specifies the threshold error - /// at which an error is logged. (The underlying source of such errors - /// remains unknown.) 
- pub acquisition_duration_allowed_imprecision_msec: Option, } pub type SaveEmptyData2dType = bool; @@ -2678,11 +2527,8 @@ pub enum CalSource { impl Default for StrandCamArgs { fn default() -> Self { Self { - handle: None, - is_braid: false, + standalone_or_braid: Default::default(), secret: None, - camera_name: None, - pixel_format: None, http_server_addr: None, no_browser: true, mp4_filename_template: "movie%Y%m%d_%H%M%S.%f_{CAMNAME}.mp4".to_string(), @@ -2691,31 +2537,21 @@ impl Default for StrandCamArgs { #[cfg(feature = "fiducial")] apriltag_csv_filename_template: strand_cam_storetype::APRILTAG_CSV_TEMPLATE_DEFAULT .to_string(), - #[cfg(feature = "flydra_feat_detect")] - tracker_cfg_src: ImPtDetectCfgSource::ChangesNotSavedToDisk(default_im_pt_detect()), csv_save_dir: "/dev/null".to_string(), raise_grab_thread_priority: false, #[cfg(feature = "posix_sched_fifo")] process_frame_priority: None, led_box_device_path: None, - mainbrain_internal_addr: None, - camdata_addr: None, - show_url: true, #[cfg(feature = "plugin-process-frame")] process_frame_callback: None, #[cfg(feature = "plugin-process-frame")] plugin_wait_dur: std::time::Duration::from_millis(5), - force_camera_sync_mode: false, - software_limit_framerate: StartSoftwareFrameRateLimit::NoChange, - camera_settings_filename: None, #[cfg(feature = "flydratrax")] flydratrax_calibration_source: CalSource::PseudoCal, #[cfg(feature = "flydratrax")] save_empty_data2d: true, #[cfg(feature = "flydratrax")] model_server_addr: flydra_types::DEFAULT_MODEL_SERVER_ADDR.parse().unwrap(), - acquisition_duration_allowed_imprecision_msec: - flydra_types::DEFAULT_ACQUISITION_DURATION_ALLOWED_IMPRECISION_MSEC, } } } @@ -2768,57 +2604,273 @@ fn test_nvenc_save(frame: DynamicFrame) -> Result { Ok(true) } +fn to_event_frame(state: &StoreType) -> String { + let buf = serde_json::to_string(&state).unwrap(); + let frame_string = format!("event: {STRAND_CAM_EVENT_NAME}\ndata: {buf}\n\n"); + frame_string +} + +async fn 
events_handler( + axum::extract::State(app_state): axum::extract::State, + session_key: axum_token_auth::SessionKey, + axum::extract::ConnectInfo(addr): axum::extract::ConnectInfo, + _: AcceptsEventStream, + req: axum::extract::Request, +) -> impl axum::response::IntoResponse { + tracing::trace!("events"); + // Connection wants to subscribe to event stream. + + let key = ConnectionSessionKey::new(session_key.0, addr); + let (tx, body) = app_state.event_broadcaster.new_connection(key); + + // Send an initial copy of our state. + let shared_store = app_state.shared_store_arc.read().as_ref().clone(); + let frame_string = to_event_frame(&shared_store); + match tx + .send(Ok(http_body::Frame::data(frame_string.into()))) + .await + { + Ok(()) => {} + Err(tokio::sync::mpsc::error::SendError(_)) => { + // The receiver was dropped because the connection closed. Should probably do more here. + tracing::debug!("initial send error"); + } + } + + // Create a new channel in which the receiver is used to send responses to + // the new connection. The sender receives changes from a global change + // receiver. 
+ let typ = ConnectionEventType::Connect(tx); + let path = req.uri().path().to_string(); + let connection_key = ConnectionKey { addr }; + let session_key = SessionKey(session_key.0); + + match app_state + .tx_new_connection + .send(ConnectionEvent { + typ, + session_key, + connection_key, + path, + }) + .await + { + Ok(()) => Ok(body), + Err(_) => Err(( + StatusCode::INTERNAL_SERVER_ERROR, + "sending new connection failed", + )), + } +} + +async fn callback_handler( + axum::extract::State(app_state): axum::extract::State, + _session_key: axum_token_auth::SessionKey, + TolerantJson(payload): TolerantJson, +) -> impl axum::response::IntoResponse { + tracing::trace!("callback"); + match payload { + CallbackType::ToCamera(cam_arg) => { + debug!("in cb: {:?}", cam_arg); + app_state + .callback_senders + .cam_args_tx + .send(cam_arg) + .await + .ignore_send_error(); + } + CallbackType::FirehoseNotify(inner) => { + let arrival_time = chrono::Utc::now(); + let fc = FirehoseCallback { + arrival_time, + inner, + }; + app_state + .callback_senders + .firehose_callback_tx + .send(fc) + .await + .ignore_send_error(); + } + CallbackType::TakeCurrentImageAsBackground => { + #[cfg(feature = "flydra_feat_detect")] + app_state + .callback_senders + .tx_frame + .send(Msg::TakeCurrentImageAsBackground) + .await + .ignore_send_error(); + } + CallbackType::ClearBackground(value) => { + #[cfg(feature = "flydra_feat_detect")] + app_state + .callback_senders + .tx_frame + .send(Msg::ClearBackground(value)) + .await + .ignore_send_error(); + #[cfg(not(feature = "flydra_feat_detect"))] + let _ = value; + } + CallbackType::ToLedBox(led_box_arg) => futures::executor::block_on(async { + info!("in led_box callback: {:?}", led_box_arg); + app_state + .callback_senders + .led_box_tx_std + .send(led_box_arg) + .await + .ignore_send_error(); + }), + } + Ok::<_, axum::extract::rejection::JsonRejection>(axum::Json(())) +} + +async fn handle_auth_error(err: tower::BoxError) -> (StatusCode, &'static 
str) { + match err.downcast::() { + Ok(err) => { + tracing::error!( + "Validation error(s): {:?}", + err.errors().collect::>() + ); + (StatusCode::UNAUTHORIZED, "Request is not authorized") + } + Err(orig_err) => { + tracing::error!("Unhandled internal error: {orig_err}"); + (StatusCode::INTERNAL_SERVER_ERROR, "internal server error") + } + } +} + +#[derive(Debug)] +struct BraidInfo { + mainbrain_session: braid_http_session::MainbrainSession, + camdata_addr: flydra_types::RealtimePointsDestAddr, + tracker_cfg_src: ImPtDetectCfgSource, + config_from_braid: flydra_types::RemoteCameraInfoResponse, +} + +// ----------- + +/// top-level function once args are parsed from CLI. pub fn run_app( mymod: ci2_async::ThreadedAsyncCameraModule, args: StrandCamArgs, app_name: &'static str, -) -> Result<()> +) -> anyhow::Result<()> where - M: ci2::CameraModule, + M: ci2::CameraModule + 'static, C: 'static + ci2::Camera + Send, { - let handle = args - .handle - .clone() - .ok_or_else(|| anyhow::anyhow!("no tokio runtime handle"))?; + // Start tokio runtime here. + let runtime = tokio::runtime::Builder::new_multi_thread() + .enable_all() + .worker_threads(4) + .thread_name("strand-cam-runtime") + .thread_stack_size(3 * 1024 * 1024) + .build()?; - let my_handle = handle.clone(); + runtime.block_on(run_after_maybe_connecting_to_braid(mymod, args, app_name))?; - let (_bui_server_info, tx_cam_arg2, fut, _my_app) = - handle.block_on(setup_app(mymod, my_handle, args, app_name))?; - - ctrlc::set_handler(move || { - info!("got Ctrl-C, shutting down"); + info!("done"); + Ok(()) +} - // Send quit message. - debug!("starting to send quit message {}:{}", file!(), line!()); - match tx_cam_arg2.blocking_send(CamArg::DoQuit) { - Ok(()) => {} - Err(e) => { - error!("failed sending quit command: {}", e); +/// First, connect to Braid if requested, then run. 
+async fn run_after_maybe_connecting_to_braid( + mymod: ci2_async::ThreadedAsyncCameraModule, + args: StrandCamArgs, + app_name: &'static str, +) -> anyhow::Result<()> +where + M: ci2::CameraModule + 'static, + C: 'static + ci2::Camera + Send, +{ + let default_bui_addr = match &args.standalone_or_braid { + StandaloneOrBraid::Braid(braid_args) => { + let braid_info = + flydra_types::BuiServerAddrInfo::parse_url_with_token(&braid_args.braid_url)?; + if braid_info.addr().ip().is_loopback() { + "127.0.0.1:3440" + } else { + "0.0.0.0:3440" } } - debug!("done sending quit message {}:{}", file!(), line!()); - }) - .expect("Error setting Ctrl-C handler"); + StandaloneOrBraid::Standalone(_) => "127.0.0.1:3440", + }; - handle.block_on(fut)?; + let strand_cam_bui_http_address_string = args + .http_server_addr + .as_ref() + .map(Clone::clone) + .unwrap_or_else(|| default_bui_addr.to_string()); + tracing::debug!("Strand Camera HTTP server: {strand_cam_bui_http_address_string}"); + + // If connecting to braid, do it here. 
+ let res_braid: std::result::Result = { + match &args.standalone_or_braid { + StandaloneOrBraid::Braid(braid_args) => { + info!("Will connect to braid at \"{}\"", braid_args.braid_url); + let mainbrain_bui_loc = flydra_types::MainbrainBuiLocation( + flydra_types::BuiServerAddrInfo::parse_url_with_token(&braid_args.braid_url)?, + ); - info!("done"); - Ok(()) + let mut mainbrain_session = + braid_http_session::mainbrain_future_session(mainbrain_bui_loc).await?; + + let camera_name = flydra_types::RawCamName::new(braid_args.camera_name.clone()); + + let config_from_braid: flydra_types::RemoteCameraInfoResponse = + mainbrain_session.get_remote_info(&camera_name).await?; + + let camdata_addr = { + let camdata_addr = config_from_braid + .camdata_addr + .parse::()?; + let addr_info_ip = flydra_types::AddrInfoIP::from_socket_addr(&camdata_addr); + + flydra_types::RealtimePointsDestAddr::IpAddr(addr_info_ip) + }; + + let tracker_cfg_src = crate::ImPtDetectCfgSource::ChangesNotSavedToDisk( + config_from_braid.config.point_detection_config.clone(), + ); + + Ok(BraidInfo { + mainbrain_session, + config_from_braid, + camdata_addr, + tracker_cfg_src, + }) + } + StandaloneOrBraid::Standalone(standalone_args) => Err(standalone_args.clone()), + } + }; + + run_until_done( + mymod, + args, + app_name, + res_braid, + &strand_cam_bui_http_address_string, + ) + .await } -pub async fn setup_app( +// ----------- + +/// This is the main function where we spend all time after parsing startup args +/// and, in case of connecting to braid, getting the inital connection +/// information. +/// +/// This function is way too huge and should be refactored. 
+#[tracing::instrument(level = "debug", skip_all)] +async fn run_until_done( mut mymod: ci2_async::ThreadedAsyncCameraModule, - rt_handle: tokio::runtime::Handle, args: StrandCamArgs, app_name: &'static str, -) -> anyhow::Result<( - StrandCamBuiServerInfo, - tokio::sync::mpsc::Sender, - impl futures::Future>, - StrandCamApp, -)> + res_braid: std::result::Result, + strand_cam_bui_http_address_string: &str, +) -> anyhow::Result<()> where M: ci2::CameraModule, C: 'static + ci2::Camera + Send, @@ -2830,7 +2882,12 @@ where warn!("Package 'imops' was not compiled with simd support. Image processing with imops will be slow."); } - debug!("CLI request for camera {:?}", args.camera_name); + let requested_camera_name = match &args.standalone_or_braid { + StandaloneOrBraid::Standalone(args) => args.camera_name.clone(), + StandaloneOrBraid::Braid(args) => Some(args.camera_name.clone()), + }; + + debug!("Request for camera \"{requested_camera_name:?}\""); // ----------------------------------------------- @@ -2845,7 +2902,7 @@ where info!(" camera {:?} detected", cam_info.name()); } - let name = match args.camera_name { + let use_camera_name = match requested_camera_name { Some(ref name) => name, None => cam_infos[0].name(), }; @@ -2853,7 +2910,7 @@ where let frame_info_extractor = mymod.frame_info_extractor(); let settings_file_ext = mymod.settings_file_extension().to_string(); - let mut cam = match mymod.threaded_async_camera(name) { + let mut cam = match mymod.threaded_async_camera(use_camera_name) { Ok(cam) => cam, Err(e) => { let msg = format!("{e}"); @@ -2864,24 +2921,44 @@ where let raw_name = cam.name().to_string(); info!(" got camera {}", raw_name); - let cam_name = RawCamName::new(raw_name); - let ros_cam_name = cam_name.to_ros(); + let raw_cam_name = RawCamName::new(raw_name); + let ros_cam_name = raw_cam_name.to_ros(); let camera_gamma = cam .feature_float("Gamma") - .map_err(|e| log::warn!("Ignoring error getting gamma: {}", e)) + .map_err(|e| warn!("Ignoring error 
getting gamma: {}", e)) .ok() .map(|x: f64| x as f32); + let camera_settings_filename = match &res_braid { + Ok(bi) => bi.config_from_braid.config.camera_settings_filename.clone(), + Err(a) => a.camera_settings_filename.clone(), + }; + + let pixel_format = match &res_braid { + Ok(bi) => bi.config_from_braid.config.pixel_format.clone(), + Err(a) => a.pixel_format.clone(), + }; + + let acquisition_duration_allowed_imprecision_msec = match &res_braid { + Ok(bi) => { + bi.config_from_braid + .config + .acquisition_duration_allowed_imprecision_msec + } + Err(a) => a.acquisition_duration_allowed_imprecision_msec, + }; + #[cfg(not(feature = "flydra_feat_detect"))] + let _ = acquisition_duration_allowed_imprecision_msec; + let (frame_rate_limit_supported, mut frame_rate_limit_enabled) = - if let Some(camera_settings_filename) = &args.camera_settings_filename { - let settings = - std::fs::read_to_string(camera_settings_filename).with_context(|| { - format!( - "Failed to read camera settings from file \"{}\"", - camera_settings_filename.display() - ) - })?; + if let Some(fname) = &camera_settings_filename { + let settings = std::fs::read_to_string(fname).with_context(|| { + format!( + "Failed to read camera settings from file \"{}\"", + fname.display() + ) + })?; cam.node_map_load(&settings)?; (false, false) @@ -2890,7 +2967,7 @@ where debug!(" possible pixel format: {}", pixfmt); } - if let Some(ref pixfmt_str) = args.pixel_format { + if let Some(ref pixfmt_str) = pixel_format { use std::str::FromStr; let pixfmt = PixFmt::from_str(pixfmt_str) .map_err(|e: &str| StrandCamError::StringError(e.to_string()))?; @@ -2954,7 +3031,6 @@ where // Buffer 20 frames to be processed before dropping them. let (tx_frame, rx_frame) = tokio::sync::mpsc::channel::(20); let tx_frame2 = tx_frame.clone(); - let tx_frame3 = tx_frame.clone(); // Get initial frame to determine width, height and pixel_format. 
debug!(" started acquisition, waiting for first frame"); @@ -2976,6 +3052,18 @@ where let (firehose_tx, firehose_rx) = tokio::sync::mpsc::channel::(5); + // Put first frame in channel. + firehose_tx + .send(AnnotatedFrame { + frame: frame.clone(), + found_points: vec![], + valid_display: None, + annotations: vec![], + }) + .await + .unwrap(); + // .map_err(|e| anhow::anyhow!("failed to send frame"))?; + let image_width = frame.width(); let image_height = frame.height(); @@ -2991,14 +3079,30 @@ where let raise_grab_thread_priority = args.raise_grab_thread_priority; - #[cfg(feature = "flydra_feat_detect")] - let tracker_cfg_src = args.tracker_cfg_src; - #[cfg(feature = "flydratrax")] let save_empty_data2d = args.save_empty_data2d; #[cfg(feature = "flydra_feat_detect")] - let tracker_cfg = match &tracker_cfg_src { + let tracker_cfg_src = match &res_braid { + Ok(bi) => bi.tracker_cfg_src.clone(), + Err(a) => a.tracker_cfg_src.clone(), + }; + + #[cfg(not(feature = "flydra_feat_detect"))] + match &res_braid { + Ok(bi) => { + let _ = bi.tracker_cfg_src.clone(); // silence unused field warning. + } + Err(_) => {} + }; + + // Here we just create some default, it does not matter what, because it + // will not be used for anything. 
+ #[cfg(not(feature = "flydra_feat_detect"))] + let im_pt_detect_cfg = flydra_pt_detect_cfg::default_absdiff(); + + #[cfg(feature = "flydra_feat_detect")] + let im_pt_detect_cfg = match &tracker_cfg_src { ImPtDetectCfgSource::ChangedSavedToDisk(src) => { // Retrieve the saved preferences let (app_info, ref prefs_key) = src; @@ -3016,25 +3120,52 @@ where ImPtDetectCfgSource::ChangesNotSavedToDisk(cfg) => cfg.clone(), }; - #[cfg(feature = "flydra_feat_detect")] - let im_pt_detect_cfg = tracker_cfg.clone(); + let force_camera_sync_mode = match &res_braid { + Ok(bi) => bi.config_from_braid.force_camera_sync_mode, + Err(a) => a.force_camera_sync_mode, + }; - let mainbrain_info = args.mainbrain_internal_addr.map(|addr| { - let (transmit_msg_tx, transmit_msg_rx) = mpsc::channel::(10); + let camdata_addr = match &res_braid { + Ok(bi) => Some(bi.camdata_addr.clone()), + Err(_a) => None, + }; - MainbrainInfo { - mainbrain_internal_addr: addr, - transmit_msg_rx, - transmit_msg_tx, - } - }); + let software_limit_framerate = match &res_braid { + Ok(bi) => bi.config_from_braid.software_limit_framerate.clone(), + Err(a) => a.software_limit_framerate.clone(), + }; - let transmit_msg_tx = mainbrain_info.as_ref().map(|i| i.transmit_msg_tx.clone()); + let mut mainbrain_session = match res_braid { + Ok(bi) => Some(bi.mainbrain_session), + Err(_a) => None, + }; + + // spawn channel to send data to mainbrain + let (mainbrain_msg_tx, mut mainbrain_msg_rx) = tokio::sync::mpsc::channel(10); + let transmit_msg_tx = if mainbrain_session.is_some() { + Some(mainbrain_msg_tx.clone()) + } else { + None + }; + + let mainbrain_transmitter_fut = async move { + while let Some(msg) = mainbrain_msg_rx.recv().await { + if let Some(ref mut mainbrain_session) = &mut mainbrain_session { + match mainbrain_session.post_callback_message(msg).await { + Ok(()) => {} + Err(e) => { + tracing::error!("failed sending message to mainbrain: {e}"); + break; + } + } + } + } + }; let (cam_args_tx, cam_args_rx) = 
tokio::sync::mpsc::channel(100); let (led_box_tx_std, mut led_box_rx) = tokio::sync::mpsc::channel(20); - let led_box_heartbeat_update_arc = Arc::new(RwLock::new(None)); + let led_box_heartbeat_update_arc = Arc::new(parking_lot::RwLock::new(None)); let gain_ranged = RangedValue { name: "gain".into(), @@ -3068,19 +3199,21 @@ where let current_cam_settings_extension = settings_file_ext.to_string(); - if args.force_camera_sync_mode { + if force_camera_sync_mode { cam.start_default_external_triggering().unwrap(); - send_cam_settings_to_braid( - &cam.node_map_save()?, - transmit_msg_tx.as_ref(), - ¤t_cam_settings_extension, - &ros_cam_name, - ) - .map(|fut| rt_handle.spawn(fut)); + if let Some(transmit_msg_tx) = &transmit_msg_tx { + send_cam_settings_to_braid( + &cam.node_map_save()?, + transmit_msg_tx, + ¤t_cam_settings_extension, + &raw_cam_name, + ) + .await?; + } } - if args.camera_settings_filename.is_none() { - if let StartSoftwareFrameRateLimit::Enable(fps_limit) = &args.software_limit_framerate { + if camera_settings_filename.is_none() { + if let StartSoftwareFrameRateLimit::Enable(fps_limit) = &software_limit_framerate { // Set the camera. cam.set_software_frame_rate_limit(*fps_limit).unwrap(); // Store the values we set. @@ -3194,18 +3327,16 @@ where let im_ops_state = ImOpsState::default(); - // Here we just create some default, it does not matter what, because it - // will not be used for anything. - #[cfg(not(feature = "flydra_feat_detect"))] - let im_pt_detect_cfg = flydra_pt_detect_cfg::default_absdiff(); - #[cfg(feature = "flydra_feat_detect")] let has_image_tracker_compiled = true; #[cfg(not(feature = "flydra_feat_detect"))] let has_image_tracker_compiled = false; - let is_braid = args.is_braid; + let is_braid = match &args.standalone_or_braid { + StandaloneOrBraid::Braid(_) => true, + StandaloneOrBraid::Standalone(_) => false, + }; // ----------------------------------------------- // Check if we can use nv h264 and, if so, set that as default. 
@@ -3220,18 +3351,18 @@ where let mp4_filename_template = args .mp4_filename_template - .replace("{CAMNAME}", cam_name.as_str()); + .replace("{CAMNAME}", raw_cam_name.as_str()); let fmf_filename_template = args .fmf_filename_template - .replace("{CAMNAME}", cam_name.as_str()); + .replace("{CAMNAME}", raw_cam_name.as_str()); let ufmf_filename_template = args .ufmf_filename_template - .replace("{CAMNAME}", cam_name.as_str()); + .replace("{CAMNAME}", raw_cam_name.as_str()); #[cfg(feature = "fiducial")] let format_str_apriltag_csv = args .apriltag_csv_filename_template - .replace("{CAMNAME}", cam_name.as_str()); + .replace("{CAMNAME}", use_camera_name); #[cfg(not(feature = "fiducial"))] let format_str_apriltag_csv = "".into(); @@ -3273,7 +3404,7 @@ where measured_fps: 0.0, is_saving_im_pt_detect_csv: None, has_image_tracker_compiled, - im_pt_detect_cfg, + im_pt_detect_cfg: im_pt_detect_cfg.clone(), has_flydratrax_compiled, kalman_tracking_config, led_program_config, @@ -3294,65 +3425,143 @@ where camera_calibration: None, }); - let frame_processing_error_state = Arc::new(RwLock::new(FrameProcessingErrorState::default())); + let frame_processing_error_state = Arc::new(parking_lot::RwLock::new( + FrameProcessingErrorState::default(), + )); - let camdata_addr = args.camdata_addr; + // let mut config = get_default_config(); + // config.cookie_name = "strand-camclient".to_string(); - let mut config = get_default_config(); - config.cookie_name = "strand-camclient".to_string(); + let mut shared_store_changes_rx = shared_store.get_changes(1); - let shared_store_arc = Arc::new(RwLock::new(shared_store)); + // A channel for the data sent from the client browser. 
+ let (firehose_callback_tx, firehose_callback_rx) = tokio::sync::mpsc::channel(10); - let cam_args_tx2 = cam_args_tx.clone(); - let secret = args.secret.clone(); + let callback_senders = StrandCamCallbackSenders { + cam_args_tx: cam_args_tx.clone(), + firehose_callback_tx, + led_box_tx_std: led_box_tx_std.clone(), + tx_frame: tx_frame.clone(), + }; - // todo: integrate with quit_channel and quit_rx elsewhere. - let (quit_trigger, valve) = stream_cancel::Valve::new(); + let (tx_new_connection, rx_new_connection) = tokio::sync::mpsc::channel(10); - let (shutdown_tx, shutdown_rx) = tokio::sync::oneshot::channel::<()>(); + let shared_state = Arc::new(parking_lot::RwLock::new(shared_store)); + let shared_store_arc = shared_state.clone(); + + // Create our app state. + let app_state = StrandCamAppState { + event_broadcaster: Default::default(), + callback_senders, + tx_new_connection, + shared_store_arc, + }; + + let shared_store_arc = shared_state.clone(); - let http_server_addr = if let Some(http_server_addr) = args.http_server_addr.as_ref() { - // In braid, this will be `127.0.0.1:0` to get a free port. - http_server_addr.clone() + // This future will send state updates to all connected event listeners. 
+ let event_broadcaster = app_state.event_broadcaster.clone(); + let send_updates_future = async move { + while let Some((_prev_state, next_state)) = shared_store_changes_rx.next().await { + let frame_string = to_event_frame(&next_state); + event_broadcaster.broadcast_frame(frame_string).await; + } + }; + + #[cfg(feature = "bundle_files")] + let serve_dir = tower_serve_static::ServeDir::new(&ASSETS_DIR); + + #[cfg(feature = "serve_files")] + let serve_dir = tower_http::services::fs::ServeDir::new( + std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .join("yew_frontend") + .join("pkg"), + ); + + let (listener, http_camserver_info) = + flydra_types::start_listener(strand_cam_bui_http_address_string).await?; + + let persistent_secret_base64 = if let Some(secret) = args.secret { + secret } else { - // This will be `127.0.0.1:3440` to get a free port. - DEFAULT_HTTP_ADDR.to_string() + match String::load(&APP_INFO, COOKIE_SECRET_KEY) { + Ok(secret_base64) => secret_base64, + Err(_) => { + tracing::debug!("No secret loaded from preferences file, generating new."); + let persistent_secret = cookie::Key::generate(); + let persistent_secret_base64 = base64::encode(persistent_secret.master()); + persistent_secret_base64.save(&APP_INFO, COOKIE_SECRET_KEY)?; + persistent_secret_base64 + } + } }; - let (firehose_callback_rx, my_app, connection_callback_rx) = StrandCamApp::new( - rt_handle.clone(), - shared_store_arc.clone(), - secret, - &http_server_addr, - config, - cam_args_tx2.clone(), - led_box_tx_std.clone(), - tx_frame3, - shutdown_rx, - ) - .await?; + let persistent_secret = base64::decode(persistent_secret_base64)?; + let persistent_secret = cookie::Key::try_from(persistent_secret.as_slice())?; - // The value `args.http_server_addr` is transformed to - // `local_addr` by doing things like replacing port 0 - // with the actual open port number. + // Setup our auth layer. 
+ let token_config = match http_camserver_info.token() { + AccessToken::PreSharedToken(value) => Some(axum_token_auth::TokenConfig { + name: "token".to_string(), + value: value.clone(), + }), + AccessToken::NoToken => None, + }; + let cfg = axum_token_auth::AuthConfig { + token_config, + persistent_secret, + cookie_name: "strand-cam-session", + ..Default::default() + }; - let (is_loopback, http_camserver_info) = { - let local_addr = *my_app.inner().local_addr(); - let is_loopback = local_addr.ip().is_loopback(); - let token = my_app.inner().token(); - (is_loopback, StrandCamBuiServerInfo::new(local_addr, token)) + let auth_layer = cfg.into_layer(); + // Create axum router. + let router = axum::Router::new() + .route("/strand-cam-events", axum::routing::get(events_handler)) + .route("/callback", axum::routing::post(callback_handler)) + .nest_service("/", serve_dir) + .layer( + tower::ServiceBuilder::new() + .layer(TraceLayer::new_for_http()) + // Auth layer will produce an error if the request cannot be + // authorized so we must handle that. + .layer(axum::error_handling::HandleErrorLayer::new( + handle_auth_error, + )) + .layer(auth_layer), + ) + .with_state(app_state); + + // create future for our app + let http_serve_future = { + use std::future::IntoFuture; + axum::serve( + listener, + router.into_make_service_with_connect_info::(), + ) + .into_future() }; - let url = http_camserver_info.guess_base_url_with_token(); + // todo: integrate with quit_channel and quit_rx elsewhere. + let (quit_trigger, valve) = stream_cancel::Valve::new(); - if args.show_url { - println!("Depending on things, you may be able to login with this url: {url}",); + let (shutdown_tx, shutdown_rx) = tokio::sync::oneshot::channel::<()>(); - if !is_loopback { + let url = http_camserver_info.build_urls()?.pop().unwrap(); + + // if !is_braid { + // Display where we are listening. 
+ println!("Listening at {}", http_camserver_info.addr()); + + println!("Predicted URL(s):"); + for url in http_camserver_info.build_urls()?.iter() { + println!(" * {url}"); + if !flydra_types::is_loopback(url) { println!("This same URL as a QR code:"); - display_qr_url(&url); + display_qr_url(&format!("{url}")); } } + // } #[cfg(feature = "plugin-process-frame")] let do_process_frame_callback = args.process_frame_callback.is_some(); @@ -3361,14 +3570,11 @@ where let process_frame_callback = args.process_frame_callback; #[cfg(feature = "checkercal")] - let collected_corners_arc: CollectedCornersArc = Arc::new(RwLock::new(Vec::new())); + let collected_corners_arc: CollectedCornersArc = Arc::new(parking_lot::RwLock::new(Vec::new())); - let frame_process_cjh = { + let frame_process_task_jh = { let (is_starting_tx, is_starting_rx) = tokio::sync::oneshot::channel(); - #[cfg(feature = "flydra_feat_detect")] - let acquisition_duration_allowed_imprecision_msec = - args.acquisition_duration_allowed_imprecision_msec; #[cfg(feature = "flydra_feat_detect")] let csv_save_dir = args.csv_save_dir.clone(); #[cfg(feature = "flydratrax")] @@ -3378,39 +3584,20 @@ where #[cfg(feature = "flydratrax")] let http_camserver_info2 = http_camserver_info.clone(); let led_box_heartbeat_update_arc2 = led_box_heartbeat_update_arc.clone(); - - let handle2 = rt_handle.clone(); #[cfg(feature = "flydratrax")] - let (model_server_data_tx, model_server, flydratrax_calibration_source) = { + let (model_server_data_tx, flydratrax_calibration_source) = { info!("send_pose server at {}", model_server_addr); - let info = flydra_types::StaticMainbrainInfo { - name: env!("CARGO_PKG_NAME").into(), - version: env!("CARGO_PKG_VERSION").into(), - }; - let (model_server_data_tx, data_rx) = tokio::sync::mpsc::channel(50); - - // we need the tokio reactor already by here - let model_server = flydra2::new_model_server( - data_rx, - valve.clone(), - &model_server_addr, - info, - handle2.clone(), - ) - .await?; + let 
model_server_future = flydra2::new_model_server(data_rx, model_server_addr); + tokio::spawn(async { model_server_future.await }); let flydratrax_calibration_source = args.flydratrax_calibration_source; - ( - model_server_data_tx, - model_server, - flydratrax_calibration_source, - ) + (model_server_data_tx, flydratrax_calibration_source) }; let new_cam_data = flydra_types::RegisterNewCamera { - orig_cam_name: cam_name.clone(), + raw_cam_name: raw_cam_name.clone(), ros_cam_name: ros_cam_name.clone(), - http_camserver_info: Some(StrandCamHttpServerInfo::Server(http_camserver_info.clone())), + http_camserver_info: Some(BuiServerInfo::Server(http_camserver_info.clone())), cam_settings_data: Some(flydra_types::UpdateCamSettings { current_cam_settings_buf: settings_on_start, current_cam_settings_extension: settings_file_ext, @@ -3420,14 +3607,12 @@ where #[cfg(feature = "flydratrax")] let valve2 = valve.clone(); - let cam_name2 = cam_name.clone(); - let (quit_channel, quit_rx) = tokio::sync::oneshot::channel(); + let cam_name2 = raw_cam_name.clone(); let frame_process_task_fut = { { frame_process_task( - handle2, #[cfg(feature = "flydratrax")] - (model_server_data_tx, model_server), + model_server_data_tx, #[cfg(feature = "flydratrax")] flydratrax_calibration_source, cam_name2, @@ -3439,7 +3624,7 @@ where image_height, rx_frame, #[cfg(feature = "flydra_feat_detect")] - tracker_cfg, + im_pt_detect_cfg, #[cfg(feature = "flydra_feat_detect")] std::path::Path::new(&csv_save_dir).to_path_buf(), firehose_tx, @@ -3451,12 +3636,11 @@ where plugin_wait_dur, #[cfg(feature = "flydratrax")] led_box_tx_std, - quit_rx, is_starting_tx, #[cfg(feature = "flydratrax")] http_camserver_info2, process_frame_priority, - mainbrain_info, + transmit_msg_tx.clone(), camdata_addr, led_box_heartbeat_update_arc2, #[cfg(feature = "plugin-process-frame")] @@ -3479,11 +3663,7 @@ where let join_handle = tokio::spawn(frame_process_task_fut); debug!("waiting for frame acquisition thread to start"); 
is_starting_rx.await?; - // TODO: how to check if task still running? - ControlledTaskJoinHandle { - quit_channel, - join_handle, - } + join_handle }; debug!("frame_process_task spawned"); @@ -3528,13 +3708,12 @@ where // install frame handling let n_buffered_frames = 100; - let frame_stream = cam.frames(n_buffered_frames, async_thread_start)?; - let mut frame_valved = valve.wrap(frame_stream); + let mut frame_stream = cam.frames(n_buffered_frames, async_thread_start)?; let cam_stream_future = { let shared_store_arc = shared_store_arc.clone(); let frame_processing_error_state = frame_processing_error_state.clone(); async move { - while let Some(frame_msg) = frame_valved.next().await { + while let Some(frame_msg) = frame_stream.next().await { match frame_msg { ci2_async::FrameResult::Frame(frame) => { let frame: DynamicFrame = frame; @@ -3602,7 +3781,7 @@ where // TODO I just used Arc and RwLock to code this quickly. Convert to single-threaded // versions later. - let known_version = Arc::new(RwLock::new(app_version)); + let known_version = Arc::new(parking_lot::RwLock::new(app_version)); // Create a stream to call our closure now and every 30 minutes. 
let interval_stream = tokio::time::interval(std::time::Duration::from_secs(1800)); @@ -3627,18 +3806,18 @@ where } debug!("version check future done {}:{}", file!(), line!()); }; - rt_handle.spawn(Box::pin(stream_future)); // confirmed: valved and finishes + tokio::spawn(Box::pin(stream_future)); // confirmed: valved and finishes debug!("version check future spawned {}:{}", file!(), line!()); } - rt_handle.spawn(Box::pin(cam_stream_future)); // confirmed: valved and finishes + tokio::spawn(Box::pin(cam_stream_future)); // confirmed: valved and finishes debug!("cam_stream_future future spawned {}:{}", file!(), line!()); let cam_arg_future = { let shared_store_arc = shared_store_arc.clone(); #[cfg(feature = "checkercal")] - let cam_name2 = cam_name.clone(); + let cam_name2 = raw_cam_name.clone(); let mut cam_args_rx = tokio_stream::wrappers::ReceiverStream::new(cam_args_rx); @@ -3677,13 +3856,16 @@ where } CamArg::SetExposureTime(v) => match cam.set_exposure_time(v) { Ok(()) => { - send_cam_settings_to_braid( - &cam.node_map_save().unwrap(), - transmit_msg_tx.as_ref(), - ¤t_cam_settings_extension, - &ros_cam_name, - ) - .map(|fut| rt_handle.spawn(fut)); + if let Some(transmit_msg_tx) = &transmit_msg_tx { + send_cam_settings_to_braid( + &cam.node_map_save().unwrap(), + transmit_msg_tx, + ¤t_cam_settings_extension, + &raw_cam_name, + ) + .await + .unwrap(); + } let mut tracker = shared_store_arc.write(); tracker.modify(|tracker| tracker.exposure_time.current = v); } @@ -3693,13 +3875,16 @@ where }, CamArg::SetGain(v) => match cam.set_gain(v) { Ok(()) => { - send_cam_settings_to_braid( - &cam.node_map_save().unwrap(), - transmit_msg_tx.as_ref(), - ¤t_cam_settings_extension, - &ros_cam_name, - ) - .map(|fut| rt_handle.spawn(fut)); + if let Some(transmit_msg_tx) = &transmit_msg_tx { + send_cam_settings_to_braid( + &cam.node_map_save().unwrap(), + transmit_msg_tx, + ¤t_cam_settings_extension, + &raw_cam_name, + ) + .await + .unwrap(); + } let mut tracker = 
shared_store_arc.write(); tracker.modify(|tracker| tracker.gain.current = v); } @@ -3709,13 +3894,16 @@ where }, CamArg::SetGainAuto(v) => match cam.set_gain_auto(v) { Ok(()) => { - send_cam_settings_to_braid( - &cam.node_map_save().unwrap(), - transmit_msg_tx.as_ref(), - ¤t_cam_settings_extension, - &ros_cam_name, - ) - .map(|fut| rt_handle.spawn(fut)); + if let Some(transmit_msg_tx) = &transmit_msg_tx { + send_cam_settings_to_braid( + &cam.node_map_save().unwrap(), + transmit_msg_tx, + ¤t_cam_settings_extension, + &raw_cam_name, + ) + .await + .unwrap(); + } let mut tracker = shared_store_arc.write(); tracker.modify(|shared| match cam.gain_auto() { Ok(latest) => { @@ -3753,13 +3941,16 @@ where } CamArg::SetExposureAuto(v) => match cam.set_exposure_auto(v) { Ok(()) => { - send_cam_settings_to_braid( - &cam.node_map_save().unwrap(), - transmit_msg_tx.as_ref(), - ¤t_cam_settings_extension, - &ros_cam_name, - ) - .map(|fut| rt_handle.spawn(fut)); + if let Some(transmit_msg_tx) = &transmit_msg_tx { + send_cam_settings_to_braid( + &cam.node_map_save().unwrap(), + transmit_msg_tx, + ¤t_cam_settings_extension, + &raw_cam_name, + ) + .await + .unwrap(); + } let mut tracker = shared_store_arc.write(); tracker.modify(|shared| match cam.exposure_auto() { Ok(latest) => { @@ -3778,13 +3969,16 @@ where CamArg::SetFrameRateLimitEnabled(v) => { match cam.set_acquisition_frame_rate_enable(v) { Ok(()) => { - send_cam_settings_to_braid( - &cam.node_map_save().unwrap(), - transmit_msg_tx.as_ref(), - ¤t_cam_settings_extension, - &ros_cam_name, - ) - .map(|fut| rt_handle.spawn(fut)); + if let Some(transmit_msg_tx) = &transmit_msg_tx { + send_cam_settings_to_braid( + &cam.node_map_save().unwrap(), + transmit_msg_tx, + ¤t_cam_settings_extension, + &raw_cam_name, + ) + .await + .unwrap(); + } let mut tracker = shared_store_arc.write(); tracker.modify(|shared| { match cam.acquisition_frame_rate_enable() { @@ -3804,13 +3998,16 @@ where } CamArg::SetFrameRateLimit(v) => match 
cam.set_acquisition_frame_rate(v) { Ok(()) => { - send_cam_settings_to_braid( - &cam.node_map_save().unwrap(), - transmit_msg_tx.as_ref(), - ¤t_cam_settings_extension, - &ros_cam_name, - ) - .map(|fut| rt_handle.spawn(fut)); + if let Some(transmit_msg_tx) = &transmit_msg_tx { + send_cam_settings_to_braid( + &cam.node_map_save().unwrap(), + transmit_msg_tx, + ¤t_cam_settings_extension, + &raw_cam_name, + ) + .await + .unwrap(); + } let mut tracker = shared_store_arc.write(); tracker.modify(|shared| match cam.acquisition_frame_rate() { Ok(latest) => { @@ -4359,7 +4556,7 @@ where intrinsics, width: image_width as usize, height: image_height as usize, - name: ros_cam_name.as_str().to_string(), + name: raw_cam_name.as_str().to_string(), } .into(); @@ -4456,29 +4653,23 @@ where // sleep to let the webserver start before opening browser std::thread::sleep(std::time::Duration::from_millis(100)); - open_browser(url)?; + open_browser(format!("{url}"))?; } else { info!("listening at {}", url); } - let (quit_channel, quit_rx) = tokio::sync::oneshot::channel(); - - let join_handle = tokio::spawn(video_streaming::firehose_task( - connection_callback_rx, - firehose_rx, - firehose_callback_rx, - false, - strand_cam_storetype::STRAND_CAM_EVENTS_URL_PATH, - quit_rx, - )); - - let video_streaming_cjh = ControlledTaskJoinHandle { - quit_channel, - join_handle, - }; + let connection_callback_rx = rx_new_connection; + let firehose_task_join_handle = tokio::spawn(async { + // The first thing this task does is pop a frame from firehose_rx, so we + // should ensure there is one present. 
+ video_streaming::firehose_task(connection_callback_rx, firehose_rx, firehose_callback_rx) + .await + .unwrap(); + }); #[cfg(feature = "plugin-process-frame")] - let plugin_streaming_cjh = { + let (plugin_streaming_control, plugin_streaming_jh) = { + let cam_args_tx2 = cam_args_tx.clone(); let (flag, control) = thread_control::make_pair(); let join_handle = std::thread::Builder::new() .name("plugin_streaming".to_string()) @@ -4496,14 +4687,9 @@ where } } thread_closer.success(); - })? - .into(); - ControlledThreadJoinHandle { - control, - join_handle, - } + })?; + (control, join_handle) }; - debug!(" running forever"); { @@ -4668,102 +4854,40 @@ where } } - let ajh = AllJoinHandles { - frame_process_cjh, - video_streaming_cjh, - #[cfg(feature = "plugin-process-frame")] - plugin_streaming_cjh, - }; - let cam_arg_future2 = async move { cam_arg_future.await?; // we get here once the whole program is trying to shut down. info!("Now stopping spawned tasks."); - let result: Result<()> = ajh.close_and_join_all().await; - result + Ok::<_, StrandCamError>(()) }; - Ok((http_camserver_info, cam_args_tx, cam_arg_future2, my_app)) -} - -#[cfg(feature = "plugin-process-frame")] -pub struct ControlledThreadJoinHandle { - control: thread_control::Control, - join_handle: std::thread::JoinHandle, -} + // Now run until first future returns, then exit. + info!("Strand Cam launched."); + tokio::select! 
{ + res = http_serve_future => {res?}, + res = cam_arg_future2 => {res?}, + _ = mainbrain_transmitter_fut => {}, + _ = send_updates_future => {}, + _ = shutdown_rx => {}, + res = frame_process_task_jh => {res?.unwrap()}, + res = firehose_task_join_handle=> {res?}, + } -#[cfg(feature = "plugin-process-frame")] -impl ControlledThreadJoinHandle { - fn thead_close_and_join(self) -> std::thread::Result { - debug!( - "sending stop {:?} {:?}", - self.join_handle.thread().name(), - self.join_handle.thread().id() - ); - self.control.stop(); - while !self.control.is_done() { + #[cfg(feature = "plugin-process-frame")] + { + plugin_streaming_control.stop(); + while !plugin_streaming_control.is_done() { debug!( "waiting for stop {:?} {:?}", - self.join_handle.thread().name(), - self.join_handle.thread().id() + plugin_streaming_jh.thread().name(), + plugin_streaming_jh.thread().id() ); std::thread::sleep(std::time::Duration::from_millis(100)); } - debug!( - "joining {:?} {:?}", - self.join_handle.thread().name(), - self.join_handle.thread().id() - ); - let result = self.join_handle.join(); - debug!("joining done"); - result } -} -pub struct ControlledTaskJoinHandle { - quit_channel: tokio::sync::oneshot::Sender<()>, - join_handle: tokio::task::JoinHandle, -} - -impl ControlledTaskJoinHandle { - async fn close_and_join(self) -> std::result::Result { - debug!("sending stop"); - - // debug!( - // "sending stop {:?} {:?}", - // self.join_handle.thread().name(), - // self.join_handle.thread().id() - // ); - self.quit_channel.send(()).ok(); - debug!("joining"); - - // debug!( - // "joining {:?} {:?}", - // self.join_handle.thread().name(), - // self.join_handle.thread().id() - // ); - let result = self.join_handle.await?; - debug!("joining done"); - Ok(result) - } -} - -pub struct AllJoinHandles { - frame_process_cjh: ControlledTaskJoinHandle>, - video_streaming_cjh: ControlledTaskJoinHandle>, - #[cfg(feature = "plugin-process-frame")] - plugin_streaming_cjh: 
ControlledThreadJoinHandle<()>, -} - -impl AllJoinHandles { - async fn close_and_join_all(self) -> Result<()> { - self.frame_process_cjh.close_and_join().await??; - self.video_streaming_cjh.close_and_join().await??; - #[cfg(feature = "plugin-process-frame")] - self.plugin_streaming_cjh.thead_close_and_join().unwrap(); - Ok(()) - } + Ok(()) } #[cfg(feature = "plugin-process-frame")] @@ -4844,31 +4968,25 @@ fn make_family(family: &ci2_remote_control::TagFamily) -> apriltag::Family { } } -fn send_cam_settings_to_braid( +async fn send_cam_settings_to_braid( cam_settings: &str, - transmit_msg_tx: Option<&mpsc::Sender>, + transmit_msg_tx: &tokio::sync::mpsc::Sender, current_cam_settings_extension: &str, - ros_cam_name: &RosCamName, -) -> Option> { - if let Some(transmit_msg_tx) = transmit_msg_tx { - let current_cam_settings_buf = cam_settings.to_string(); - let current_cam_settings_extension = current_cam_settings_extension.to_string(); - let ros_cam_name = ros_cam_name.clone(); - let mut transmit_msg_tx = transmit_msg_tx.clone(); - let fut = async move { - let msg = flydra_types::HttpApiCallback::UpdateCamSettings(flydra_types::PerCam { - ros_cam_name, - inner: flydra_types::UpdateCamSettings { - current_cam_settings_buf, - current_cam_settings_extension, - }, - }); - transmit_msg_tx.send(msg).await.unwrap(); - }; - Some(fut) - } else { - None - } + raw_cam_name: &RawCamName, +) -> StdResult<(), tokio::sync::mpsc::error::SendError> { + let current_cam_settings_buf = cam_settings.to_string(); + let current_cam_settings_extension = current_cam_settings_extension.to_string(); + let raw_cam_name = raw_cam_name.clone(); + let transmit_msg_tx = transmit_msg_tx.clone(); + + let msg = flydra_types::BraidHttpApiCallback::UpdateCamSettings(flydra_types::PerCam { + raw_cam_name, + inner: flydra_types::UpdateCamSettings { + current_cam_settings_buf, + current_cam_settings_extension, + }, + }); + transmit_msg_tx.send(msg).await } fn bitrate_to_u32(br: 
&ci2_remote_control::BitrateSelection) -> u32 { diff --git a/strand-cam/strand-cam-pylon/Cargo.toml b/strand-cam/strand-cam-pylon/Cargo.toml index a75999f2a..2547f98ec 100644 --- a/strand-cam/strand-cam-pylon/Cargo.toml +++ b/strand-cam/strand-cam-pylon/Cargo.toml @@ -15,6 +15,7 @@ ci2-async = { path = "../../ci2-async" } ci2-pyloncxx = { path = "../../ci2-pyloncxx" } strand-cam = {path="..", default-features=false} +axum = "0.7.4" [features] default = ["strand-cam/bundle_files"] diff --git a/strand-cam/yew_frontend/src/lib.rs b/strand-cam/yew_frontend/src/lib.rs index 2eedcbfcf..8d69a367a 100644 --- a/strand-cam/yew_frontend/src/lib.rs +++ b/strand-cam/yew_frontend/src/lib.rs @@ -231,22 +231,6 @@ impl Component for Model { }, ); - // let task = { - // // let notification = link.callback(|status| { - // // if status == EventSourceStatus::Error { - // // log::error!("event source error"); - // // } - // // Msg::EsCheckState - // // }); - // // let mut task = EventSourceService::new() - // // .connect( - // // strand_cam_storetype::STRAND_CAM_EVENTS_URL_PATH, - // // notification, - // // ) - // // .unwrap(); - // task - // }; - Self { video_data: Rc::new(RefCell::new(VideoData::new(None))), server_state: None, @@ -1314,39 +1298,6 @@ fn to_rate(rate_enum: &RecordingFrameRate) -> Option { } } -// impl Model { -// fn send_message(&mut self, args: &CallbackType) -> Option { -// let post_request = Request::post("callback") -// .header("Content-Type", "application/json;charset=UTF-8") -// .body(Json(&args)) -// .expect("Failed to build request."); - -// let callback = -// self.link -// .callback(move |response: Response>>| { -// if let (meta, Json(Ok(_body))) = response.into_parts() { -// if meta.status.is_success() { -// return Msg::Ignore; -// } -// } -// log::error!("failed sending message"); -// Msg::Ignore -// }); -// let options = FetchOptions { -// credentials: Some(Credentials::SameOrigin), -// ..Default::default() -// }; - -// match 
FetchService::fetch_with_options(post_request, options, callback) { -// Ok(task) => Some(task), -// Err(err) => { -// log::error!("sending message failed with error: {}", err); -// None -// } -// } -// } -// } - // ----------------------------------------------------------------------------- async fn post_message(msg: &CallbackType) -> Result<(), FetchError> { @@ -1354,11 +1305,13 @@ async fn post_message(msg: &CallbackType) -> Result<(), FetchError> { let mut opts = RequestInit::new(); opts.method("POST"); opts.cache(web_sys::RequestCache::NoStore); - // opts.mode(web_sys::RequestMode::Cors); - // opts.headers("Content-Type", "application/json;charset=UTF-8") - // set SameOrigin let buf = serde_json::to_string(&msg).unwrap_throw(); opts.body(Some(&JsValue::from_str(&buf))); + let headers = web_sys::Headers::new().unwrap_throw(); + headers + .append("Content-Type", "application/json") + .unwrap_throw(); + opts.headers(&headers); let url = "callback"; let request = Request::new_with_str_and_init(url, &opts)?;