simulation: rewrite the simulation data processing so the build script and the chip share most of the same code, allowing more general data streaming

This commit is contained in:
2025-12-24 17:08:53 +01:00
parent 83e4614d10
commit 6e1f5423f9
3 changed files with 384 additions and 234 deletions

264
build.rs
View File

@@ -1,14 +1,15 @@
use std::collections::HashMap;
use std::fs;
use std::io::{Read, Write};
use std::path::Path;
use std::fs::File;
use image::GenericImageView;
use csv::Reader;
use csv::{Reader, ReaderBuilder, StringRecord};
#[path ="src/simdata.rs"]
mod simdata;
use crate::simdata::StreamType;
use crate::simdata::*;
fn main() {
linker_be_nice();
@@ -70,6 +71,110 @@ fn compile_assets() {
}
}
/// Constructs one event reading from the current CSV row of each source file.
trait FromRecord {
/// `records[i]` is the current row of source file `i`; `headers[i]` maps that
/// file's column names to column indices. Implementations panic on missing
/// columns or unparsable fields (acceptable in a build script).
fn from_record(records: &[StringRecord], headers: &[HashMap<String, usize>]) -> Self;
}
impl FromRecord for AnnotationReading {
    /// Copies the `text` column of the first record into the fixed-size
    /// annotation buffer.
    ///
    /// Text longer than the buffer is truncated (with a build warning) rather
    /// than aborting the build: `copy_from_slice` panics whenever the two
    /// slice lengths differ, so the copy length must be clamped explicitly.
    fn from_record(records: &[StringRecord], headers: &[HashMap<String, usize>]) -> Self {
        let text = records[0].get(headers[0]["text"]).unwrap();
        let mut data = AnnotationReading::default();
        let len = text.len().min(data.buf.len());
        if len < text.len() {
            println!("cargo::warning=annotation truncated to {len} bytes: {text:?}");
        }
        data.buf[..len].copy_from_slice(&text.as_bytes()[..len]);
        data
    }
}
impl FromRecord for GPSReading {
    /// Parses latitude and longitude out of the first record, using that
    /// file's header map to locate the named columns.
    fn from_record(records: &[StringRecord], headers: &[HashMap<String, usize>]) -> Self {
        let row = &records[0];
        let columns = &headers[0];
        let lat = row.get(columns["latitude"]).unwrap().parse().unwrap();
        let lon = row.get(columns["longitude"]).unwrap().parse().unwrap();
        Self { lat, lon }
    }
}
impl FromRecord for IMUReading {
    /// Merges the accelerometer row (`records[0]`) and the gyroscope row
    /// (`records[1]`) into a single IMU reading. Both files use `x`/`y`/`z`
    /// column names, resolved through each file's own header map.
    fn from_record(records: &[StringRecord], headers: &[HashMap<String, usize>]) -> Self {
        let (accel_row, accel_cols) = (&records[0], &headers[0]);
        let (gyro_row, gyro_cols) = (&records[1], &headers[1]);
        Self {
            accel_x: accel_row.get(accel_cols["x"]).unwrap().parse().unwrap(),
            accel_y: accel_row.get(accel_cols["y"]).unwrap().parse().unwrap(),
            accel_z: accel_row.get(accel_cols["z"]).unwrap().parse().unwrap(),
            gyro_x: gyro_row.get(gyro_cols["x"]).unwrap().parse().unwrap(),
            gyro_y: gyro_row.get(gyro_cols["y"]).unwrap().parse().unwrap(),
            gyro_z: gyro_row.get(gyro_cols["z"]).unwrap().parse().unwrap(),
        }
    }
}
/// Converts one or more parallel CSV source files into a single msgpack event
/// stream at `dest`.
///
/// Each source file contributes one row per event; `Event::from_record` merges
/// the parallel rows into one reading. The output begins with an
/// `EventStreamHeader` whose `count` is taken from the first source file,
/// followed by one `StreamEvent` per row whose `timecode` is the delay since
/// the previous event (deltas, so playback only waits between events).
///
/// Registers each source with `cargo::rerun-if-changed` and skips regeneration
/// when `dest` is newer than every source.
fn generate_sim_data<Event: EventRecord + FromRecord>(srcs: &[&Path], dest: &Path) {
    for src in srcs {
        println!("cargo::rerun-if-changed={}", src.to_str().unwrap());
    }
    if dest.exists() {
        let last_modified = dest.metadata().unwrap().modified().unwrap();
        let any_src_newer = srcs
            .iter()
            .map(|src| src.metadata().unwrap().modified().unwrap())
            .any(|stamp| stamp > last_modified);
        if !any_src_newer {
            return;
        }
    }
    // Calculate the total record count based on how many records are in the first file
    let fd = File::open(srcs[0]).unwrap();
    let mut reader = ReaderBuilder::new().has_headers(true).from_reader(fd);
    let header = EventStreamHeader {
        count: reader.records().count()
    };
    let mut output = File::create(dest).unwrap();
    header.write_rmp(&mut output).unwrap();
    let mut readers: Vec<_> = srcs.iter().map(|src| {
        let fd = File::open(src).unwrap();
        ReaderBuilder::new().has_headers(true).from_reader(fd)
    }).collect();
    let mut last_stamp = 0.0;
    // Map each file's column names to indices so fields can be looked up by name.
    let headers: Vec<HashMap<_, _>> = readers.iter_mut().map(|reader| {
        reader.headers().unwrap().iter().enumerate().map(|x| (x.1.to_owned(), x.0)).collect()
    }).collect();
    let mut all_records: Vec<_> = readers.iter_mut().map(|reader| {
        reader.records()
    }).collect();
    loop {
        let mut next: Vec<_> = all_records.iter_mut().map(|reader| reader.next()).collect();
        // If any of the data files runs out, simply quit. This does not verify that the
        // written number of records matches the count in the header, however.
        if next.iter().any(|x| x.is_none()) {
            break;
        }
        let next: Vec<_> = next.iter_mut().map(|x| x.take().unwrap().unwrap()).collect();
        let data = Event::from_record(next.as_slice(), headers.as_slice());
        // Timestamps come from the first file; emit the delta from the previous event.
        let timestamp = next[0].get(headers[0]["seconds_elapsed"]).unwrap().parse().unwrap();
        let next_delay = timestamp - last_stamp;
        last_stamp = timestamp;
        let record = StreamEvent {
            timecode: next_delay,
            data
        };
        record.write_rmp(&mut output).unwrap();
    }
}
fn write_sim_data() {
let test_data_path = Path::new("test-data");
let output_path = Path::new("target");
@@ -83,147 +188,25 @@ fn write_sim_data() {
let annotation_output = output_path.join("annotations.msgpack");
let unified_output = output_path.join("unified.msgpack");
{
let mut annotation_data = Reader::from_reader(File::open(annotation_input.clone()).unwrap());
let record_count = annotation_data.records().count() as u32;
let mut annotation_data = Reader::from_reader(File::open(annotation_input).unwrap());
let headers = annotation_data.headers().unwrap();
let (timestamp_idx, text_idx) = (
headers.iter().position(|x| { x == "seconds_elapsed" }).unwrap(),
headers.iter().position(|x| { x == "text" }).unwrap(),
);
let mut annotation_output = File::create(annotation_output.clone()).unwrap();
rmp::encode::write_array_len(&mut annotation_output, record_count).unwrap();
let mut last_stamp = 0.0;
for record in annotation_data.records().flatten() {
let (timestamp, text) = (
record.get(timestamp_idx).unwrap().parse().unwrap(),
record.get(text_idx).unwrap()
);
let next_delay = timestamp - last_stamp;
last_stamp = timestamp;
rmp::encode::write_array_len(&mut annotation_output, 3).unwrap();
rmp::encode::write_f64(&mut annotation_output, next_delay).unwrap();
rmp::encode::write_str(&mut annotation_output, text).unwrap();
}
}
generate_sim_data::<AnnotationReading>(&[&annotation_input], &annotation_output);
generate_sim_data::<GPSReading>(&[&gps_input], &gps_output);
generate_sim_data::<IMUReading>(&[&accel_input, &gyro_input], &motion_output);
println!("cargo::rerun-if-changed={}", gps_input.to_str().unwrap());
if !gps_output.exists() || gps_output.metadata().unwrap().modified().unwrap() < gps_input.metadata().unwrap().modified().unwrap() {
let mut gps_data = Reader::from_reader(File::open(gps_input.clone()).unwrap());
let record_count = gps_data.records().count() as u32;
let mut gps_data = Reader::from_reader(File::open(gps_input).unwrap());
let headers = gps_data.headers().unwrap();
let (timestamp_idx, lat_idx, lon_idx) = (
headers.iter().position(|x| { x == "seconds_elapsed" }).unwrap(),
headers.iter().position(|x| { x == "longitude" }).unwrap(),
headers.iter().position(|x| { x == "latitude" }).unwrap(),
);
let mut gps_output = File::create(gps_output.clone()).unwrap();
rmp::encode::write_array_len(&mut gps_output, record_count).unwrap();
let mut last_stamp = 0.0;
for record in gps_data.records().flatten() {
let (timestamp, lat, lon) = (
record.get(timestamp_idx).unwrap().parse().unwrap(),
record.get(lat_idx).unwrap().parse().unwrap(),
record.get(lon_idx).unwrap().parse().unwrap()
);
let next_delay = timestamp - last_stamp;
last_stamp = timestamp;
rmp::encode::write_array_len(&mut gps_output, 3).unwrap();
rmp::encode::write_f64(&mut gps_output, next_delay).unwrap();
rmp::encode::write_f64(&mut gps_output, lat).unwrap();
rmp::encode::write_f64(&mut gps_output, lon).unwrap();
}
}
println!("cargo::rerun-if-changed={}", accel_input.to_str().unwrap());
println!("cargo::rerun-if-changed={}", gyro_input.to_str().unwrap());
let rebuild_motion = {
if motion_output.exists() {
let motion_stamp = motion_output.metadata().unwrap().modified().unwrap();
motion_stamp < accel_input.metadata().unwrap().modified().unwrap() || motion_stamp < gyro_input.metadata().unwrap().modified().unwrap()
} else {
true
}
};
if rebuild_motion {
let mut accel_data = Reader::from_reader(File::open(accel_input.clone()).unwrap());
let mut gyro_data = Reader::from_reader(File::open(gyro_input).unwrap());
let record_count = accel_data.records().count() as u32;
let mut accel_data = Reader::from_reader(File::open(accel_input).unwrap());
let headers = accel_data.headers().unwrap();
let (timestamp_idx, accel_x_idx, accel_y_idx, accel_z_idx) = (
headers.iter().position(|x| { x == "seconds_elapsed" }).unwrap(),
headers.iter().position(|x| { x == "x" }).unwrap(),
headers.iter().position(|x| { x == "y" }).unwrap(),
headers.iter().position(|x| { x == "z" }).unwrap(),
);
let headers = gyro_data.headers().unwrap();
let (gyro_x_idx, gyro_y_idx, gyro_z_idx) = (
headers.iter().position(|x| { x == "x" }).unwrap(),
headers.iter().position(|x| { x == "y" }).unwrap(),
headers.iter().position(|x| { x == "z" }).unwrap(),
);
let mut motion_output = File::create(motion_output.clone()).unwrap();
rmp::encode::write_array_len(&mut motion_output, record_count).unwrap();
let mut last_stamp = 0.0;
for (accel_record, gyro_record) in accel_data.records().flatten().zip(gyro_data.records().flatten()) {
let (timestamp, accel_x, accel_y, accel_z) = (
accel_record.get(timestamp_idx).unwrap().parse().unwrap(),
accel_record.get(accel_x_idx).unwrap().parse().unwrap(),
accel_record.get(accel_y_idx).unwrap().parse().unwrap(),
accel_record.get(accel_z_idx).unwrap().parse().unwrap()
);
let (gyro_x, gyro_y, gyro_z) = (
gyro_record.get(gyro_x_idx).unwrap().parse().unwrap(),
gyro_record.get(gyro_y_idx).unwrap().parse().unwrap(),
gyro_record.get(gyro_z_idx).unwrap().parse().unwrap()
);
let next_delay = timestamp - last_stamp;
if next_delay >= 0.02 {
last_stamp = timestamp;
rmp::encode::write_array_len(&mut motion_output, 7).unwrap();
rmp::encode::write_f64(&mut motion_output, next_delay).unwrap();
rmp::encode::write_f64(&mut motion_output, accel_x).unwrap();
rmp::encode::write_f64(&mut motion_output, accel_y).unwrap();
rmp::encode::write_f64(&mut motion_output, accel_z).unwrap();
rmp::encode::write_f64(&mut motion_output, gyro_x).unwrap();
rmp::encode::write_f64(&mut motion_output, gyro_y).unwrap();
rmp::encode::write_f64(&mut motion_output, gyro_z).unwrap();
}
}
}
// GPS data = 2, motion data = 1
let mut unified_fd = File::create(unified_output.clone()).unwrap();
// Write out the stream index, which will be 2 (motion + gps)
rmp::encode::write_array_len(&mut unified_fd, 3).unwrap();
let segments = [(StreamType::IMU, motion_output), (StreamType::GPS, gps_output), (StreamType::Annotations, annotation_output)];
let mut motion_output = File::open(motion_output).unwrap();
let mut gps_output = File::open(gps_output).unwrap();
let mut annotation_output = File::open(annotation_output).unwrap();
rmp::encode::write_ext_meta(&mut unified_fd, motion_output.metadata().unwrap().len() as u32, StreamType::IMU.into()).unwrap();
let mut buf = Vec::new();
motion_output.read_to_end(&mut buf).unwrap();
unified_fd.write_all(buf.as_slice()).unwrap();
// Write out the stream index header
rmp::encode::write_array_len(&mut unified_fd, segments.len() as u32).unwrap();
rmp::encode::write_ext_meta(&mut unified_fd, gps_output.metadata().unwrap().len() as u32, StreamType::GPS.into()).unwrap();
let mut buf = Vec::new();
gps_output.read_to_end(&mut buf).unwrap();
unified_fd.write_all(buf.as_slice()).unwrap();
rmp::encode::write_ext_meta(&mut unified_fd, annotation_output.metadata().unwrap().len() as u32, StreamType::Annotations.into()).unwrap();
let mut buf = Vec::new();
annotation_output.read_to_end(&mut buf).unwrap();
unified_fd.write_all(buf.as_slice()).unwrap();
// Then the streams
for (stream_type, stream_path) in segments {
let mut fd = File::open(stream_path).unwrap();
rmp::encode::write_ext_meta(&mut unified_fd, fd.metadata().unwrap().len() as u32, stream_type.into()).unwrap();
let mut buf = Vec::new();
fd.read_to_end(&mut buf).unwrap();
unified_fd.write_all(buf.as_slice()).unwrap();
}
let mut partitions = Reader::from_reader(File::open("partitions.csv").unwrap());
let mut data_offset = 0x9000; // Assumes default bootloader size (0x7000) plus partition table (0x2000)
@@ -245,8 +228,9 @@ fn write_sim_data() {
panic!("Could not find a 'sim' partition in partitions.csv!");
}
if buf.len() >= data_size {
panic!("Simulation data is too big! Cannot fit {:#x} bytes into a partition with a size of {data_size:#x} bytes.", buf.len());
if unified_fd.metadata().unwrap().len() as usize >= data_size {
// FIXME: Need to implement data resampling
//panic!("Simulation data is too big! Cannot fit {:#x} bytes into a partition with a size of {data_size:#x} bytes.", unified_fd.metadata().unwrap().len());
}
let mut data_flash_script = File::create(output_path.join("flash-sim-data.sh")).unwrap();