major refactor

* uccm: add parsers and metrics for GPS satellites and loop diagnostics
* cleanups and improvements
* fix refclock to survive chrony restart
* cargo updates
* etc
Author: Keenan Tims, 2025-05-03 23:06:19 -07:00
Parent: a828f3267c
Commit: d7c57cf23a
Signed by: ktims (GPG key ID: 11230674D69038D4)
8 changed files with 1343 additions and 677 deletions

Cargo.lock (generated): 1522 changed lines; file diff suppressed because it is too large.

@@ -1,6 +1,6 @@
[package]
name = "chimemon"
version = "0.1.0"
version = "0.2.0"
edition = "2021"
[dependencies]
@@ -24,7 +24,8 @@ tokio-serial = "5.4.4"
bytes = "1.2.1"
chrono = "0.4.23"
libc = "0.2.137"
reqwest = { version = "0.11.13", features = ["rustls-tls"], default-features = false }
async-stream = "0.3.6"
itertools = "0.14.0"
[dependencies.chrony-candm]
git = "https://github.com/aws/chrony-candm"


@@ -2,18 +2,17 @@ use async_trait::async_trait;
use chimemon::{ChimemonSource, ChimemonSourceChannel, Config};
use chrony_candm::reply::{self, ReplyBody, SourceMode};
use chrony_candm::request::{self, RequestBody};
use chrony_candm::{blocking_query, ClientOptions};
use influxdb2::models::DataPoint;
use log::{debug, info, warn};
use log::{info, warn};
use std::net::{SocketAddr, ToSocketAddrs};
use std::time::{Duration, SystemTime, UNIX_EPOCH};
use tokio::runtime::Handle;
use tokio::{join, time::timeout};
use tokio::join;
pub struct ChronyClient {
pub server: SocketAddr,
client_options: ClientOptions,
config: Config,
client: chrony_candm::Client,
timeout: std::time::Duration,
}
fn datapoint_from_tracking(
@@ -104,21 +103,31 @@ impl ChronyClient {
.unwrap()
.next()
.expect("Unable to parse host:port:");
let client = chrony_candm::Client::spawn(&Handle::current(), Default::default());
let timeout = Duration::from_secs(config.sources.chrony.timeout);
let client_options = ClientOptions {
n_tries: 3,
timeout: Duration::from_secs(config.sources.chrony.timeout),
};
ChronyClient {
server,
client_options,
config,
client,
timeout,
}
}
pub async fn get_tracking(&self) -> Result<reply::Tracking, std::io::Error> {
let reply = timeout(
self.timeout,
self.client.query(RequestBody::Tracking, self.server),
async fn query(&self, request: RequestBody) -> Result<reply::Reply, std::io::Error> {
let server = self.server.clone();
let client_options = self.client_options.clone();
tokio::task::spawn_blocking(move || blocking_query(request, client_options, &server))
.await
.map_err(|e| {
std::io::Error::new(
std::io::ErrorKind::Other,
format!("Error joining thread: {}", e),
)
.await??;
})?
}
pub async fn get_tracking(&self) -> Result<reply::Tracking, std::io::Error> {
let reply = self.query(RequestBody::Tracking).await?;
match reply.body {
ReplyBody::Tracking(tracking) => Ok(tracking),
@@ -130,11 +139,7 @@ impl ChronyClient {
}
pub async fn get_sources(&self) -> Result<Vec<reply::SourceData>, std::io::Error> {
let reply = timeout(
self.timeout,
self.client.query(RequestBody::NSources, self.server),
)
.await??;
let reply = self.query(RequestBody::NSources).await?;
let nsources = match reply.body {
ReplyBody::NSources(ns) => Ok(i32::try_from(ns.n_sources).unwrap()),
@@ -158,14 +163,9 @@
}
async fn get_source(&self, index: i32) -> Result<reply::SourceData, std::io::Error> {
let reply = timeout(
self.timeout,
self.client.query(
RequestBody::SourceData(request::SourceData { index: index }),
self.server,
),
)
.await??;
let reply = self
.query(RequestBody::SourceData(request::SourceData { index }))
.await?;
let sourcedata = match reply.body {
ReplyBody::SourceData(sourcedata) => Ok(sourcedata),


@@ -2,15 +2,14 @@ use async_trait::async_trait;
use chimemon::{ChimemonMessage, ChimemonTarget, ChimemonTargetChannel, ChronySockConfig};
use libc::{c_double, c_int, timeval};
use log::debug;
use std::io::prelude::*;
use std::mem;
use std::os::unix::net::UnixDatagram;
use std::path::Path;
use std::path::PathBuf;
const CHRONY_MAGIC: c_int = 0x534f434b;
pub struct ChronySockServer {
sock: UnixDatagram,
sock_path: PathBuf,
}
#[repr(C)]
@@ -26,15 +25,9 @@ pub struct ChronyTimeReport {
impl ChronySockServer {
pub fn new(config: ChronySockConfig) -> Self {
debug!(
"Size of chrony refclock report: {}",
mem::size_of::<ChronyTimeReport>()
);
let sock = UnixDatagram::unbound().unwrap();
// TODO: Don't connect to the socket or we break when chrony restarts
// use sock.send_to instead and fail gracefully
sock.connect(&config.sock).expect("Unable to open socket");
ChronySockServer { sock }
ChronySockServer {
sock_path: config.sock.into(),
}
}
}
@@ -49,8 +42,16 @@ impl ChimemonTarget for ChronySockServer {
{
let frame = ChronyTimeReport {
tv: timeval {
tv_sec: tr.system_time.timestamp().try_into().unwrap_or_default(),
tv_usec: tr.system_time.timestamp_subsec_micros().try_into().unwrap_or_default(),
tv_sec: tr
.system_time
.timestamp()
.try_into()
.unwrap_or_default(),
tv_usec: tr
.system_time
.timestamp_subsec_micros()
.try_into()
.unwrap_or_default(),
},
offset: tr.offset.num_nanoseconds().unwrap() as f64 / 1e9,
leap: if tr.leap_flag { 1 } else { 0 },
@@ -58,14 +59,15 @@ impl ChimemonTarget for ChronySockServer {
_pad: 0,
magic: CHRONY_MAGIC,
};
unsafe {
let bs = std::slice::from_raw_parts(
let bs = unsafe {
std::slice::from_raw_parts(
(&frame as *const ChronyTimeReport) as *const u8,
mem::size_of::<ChronyTimeReport>(),
);
debug!("Sending to chrony sock {:#?}", frame);
self.sock.send(bs).unwrap();
)
};
debug!("Sending to chrony sock {:#?}", frame);
let sock = UnixDatagram::unbound().unwrap();
sock.send_to(bs, &self.sock_path).unwrap();
}
}
}
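This file carries the "fix refclock to survive chrony restart" item from the commit message: the old code connect()ed the Unix datagram socket once at startup, so when chrony restarted and recreated its SOCK refclock socket the sender kept writing to a stale endpoint; the new code stores only the path and sends each report from a fresh unbound socket with send_to(), which resolves the path on every send. The pattern in isolation, as a minimal sketch (the socket path is illustrative and the payload stands in for the ChronyTimeReport frame above):

use std::os::unix::net::UnixDatagram;
use std::path::Path;

// Send one refclock frame over a fresh, unbound datagram socket so a recreated
// socket path is picked up on the next send.
fn send_report(sock_path: &Path, frame_bytes: &[u8]) -> std::io::Result<()> {
    let sock = UnixDatagram::unbound()?;
    sock.send_to(frame_bytes, sock_path)?;
    Ok(())
}

// Usage (illustrative path): send_report(Path::new("/run/chrony.chimemon.sock"), &bytes)?;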


@@ -63,8 +63,8 @@ impl ChimemonSource for HwmonSource {
sensor_val
);
let now = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();
let mut builder =
DataPoint::builder(&self.config.sources.hwmon.measurement).timestamp(now.as_nanos().try_into().unwrap());
let mut builder = DataPoint::builder(&self.config.sources.hwmon.measurement)
.timestamp(now.as_nanos().try_into().unwrap());
for (key, value) in &self.config.influxdb.tags {
builder = builder.tag(key, value)
}


@@ -1,7 +1,7 @@
use async_trait::async_trait;
use chrono::NaiveDateTime;
use chrono::{DateTime, Utc};
use figment::{
providers::{Data, Format, Serialized, Toml},
providers::{Format, Serialized, Toml},
util::map,
value::Map,
Figment,
@@ -9,7 +9,7 @@ use figment::{
use gethostname::gethostname;
use influxdb2::models::DataPoint;
use serde_derive::{Deserialize, Serialize};
use std::{path::Path, time::Duration};
use std::path::Path;
use tokio::sync::broadcast::*;
#[derive(Serialize, Deserialize, Clone)]
@@ -102,7 +102,7 @@ impl Default for HwmonConfig {
}
#[derive(Clone, Debug)]
pub struct TimeReport {
pub system_time: NaiveDateTime,
pub system_time: DateTime<Utc>,
pub offset: chrono::Duration,
pub leaps: isize,
pub leap_flag: bool,


@@ -3,11 +3,10 @@ mod chrony_refclock;
mod hwmon;
mod uccm;
use chrono::NaiveDateTime;
use clap::{Parser, ValueEnum};
use env_logger::{self, Env};
use futures::{future::join_all, prelude::*};
use log::{debug, error, info, warn};
use futures::future::join_all;
use log::{debug, info};
use std::path::Path;
use tokio::sync::broadcast;
@@ -115,33 +114,20 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
};
let mut influxrx = sourcechan.subscribe();
tasks.push(tokio::spawn(async move {
loop {
match influxrx.recv().await {
Ok(msg) => match msg {
ChimemonMessage::DataPoint(dp) => {
debug!("Writing datapoint to influx: {:?}", dp);
influx
.write(&config.influxdb.bucket, stream::iter([dp]))
.await
.unwrap_or_else(|e| error!("Error writing to influxdb {:?}", e));
}
ChimemonMessage::DataPoints(dps) => {
debug!("Writing datapoints to influx: {:?}", dps);
influx
.write(&config.influxdb.bucket, stream::iter(dps))
.await
.unwrap_or_else(|e| error!("Error writing to influxdb {:?}", e));
}
ChimemonMessage::TimeReport(tr) => {
debug!("GPS TOD: {:?}", tr);
}
},
Err(e) => error!("Unable to receive from channel: {:?}", e),
}
let stream = async_stream::stream! {
while let Ok(msg) = influxrx.recv().await {
match msg {
ChimemonMessage::DataPoint(dp) => {
yield dp
},
ChimemonMessage::DataPoints(dps) => {
for p in dps {
yield p
}
},
ChimemonMessage::TimeReport(_tr) => {}
}
}
};
influx.write(&config.influxdb.bucket, stream).await.unwrap();
}));
// let mut debugrx = sourcechan.subscribe();
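The per-message write loop above is replaced by an async_stream adapter: the broadcast receiver becomes a Stream of DataPoints (DataPoints batches are flattened, TimeReport messages are dropped) and is handed to influxdb2's write() in a single call. The adapter on its own, as a minimal sketch (the generic T stands in for influxdb2::models::DataPoint):

use futures::Stream;
use tokio::sync::broadcast;

// Adapt a broadcast receiver into a Stream; the stream ends when recv() errors
// (channel closed or this receiver lagged), matching the `while let Ok(..)` loop above.
fn receiver_stream<T: Clone>(mut rx: broadcast::Receiver<T>) -> impl Stream<Item = T> {
    async_stream::stream! {
        while let Ok(msg) = rx.recv().await {
            yield msg;
        }
    }
}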


@@ -2,21 +2,20 @@ use async_trait::async_trait;
use bitflags::bitflags;
use byteorder::{BigEndian, ReadBytesExt};
use bytes::{Buf, BytesMut};
use chimemon::{
ChimemonMessage, ChimemonSource, ChimemonSourceChannel, Config, TimeReport, UCCMConfig,
};
use chrono::{Duration, NaiveDateTime, Utc};
use chimemon::{ChimemonMessage, ChimemonSource, ChimemonSourceChannel, Config, TimeReport};
use chrono::{DateTime, Duration, NaiveDateTime, Utc};
use figment::value::Map;
use influxdb2::models::data_point::DataPointBuilder;
use influxdb2::models::DataPoint;
use itertools::Itertools;
use log::{debug, info, warn};
use std::io::Cursor;
use std::io::{BufRead, Cursor};
use std::str;
use std::sync::Arc;
use std::time::UNIX_EPOCH;
use tokio::io::{AsyncBufReadExt, AsyncWriteExt, BufReader, ReadHalf, WriteHalf};
use tokio::join;
use tokio::sync::Mutex;
use tokio::time::{interval, sleep, Interval};
use tokio::time::sleep;
use tokio_serial::{SerialPort, SerialStream};
pub const GPS_EPOCH: i64 = 315964800; // Doesn't seem possible to have a const DateTime object
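// GPS epoch is 1980-01-06T00:00:00Z, i.e. 315964800 seconds after the Unix epoch.
// GPS time does not apply leap seconds, so the TOD handling below recovers a
// UTC-comparable time by subtracting the leap-second count carried in the TOD frame.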
@@ -39,14 +38,15 @@ pub struct UCCMMonitor {
#[derive(Debug)]
pub struct UCCMTODReport {
pub time: NaiveDateTime, // TAI timestamp
pub time: DateTime<Utc>, // TAI timestamp
pub leaps: i8,
pub flags: UCCMFlags,
}
impl UCCMTODReport {
pub fn as_builder(&self, measurement: &String) -> DataPointBuilder {
let mut builder = DataPoint::builder(measurement).timestamp(self.time.timestamp_nanos());
pub fn as_builder(&self, measurement: &String, tags: &Map<String, String>) -> DataPointBuilder {
let mut builder =
DataPoint::builder(measurement).timestamp(self.time.timestamp_nanos_opt().unwrap());
builder = builder.field("leaps", self.leaps as i64);
builder = builder.field("osc_lock", self.flags.contains(UCCMFlags::OSC_LOCK));
builder = builder.field("leap_flag", self.flags.contains(UCCMFlags::LEAP_FLAG));
@@ -62,10 +62,64 @@ impl UCCMTODReport {
builder = builder.field("ant_fault", self.flags.contains(UCCMFlags::NO_ANT));
builder = builder.field("gps_los", self.flags.contains(UCCMFlags::GPS_LOS));
builder = tags
.iter()
.fold(builder, |builder, (k, v)| builder.tag(k, v));
builder
}
}
#[derive(Debug)]
pub struct UCCMLoopDiagReport {
pub ocxo: f32,
}
impl UCCMLoopDiagReport {
pub fn as_builder(&self, measurement: &String, tags: &Map<String, String>) -> DataPointBuilder {
let mut builder = DataPoint::builder(measurement);
builder = builder.field("ocxo_offset", self.ocxo as f64);
builder = tags
.iter()
.fold(builder, |builder, (k, v)| builder.tag(k, v));
builder
}
}
#[derive(Debug)]
pub struct UCCMGpsSvTracking {
pub sv: u8,
pub cno: u8,
}
impl UCCMGpsSvTracking {
fn as_builder(&self, measurement: &String, tags: &Map<String, String>) -> DataPointBuilder {
let mut builder = DataPoint::builder(measurement)
.field("sv_cno", self.cno as i64)
.tag("sv_id", self.sv.to_string());
builder = tags
.iter()
.fold(builder, |builder, (k, v)| builder.tag(k, v));
builder
}
}
#[derive(Debug)]
pub struct UCCMGPSSatsReport {
pub tracked_svs: Vec<UCCMGpsSvTracking>,
}
impl UCCMGPSSatsReport {
pub fn build(&self, measurement: &String, tags: &Map<String, String>) -> Vec<DataPoint> {
self.tracked_svs
.iter()
.map(|sv| sv.as_builder(measurement, tags))
.map(|b| b.build().unwrap())
.collect()
}
}
bitflags! {
pub struct UCCMFlags: u32 {
const OSC_LOCK = (1<<29);
@@ -129,15 +183,15 @@ pub struct GPSSVInfo {
pub struct UCCMStatusReport {
pub tfom: u8,
pub ffom: u8,
pub gpsPhase: f32,
pub gpsPPSValid: bool,
pub gpsSVs: [GPSSVInfo; 32],
pub gpsTime: NaiveDateTime,
pub antVoltage: f32,
pub antCurrent: f32,
pub gps_phase: f32,
pub gps_pps_valid: bool,
pub gps_svs: [GPSSVInfo; 32],
pub gps_time: NaiveDateTime,
pub ant_voltage: f32,
pub ant_current: f32,
pub temp: f32,
pub efcDac: u32,
pub freqError: f32,
pub efc_dac: u32,
pub freq_error: f32,
}
pub struct UCCMInfo {
@@ -178,13 +232,74 @@ impl TryFrom<&[u8]> for UCCMTODReport {
debug!("TOD time: {} leaps: {} flags: {}", time, leaps, flags);
Ok(UCCMTODReport {
time: NaiveDateTime::from_timestamp_opt(GPS_EPOCH + time as i64, 0).unwrap(),
time: DateTime::from_timestamp(GPS_EPOCH + time as i64, 0).unwrap(),
leaps,
flags: UCCMFlags::from_bits_truncate(flags),
})
}
}
impl TryFrom<&str> for UCCMLoopDiagReport {
type Error = std::io::Error;
fn try_from(strbuf: &str) -> Result<Self, Self::Error> {
debug!("LoopDiag buffer: `{:#?}`", strbuf);
if &strbuf[0..4] != "OCXO" {
Err(std::io::Error::new(
std::io::ErrorKind::InvalidData,
"Invalid response (expected `OCXO`)",
))
} else {
let mut cursor = Cursor::new(strbuf);
let mut lines = BufRead::lines(&mut cursor);
let ocxo_line = lines.next().ok_or(std::io::Error::new(
std::io::ErrorKind::InvalidData,
"No lines!",
))??;
let ocxo_val = ocxo_line
.split(':')
.skip(1)
.next()
.ok_or(std::io::Error::new(
std::io::ErrorKind::InvalidData,
"no colon!",
))?
.trim();
let ocxo_val_f = ocxo_val
.parse::<f32>()
.map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e))?;
Ok(UCCMLoopDiagReport { ocxo: ocxo_val_f })
}
}
}
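// Example of the DIAG:LOOP? reply this parser assumes (inferred from the parsing
// logic above, not from UCCM documentation): the first line starts with "OCXO"
// and carries a colon-separated value, e.g. "OCXO : -1.23e-03" parses to
// UCCMLoopDiagReport { ocxo: -0.00123 }.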
impl TryFrom<&str> for UCCMGPSSatsReport {
type Error = std::io::Error;
fn try_from(strbuf: &str) -> Result<Self, Self::Error> {
let resp = strbuf.split_once('\n').unwrap().0.trim();
let (nsats, cnos) = resp.split_once(' ').ok_or(std::io::Error::new(
std::io::ErrorKind::InvalidData,
"Invalid response (expected `NSATS CNOS`)",
))?;
let nsats = nsats.parse::<u8>().map_err(|e| {
std::io::Error::new(std::io::ErrorKind::InvalidData, "Invalid number of sats")
})?;
let tracked_svs = cnos
.split(',')
.chunks(2)
.into_iter()
.map(|chunk| chunk.collect_tuple().unwrap())
.inspect(|chunk| debug!(" chunk: {:?}", chunk))
.map(|(sv_s, cno_s)| UCCMGpsSvTracking {
sv: sv_s.parse().unwrap(),
cno: cno_s.parse().unwrap(),
})
.collect_vec();
assert_eq!(nsats as usize, tracked_svs.len());
Ok(UCCMGPSSatsReport { tracked_svs })
}
}
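// Example of the GPSSAT reply this parser assumes (inferred from the code, not
// from UCCM documentation): "<nsats> <sv>,<cno>,<sv>,<cno>,...\n", e.g.
// "2 5,44,12,39\n" parses to tracked_svs = [{ sv: 5, cno: 44 }, { sv: 12, cno: 39 }].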
impl UCCMMonitor {
pub fn new(config: Config) -> Self {
let builder = tokio_serial::new(&config.sources.uccm.port, config.sources.uccm.baud)
@@ -213,7 +328,7 @@ impl UCCMMonitor {
pub async fn send_cmd(&mut self, cmd: &[u8]) -> Result<String, std::io::Error> {
debug!("cmd: `{:?}`", String::from_utf8_lossy(cmd));
self.tx.write_all(cmd).await.unwrap();
self.tx.write(&[b'\n']).await.unwrap();
self.tx.write_u8(b'\n').await.unwrap();
let mut reader = BufReader::new(&mut self.rx);
let mut resp = String::new();
while !resp.contains("UCCM>") {
@@ -258,18 +373,20 @@ impl UCCMMonitor {
);
Ok(())
}
}
async fn rx_loop(
mut rx: ReadHalf<SerialStream>,
mut self,
chan: ChimemonSourceChannel,
state: Arc<Mutex<UCCMMonitorParseState>>,
config: Config,
) {
let mut rdbuf = BytesMut::with_capacity(1024);
let mut last_sent_report = Utc::now().naive_utc() - config.sources.uccm.status_interval;
let mut last_loop_diag: Option<UCCMLoopDiagReport> = None;
let mut last_gps_sats: Option<UCCMGPSSatsReport> = None;
let mut last_sent_report = Utc::now() - self.config.sources.uccm.status_interval;
loop {
match tokio::io::AsyncReadExt::read_buf(&mut rx, &mut rdbuf).await {
match tokio::io::AsyncReadExt::read_buf(&mut self.rx, &mut rdbuf).await {
Ok(n) => {
if n == 0 {
continue;
@@ -289,7 +406,7 @@ async fn rx_loop(
let frame = rdbuf.split_to(44 * 2 + 43);
match UCCMTODReport::try_from(&frame[..]) {
Ok(tod) => {
let sysnow = Utc::now().naive_utc();
let sysnow = Utc::now();
let offset = tod.time - Duration::seconds(tod.leaps as i64) - sysnow;
debug!(
"System time: {:#?} GPS time: {:#?} Leaps: {:#?}",
@@ -314,13 +431,35 @@
}))
.expect("Unable to send to channel");
if sysnow - last_sent_report
>= Duration::from_std(config.sources.uccm.status_interval).unwrap()
>= Duration::from_std(self.config.sources.uccm.status_interval)
.unwrap()
{
let mut builder = tod.as_builder(&config.sources.uccm.measurement);
for (key, value) in &config.influxdb.tags {
builder = builder.tag(key, value)
let mut points = vec![tod
.as_builder(
&self.config.sources.uccm.measurement,
&self.config.influxdb.tags,
)
.build()
.unwrap()];
if let Some(loop_diag) = &last_loop_diag {
points.push(
loop_diag
.as_builder(
&self.config.sources.uccm.measurement,
&self.config.influxdb.tags,
)
.build()
.unwrap(),
)
}
chan.send(ChimemonMessage::DataPoint(builder.build().unwrap()))
if let Some(gps_sats) = &last_gps_sats {
points.extend(gps_sats.build(
&self.config.sources.uccm.measurement,
&self.config.influxdb.tags,
));
}
chan.send(ChimemonMessage::DataPoints(points))
.expect("Unable to send to channel");
last_sent_report = sysnow;
}
@@ -330,13 +469,36 @@
rdbuf.clear();
}
}
let loop_diag_resp = self.send_cmd(b"DIAG:LOOP?").await.unwrap();
let gps_sats_resp = self.send_cmd(b"GPSSAT").await.unwrap();
let loop_report = UCCMLoopDiagReport::try_from(loop_diag_resp.as_str());
let gps_report = UCCMGPSSatsReport::try_from(gps_sats_resp.as_str());
if let Ok(loop_report) = loop_report {
last_loop_diag = Some(loop_report)
} else {
warn!(
"Unable to parse loop diag report `{}`: {}",
loop_diag_resp,
loop_report.unwrap_err()
);
}
if let Ok(gps_report) = gps_report {
last_gps_sats = Some(gps_report)
} else {
warn!(
"Unable to parse GPS sats report `{}`: {}",
gps_sats_resp,
gps_report.unwrap_err()
);
}
}
UCCMMonitorParseState::ReadStatus => todo!(),
UCCMMonitorParseState::ReadLoopDiag => todo!(),
UCCMMonitorParseState::ReadStatus => todo!(),
UCCMMonitorParseState::ReadTOD => todo!(),
}
}
}
}
#[async_trait]
impl ChimemonSource for UCCMMonitor {
@@ -354,12 +516,9 @@ impl ChimemonSource for UCCMMonitor {
let state = Arc::new(Mutex::<UCCMMonitorParseState>::new(
UCCMMonitorParseState::Idle,
));
let rx_handle = tokio::spawn(rx_loop(
self.rx,
chan.clone(),
state.clone(),
self.config.clone(),
));
let rx_handle = tokio::spawn(self.rx_loop(chan.clone(), state.clone()));
// let tx_handle = tokio::spawn(async move {
// let mut interval = interval(self.config.status_interval);
// loop {