Merged

25 commits
adb6d4c  Add sai-cli diagnoze (v1)  (kaatrasa, Jun 10, 2025)
ccdf16e  Refactor & check for duplicate signal values  (kaatrasa, Jun 11, 2025)
9437cd8  Support magnetometer  (kaatrasa, Jun 11, 2025)
be21f00  Support barometer  (kaatrasa, Jun 11, 2025)
40b4985  Clean up  (kaatrasa, Jun 11, 2025)
0374280  Add sanity check for maximum frequency  (kaatrasa, Jun 11, 2025)
ce7bd0a  Add script to run sai-cli without installation (#67)  (pekkaran, Jun 11, 2025)
2274b99  Fix merge conflict  (kaatrasa, Jun 11, 2025)
5d2978c  Check timestamps overlap with IMU  (kaatrasa, Jun 11, 2025)
720bd75  Support GPS  (kaatrasa, Jun 12, 2025)
21ed48c  Add IMU noise analysis & clean up  (kaatrasa, Jun 13, 2025)
de9b7b2  Fix crash if signal length == 1  (kaatrasa, Jun 16, 2025)
a9b6798  Fix parsing features  (kaatrasa, Jun 16, 2025)
458229c  Require either --output_html or --output_json  (kaatrasa, Jun 17, 2025)
8e4b927  Improve IMU noise analysis  (kaatrasa, Jun 18, 2025)
8782f97  Improve delta time plots  (kaatrasa, Jun 18, 2025)
4ebd6d1  Try checking IMU units are correct  (kaatrasa, Jun 19, 2025)
fdbf23a  Clean up invalid JSON warning message  (kaatrasa, Jun 19, 2025)
5771302  Check if accelerometer signal has gravity  (kaatrasa, Jun 19, 2025)
76921f9  Dont warn about bad delta times with allowDataGaps=True  (kaatrasa, Jun 19, 2025)
0c0ef27  Add colors to issues list  (kaatrasa, Jun 19, 2025)
93691ab  Clean up  (kaatrasa, Jun 19, 2025)
5d302e3  Create output folders if they don't exist  (kaatrasa, Jun 19, 2025)
8a4ef98  Change issue list to a table  (kaatrasa, Jun 19, 2025)
6808c41  Fix table being inside h2 tag. Fix report generation to current direc…  (Bercon, Jun 19, 2025)
9 changes: 6 additions & 3 deletions python/cli/calibrate/calibrate.py
@@ -14,6 +14,9 @@ def define_subparser(subparsers):
     sub = subparsers.add_parser('calibrate', help=__doc__.strip())
     sub.set_defaults(func=call_calibrate)
     from spectacularAI.calibration import define_args as define_args_calibration
-    from .report import define_args as define_args_report
-    define_args_calibration(sub)
-    define_args_report(sub)
+    try:
+        from .report import define_args as define_args_report
+        define_args_calibration(sub)
+        define_args_report(sub)
+    except:
+        pass
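Not part of the diff: a minimal sketch of the behaviour this change appears to be after. If importing .report fails (for example because an optional dependency is missing), the calibrate subcommand is still registered; only the report-specific arguments are skipped. The register helper and its --report flag below are hypothetical stand-ins, not code from the repository.

# Hypothetical illustration of the fallback pattern introduced above.
import argparse

def register(subparsers, report_available):
    sub = subparsers.add_parser('calibrate', help='calibrate sensors')
    try:
        if not report_available:
            # stand-in for 'from .report import define_args as define_args_report' failing
            raise ImportError("report dependencies missing")
        sub.add_argument('--report', action='store_true')  # stand-in for define_args_report(sub)
    except:
        pass  # the subcommand still exists, only the extra arguments are missing
    return sub

parser = argparse.ArgumentParser()
subs = parser.add_subparsers()
register(subs, report_available=False)
print(parser.parse_args(['calibrate']))  # parses fine even without the report arguments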
Empty file added python/cli/diagnose/__init__.py
161 changes: 161 additions & 0 deletions python/cli/diagnose/diagnose.py
@@ -0,0 +1,161 @@
"""
Visualize and diagnose common issues in data in Spectacular AI format
"""

import json
import pathlib
import os

from .html import generateHtml
from .sensors import *
from .gnss import GnssConverter

def define_args(parser):
parser.add_argument("dataset_path", type=pathlib.Path, help="Path to dataset")
parser.add_argument("--output_html", type=pathlib.Path, help="Path to calibration report HTML output.")
parser.add_argument("--output_json", type=pathlib.Path, help="Path to JSON output.")
parser.add_argument("--zero", help="Rescale time to start from zero", action='store_true')
parser.add_argument("--skip", type=float, help="Skip N seconds from the start")
parser.add_argument("--max", type=float, help="Plot max N seconds from the start")
return parser

def define_subparser(subparsers):
sub = subparsers.add_parser('diagnose', help=__doc__.strip())
sub.set_defaults(func=generateReport)
return define_args(sub)

def generateReport(args):
from datetime import datetime

datasetPath = args.dataset_path
jsonlFile = datasetPath if datasetPath.suffix == ".jsonl" else datasetPath.joinpath("data.jsonl")
if not jsonlFile.is_file():
raise FileNotFoundError(f"{jsonlFile} does not exist")

output = {
'passed': True,
'date': datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
'dataset_path': str(jsonlFile.parent)
}

if not args.output_html and not args.output_json:
print("Either --output_html or --output_json is required")
return

data = {
'accelerometer': {"v": [], "t": [], "td": []},
'gyroscope': {"v": [], "t": [], "td": []},
'magnetometer': {"v": [], "t": [], "td": []},
'barometer': {"v": [], "t": [], "td": []},
'gps': {"v": [], "t": [], "td": []},
'cpu': {"v": [], "t": []},
'cameras': {}
}

def addMeasurement(type, t, v):
assert type in data, f"Unknown sensor type: {type}"
sensorData = data[type]
sensorData['v'].append(v)
if len(sensorData["t"]) > 0:
diff = t - sensorData["t"][-1]
sensorData["td"].append(diff)
sensorData["t"].append(t)

startTime = None
timeOffset = 0
gnssConverter = GnssConverter()

with open(jsonlFile) as f:
nSkipped = 0
for line in f.readlines():
try:
measurement = json.loads(line)
except:
print(f"Warning: ignoring non JSON line: '{line}'")
continue
time = measurement.get("time")
sensor = measurement.get("sensor")
barometer = measurement.get("barometer")
gps = measurement.get("gps")
frames = measurement.get("frames")
metrics = measurement.get("systemMetrics")
if frames is None and 'frame' in measurement:
frames = [measurement['frame']]
frames[0]['cameraInd'] = 0

if time is None: continue

if (sensor is None
and frames is None
and metrics is None
and barometer is None
and gps is None): continue

if startTime is None:
startTime = time
if args.zero:
timeOffset = startTime


if (args.skip is not None and time - startTime < args.skip) or (args.max is not None and time - startTime > args.max):
nSkipped += 1
continue

t = time - timeOffset
if sensor is not None:
measurementType = sensor["type"]
if measurementType in ["accelerometer", "gyroscope", "magnetometer"]:
v = [sensor["values"][i] for i in range(3)]
addMeasurement(measurementType, t, v)
elif barometer is not None:
addMeasurement("barometer", t, barometer["pressureHectopascals"])
elif gps is not None:
enu = gnssConverter.enu(gps["latitude"], gps["longitude"], gps["altitude"])
addMeasurement("gps", t, [enu["x"], enu["y"], gps["altitude"]])
elif frames is not None:
for f in frames:
if f.get("missingBitmap", False): continue
cameras = data['cameras']
ind = f["cameraInd"]
if cameras.get(ind) is None:
cameras[ind] = {"td": [], "t": [], "features": []}
else:
diff = t - cameras[ind]["t"][-1]
cameras[ind]["td"].append(diff)
if "features" in f: cameras[ind]["features"].append(len(f["features"]))
cameras[ind]["t"].append(t)
elif metrics is not None and 'cpu' in metrics:
data["cpu"]["t"].append(t)
data["cpu"]["v"].append(metrics['cpu'].get('systemTotalUsagePercent', 0))

if nSkipped > 0: print(f'Skipped {nSkipped} lines')

diagnoseCamera(data, output)
diagnoseAccelerometer(data, output)
diagnoseGyroscope(data, output)
diagnoseMagnetometer(data, output)
diagnoseBarometer(data, output)
diagnoseGps(data, output)
diagnoseCpu(data, output)

if args.output_json:
if os.path.dirname(args.output_json):
os.makedirs(os.path.dirname(args.output_json), exist_ok=True)
with open(args.output_json, "w") as f:
f.write(json.dumps(output, indent=4))
print("Generated JSON report data at:", args.output_json)

if args.output_html:
if os.path.dirname(args.output_html):
os.makedirs(os.path.dirname(args.output_html), exist_ok=True)
generateHtml(output, args.output_html)
print("Generated HTML report at:", args.output_html)

if __name__ == '__main__':
def parse_args():
import argparse
parser = argparse.ArgumentParser(description=__doc__.strip())
parser = define_args(parser)
return parser.parse_args()

generateReport(parse_args())
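Not part of the diff: the parser above only reads a handful of keys (time, sensor.type, sensor.values, barometer.pressureHectopascals, gps.latitude/longitude/altitude, frames[].cameraInd, frames[].features, systemMetrics.cpu.systemTotalUsagePercent). As a rough sketch of that input shape, the hypothetical helper below writes a tiny synthetic data.jsonl with made-up values; a report could then presumably be generated with something like sai-cli diagnose <dataset dir> --output_html report.html, using the arguments defined in define_args above.

# Hypothetical helper: writes a minimal synthetic data.jsonl using the field
# names read by generateReport above. Values are invented for illustration.
import json, math

def write_example_jsonl(path="data.jsonl", seconds=2.0, rate=100):
    with open(path, "w") as f:
        for i in range(int(seconds * rate)):
            t = i / rate
            # Accelerometer roughly showing gravity on z, gyroscope near zero.
            f.write(json.dumps({"time": t, "sensor": {"type": "accelerometer",
                "values": [0.0, 0.0, 9.81 + 0.01 * math.sin(t)]}}) + "\n")
            f.write(json.dumps({"time": t, "sensor": {"type": "gyroscope",
                "values": [0.0, 0.001 * math.cos(t), 0.0]}}) + "\n")
            if i % 10 == 0:  # ~10 Hz camera frames with a fake feature count
                f.write(json.dumps({"time": t,
                    "frames": [{"cameraInd": 0, "features": [[0.0, 0.0]] * 50}]}) + "\n")

write_example_jsonl()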
112 changes: 112 additions & 0 deletions python/cli/diagnose/gnss.py
@@ -0,0 +1,112 @@
import numpy as np

class Ellipsoid:
    def __init__(self, a, b):
        self.a = a # semi-major axis
        self.b = b # semi-minor axis
        f = 1.0 - b / a # flattening factor
        self.e2 = 2 * f - f ** 2 # eccentricity squared

class GnssConverter:
    def __init__(self):
        self.ell = Ellipsoid(a=6378137.0, b=6356752.31424518) # WGS-84 ellipsoid
        self.initialized = False
        self.originECEF = None
        self.R_ecef2enu = None
        self.R_enu2ecef = None
        self.prev = {"x": 0, "y": 0, "z": 0}

    def set_origin(self, lat, lon, alt):
        def ecef_to_enu_rotation_matrix(lat, lon):
            lat = np.deg2rad(lat)
            lon = np.deg2rad(lon)

            return np.array([
                [-np.sin(lon), np.cos(lon), 0],
                [-np.sin(lat)*np.cos(lon), -np.sin(lat)*np.sin(lon), np.cos(lat)],
                [np.cos(lat)*np.cos(lon), np.cos(lat)*np.sin(lon), np.sin(lat)]
            ])

        self.initialized = True
        self.originECEF = self.__geodetic2ecef(lat, lon, alt)
        self.R_ecef2enu = ecef_to_enu_rotation_matrix(lat, lon)
        self.R_enu2ecef = self.R_ecef2enu.T

    def __geodetic2ecef(self, lat, lon, alt):
        # https://gssc.esa.int/navipedia/index.php/Ellipsoidal_and_Cartesian_Coordinates_Conversion
        lat = np.deg2rad(lat)
        lon = np.deg2rad(lon)
        a = self.ell.a
        e2 = self.ell.e2
        N = a / np.sqrt(1 - e2 * np.sin(lat) * np.sin(lat)) # radius of curvature in the prime vertical

        x = (N + alt) * np.cos(lat) * np.cos(lon)
        y = (N + alt) * np.cos(lat) * np.sin(lon)
        z = ((1 - e2) * N + alt) * np.sin(lat)
        return np.array([x, y, z])

    def __ecef2geodetic(self, x, y, z):
        # https://gssc.esa.int/navipedia/index.php/Ellipsoidal_and_Cartesian_Coordinates_Conversion
        a = self.ell.a
        e2 = self.ell.e2
        p = np.sqrt(x**2 + y**2)
        lon = np.arctan2(y, x)

        # latitude and altitude are computed by an iterative procedure.
        MAX_ITERS = 1000
        MIN_LATITUDE_CHANGE_RADIANS = 1e-10
        MIN_ALTITUDE_CHANGE_METERS = 1e-6
        lat_prev = np.arctan(z / ((1-e2)*p)) # initial value
        alt_prev = -100000 # arbitrary
        for _ in range(MAX_ITERS):
            N_i = a / np.sqrt(1-e2*np.sin(lat_prev)**2)
            alt_i = p / np.cos(lat_prev) - N_i
            lat_i = np.arctan(z / ((1 - e2 * (N_i/(N_i + alt_i)))*p))
            if abs(lat_i - lat_prev) < MIN_LATITUDE_CHANGE_RADIANS and abs(alt_i - alt_prev) < MIN_ALTITUDE_CHANGE_METERS: break
            alt_prev = alt_i
            lat_prev = lat_i

        lat = np.rad2deg(lat_i)
        lon = np.rad2deg(lon)
        return np.array([lat, lon, alt_i])

    def __ecef2enu(self, x, y, z):
        # https://gssc.esa.int/navipedia/index.php/Transformations_between_ECEF_and_ENU_coordinates
        assert(self.initialized)
        xyz = np.array([x, y, z])
        xyz = xyz - self.originECEF
        return self.R_ecef2enu @ xyz

    def __enu2ecef(self, e, n, u):
        # https://gssc.esa.int/navipedia/index.php/Transformations_between_ECEF_and_ENU_coordinates
        assert(self.initialized)
        enu = np.array([e, n, u])
        xyz = self.R_enu2ecef @ enu
        return xyz + self.originECEF

    def enu(self, lat, lon, alt=0, accuracy=1.0, minAccuracy=-1.0):
        # Filter out inaccurate measurements to make pose alignment easier.
        if (minAccuracy > 0.0 and (accuracy > minAccuracy or accuracy < 0.0)):
            return self.prev

        if not self.initialized:
            self.set_origin(lat, lon, alt)

        x, y, z = self.__geodetic2ecef(lat, lon, alt)
        enu = self.__ecef2enu(x, y, z)
        enu = { "x": enu[0], "y": enu[1], "z": enu[2] }
        self.prev = enu
        return enu

    def wgs(self, e, n, u):
        assert(self.initialized)
        x, y, z = self.__enu2ecef(e, n, u)
        wgs = self.__ecef2geodetic(x, y, z)
        return { "latitude": wgs[0], "longitude": wgs[1], "altitude": wgs[2] }

    def wgs_array(self, pos):
        assert(self.initialized)
        arr = []
        for enu in pos:
            arr.append(self.wgs(enu[0], enu[1], enu[2]))
        return arr
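Not part of the diff: a minimal usage sketch for GnssConverter as defined above. The coordinates are arbitrary example values; the first enu() call fixes the local ENU origin, and wgs() maps local coordinates back to latitude/longitude/altitude.

# Round-trip sketch for GnssConverter; coordinates are arbitrary examples.
from gnss import GnssConverter  # inside the package it would be: from .gnss import GnssConverter

conv = GnssConverter()
conv.enu(60.1699, 24.9384, 20.0)         # first fix defines the ENU origin (maps to ~zero)
p = conv.enu(60.1710, 24.9400, 25.0)     # nearby point, metres east/north/up from the origin
print(p["x"], p["y"], p["z"])
print(conv.wgs(p["x"], p["y"], p["z"]))  # back to WGS-84 latitude/longitude/altitude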