Skip to content

Commit

Permalink
sync_analysis - improve and test
Browse files Browse the repository at this point in the history
  • Loading branch information
orgua committed Nov 22, 2023
1 parent 1c03f79 commit 045f770
Show file tree
Hide file tree
Showing 11 changed files with 104 additions and 42 deletions.
3 changes: 3 additions & 0 deletions software/test_timesync/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
*.csv
*.pkl
*.png
14 changes: 14 additions & 0 deletions software/test_timesync/Readme.md
Original file line number Diff line number Diff line change
Expand Up @@ -118,6 +118,16 @@ shepherd-herd -v shell-cmd -s 'systemctl status phc2sys@eth0'
shepherd-herd -v shell-cmd -s 'systemctl status ptp4l@eth0'
```

Time-sync problems can usually be fixed by restarting the services and the shepherd kernel module

```shell
# when sheep remain unsynced
shepherd-herd -v shell-cmd -s 'systemctl restart ptp4l@eth0'
shepherd-herd -v shell-cmd -s 'systemctl restart phc2sys@eth0'
# signal on gpio missing (typically after clock changes significantly)
shepherd-herd fix
```

Creating CPU-Load

- run harvest first (this will create a measurement file)
Expand Down Expand Up @@ -148,3 +158,7 @@ Configure Logic 2 Software
- set `Timer` to record for `200 s`

- start measurement (Blue Play-Button)

## Observations

- phc2sys suddenly reports high offsets (> 100 us)
52 changes: 33 additions & 19 deletions software/test_timesync/examples/sync_1channel.py
Original file line number Diff line number Diff line change
@@ -1,35 +1,49 @@
from pathlib import Path

import numpy as np
import pandas as pd
from sync_analysis import LogicTrace
from sync_analysis import LogicTraces
from sync_analysis import logger

path_here = Path(__file__).parent

lt = LogicTraces(path_here)
_dtrans: dict[str, dict[str, np.ndarray]] = {
"low": {},
"rising": {},
ltraces = LogicTraces(path_here, glitch_ns=100)
_stat: dict[str, list] = {
"diff": [],
"low": [],
"rising": [],
}
for trace in lt.traces:

for trace in ltraces.traces:
trace.to_file(path_here)
trace.analyze_inter_jitter(rising=True)

for _ch in range(trace.channel_count):
_data = trace.calc_durations_ns(_ch, edge_a_rising=True, edge_b_rising=True)
_data_r = trace.calc_durations_ns(_ch, edge_a_rising=True, edge_b_rising=True)
_name = trace.name + f"_ch{_ch}"
_expt = trace.calc_expected_value(_data)
trace.plot_series_jitter(_data - _expt, trace.data[_ch], _name, path_here)
_dtrans["rising"][_name] = trace.calc_durations_ns(_ch, edge_a_rising=True, edge_b_rising=True) - _expt
_dtrans["low"][_name] = trace.calc_durations_ns(_ch, edge_a_rising=False, edge_b_rising=True)
lt.plot_comparison_series(start=0)
lt.plot_comparison_series(start=2)
for _state, _ddict in _dtrans.items():
logger.info("State: %s", _state)
header = True
for _name, _data in _ddict.items():
LogicTrace.analyze_series_jitter(_data, _name, with_header=header)
header = False
_expt = trace.calc_expected_value(_data_r[:, 1])
_data_r[:, 1] = _data_r[:, 1] - _expt
trace.plot_series_jitter(_data_r[:, 1], _data_r[:, 0], _name, path_here)
_stat["rising"].append(trace.get_statistics(_data_r, _name))
_data_l = trace.calc_durations_ns(_ch, edge_a_rising=False, edge_b_rising=True)
_stat["low"].append(trace.get_statistics(_data_l, _name))

# sync between channels
for _ch1 in range(trace.channel_count):
_data1 = trace.get_edge_timestamps(_ch1, rising=True)
for _ch2 in range(_ch1 + 1, trace.channel_count):
_data2 = trace.get_edge_timestamps(_ch2, rising=True)
_diff = trace.calc_duration_free_ns(_data1, _data2)
_name = trace.name + f"_diff_{_ch1}u{_ch2}"
trace.plot_series_jitter(_diff[:, 1], _diff[:, 0], _name, path_here)
_stat["diff"].append(trace.get_statistics(_diff, _name))

ltraces.plot_comparison_series(start=0)
_stat_df = {_k: pd.DataFrame(_v, columns=LogicTrace.get_statistics_header()) for _k, _v in _stat.items()}
for _k, _v in _stat_df.items():
logger.info("")
logger.info("TYPE: %s", _k)
logger.info(_v)

# Trigger-Experiment:
# - watch P8_19-low variance under load (currently 29.3 - 49.3 us)
Expand Down
Binary file added software/test_timesync/media/hw_setup.jpg
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added software/test_timesync/media/sw_logic2_export.png
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
2 changes: 0 additions & 2 deletions software/test_timesync/sync_analysis/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,5 +11,3 @@
"get_files",
"logger",
]


65 changes: 47 additions & 18 deletions software/test_timesync/sync_analysis/logic_trace.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import pickle
from pathlib import Path
from typing import Self, Optional
from typing_extensions import Self
from typing import Optional

import numpy as np
import pandas as pd
Expand All @@ -15,6 +16,7 @@ def __init__(
data: np.ndarray,
*,
name: Optional[str] = None,
glitch_ns: int = 0,
) -> None:
self.name: str = name
# prepare data
Expand All @@ -26,6 +28,7 @@ def __init__(
_data = data[:, _i]
_data = self._convert_analog2digital(_data)
_data = self._filter_redundant_states(_data, data_ts)
_data = self._filter_glitches(_data, glitch_ns)
self.data.append(_data)
# data = self.filter_cs_falling_edge()

Expand All @@ -34,7 +37,7 @@ def from_file(
cls,
path: Path,
*,
rising_edge: bool = True,
glitch_ns: int = 0,
) -> Self:
if not path.exists():
raise FileNotFoundError()
Expand All @@ -45,7 +48,7 @@ def from_file(
data: np.ndarray = np.loadtxt(
path.as_posix(), delimiter=",", skiprows=1,
)
return cls(data, name=path.stem)
return cls(data, name=path.stem, glitch_ns=glitch_ns)
if path.suffix.lower() == ".pkl":
with path.open("rb") as _fh:
obj = pickle.load(_fh)
Expand Down Expand Up @@ -76,19 +79,29 @@ def _filter_redundant_states(data: np.ndarray, timestamps: np.ndarray) -> np.nda
_d0 = data[:].astype("uint8")
_d1 = np.concatenate([[not _d0[0]], _d0[:-1]])
_df = _d0 + _d1
data = timestamps[(_df == 1)]
_ds = timestamps[_df == 1]
# discard first&last entry AND make sure state=low starts
if _d0[0] == 0:
data = data[2:-1]
_ds = _ds[2:-1]
else:
data = data[1:-1]
if len(_d0) > len(data):
_ds = _ds[1:-1]
if len(_d0) > len(_ds):
logger.info(
"filtered out %d/%d events (redundant)",
len(_d0) - len(data),
len(_d0) - len(_ds),
len(_d0),
)
return data
return _ds

@staticmethod
def _filter_glitches(data: np.ndarray, duration_ns: int = 10) -> np.ndarray:
    """Remove edge timestamps that belong to pulses shorter than *duration_ns*.

    :param data: 1-D array of edge timestamps in seconds
                 (alternating signal states, as produced by the state filter)
    :param duration_ns: minimum interval between edges; shorter ones
                        are treated as glitches
    :return: filtered timestamp array
    """
    # interval between consecutive edges, converted from s to ns
    # NOTE(review): assumes timestamps are sorted ascending -- a negative
    # diff would wrap around under the uint64 cast; confirm upstream order
    _diff = ((data[1:] - data[:-1]) * 1e9).astype("uint64")
    _filter1 = _diff > duration_ns
    # keep an edge only if BOTH adjacent intervals are long enough;
    # the first and last edge have their missing neighbor padded with True
    _filter2 = np.concatenate([_filter1, [True]]) & np.concatenate([[True], _filter1])
    # number of too-short intervals
    # NOTE(review): this counts intervals, while the returned array drops
    # len(data) - _filter2.sum() timestamps -- the logged figure may differ
    _num = len(_filter1) - _filter1.sum()
    if _num > 0:
        logger.info("filtered out %d glitches", _num)
    return data[_filter2]

def calc_durations_ns(self, channel: int, edge_a_rising: bool, edge_b_rising: bool) -> np.ndarray:
_d0 = self.data[channel]
Expand All @@ -108,9 +121,9 @@ def calc_durations_ns(self, channel: int, edge_a_rising: bool, edge_b_rising: bo
_db = _d0[2::2]
_len = min(len(_da), len(_db))
_diff = _db[:_len] - _da[:_len]
return _diff * 1e9
return np.column_stack([_da[:_len], _diff * 1e9]) # 2 columns: timestamp, duration [ns]

def get_edge(self, channel: int = 0, rising: bool = True) -> np.ndarray:
def get_edge_timestamps(self, channel: int = 0, rising: bool = True) -> np.ndarray:
if rising:
return self.data[channel][1::2]
else:
Expand All @@ -132,7 +145,7 @@ def calc_duration_free_ns(data_a: np.ndarray, data_b: np.ndarray) -> np.ndarray:
data_b = data_b[:_len]
# calculate duration of offset
_diff = data_b[:_len] - data_a[:_len]
return _diff * 1e9
return np.column_stack([data_a[:_len], _diff * 1e9]) # 2 columns: timestamp, duration [ns]

@staticmethod
def calc_expected_value(data: np.ndarray) -> float:
Expand All @@ -151,29 +164,45 @@ def analyze_series_jitter(data: np.ndarray, name: str, with_header: bool = True)
logger.info("%s \t[ %d <| %d || %d || %d |> %d ]",
name, dmin, dq05, dmean, dq95, dmax)

@staticmethod
def get_statistics(data: np.ndarray, name: str) -> list:
    """Summarize *data* as one row: [name, min, q05, mean, q95, max].

    Values are rounded to integers; column order matches
    ``get_statistics_header()``.

    NOTE(review): callers in this commit pass a 2-column array
    (timestamp, duration_ns); min/max/quantile/mean here operate on the
    flattened array and would mix both columns -- confirm whether only
    the duration column (``data[:, 1]``) is intended.
    """
    dmin = round(data.min())
    dmax = round(data.max())
    dq05 = round(np.quantile(data, 0.05))
    dq95 = round(np.quantile(data, 0.95))
    dmean = round(data.mean())
    return [name, dmin, dq05, dmean, dq95, dmax]

@staticmethod
def get_statistics_header() -> list:
    """Column names matching the row layout returned by ``get_statistics()``."""
    return ["name", "min", "q05%", "mean", "q95%", "max"]

def analyze_inter_jitter(self, rising: bool = True) -> None:
_len = len(self.data)
first = True
for _i in range(_len):
for _j in range(_i+1, _len):
_di = self.get_edge(_i, rising=rising)
_dj = self.get_edge(_j, rising=rising)
_di = self.get_edge_timestamps(_i, rising=rising)
_dj = self.get_edge_timestamps(_j, rising=rising)
_name = self.name + f"_ch{_i}_ch{_j}"
_dk = LogicTrace.calc_duration_free_ns(_di, _dj)
_dk = LogicTrace.calc_duration_free_ns(_di, _dj)[:, 1]
LogicTrace.analyze_series_jitter(_dk, name=_name, with_header=first)
first = False

@staticmethod
def plot_series_jitter(data: np.ndarray, ts: np.ndarray, name: str, path: Path, size: tuple = (18, 8)) -> None:
def plot_series_jitter(data: np.ndarray, ts: np.ndarray, name: str, path: Path, size: tuple = (18, 8), y_side: int = 1000) -> None:
if path.is_dir():
_path = path / (name + f"_series_jitter.png")
_path = path / (name + f"_jitter.png")
else:
_path = path
_len = min(len(data), len(ts))
_center = np.median(data)
_range = [_center - y_side, _center + y_side]
fig, ax = plt.subplots(figsize=size)
plt.plot(ts[:_len], data[:_len]) # X,Y
ax.set_xlabel("time [s]")
ax.axes.set_ylabel("intra-trigger-jitter [ns]")
ax.axes.set_ylim(_range)
ax.axes.set_ylabel("trigger-jitter [ns]")
ax.axes.set_title(_path.stem)
fig.savefig(_path)
plt.close()
Expand Down
10 changes: 7 additions & 3 deletions software/test_timesync/sync_analysis/logic_traces.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import pickle
from pathlib import Path
from typing import Self, Optional
from typing_extensions import Self
from typing import Optional

import numpy as np
import pandas as pd
Expand All @@ -16,19 +17,22 @@ class LogicTraces:
def __init__(
self,
path: Path,
glitch_ns: int = 0,
) -> None:
self.traces: list[LogicTrace] = []
_fcsv = get_files(path, suffix=".csv")

for _f in _fcsv:
self.traces.append(LogicTrace.from_file(_f))
self.traces.append(LogicTrace.from_file(_f, glitch_ns=glitch_ns))

def plot_comparison_series(self, start: int = 0) -> None:
_names: list = [_t.name for _t in self.traces]
_data: list = [pd.Series(_t.calc_durations_ns(0, True, True)) for _t in self.traces]
_data: list = [pd.Series(_t.calc_durations_ns(0, True, True)[:, 1]) for _t in self.traces]
_len = len(_names)
_names = _names[start:]
_data = _data[start:]
if len(_names) < 1 or len(_data) < 1:
return
# TODO: this just takes first CH0
# file_names_short.reverse()
fig_title = f"improvement_trigger_statistics_boxplot_{start}to{_len}"
Expand Down

0 comments on commit 045f770

Please sign in to comment.