From 492695f8209fa2171714b7a35a371105fcbf3570 Mon Sep 17 00:00:00 2001
From: Aarjav Jain <117491745+AarjavJain101@users.noreply.github.com>
Date: Tue, 11 Jun 2024 16:11:24 -0700
Subject: [PATCH] feat: View Raw Messages (#91)

---
 README.md         |  5 ++++-
 link_telemetry.py | 19 +++++++++++++++++++
 parser/main.py    |  8 ++++----
 tools/README.md   |  6 ++++++
 4 files changed, 33 insertions(+), 5 deletions(-)
 create mode 100644 tools/README.md

diff --git a/README.md b/README.md
index 595e6235..f7484816 100644
--- a/README.md
+++ b/README.md
@@ -466,7 +466,9 @@ Here are some example invocations:
 | `./link_telemetry.py -r all --live-on all --debug` | Makes the link randomly generate message data for CAN, GPS, and IMU (all), requests the parser to write to the debug InfluxDB bucket, and **l**ivestreams all data to Grafana. |
 | `./link_telemetry.py -r can -f 100 --debug` | Makes the link randomly generate CAN message data at 100Hz and requests the parser to write to the debug InfluxDB bucket. |
 | `./link_telemetry.py -o --debug` | Makes the link to recieve data from PCAN and requests the parser to write to the debug InfluxDB bucket. |
-| `./link_telemetry.py -o --prod` | Makes the link to recieve data from PCAN and requests the parser to write to the CAN InfluxDB bucket. |
+| `./link_telemetry.py -o --prod` | Makes the link receive data from PCAN and requests the parser to write to the CAN InfluxDB bucket. |
+| `./link_telemetry.py -o --raw` | Prints the **hexified** serial messages that will be sent to the parser in the `message` field of the payload. |
+| `./link_telemetry.py -o --rawest` | Prints each `CHUNK_SIZE` chunk of data received from serial as a **hex** string. Because of the chunking algorithm, **the chunk may contain incomplete messages**. |
 
 > Previously, the `--prod` option would write to the production InfluxDB bucket. This has been changed to write to the CAN InfluxDB bucket as CAN is currently the only source of data source that is supported in Sunlink. Soon, other buckets will be added along with support for other data sources including GPS, IMU, and VDS (Vehicle Dynamics Sensors) data. New buckets must be created with shell scripts, similar to in the script `scripts/create-influx_debug-bucket.sh`. The .env file must also contain the name for the bucket created on `telemetry.ubcsolar.com:8086`. The parser script must be modified to support posting data to new buckets.
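For context on the two new flags, the sketch below is illustrative only (it is not code from this patch): it shows the difference between the `--rawest` output (the whole serial chunk as one hex string) and the `--raw` output (each complete, delimiter-terminated message re-hexified after the chunk-splitting step). The chunk contents are made up.

```python
# Illustrative sketch of the two output formats; the chunk below is made up and
# this is not the actual link_telemetry.py code.

# A hypothetical serial chunk containing two CR/LF-terminated messages.
chunk_hex = "48656c6c6f0d0a576f726c640d0a"   # "Hello\r\nWorld\r\n" in hex

# --rawest: print the chunk exactly as read from serial, as one hex string.
print(chunk_hex)

# --raw: split on the CR/LF delimiter and print each complete message as hex.
for message in bytes.fromhex(chunk_hex).split(b"\r\n"):
    if message:  # skip the empty trailing element produced by split()
        print((message + b"\r\n").hex())
```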
diff --git a/link_telemetry.py b/link_telemetry.py
index 6ec1deb2..07631f2a 100755
--- a/link_telemetry.py
+++ b/link_telemetry.py
@@ -399,6 +399,7 @@ def process_response(future: concurrent.futures.Future, args, display_filters: l
         write_to_log_file(response['message'], os.path.join(FAIL_DIRECTORY, "FAILED_UPLOADS_{}.txt".format(formatted_time)) if args.log_upload else FAIL_FILE_NAME)
         write_to_log_file(fail_msg + '\n', os.path.join(DEBUG_DIRECTORY, "FAILED_UPLOADS_{}.txt".format(formatted_time)) if args.log_upload else DEBUG_FILE_NAME, convert_to_hex=False)
     elif response["result"] == "INFLUX_WRITE_FAIL":
+        fail_msg = f"{ANSI_RED}INFLUX_WRITE_FAIL{ANSI_ESCAPE}: \n" + f"{response['error']}"
         print(f"Failed to write measurements for {response['type']} message to InfluxDB!")
         print(response)
@@ -474,6 +475,12 @@ def process_message(message: str, buffer: str = "") -> list:
     if len(parts[-1]) != 30 or len(parts[-1]) != 396 or len(parts[-1]) != 44:
         buffer = parts.pop()
 
+    try:
+        parts = [part + "0d0a" for part in parts if len(part) == 30 or len(part) == 396 or len(part) == 44]
+    except ValueError as e:
+        print(f"{ANSI_RED}Failed to split message: {str([part for part in parts])}{ANSI_ESCAPE}" + f" ERROR: {e}")
+        return [], buffer
+
     return [bytes.fromhex(part).decode('latin-1') for part in parts] , buffer
@@ -528,6 +535,12 @@ def main():
     source_group.add_argument("--live-off", action="store_true",
                         help=("Will not stream any data to grafana"))
 
+    source_group.add_argument("--raw", action="store_true",
+                        help=("Display raw data from the serial stream AFTER the chunking algorithm"))
+
+    source_group.add_argument("--rawest", action="store_true",
+                        help=("Display raw data from the serial stream as full CHUNK_SIZE chunks"))
+
     source_group.add_argument("-l", "--log", nargs='+',
                         help=("Args create a list of message classes or ID's to pretty log to a file. no args for all, all for all"))
@@ -705,9 +718,15 @@ def main():
             # read in bytes from COM port
             chunk = ser.read(CHUNK_SIZE)
             chunk = chunk.hex()
+
+            if args.rawest:
+                print(chunk)
+
             parts, buffer = process_message(chunk, buffer)
 
             for part in parts:
+                if args.raw:
+                    print(part.encode('latin-1').hex())
                 sendToParser(part, live_filters, log_filters, display_filters, args, PARSER_ENDPOINT)
         sendToParser(message, live_filters, log_filters, display_filters, args, PARSER_ENDPOINT)
diff --git a/parser/main.py b/parser/main.py
index f69a0e2d..2030a7a2 100644
--- a/parser/main.py
+++ b/parser/main.py
@@ -243,13 +243,13 @@ def parse_request():
 
         # try extracting measurements
         try:
-            message = create_message(parse_request["message"])
+            message = create_message(msg)
         except Exception as e:
             app.logger.warn(
-                f"Unable to extract measurements for raw message {parse_request['message']}")
+                f"Unable to extract measurements for raw message {msg}")
             curr_response = {
                 "result": "PARSE_FAIL",
-                "message": str(parse_request["message"]),
+                "message": str(msg),
                 "error": str(e),
             }
             all_response.append(curr_response)
@@ -372,7 +372,7 @@ def parse_and_write_request_bucket(bucket):
             app.logger.warning("Unable to write measurement to InfluxDB!")
             curr_response = {
                 "result": "INFLUX_WRITE_FAIL",
-                "message": str(parse_request["message"]),
+                "message": str(msg),
                 "error": str(e),
                 "type": type
             }
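For readers skimming the `process_message()` hunk above, here is a minimal sketch of the splitting-and-buffering idea, assuming (as the patch does) that complete messages are 30, 44, or 396 hex characters long and end with the `0d0a` (CR/LF) delimiter. The function name `split_hex_stream` is hypothetical, and the length check uses `not in` rather than the chained `!=` comparisons in the original.

```python
# Minimal sketch of the chunk-splitting idea in process_message(); the function
# name is hypothetical, and the message lengths and delimiter are taken from the patch.
VALID_HEX_LENGTHS = {30, 44, 396}   # complete-message sizes, in hex characters
DELIMITER = "0d0a"                  # "\r\n" as hex

def split_hex_stream(chunk_hex, buffer=""):
    """Return (complete messages, leftover buffer) for one serial chunk."""
    parts = (buffer + chunk_hex).split(DELIMITER)

    # An incomplete trailing piece is carried over to the next chunk.
    buffer = parts.pop() if parts and len(parts[-1]) not in VALID_HEX_LENGTHS else ""

    # Keep only well-formed parts and restore the delimiter that split() removed.
    return [p + DELIMITER for p in parts if len(p) in VALID_HEX_LENGTHS], buffer
```

Carrying the incomplete tail over in the buffer is what lets a `--rawest` chunk end mid-message without losing data; the fragment is simply completed by the next chunk.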
diff --git a/tools/README.md b/tools/README.md
new file mode 100644
index 00000000..5fcd6b28
--- /dev/null
+++ b/tools/README.md
@@ -0,0 +1,6 @@
+# Memorator Script Usage Guide
+* Ensure that canlib is installed correctly. [**See this guide**](https://github.com/UBC-Solar/firmware_v3/tree/master/tools/t_programs/sendRTC).
+* Ensure that you have the SD card containing the messages logged by the Memorator.
+* Plug the SD card into the device that is running sunlink.
+* Navigate to the `tools/` directory and open the `MemoratorUploader.py` script.
+* In that script, change the `LOG_FOLDER` constant to the SD card directory (the directory that contains the `LOG000xx.KMF` files); a quick sanity check is sketched below.
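Before running the uploader, something like the following can confirm that the folder you plan to use for `LOG_FOLDER` actually contains the expected log files. This check is illustrative and is not part of `MemoratorUploader.py`; the mount path shown is hypothetical.

```python
# Illustrative sanity check (not part of MemoratorUploader.py): list the .KMF
# files under the folder you plan to use for LOG_FOLDER. The path is hypothetical;
# replace it with your SD card's mount point.
from pathlib import Path

LOG_FOLDER = Path("/media/user/MEMORATOR")  # hypothetical SD card mount point

kmf_files = sorted(LOG_FOLDER.glob("LOG*.KMF"))
if not kmf_files:
    raise SystemExit(f"No .KMF files found in {LOG_FOLDER} -- check the SD card path")

for kmf in kmf_files:
    print(kmf.name)
```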