Skip to content

Commit

Permalink
feat: View Raw Messages (#91)
Browse files Browse the repository at this point in the history
  • Loading branch information
AarjavJain101 authored Jun 11, 2024
1 parent 1604288 commit 492695f
Show file tree
Hide file tree
Showing 4 changed files with 33 additions and 5 deletions.
5 changes: 4 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -466,7 +466,10 @@ Here are some example invocations:
| `./link_telemetry.py -r all --live-on all --debug` | Makes the link randomly generate message data for CAN, GPS, and IMU (all), requests the parser to write to the debug InfluxDB bucket, and **l**ivestreams all data to Grafana. |
| `./link_telemetry.py -r can -f 100 --debug` | Makes the link randomly generate CAN message data at 100Hz and requests the parser to write to the debug InfluxDB bucket. |
| `./link_telemetry.py -o --debug` | Makes the link receive data from PCAN and requests the parser to write to the debug InfluxDB bucket. |
| `./link_telemetry.py -o --prod` | Makes the link receive data from PCAN and requests the parser to write to the CAN InfluxDB bucket. |
| `./link_telemetry.py -o --prod` | Makes the link receive data from PCAN and requests the parser to write to the CAN InfluxDB bucket. |
| `./link_telemetry.py -o --raw` | Will print out the **hexified** serial messages that will be sent to the parser in the `message` field of the payload. |
| `./link_telemetry.py -o --rawest` | This prints the exact `CHUNK_SIZE` of data received from serial as a **hex** string. Because of the chunking algorithm, **the chunk may have incomplete messages**. |

> Previously, the `--prod` option would write to the production InfluxDB bucket. This has been changed to write to the CAN InfluxDB bucket, as CAN is currently the only data source that is supported in Sunlink. Soon, other buckets will be added along with support for other data sources including GPS, IMU, and VDS (Vehicle Dynamics Sensors) data. New buckets must be created with shell scripts, similar to the script `scripts/create-influx_debug-bucket.sh`. The .env file must also contain the name of the bucket created on `telemetry.ubcsolar.com:8086`. The parser script must be modified to support posting data to new buckets.
Expand Down
19 changes: 19 additions & 0 deletions link_telemetry.py
Original file line number Diff line number Diff line change
Expand Up @@ -399,6 +399,7 @@ def process_response(future: concurrent.futures.Future, args, display_filters: l
write_to_log_file(response['message'], os.path.join(FAIL_DIRECTORY, "FAILED_UPLOADS_{}.txt".format(formatted_time)) if args.log_upload else FAIL_FILE_NAME)
write_to_log_file(fail_msg + '\n', os.path.join(DEBUG_DIRECTORY, "FAILED_UPLOADS_{}.txt".format(formatted_time)) if args.log_upload else DEBUG_FILE_NAME, convert_to_hex=False)
elif response["result"] == "INFLUX_WRITE_FAIL":
fail_msg = f"{ANSI_RED}INFLUX_WRITE_FAIL{ANSI_ESCAPE}: \n" + f"{response['error']}"
print(f"Failed to write measurements for {response['type']} message to InfluxDB!")
print(response)

Expand Down Expand Up @@ -474,6 +475,12 @@ def process_message(message: str, buffer: str = "") -> list:
if len(parts[-1]) != 30 or len(parts[-1]) != 396 or len(parts[-1]) != 44:
buffer = parts.pop()

try:
parts = [part + "0d0a" for part in parts if len(part) == 30 or len(part) == 396 or len(part) == 44]
except ValueError as e:
print(f"{ANSI_RED}Failed to split message: {str([part for part in parts])}{ANSI_ESCAPE}"
f" ERROR: {e}")
return [], buffer
return [bytes.fromhex(part).decode('latin-1') for part in parts] , buffer


Expand Down Expand Up @@ -528,6 +535,12 @@ def main():
source_group.add_argument("--live-off", action="store_true",
help=("Will not stream any data to grafana"))

source_group.add_argument("--raw", action="store_true",
help=("Will enable displaying of raw data coming from serial stream AFTER cutting algorithm"))

source_group.add_argument("--rawest", action="store_true",
help=("Will enable displaying of raw data coming from serial stream in chunk size"))

source_group.add_argument("-l", "--log", nargs='+',
help=("Args create a list of message classes or ID's to pretty log to a file. no args for all, all for all"))

Expand Down Expand Up @@ -705,9 +718,15 @@ def main():
# read in bytes from COM port
chunk = ser.read(CHUNK_SIZE)
chunk = chunk.hex()

if args.rawest:
print(chunk)

parts, buffer = process_message(chunk, buffer)

for part in parts:
if args.raw:
print(part.encode('latin-1').hex())
sendToParser(part, live_filters, log_filters, display_filters, args, PARSER_ENDPOINT)

sendToParser(message, live_filters, log_filters, display_filters, args, PARSER_ENDPOINT)
Expand Down
8 changes: 4 additions & 4 deletions parser/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -243,13 +243,13 @@ def parse_request():

# try extracting measurements
try:
message = create_message(parse_request["message"])
message = create_message(msg)
except Exception as e:
app.logger.warn(
f"Unable to extract measurements for raw message {parse_request['message']}")
f"Unable to extract measurements for raw message {msg}")
curr_response = {
"result": "PARSE_FAIL",
"message": str(parse_request["message"]),
"message": str(msg),
"error": str(e),
}
all_response.append(curr_response)
Expand Down Expand Up @@ -372,7 +372,7 @@ def parse_and_write_request_bucket(bucket):
app.logger.warning("Unable to write measurement to InfluxDB!")
curr_response = {
"result": "INFLUX_WRITE_FAIL",
"message": str(parse_request["message"]),
"message": str(msg),
"error": str(e),
"type": type
}
Expand Down
6 changes: 6 additions & 0 deletions tools/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
# Memorator Script Usage Guide
* Ensure that canlib is installed correctly. [**See this guide**](https://github.com/UBC-Solar/firmware_v3/tree/master/tools/t_programs/sendRTC).
* Ensure that you have the SD Card with the logged messages from the memorator
* Then plug the SD Card into the device that is running sunlink
* Now, navigate to the `tools/` directory and then to the `MemoratorUploader.py` script.
* Inside here you will need to change the `LOG_FOLDER` constant to the directory of the SD Card (directory that contains the `LOG000xx.KMF` files)

0 comments on commit 492695f

Please sign in to comment.