From c5ce3067c3c3a7743eb963cbc0bbd63867185b5b Mon Sep 17 00:00:00 2001 From: AarjavJain101 Date: Wed, 15 May 2024 22:08:09 -0700 Subject: [PATCH 01/13] Created memorator logging script --- tools/MemoratorUploader.py | 124 +++++++++++++++++++++++++++++++++++++ 1 file changed, 124 insertions(+) create mode 100644 tools/MemoratorUploader.py diff --git a/tools/MemoratorUploader.py b/tools/MemoratorUploader.py new file mode 100644 index 00000000..833bd0dc --- /dev/null +++ b/tools/MemoratorUploader.py @@ -0,0 +1,124 @@ +import canlib.kvmlib as kvmlib +import re +import datetime +import struct + +# Script Constants +PATH = "D:\\LOG000{:02d}.KMF" +NUM_LOGS = 15 +MB_TO_KB = 1024 +EPOCH_START = datetime.datetime(1970, 1, 1, tzinfo=datetime.timezone.utc) + +# Formatting Constants +ANSI_GREEN = "\033[92m" +ANSI_BOLD = "\033[1m" +ANSI_RED = "\033[91m" +ANSI_RESET = "\033[0m" + +# Regex Patterns for logfile parsing +PATTERN_DATETIME = re.compile(r't:\s+(.*?)\s+DateTime:\s+(.*)') +PATTERN_TRIGGER = re.compile(r't:\s+(.*?)\s+Log Trigger Event.*') +PATTERN_EVENT = re.compile(r't:\s+(.*?)\s+ch:0 f:\s+(.*?) id:\s+(.*?) dlc:\s+(.*?) d:(.*)') + + +def upload(log_file: kvmlib.LogFile, parserCallFunc: callable, live_filters: list, log_filters: list, args: list, endpoint: str): + start_time = None + for event in log_file: + str_event = str(event) + print(str_event) + if PATTERN_DATETIME.search(str_event): + match = PATTERN_DATETIME.search(str_event) + date_time_str = match.group(2) + print(f"Matched DateTime: {date_time_str}") + date_time_obj = datetime.datetime.strptime(date_time_str, '%Y-%m-%d %H:%M:%S') + date_time_obj = date_time_obj.replace(tzinfo=datetime.timezone.utc) + start_time = (date_time_obj - EPOCH_START).total_seconds() + elif PATTERN_TRIGGER.search(str_event): + continue + elif PATTERN_EVENT.search(str_event): + match = PATTERN_EVENT.search(str_event) + timestamp = start_time + float(match.group(1)) + timestamp_str = struct.pack('>d', timestamp).decode('latin-1') + + id = int(match.group(3), 16) + id_str = id.to_bytes(4, 'big').decode('latin-1') + + dlc_str = match.group(4) + + data = bytes.fromhex(match.group(5).replace(' ', '')) + data_str = data.decode('latin-1') + + can_str = timestamp_str + "#" + id_str + data_str + dlc_str + + parserCallFunc(can_str, live_filters, log_filters, args, endpoint) + + +def memorator_upload_script(parserCallFunc: callable, live_filters: list, log_filters: list, args: list, endpoint: str): + # Open each KMF file + for i in range(NUM_LOGS): + kmf_file = kvmlib.openKmf(PATH.format(i)) + print(f"{ANSI_GREEN}Opening file: {PATH.format(i)}{ANSI_RESET}") # Green stdout + + # Access the log attribute of the KMF object + log = kmf_file.log + + # First calculate total number of events + total_events = 0 + for log_file in log: + total_events += log_file.event_count_estimation() + + # Display the number of logs + num_logs = len(log) + print(f"{ANSI_BOLD}Found {num_logs} logs with {total_events} events total{ANSI_RESET}") + + # Iterate over all log files + for j, log_file in enumerate(log): + # Calculate and display the approximate size + num_events = log_file.event_count_estimation() + kmf_file_size = kmf_file.disk_usage[0] + kb_size = kmf_file_size * (num_events / total_events) * MB_TO_KB + + # Display information about each log + start_time = log_file.start_time.isoformat(' ') + end_time = log_file.end_time.isoformat(' ') + print(f"{ANSI_BOLD}\nLog Idx = {j}, Approximate size = {kb_size:.2f} KB:{ANSI_RESET}") + print(f"{ANSI_BOLD}\tEstimated events : 
{num_events}{ANSI_RESET}") + print(f"{ANSI_BOLD}\tStart time : {start_time}{ANSI_RESET}") + print(f"{ANSI_BOLD}\tEnd time : {end_time}{ANSI_RESET}") + + # Ask the user what to upload + upload_input = input(f"{ANSI_GREEN}Enter what logs to upload ('all' or x-y inclusive ranges comma separated):{ANSI_RESET} ") + if upload_input.lower() == 'all': + # Upload all logs + for j in range(num_logs): + upload(log[j], parserCallFunc, live_filters, log_filters, args, endpoint) + else: + # Parse the user input + ranges = upload_input.split(',') + for r in ranges: + if '-' not in r: + idx = int(r) + if idx < 0 or idx >= num_logs: + print(f"{ANSI_RED}Index {idx} is out of range{ANSI_RESET}") + else: + upload(log[idx], parserCallFunc, live_filters, log_filters, args, endpoint) + else: + start, end = map(int, r.split('-')) + # Upload the specified logs + for j in range(start, end + 1): + if j < 0 or j >= num_logs: + print(f"{ANSI_RED}Index {j} is out of range{ANSI_RESET}") + else: + upload(log[j], parserCallFunc, live_filters, log_filters, args, endpoint) + + # Close the KMF file + kmf_file.close() + + +# TESTING PURPOSES +def main(): + memorator_upload_script() + + +if __name__ == "__main__": + main() From da622255f1ad96919b1e1459e39ca505d672442c Mon Sep 17 00:00:00 2001 From: Aarjavjain101 Date: Tue, 21 May 2024 01:23:34 -0600 Subject: [PATCH 02/13] Updated link telemetry to use memorator log script. Needs testing --- link_telemetry.py | 82 +++++++++++++++++------------------------------ 1 file changed, 29 insertions(+), 53 deletions(-) diff --git a/link_telemetry.py b/link_telemetry.py index 5c742a72..2be439d0 100755 --- a/link_telemetry.py +++ b/link_telemetry.py @@ -25,6 +25,8 @@ import warnings import concurrent.futures +from tools.MemoratorUploader import memorator_upload_script + __PROGRAM__ = "link_telemetry" __VERSION__ = "0.4" @@ -371,37 +373,34 @@ def read_lines_from_file(file_path): for line in file: yield line.strip() -def upload_logs(args, live_filters): - # Get a list of all .txt files in the logfiles directory - txt_files = [file for file in glob.glob(FAIL_DIRECTORY + '/*.txt') if not file[len(FAIL_DIRECTORY):].startswith('FAILED_UPLOADS')] - print(f"Found {len(txt_files)} .txt files in {FAIL_DIRECTORY}\n") - # Iterate over each .txt file - for file_path in txt_files: - print(f"Reading file {file_path}...") - message_generator = read_lines_from_file(file_path) +""" +Purpose: Sends data and filters to parser and registers a callback to process the response +Parameters: + message - raw byte data to be parsed on parser side + live_filters - filters for which messages to live stream to Grafana + log_filters - filters for which messages to log to file + args - the arguments passed to ./link_telemetry.py + parser_endpoint - the endpoint to send the data to +Returns: None +""" +def sendToParser(message: str, live_filters: list, log_filters: list, args: list, parser_endpoint: str): + payload = { + "message" : message, + "live_filters" : live_filters, + "log_filters" : log_filters + } + + # submit to thread pool + future = executor.submit(parser_request, payload, parser_endpoint) + + # register done callback with future (lambda function to pass in arguments) + future.add_done_callback(lambda future: process_response(future, args)) - while True: - try: - # Converts a string of hex characters to a string of ASCII characters - # Preserves weird characters to be written and copied correctly - log_line = bytes.fromhex(next(message_generator)).decode('latin-1') - except StopIteration: - break - - # 
Create payload - payload = { - "message" : log_line, - "live_filters" : live_filters - } - - future = executor.submit(parser_request, payload, DEBUG_WRITE_ENDPOINT) - - # register done callback with future (lambda function to pass in arguments) - future.add_done_callback(lambda future: process_response(future, args)) - print(f"Done reading {file_path}") - print() +def upload_logs(args, live_filters, log_filters, endpoint): + # Call the memorator log uploader function + memorator_upload_script(sendToParser, live_filters, log_filters, args, endpoint) """ @@ -430,29 +429,6 @@ def process_message(message: str, buffer: str = "") -> list: return [bytes.fromhex(part).decode('latin-1') for part in parts] , buffer -""" -Purpose: Sends data and filters to parser and registers a callback to process the response -Parameters: - message - raw byte data to be parsed on parser side - live_filters - filters for which messages to live stream to Grafana - log_filters - filters for which messages to log to file - args - the arguments passed to ./link_telemetry.py - parser_endpoint - the endpoint to send the data to -Returns: None -""" -def sendToParser(message: str, live_filters: list, log_filters: list, args: list, parser_endpoint: str): - payload = { - "message" : message, - "live_filters" : live_filters, - "log_filters" : log_filters - } - - # submit to thread pool - future = executor.submit(parser_request, payload, parser_endpoint) - - # register done callback with future (lambda function to pass in arguments) - future.add_done_callback(lambda future: process_response(future, args)) - def main(): """ @@ -625,7 +601,7 @@ def main(): DEBUG_FILE_NAME = os.path.join(DEBUG_DIRECTORY, LOG_FILE) if args.log_upload: - upload_logs(args, live_filters) + upload_logs(args, live_filters, log_filters, PARSER_ENDPOINT) return 0 while True: @@ -683,4 +659,4 @@ def main(): if __name__ == "__main__": signal.signal(signal.SIGINT, sigint_handler) main() - \ No newline at end of file + From aaa259176de102c4c4158e5659743d2292553203 Mon Sep 17 00:00:00 2001 From: Aarjavjain101 Date: Tue, 21 May 2024 01:25:31 -0600 Subject: [PATCH 03/13] added newline --- link_telemetry.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/link_telemetry.py b/link_telemetry.py index 2be439d0..3ff39bc5 100755 --- a/link_telemetry.py +++ b/link_telemetry.py @@ -659,4 +659,5 @@ def main(): if __name__ == "__main__": signal.signal(signal.SIGINT, sigint_handler) main() - + + From 462f04f6e852bd934f7a0c350aabeca3ef6609fc Mon Sep 17 00:00:00 2001 From: Aarjavjain101 Date: Tue, 21 May 2024 10:25:59 -0700 Subject: [PATCH 04/13] Fixed README --- README.md | 21 +++++++-------------- docker-compose.yaml | 4 ++-- 2 files changed, 9 insertions(+), 16 deletions(-) diff --git a/README.md b/README.md index ced6eeef..19e21b68 100644 --- a/README.md +++ b/README.md @@ -188,14 +188,11 @@ INFLUX_ADMIN_PASSWORD="" INFLUX_ORG="UBC Solar" -# used to store random data for debugging purposes -INFLUX_DEBUG_BUCKET="Debug" - -# used to store real data from the car -INFLUX_CAN_BUCKET="CAN" +# Needed to Initialize InfluxDB +INFLUX_INIT_BUCKET="Init_test" +INFLUX_DEBUG_BUCKET="CAN_test" # Parser secret key - SECRET_KEY="" # Access tokens @@ -219,11 +216,9 @@ INFLUX_ADMIN_PASSWORD="new_password" INFLUX_ORG="UBC Solar" -# used to store random data for debugging purposes -INFLUX_DEBUG_BUCKET="Debug" - -# used to store real data from the car -INFLUX_CAN_BUCKET="CAN" +# Needed to Initialize InfluxDB +INFLUX_INIT_BUCKET="Init_test" +INFLUX_DEBUG_BUCKET="CAN_test" # 
Secret key @@ -349,8 +344,6 @@ If all your tokens are correctly set up, the parser should return the following: - If your output looks like the above, then congratulations! You've finished setting up the telemetry cluster! :heavy_check_mark: -## Seting up PCAN drivers - ## Telemetry link setup The telemetry link must be set up on the host machine on which the radio receiver is connected. This links the radio module to the telemetry cluster and enables using radio as a data source. @@ -479,7 +472,7 @@ Here are some example invocations: ## Running the Offline Log Uploader -To run the offline log uploader the `logfiles` folder should have a generated log file to read and request the parser to write to InfluxDB in the `_test` buckets (like in debug mode). To do this use the -u (--log-upload) flag as follows: +To run the offline log uploader the `logfiles` folder should have a generated log file to read and request the parser to write to InfluxDB in the specified buckets (_test or _prod based on --debug or --prod options respectively). To do this use the -u (--log-upload) flag as follows: ```bash ./link_telemetry.py -u diff --git a/docker-compose.yaml b/docker-compose.yaml index 2de1085d..d3476055 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -27,7 +27,7 @@ services: - INFLUX_ADMIN_PASSWORD=${INFLUX_ADMIN_PASSWORD} - INFLUX_TOKEN=${INFLUX_TOKEN} - INFLUX_ORG=${INFLUX_ORG} - - INFLUX_BUCKET=${INFLUX_PROD_BUCKET} + - INFLUX_BUCKET=${INFLUX_INIT_BUCKET} parser: build: . @@ -54,7 +54,7 @@ services: - DOCKER_INFLUXDB_INIT_USERNAME=${INFLUX_ADMIN_USERNAME} - DOCKER_INFLUXDB_INIT_PASSWORD=${INFLUX_ADMIN_PASSWORD} - DOCKER_INFLUXDB_INIT_ORG=${INFLUX_ORG} - - DOCKER_INFLUXDB_INIT_BUCKET=${INFLUX_PROD_BUCKET} + - DOCKER_INFLUXDB_INIT_BUCKET=${INFLUX_INIT_BUCKET} - INFLUX_DEBUG_BUCKET=${INFLUX_DEBUG_BUCKET} restart: always From 784bf1b63286b17d2fa77aa1dde67e3f2ec241c1 Mon Sep 17 00:00:00 2001 From: Aarjavjain101 Date: Tue, 21 May 2024 10:30:28 -0700 Subject: [PATCH 05/13] Fixed 'PROD' bucket instance --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 19e21b68..595e6235 100644 --- a/README.md +++ b/README.md @@ -236,7 +236,7 @@ For the `GRAFANA_ADMIN_USERNAME` and `GRAFANA_ADMIN_PASSWORD` fields, you may ch The `SECRET_KEY` field must be generated. 
-> :warning: **WARNING: Make sure not to change the `INFLUX_ORG`, `INFLUX_DEBUG_BUCKET`, and `INFLUX_PROD_BUCKET` variables from their defaults since that might break the provisioned Grafana dashboards.** +> :warning: **WARNING: Make sure not to change the `INFLUX_ORG`, `INFLUX_INIT_BUCKET`, and `INFLU_DEBUG_BUCKET` variables from their defaults since that might break the provisioned Grafana dashboards.** #### Generating the secret key From fe60126cb06a6474be794ec4e2a93e2a079e7202 Mon Sep 17 00:00:00 2001 From: Aarjavjain101 Date: Tue, 21 May 2024 19:16:28 -0700 Subject: [PATCH 06/13] Fixed template --- templates/template_dotenv.env | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/templates/template_dotenv.env b/templates/template_dotenv.env index df02a703..9a13c813 100644 --- a/templates/template_dotenv.env +++ b/templates/template_dotenv.env @@ -12,11 +12,9 @@ INFLUX_ORG="UBC Solar" MESSAGE_TYPES="CAN,GPS,IMU" -# used to store random data for debugging purposes -INFLUX_DEBUG_BUCKET="Debug" - -# used to store real data from the car -INFLUX_CAN_BUCKET="CAN" +# Needed to Initialize InfluxDB +INFLUX_INIT_BUCKET="Init_test" +INFLUX_DEBUG_BUCKET="CAN_test" DS_INFLUXDB="o2uhkwje8832ha" From 84ae151d5f5ea645d5133283a80f4fd1064fbcce Mon Sep 17 00:00:00 2001 From: Aarjavjain101 Date: Tue, 21 May 2024 20:07:34 -0700 Subject: [PATCH 07/13] only tables on optin + data padded --- link_telemetry.py | 63 ++++++++++++++++++++------------------ tools/MemoratorUploader.py | 3 +- 2 files changed, 35 insertions(+), 31 deletions(-) diff --git a/link_telemetry.py b/link_telemetry.py index 3ff39bc5..1983ef42 100755 --- a/link_telemetry.py +++ b/link_telemetry.py @@ -312,36 +312,37 @@ def process_response(future: concurrent.futures.Future, args): return if parse_response["result"] == "OK": - # Create a table - table = BeautifulTable() - - # Set the table title - table.set_style(BeautifulTable.STYLE_RST) - table.column_widths = [110] - table.width_exceed_policy = BeautifulTable.WEP_WRAP - - # Title - table.rows.append([f"{ANSI_GREEN}{parse_response['type']}{ANSI_ESCAPE}"]) - display_data = parse_response['message'] - - # Add columns as subtable - subtable = BeautifulTable() - subtable.set_style(BeautifulTable.STYLE_GRID) - - cols = display_data["COL"] - subtable.rows.append(cols.keys()) - for i in range(len(list(cols.values())[0])): - subtable.rows.append([val[i] for val in cols.values()]) - - table.rows.append([subtable]) - - # Add rows - rows = display_data["ROW"] - for row_head, row_data in rows.items(): - table.rows.append([f"{ANSI_BOLD}{row_head}{ANSI_ESCAPE}"]) - table.rows.append(row_data) - - print(table) + if args.table_on: + # Create a table + table = BeautifulTable() + + # Set the table title + table.set_style(BeautifulTable.STYLE_RST) + table.column_widths = [110] + table.width_exceed_policy = BeautifulTable.WEP_WRAP + + # Title + table.rows.append([f"{ANSI_GREEN}{parse_response['type']}{ANSI_ESCAPE}"]) + display_data = parse_response['message'] + + # Add columns as subtable + subtable = BeautifulTable() + subtable.set_style(BeautifulTable.STYLE_GRID) + + cols = display_data["COL"] + subtable.rows.append(cols.keys()) + for i in range(len(list(cols.values())[0])): + subtable.rows.append([val[i] for val in cols.values()]) + + table.rows.append([subtable]) + + # Add rows + rows = display_data["ROW"] + for row_head, row_data in rows.items(): + table.rows.append([f"{ANSI_BOLD}{row_head}{ANSI_ESCAPE}"]) + table.rows.append(row_data) + + print(table) if parse_response["logMessage"]: 
            write_to_log_file(table, LOG_FILE_NAME, convert_to_hex=False)
 
@@ -460,6 +461,8 @@ def main():
                         help=("Requests parser to write parsed data to the debug InfluxDB bucket."))
     write_group.add_argument("--prod", action="store_true",
                         help=("Requests parser to write parsed data to the production InfluxDB bucket."))
+    write_group.add_argument("--table-on", action="store_true",
+                        help=("Display parsed data in pretty tables. Off by default; only parse failures are shown."))
     write_group.add_argument("--no-write", action="store_true",
                         help=(("Requests parser to skip writing to the InfluxDB bucket and streaming"
                                "to Grafana. Cannot be used with --debug and --prod options.")))
diff --git a/tools/MemoratorUploader.py b/tools/MemoratorUploader.py
index 833bd0dc..595f4d76 100644
--- a/tools/MemoratorUploader.py
+++ b/tools/MemoratorUploader.py
@@ -46,7 +46,7 @@ def upload(log_file: kvmlib.LogFile, parserCallFunc: callable, live_filters: lis
         dlc_str = match.group(4)
 
         data = bytes.fromhex(match.group(5).replace(' ', ''))
-        data_str = data.decode('latin-1')
+        data_str = data.ljust(8, b'\0').decode('latin-1')
 
         can_str = timestamp_str + "#" + id_str + data_str + dlc_str
 
@@ -122,3 +122,4 @@ def main():
 
 if __name__ == "__main__":
     main()
+

From 3e544632c3c61d440bd76dc040394c9ff1f7af48 Mon Sep 17 00:00:00 2001
From: Aarjavjain101
Date: Tue, 21 May 2024 20:11:03 -0700
Subject: [PATCH 08/13] Remove extra print

---
 tools/MemoratorUploader.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/tools/MemoratorUploader.py b/tools/MemoratorUploader.py
index 595f4d76..fa6afc69 100644
--- a/tools/MemoratorUploader.py
+++ b/tools/MemoratorUploader.py
@@ -25,7 +25,6 @@ def upload(log_file: kvmlib.LogFile, parserCallFunc: callable, live_filters: lis
     start_time = None
     for event in log_file:
         str_event = str(event)
-        print(str_event)
         if PATTERN_DATETIME.search(str_event):
             match = PATTERN_DATETIME.search(str_event)
             date_time_str = match.group(2)

From 3c93a46b3a406a989d1e58d45d16b01d847a3f94 Mon Sep 17 00:00:00 2001
From: Aarjavjain101
Date: Tue, 21 May 2024 20:40:08 -0700
Subject: [PATCH 09/13] Upload everything and delete all after.
--- tools/MemoratorUploader.py | 44 ++++++++++++++++---------------------- 1 file changed, 19 insertions(+), 25 deletions(-) diff --git a/tools/MemoratorUploader.py b/tools/MemoratorUploader.py index fa6afc69..091c627d 100644 --- a/tools/MemoratorUploader.py +++ b/tools/MemoratorUploader.py @@ -85,34 +85,28 @@ def memorator_upload_script(parserCallFunc: callable, live_filters: list, log_f print(f"{ANSI_BOLD}\tStart time : {start_time}{ANSI_RESET}") print(f"{ANSI_BOLD}\tEnd time : {end_time}{ANSI_RESET}") - # Ask the user what to upload - upload_input = input(f"{ANSI_GREEN}Enter what logs to upload ('all' or x-y inclusive ranges comma separated):{ANSI_RESET} ") - if upload_input.lower() == 'all': - # Upload all logs - for j in range(num_logs): - upload(log[j], parserCallFunc, live_filters, log_filters, args, endpoint) - else: - # Parse the user input - ranges = upload_input.split(',') - for r in ranges: - if '-' not in r: - idx = int(r) - if idx < 0 or idx >= num_logs: - print(f"{ANSI_RED}Index {idx} is out of range{ANSI_RESET}") - else: - upload(log[idx], parserCallFunc, live_filters, log_filters, args, endpoint) - else: - start, end = map(int, r.split('-')) - # Upload the specified logs - for j in range(start, end + 1): - if j < 0 or j >= num_logs: - print(f"{ANSI_RED}Index {j} is out of range{ANSI_RESET}") - else: - upload(log[j], parserCallFunc, live_filters, log_filters, args, endpoint) - # Close the KMF file kmf_file.close() + upload_input = input(f"{ANSI_GREEN}Do you want to upload all logs now (y/n)?: {ANSI_RESET} ") + if upload_input.lower() == 'y' or upload_input.lower() == '\n': + for i in range(NUM_LOGS): + kmf_file = kvmlib.openKmf(PATH.format(i)) + print(f"{ANSI_GREEN}Opening file: {PATH.format(i)}{ANSI_RESET}") # Green stdout + + # Access the log attribute of the KMF object + log = kmf_file.log + + # Iterate over all log files + for j, log_file in enumerate(log): + upload(log[j], parserCallFunc, live_filters, log_filters, args, endpoint) + + # Clear the log files + log.delete_all() + + # Close the KMF file + kmf_file.close() + # TESTING PURPOSES def main(): From 41d17305e19f1e13572f656b6d4ab88c31568de4 Mon Sep 17 00:00:00 2001 From: Aarjavjain101 Date: Tue, 21 May 2024 21:11:07 -0700 Subject: [PATCH 10/13] added log buckets --- link_telemetry.py | 3 ++- parser/main.py | 5 +++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/link_telemetry.py b/link_telemetry.py index 1983ef42..e8f94e8c 100755 --- a/link_telemetry.py +++ b/link_telemetry.py @@ -67,6 +67,7 @@ # API endpoints DEBUG_WRITE_ENDPOINT = f"{PARSER_URL}/api/v1/parse/write/debug" PROD_WRITE_ENDPOINT = f"{PARSER_URL}/api/v1/parse/write/production" +LOG_WRITE_ENDPOINT = f"{PARSER_URL}/api/v1/parse/write/log" NO_WRITE_ENDPOINT = f"{PARSER_URL}/api/v1/parse" HEALTH_ENDPOINT = f"{PARSER_URL}/api/v1/health" @@ -604,7 +605,7 @@ def main(): DEBUG_FILE_NAME = os.path.join(DEBUG_DIRECTORY, LOG_FILE) if args.log_upload: - upload_logs(args, live_filters, log_filters, PARSER_ENDPOINT) + upload_logs(args, live_filters, log_filters, LOG_WRITE_ENDPOINT) return 0 while True: diff --git a/parser/main.py b/parser/main.py index 1751b5dc..9a3158d0 100644 --- a/parser/main.py +++ b/parser/main.py @@ -289,6 +289,11 @@ def parse_and_write_request(): def parse_and_write_request_to_prod(): return parse_and_write_request_bucket("_prod") +@app.post(f"{API_PREFIX}/parse/write/log") +@auth.login_required +def parse_and_write_request(): + return parse_and_write_request_bucket("_log") + """ Parses incoming request, writes the parsed 
measurements to InfluxDB bucket (debug or production) that is specifc to the message type (CAN, GPS, IMU, for example). From de7c1eca373fc7a1a5c7dd4ddde1e884e1a98a79 Mon Sep 17 00:00:00 2001 From: Aarjavjain101 Date: Wed, 22 May 2024 20:11:31 -0700 Subject: [PATCH 11/13] no show status 200 and fix main.py function name --- link_telemetry.py | 9 +++++---- parser/main.py | 2 +- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/link_telemetry.py b/link_telemetry.py index e8f94e8c..327a1253 100755 --- a/link_telemetry.py +++ b/link_telemetry.py @@ -301,9 +301,9 @@ def process_response(future: concurrent.futures.Future, args): print(f"{ANSI_BOLD}Config file location:{ANSI_ESCAPE} \"{TOML_CONFIG_FILE.absolute()}\"\n") return - if response.status_code != 200: - print(f"{ANSI_BOLD}Response HTTP status code:{ANSI_ESCAPE} {ANSI_YELLOW}{response.status_code}{ANSI_ESCAPE}") - print(f"{ANSI_BOLD}Response HTTP status code:{ANSI_ESCAPE} {ANSI_GREEN}{response.status_code}{ANSI_ESCAPE}") + # if response.status_code != 200: + # print(f"{ANSI_BOLD}Response HTTP status code:{ANSI_ESCAPE} {ANSI_YELLOW}{response.status_code}{ANSI_ESCAPE}") + # print(f"{ANSI_BOLD}Response HTTP status code:{ANSI_ESCAPE} {ANSI_GREEN}{response.status_code}{ANSI_ESCAPE}") try: parse_response: dict = response.json() @@ -365,7 +365,6 @@ def process_response(future: concurrent.futures.Future, args): else: print(f"Unexpected response: {parse_response['result']}") - print() def read_lines_from_file(file_path): """ @@ -418,6 +417,8 @@ def process_message(message: str, buffer: str = "") -> list: # Remove 00 0a from the start if present if message.startswith("000a"): message = message[4:] + elif message.startswith("0a"): + message = message[2:] # Add buffer to the start of the message message = buffer + message diff --git a/parser/main.py b/parser/main.py index 9a3158d0..6fbcd3d0 100644 --- a/parser/main.py +++ b/parser/main.py @@ -291,7 +291,7 @@ def parse_and_write_request_to_prod(): @app.post(f"{API_PREFIX}/parse/write/log") @auth.login_required -def parse_and_write_request(): +def parse_and_write_request_to_log(): return parse_and_write_request_bucket("_log") """ From 1296e68bf2074d13c05b4f5f87587de9a76fe997 Mon Sep 17 00:00:00 2001 From: Aarjavjain101 Date: Wed, 22 May 2024 20:17:29 -0700 Subject: [PATCH 12/13] table on and log uploading conflict fixed --- link_telemetry.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/link_telemetry.py b/link_telemetry.py index 327a1253..63dc3944 100755 --- a/link_telemetry.py +++ b/link_telemetry.py @@ -313,7 +313,8 @@ def process_response(future: concurrent.futures.Future, args): return if parse_response["result"] == "OK": - if args.table_on: + table = None + if args.log is not None or args.table_on: # Create a table table = BeautifulTable() @@ -343,6 +344,7 @@ def process_response(future: concurrent.futures.Future, args): table.rows.append([f"{ANSI_BOLD}{row_head}{ANSI_ESCAPE}"]) table.rows.append(row_data) + if args.table_on: print(table) if parse_response["logMessage"]: From 05440ff1a9664d254160fa660a51830f0015272a Mon Sep 17 00:00:00 2001 From: Aarjavjain101 Date: Wed, 22 May 2024 20:25:29 -0700 Subject: [PATCH 13/13] added canlib to requirements.txt --- requirements.txt | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 97f187c9..7ec8260d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,9 +1,11 @@ +annotated-types==0.7.0 argparse-addons==0.8.0 attrs==22.1.0 autopep8==2.0.1 
beautifultable==1.1.0 bitstruct==8.15.1 blinker==1.6.2 +canlib==1.25.393 cantools==37.2.0 certifi==2021.10.8 charset-normalizer==2.0.12 @@ -27,6 +29,8 @@ pluggy==1.0.0 prettytable==3.8.0 py==1.11.0 pycodestyle==2.10.0 +pydantic==2.7.1 +pydantic_core==2.18.2 pyparsing==3.0.9 pyserial==3.5 pytest==7.1.3 @@ -44,7 +48,7 @@ six==1.16.0 textparser==0.24.0 toml==0.10.2 tomli==2.0.1 -typing_extensions==4.4.0 +typing_extensions==4.11.0 urllib3==1.26.9 wcwidth==0.2.6 websockets==11.0.3
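
Note on the framing introduced by this series: `tools/MemoratorUploader.py` packs each logged CAN event into a latin-1 `can_str` of the form 8-byte big-endian double timestamp, a literal `#`, a 4-byte big-endian CAN ID, 8 data bytes (zero-padded on the right after PATCH 07), and the DLC digits, then hands it to the parser through `sendToParser`. A minimal stand-alone sketch of an encoder/decoder for that layout (the helper names below are illustrative, not part of the patches):

```python
import struct

# Byte layout of can_str as built in tools/MemoratorUploader.py:
#   [0:8]   timestamp - big-endian IEEE-754 double, seconds since the Unix epoch
#   [8]     '#'       - literal separator
#   [9:13]  CAN ID    - 4-byte big-endian unsigned integer
#   [13:21] data      - 8 data bytes, right-padded with zeros
#   [21:]   DLC       - original DLC field as ASCII digits

def encode_can_str(timestamp: float, can_id: int, data: bytes, dlc: int) -> str:
    # Illustrative re-implementation of the packing done in upload()
    frame = struct.pack('>d', timestamp) + b'#' + can_id.to_bytes(4, 'big') + data.ljust(8, b'\0')
    return frame.decode('latin-1') + str(dlc)

def decode_can_str(can_str: str) -> tuple:
    # latin-1 round-trips byte values 0-255, so the original bytes are recovered exactly
    raw = can_str.encode('latin-1')
    timestamp = struct.unpack('>d', raw[0:8])[0]
    can_id = int.from_bytes(raw[9:13], 'big')
    data = raw[13:21]
    dlc = int(raw[21:].decode())
    return timestamp, can_id, data, dlc

if __name__ == "__main__":
    ts, cid, dat, dlc = decode_can_str(encode_can_str(1716350000.25, 0x401, b'\x12\x34', 2))
    assert (cid, dat, dlc) == (0x401, b'\x12\x34'.ljust(8, b'\0'), 2)
    print(ts, hex(cid), dat.hex(), dlc)
```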