Add errored runs to abr tracking sheet (#14845)
<!--
Thanks for taking the time to open a pull request! Please make sure
you've read the "Opening Pull Requests" section of our Contributing
Guide:


https://github.com/Opentrons/opentrons/blob/edge/CONTRIBUTING.md#opening-pull-requests

To ensure your code is reviewed quickly and thoroughly, please fill out
the sections below to the best of your ability!
-->

# Overview

<!--
Use this section to describe your pull-request at a high level. If the
PR addresses any open issues, please tag the issues here.
-->

Improved ABR error data collection: errored runs are now added to the ABR tracking sheet with a link to their JIRA ticket, and the related error documents are gathered into a folder named after the ticket.

# Test Plan

Tested code on multiple robots.

# Changelog

- Added a function to download robot logs.
- Added code to move the error documents (run log, calibration log, robot logs) into a folder named after the JIRA ticket (see the sketch below).
- Added the errored run to the ABR sheet with a link to the JIRA ticket.
- Added extra lines to `abr_scale` so the scale is read more often.
- Edited the ABR calibration script to ensure duplicate calibrations are not added.
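
The folder-move step above, restated as a minimal standalone sketch based on the inline code in `abr_robot_error.py`; the helper name `move_error_files` is illustrative and not part of the PR:

```python
import os
import shutil
from typing import List


def move_error_files(error_files: List[str], storage_directory: str, ticket_key: str) -> str:
    """Move the run log, calibration log, and robot logs into a folder named after the ticket."""
    error_folder_path = os.path.join(storage_directory, ticket_key)
    os.makedirs(error_folder_path, exist_ok=True)
    for source_file in error_files:
        destination_file = os.path.join(error_folder_path, os.path.basename(source_file))
        shutil.move(source_file, destination_file)
    return error_folder_path
```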

# Review requests

- Is 5,000 lines of recording enough to capture the robot error if the script is run immediately?
- Is there any manipulation of the robot logs that could be done to make error analysis more efficient?
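
One possible direction for the second question, offered as an assumption rather than anything in this PR: pre-filter each downloaded robot log for error-level lines before attaching it, so reviewers open a short excerpt instead of the full 5,000-line capture. The helper name and keywords below are hypothetical:

```python
from pathlib import Path
from typing import List, Sequence


def extract_error_lines(log_path: str, keywords: Sequence[str] = ("ERROR", "error")) -> List[str]:
    """Return only the log lines that mention an error keyword (illustrative filter)."""
    lines = Path(log_path).read_text(errors="ignore").splitlines()
    return [line for line in lines if any(key in line for key in keywords)]
```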

# Risk assessment

<!--
Carefully go over your pull request and look at the other parts of the
codebase it may affect. Look for the possibility, even if you think it's
small, that your change may affect some other part of the system - for
instance, changing return tip behavior in protocol may also change the
behavior of labware calibration.

Identify the other parts of the system your codebase may affect, so that
in addition to your own review and testing, other people who may not
have the system internalized as much as you can focus their attention
and testing there.
-->
rclarke0 committed Apr 9, 2024
1 parent 2cff9d2 commit 0c799fe
Showing 9 changed files with 179 additions and 36 deletions.
1 change: 0 additions & 1 deletion abr-testing/abr_testing/automation/google_drive_tool.py
@@ -25,7 +25,6 @@ def __init__(self, credentials: Any, folder_name: str, email: str) -> None:
self.drive_service = build("drive", "v3", credentials=self.credentials)
self.parent_folder = folder_name
self.email = email
self.folder = self.open_folder()

def list_folder(self, delete: Any = False) -> Set[str]:
"""List folders and files in Google Drive."""
7 changes: 7 additions & 0 deletions abr-testing/abr_testing/automation/google_sheets_tool.py
@@ -2,6 +2,7 @@
import gspread # type: ignore[import]
import socket
import httplib2
from datetime import datetime
from oauth2client.service_account import ServiceAccountCredentials # type: ignore[import]
from typing import Dict, List, Any, Set, Tuple

@@ -57,6 +58,12 @@ def write_to_row(self, data: List) -> None:
"""Write data into a row in a List[] format."""
try:
self.row_index += 1
data = [
item.strftime("%Y/%m/%d %H:%M:%S")
if isinstance(item, datetime)
else item
for item in data
]
self.worksheet.insert_row(data, index=self.row_index)
except socket.gaierror:
pass
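
The new `write_to_row` behavior in isolation: `datetime` values are formatted as strings before the row is inserted, since the Sheets API only accepts JSON-serializable cell values. A standalone sketch of the same pattern:

```python
from datetime import datetime
from typing import Any, List


def serialize_row(data: List[Any]) -> List[Any]:
    """Format datetime entries as strings so the row can be sent to Google Sheets."""
    return [
        item.strftime("%Y/%m/%d %H:%M:%S") if isinstance(item, datetime) else item
        for item in data
    ]


# serialize_row([datetime(2024, 4, 9, 12, 30), "robot-name", 42])
# -> ['2024/04/09 12:30:00', 'robot-name', 42]
```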
11 changes: 7 additions & 4 deletions abr-testing/abr_testing/automation/jira_tool.py
@@ -5,7 +5,7 @@
import json
import webbrowser
import argparse
from typing import List, Tuple
from typing import List


class JiraTicket:
@@ -41,11 +41,12 @@ def issues_on_board(self, board_id: str) -> List[str]:
issue_ids.append(issue_id)
return issue_ids

def open_issue(self, issue_key: str) -> None:
def open_issue(self, issue_key: str) -> str:
"""Open issue on web browser."""
url = f"{self.url}/browse/{issue_key}"
print(f"Opening at {url}.")
webbrowser.open(url)
return url

def create_ticket(
self,
@@ -58,7 +59,7 @@ def create_ticket(
components: list,
affects_versions: str,
robot: str,
) -> Tuple[str, str]:
) -> str:
"""Create ticket."""
data = {
"fields": {
@@ -94,13 +95,15 @@ def create_ticket(
response_str = str(response.content)
issue_url = response.json().get("self")
issue_key = response.json().get("key")
print(f"issue key {issue_key}")
print(f"issue url{issue_url}")
if issue_key is None:
print("Error: Could not create issue. No key returned.")
except requests.exceptions.HTTPError:
print(f"HTTP error occurred. Response content: {response_str}")
except json.JSONDecodeError:
print(f"JSON decoding error occurred. Response content: {response_str}")
return issue_url, issue_key
return issue_key

def post_attachment_to_ticket(self, issue_id: str, attachment_path: str) -> None:
"""Adds attachments to ticket."""
32 changes: 21 additions & 11 deletions abr-testing/abr_testing/data_collection/abr_calibration_logs.py
@@ -1,5 +1,5 @@
"""Get Calibration logs from robots."""
from typing import Dict, Any, List
from typing import Dict, Any, List, Union
import argparse
import os
import json
@@ -16,15 +16,18 @@ def check_for_duplicates(
col_2: int,
row: List[str],
headers: List[str],
) -> List[str]:
) -> Union[List[str], None]:
"""Check google sheet for duplicates."""
serials = google_sheet.get_column(col_1)
modify_dates = google_sheet.get_column(col_2)
for serial, modify_date in zip(serials, modify_dates):
if row[col_1 - 1] == serial and row[col_2 - 1] == modify_date:
print(f"Skipped row{row}. Already on Google Sheet.")
continue
read_robot_logs.write_to_sheets(sheet_location, google_sheet, row, headers)
# check for complete calibration.
if len(row[-1]) > 0:
for serial, modify_date in zip(serials, modify_dates):
if row[col_1 - 1] == serial and row[col_2 - 1] == modify_date:
print(f"Skipped row for instrument {serial}. Already on Google Sheet.")
return None
read_robot_logs.write_to_sheets(sheet_location, google_sheet, row, headers)
print(f"Writing calibration for: {serial}")
return row


@@ -64,6 +67,7 @@ def upload_calibration_offsets(
instrument_row,
instrument_headers,
)

# MODULE SHEET
if len(calibration.get("Modules", "")) > 0:
module_headers = (
@@ -198,13 +202,19 @@ def upload_calibration_offsets(
except FileNotFoundError:
print(f"Add .json file with robot IPs to: {storage_directory}.")
sys.exit()

if ip_or_all == "ALL":
ip_address_list = ip_file["ip_address_list"]
for ip in ip_address_list:
saved_file_path, calibration = read_robot_logs.get_calibration_offsets(
ip, storage_directory
)
upload_calibration_offsets(calibration, storage_directory)
print(ip)
try:
saved_file_path, calibration = read_robot_logs.get_calibration_offsets(
ip, storage_directory
)
upload_calibration_offsets(calibration, storage_directory)
except Exception:
print(f"ERROR: Failed to read IP address: {ip}")
continue
else:
saved_file_path, calibration = read_robot_logs.get_calibration_offsets(
ip_or_all, storage_directory
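
The duplicate check now also requires a complete calibration (a non-empty last column) and returns `None` on a match instead of writing anyway. The core comparison, as a standalone sketch:

```python
from typing import List


def is_duplicate(serials: List[str], modify_dates: List[str], row: List[str],
                 col_1: int, col_2: int) -> bool:
    """Return True when the row's serial and modify date already appear on the sheet.

    Column indexes are 1-based to match the Google Sheet columns, as in
    check_for_duplicates above.
    """
    for serial, modify_date in zip(serials, modify_dates):
        if row[col_1 - 1] == serial and row[col_2 - 1] == modify_date:
            return True
    return False
```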
26 changes: 19 additions & 7 deletions abr-testing/abr_testing/data_collection/abr_google_drive.py
@@ -6,7 +6,7 @@
import gspread # type: ignore[import]
from datetime import datetime, timedelta
from abr_testing.data_collection import read_robot_logs
from typing import Set, Dict, Any, Tuple, List
from typing import Set, Dict, Any, Tuple, List, Union
from abr_testing.automation import google_drive_tool, google_sheets_tool


@@ -30,7 +30,9 @@ def get_modules(file_results: Dict[str, str]) -> Dict[str, Any]:


def create_data_dictionary(
runs_to_save: Set[str], storage_directory: str
runs_to_save: Union[Set[str], str],
storage_directory: str,
issue_url: str,
) -> Tuple[Dict[Any, Dict[str, Any]], List]:
"""Pull data from run files and format into a dictionary."""
runs_and_robots = {}
@@ -41,7 +43,7 @@
file_results = json.load(file)
else:
continue
run_id = file_results.get("run_id")
run_id = file_results.get("run_id", "NaN")
if run_id in runs_to_save:
robot = file_results.get("robot_name")
protocol_name = file_results["protocol"]["metadata"].get("protocolName", "")
@@ -56,6 +58,7 @@
error_instrument,
error_level,
) = read_robot_logs.get_error_info(file_results)

all_modules = get_modules(file_results)

start_time_str, complete_time_str, start_date, run_time_min = (
@@ -103,13 +106,14 @@
tc_dict = read_robot_logs.thermocycler_commands(file_results)
hs_dict = read_robot_logs.hs_commands(file_results)
tm_dict = read_robot_logs.temperature_module_commands(file_results)
notes = {"Note1": "", "Note2": ""}
notes = {"Note1": "", "Jira Link": issue_url}
row_2 = {**row, **all_modules, **notes, **hs_dict, **tm_dict, **tc_dict}
headers = list(row_2.keys())
runs_and_robots[run_id] = row_2
else:
os.remove(file_path)
print(f"Run ID: {run_id} has a run time of 0 minutes. Run removed.")
continue
# os.remove(file_path)
# print(f"Run ID: {run_id} has a run time of 0 minutes. Run removed.")
return runs_and_robots, headers


@@ -168,6 +172,14 @@ def create_data_dictionary(
except gspread.exceptions.APIError:
print("ERROR: Check google sheet name. Check credentials file.")
sys.exit()
try:
google_sheet_lpc = google_sheets_tool.google_sheet(
credentials_path, "ABR-LPC", 0
)
print("Connected to google sheet ABR-LPC")
except gspread.exceptions.APIError:
print("ERROR: Check google sheet name. Check credentials file.")
sys.exit()
run_ids_on_gs = google_sheet.get_column(2)
run_ids_on_gs = set(run_ids_on_gs)

@@ -181,7 +193,7 @@
)
# Add missing runs to google sheet
runs_and_robots, headers = create_data_dictionary(
missing_runs_from_gs, storage_directory
missing_runs_from_gs, storage_directory, ""
)
read_robot_logs.write_to_local_and_google_sheet(
runs_and_robots, storage_directory, google_sheet_name, google_sheet, headers
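
With the widened signature, `create_data_dictionary` accepts either a set of run ids (the bulk sync above, which passes an empty string for the ticket link) or a single run id plus the JIRA issue URL used by `abr_robot_error.py`. A hedged usage sketch; paths, ids, and the URL are placeholders:

```python
from abr_testing.data_collection import abr_google_drive

# Bulk sync: every missing run id, no ticket link.
runs_and_robots, headers = abr_google_drive.create_data_dictionary(
    {"run-id-1", "run-id-2"}, "/data/abr", ""
)

# Single errored run: one run id plus the issue URL written to the "Jira Link" column.
runs_and_robots, headers = abr_google_drive.create_data_dictionary(
    "run-id-3", "/data/abr/RABR-238", "https://example.atlassian.net/browse/RABR-238"
)
```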
1 change: 1 addition & 0 deletions abr-testing/abr_testing/data_collection/abr_lpc.py
@@ -0,0 +1 @@
"""Get Unique LPC Values from Run logs."""
67 changes: 59 additions & 8 deletions abr-testing/abr_testing/data_collection/abr_robot_error.py
@@ -3,7 +3,13 @@
from abr_testing.data_collection import read_robot_logs, abr_google_drive, get_run_logs
import requests
import argparse
from abr_testing.automation import jira_tool
from abr_testing.automation import jira_tool, google_sheets_tool, google_drive_tool
import shutil
import os
import subprocess
import json
import sys
import gspread # type: ignore[import]


def get_error_runs_from_robot(ip: str) -> List[str]:
@@ -44,7 +50,6 @@ def get_error_info_from_robot(
# JIRA Ticket Fields
failure_level = "Level " + str(error_level) + " Failure"
components = [failure_level, "Flex-RABR"]
components = ["Flex-RABR"]
affects_version = results["API_Version"]
parent = results.get("robot_name", "")
print(parent)
@@ -140,18 +145,19 @@ def get_error_info_from_robot(
affects_version,
components,
whole_description_str,
saved_file_path,
run_log_file_path,
) = get_error_info_from_robot(ip, one_run, storage_directory)
# get calibration data
saved_file_path_calibration, calibration = read_robot_logs.get_calibration_offsets(
ip, storage_directory
)
file_paths = read_robot_logs.get_logs(storage_directory, ip)
print(f"Making ticket for run: {one_run} on robot {robot}.")
# TODO: make argument or see if I can get rid of with using board_id.
project_key = "RABR"
parent_key = project_key + "-" + robot[-1]
issues_ids = ticket.issues_on_board(board_id)
issue_url, issue_key = ticket.create_ticket(
# CREATE TICKET
issue_key = ticket.create_ticket(
summary,
whole_description_str,
project_key,
@@ -162,6 +168,51 @@ def get_error_info_from_robot(
affects_version,
parent_key,
)
ticket.open_issue(issue_key)
ticket.post_attachment_to_ticket(issue_key, saved_file_path)
ticket.post_attachment_to_ticket(issue_key, saved_file_path_calibration)
# OPEN TICKET
issue_url = ticket.open_issue(issue_key)
# MOVE FILES TO ERROR FOLDER.
error_files = [saved_file_path_calibration, run_log_file_path] + file_paths
error_folder_path = os.path.join(storage_directory, str("RABR-238"))
os.makedirs(error_folder_path, exist_ok=True)
for source_file in error_files:
destination_file = os.path.join(
error_folder_path, os.path.basename(source_file)
)
shutil.move(source_file, destination_file)
# OPEN FOLDER DIRECTORY
subprocess.Popen(["explorer", error_folder_path])
# CONNECT TO GOOGLE DRIVE
credentials_path = os.path.join(storage_directory, "credentials.json")
google_sheet_name = "ABR-run-data"
try:
google_drive = google_drive_tool.google_drive(
credentials_path,
"1Cvej0eadFOTZr9ILRXJ0Wg65ymOtxL4m",
"rhyann.clarke@opentrons.ocm",
)
print("Connected to google drive.")
except json.decoder.JSONDecodeError:
print(
"Credential file is damaged. Get from https://console.cloud.google.com/apis/credentials"
)
sys.exit()
# CONNECT TO GOOGLE SHEET
try:
google_sheet = google_sheets_tool.google_sheet(
credentials_path, google_sheet_name, 0
)
print(f"Connected to google sheet: {google_sheet_name}")
except gspread.exceptions.APIError:
print("ERROR: Check google sheet name. Check credentials file.")
sys.exit()
# WRITE ERRORED RUN TO GOOGLE SHEET
error_run_log = os.path.join(error_folder_path, os.path.basename(run_log_file_path))
google_drive.upload_file(error_run_log)
run_id = os.path.basename(error_run_log).split("_")[1].split(".")[0]
runs_and_robots, headers = abr_google_drive.create_data_dictionary(
run_id, error_folder_path, issue_url
)
read_robot_logs.write_to_local_and_google_sheet(
runs_and_robots, storage_directory, google_sheet_name, google_sheet, headers
)
print("Wrote run to ABR-run-data")
