Skip to content

Commit

Permalink
Merge branch 'edge' into abr-lengthen-protocols
Browse files Browse the repository at this point in the history
  • Loading branch information
rclarke0 authored Dec 18, 2024
2 parents 0c79bde + 9f0bc7d commit 297ccf0
Show file tree
Hide file tree
Showing 173 changed files with 7,429 additions and 1,282 deletions.
1 change: 1 addition & 0 deletions .eslintrc.js
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,7 @@ module.exports = {
'no-case-declarations': 'warn',
'prefer-regex-literals': 'warn',
'react/prop-types': 'warn',
'react/jsx-curly-brace-presence': 'warn',

// Enforce notification hooks
'no-restricted-imports': [
Expand Down
8 changes: 5 additions & 3 deletions .github/actions/odd-resource-analysis/action/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,11 @@ async function run() {
try {
const mixpanelUser = core.getInput('mixpanel-user', { required: true })
const mixpanelSecret = core.getInput('mixpanel-secret', { required: true })
const mixpanelProjectId = core.getInput('mixpanel-project-id', {
required: true,
})
const mixpanelProjectId = parseInt(
core.getInput('mixpanel-project-id', {
required: true,
})
)
const previousVersionCount = parseInt(
core.getInput('previous-version-count') || '2'
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ async function getMixpanelResourceMonitorDataFor({
where,
}) {
const params = new URLSearchParams({
project_id: parseInt(projectId),
project_id: projectId,
from_date: fromDate,
to_date: toDate,
event: '["resourceMonitorReport"]',
Expand Down
56 changes: 54 additions & 2 deletions .github/workflows/api-test-lint-deploy.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -73,8 +73,6 @@ jobs:
strategy:
matrix:
os: ['windows-2022', 'ubuntu-22.04', 'macos-latest']
# TODO(mc, 2022-02-24): expand this matrix to 3.8 and 3.9,
# preferably in a nightly cronjob on edge or something
python: ['3.10']
with-ot-hardware: ['true', 'false']
exclude:
Expand Down Expand Up @@ -128,6 +126,60 @@ jobs:
files: ./api/coverage.xml
flags: api

test-package:
name: 'installed package tests on ${{ matrix.os }}'
timeout-minutes: 5
strategy:
matrix:
os: ['ubuntu-22.04', 'macos-latest', 'windows-2022']
runs-on: '${{ matrix.os }}'
steps:
- uses: 'actions/checkout@v4'
- name: 'Fix actions/checkout odd handling of tags'
if: startsWith(github.ref, 'refs/tags')
run: |
git fetch -f origin ${{ github.ref }}:${{ github.ref }}
git checkout ${{ github.ref }}
- uses: 'actions/setup-python@v4'
with:
python-version: '3.10'
- name: Set up package-testing
id: setup
if: ${{ matrix.os != 'windows-2022' }}
working-directory: package-testing
shell: bash
run: make setup
- name: Set up package-testing (Windows)
id: setup-windows
if: ${{ matrix.os == 'windows-2022' }}
working-directory: package-testing
shell: pwsh
run: make setup-windows
- name: Run the tests
if: ${{ matrix.os != 'windows-2022' }}
shell: bash
id: test
working-directory: package-testing
run: make test
- name: Run the tests (Windows)
shell: pwsh
id: test-windows
working-directory: package-testing
run: make test-windows
- name: Save the test results
if: ${{ always() && steps.setup.outcome == 'success' || steps.setup-windows.outcome == 'success' }}
id: results
uses: actions/upload-artifact@v4
with:
name: package-test-results-${{ matrix.os }}
path: package-testing/results
- name: Set job summary
if: ${{ always() }}
run: |
echo "## Opentrons Package Test Results ${{matrix.os}}" >> $GITHUB_STEP_SUMMARY
echo "### Test Outcome: Unixy ${{ steps.test.outcome }} Windows: ${{ steps.test-windows.outcome }}" >> $GITHUB_STEP_SUMMARY
echo "[Download the test results artifact](${{steps.results.outputs.artifact-url}})" >> $GITHUB_STEP_SUMMARY
deploy:
name: 'deploy opentrons package'
needs: [test]
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/odd-memory-usage-test.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -20,5 +20,5 @@ jobs:
with:
mixpanel-user: ${{ secrets.MIXPANEL_INGEST_USER }}
mixpanel-secret: ${{ secrets.MIXPANEL_INGEST_SECRET }}
mixpanel-project-id: ${{ secrets.OT_APP_MIXPANEL_ID }}
mixpanel-project-id: ${{ secrets.OT_MIXPANEL_PROJECT_ID }}
previous-version-count: '2'
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -163,3 +163,5 @@ opentrons-robot-app.tar.gz
mock_dir
.npm-cache/
.eslintcache

package-testing/results
73 changes: 63 additions & 10 deletions abr-testing/abr_testing/data_collection/abr_robot_error.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,11 +11,35 @@
import sys
import json
import re
from pathlib import Path
import pandas as pd
from statistics import mean, StatisticsError
from abr_testing.tools import plate_reader


def retrieve_protocol_file(
    protocol_id: str,
    robot_ip: str,
    storage: str,
) -> Path | str:
    """Find and copy the protocol file for *protocol_id* from an errored robot.

    Uses ``scp`` (as root) to recursively copy the protocol's directory from
    the robot into ``<storage>/protocol_errors`` on the host computer.

    Args:
        protocol_id: ID of the protocol directory on the robot.
        robot_ip: IP address of the robot to copy from.
        storage: Local storage directory root on the host.

    Returns:
        The local save directory (``Path``) on success, or an empty string
        if the file could not be copied.
    """
    # Protocol files live under a fixed robot-server data dir, keyed by protocol ID.
    protocol_dir = f"/var/lib/opentrons-robot-server/7.1/protocols/{protocol_id}"

    print(f"FILE TO FIND: {protocol_dir}/{protocol_id}")
    # Copy protocol file found on robot onto host computer.
    save_dir = Path(f"{storage}/protocol_errors")
    # str() conversion keeps the argument list homogeneous for subprocess.
    command = ["scp", "-r", f"root@{robot_ip}:{protocol_dir}", str(save_dir)]
    try:
        # If file found and copied, return path to the save directory.
        subprocess.run(command, check=True)
        print("File transfer successful!")
        return save_dir
    except (subprocess.CalledProcessError, FileNotFoundError) as e:
        # CalledProcessError: scp exited nonzero (bad IP, missing file, auth).
        # FileNotFoundError: scp itself is not installed on the host.
        print(f"Error during file transfer: {e}")
        # Return empty string if file can't be copied.
        return ""


def compare_current_trh_to_average(
robot: str,
start_time: Any,
Expand All @@ -38,9 +62,13 @@ def compare_current_trh_to_average(
# Find average conditions of errored time period
df_all_trh = pd.DataFrame(all_trh_data)
# Convert timestamps to datetime objects
df_all_trh["Timestamp"] = pd.to_datetime(
df_all_trh["Timestamp"], format="mixed", utc=True
).dt.tz_localize(None)
print(f'TIMESTAMP: {df_all_trh["Timestamp"]}')
try:
df_all_trh["Timestamp"] = pd.to_datetime(
df_all_trh["Timestamp"], format="mixed", utc=True
).dt.tz_localize(None)
except Exception:
print(f'The following timestamp is invalid: {df_all_trh["Timestamp"]}')
# Ensure start_time is timezone-naive
start_time = start_time.replace(tzinfo=None)
relevant_temp_rhs = df_all_trh[
Expand Down Expand Up @@ -245,20 +273,24 @@ def get_user_id(user_file_path: str, assignee_name: str) -> str:
return assignee_id


def get_error_runs_from_robot(ip: str) -> List[str]:
def get_error_runs_from_robot(ip: str) -> Tuple[List[str], List[str]]:
    """Get runs that have errors from robot.

    Queries the robot-server HTTP API for all runs and collects those that
    are finished (not the current run) and recorded at least one error.

    Args:
        ip: IP address of the robot to query.

    Returns:
        Tuple of (error run IDs, matching protocol IDs). The lists are
        parallel, so index ``-1`` in both refers to the most recent
        errored run.
    """
    error_run_ids = []
    protocol_ids = []
    response = requests.get(
        f"http://{ip}:31950/runs", headers={"opentrons-version": "3"}
    )
    run_data = response.json()
    # Missing/absent "data" key yields an empty run list rather than a crash.
    run_list = run_data.get("data", [])
    for run in run_list:
        run_id = run["id"]
        # NOTE(review): assumes every run record carries a "protocolId" key —
        # confirm against the robot-server /runs response schema.
        protocol_id = run["protocolId"]
        num_of_errors = len(run["errors"])
        # Only completed (non-current) runs with at least one error matter here.
        if not run["current"] and num_of_errors > 0:
            error_run_ids.append(run_id)
            # Protocol ID will identify the correct folder on the robot of the protocol file
            protocol_ids.append(protocol_id)
    return (error_run_ids, protocol_ids)


def get_robot_state(
Expand Down Expand Up @@ -335,7 +367,7 @@ def get_robot_state(


def get_run_error_info_from_robot(
ip: str, one_run: str, storage_directory: str
ip: str, one_run: str, storage_directory: str, protocol_found: bool
) -> Tuple[str, str, str, List[str], List[str], str, str]:
"""Get error information from robot to fill out ticket."""
description = dict()
Expand Down Expand Up @@ -369,6 +401,9 @@ def get_run_error_info_from_robot(
description["protocol_name"] = results["protocol"]["metadata"].get(
"protocolName", ""
)

# If Protocol was successfully retrieved from the robot
description["protocol_found_on_robot"] = protocol_found
# Get start and end time of run
start_time = datetime.strptime(
results.get("startedAt", ""), "%Y-%m-%dT%H:%M:%S.%f%z"
Expand Down Expand Up @@ -511,12 +546,21 @@ def get_run_error_info_from_robot(
users_file_path = ticket.get_jira_users(storage_directory)
assignee_id = get_user_id(users_file_path, assignee)
run_log_file_path = ""
protocol_found = False
try:
error_runs = get_error_runs_from_robot(ip)
error_runs, protocol_ids = get_error_runs_from_robot(ip)
except requests.exceptions.InvalidURL:
print("Invalid IP address.")
sys.exit()
if len(run_or_other) < 1:
# Retrieve the most recently run protocol file
protocol_files_path = retrieve_protocol_file(
protocol_ids[-1], ip, storage_directory
)
# Set protocol_found to true if python protocol was successfully copied over
if protocol_files_path:
protocol_found = True

one_run = error_runs[-1] # Most recent run with error.
(
summary,
Expand All @@ -526,7 +570,9 @@ def get_run_error_info_from_robot(
labels,
whole_description_str,
run_log_file_path,
) = get_run_error_info_from_robot(ip, one_run, storage_directory)
) = get_run_error_info_from_robot(
ip, one_run, storage_directory, protocol_found
)
else:
(
summary,
Expand Down Expand Up @@ -566,8 +612,15 @@ def get_run_error_info_from_robot(
# OPEN TICKET
issue_url = ticket.open_issue(issue_key)
# MOVE FILES TO ERROR FOLDER.
print(protocol_files_path)
error_files = [saved_file_path_calibration, run_log_file_path] + file_paths
error_folder_path = os.path.join(storage_directory, issue_key)

# Move protocol file(s) to error folder
if protocol_files_path:
for file in os.listdir(protocol_files_path):
error_files.append(os.path.join(protocol_files_path, file))

error_folder_path = os.path.join(storage_directory, "issue_key")
os.makedirs(error_folder_path, exist_ok=True)
for source_file in error_files:
try:
Expand All @@ -577,7 +630,7 @@ def get_run_error_info_from_robot(
shutil.move(source_file, destination_file)
except shutil.Error:
continue
# POST FILES TO TICKET
# POST ALL FILES TO TICKET
list_of_files = os.listdir(error_folder_path)
for file in list_of_files:
file_to_attach = os.path.join(error_folder_path, file)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -460,7 +460,6 @@ def wash(vol: float, source: List[Well]) -> None:
src = source[whichwash]
protocol.comment(f"new wash source {whichwash}")
wash_volume_tracker = 0.0

m1000.drop_tip() if TIP_TRASH else m1000.return_tip()
helpers.set_hs_speed(protocol, h_s, heater_shaker_speed * 0.9, wash_time, True)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -237,6 +237,7 @@ def protocol_function() -> None:
if not dry_run:
h_s.set_and_wait_for_temperature(55)
protocol.delay(

minutes=10 if not dry_run else 0.25,
msg="Please allow another 10 minutes of 55C incubation to complete lysis.",
)
Expand Down Expand Up @@ -423,6 +424,7 @@ def clean() -> None:

for i in range(number_of_runs):
protocol_function()

pip.reset_tipracks()
if i < number_of_runs - 1:
setup()
Expand All @@ -431,3 +433,4 @@ def clean() -> None:
if deactivate_modules:
helpers.deactivate_modules(protocol)
helpers.find_liquid_height_of_all_wells(protocol, pip, [liquid_waste["A1"]])

Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ def run(protocol: protocol_api.ProtocolContext) -> None:
wash2 = 9800 / 8
wash2_list = [wash2] * 12
final_elution = 7500 / 8

# Fill up Plates
# Res1
p1000.transfer(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -73,11 +73,13 @@ def run(protocol: ProtocolContext) -> None:
# Load Parameters
lids_in_stack: int = protocol.params.lids_in_a_stack # type: ignore[attr-defined]
num_offset = protocol.params.num_offset # type: ignore[attr-defined]

offset = protocol.params.offset # type: ignore[attr-defined]
negative = protocol.params.negative # type: ignore[attr-defined]
thermocycler_bool = protocol.params.thermocycler_bool # type: ignore[attr-defined]
if negative:
num_offset = num_offset * -1

# Thermocycler
if thermocycler_bool:
thermocycler: ThermocyclerContext = protocol.load_module(
Expand Down
2 changes: 2 additions & 0 deletions abr-testing/abr_testing/tools/abr_setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,8 @@ def clean_sheet(sheet_name: str, credentials: str) -> Any:
print("deleted rows")
except Exception:
print("could not delete rows")
return

clean_sheet(sheet_name, credentials)


Expand Down
Loading

0 comments on commit 297ccf0

Please sign in to comment.