diff --git a/.github/workflows/md5srv-tests.yml b/.github/workflows/md5srv-tests.yml index 51cbb248..5da2365b 100644 --- a/.github/workflows/md5srv-tests.yml +++ b/.github/workflows/md5srv-tests.yml @@ -15,28 +15,26 @@ jobs: MD5SRV_TIMEOUT: 5 MD5SRV_DIR: ./test/hitl/scripts BATS_VERSION: 1.10.0 - BATS_LIB_PATH: /usr/lib - # /usr/local/lib on OSX steps: - name: Setup Bats and bats libs - uses: brokenpip3/setup-bats-libs@1.0.0 + id: setup-bats + uses: bats-core/bats-action@3.0.0 with: bats-install: true file-install: false detik-install: false - - name: Setup BATS_LIB_PATH - run: | - if [ -e /usr/local/lib/bats-support ]; then - echo "BATS_LIB_PATH=/usr/local/lib" >> $GITHUB_ENV - fi - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Run Tests + env: + BATS_LIB_PATH: ${{ steps.setup-bats.outputs.lib-path }} run: | cd ${{env.MD5SRV_DIR}} - $HOME/.local/bin/bats -p --print-output-on-failure . - - name: Rerun Tests + bats -p --print-output-on-failure . + - name: Re-run Tests if: failure() + env: + BATS_LIB_PATH: ${{ steps.setup-bats.outputs.lib-path }} run: | cd ${{env.MD5SRV_DIR}} $HOME/.local/bin/bats -p --print-output-on-failure -x . diff --git a/.github/workflows/notecard-binary-tests.yml b/.github/workflows/notecard-binary-tests.yml index 480dd2c9..de45dd1e 100644 --- a/.github/workflows/notecard-binary-tests.yml +++ b/.github/workflows/notecard-binary-tests.yml @@ -1,4 +1,4 @@ -name: Note Binary CI +name: note-c card.binary HIL tests on: pull_request: @@ -6,7 +6,7 @@ on: workflow_dispatch: schedule: # * is a special character in YAML so you have to quote this string - - cron: '45 4 * * 1' # 4.45am every Monday + - cron: '45 4 * * 1' # 4.45am every Monday permissions: checks: write @@ -16,14 +16,17 @@ jobs: uses: ./.github/workflows/md5srv-tests.yml notecard-binary-test: - # needs: md5srv-test - runs-on: [self-hosted, swan, notecard, stlink, notecard-serial, md5srv, notehub-client] + needs: md5srv-test + runs-on: ubuntu-latest defaults: run: shell: bash env: + MD5SRV_PORT: 9178 NOTEHUB: "notehub.io" NOTEHUB_API: "api.notefile.net" + NOTEHUB_PROJECT_UID: "app:458d7b93-8e19-45f8-b030-fb96d03eb1cc" + NOTEHUB_PRODUCT_UID: "com.blues.hitl" NOTEHUB_ROUTE_TIMEOUT: 180 PIO_PROJECT_DIR: ./test/hitl/card.binary NOTEHUB_PROXY_ROUTE_ALIAS: card.binary.${{github.run_id}} @@ -35,32 +38,69 @@ jobs: DELETE_NOTEHUB_ROUTES: true # CREATE_NOTEHUB_ROUTES set to false to use the already created routes on notehub CREATE_NOTEHUB_ROUTES: true - # START_MD5SRV set to false to skip starting the MD5 server. There should be one - # already running locally with MD5SRV_PORT/ADDRESS/TOKEN set correspondingly. - START_MD5SRV: true - # START_TUNNELMOLE: set to false to skip starting tunnel mole. - START_TUNNELMOLE: true - # When neither tunneling solution is used (because they're already instantiated outside of the workflow) - # be sure to set MD5SRV_URL in the environment steps: + - name: Connect to Tailscale + uses: tailscale/github-action@v2 + with: + oauth-client-id: ${{ secrets.TS_OAUTH_CLIENT_ID }} + oauth-secret: ${{ secrets.TS_OAUTH_CLIENT_SECRET }} + tags: tag:ci + + # Needed for asyncio.TaskGroup, which the card_client code uses. 
+ - name: Set up Python 3.11 + uses: actions/setup-python@v4 + with: + python-version: '3.11' + - name: Checkout note-c repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 + + - name: Checkout hil_lab repo + uses: actions/checkout@v4 + with: + repository: blues/hil_lab + ref: master + path: hil_lab + # Since hil_lab is a private repo, we need to authenticate. This uses + # the "deploy keys" approach described here: + # https://stackoverflow.com/a/70283191 + ssh-key: ${{ secrets.HIL_LAB_CLONE_PRIV_KEY }} - name: Generate MD5 Server Token run: | [ -n "$MD5SRV_TOKEN" ] || echo "MD5SRV_TOKEN=`uuidgen`" >> $GITHUB_ENV - # propagate the environment variable so that it's available in the `env` context - echo "MD5SRV_PORT=$MD5SRV_PORT" >> $GITHUB_ENV - - name: Check env vars + # gdb-multiarch: We used gdb to remotely flash the test firmware onto the + # Swan attached to the Notestation. Apparently "regular" gdb (i.e. + # installed via apt install gdb) can't cope with the fact that the target + # is ARM. + # ngrok: Used to get a public IP for the MD5 server. + # jq: Used for JSON parsing. + - name: Install apt dependencies run: | - . scripts/check_runner_config.sh - echo NOTEHUB_PROXY_ROUTE_ALIAS=$NOTEHUB_PROXY_ROUTE_ALIAS + curl -sSL https://ngrok-agent.s3.amazonaws.com/ngrok.asc | \ + sudo tee /etc/apt/trusted.gpg.d/ngrok.asc >/dev/null + echo "deb https://ngrok-agent.s3.amazonaws.com buster main" | \ + sudo tee /etc/apt/sources.list.d/ngrok.list + + sudo apt update + sudo apt install gdb-multiarch ngrok jq + + # We need socat 1.7.4.4 because prior to that there are some issues with + # setting the baud rate properly. socat is used by the card_client code + # for various tunnels (e.g. Swan USB port, Swan OpenOCD server, etc.). + - name: Install socat 1.7.4.4 + run: | + wget http://www.dest-unreach.org/socat/download/socat-1.7.4.4.tar.gz + tar xvf socat-1.7.4.4.tar.gz + cd socat-1.7.4.4 + ./configure + make -j + sudo make install - name: Install PlatformIO dependencies run: | - python3 -m venv venv # python venv is also used by the md5server, so this comes first. 
- source venv/bin/activate + python -m pip install --upgrade pip pip install platformio cd $PIO_PROJECT_DIR pio pkg install -l "Blues Wireless Notecard" -e debug @@ -76,15 +116,14 @@ jobs: mkdir md5srv-files ./scripts/run_md5srv.sh - - name: Start tunnelmole - if: env.START_TUNNELMOLE!='false' + - name: Start ngrok run: | - rm -f tmole.log - ./scripts/run_tmole.sh + ngrok config add-authtoken ${{ secrets.NGROK_AUTH_TOKEN }} + ./scripts/run_ngrok.sh - name: Check MD5 server is available run: | - # the request will return a 401 from md5srv, but that's expected without the access token + # The request will return a 401 from md5srv, but that's expected without the access token # Curl still returns success because it could contact the server code=`curl -s -o /dev/null -w "%{http_code}" $MD5SRV_URL` if [ "$code" -ge "500" ]; then @@ -98,11 +137,11 @@ jobs: if: env.CREATE_NOTEHUB_ROUTES!='false' run: | curl -f -X POST \ - -L 'https://${{env.NOTEHUB}}/oauth2/token' \ + -L 'https://${{ env.NOTEHUB }}/oauth2/token' \ -H 'content-type: application/x-www-form-urlencoded' \ -d grant_type=client_credentials \ - -d client_id=$NOTEHUB_CLIENT_ID \ - -d client_secret=$NOTEHUB_CLIENT_SECRET | \ + -d client_id=${{ secrets.NOTEHUB_HIL_CLIENT_ID }} \ + -d client_secret=${{ secrets.NOTEHUB_HIL_CLIENT_SECRET }} | \ { token=$(jq -r .access_token); echo "NOTEHUB_ACCESS_TOKEN=$token" >> $GITHUB_ENV; } - name: Create Notehub HTTP route @@ -125,6 +164,7 @@ jobs: fi - name: Create Notehub proxy route + if: env.CREATE_NOTEHUB_ROUTES!='false' run: | ALIAS="$NOTEHUB_PROXY_ROUTE_ALIAS" route=`jq -n --arg TOKEN "$MD5SRV_TOKEN" --arg LABEL "$NOTEHUB_PROXY_ROUTE_LABEL" --arg URL "$MD5SRV_URL" --arg ALIAS "$ALIAS" --argjson TIMEOUT $NOTEHUB_ROUTE_TIMEOUT \ @@ -143,13 +183,78 @@ jobs: - name: Build and upload test firmware and run tests run: | - source venv/bin/activate + cd hil_lab/ + pip install -r requirements.txt + + # Reserve a Notestation with the label "note_c_card_binary_test", + # which means it has everything needed to run the note-c card.binary + # tests. + cd notestation/ + nohup python -m core.card_client \ + --mcu-debug \ + --labels '["note_c_card_binary_test"]' \ + --res-file ./reservation.json &> card_client.log & + + PID=$! + + timeout=600 # 10 minutes in seconds + interval=1 # Check every second + elapsed=0 + + # If we aren't able to reserve a Notestation after 10 minutes, or if + # the card_client fails, bail. + while [ ! -f ./reservation.json ]; do + sleep $interval + elapsed=$((elapsed + interval)) + + # Check if the Python process is still running + if ! kill -0 $PID 2>/dev/null; then + echo "Error: Process $PID has terminated unexpectedly." + echo "$(cat card_client.log)" + exit 1 + fi + + if [ $elapsed -ge $timeout ]; then + echo "Timeout reached: reservation.json did not appear." + kill $PID 2>/dev/null + exit 1 + fi + done + + echo "Notestation reserved." + + # Set these environment variables, which are read in platformio.ini in + # order to flash the Swan with the test firmware. + export MCU_GDB_SERVER_IP="$(jq -r '.notestation' ./reservation.json)" + export MCU_GDB_SERVER_PORT="$(jq -r '.mcu_openocd.gdb' ./reservation.json)" + export GDB_CMD="gdb-multiarch" + + if [ -z "$MCU_GDB_SERVER_IP" ] || [ "$MCU_GDB_SERVER_IP" == "null" ]; then + echo "Error: MCU_GDB_SERVER_IP is empty or not defined." + exit 1 + fi + + if [ -z "$MCU_GDB_SERVER_PORT" ] || [ "$MCU_GDB_SERVER_PORT" -eq 0 ]; then + echo "Error: MCU_GDB_SERVER_PORT is empty or zero." 
+ exit 1 + fi + export PLATFORMIO_BUILD_FLAGS="'-D NOTEHUB_PROXY_ROUTE_ALIAS=\"$NOTEHUB_PROXY_ROUTE_ALIAS\"' '-D PRODUCT_UID=\"$NOTEHUB_PRODUCT_UID\"'" echo "build flags $PLATFORMIO_BUILD_FLAGS" - cd $PIO_PROJECT_DIR + cd $GITHUB_WORKSPACE/$PIO_PROJECT_DIR + + # Run the tests. It's important that we provided --no-reset here. If + # we don't, PlatformIO tries to fiddle with DTR and RTS, and that + # causes an exception because the serial port for the Swan isn't a + # local serial port. It's a virtual serial device hooked up to the + # Notestation over TCP, and from there it's connected to the actual + # Swan USB port. Trying to do DTR/RTS on the port causes an ioctl + # error, because PlatformIO is expecting a genuine serial device that + # plays nicely with the ioctl stuff it wants to use. platformio test -v -e debug \ - --json-output-path test.json \ - --junit-output-path test.xml + --no-reset \ + --json-output-path test.json \ + --junit-output-path test.xml - name: Publish test report uses: mikepenz/action-junit-report@v3 @@ -180,28 +285,3 @@ jobs: -L "https://$NOTEHUB_API/v1/projects/$NOTEHUB_PROJECT_UID/routes/$NOTEHUB_HTTP_ROUTE_UID" \ -H "Authorization: Bearer $NOTEHUB_ACCESS_TOKEN" fi - - - - name: Cleanup tmole - if: always() - run: | - if [ -n "$TMOLE_PID" ]; then - echo "Stopping tmole." - kill $TMOLE_PID - else - echo "tmole not running (TMOLE_PID is empty)." - fi - - - name: Cleanup MD5 server - if: always() - run: | - if [ -d md5srv-files ]; then - echo "Deleting md5srv-files directory." - rm -rf md5srv-files - fi - if [ -n "$MD5SRV_PID" ]; then - echo "Stopping MD5 server." - kill $MD5SRV_PID - else - echo "MD5 server not running (MD5SRV_PID is empty)." - fi diff --git a/scripts/check_runner_config.sh b/scripts/check_runner_config.sh deleted file mode 100755 index e266c109..00000000 --- a/scripts/check_runner_config.sh +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/bash - -function env_var_defined() { - [ -v $1 ] || echo "Environment variable '$1' not set." -} - -function check_all() { - env_var_defined "NOTEHUB_CLIENT_ID" - env_var_defined "NOTEHUB_CLIENT_SECRET" - env_var_defined "NOTEHUB_PROJECT_UID" - env_var_defined "NOTEHUB_PRODUCT_UID" - env_var_defined "MD5SRV_PORT" - env_var_defined "MD5SRV_ADDRESS" - env_var_defined "MD5SRV_TOKEN" - env_var_defined "SWAN_SERIAL" - env_var_defined "STLINK_PROGRAMMER_PORT" - - # these are defined in the workflow itself, but good to check in case of errors causing them to be unset - env_var_defined "NOTEHUB_API" - env_var_defined "NOTEHUB" - env_var_defined "PIO_PROJECT_DIR" - env_var_defined "NOTEHUB_PROXY_ROUTE_LABEL" - env_var_defined "NOTEHUB_PROXY_ROUTE_ALIAS" - env_var_defined "NOTEHUB_HTTP_ROUTE_LABEL" - - -} - -errors=$(check_all) -if [ -n "$errors" ]; then - echo "$errors" # quoted to preserve newlines - echo "There are configuration errors. See the log above for details." - exit 1 -fi - -exit 0 diff --git a/scripts/run_md5srv.sh b/scripts/run_md5srv.sh index 4e30380c..9fb75968 100755 --- a/scripts/run_md5srv.sh +++ b/scripts/run_md5srv.sh @@ -1,4 +1,3 @@ -. venv/bin/activate python3 ./test/hitl/scripts/md5srv.py --dir md5srv-files --save > md5srv.log 2>&1 & MD5SRV_PID=$! echo "MD5SRV_PID=$MD5SRV_PID" >> $GITHUB_ENV diff --git a/scripts/run_ngrok.sh b/scripts/run_ngrok.sh new file mode 100755 index 00000000..9350897d --- /dev/null +++ b/scripts/run_ngrok.sh @@ -0,0 +1,55 @@ +#!/bin/bash + +if [ -n "$MD5SRV_PORT" ]; then + echo "INFO: Using MD5 server port $MD5SRV_PORT." >&2 +else + echo "ERROR: MD5SRV_PORT not defined." 
>&2 + exit 1 +fi + +if ! which ngrok > /dev/null 2>&1; then + echo "ERROR: ngrok command not found." >&2 + exit 1 +fi + +if ! which jq > /dev/null 2>&1; then + echo "ERROR: jq command not found." >&2 + exit 1 +fi + +echo "INFO: Starting ngrok..." >&2 +ngrok http http://localhost:$MD5SRV_PORT --log-format json --log stdout > ngrok.log 2>&1 & +NGROK_PID=$! +echo "INFO: ngrok PID is $NGROK_PID." >&2 +# echo "TMOLE_PID=$TMOLE_PID" >> $GITHUB_ENV +timeout 10 bash -c "until test -e ngrok.log; do sleep 0.1; done" +if [ $? -ne 0 ]; then + echo "ERROR: ngrok failed to start." >&2 + exit 1 +fi + +sleep 1 + +TIMEOUT=8 +SECONDS=0 +MD5SRV_URL="" +# Check ngrok.log for the MD5 server URL every second. +until [ "$SECONDS" -ge "$TIMEOUT" ] +do + MD5SRV_URL=$(jq -r 'select(.msg == "started tunnel") | .url' ngrok.log) + if [ -n "$MD5SRV_URL" ]; then + break + else + SECONDS=$((SECONDS+1)) + sleep 1 + fi +done + +if [ -z "$MD5SRV_URL" ]; then + echo "ERROR: Timed out waiting for MD5 server URL to get written to ngrok.log." >&2 + exit 1 +fi + +echo "INFO: Got MD5 server URL from ngrok.log." >&2 +echo "MD5SRV_URL=$MD5SRV_URL" >> $GITHUB_ENV +echo "INFO: ngrok ready. Logging to `realpath ngrok.log`" >&2 diff --git a/scripts/run_tmole.sh b/scripts/run_tmole.sh deleted file mode 100755 index ef33cacf..00000000 --- a/scripts/run_tmole.sh +++ /dev/null @@ -1,55 +0,0 @@ -#!/bin/bash - -export NVM_DIR=~/.nvm -source ~/.nvm/nvm.sh - -if [ -n "$MD5SRV_PORT" ]; then - echo "INFO: Using MD5 server port $MD5SRV_PORT." >&2 -else - echo "ERROR: MD5SRV_PORT not defined." >&2 - exit 1 -fi - -if ! which tmole > /dev/null 2>&1; then - echo "ERROR: tmole command not found." >&2 - exit 1 -fi - -echo "INFO: Starting tmole..." >&2 -tmole $MD5SRV_PORT > tmole.log 2>&1 & -TMOLE_PID=$! -echo "INFO: tmole PID is $TMOLE_PID." >&2 -echo "TMOLE_PID=$TMOLE_PID" >> $GITHUB_ENV -timeout 10 bash -c "until test -e tmole.log; do sleep 0.1; done" -if [ $? -ne 0 ]; then - echo "ERROR: tmole failed to start." >&2 - exit 1 -fi - -sleep 1 - - -TIMEOUT=8 -SECONDS=0 -MD5SRV_URL="" -# Check tmole.log for the MD5 server URL every second. -until [ "$SECONDS" -ge "$TIMEOUT" ] -do - MD5SRV_URL=$(grep -oP "^https://[\w\d\.-]+" tmole.log) - - if [ -n "$MD5SRV_URL" ]; then - break - else - SECONDS=$((SECONDS+1)) - sleep 1 - fi -done - -if [ -z "$MD5SRV_URL" ]; then - echo "ERROR: Timed out waiting for MD5 server URL to get written to tmole.log." >&2 - exit 1 -fi - -echo "INFO: Got MD5 server URL from tmole.log." >&2 -echo "MD5SRV_URL=$MD5SRV_URL" >> $GITHUB_ENV -echo "INFO: tmole ready. Logging to `realpath tmole.log`" >&2 diff --git a/test/hitl/card.binary/platformio.ini b/test/hitl/card.binary/platformio.ini index f0948bf9..f4972b81 100644 --- a/test/hitl/card.binary/platformio.ini +++ b/test/hitl/card.binary/platformio.ini @@ -21,10 +21,7 @@ build_flags = '-D DEFAULT_NOTEHUB_PROXY_ROUTE_ALIAS="cobstest"' '-D DEFAULT_PRODUCT_UID=""' # don't set the product UID lib_deps = blues/Blues Wireless Notecard@^1.6.0 -debug_tool = stlink -upload_protocol = stlink test_framework = unity -test_port = /dev/note_c_hil_swan ; The serial port used for streaming test results from the Swan to the host ; computer takes a second to show up after uploading. This script waits for it @@ -36,10 +33,13 @@ extra_scripts = post:wait_for_test_port.py ; machine. Simply specifying the serial port corresponding to a given ST-LINK ; via upload_port doesn't work, so we have to use the specific serial number ; of the ST-LINK we're trying to target. 
-upload_flags = - -c - hla_serial 0029000F3156501420323443 +;upload_flags = +; -c +; hla_serial 0029000F3156501420323443 [env:debug] build_type = debug debug_test = * +upload_protocol = custom +upload_command = ${sysenv.GDB_CMD} -ex "set confirm off" -ex "set pagination off" -ex "target extended-remote ${sysenv.MCU_GDB_SERVER_IP}:${sysenv.MCU_GDB_SERVER_PORT}" -ex "monitor reset halt" -ex "file .pio/build/debug/firmware.elf" -ex "load" -ex "monitor reset" -ex "quit" +test_port = /tmp/mcu_usb diff --git a/test/hitl/card.binary/wait_for_test_port.py b/test/hitl/card.binary/wait_for_test_port.py index 34497f76..3a16999e 100644 --- a/test/hitl/card.binary/wait_for_test_port.py +++ b/test/hitl/card.binary/wait_for_test_port.py @@ -1,5 +1,6 @@ Import("env") + def after_upload(source, target, env): port = env.GetProjectOption("test_port") print("waiting for " + port + " ...") @@ -11,4 +12,5 @@ def after_upload(source, target, env): except: pass + env.AddPostAction("upload", after_upload) diff --git a/test/hitl/scripts/md5srv.py b/test/hitl/scripts/md5srv.py index 43340d4a..02947f35 100755 --- a/test/hitl/scripts/md5srv.py +++ b/test/hitl/scripts/md5srv.py @@ -23,6 +23,7 @@ def log_sensitive(s: str): class HTTPException(Exception): + def __init__(self, status_code, message, detail=None): self.status_code = status_code self.message = message @@ -81,19 +82,26 @@ def do_POST(self): def do_PUT(self): self.do(self.write_file_or_note) - def send_status(self, code: int, message: str, detail: str | dict | bytes, headers: dict = {}): + def send_status(self, + code: int, + message: str, + detail: str | dict | bytes, + headers: dict = {}): """ Send a status code with a message and optional detail, which is returned as JSON in the response body. """ self.send_response(code, message) for header in headers.items(): self.send_header(*header) - self.send_header("Content-Type", "application/json" if type(detail) is not bytes else "application/octet-stream") + self.send_header( + "Content-Type", "application/json" + if type(detail) is not bytes else "application/octet-stream") self.end_headers() detail = detail or message if detail is not None: if type(detail) is bytes: self.wfile.write(detail) else: - key = "err" if code > 400 else "text" if not type(detail) is dict else None + key = "err" if code > 400 else "text" if not type( + detail) is dict else None json_str = json.dumps(detail) reply_body = json_str if key is None else f'{{"{key}":{json_str},"code":{code}}}' reply_body = reply_body + '\n' @@ -115,12 +123,15 @@ def validate_token(self): requested = self.headers['X-Access-Token'] or '' token = self.args.token or '' if token and requested != token: - raise HTTPException(403, "Forbidden") if requested else HTTPException(401, "Unauthorized") + raise HTTPException(403, + "Forbidden") if requested else HTTPException( + 401, "Unauthorized") def get_md5(self): dirname = self.validate_url_path(self.url.path) if not os.path.exists(dirname): - raise HTTPException(404, "Not Found", f"Directory {self.url.path} not found.") + raise HTTPException(404, "Not Found", + f"Directory {self.url.path} not found.") chunk = self.query_data.get('chunk') headers = {} @@ -144,7 +155,9 @@ def md5_for_directory(self, dirname): abs_filename = os.path.join(dirname, filename) self.validate_path(abs_filename) if not os.path.isfile(abs_filename): - raise HTTPException(403, "Not a file.", f"{os.path.join(self.url.path,filename)} is not a file") + raise HTTPException( + 403, "Not a file.", + f"{os.path.join(self.url.path,filename)} is not a 
file") with open(abs_filename, 'rb') as file: data = file.read() @@ -171,14 +184,15 @@ def write_file_or_note(self): def write_note(self): content_type = self.headers['Content-Type'] if content_type != 'application/json': - raise HTTPException(400, f"Unsupported content type: {content_type}") + raise HTTPException(400, + f"Unsupported content type: {content_type}") data = self.post_data try: event = json.loads(data) except json.decoder.JSONDecodeError as e: print(f"JSON decode error: {data} {e}") raise e - body = event['body'] # non-optional keys + body = event['body'] # non-optional keys name = body['name'] length = body['length'] md5 = body['md5'] @@ -186,8 +200,10 @@ def write_note(self): payload = base64.b64decode(event['payload']) payload_length = event.get('payload_length') if payload_length is not None and payload_length != length: - raise HTTPException(400, "Payload length mismatch", f"payload_length {payload_length}!=length {length}") - chunk = body.get('chunk') # optional + raise HTTPException( + 400, "Payload length mismatch", + f"payload_length {payload_length}!=length {length}") + chunk = body.get('chunk') # optional self._write_file(name, chunk, length, payload, md5) def write_file(self): @@ -195,7 +211,8 @@ def write_file(self): if not length: raise HTTPException(400, "Request body is empty.") chunk = self.query_data.get("chunk", None) - return self._write_file(self.url.path, chunk, length, self.post_data, None) + return self._write_file(self.url.path, chunk, length, self.post_data, + None) def _read_file(self, path, chunk): filename = self._chunk_file(path, chunk) @@ -207,7 +224,9 @@ def _read_file(self, path, chunk): def _chunk_file(self, dirname, chunk): chunk_index = None if chunk is None else int(chunk) - filename = os.path.join(dirname, f"payload{chunk_index:05d}.bin" if chunk_index is not None else "payload.bin") + filename = os.path.join( + dirname, f"payload{chunk_index:05d}.bin" + if chunk_index is not None else "payload.bin") self.validate_path(filename) return filename @@ -222,19 +241,24 @@ def _write_file(self, path, chunk, length, data, md5): filename = self._chunk_file(dirname, chunk) if os.path.exists(filename): - raise HTTPException(409, "Conflict", f"Chunk {chunk} already exists for {self.url.path}" - if chunk else f"File {self.url.path} already exists.") + raise HTTPException( + 409, "Conflict", + f"Chunk {chunk} already exists for {self.url.path}" + if chunk else f"File {self.url.path} already exists.") if len(data) != length: - raise HTTPException(400, "Invalid content length.", - f"Payload length does not equal given length {len(data)}!={length}") + raise HTTPException( + 400, "Invalid content length.", + f"Payload length does not equal given length {len(data)}!={length}" + ) if self.args.save: with open(filename, 'wb') as output_file: output_file.write(data) md5str = hashlib.md5(data).hexdigest() if md5 and md5str != md5: - raise HTTPException(400, f"MD5 mismatch. actual {md5str}!=expected {md5}") + raise HTTPException( + 400, f"MD5 mismatch. 
actual {md5str}!=expected {md5}") response = {"md5": md5str, "length": length} content_type = self.headers['Content-Type'] if content_type is not None: @@ -242,7 +266,8 @@ def _write_file(self, path, chunk, length, data, md5): self.send_status(200, "Ok", response) def validate_url_path(self, urlpath): - return self.validate_path(os.path.join(self.args.directory, urlpath.lstrip('/'))) + return self.validate_path( + os.path.join(self.args.directory, urlpath.lstrip('/'))) def validate_path(self, path): abs_path = os.path.realpath(os.path.abspath(path)) @@ -252,14 +277,12 @@ def validate_path(self, path): return abs_path def dump_request(self) -> str: - return json.dumps( - { - "path": self.url.path, - "query_data": self.query_data, - "post_data": self.post_data.decode("utf-8"), - "form_data": self.form_data - } - ) + return json.dumps({ + "path": self.url.path, + "query_data": self.query_data, + "post_data": self.post_data.decode("utf-8"), + "form_data": self.form_data + }) def main(args): @@ -284,20 +307,22 @@ def build_request_handler(*request_args): token = os.environ.get("MD5SRV_TOKEN") parser = argparse.ArgumentParser( - description='Run a simple webserver to save and validate web.post requests.') - parser.add_argument( - '--port', - default=port or "8080", - required=False, - help='The TCP port to bind to.') + description= + 'Run a simple webserver to save and validate web.post requests.') + parser.add_argument('--port', + default=port or "8080", + required=False, + help='The TCP port to bind to.') parser.add_argument('--address', default=address or "0.0.0.0", required=False, help='The IP address to bind to.') - parser.add_argument('--dir', - dest="directory", - required=False, - help='The working directory. All files are stored and retrieved from here.') + parser.add_argument( + '--dir', + dest="directory", + required=False, + help= + 'The working directory. All files are stored and retrieved from here.') # parser.add_argument('--timeout', # default=5, # required=False, @@ -307,12 +332,14 @@ def build_request_handler(*request_args): required=False, action='store_true', help='Save the content received to the filesystem.') - parser.add_argument('--token', - default=None, - required=False, - help='The authorization token required in X-Access-Token header.') + parser.add_argument( + '--token', + default=None, + required=False, + help='The authorization token required in X-Access-Token header.') args = parser.parse_args() args.token = args.token or token - args.directory = os.getcwd() if not args.directory else os.path.abspath(args.directory) + args.directory = os.getcwd() if not args.directory else os.path.abspath( + args.directory) main(args)
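
The md5srv handlers reworked above define a small HTTP API that the Notehub routes exercise during the tests. The JSON "note" shape that write_note() expects — roughly what the Notehub HTTP route delivers — can be reconstructed from the handler: a sketch follows, where the field names come from the code above, while the server URL, token default, and note name are placeholders, and it is assumed that a JSON Content-Type is dispatched to write_note() (the body of write_file_or_note() is not shown in this diff).

import base64
import hashlib
import json
import os
import urllib.request

BASE = "http://localhost:8080"                      # placeholder address/port
TOKEN = os.environ.get("MD5SRV_TOKEN", "secret")    # matches --token / MD5SRV_TOKEN

data = b"binary chunk produced by the card.binary test"
event = {
    "body": {
        "name": "cobs-test",                        # directory the chunks are stored under
        "length": len(data),
        "md5": hashlib.md5(data).hexdigest(),       # re-checked server-side
        "chunk": 0,                                 # optional chunk index
    },
    "payload": base64.b64encode(data).decode(),
    "payload_length": len(data),                    # optional; must match "length"
}

req = urllib.request.Request(
    f"{BASE}/",
    data=json.dumps(event).encode(),
    method="POST",
    headers={"X-Access-Token": TOKEN, "Content-Type": "application/json"},
)
with urllib.request.urlopen(req) as resp:
    print(json.loads(resp.read()))                  # -> {"md5": "...", "length": ...}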
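
The raw-upload path (the one the Notehub proxy route uses) can be exercised the same way. A minimal sketch, assuming md5srv is running locally with --save and MD5SRV_TOKEN set, and assuming a non-JSON Content-Type is routed to write_file() rather than write_note():

import hashlib
import json
import os
import urllib.request

BASE = "http://localhost:8080"                      # placeholder address/port
TOKEN = os.environ.get("MD5SRV_TOKEN", "secret")

payload = b"example chunk contents"
req = urllib.request.Request(
    f"{BASE}/example-upload?chunk=0",               # chunk index is optional
    data=payload,
    method="PUT",
    headers={
        "X-Access-Token": TOKEN,                    # checked by validate_token()
        "Content-Type": "application/octet-stream",
    },
)
with urllib.request.urlopen(req) as resp:
    reply = json.loads(resp.read())

# The server answers with the MD5 and length it computed for the chunk;
# re-uploading the same chunk yields 409 Conflict.
assert reply["md5"] == hashlib.md5(payload).hexdigest()
assert reply["length"] == len(payload)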
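
Finally, the "Set up Python 3.11" step in the workflow exists because the hil_lab card_client code uses asyncio.TaskGroup, which was added in Python 3.11. A minimal illustration of that API is below; the coroutine and task names are placeholders, not part of card_client.

import asyncio

async def open_tunnel(name: str) -> str:
    await asyncio.sleep(0.1)      # stand-in for real I/O (socat tunnels, etc.)
    return f"{name} ready"

async def main() -> None:
    # TaskGroup (new in 3.11) awaits every task and cancels the remaining
    # ones if any task raises.
    async with asyncio.TaskGroup() as tg:
        usb = tg.create_task(open_tunnel("usb"))
        ocd = tg.create_task(open_tunnel("openocd"))
    print(usb.result(), ocd.result())

asyncio.run(main())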