Add nightly tests for quota and hard cap enforcement
Showing 3 changed files with 171 additions and 9 deletions.
@@ -0,0 +1,156 @@
import math
import time
from datetime import datetime

import requests

from .conftest import API_PREFIX
from .utils import get_crawl_status


EXEC_MINS_QUOTA = 1
EXEC_MINS_ALLOWED_OVERAGE = 10
EXEC_MINS_HARD_CAP = EXEC_MINS_QUOTA + EXEC_MINS_ALLOWED_OVERAGE


config_id = None


def test_set_execution_mins_quota(org_with_quotas, admin_auth_headers):
    r = requests.post(
        f"{API_PREFIX}/orgs/{org_with_quotas}/quotas",
        headers=admin_auth_headers,
        json={"crawlExecMinutesQuota": EXEC_MINS_QUOTA},
    )
    data = r.json()
    assert data.get("updated") is True


def test_crawl_stopped_when_quota_reached(org_with_quotas, admin_auth_headers):
    # Run crawl
    global config_id
    crawl_id, config_id = run_crawl(org_with_quotas, admin_auth_headers)
    time.sleep(1)

    while get_crawl_status(org_with_quotas, crawl_id, admin_auth_headers) in (
        "starting",
        "waiting_capacity",
    ):
        time.sleep(2)

    while get_crawl_status(org_with_quotas, crawl_id, admin_auth_headers) in (
        "running",
        "generate-wacz",
        "uploading-wacz",
        "pending-wait",
    ):
        time.sleep(2)

    # Ensure that crawl was stopped by quota
    assert (
        get_crawl_status(org_with_quotas, crawl_id, admin_auth_headers)
        == "partial_complete"
    )

    time.sleep(5)

    # Ensure crawl execution seconds went over quota
    r = requests.get(
        f"{API_PREFIX}/orgs/{org_with_quotas}/crawls/{crawl_id}/replay.json",
        headers=admin_auth_headers,
    )
    data = r.json()
    execution_seconds = data["crawlExecSeconds"]
    assert math.floor(execution_seconds / 60) >= EXEC_MINS_QUOTA

    time.sleep(5)

    # Ensure we can't start another crawl when over the quota
    r = requests.post(
        f"{API_PREFIX}/orgs/{org_with_quotas}/crawlconfigs/{config_id}/run",
        headers=admin_auth_headers,
    )
    assert r.status_code == 403
    assert r.json()["detail"] == "execution_minutes_hard_cap_reached"


def test_crawl_stopped_when_hard_cap_reached(org_with_quotas, admin_auth_headers):
    # Set allowed overage on org
    r = requests.post(
        f"{API_PREFIX}/orgs/{org_with_quotas}/billing",
        headers=admin_auth_headers,
        json={"crawlExecMinutesAllowedOverage": EXEC_MINS_ALLOWED_OVERAGE},
    )
    assert r.status_code == 200

    time.sleep(10)

    # Run new crawl from config
    r = requests.post(
        f"{API_PREFIX}/orgs/{org_with_quotas}/crawlconfigs/{config_id}/run",
        headers=admin_auth_headers,
    )
    assert r.status_code == 200
    crawl_id = r.json()["started"]
    assert crawl_id

    time.sleep(1)

    while get_crawl_status(org_with_quotas, crawl_id, admin_auth_headers) in (
        "starting",
        "waiting_capacity",
    ):
        time.sleep(2)

    while get_crawl_status(org_with_quotas, crawl_id, admin_auth_headers) in (
        "running",
        "generate-wacz",
        "uploading-wacz",
        "pending-wait",
    ):
        time.sleep(2)

    # Ensure that crawl was stopped when hard cap reached
    assert (
        get_crawl_status(org_with_quotas, crawl_id, admin_auth_headers)
        == "partial_complete"
    )

    time.sleep(5)

    # Ensure crawl execution seconds went over hard cap (stopping takes a while)
    r = requests.get(
        f"{API_PREFIX}/orgs/{org_with_quotas}",
        headers=admin_auth_headers,
    )
    data = r.json()
    execution_seconds = data["crawlExecSeconds"]
    yymm = datetime.utcnow().strftime("%Y-%m")
    assert math.floor(execution_seconds[yymm] / 60) >= EXEC_MINS_HARD_CAP

    time.sleep(5)

    # Ensure we can't start another crawl when over the hard cap
    r = requests.post(
        f"{API_PREFIX}/orgs/{org_with_quotas}/crawlconfigs/{config_id}/run",
        headers=admin_auth_headers,
    )
    assert r.status_code == 403
    assert r.json()["detail"] == "execution_minutes_hard_cap_reached"


def run_crawl(org_id, headers):
    crawl_data = {
        "runNow": True,
        "name": "Execution Mins Quota",
        "config": {
            "seeds": [{"url": "https://webrecorder.net/"}],
            "extraHops": 1,
        },
    }
    r = requests.post(
        f"{API_PREFIX}/orgs/{org_id}/crawlconfigs/",
        headers=headers,
        json=crawl_data,
    )
    data = r.json()

    return data["run_now_job"], data["id"]
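
Note: the status-polling loops above run unbounded, so a crawl stuck in "running" would hang the nightly job until the CI timeout. A bounded helper could factor out the repeated pattern; the sketch below is illustrative only, not part of this commit (the wait_until_not_in name, the 600-second default, and the use of TimeoutError are assumptions; it relies on the same time and get_crawl_status imports as the file above).

def wait_until_not_in(org_id, crawl_id, headers, states, timeout=600):
    # Illustrative helper, not part of this commit: poll get_crawl_status()
    # until the crawl leaves one of `states`, failing after `timeout` seconds
    # instead of spinning forever.
    deadline = time.monotonic() + timeout
    while get_crawl_status(org_id, crawl_id, headers) in states:
        if time.monotonic() > deadline:
            raise TimeoutError(f"crawl {crawl_id} still in {states} after {timeout}s")
        time.sleep(2)

With this helper, each pair of while loops in the tests would collapse to two calls, e.g. wait_until_not_in(org_with_quotas, crawl_id, admin_auth_headers, ("starting", "waiting_capacity")).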
@@ -0,0 +1,14 @@
"""nightly test utils"""

import requests

from .conftest import API_PREFIX


def get_crawl_status(org_id, crawl_id, headers):
    r = requests.get(
        f"{API_PREFIX}/orgs/{org_id}/crawls/{crawl_id}/replay.json",
        headers=headers,
    )
    data = r.json()
    return data["state"]