Skip to content

Commit

Permalink
fixup: finish the test setup for the desired behavior
Browse files Browse the repository at this point in the history
  • Loading branch information
pedro-psb committed Nov 26, 2024
1 parent 561cfb2 commit 5d8a3e7
Show file tree
Hide file tree
Showing 4 changed files with 87 additions and 125 deletions.
25 changes: 21 additions & 4 deletions pulp_file/pytest_plugin.py
Original file line number Diff line number Diff line change
Expand Up @@ -83,11 +83,11 @@ def file_fixtures_root(tmp_path):

@pytest.fixture
def write_3_iso_file_fixture_data_factory(file_fixtures_root):
def _write_3_iso_file_fixture_data_factory(name, overwrite=False):
def _write_3_iso_file_fixture_data_factory(name, overwrite=False, seed=None):
file_fixtures_root.joinpath(name).mkdir(exist_ok=overwrite)
file1 = generate_iso(file_fixtures_root.joinpath(f"{name}/1.iso"))
file2 = generate_iso(file_fixtures_root.joinpath(f"{name}/2.iso"))
file3 = generate_iso(file_fixtures_root.joinpath(f"{name}/3.iso"))
file1 = generate_iso(file_fixtures_root.joinpath(f"{name}/1.iso"), seed=seed)
file2 = generate_iso(file_fixtures_root.joinpath(f"{name}/2.iso"), seed=seed)
file3 = generate_iso(file_fixtures_root.joinpath(f"{name}/3.iso"), seed=seed)
generate_manifest(
file_fixtures_root.joinpath(f"{name}/PULP_MANIFEST"), [file1, file2, file3]
)
Expand Down Expand Up @@ -364,3 +364,20 @@ def _wget_recursive_download_on_host(url, destination):
)

return _wget_recursive_download_on_host


@pytest.fixture
def generate_server_and_remote(
    file_bindings, gen_fixture_server, file_fixtures_root, gen_object_with_cleanup
):
    """Yield a factory building a fixture server plus a file remote pointing at it."""

    def _generate_server_and_remote(*, manifest_path, policy):
        # Serve the shared fixture root; manifest_path selects the URL on it.
        fixture_server = gen_fixture_server(file_fixtures_root, None)
        remote_body = {
            "name": str(uuid.uuid4()),
            "url": str(fixture_server.make_url(manifest_path)),
            "policy": policy,
        }
        remote = gen_object_with_cleanup(file_bindings.RemotesFileApi, remote_body)
        return fixture_server, remote

    yield _generate_server_and_remote

15 changes: 0 additions & 15 deletions pulp_file/tests/functional/api/test_acs.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,21 +12,6 @@
)


@pytest.fixture
def generate_server_and_remote(
    file_bindings, gen_fixture_server, file_fixtures_root, gen_object_with_cleanup
):
    """Fixture factory: start a fixture server and create a file remote against it.

    The returned callable takes keyword-only ``manifest_path`` and ``policy`` and
    returns ``(server, remote)``; the remote is registered for cleanup.
    """

    def _generate_server_and_remote(*, manifest_path, policy):
        # presumably gen_fixture_server serves file_fixtures_root over HTTP(S) —
        # confirm against the plugin that defines it.
        server = gen_fixture_server(file_fixtures_root, None)
        url = server.make_url(manifest_path)
        remote = gen_object_with_cleanup(
            file_bindings.RemotesFileApi,
            {"name": str(uuid.uuid4()), "url": str(url), "policy": policy},
        )
        return server, remote

    yield _generate_server_and_remote


@pytest.mark.parallel
def test_acs_validation_and_update(
Expand Down
163 changes: 59 additions & 104 deletions pulpcore/tests/functional/api/using_plugin/test_content_delivery.py
Original file line number Diff line number Diff line change
Expand Up @@ -143,99 +143,24 @@ def test_remote_content_changed_with_on_demand(
download_file(get_url)

# Assert again with curl just to be sure.
result = subprocess.run(["curl", "-v", get_url], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
result = subprocess.run(["curl", "-v", get_url],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
assert result.returncode == 18
assert b"* Closing connection 0" in result.stderr
assert b"curl: (18) transfer closed with outstanding read data remaining" in result.stderr


def run_server(port: int, server_dir: str, q: Queue):
    """Serve *server_dir* over HTTP on *port*; runs in a child process, never returns.

    Puts the server socket's fileno on *q* once the socket is bound, so the
    parent process can tell the server is ready.
    """
    import functools
    import http.server

    # Bind the handler to server_dir instead of os.chdir(): chdir mutates
    # process-global state, while SimpleHTTPRequestHandler's `directory`
    # parameter (Python 3.7+) scopes the root to this handler only.
    handler_cls = functools.partial(
        http.server.SimpleHTTPRequestHandler, directory=server_dir
    )
    httpd = http.server.HTTPServer(("", port), handler_cls)

    # NOTE(review): the fd number is only meaningful inside this child process;
    # the parent should treat this message as a readiness signal, not as a
    # selectable fd.
    q.put(httpd.fileno())
    httpd.serve_forever()


def create_server(port: int, server_dir: str) -> Process:
    """Start a child process serving *server_dir* over HTTP on *port*.

    Blocks until the child reports readiness, bounded by a 5 second timeout.

    Raises:
        TimeoutError: if the server does not report ready within 5 seconds.
    """
    from queue import Empty

    q = Queue()
    proc = Process(target=run_server, args=(port, server_dir, q))
    proc.start()

    # run_server puts a message on the queue once its socket is bound. Wait on
    # the queue with a timeout: select()-ing on the transmitted fd number is
    # invalid here (fd tables are per-process), and a bare q.get() could hang
    # forever if the child dies before binding.
    try:
        q.get(timeout=5)
    except Empty:
        proc.terminate()
        proc.join()
        raise TimeoutError("The test server didn't get ready.") from None
    return proc


@pytest.fixture
def http_server_factory():
    """Fixture returning a context manager that serves a directory over HTTP.

    The context manager yields the server's base URL and always tears the
    server process down on exit.
    """

    @contextmanager
    def _http_server(fixture_dir, url_path):
        # url_path is currently unused; kept for interface compatibility.
        port = random.randint(9000, 10000)
        proc = None
        try:
            proc = create_server(port, fixture_dir)
            yield f"http://localhost:{port}/"
        finally:
            # Guard against create_server raising before proc was bound —
            # otherwise the finally clause itself raises NameError and masks
            # the original error.
            if proc is not None:
                proc.terminate()
                proc.join()

    return _http_server


def test_http_server(
    http_server_factory, write_3_iso_file_fixture_data_factory, file_fixtures_root
):
    """Smoke-test: a file written by the fixture factory is reachable over HTTP."""
    server_name = "myserver"
    write_3_iso_file_fixture_data_factory(server_name)
    with http_server_factory(file_fixtures_root, server_name) as base_url:
        response = requests.get(base_url + "1.iso")
        assert response.ok is True


def test_fixture_server(
    write_3_iso_file_fixture_data_factory,
    gen_fixture_server,
):
    """Smoke-test gen_fixture_server against freshly generated fixture data."""
    import time

    import requests

    fixture_root = write_3_iso_file_fixture_data_factory("server_a")
    server = gen_fixture_server(fixture_root, None)
    target_url = server.make_url(fixture_root)
    # Fixed startup grace period before hitting the server.
    time.sleep(5)
    response = requests.get(target_url)
    assert response.ok is True


@pytest.mark.parallel
def test_handling_remote_artifact_on_demand_streaming_failure(
write_3_iso_file_fixture_data_factory,
file_repo_with_auto_publish,
file_remote_ssl_factory,
file_remote_factory,
file_bindings,
monitor_task,
monitor_task_group,
file_distribution_factory,
gen_object_with_cleanup,
http_server_factory,
file_fixtures_root,
generate_server_and_remote
):
"""
GIVEN A content synced with on-demand which has 2 RemoteArtifacts (Remote + ACS).
Expand All @@ -249,42 +174,72 @@ def test_handling_remote_artifact_on_demand_streaming_failure(
"""

# Plumbing
def generate_fixture_data(name):
manifest_path = write_3_iso_file_fixture_data_factory(name)
return file_fixtures_root / name, manifest_path
def create_simple_remote(manifest_path):
remote = file_remote_factory(
manifest_path=manifest_path, policy="on_demand")
body = RepositorySyncURL(remote=remote.pulp_href)
monitor_task(
file_bindings.RepositoriesFileApi.sync(
file_repo_with_auto_publish.pulp_href, body).task
)
return remote

def create_remote(remote_url):
return file_remote_ssl_factory(manifest_path=str(remote_url), policy="on_demand")
def create_acs_remote(manifest_path):
acs_server, acs_remote = generate_server_and_remote(
manifest_path=manifest_path, policy="on_demand"
)
acs = gen_object_with_cleanup(
file_bindings.AcsFileApi,
{"remote": acs_remote.pulp_href,
"paths": [], "name": str(uuid.uuid4())},
)
monitor_task_group(file_bindings.AcsFileApi.refresh(
acs.pulp_href).task_group)
return acs

def sync_publish_and_distribute(remote):
body = RepositorySyncURL(remote=remote.pulp_href)
monitor_task(
file_bindings.RepositoriesFileApi.sync(file_repo_with_auto_publish.pulp_href, body).task
file_bindings.RepositoriesFileApi.sync(
file_repo_with_auto_publish.pulp_href, body).task
)
repo = file_bindings.RepositoriesFileApi.read(file_repo_with_auto_publish.pulp_href)
repo = file_bindings.RepositoriesFileApi.read(
file_repo_with_auto_publish.pulp_href)
distribution = file_distribution_factory(repository=repo.pulp_href)
return repo, distribution
return distribution

def create_acs_and_bind_to_remote(remote, paths: list[str]):
acs = gen_object_with_cleanup(
file_bindings.AcsFileApi,
{"remote": remote.pulp_href, "paths": paths, "name": str(uuid.uuid4())},
)
monitor_task_group(file_bindings.AcsFileApi.refresh(acs.pulp_href).task_group)
def refresh_acs(acs):
monitor_task_group(file_bindings.AcsFileApi.refresh(
acs.pulp_href).task_group)
return acs

# GIVEN
_, sync_server_manifest_path = generate_fixture_data("sync_server", return_manifest_path=True)
acs_server_path, _ = generate_fixture_data("acs_server")
with (http_server_factory(acs_server_path, "acs_server") as acs_server_url,):
remote = create_remote(remote_url=sync_server_manifest_path)
repo, distribution = sync_publish_and_distribute(remote)
create_acs_and_bind_to_remote(remote, [acs_server_url])
def get_original_content_info(remote):
expected_files = get_files_in_manifest(remote.url)
content_unit = list(expected_files)[0]
return content_unit[0], content_unit[1]

# WHEN (first request)
def download_from_distribution(content, distribution):
content_unit_url = urljoin(distribution.base_url, content_name)
downloaded_file = download_file(content_unit_url)
actual_checksum = hashlib.sha256(downloaded_file.body).hexdigest()
return actual_checksum

# THEN (first request)
# GIVEN
basic_manifest_path = write_3_iso_file_fixture_data_factory(
"basic", seed=123)
acs_manifest_path = write_3_iso_file_fixture_data_factory("acs", seed=123)
remote = create_simple_remote(basic_manifest_path)
distribution = sync_publish_and_distribute(remote)
acs = create_acs_remote(acs_manifest_path)
refresh_acs(acs)
write_3_iso_file_fixture_data_factory("acs", overwrite=True) # corrupt

# WHEN/THEN (first request)
content_name, expected_checksum = get_original_content_info(remote)

# WHEN (second request)
with pytest.raises(ClientPayloadError, match="Response payload is not completed"):
download_from_distribution(content_name, distribution)

# THEN (second request)
# WHEN/THEN (second request)
actual_checksum = download_from_distribution(content_name, distribution)
assert actual_checksum == expected_checksum
9 changes: 7 additions & 2 deletions pulpcore/tests/functional/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import asyncio
import hashlib
import os
import random

from aiohttp import web
from dataclasses import dataclass
Expand Down Expand Up @@ -103,10 +104,14 @@ async def _download_file(url, auth=None, headers=None):
return MockDownload(body=await response.read(), response_obj=response)


def generate_iso(full_path, size=1024, relative_path=None):
def generate_iso(full_path, size=1024, relative_path=None, seed=None):
"""Generate a random file."""
with open(full_path, "wb") as fout:
contents = os.urandom(size)
if seed:
random.seed(seed)
contents = random.randbytes(size)
else:
contents = os.urandom(size)
fout.write(contents)
fout.flush()
digest = hashlib.sha256(contents).hexdigest()
Expand Down

0 comments on commit 5d8a3e7

Please sign in to comment.