diff --git a/backend/api/constants.py b/backend/api/constants.py
index d80e5bd1..024f47a1 100644
--- a/backend/api/constants.py
+++ b/backend/api/constants.py
@@ -9,6 +9,8 @@
import humanfriendly
from rq import Retry
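+# attach a default handler so module loggers actually emit; levels are set per-logger in BackendConf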
+logging.basicConfig()
+
def determine_mandatory_environment_variables():
for variable in ("POSTGRES_URI", "S3_URL_WITH_CREDENTIALS", "PRIVATE_SALT"):
@@ -59,14 +61,16 @@ class BackendConf:
# Deployment
public_url: str = os.getenv("PUBLIC_URL") or "http://localhost"
+ # /!\ this must match the region/bucket of the S3 credentials
download_url: str = (
os.getenv("DOWNLOAD_URL")
- or "https://s3.us-west-1.wasabisys.com/org-kiwix-zimit/zim"
+ or "https://s3.eu-west-2.wasabisys.com/org-kiwix-nautilus"
)
allowed_origins = os.getenv(
"ALLOWED_ORIGINS",
"http://localhost",
).split("|")
+ debug: bool = bool(os.getenv("DEBUG") or "")
# Zimfarm (3rd party API creating ZIMs and calling back with feedback)
zimfarm_api_url: str = (
@@ -80,10 +84,16 @@ class BackendConf:
zimfarm_task_cpu: int = int(os.getenv("ZIMFARM_TASK_CPU") or "3")
zimfarm_task_memory: int = 0
zimfarm_task_disk: int = 0
- zimfarm_callback_base_url = os.getenv("ZIMFARM_CALLBACK_BASE_URL", "")
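+ # public base URL of this API, used by the Zimfarm to reach our webhook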
+ zimfarm_callback_base_url = (
+ os.getenv("ZIMFARM_CALLBACK_BASE_URL") or "https://api.nautilus.openzim.org/v1"
+ )
zimfarm_callback_token = os.getenv("ZIMFARM_CALLBACK_TOKEN", uuid.uuid4().hex)
- zimfarm_task_worker: str = os.getenv("ZIMFARM_TASK_WORKDER") or "-"
+ zimfarm_task_worker: str = os.getenv("ZIMFARM_TASK_WORKER") or "-"
zimfarm_request_timeout_sec: int = 10
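+ # public URL of the bucket where the Zimfarm uploads produced ZIMs
+ # (distinct from download_url, which serves this project's uploaded files)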
+ zim_download_url: str = (
+ os.getenv("ZIM_DOWNLOAD_URL")
+ or "https://s3.us-west-1.wasabisys.com/org-kiwix-zimit"
+ )
# Mailgun (3rd party API to send emails)
mailgun_api_url: str = os.getenv("MAILGUN_API_URL") or ""
@@ -95,6 +105,7 @@ class BackendConf:
def __post_init__(self):
self.logger = logging.getLogger(Path(__file__).parent.name)
+ self.logger.setLevel(logging.DEBUG if self.debug else logging.INFO)
self.transient_storage_path.mkdir(exist_ok=True)
self.job_retry = Retry(max=self.s3_max_tries, interval=int(self.s3_retry_wait))
@@ -119,9 +130,6 @@ def __post_init__(self):
os.getenv("ZIMFARM_TASK_DISK") or "200MiB"
)
- if not self.zimfarm_callback_base_url:
- self.zimfarm_callback_base_url = f"{self.zimfarm_api_url}/requests/hook"
-
constants = BackendConf()
logger = constants.logger
diff --git a/backend/api/database/models.py b/backend/api/database/models.py
index 81ef3d49..5bafe225 100644
--- a/backend/api/database/models.py
+++ b/backend/api/database/models.py
@@ -1,4 +1,5 @@
from datetime import datetime
+from enum import Enum
from typing import Any, ClassVar, TypeVar
from uuid import UUID
@@ -36,6 +37,7 @@ class ArchiveConfig(BaseModel):
tags: list[str]
illustration: str
filename: str
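+ # optional base64-encoded PNG, uploaded separately (like the illustration)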
+ main_logo: str | None = None
@classmethod
def init_with(cls: type[T], filename: str, **kwargs) -> T:
@@ -49,7 +51,8 @@ def init_with(cls: type[T], filename: str, **kwargs) -> T:
def is_ready(self) -> bool:
try:
for key in self.model_fields.keys():
- validate_required_values(key.title(), getattr(self, key, ""))
+ if key != "main_logo":
+ validate_required_values(key.title(), getattr(self, key, ""))
validate_title("Title", self.title)
validate_description("Description", self.description)
validate_language("Language", self.languages)
@@ -60,6 +63,18 @@ def is_ready(self) -> bool:
return True
+class ArchiveStatus(str, Enum):
+ # it's in the database but not yet requested; can still be modified
+ PENDING = "PENDING"
+ # it has been requested on the Zimfarm; cannot be modified by the user,
+ # awaiting callback from the Zimfarm
+ REQUESTED = "REQUESTED"
+ # the Zimfarm task succeeded; it now has a download_url and filesize
+ READY = "READY"
+ # the Zimfarm task failed; it can't be downloaded
+ FAILED = "FAILED"
+
+
class ArchiveConfigType(types.TypeDecorator):
cache_ok = True
impl = JSONB
@@ -89,6 +104,7 @@ class Base(MappedAsDataclass, DeclarativeBase):
# timezone below)
type_annotation_map: ClassVar = {
ArchiveConfig: ArchiveConfigType,
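+ # persist the enum as a plain string column (its value)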
+ ArchiveStatus: String,
dict[str, Any]: JSONB, # transform Python Dict[str, Any] into PostgreSQL JSONB
list[dict[str, Any]]: JSONB,
datetime: DateTime(
@@ -207,7 +223,7 @@ class Archive(Base):
completed_on: Mapped[datetime | None]
download_url: Mapped[str | None]
collection_json_path: Mapped[str | None]
- status: Mapped[str]
+ status: Mapped[ArchiveStatus]
zimfarm_task_id: Mapped[UUID | None]
email: Mapped[str | None]
config: Mapped[ArchiveConfig]
diff --git a/backend/api/email.py b/backend/api/email.py
index 0ec984f9..a9879b67 100644
--- a/backend/api/email.py
+++ b/backend/api/email.py
@@ -11,7 +11,7 @@
from api.database.models import Archive
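+# resolve templates relative to this module so lookups don't depend on the CWD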
jinja_env = Environment(
- loader=FileSystemLoader("templates"),
+ loader=FileSystemLoader(Path(__file__).parent.joinpath("templates")),
autoescape=select_autoescape(["html", "txt"]),
)
jinja_env.filters["short_id"] = lambda value: str(value)[:5]
@@ -69,7 +69,8 @@ def get_context(task: dict[str, Any], archive: Archive):
"""Jinja context dict for email notifications"""
return {
"base_url": constants.public_url,
- "download_url": constants.download_url,
+ "zim_download_url": constants.zim_download_url,
"task": task,
+ "file": next(iter(task["files"].values())) if task.get("files") else None,
"archive": archive,
}
diff --git a/backend/api/routes/__init__.py b/backend/api/routes/__init__.py
index 0d19d942..6ce0dbd6 100644
--- a/backend/api/routes/__init__.py
+++ b/backend/api/routes/__init__.py
@@ -53,3 +53,15 @@ async def validated_project(
if not project:
raise HTTPException(HTTPStatus.NOT_FOUND, f"Project not found: {project_id}")
return project
+
+
+async def userless_validated_project(
+ project_id: UUID,
+ session: Session = Depends(gen_session),
+) -> Project:
+ """Depends()-able Project from request, ensuring it exists"""
+ stmt = select(Project).filter_by(id=project_id)
+ project = session.execute(stmt).scalar()
+ if not project:
+ raise HTTPException(HTTPStatus.NOT_FOUND, f"Project not found: {project_id}")
+ return project
diff --git a/backend/api/routes/archives.py b/backend/api/routes/archives.py
index 786b1809..0eb06cd4 100644
--- a/backend/api/routes/archives.py
+++ b/backend/api/routes/archives.py
@@ -2,11 +2,11 @@
import datetime
import io
import json
-from enum import Enum
from http import HTTPStatus
from typing import Any, BinaryIO
from uuid import UUID
+import dateutil.parser
import zimscraperlib.image
from fastapi import APIRouter, Depends, HTTPException, UploadFile
from pydantic import BaseModel, ConfigDict, TypeAdapter
@@ -17,7 +17,7 @@
from api.constants import constants, logger
from api.database import gen_session
-from api.database.models import Archive, ArchiveConfig, Project
+from api.database.models import Archive, ArchiveConfig, ArchiveStatus, Project
from api.email import get_context, jinja_env, send_email_via_mailgun
from api.files import (
calculate_file_size,
@@ -25,30 +25,21 @@
normalize_filename,
read_file_in_chunks,
)
-from api.routes import validated_project
+from api.routes import userless_validated_project, validated_project
from api.s3 import s3_file_key, s3_storage
from api.zimfarm import RequestSchema, WebhookPayload, request_task
router = APIRouter()
-class ArchiveStatus(str, Enum):
- # It's in database but not requested and can be modified
- PENDING = "PENDING"
- # it has been ZF-requested; can not be modified by user,
- # awaiting callback from ZimFarm
- REQUESTED = "REQUESTED"
- # ZimFarm task succeeded, it now has a download_url and filesize
- READY = "READY"
- # ZimFarm task failed, cant be downloaded
- FAILED = "FAILED"
+class ArchiveConfigRequest(BaseModel):
+ email: str | None
+ config: ArchiveConfig
+ model_config = ConfigDict(from_attributes=True)
class ArchiveRequest(BaseModel):
email: str | None
- config: ArchiveConfig
-
- model_config = ConfigDict(from_attributes=True)
class ArchiveModel(BaseModel):
@@ -58,6 +49,8 @@ class ArchiveModel(BaseModel):
filesize: int | None
created_on: datetime.datetime
+ requested_on: datetime.datetime | None
+ completed_on: datetime.datetime | None
download_url: str | None
status: str
email: str | None
@@ -79,6 +72,19 @@ def validated_archive(
return archive
+def userless_validated_archive(
+ archive_id: UUID,
+ project: Project = Depends(userless_validated_project),
+ session: Session = Depends(gen_session),
+) -> Archive:
+ """Depends()-able archive from request, ensuring it exists"""
+ stmt = select(Archive).filter_by(id=archive_id).filter_by(project_id=project.id)
+ archive = session.execute(stmt).scalar()
+ if not archive:
+ raise HTTPException(HTTPStatus.NOT_FOUND, f"Archive not found: {archive_id}")
+ return archive
+
+
@router.get("/{project_id}/archives", response_model=list[ArchiveModel])
async def get_all_archives(
project: Project = Depends(validated_project),
@@ -98,7 +104,7 @@ async def get_archive(archive: Archive = Depends(validated_archive)) -> ArchiveM
status_code=HTTPStatus.NO_CONTENT,
)
async def update_archive(
- archive_request: ArchiveRequest,
+ archive_request: ArchiveConfigRequest,
archive: Archive = Depends(validated_archive),
session: Session = Depends(gen_session),
):
@@ -158,6 +164,48 @@ def validate_illustration_image(upload_file: UploadFile):
upload_file.file.seek(0)
+def validate_main_logo_image(upload_file: UploadFile):
+ """
+ Validates the main logo image to ensure it meets the requirements.
+
+ Args:
+ upload_file (UploadFile): The uploaded main logo image.
+
+ Raises:
+ HTTPException: If the main logo is invalid,
+ the file is empty,
+ or the file is not an image.
+ """
+ filename = upload_file.filename
+
+ if not filename:
+ raise HTTPException(
+ status_code=HTTPStatus.BAD_REQUEST, detail="Filename is invalid."
+ ) # pragma: no cover
+
+ size = calculate_file_size(upload_file.file)
+
+ if size == 0:
+ raise HTTPException(status_code=HTTPStatus.BAD_REQUEST, detail="Empty file.")
+
+ # using the same quota as the illustration
+ if size > constants.illustration_quota:
+ raise HTTPException(
+ status_code=HTTPStatus.REQUEST_ENTITY_TOO_LARGE,
+ detail="Main logo is too large.",
+ )
+
+ mimetype = filesystem.get_content_mimetype(upload_file.file.read(2048))
+
+ if "image/" not in mimetype:
+ raise HTTPException(
+ status_code=HTTPStatus.BAD_REQUEST,
+ detail="Illustration is not a valid image.",
+ )
+
+ upload_file.file.seek(0)
+
+
@router.post(
"/{project_id}/archives/{archive_id}/illustration",
status_code=HTTPStatus.CREATED,
@@ -197,6 +245,37 @@ async def upload_illustration(
session.execute(stmt)
+@router.post(
+ "/{project_id}/archives/{archive_id}/main_logo",
+ status_code=HTTPStatus.CREATED,
+)
+async def upload_main_logo(
+ uploaded_logo: UploadFile,
+ archive: Archive = Depends(validated_archive),
+ session: Session = Depends(gen_session),
+):
+ """Upload an illustration of a archive."""
+ validate_main_logo_image(uploaded_logo)
+
+ src = io.BytesIO()
+ for chunk in read_file_in_chunks(uploaded_logo.file):
+ src.write(chunk)
+ dst = io.BytesIO()
+ try:
+ zimscraperlib.image.convert_image(
+ src, dst, fmt="PNG" # pyright: ignore [reportGeneralTypeIssues]
+ )
+ except Exception as exc:
+ raise HTTPException(
+ status_code=HTTPStatus.BAD_REQUEST,
+ detail="Main logo cannot be converted to PNG",
+ ) from exc
+
+ archive.config.main_logo = base64.b64encode(dst.getvalue()).decode("utf-8")
+ stmt = update(Archive).filter_by(id=archive.id).values(config=archive.config)
+ session.execute(stmt)
+
+
def gen_collection_for(project: Project) -> tuple[list[dict[str, Any]], BinaryIO, str]:
collection = []
# project = get_project_by_id(project_id)
@@ -210,7 +289,7 @@ def gen_collection_for(project: Project) -> tuple[list[dict[str, Any]], BinaryIO
entry["authors"] = ", ".join(file.authors)
entry["files"] = [
{
- "uri": f"{constants.download_url}/{s3_file_key(project.id, file.hash)}",
+ "url": f"{constants.download_url}/{s3_file_key(project.id, file.hash)}",
"filename": file.filename,
}
]
@@ -225,22 +304,22 @@ def gen_collection_for(project: Project) -> tuple[list[dict[str, Any]], BinaryIO
return collection, file, digest
-def get_collection_key(project_id: UUID, collection_hash: str) -> str:
- # using .json suffix (for now) so we can debug live URLs in-browser
- return f"{s3_file_key(project_id=project_id, file_hash=collection_hash)}.json"
+def get_file_key(project_id: UUID, file_hash: str, suffix: str) -> str:
+ # suffix useful to debug live URLs in-browser
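+ # e.g. suffix=".json" turns the raw S3 key into "<key>.json"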
+ return f"{s3_file_key(project_id=project_id, file_hash=file_hash)}{suffix}"
-def upload_collection_to_s3(project: Project, collection_file: BinaryIO, s3_key: str):
+def upload_file_to_s3(project: Project, file: BinaryIO, s3_key: str):
try:
if s3_storage.storage.has_object(s3_key):
logger.debug(f"Object `{s3_key}` already in S3… weird but OK")
return
- logger.debug(f"Uploading collection to `{s3_key}`")
- s3_storage.storage.upload_fileobj(fileobj=collection_file, key=s3_key)
+ logger.debug(f"Uploading file to `{s3_key}`")
+ s3_storage.storage.upload_fileobj(fileobj=file, key=s3_key)
s3_storage.storage.set_object_autodelete_on(s3_key, project.expire_on)
except Exception as exc:
- logger.error(f"Collection failed to upload to s3 `{s3_key}`: {exc}")
+ logger.error(f"File failed to upload to s3 `{s3_key}`: {exc}")
raise exc
@@ -248,10 +327,15 @@ def upload_collection_to_s3(project: Project, collection_file: BinaryIO, s3_key:
"/{project_id}/archives/{archive_id}/request", status_code=HTTPStatus.CREATED
)
async def request_archive(
+ archive_request: ArchiveRequest,
archive: Archive = Depends(validated_archive),
project: Project = Depends(validated_project),
session: Session = Depends(gen_session),
):
+ # update archive email
+ stmt = update(Archive).filter_by(id=archive.id).values(email=archive_request.email)
+ session.execute(stmt)
+
if archive.status != ArchiveStatus.PENDING:
raise HTTPException(
status_code=HTTPStatus.BAD_REQUEST,
@@ -271,18 +355,37 @@ async def request_archive(
detail="Project is not ready (no archive or no files)",
)
+ # upload illustration
+ illustration = io.BytesIO(base64.b64decode(archive.config.illustration))
+ illus_key = get_file_key(
+ project_id=archive.project_id,
+ file_hash=generate_file_hash(illustration),
+ suffix=".png",
+ )
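+ # rewind: generate_file_hash consumed the stream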
+ illustration.seek(0)
+ # upload it to S3
+ upload_file_to_s3(project=project, file=illustration, s3_key=illus_key)
+
+ # upload main-logo
+ if archive.config.main_logo:
+ main_logo = io.BytesIO(base64.b64decode(archive.config.main_logo))
+ main_logo_key = get_file_key(
+ project_id=archive.project_id,
+ file_hash=generate_file_hash(main_logo),
+ suffix=".png",
+ )
+ main_logo.seek(0)
+ # upload it to S3
+ upload_file_to_s3(project=project, file=main_logo, s3_key=main_logo_key)
+
# gen collection and stream
collection, collection_file, collection_hash = gen_collection_for(project=project)
- collection_key = get_collection_key(
- project_id=archive.project_id, collection_hash=collection_hash
+ collection_key = get_file_key(
+ project_id=archive.project_id, file_hash=collection_hash, suffix=".json"
)
# upload it to S3
- upload_collection_to_s3(
- project=project,
- collection_file=collection_file,
- s3_key=collection_key,
- )
+ upload_file_to_s3(project=project, file=collection_file, s3_key=collection_key)
# Everything's on S3, prepare and submit a ZF request
request_def = RequestSchema(
@@ -295,18 +398,30 @@ async def request_archive(
creator=archive.config.creator,
publisher=archive.config.publisher,
tags=archive.config.tags,
- main_logo_url=None,
- illustration_url=f"{constants.download_url}/{collection_key}",
+ main_logo_url=(
+ f"{constants.download_url}/{main_logo_key}"
+ if archive.config.main_logo
+ else ""
+ ),
+ illustration_url=f"{constants.download_url}/{illus_key}",
)
task_id = request_task(
- archive_id=archive.id, request_def=request_def, email=archive.email
+ project_id=project.id,
+ archive_id=archive.id,
+ request_def=request_def,
+ email=archive.email,
)
+ # temporarily record the Archive filesize as the sum of its content files;
+ # the actual ZIM size will be updated upon completion
+ archive_files_size = sum(file.filesize for file in project.files)
+
# record the new status in DB (REQUESTED, with the ZF task ID)
stmt = (
update(Archive)
.filter_by(id=archive.id)
.values(
+ filesize=archive_files_size,
requested_on=datetime.datetime.now(tz=datetime.UTC),
collection_json_path=collection_key,
status=ArchiveStatus.REQUESTED,
@@ -319,7 +434,7 @@ async def request_archive(
@router.post("/{project_id}/archives/{archive_id}/hook", status_code=HTTPStatus.CREATED)
async def record_task_feedback(
payload: WebhookPayload,
- archive: Archive = Depends(validated_archive),
+ archive: Archive = Depends(userless_validated_archive),
session: Session = Depends(gen_session),
token: str = "",
target: str = "",
@@ -342,12 +457,14 @@ async def record_task_feedback(
stmt: ExecutableStatement | None = None
if payload.status == "succeeded":
try:
+ if not payload.files:
+ raise OSError("No files in payload")
# should we check for file["status"] == "uploaded"?
file: dict = next(iter(payload.files.values()))
filesize = file["size"]
- completed_on = datetime.datetime.fromisoformat(file["uploaded_timestamp"])
+ completed_on = dateutil.parser.parse(file["uploaded_timestamp"])
download_url = (
- f"{constants.download_url}/"
+ f"{constants.zim_download_url}"
f"{payload.config['warehouse_path']}/"
f"{file['name']}"
)
@@ -366,6 +483,7 @@ async def record_task_feedback(
status=status,
)
)
+
if payload.status in ("failed", "canceled"):
stmt = (
update(Archive).filter_by(id=archive.id).values(status=ArchiveStatus.FAILED)
@@ -373,7 +491,6 @@ async def record_task_feedback(
if stmt is not None:
try:
session.execute(stmt)
- session.commit()
except Exception as exc:
logger.error(
"Failed to update Archive with FAILED status {archive.id}: {exc!s}"
@@ -387,6 +504,6 @@ async def record_task_feedback(
context = get_context(task=payload.model_dump(), archive=archive)
subject = jinja_env.get_template("email_subject.txt").render(**context)
body = jinja_env.get_template("email_body.html").render(**context)
- send_email_via_mailgun(target, subject, body)
+ send_email_via_mailgun([target], subject, body)
return {"status": "success"}
diff --git a/backend/api/routes/projects.py b/backend/api/routes/projects.py
index a91bb9e4..2eb306c6 100644
--- a/backend/api/routes/projects.py
+++ b/backend/api/routes/projects.py
@@ -8,9 +8,8 @@
from sqlalchemy.orm import Session
from api.database import gen_session
-from api.database.models import Archive, ArchiveConfig, Project, User
+from api.database.models import Archive, ArchiveConfig, ArchiveStatus, Project, User
from api.routes import validated_project, validated_user
-from api.routes.archives import ArchiveStatus
router = APIRouter(prefix="/projects")
@@ -46,7 +45,7 @@ async def create_project(
new_archive = Archive(
created_on=now,
status=ArchiveStatus.PENDING,
- config=ArchiveConfig.init_with(filename="-"),
+ config=ArchiveConfig.init_with(filename="nautilus.zim"),
filesize=None,
requested_on=None,
completed_on=None,
diff --git a/backend/api/templates/email_body.html b/backend/api/templates/email_body.html
index 7621ed70..afe25ee2 100644
--- a/backend/api/templates/email_body.html
+++ b/backend/api/templates/email_body.html
@@ -9,10 +9,9 @@
Zim requested!
{% if task.status == "succeeded" %}
Zim is ready!
Your Zim request of a Nautilus ZIM for “{{ task.config.flags.title }}” has completed.
-Here it is:
-{% if task.files %}
-{% for file in task.files.values() %}- {{ file.name }} ({{ file.size|format_size }})
-{% endfor %}
-
-{% endif %}
+{% if archive.download_url %}
+Here it is:
+{{ file.name }} ({{ file.size|format_size }})
+{% endif %}
{% endif %}
{% if task.status in ("failed", "canceled") %}Your ZIM request of a Nautilus ZIM for “{{ task.config.flags.title }}” has failed!
diff --git a/backend/api/zimfarm.py b/backend/api/zimfarm.py
index a03660d3..3993647f 100644
--- a/backend/api/zimfarm.py
+++ b/backend/api/zimfarm.py
@@ -32,7 +32,7 @@ class RequestSchema:
creator: str
publisher: str
tags: list[str]
- main_logo_url: str | None
+ main_logo_url: str
illustration_url: str
@@ -41,21 +41,10 @@ class WebhookPayload(BaseModel):
_id: str
status: str
- timestamp: dict
- schedule_name: str
- worker_name: str
- updated_at: str
config: dict
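+ # may be null or absent when the task failed before producing files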
+ files: dict[str, dict] | None = None
original_schedule_name: str
- events: list[dict]
- debug: dict
- requested_by: str
- canceled_by: str
- container: str
- priority: int
- notification: dict
- files: dict[str, dict]
- upload: dict
+ updated_at: str
class TokenData:
@@ -130,7 +119,7 @@ def authenticate(*, force: bool = False):
)
except Exception:
TokenData.ACCESS_TOKEN = TokenData.REFRESH_TOKEN = ""
- TokenData.ACCESS_TOKEN_EXPIRY = datetime.datetime = datetime.datetime(
+ TokenData.ACCESS_TOKEN_EXPIRY = datetime.datetime(
2000, 1, 1, tzinfo=datetime.UTC
)
else:
@@ -213,7 +202,7 @@ def test_connection():
def request_task(
- archive_id: UUID, request_def: RequestSchema, email: str | None
+ project_id: UUID, archive_id: UUID, request_def: RequestSchema, email: str | None
) -> UUID:
ident = uuid4().hex
@@ -221,16 +210,17 @@ def request_task(
"collection": request_def.collection_url,
"name": request_def.name,
"output": "/output",
- "zim_file": f"nautilus_{archive_id}_{ident}.zim",
+ "zim-file": f"nautilus_{archive_id}_{ident}.zim",
"language": request_def.language,
"title": request_def.title,
"description": request_def.description,
"creator": request_def.creator,
"publisher": request_def.publisher,
- "tags": request_def.tags,
- "main_logo": request_def.main_logo_url,
+ "tags": ";".join(request_def.tags),
"favicon": request_def.illustration_url,
}
+ if request_def.main_logo_url:
+ flags.update({"main-logo": request_def.main_logo_url})
config = {
"task_name": "nautilus",
@@ -266,6 +256,7 @@ def request_task(
if email:
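+ # Zimfarm will POST task updates to this per-archive hook, authenticated by token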
url = (
f"{constants.zimfarm_callback_base_url}"
+ f"/projects/{project_id}/archives/{archive_id}/hook"
f"?token={constants.zimfarm_callback_token}&target={email}"
)
payload.update(
@@ -297,6 +288,7 @@ def request_task(
payload={
"schedule_names": [schedule_name],
"worker": constants.zimfarm_task_worker,
+ "priority": "6",
},
)
if not success:
diff --git a/backend/tests/conftest.py b/backend/tests/conftest.py
index b30a9a8d..724604ff 100644
--- a/backend/tests/conftest.py
+++ b/backend/tests/conftest.py
@@ -14,10 +14,16 @@
from starlette.testclient import TestClient
from api.database import Session
-from api.database.models import Archive, ArchiveConfig, File, Project, User
+from api.database.models import (
+ Archive,
+ ArchiveConfig,
+ ArchiveStatus,
+ File,
+ Project,
+ User,
+)
from api.entrypoint import app
from api.files import save_file
-from api.routes.archives import ArchiveStatus
from api.s3 import s3_storage
pytestmark = pytest.mark.asyncio(scope="package")
diff --git a/backend/tests/routes/test_archives.py b/backend/tests/routes/test_archives.py
index 99da268d..984e5c40 100644
--- a/backend/tests/routes/test_archives.py
+++ b/backend/tests/routes/test_archives.py
@@ -224,7 +224,8 @@ def test_upload_illustration_without_wrong_authorization(
async def test_request_archive_not_ready(alogged_in_client, project_id, archive_id):
response = await alogged_in_client.post(
f"{constants.api_version_prefix}/projects/"
- f"{project_id}/archives/{archive_id}/request"
+ f"{project_id}/archives/{archive_id}/request",
+ json={"email": ""},
)
assert response.status_code == HTTPStatus.CONFLICT
@@ -242,6 +243,7 @@ async def test_request_archive_ready(
response = await alogged_in_client.post(
f"{constants.api_version_prefix}/projects/"
- f"{expiring_project_id}/archives/{expiring_archive_id}/request"
+ f"{expiring_project_id}/archives/{expiring_archive_id}/request",
+ json={"email": ""},
)
assert response.status_code == HTTPStatus.CREATED
diff --git a/dev/docker-compose.yml b/dev/docker-compose.yml
index 07cc3c89..1b071e51 100644
--- a/dev/docker-compose.yml
+++ b/dev/docker-compose.yml
@@ -75,47 +75,6 @@ services:
- NAUTILUS_FILE_REFRESH_EVERY_MS=${NAUTILUS_FILE_REFRESH_EVERY_MS}
depends_on:
- backend
- frontend-dev:
- build:
- dockerfile: ../dev/frontend-tools/Dockerfile
- context: ../frontend
- container_name: nautilus-frontend-dev
- volumes:
- - ../frontend/public:/src/public
- - ../frontend/src:/src/src
- ports:
- - 8082:80
- environment:
- - NAUTILUS_WEB_API=http://localhost:8083/v1
- - NAUTILUS_FILE_QUOTA=${NAUTILUS_FILE_QUOTA}
- - NAUTILUS_PROJECT_QUOTA=${NAUTILUS_PROJECT_QUOTA}
- - NAUTILUS_FILE_REFRESH_EVERY_MS=${NAUTILUS_FILE_REFRESH_EVERY_MS}
- depends_on:
- - backend-dev
- backend-dev:
- build:
- dockerfile: ../dev/backend-tools-tests/Dockerfile
- context: ../backend
- volumes:
- - backend_storage:/storage
- - ../backend:/app
- container_name: nautilus-backend-dev
- command: fastapi dev --host 0.0.0.0 --port 80 api/entrypoint.py
- ports:
- - 8083:80
- environment:
- - POSTGRES_URI=${POSTGRES_URI}
- - REDIS_URI=${REDIS_URI}
- - S3_URL_WITH_CREDENTIALS=${S3_URL_WITH_CREDENTIALS}
- - PRIVATE_SALT=secrectkey
- - TRANSIENT_STORAGE_PATH=/storage
- - CHANNEL_NAME=${CHANNEL_NAME}
- - ALLOWED_ORIGINS=http://localhost:8081|http://localhost:8082
- depends_on:
- database:
- condition: service_healthy
- redis:
- condition: service_healthy
backend-tools:
build:
dockerfile: ../dev/backend-tools-tests/Dockerfile
diff --git a/dev/frontend-tools/Dockerfile b/dev/frontend-tools/Dockerfile
index 97eb7764..69e062d6 100644
--- a/dev/frontend-tools/Dockerfile
+++ b/dev/frontend-tools/Dockerfile
@@ -3,13 +3,14 @@ FROM node:20.13.0
ENV NODE_ENV=dev
ENV NAUTILUS_WEB_API http://localhost:8080/v1
-COPY . /src
-WORKDIR /src
+WORKDIR /work
+COPY . /work/
+
RUN yarn \
&& printf "#!/bin/sh\n\
\n\
-JS_PATH=/src/public/environ.json\n\
+JS_PATH=/work/public/environ.json\n\
cp -v \"\${JS_PATH}\" \"\${JS_PATH}.orig\"\n\
echo \"dump NAUTILUS_* environ variables to \$JS_PATH\"\n\
\n\
diff --git a/dev/reload-compose.yaml b/dev/reload-compose.yaml
index 8ebcc8b4..fb93da90 100644
--- a/dev/reload-compose.yaml
+++ b/dev/reload-compose.yaml
@@ -1,10 +1,11 @@
+name: nautwebui-reload
services:
database:
image: postgres:15.3-bullseye
- container_name: nautilus-database
+ container_name: nautilus-reload-db
restart: always
volumes:
- - pg_data_nautilus:/var/lib/postgresql/data
+ - nautilus-postgres:/var/lib/postgresql/data
environment:
- POSTGRES_USER=nautilus
- POSTGRES_PASSWORD=nautilus
@@ -18,10 +19,10 @@ services:
retries: 10
redis:
image: redis:7.2
- container_name: nautilus-redis
+ container_name: nautilus-reload-redis
restart: always
volumes:
- - pg_data_redis:/data
+ - nautilus-reload-redis:/data
ports:
- 6379:6379
healthcheck:
@@ -33,9 +34,9 @@ services:
dockerfile: ../dev/backend-tools-tests/Dockerfile
context: ../backend
volumes:
- - backend_storage:/storage
+ - nautilus-reload-storage:/storage
- ../backend:/app
- container_name: nautilus-backend-worker
+ container_name: nautilus-reload-backend-worker
environment:
- POSTGRES_URI=${POSTGRES_URI}
- REDIS_URI=${REDIS_URI}
@@ -43,18 +44,19 @@ services:
- PRIVATE_SALT=secrectkey
- TRANSIENT_STORAGE_PATH=/storage
- CHANNEL_NAME=${CHANNEL_NAME}
+ - DEBUG=1
entrypoint: ''
command: ["rq-worker"]
depends_on:
- - backend-dev
- frontend-dev:
+ - backend
+ frontend:
build:
dockerfile: ../dev/frontend-tools/Dockerfile
context: ../frontend
- container_name: nautilus-frontend-dev
+ container_name: nautilus-reload-frontend
volumes:
- - ../frontend/public:/src/public
- - ../frontend/src:/src/src
+ - ../frontend/public:/work/public
+ - ../frontend/src:/work/src
ports:
- 8082:80
environment:
@@ -62,16 +64,17 @@ services:
- NAUTILUS_FILE_QUOTA=${NAUTILUS_FILE_QUOTA}
- NAUTILUS_PROJECT_QUOTA=${NAUTILUS_PROJECT_QUOTA}
- NAUTILUS_FILE_REFRESH_EVERY_MS=${NAUTILUS_FILE_REFRESH_EVERY_MS}
+ - DEBUG=1
depends_on:
- - backend-dev
- backend-dev:
+ - backend
+ backend:
build:
dockerfile: ../dev/backend-tools-tests/Dockerfile
context: ../backend
volumes:
- - backend_storage:/storage
+ - nautilus-reload-storage:/storage
- ../backend:/app
- container_name: nautilus-backend-dev
+ container_name: nautilus-reload-backend
command: fastapi dev --host 0.0.0.0 --port 80 api/entrypoint.py
ports:
- 8083:80
@@ -84,12 +87,22 @@ services:
- CHANNEL_NAME=${CHANNEL_NAME}
- ALLOWED_ORIGINS=http://localhost:8081|http://localhost:8082
- RUN_DB_MIGRATIONS=y
+ - ZIMFARM_API_USERNAME=${ZIMFARM_API_USERNAME}
+ - ZIMFARM_API_PASSWORD=${ZIMFARM_API_PASSWORD}
+ - ZIMFARM_TASK_WORKER=${ZIMFARM_TASK_WORKER}
+ - ZIMFARM_CALLBACK_BASE_URL=${ZIMFARM_CALLBACK_BASE_URL}
+ - DOWNLOAD_URL=${DOWNLOAD_URL}
+ - DEBUG=1
+ - ZIMFARM_CALLBACK_TOKEN=${ZIMFARM_CALLBACK_TOKEN}
+ - MAILGUN_API_KEY=${MAILGUN_API_KEY}
+ - MAILGUN_API_URL=${MAILGUN_API_URL}
+ - MAILGUN_FROM=${MAILGUN_FROM}
depends_on:
database:
condition: service_healthy
redis:
condition: service_healthy
volumes:
- pg_data_nautilus:
- pg_data_redis:
- backend_storage:
+ nautilus-postgres:
+ nautilus-reload-redis:
+ nautilus-reload-storage:
diff --git a/frontend/package.json b/frontend/package.json
index 50a10afe..0d198733 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -22,8 +22,10 @@
"@popperjs/core": "^2.11.8",
"axios": "^1.4.0",
"bootstrap": "^5.3.1",
+ "luxon": "^3.5.0",
"pinia": "^2.0.36",
"pinia-plugin-persistedstate": "^3.2.0",
+ "smart-tagz": "^0.4.1",
"uuid": "^9.0.0",
"vue": "^3.3.2",
"vue-router": "^4.2.0"
diff --git a/frontend/src/assets/main.css b/frontend/src/assets/main.css
index a14f53a7..e61792e6 100644
--- a/frontend/src/assets/main.css
+++ b/frontend/src/assets/main.css
@@ -1,3 +1,20 @@
:root {
--main-color: #ff8733;
}
+
+.custom-btn-outline-primary {
+ .btn-outline-primary {
+ --bs-btn-color: var(--main-color);
+ --bs-btn-border-color: var(--main-color);
+ --bs-btn-hover-bg: var(--main-color);
+ --bs-btn-hover-border-color: var(--main-color);
+ --bs-btn-active-bg: var(--main-color);
+ --bs-btn-active-border-color: var(--main-color);
+ --bs-btn-disabled-color: var(--main-color);
+ --bs-btn-disabled-border-color: var(--main-color);
+ }
+}
+
+.active {
+ background-color: orange;
+}
diff --git a/frontend/src/commons.ts b/frontend/src/commons.ts
new file mode 100644
index 00000000..c7cc99b5
--- /dev/null
+++ b/frontend/src/commons.ts
@@ -0,0 +1,22 @@
+import { useProjectStore } from '@/stores/stores'
+import { ArchiveStatus, type Archive } from '@/constants'
+
+export function getPreviousArchives(): Array<Archive> {
+  const storeProject = useProjectStore()
+  return storeProject.lastProjectArchives.filter((item) => item.status != ArchiveStatus.PENDING)
+}
+
+export function getLastPreviousArchive(): Archive | undefined {
+  const previousArchives = getPreviousArchives()
+  return previousArchives[previousArchives.length - 1]
+}
+
+export function getAdditionalPreviousArchives(): Array<Archive> {
+  const previousArchives = getPreviousArchives()
+  const lastPreviousArchive = getLastPreviousArchive()
+  if (!lastPreviousArchive) {
+    return []
+  }
+  return previousArchives.filter((item) => item.id != lastPreviousArchive.id)
+}
+
+export function hasAdditionalPrevious(): boolean {
+  return getAdditionalPreviousArchives().length > 0
+}
diff --git a/frontend/src/components/ArchivesList.vue b/frontend/src/components/ArchivesList.vue
new file mode 100644
index 00000000..f30e4f20
--- /dev/null
+++ b/frontend/src/components/ArchivesList.vue
@@ -0,0 +1,54 @@
+
+
+
+
+
diff --git a/frontend/src/components/ImageUpload.vue b/frontend/src/components/ImageUpload.vue
new file mode 100644
index 00000000..964a1bc0
--- /dev/null
+++ b/frontend/src/components/ImageUpload.vue
@@ -0,0 +1,142 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/frontend/src/components/LatestArchive.vue b/frontend/src/components/LatestArchive.vue
new file mode 100644
index 00000000..354ca7e4
--- /dev/null
+++ b/frontend/src/components/LatestArchive.vue
@@ -0,0 +1,40 @@
+
+
+
+
+ [Download {{ props.archive.config.filename }} ({{ archiveFileSize }} of files)]
+
+ requested on {{ formattedDate(archive.requested_on) }}
+
+
+
+
+
+
+
diff --git a/frontend/src/components/ZIMMetadataComponent.vue b/frontend/src/components/ZIMMetadataComponent.vue
new file mode 100644
index 00000000..81cd7fda
--- /dev/null
+++ b/frontend/src/components/ZIMMetadataComponent.vue
@@ -0,0 +1,335 @@
+
+
+
+
+
diff --git a/frontend/src/constants.ts b/frontend/src/constants.ts
index e54a3882..8a97b5da 100644
--- a/frontend/src/constants.ts
+++ b/frontend/src/constants.ts
@@ -15,6 +15,32 @@ export interface Project {
expire_on?: string
}
+export interface Archive {
+ id: string
+ project_id: string
+ status: string
+ email?: string
+ filesize?: number
+ created_on: string
+ requested_on?: string
+ completed_on?: string
+ download_url?: string
+ config: ArchiveConfig
+}
+
+export interface ArchiveConfig {
+ title: string
+ description: string
+ name: string
+ publisher: string
+ creator: string
+ languages: string
+ tags: string[]
+ illustration: string
+ filename: string
+ main_logo?: string
+}
+
export interface User {
id: string
created_on: string
@@ -161,3 +187,26 @@ export interface MetadataEditorFormType {
authors: string[]
filename: string
}
+
+export type ArchiveMetadataFormType = {
+ title: string
+ description: string
+ name: string
+ creator: string
+ publisher: string
+ language: string
+ filename: string
+ tags: string[]
+}
+
+export enum ArchiveStatus {
+ PENDING = 'PENDING',
+ REQUESTED = 'REQUESTED',
+ READY = 'READY',
+ FAILED = 'FAILED'
+}
+
+export const DEFAULT_MAIN_LOGO: string =
+ 'iVBORw0KGgoAAAANSUhEUgAAASwAAABBCAMAAABcvml3AAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAwBQTFRFR3BM/////////////////////////////5EA/////////////////////18d/////5IA/3kL/////5IA/5EE/48A/2QY/3IN/5QK/5IB/5ML/bZQ/5EA/5MA/2Mb/5EA/5IA/64A/5MA/5EA/7JG/5MA/2Qa/5AX/2Qa/5IA/5MF/5YM/5wR/2UZ/5EA/8Qo/5sP/5gO/2Ma/5EA/5IA/5wQ/7ZM/7dO/7I0/7ZN/2MZ/7RO/3sP/2Ac/14W/2YQ/5sc/5wK/2Qb/5cK/5IA/5cQ/5IF/6Y7/7xV/7ZM/7ZL+7dS/2Qa/5YL/7ZM/4gM/7lJ/7BU/7hK/4sA/71E/7dM/5UA/2Eb/1kX/1sZ/2sb/5AA/7UQ/50D/50Z/5AA/5cN+qg2/5EG/2Qa/7ZM/34A/6Qj/7ZU/7RO/7VM/5cN/5IQ/24X/48b/8JA/7ZL/7dM4d7O/34A/4cA8tWZ/5IA/4cI2P//2P7/4P//8phy/3gA/0oA/34A/////5EA/2Qa5v///7ZM/5AA/40A/4oC/2Ma/44A/2UZ/5sA/48A/4wA2fL//5IA/75b/4sF/18c/4sA/71a/7dM/7xY/1sf4f///7tW/7hQ/5gA/4gB0////10e/5oA/5MB/4kA/7lT/7VK/7NC/50D/14d/1wf/2Ea/2cc/7pS/5UA/28Q/6kw1/D/8///2fH//6Yr/6w2/30L5P///2Ub/5UJ1/r//54b/6Af/8Bf/7RH/1IB/3wA/6Ml/5oR4vn//5gN/1og/4UG1f///0wA/5IJ/38E/5wX/7pV5Pz/6f///4UA/7BA/3sP/7A8/4kF/3YL1/P//0gA/646/5kR/2oX/4IJ9b5n/1sL3Ov24/38+X1C6u7W6d2r9NCQ3vb8b4OM4d7J/40F0enu5ubG97JF9blp+IFI6/Le9Y9e+cl45se46byn87xn/79E+3Qy/1Ygm7G2kai3/8VoyeDoy+HjZHeEIDE7YnR+MUFN/2IB8o9o94JN/08a5vTl/5EG/0MAjBDboQAAAH50Uk5TALtEdzPuzBGBZt1ViKrhmZoTIkVMWkIWIvICT1KTDobLBBj3J+fcDXjEjbSUzikVXqzu+/mAxL0+WDt7B9eOUmS2oDf+pM8IrPLo5MPb4LJdIt3FOJLs5N4ts8JQ0z+wnvff95dl7W/VbHh0L4RGS5LCcZkRluJI057e7+JGWqqUagAABz9JREFUeNrtmgdw01YcxuWNMwhNKRvKpoWyN6XQlkJbdseV7r0n3VtSNCxbih3PeCSxY5MdMklIQhKgZRM2oS27ZXTvvdsnS7aF44TekeNM73138dOznuXT777/eHIQBAoKCgoKCgoK6nzrpr7Lpyy+8cYFUx6ePhTS6FB9pxJArtxclw2MU6ZfD5G0p6WPE0SR1bqFaGjgx1yCmP8EpBJTY28gCKepobRpc43XWNPcVNpgchLEVBiMMTRtvt9lCtRttNAWimEoC01jq1tMRYS/L2QTrecIv7OoifEwWFiMh6pz2QliKaQTldkJwuqv8TBGTCIj69sYMBG26ZCPVM8ShCnA+rA2oqmdJv+WmyChiIZ+RZhaWIuR5TiOIs8wl4UKfL9vUR/IKKyb/fYAw5E+cl3z/hqKPoMWvSJgtDwDGYV0pW2Ln+Q4pimQC9rRfZtpaeoybCopJrERkJKoxYR1s48jW+qddpMJ/FVY2AirjLxv3nFTkyElQbcSWwIsRwZMLntLRanf7qqvCEUiaaDzzPklbmxip258lPILlVWfK23WCpquq3fZ6iia/rvU7rKvs4isMszZep3ZU8xN68yvlKEXrLOWE/bVPkupyb56O0NznKfUbq3kxBgErHDcUZVDXdEZ3yRHZf8nWHX7Gc5rc9q8TIQVDuKQnQdhCcWQEMPQX1Fpcq7juEqrax0FWNECK9xcyHof7Iyg08iVFzqsW225IMGzLfW5Vqe9fj9n4WFxfB0UWOG6gl3FvTszQ13AsEBPal3to8hKp8lKbPZ4MMJpq6GAr/IEVrjO/I4bwhL1mK3IBnaBluammsZGytO807aPyhHzFYQVralFzoaNHobevfHLI4cPnzyau734o7Cv+DD0SGApZVpEo5ChqDpReCM1CUzQZHnktIgkCbxoZTIVKuMlOSfCiloKrpSiApdNUsY1rGuKnE5bBdl4+mBmZnV1ZuZfp0ryIqzwAj1jHCYpaYpEFaqSqVE0UZiDiUwmzvjT4sJg6TsDVuicCCtqKaJUoWqZLAFNiOeu9BZfs82Va6o4mlmdFlR15u97I6zw7JJiyz0SWODu5cED4a5Sgk5QirNoAtKgOwusZDToTmVi/DHq1aWXcDAQw7ZffYO18rMQq21ba7O+XrMqDMuxI4eaK4Ul3mMymiq5njg7B1ihD8Sf5gyheg8C46DLQft5CYJMe/STTOCptLKytK3lK7M+31AQTll5YCd9NxJ9xwiiQKV7PHF2LrDUccpqzHUYM2yQ4CueFfLIA7yvqsuObStbuTL93Q8K3w9HYWsxSY49D7CSUW1cshoxyYjdxoeh6CukS+N7wFhpZT/+cvyHtenptbUfrwnBWl9loMYj5wEWKBXJ2jgshd2M2DCe1V2AVT8wXoptCsJCv/315+/K0tP3vPvpBgGV3tHqNpK9OoAlTwYFPwFUsXOFhcgTQA+SoIg3WEOwiWOkvsKMjQuCzjrx0/ETQWcdEpylN+dlGCxLkPZhgYSfpFAkdQYsUAkVySiaEmewMGxiHyFfdQ+ywrAXXgKwth1cuzZt68p0kLMKgjlLX7D+gMFXPLYDWMmospPCUOxx1WicheI1Fmxel4FhX2HYiwjyevU2UAfXHitPTy/P+mKDLsjKUZWD+cYhHcAK3WhnwUIS0TgLxDHiD158vhqJYeTLYOyblgnqIK/yrNpCvs/Sm0Fy90Y9+GsHlkYttpShKFKcFVbbpVFfEC/qPoRljUP6Cb4iu4Hx7TdPZ+0p51Htycr6Y0Mh/9Qv+4DBKK2EsWCpVRr+xhNC+x2VMAf7H5FAUrhoRmBpYi0VO/cUNO5a+MHjulwyOFgHBVb9DJ5/3nitNovXyUN7datws6N1l8HI3I90DEsLdtTaFPCCqhUCmwSFQo2qEkOwUlG1Vq5QSz6qAC1C26WpaEKKQgG26PHam4Z9dbEhI/utEcirz3/44ZFTv+3FCxwOfZXbTXFXIGeBBW5ZqPdaFEVCc5VCE0lEWhX/VCI18lEQsrK2SzWAG68UTZyyAr7C+vNRaaAdXS8CB7d4yE1r/szPx0uqjDlebtLV/+Uqco341PjMp8cRtfntS6mMvVQuj99fySS+cnTtiSA9h1Gst6p19+5NH7lzvAw7Cv4rW1h8HewfZEWvvwr4asArbvCO0Q1UzDLkqO4QUWxf8ayu2rHCyzAsy5IsuWjyvZBQW1bdxRgcMFxnLnlq8txR40c+OW4O5NOO
r/KX8b4ajuMFMyCXdupgMF/1M9D5fAz2BKx0PSCXGOoj8dV6ntXMZbgOsmqnf8dIVuivdjn4Oogs1OF6GIOxNfO+Sf1DdXAAOHhah0/gfTXrzjsgnBjekvSiCDJbh88Cw7WFrbdDNLGfPKzICOZ2oB6r8NGAFcjxl0EuMXUbYNVTOBzwkF43fLQOxyGrdrQkW/QVby2dHtfhqyCrdrP8jJmRSY+FEyaMng2hQEFBQUFBQUGdg/4FoRd4DGxXi7IAAAAASUVORK5CYII='
+export const DEFAULT_ILLUSTRATION: string =
+ 'iVBORw0KGgoAAAANSUhEUgAAADAAAAAwAQMAAABtzGvEAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAANQTFRFR3BMgvrS0gAAAAF0Uk5TAEDm2GYAAAANSURBVBjTY2AYBdQEAAFQAAGn4toWAAAAAElFTkSuQmCC'
diff --git a/frontend/src/main.ts b/frontend/src/main.ts
index 916a8432..1036df9c 100644
--- a/frontend/src/main.ts
+++ b/frontend/src/main.ts
@@ -16,6 +16,7 @@ import {
faAngleDown,
faCheck,
faSort,
+ faUpload,
faFile,
faFileArrowDown,
faFileExcel
@@ -32,6 +33,7 @@ library.add(
faAngleDown,
faCheck,
faSort,
+ faUpload,
faFile,
faFileArrowDown,
farCircleXmark,
diff --git a/frontend/src/router/index.ts b/frontend/src/router/index.ts
index 46302b84..c04420c6 100644
--- a/frontend/src/router/index.ts
+++ b/frontend/src/router/index.ts
@@ -1,5 +1,5 @@
-import StartView from '@/views/StartView.vue'
import CollectionsView from '@/views/CollectionsView.vue'
+import StartView from '@/views/StartView.vue'
import { createRouter, createWebHistory } from 'vue-router'
import NotFoundView from '@/views/NotFoundView.vue'
import StaticStartView from '@/views/StaticStartView.vue'
diff --git a/frontend/src/stores/stores.ts b/frontend/src/stores/stores.ts
index 1a0f1d52..0616efa1 100644
--- a/frontend/src/stores/stores.ts
+++ b/frontend/src/stores/stores.ts
@@ -6,7 +6,8 @@ import {
type AlertMessage,
type Environ,
AlertType,
- type Project
+ type Project,
+ type Archive
} from '@/constants'
import { v4 as uuid } from 'uuid'
import axios from 'axios'
@@ -16,20 +17,52 @@ export const useProjectStore = defineStore(
() => {
const lastProjectId: Ref = ref(null)
const projects: Ref = ref([])
+ const lastProject: Ref = ref(null)
+ const lastProjectArchives: Ref = ref([])
+ const lastProjectPendingArchive: Ref = ref(null)
function setProjects(newIds: Project[]) {
projects.value = newIds
+ if (lastProjectId.value) {
+ lastProject.value =
+ projects.value.filter((project) => project.id == lastProjectId.value).at(0) || null
+ }
}
function setLastProjectId(newId: string) {
lastProjectId.value = newId
+ lastProject.value =
+ projects.value.filter((project) => project.id == lastProjectId.value).at(0) || null
}
function clearLastProjectId() {
lastProjectId.value = null
+ lastProject.value = null
}
- return { projects, lastProjectId, setLastProjectId, clearLastProjectId, setProjects }
+ function setLastProjectArchives(archives: Archive[]) {
+ lastProjectArchives.value = archives
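+ // keep a direct ref to the first PENDING archive, if any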
+ lastProjectPendingArchive.value =
+ lastProjectArchives.value.filter((ark) => ark.status == 'PENDING').at(0) || null
+ }
+
+ function clearLastProjectArchives() {
+ lastProjectArchives.value = []
+ lastProjectPendingArchive.value = null
+ }
+
+ return {
+ projects,
+ lastProjectId,
+ lastProject,
+ lastProjectArchives,
+ lastProjectPendingArchive,
+ setLastProjectId,
+ clearLastProjectId,
+ setProjects,
+ setLastProjectArchives,
+ clearLastProjectArchives
+ }
},
{
persist: true
diff --git a/frontend/src/utils.ts b/frontend/src/utils.ts
index 8752cc0a..57d3198f 100644
--- a/frontend/src/utils.ts
+++ b/frontend/src/utils.ts
@@ -1,5 +1,5 @@
import { useAppStore, useProjectStore } from './stores/stores'
-import type { Project } from '@/constants'
+import type { Archive, Project } from '@/constants'
import router from '@/router'
/** Checks if a given project ID is valid */
@@ -61,3 +61,19 @@ export async function updateProjects() {
router.replace({ path: '/' })
}
}
+
+export async function refreshArchives() {
+ const storeProject = useProjectStore()
+ const storeApp = useAppStore()
+ try {
+ const response = await storeApp.axiosInstance.get(
+ `/projects/${storeProject.lastProjectId}/archives`
+ )
+ console.debug(response.data)
+ storeProject.setLastProjectArchives(response.data)
+ } catch (error: unknown) {
+ console.error('Unable to retrieve archives info', error)
+ storeApp.alertsError('Unable to retrieve archives info')
+ }
+}
diff --git a/frontend/src/views/ProjectView.vue b/frontend/src/views/ProjectView.vue
index edd75e26..60d68926 100644
--- a/frontend/src/views/ProjectView.vue
+++ b/frontend/src/views/ProjectView.vue
@@ -1,4 +1,6 @@
+
+
+
+
+