fix: Problem downloading all datasets
A bug meant that the temporary zip folders created for bulk downloads were never deleted, which eventually caused the server to crash in production.
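For context, the fix follows the standard caller-owned scratch-directory pattern: create a temporary directory per request, build the archive inside it, and always remove it in a finally block. The sketch below only illustrates that pattern; it is not the repository code, and build_zip is a hypothetical placeholder for whatever writes the archive.

import os
import shutil
import tempfile

def serve_generated_zip(build_zip):
    """Minimal sketch of the cleanup pattern: the caller owns the temp dir."""
    temp_dir = tempfile.mkdtemp()  # per-request scratch space
    zip_path = os.path.join(temp_dir, "all_datasets.zip")
    try:
        build_zip(zip_path)  # write the archive into the scratch dir
        with open(zip_path, "rb") as f:  # read the bytes back before the dir disappears
            return f.read()
    finally:
        shutil.rmtree(temp_dir, ignore_errors=True)  # never leak the folder, even on error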
drorganvidez committed Nov 29, 2024
1 parent 01ebc4b commit 4cffd09
Showing 2 changed files with 19 additions and 13 deletions.
26 changes: 18 additions & 8 deletions app/modules/dataset/routes.py
@@ -3,6 +3,7 @@
import os
import json
import shutil
import tempfile

from flask import (
abort,
@@ -200,15 +201,24 @@ def download_dataset(dataset_id):

@dataset_bp.route("/dataset/download/all", methods=["GET"])
def download_all_dataset():
zip_path = dataset_service.zip_all_datasets()
# Create a temporary directory
temp_dir = tempfile.mkdtemp()
zip_path = os.path.join(temp_dir, "all_datasets.zip")

# Get the current date in the desired format (e.g., YYYYMMDD)
current_date = datetime.now().strftime("%Y_%m_%d")

# Build the file name with the date
zip_filename = f"uvlhub_bulk_{current_date}.zip"

return send_file(zip_path, as_attachment=True, download_name=zip_filename)
try:
# Generate the ZIP file
dataset_service.zip_all_datasets(zip_path)

# Build the file name with the date
current_date = datetime.now().strftime("%Y_%m_%d")
zip_filename = f"uvlhub_bulk_{current_date}.zip"

# Send the file as the response
return send_file(zip_path, as_attachment=True, download_name=zip_filename)
finally:
# Make sure the temporary folder is removed after Flask has served the file
if os.path.exists(temp_dir):
shutil.rmtree(temp_dir)


@dataset_bp.route("/doi/<path:doi>/", methods=["GET"])
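A quick way to exercise the new route is sketched below. Only the /dataset/download/all path comes from the diff above; the host, port, and output file name are assumptions for a local development setup.

import requests

# Assumed local dev server; adjust host and port to your deployment.
resp = requests.get("http://localhost:5000/dataset/download/all")
resp.raise_for_status()

# Save the bulk archive locally; the server names it uvlhub_bulk_<YYYY_MM_DD>.zip.
with open("uvlhub_bulk.zip", "wb") as f:
    f.write(resp.content)

print(resp.headers.get("Content-Disposition"))  # shows the dated download name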
6 changes: 1 addition & 5 deletions app/modules/dataset/services.py
@@ -302,10 +302,7 @@ def zip_dataset(self, dataset: DataSet) -> str:

return temp_dir

def zip_all_datasets(self) -> str:
temp_dir = tempfile.mkdtemp()
zip_path = os.path.join(temp_dir, "all_datasets.zip")

def zip_all_datasets(self, zip_path: str):
with ZipFile(zip_path, "w") as zipf:
for user_dir in os.listdir("uploads"):
user_path = os.path.join("uploads", user_dir)
@@ -327,7 +324,6 @@ def zip_all_datasets(self) -> str:
full_path,
arcname=os.path.join(dataset_dir, relative_path),
)
return zip_path


class AuthorService(BaseService):
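A small regression check for the new signature could look like the sketch below. It is hypothetical: DataSetService and its import path are assumptions (the diff only shows the method), and the check assumes an uploads/ directory with at least one dataset exists in the working directory.

import os

from app.modules.dataset.services import DataSetService  # assumed class name and path

def test_zip_all_datasets_writes_to_caller_path(tmp_path):
    zip_path = tmp_path / "all_datasets.zip"

    # The service now writes to the path it is given instead of creating its own temp dir.
    DataSetService().zip_all_datasets(str(zip_path))

    assert zip_path.exists()
    assert os.path.getsize(zip_path) > 0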
