
Commit

wip 2
edelclaux committed Jan 2, 2025
1 parent 088baf9 commit bb72f06
Showing 3 changed files with 59 additions and 38 deletions.
9 changes: 4 additions & 5 deletions backend/geonature/core/imports/checks/dataframe/core.py
@@ -262,14 +262,13 @@ def check_datasets(
authorized_datasets = {
str(ds.unique_dataset_id): ds
for ds in db.session.execute(
# sa.select(TDatasets)
TDatasets.filter_by_creatable(
user=imprt.authors[0], module_code=module_code, object_code=object_code
)
.where(TDatasets.unique_dataset_id.in_(uuid))
.options(sa.orm.raiseload("*"))
).scalars()
# .unique()
# .where(TDatasets.unique_dataset_id.in_(uuid))
# .options(sa.orm.raiseload("*"))
)
.scalars()
.all()
}
authorized_ds_mask = valid_ds_mask & df[uuid_col].isin(authorized_datasets.keys())
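For context, the hunk above narrows the authorized-datasets lookup to datasets the import author may create entries in (TDatasets.filter_by_creatable), restricted to the UUIDs found in the file, and the result then drives a boolean mask over the dataframe. Below is a minimal, self-contained sketch of that masking step, using pandas only; the column name and UUID values are invented for illustration.

import pandas as pd

# Rows as they might appear in the import dataframe (UUIDs are made up).
df = pd.DataFrame(
    {
        "unique_dataset_id": [
            "11111111-1111-1111-1111-111111111111",  # dataset the author may use
            "22222222-2222-2222-2222-222222222222",  # dataset the author may not use
            None,  # missing UUID, already flagged as invalid
        ]
    }
)

# Normally keyed by the string form of each UUID returned by the
# TDatasets.filter_by_creatable query shown in the hunk above.
authorized_datasets = {"11111111-1111-1111-1111-111111111111": "own_dataset"}

valid_ds_mask = df["unique_dataset_id"].notna()
authorized_ds_mask = valid_ds_mask & df["unique_dataset_id"].isin(authorized_datasets.keys())
print(authorized_ds_mask.tolist())  # [True, False, False]

The surrounding check presumably uses the complement of this mask to report rows pointing at datasets the author is not allowed to import into.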
84 changes: 51 additions & 33 deletions backend/geonature/tests/imports/test_imports_synthese.py
@@ -137,41 +137,59 @@ def import_dataset(datasets, import_file_name):
ds.nomenclature_data_origin = previous_data_origin


@pytest.fixture()
def new_import(synthese_destination, users):
with db.session.begin_nested():
imprt = TImports(
destination=synthese_destination,
authors=[users["user"]],
)
db.session.add(imprt)
return imprt
# @pytest.fixture()
# def new_import(synthese_destination, users):
# # admin_user = User.query.filter(User.identifiant == "admin").one()
# with db.session.begin_nested():
# imprt = TImports(
# destination=synthese_destination,
# authors=[users["user"]],
# )
# db.session.add(imprt)
# return imprt


# @pytest.fixture()
# def uploaded_import(new_import, datasets, import_file_name):
# with db.session.begin_nested():
# with open(tests_path / "files" / "synthese" / import_file_name, "rb") as f:
# f.seek(0)
# content = f.read()
# if import_file_name == "jdd_to_import_file.csv":
# content = content.replace(
# b"VALID_DATASET_UUID",
# datasets["own_dataset"].unique_dataset_id.hex.encode("ascii"),
# )
# content = content.replace(
# b"FORBIDDEN_DATASET_UUID",
# datasets["orphan_dataset"].unique_dataset_id.hex.encode("ascii"),
# )
# content = content.replace(
# b"PRIVATE_DATASET_UUID",
# datasets["private"].unique_dataset_id.hex.encode("ascii"),
# )
# new_import.full_file_name = "jdd_to_import_file.csv"
# else:
# new_import.full_file_name = "valid_file.csv"
# new_import.source_file = content
# return new_import


@pytest.fixture()
def uploaded_import(new_import, datasets, import_file_name):
with db.session.begin_nested():
with open(tests_path / "files" / "synthese" / import_file_name, "rb") as f:
f.seek(0)
content = f.read()
if import_file_name == "jdd_to_import_file.csv":
content = content.replace(
b"VALID_DATASET_UUID",
datasets["own_dataset"].unique_dataset_id.hex.encode("ascii"),
)
content = content.replace(
b"FORBIDDEN_DATASET_UUID",
datasets["orphan_dataset"].unique_dataset_id.hex.encode("ascii"),
)
content = content.replace(
b"PRIVATE_DATASET_UUID",
datasets["private"].unique_dataset_id.hex.encode("ascii"),
)
new_import.full_file_name = "jdd_to_import_file.csv"
else:
new_import.full_file_name = "valid_file.csv"
new_import.source_file = content
return new_import
def uploaded_import(client, users):
set_logged_user(client, users["user"])

filename = "valid_file.csv"
file = (open(tests_path / "files" / "synthese" / filename, "rb"), filename)

r = client.post(
url_for("import.upload_file", destination="synthese"),
data={"file": file},
)
assert r.status_code == 200, r.data
unset_logged_user(client)
# db.session.refresh(uploaded_import)
# return uploaded_import


@pytest.fixture()
@@ -489,7 +507,7 @@ def get(import_name):
assert r.status_code == 200, r.data
assert r.json["id_import"] == imports["own_import"].id_import

def test_delete_import(self, users, imported_import):
def test_delete_import(self, g_permissions, users, imported_import):
imprt = imported_import
transient_table = imprt.destination.get_transient_table()
r = self.client.delete(url_for("import.delete_import", import_id=imprt.id_import))
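The rewritten uploaded_import fixture above now exercises the HTTP upload endpoint instead of building a TImports row directly, but it is still work in progress: the uploaded file handle is never closed and the fixture no longer returns anything. A hedged sketch of how it might be completed, assuming the upload route answers with the serialized import and that its id can be read from r.json["id_import"] (the same field the other tests in this file read):

@pytest.fixture()
def uploaded_import(client, users):
    set_logged_user(client, users["user"])
    filename = "valid_file.csv"
    # Open the file in a context manager so the handle is closed after the upload.
    with open(tests_path / "files" / "synthese" / filename, "rb") as f:
        r = client.post(
            url_for("import.upload_file", destination="synthese"),
            data={"file": (f, filename)},
        )
    assert r.status_code == 200, r.data
    unset_logged_user(client)
    # Assumption: the response contains the serialized import; fetch the ORM
    # instance so downstream fixtures keep receiving a TImports object.
    return db.session.get(TImports, r.json["id_import"])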
4 changes: 4 additions & 0 deletions backend/geonature/tests/test_gn_meta.py
@@ -598,6 +598,10 @@ def test_datasets_permissions(self, app, datasets, users):
)
assert set(sc(dsc.filter_by_scope(2, query=qs)).unique().all()) == set(
[
# Datasets expected to be returned for scope 2, i.e. the user's own
# datasets plus the datasets associated with the user's organism:
datasets["own_dataset"],
datasets["own_dataset_not_activated"],
datasets["associate_dataset"],
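As background for the assertion above: GeoNature data scopes follow the usual CRUVED convention (0 = no access, 1 = the user's own data, 2 = the data of the user's organism, 3 = everything), so filter_by_scope(2) is expected to return the user's own datasets plus the ones associated with their organism. A tiny, self-contained sketch of that rule with made-up names, not the actual implementation:

def dataset_visible(scope: int, is_owner: bool, same_organism: bool) -> bool:
    # 3: all data, 2: own data or organism's data, 1: own data only, 0: nothing.
    if scope >= 3:
        return True
    if scope == 2:
        return is_owner or same_organism
    if scope == 1:
        return is_owner
    return False

assert dataset_visible(2, is_owner=True, same_organism=False)      # e.g. own_dataset
assert dataset_visible(2, is_owner=False, same_organism=True)      # e.g. associate_dataset
assert not dataset_visible(2, is_owner=False, same_organism=False)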
