Skip to content

Commit

Permalink
test
Browse files Browse the repository at this point in the history
  • Loading branch information
edelclaux committed Jan 2, 2025
1 parent 4d6a468 commit 088baf9
Show file tree
Hide file tree
Showing 4 changed files with 29 additions and 25 deletions.
28 changes: 14 additions & 14 deletions backend/geonature/core/gn_meta/models/datasets.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@
from geonature.core.gn_permissions.tools import get_scopes_by_action
from geonature.core.gn_commons.models import cor_field_dataset, cor_module_dataset

from ref_geo.models import LAreas
from .commons import *


Expand Down Expand Up @@ -202,21 +201,21 @@ def filter_by_scope(cls, scope, *, query, user=None):
elif scope in (1, 2):
ors = [
cls.id_digitizer == user.id_role,
cls.cor_dataset_actor.any(id_role=user.id_role),
cls.acquisition_framework.has(id_digitizer=user.id_role),
cls.acquisition_framework.has(
TAcquisitionFramework.cor_af_actor.any(id_role=user.id_role),
),
# cls.cor_dataset_actor.any(id_role=user.id_role),
# cls.acquisition_framework.has(id_digitizer=user.id_role),
# cls.acquisition_framework.has(
# TAcquisitionFramework.cor_af_actor.any(id_role=user.id_role),
# ),
]
# if organism is None => do not filter on id_organism even if level = 2
if scope == 2 and user.id_organisme is not None:
ors += [
cls.cor_dataset_actor.any(id_organism=user.id_organisme),
cls.acquisition_framework.has(
TAcquisitionFramework.cor_af_actor.any(id_organism=user.id_organisme),
),
]
whereclause = or_(*ors)
# if scope == 2 and user.id_organisme is not None:
# ors += [
# cls.cor_dataset_actor.any(id_organism=user.id_organisme),
# cls.acquisition_framework.has(
# TAcquisitionFramework.cor_af_actor.any(id_organism=user.id_organisme),
# ),
# ]
# whereclause = or_(*ors)
return query.where(whereclause)

@qfilter(query=True)
Expand Down Expand Up @@ -322,6 +321,7 @@ def filter_by_creatable(cls, module_code, *, query, user=None, object_code=None)
@qfilter(query=True)
def filter_by_areas(cls, areas, *, query):
from geonature.core.gn_synthese.models import Synthese
from ref_geo.models import LAreas

areaFilter = []
for id_area in areas:
Expand Down
2 changes: 1 addition & 1 deletion backend/geonature/core/gn_synthese/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -434,7 +434,7 @@ class Synthese(DB.Model):
meta_update_date = DB.Column(DB.DateTime, server_default=FetchedValue())
last_action = DB.Column(DB.Unicode)

areas = relationship(LAreas, secondary=corAreaSynthese, backref="synthese_obs")
# areas = relationship(LAreas, secondary=corAreaSynthese, backref="synthese_obs")
area_attachment = relationship(LAreas, foreign_keys=[id_area_attachment])
validations = relationship(TValidations, backref="attached_row")
last_validation = relationship(last_validation, uselist=False, viewonly=True)
Expand Down
19 changes: 10 additions & 9 deletions backend/geonature/core/imports/checks/dataframe/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -234,9 +234,10 @@ def check_datasets(

datasets = {
str(ds.unique_dataset_id): ds
for ds in TDatasets.query.filter(TDatasets.unique_dataset_id.in_(uuid))
.options(sa.orm.joinedload(TDatasets.nomenclature_data_origin))
.options(sa.orm.raiseload("*"))
for ds in TDatasets.query.filter(TDatasets.unique_dataset_id.in_(uuid)).options(
sa.orm.joinedload(TDatasets.nomenclature_data_origin)
)
# .options(sa.orm.raiseload("*"))
.all()
}
valid_ds_mask = df[uuid_col].isin(datasets.keys())
Expand All @@ -258,21 +259,21 @@ def check_datasets(
}

# Warning: we check only the permissions of the first author, but currently there is only one author per import.

authorized_datasets = {
str(ds.unique_dataset_id): ds
for ds in db.session.execute(
# sa.select(TDatasets)
TDatasets.filter_by_creatable(
user=imprt.authors[0], module_code=module_code, object_code=object_code
)
# .where(TDatasets.unique_dataset_id.in_(uuid))
.where(TDatasets.unique_dataset_id.in_(uuid))
.options(sa.orm.raiseload("*"))
)
.scalars()
).scalars()
# .unique()
.all()
}
authorized_ds_mask = df[uuid_col].isin(authorized_datasets.keys())
unauthorized_ds_mask = valid_ds_mask & ~authorized_ds_mask
authorized_ds_mask = valid_ds_mask & df[uuid_col].isin(authorized_datasets.keys())
unauthorized_ds_mask = ~authorized_ds_mask
if unauthorized_ds_mask.any():
yield {
"error_code": ImportCodeError.DATASET_NOT_AUTHORIZED,
Expand Down
5 changes: 4 additions & 1 deletion backend/geonature/tests/imports/test_imports_synthese.py
Original file line number Diff line number Diff line change
Expand Up @@ -219,7 +219,10 @@ def fieldmapping(import_file_name, autogenerate, import_dataset):
}
for field in bib_fields
}
fieldmapping["unique_dataset_id"] = {"default_value": str(import_dataset.unique_dataset_id)}
fieldmapping["unique_dataset_id"] = {
# "column_src": "jdd_uuid",
"default_value": str(import_dataset.unique_dataset_id),
}

return fieldmapping

Expand Down

0 comments on commit 088baf9

Please sign in to comment.