-
+ |
Go to "{{ applet_name }}" invitation page
@@ -40,7 +40,7 @@
"{{ applet_name }}" in the free MindLogger app on your mobile
device, if you follow three simple steps (see the
user guide
for greater detail):
diff --git a/src/apps/mailing/static/templates/invitation_registered_user_fr.html b/src/apps/mailing/static/templates/invitation_registered_user_fr.html
new file mode 100644
index 00000000000..2c9c56aef5f
--- /dev/null
+++ b/src/apps/mailing/static/templates/invitation_registered_user_fr.html
@@ -0,0 +1,86 @@
+
+
+{% include 'header.html' %}
+
+
+
+ {{ first_name }}!
+ |
+
+
+
+ Bienvenue dans "{{ applet_name }}"!
+ |
+
+
+
+ Vous avez été invité à devenir {{ role }} de "{{ applet_name
+ }}", dans l'application MindLogger (voir ci-dessous).
+ |
+
+
+
+ Pour accepter cette invitation, cliquez ci-dessous et votre
+ navigateur Internet s'ouvrira sur la page d'invitation
+ de "{{ applet_name }}":
+ |
+
+
+
+
+ Aller à la page d'invitation "{{ applet_name }}".
+
+ |
+
+
+
+ Après avoir accepté l'invitation, vous pourrez accéder à
+ "{{ applet_name }}" dans l'application gratuite MindLogger sur votre
+ appareil mobile, en suivant trois étapes simples (voir
+
+ le guide de l'utilisateur
+ pour plus de détails):
+ |
+
+
+
+
+ -
+ Installez l'application MindLogger sur votre appareil
+ mobile, si elle n'est pas déjà installée.
+
+ -
+ Ouvrez l'application MindLogger sur votre appareil mobile et
+ connectez-vous.
+
+ -
+ Appuyez sur "{{ applet_name }}" sur l'écran d'accueil de
+ MindLogger et c'est tout ! Si "{{ applet_name }}"
+ n'apparaît pas, rafraîchissez l'écran en faisant glisser
+ votre doigt vers le bas à partir du haut et une roue
+ devrait apparaître pendant le chargement
+ de "{{ applet_name }}".
+
+
+ |
+
+
+
+ Merci d'avoir accepté l'invitation à utiliser "{{ applet_name }}"!
+ |
+
+
+
+ -L'équipe MindLogger
+ |
+
+
+{% include 'footers/footer_info_fr.html' %}
+
+
+
\ No newline at end of file
diff --git a/src/apps/migrate/answers/answer_item_service.py b/src/apps/migrate/answers/answer_item_service.py
index 27b5600825c..6de9c0dc49d 100644
--- a/src/apps/migrate/answers/answer_item_service.py
+++ b/src/apps/migrate/answers/answer_item_service.py
@@ -9,26 +9,30 @@
class AnswerItemMigrationService:
- async def create_item(
- self,
- *,
- regular_session,
- regular_or_arbitary_session,
- mongo_answer: dict,
- **kwargs,
- ):
- identifier = mongo_answer["meta"]["subject"].get("identifier", "")
+ async def get_respondent_id(self, regular_session, mongo_answer):
respondent_mongo_id = Profile().findOne(
{"_id": mongo_answer["meta"]["subject"].get("@id")}
)["userId"]
if respondent_mongo_id:
- respondent_id = mongoid_to_uuid(respondent_mongo_id)
+ return mongoid_to_uuid(respondent_mongo_id)
else:
anon_respondent = await MigrateUsersMCRUD(
regular_session
).get_anonymous_respondent()
- respondent_id = anon_respondent.id
+ return anon_respondent.id
+ async def create_item(
+ self,
+ *,
+ regular_session,
+ regular_or_arbitary_session,
+ mongo_answer: dict,
+ **kwargs,
+ ):
+ identifier = mongo_answer["meta"]["subject"].get("identifier", "")
+ respondent_id = await self.get_respondent_id(
+ regular_session, mongo_answer
+ )
answer_item = await AnswerItemsCRUD(
regular_or_arbitary_session
).create(
@@ -63,7 +67,7 @@ async def create_item(
def _get_migrated_data(self, identifier):
if not identifier:
return None
- return {"is_identifier_encrypted": True}
+ return {"is_identifier_encrypted": False}
def _get_item_ids(self, mongo_answer):
responses_keys = list(mongo_answer["meta"]["responses"])
@@ -74,12 +78,19 @@ def _get_item_ids(self, mongo_answer):
for k in list(mongo_answer["meta"]["responses"])
]
+ item_ids_from_url = [url.split("/")[-1] for url in responses_keys]
return [
str(mongoid_to_uuid(i["_id"]))
for i in Item().find(
query={
"meta.activityId": mongo_answer["meta"]["activity"]["@id"],
- "meta.screen.schema:url": {"$in": responses_keys},
+                # Match either by the last URL segment (the screen @id) or by
+                # the full meta.screen.schema:url, because meta.screen.@id
+                # values may start with '/'.
+ "$or": [
+ {"meta.screen.@id": {"$in": item_ids_from_url}},
+ {"meta.screen.schema:url": {"$in": responses_keys}},
+ ],
}
)
]
@@ -88,3 +99,52 @@ def _fromtimestamp(self, timestamp: int | None):
if timestamp is None:
return None
return datetime.utcfromtimestamp((float(timestamp) / 1000))
+
+ async def create_or_update_assessment(
+ self,
+ regular_session,
+ regular_or_arbitary_session,
+ mongo_answer: dict,
+ **kwargs,
+ ):
+ respondent_id = await self.get_respondent_id(
+ regular_session, mongo_answer
+ )
+ crud = AnswerItemsCRUD(regular_or_arbitary_session)
+ assessment = await crud.get_assessment(
+ answer_id=kwargs["answer_id"], user_id=respondent_id
+ )
+ identifier = mongo_answer["meta"]["subject"].get("identifier", "")
+ data = dict(
+ created_at=mongo_answer["created"],
+ updated_at=mongo_answer["updated"],
+ answer_id=kwargs["answer_id"],
+ answer=mongo_answer["meta"]["dataSource"],
+ item_ids=self._get_item_ids(mongo_answer),
+ events=mongo_answer["meta"].get("events", ""),
+ respondent_id=respondent_id,
+        identifier=identifier or None,
+ user_public_key=str(mongo_answer["meta"]["userPublicKey"]),
+ scheduled_datetime=self._fromtimestamp(
+ mongo_answer["meta"].get("scheduledTime")
+ ),
+ start_datetime=self._fromtimestamp(
+ mongo_answer["meta"].get("responseStarted")
+ ),
+ end_datetime=self._fromtimestamp(
+ mongo_answer["meta"].get("responseCompleted")
+ ),
+ is_assessment=kwargs["is_assessment"],
+ migrated_data=self._get_migrated_data(identifier),
+ assessment_activity_id=mongo_answer["activity_id_version"],
+ )
+ if not assessment:
+ data["id"] = mongoid_to_uuid(mongo_answer["_id"])
+ data["migrated_date"] = datetime.utcnow()
+ await crud.create(AnswerItemSchema(**data))
+
+ else:
+ data["id"] = assessment.id
+ data["migrated_date"] = assessment.migrated_date
+ data["migrated_updated"] = datetime.utcnow()
+ await crud.update(AnswerItemSchema(**data))
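
Review note: a minimal, self-contained sketch (not part of the diff) of the
matching strategy _get_item_ids now uses. The last path segment of each
response URL is treated as the screen @id, and the $or query accepts either
form; the URL below is invented for illustration.

    responses_keys = ["https://raw.example.org/protocol/activity1/screen1"]
    item_ids_from_url = [url.split("/")[-1] for url in responses_keys]
    assert item_ids_from_url == ["screen1"]
    query = {
        "$or": [
            {"meta.screen.@id": {"$in": item_ids_from_url}},
            {"meta.screen.schema:url": {"$in": responses_keys}},
        ]
    }
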
diff --git a/src/apps/migrate/answers/answer_note_service.py b/src/apps/migrate/answers/answer_note_service.py
index e151badf321..6487c7a2066 100644
--- a/src/apps/migrate/answers/answer_note_service.py
+++ b/src/apps/migrate/answers/answer_note_service.py
@@ -2,7 +2,6 @@
from apps.answers.crud.notes import AnswerNotesCRUD
from apps.answers.db.schemas import AnswerNoteSchema
from apps.migrate.utilities import mongoid_to_uuid
-from apps.shared.encryption import encrypt
from infrastructure.database import atomic
diff --git a/src/apps/migrate/answers/run.py b/src/apps/migrate/answers/run.py
index 0915dd540e4..d994716c7eb 100644
--- a/src/apps/migrate/answers/run.py
+++ b/src/apps/migrate/answers/run.py
@@ -16,6 +16,7 @@
from apps.migrate.answers.user_applet_access import (
MigrateUserAppletAccessService,
)
+from apps.migrate.answers.utills import get_arguments
from apps.migrate.run import get_applets_ids
from apps.migrate.services.mongo import Mongo
@@ -23,7 +24,6 @@
configure_report,
migration_log,
mongoid_to_uuid,
- get_arguments,
intersection,
)
from apps.workspaces.crud.user_applet_access import UserAppletAccessCRUD
@@ -32,6 +32,7 @@
from apps.activities.crud import (
ActivityHistoriesCRUD,
ActivityItemHistoriesCRUD,
+ ActivitiesCRUD,
)
from apps.activities.db.schemas import (
ActivityHistorySchema,
@@ -39,6 +40,31 @@
)
+APPLETS_WITH_ISSUES_DONT_MIGRATE_ANSWERS = {
+ "623cd7ee5197b9338bdaf218",
+ "6116c49e66f506a576da4f03",
+ "5fd28283c47c585b7c73354b",
+ "5f0e35523477de8b4a528dd0",
+ "61f3415f62485608c74c1f0b",
+ "61f3423962485608c74c1f45",
+ "623cb24d5197b9338bdaed65",
+ "623ce1695197b9338bdaf388",
+ "61f3419a62485608c74c1f25",
+ "63d3d579b71996780cdf409a",
+ "636533965cb70043112200a9",
+ "636936b352ea02101467640d",
+ "631aba1db7ee970ffa9009e3",
+ "623ce52a5197b9338bdaf4b6",
+ "623dfaf95197b9338bdaf8c5",
+ "62f16366acd35a39e99b57ec",
+ "636425cf5cb700431121fe46",
+ "636532fd5cb700431121ff93",
+ "636936ca52ea021014676437",
+ "636936e652ea02101467645b",
+ "636e942c52ea0234e1f4ec25",
+}
+
+
class AnswersMigrateFacade:
anonymous_respondent_answers = 0
total_answers = 0
@@ -53,16 +79,23 @@ def __init__(self):
self.answer_item_migrate_service = AnswerItemMigrationService()
self.answer_note_migrate_service = AnswerNoteMigrateService()
- async def migrate(self, workspace, applets):
+ async def migrate(self, workspace, applets, assessments_only, update_data):
regular_session = session_manager.get_session()
applets_ids = await self._get_allowed_applets_ids(workspace, applets)
- applets_ids = [mongoid_to_uuid(applet_id) for applet_id in applets_ids]
+ applets_ids = [
+ mongoid_to_uuid(applet_id)
+ for applet_id in applets_ids
+ if applet_id not in APPLETS_WITH_ISSUES_DONT_MIGRATE_ANSWERS
+ ]
- await self._wipe_answers_data(regular_session, applets_ids)
+ # if not update_data:
+ # answer = input("Please type 'delete' to delete all answers data")
+ # if answer == "delete":
+ # await self._wipe_answers_data(regular_session, applets_ids)
async for answer_with_files in self._collect_migratable_answers(
- applets_ids
+ applets_ids, assessments_only
):
self.total_answers += 1
query = answer_with_files["query"]
@@ -95,7 +128,7 @@ async def migrate(self, workspace, applets):
mongo_answer["meta"]["reviewing"]["responseId"]
)
await self._create_reviewer_assessment(
- regular_session, mongo_answer
+ regular_session, mongo_answer, assessments_only
)
else:
@@ -173,7 +206,7 @@ async def migrate(self, workspace, applets):
async with atomic(regular_session):
await self._migrate_answers_items(
- regular_session, self.answer_items_data
+ regular_session, self.answer_items_data, assessments_only
)
self._log_migration_results()
@@ -221,7 +254,9 @@ async def _get_regular_or_arbitary_session(self, session, applet_id):
return arbitary_session
return session
- async def _collect_migratable_answers(self, applets_ids: list[uuid.UUID]):
+ async def _collect_migratable_answers(
+ self, applets_ids: list[uuid.UUID], assessments_only: bool = False
+ ):
migratable_data_count = 0
regular_session = session_manager.get_session()
@@ -232,8 +267,12 @@ async def _collect_migratable_answers(self, applets_ids: list[uuid.UUID]):
).get_answers_migration_params(applets_ids)
for answer_migration_params in answers_migration_params:
+ kwargs = {
+ **answer_migration_params,
+ "assessments_only": assessments_only,
+ }
answer_migration_queries = self.mongo.get_answer_migration_queries(
- **answer_migration_params
+ **kwargs
)
anwswers_with_files = self.mongo.get_answers_with_files(
@@ -247,7 +286,9 @@ async def _collect_migratable_answers(self, applets_ids: list[uuid.UUID]):
migratable_data_count += 1
- async def _migrate_answers_items(self, regular_session, answer_items_data):
+ async def _migrate_answers_items(
+ self, regular_session, answer_items_data, assessments_only
+ ):
for i, answer_item_data in enumerate(answer_items_data):
migration_log.debug(
f"Migrating {i} answer_item of {len(answer_items_data)}"
@@ -271,11 +312,18 @@ async def _migrate_answers_items(self, regular_session, answer_items_data):
)
try:
async with atomic(regular_or_arbitary_session):
- await self.answer_item_migrate_service.create_item(
- regular_session=regular_session,
- regular_or_arbitary_session=regular_or_arbitary_session,
- **answer_item_data,
- )
+ if assessments_only:
+ await self.answer_item_migrate_service.create_or_update_assessment(
+ regular_session=regular_session,
+ regular_or_arbitary_session=regular_or_arbitary_session,
+ **answer_item_data,
+ )
+ else:
+ await self.answer_item_migrate_service.create_item(
+ regular_session=regular_session,
+ regular_or_arbitary_session=regular_or_arbitary_session,
+ **answer_item_data,
+ )
except Exception as e:
self.error_answers_migration.append((answer_item_data, str(e)))
continue
@@ -322,7 +370,12 @@ def _log_migration_results(self):
f"Anonymous users answers count: {self.anonymous_respondent_answers}"
)
- async def _create_reviewer_assessment(self, regular_session, mongo_answer):
+ async def _create_reviewer_assessment(
+ self,
+ regular_session,
+ mongo_answer,
+        assessments_only,
+ ):
# check if reviewer assessment activity for this answers applet version exists
original_answer = self.mongo.db["item"].find_one(
{"_id": mongo_answer["meta"]["reviewing"]["responseId"]}
@@ -333,13 +386,9 @@ async def _create_reviewer_assessment(self, regular_session, mongo_answer):
)
original_applet_version = original_answer["meta"]["applet"]["version"]
- all_assessment_activities = await ActivityHistoriesCRUD(
+ all_assessment_activities = await ActivitiesCRUD(
regular_session
- ).retrieve_by_applet_ids(
- [
- f"{original_applet_id}_{original_applet_version}",
- ]
- )
+ ).get_by_applet_id(original_applet_id)
reviewer_assessment_activities = [
_a for _a in all_assessment_activities if _a.is_reviewable
]
@@ -352,7 +401,7 @@ async def _create_reviewer_assessment(self, regular_session, mongo_answer):
)
# if not, create it
- if not reviewer_assessment_activities:
+        if not reviewer_assessment_activities and not assessments_only:
missing_applet_version = mongo_answer["meta"]["applet"]["version"]
duplicating_activity_res = await ActivityHistoriesCRUD(
@@ -386,9 +435,31 @@ async def _create_reviewer_assessment(self, regular_session, mongo_answer):
item = await ActivityItemHistoriesCRUD(
regular_session
)._create(ActivityItemHistorySchema(**item))
+        elif assessments_only and reviewer_assessment_activities:
+ activity = reviewer_assessment_activities[0]
+ id_version = (
+ f"{activity.id}_{mongo_answer['meta']['applet']['version']}"
+ )
+ activity_hist = await ActivityHistoriesCRUD(
+ regular_session
+ ).get_by_id(id_version)
+ if activity_hist:
+ mongo_answer["activity_id_version"] = activity_hist.id_version
+ else:
+ raise Exception(
+ f"Assessment activity history {id_version} does not "
+ f"exist for applet {original_applet_id}"
+ )
if __name__ == "__main__":
args = get_arguments()
configure_report(migration_log, args.report_file)
- asyncio.run(AnswersMigrateFacade().migrate(args.workspace, args.applet))
+ asyncio.run(
+ AnswersMigrateFacade().migrate(
+ args.workspace,
+ args.applet,
+ args.assessments_only,
+ args.update_data,
+ )
+ )
diff --git a/src/apps/migrate/answers/user_applet_access.py b/src/apps/migrate/answers/user_applet_access.py
index 5333f442cfd..13cf8437721 100644
--- a/src/apps/migrate/answers/user_applet_access.py
+++ b/src/apps/migrate/answers/user_applet_access.py
@@ -34,7 +34,7 @@ async def add_role_for_legacy_deleted_respondent(
self._applet_id,
Role.RESPONDENT.value,
)
-
+ nickname = meta.pop("nickname", None)
if not access_schema:
access_schema = await UserAppletAccessCRUD(self.session).save(
UserAppletAccessSchema(
@@ -44,6 +44,7 @@ async def add_role_for_legacy_deleted_respondent(
owner_id=self._user_id,
invitor_id=self._user_id,
meta=meta,
+ nickname=nickname,
)
)
diff --git a/src/apps/migrate/answers/utills.py b/src/apps/migrate/answers/utills.py
new file mode 100644
index 00000000000..5156fe12d76
--- /dev/null
+++ b/src/apps/migrate/answers/utills.py
@@ -0,0 +1,37 @@
+import argparse
+
+from pydantic import BaseModel, validator
+
+
+class Params(BaseModel):
+ class Config:
+ orm_mode = True
+
+ workspace: str | None = None
+ applet: list[str] | None = None
+ report_file: str | None = None
+ assessments_only: bool = False
+ update_data: bool = True
+
+ @validator("applet", pre=True)
+ def to_array(cls, value, values):
+ if isinstance(value, str):
+ return value.split(",")
+
+        return value
+
+    @validator("assessments_only", "update_data", pre=True)
+    def to_bool(cls, value):
+        return bool(value)
+
+
+def get_arguments() -> Params:
+ parser = argparse.ArgumentParser(argument_default=argparse.SUPPRESS)
+ parser.add_argument("-w", "--workspace", type=str, required=False)
+ parser.add_argument("-a", "--applet", type=str, required=False)
+ parser.add_argument("-r", "--report_file", type=str, required=False)
+ parser.add_argument("--assessments_only", type=bool, required=False)
+ parser.add_argument("--update_data", type=bool, required=False)
+ args = parser.parse_args()
+ arguments = Params.from_orm(args)
+ return arguments
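
Review note: argparse's type=bool treats any non-empty string as truthy, so
"--update_data False" still parses as True; the pre-validators moved into
Params above only coerce the already-parsed value and do not change that. A
standalone demonstration (flag value invented):

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("--update_data", type=bool, default=True)
    args = parser.parse_args(["--update_data", "False"])
    print(args.update_data)  # True, because bool("False") is True
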
diff --git a/src/apps/migrate/data_description/applet_user_access.py b/src/apps/migrate/data_description/applet_user_access.py
index e81dcb81d3e..d823a2aae8a 100644
--- a/src/apps/migrate/data_description/applet_user_access.py
+++ b/src/apps/migrate/data_description/applet_user_access.py
@@ -38,6 +38,8 @@ def insert_stmt(self) -> str:
"id",
"migrated_date",
"migrated_updated",
+ "created_at",
+ "updated_at",
"is_deleted",
"is_pinned",
"role",
@@ -45,24 +47,32 @@ def insert_stmt(self) -> str:
"applet_id",
"owner_id",
"invitor_id",
+ "nickname",
"meta"
)
VALUES (
%s,
now() at time zone ('utc'),
now() at time zone ('utc'),
+ now() at time zone ('utc'),
+ now() at time zone ('utc'),
FALSE,
- %s, %s, %s, %s, %s, %s, %s
+ %s, %s, %s, %s, %s, %s, %s, %s
)
"""
def update_stmt(self):
return """
UPDATE user_applet_accesses
- SET meta = jsonb_set(
- COALESCE(meta, '{}'::jsonb),
- '{legacyProfileId}',%s, true
- )
+ SET
+ meta = jsonb_set(
+ COALESCE(meta, '{}'::jsonb),
+ '{legacyProfileId}',%s, true
+ ),
+ "migrated_date" = now() at time zone ('utc'),
+ "migrated_updated" = now() at time zone ('utc'),
+ "created_at" = now() at time zone ('utc'),
+ "updated_at" = now() at time zone ('utc')
WHERE
role = %s AND
user_id = %s AND
@@ -71,6 +81,8 @@ def update_stmt(self):
"""
def values(self) -> tuple:
+ nickname = self.meta.pop("nickname", None)
+
return (
str(uuid.uuid4()),
self.is_pinned,
@@ -79,6 +91,7 @@ def values(self) -> tuple:
str(self.applet_id),
str(self.owner_id),
str(self.inviter_id),
+ nickname,
self.dump_meta(),
)
diff --git a/src/apps/migrate/data_description/library_dao.py b/src/apps/migrate/data_description/library_dao.py
index 646f2f60da6..9eb7fae96dc 100644
--- a/src/apps/migrate/data_description/library_dao.py
+++ b/src/apps/migrate/data_description/library_dao.py
@@ -15,6 +15,8 @@ class LibraryDao:
updated_at: datetime.datetime
migrated_date: datetime.datetime
migrated_updated: datetime.datetime
+ display_name: str
+ name: str
is_deleted: bool = False
def __hash__(self):
@@ -41,7 +43,6 @@ def values(self) -> tuple:
class ThemeDao:
id: uuid.UUID
creator_id: uuid.UUID
- applet_id: uuid.UUID
name: str
logo: str | None
small_logo: str | None
diff --git a/src/apps/migrate/domain/activity_full.py b/src/apps/migrate/domain/activity_full.py
new file mode 100644
index 00000000000..5ccdad4d7f1
--- /dev/null
+++ b/src/apps/migrate/domain/activity_full.py
@@ -0,0 +1,20 @@
+import datetime
+import uuid
+from pydantic import Field
+
+from apps.activities.domain.activity_base import ActivityBase
+from apps.activities.domain.activity_full import ActivityItemFull
+from apps.shared.domain import InternalModel
+
+
+class ActivityItemMigratedFull(ActivityItemFull):
+ extra_fields: dict = Field(default_factory=dict)
+
+
+class ActivityMigratedFull(ActivityBase, InternalModel):
+ id: uuid.UUID
+ key: uuid.UUID
+ order: int
+ created_at: datetime.datetime
+ extra_fields: dict = Field(default_factory=dict)
+ items: list[ActivityItemMigratedFull] = Field(default_factory=list)
diff --git a/src/apps/migrate/domain/applet_full.py b/src/apps/migrate/domain/applet_full.py
index fb8d5acacf0..22febc0a119 100644
--- a/src/apps/migrate/domain/applet_full.py
+++ b/src/apps/migrate/domain/applet_full.py
@@ -1,13 +1,24 @@
import datetime
-from apps.applets.domain.applet_full import AppletFull
+from pydantic import Field
+from apps.applets.domain.base import AppletFetchBase
+from apps.migrate.domain.activity_full import ActivityMigratedFull
+from apps.migrate.domain.flow_full import FlowMigratedFull
+from apps.shared.domain import InternalModel
-class AppletMigratedFull(AppletFull):
+
+class AppletMigratedFull(AppletFetchBase, InternalModel):
migrated_date: datetime.datetime
migrated_updated: datetime.datetime
+ extra_fields: dict = Field(default_factory=dict)
+ activities: list[ActivityMigratedFull] = Field(default_factory=list)
+ activity_flows: list[FlowMigratedFull] = Field(default_factory=list)
-class AppletMigratedHistoryFull(AppletFull):
+class AppletMigratedHistoryFull(AppletFetchBase, InternalModel):
migrated_date: datetime.datetime
migrated_updated: datetime.datetime
+ extra_fields: dict = Field(default_factory=dict)
+ activities: list[ActivityMigratedFull] = Field(default_factory=list)
+ activity_flows: list[FlowMigratedFull] = Field(default_factory=list)
diff --git a/src/apps/migrate/domain/flow_full.py b/src/apps/migrate/domain/flow_full.py
new file mode 100644
index 00000000000..57932318eab
--- /dev/null
+++ b/src/apps/migrate/domain/flow_full.py
@@ -0,0 +1,7 @@
+from pydantic import Field
+
+from apps.activity_flows.domain.flow_full import FlowFull
+
+
+class FlowMigratedFull(FlowFull):
+ extra_fields: dict = Field(default_factory=dict)
diff --git a/src/apps/migrate/run.py b/src/apps/migrate/run.py
index bb49396ed13..6e9b56b06b0 100644
--- a/src/apps/migrate/run.py
+++ b/src/apps/migrate/run.py
@@ -491,6 +491,16 @@ async def get_applets_ids() -> list[str]:
"6307d801924264279508777d",
"6324c0afb7ee9765ba54229f",
"631aba1db7ee970ffa9009e3",
+ # library
+ "61b384f7d386d628d862eb76",
+ "61df0360bf09cb40db5a2b14",
+ "62b613e0b90b7f2ba9e1d2ae",
+ "6296531cb90b7f104d02e3f7",
+ "61e6e627bf09cb40db5a35d0",
+ "6249dc8d3b4f351025642c3f",
+ "6239e7695197b94689825f7e",
+ "625387043b4f351025643e7e",
+ "627be2ba0a62aa47962268a4",
]
for applet in applets:
migrating_applets.append(str(applet["_id"]))
@@ -645,9 +655,13 @@ def migrate_user_pins(
skipped += 1
continue
to_migrate.append(profile)
- rows_count = postgres.save_user_pins(to_migrate)
- migration_log.info(f"Inserted {rows_count} rows")
- migration_log.info("User pins migration end")
+ try:
+ rows_count = postgres.save_user_pins(to_migrate)
+ migration_log.info(f"Inserted {rows_count} rows")
+ except Exception as e:
+ migration_log.error(e)
+ finally:
+ migration_log.info("User pins migration end")
def migrate_folders(workspace_id: str | None, mongo, postgres):
@@ -667,11 +681,14 @@ def migrate_folders(workspace_id: str | None, mongo, postgres):
migration_log.info("Folders migration end")
-def migrate_library(applet_ids: list[ObjectId] | None, mongo, postgres):
+def migrate_library(
+ applet_ids: list[ObjectId] | None, mongo: Mongo, postgres: Postgres
+):
migration_log.info("Library & themes migration start")
lib_count = 0
theme_count = 0
- lib_set, theme_set = mongo.get_library(applet_ids)
+ lib_set = mongo.get_library(applet_ids)
+ theme_set = mongo.get_themes()
for lib in lib_set:
if lib.applet_id_version is None:
version = postgres.get_latest_applet_id_version(lib.applet_id)
@@ -683,14 +700,19 @@ def migrate_library(applet_ids: list[ObjectId] | None, mongo, postgres):
)
lib.search_keywords = keywords + lib.keywords
success = postgres.save_library_item(lib)
+
if success:
lib_count += 1
+ if lib.name != lib.display_name:
+ postgres.update_applet_name(
+ lib.applet_id, lib.name, lib.applet_id_version
+ )
for theme in theme_set:
success = postgres.save_theme_item(theme)
if success:
theme_count += 1
- postgres.add_theme_to_applet(theme.applet_id, theme.id)
+ # postgres.add_theme_to_applet(theme.applet_id, theme.id)
applet_themes = mongo.get_applet_theme_mapping()
applets_count = postgres.set_applets_themes(applet_themes)
@@ -831,7 +853,7 @@ async def migrate_public_links(postgres: Postgres, mongo: Mongo):
applet_mongo_ids = postgres.get_migrated_applets()
links = mongo.get_public_link_mappings(applet_mongo_ids)
await postgres.save_public_link(links)
- migration_log.info("Public links migration start")
+ migration_log.info("Public links migration end")
async def main(workspace_id: str | None, applets_ids: list[str] | None):
diff --git a/src/apps/migrate/services/activity_history_service.py b/src/apps/migrate/services/activity_history_service.py
index 2965afb12bf..215d5a042cb 100644
--- a/src/apps/migrate/services/activity_history_service.py
+++ b/src/apps/migrate/services/activity_history_service.py
@@ -1,8 +1,6 @@
-import uuid
-
from apps.activities.crud import ActivityHistoriesCRUD
from apps.activities.db.schemas import ActivityHistorySchema
-from apps.activities.domain.activity_full import ActivityFull
+from apps.migrate.domain.activity_full import ActivityMigratedFull
from apps.migrate.domain.applet_full import AppletMigratedFull
from apps.migrate.services.activity_item_history_service import (
ActivityItemHistoryMigrationService,
@@ -19,7 +17,7 @@ def __init__(self, session, applet: AppletMigratedFull, version: str):
self._applet_id_version = f"{applet.id}_{version}"
self.session = session
- async def add(self, activities: list[ActivityFull]):
+ async def add(self, activities: list[ActivityMigratedFull]):
activity_items = []
schemas = []
diff --git a/src/apps/migrate/services/activity_item_history_service.py b/src/apps/migrate/services/activity_item_history_service.py
index 7b3fdd038d2..76750669e9d 100644
--- a/src/apps/migrate/services/activity_item_history_service.py
+++ b/src/apps/migrate/services/activity_item_history_service.py
@@ -1,7 +1,7 @@
from apps.activities.crud import ActivityItemHistoriesCRUD
from apps.activities.db.schemas import ActivityItemHistorySchema
-from apps.activities.domain.activity_full import (
- ActivityItemFull,
+from apps.migrate.domain.activity_full import (
+ ActivityItemMigratedFull,
)
from apps.migrate.domain.applet_full import AppletMigratedFull
from apps.migrate.utilities import prepare_extra_fields_to_save
@@ -13,7 +13,7 @@ def __init__(self, session, version: str, applet: AppletMigratedFull):
self.session = session
self._applet = applet
- async def add(self, activity_items: list[ActivityItemFull]):
+ async def add(self, activity_items: list[ActivityItemMigratedFull]):
schemas = []
for item in activity_items:
@@ -38,9 +38,7 @@ async def add(self, activity_items: list[ActivityItemFull]):
updated_at=self._applet.updated_at,
migrated_date=self._applet.migrated_date,
migrated_updated=self._applet.migrated_updated,
- extra_fields=prepare_extra_fields_to_save(
- item.extra_fields
- ),
+ extra_fields={},
)
)
await ActivityItemHistoriesCRUD(self.session).create_many(schemas)
diff --git a/src/apps/migrate/services/activity_service.py b/src/apps/migrate/services/activity_service.py
index e92fb99eb0c..986bbc6f349 100644
--- a/src/apps/migrate/services/activity_service.py
+++ b/src/apps/migrate/services/activity_service.py
@@ -3,17 +3,10 @@
from apps.activities.crud import ActivitiesCRUD
from apps.activities.db.schemas import ActivitySchema
-from apps.activities.domain.activity_create import (
- ActivityCreate,
-)
-
-from apps.activities.domain.activity_full import ActivityFull
-from apps.activities.domain.activity_update import (
- ActivityUpdate,
- PreparedActivityItemUpdate,
-)
+from apps.activities.domain.activity_create import ActivityCreate
from apps.activities.services.activity_item import ActivityItemService
from apps.migrate.domain.activity_create import ActivityItemMigratedCreate
+from apps.migrate.domain.activity_full import ActivityMigratedFull
from apps.migrate.domain.applet_full import AppletMigratedFull
from apps.migrate.utilities import prepare_extra_fields_to_save
@@ -27,7 +20,7 @@ async def create(
self,
applet: AppletMigratedFull,
activities_create: list[ActivityCreate],
- ) -> list[ActivityFull]:
+ ) -> list[ActivityMigratedFull]:
schemas = []
activity_key_id_map: dict[uuid.UUID, uuid.UUID] = dict()
activity_id_key_map: dict[uuid.UUID, uuid.UUID] = dict()
@@ -101,12 +94,11 @@ async def create(
prepared_activity_items
)
activities = list()
-
- activity_id_map: dict[uuid.UUID, ActivityFull] = dict()
+ activity_id_map: dict[uuid.UUID, ActivityMigratedFull] = dict()
for activity_schema in activity_schemas:
activity_schema.key = activity_id_key_map[activity_schema.id]
- activity = ActivityFull.from_orm(activity_schema)
+ activity = ActivityMigratedFull.from_orm(activity_schema)
activities.append(activity)
activity_id_map[activity.id] = activity
@@ -121,7 +113,7 @@ async def update_create(
self,
applet: AppletMigratedFull,
activities_create: list[ActivityCreate],
- ) -> list[ActivityFull]:
+ ) -> list[ActivityMigratedFull]:
schemas = []
activity_key_id_map: dict[uuid.UUID, uuid.UUID] = dict()
activity_id_key_map: dict[uuid.UUID, uuid.UUID] = dict()
@@ -191,12 +183,11 @@ async def update_create(
prepared_activity_items
)
activities = list()
-
- activity_id_map: dict[uuid.UUID, ActivityFull] = dict()
+ activity_id_map: dict[uuid.UUID, ActivityMigratedFull] = dict()
for activity_schema in activity_schemas:
activity_schema.key = activity_id_key_map[activity_schema.id]
- activity = ActivityFull.from_orm(activity_schema)
+ activity = ActivityMigratedFull.from_orm(activity_schema)
activities.append(activity)
activity_id_map[activity.id] = activity
diff --git a/src/apps/migrate/services/applet_history_service.py b/src/apps/migrate/services/applet_history_service.py
index db045ca1c5a..c8c10c199b8 100644
--- a/src/apps/migrate/services/applet_history_service.py
+++ b/src/apps/migrate/services/applet_history_service.py
@@ -52,6 +52,7 @@ async def add_history(
migrated_date=applet.migrated_date,
migrated_updated=applet.migrated_updated,
extra_fields=prepare_extra_fields_to_save(applet.extra_fields),
+ stream_enabled=applet.stream_enabled,
)
)
await ActivityHistoryMigrationService(
diff --git a/src/apps/migrate/services/applet_service.py b/src/apps/migrate/services/applet_service.py
index e76194bd2a1..7ecaabdd1de 100644
--- a/src/apps/migrate/services/applet_service.py
+++ b/src/apps/migrate/services/applet_service.py
@@ -8,10 +8,7 @@
from apps.applets.domain import (
Role,
)
-from apps.applets.domain.applet_create_update import (
- AppletCreate,
- AppletUpdate,
-)
+from apps.applets.domain.applet_create_update import AppletCreate
from apps.migrate.domain.applet_full import AppletMigratedFull
from apps.migrate.services.applet_history_service import (
AppletMigrationHistoryService,
@@ -106,6 +103,8 @@ async def _create(self, create_data: AppletCreate) -> AppletMigratedFull:
extra_fields=prepare_extra_fields_to_save(
create_data.extra_fields
),
+ retention_period=create_data.retention_period,
+ retention_type=create_data.retention_type,
)
)
return AppletMigratedFull.from_orm(schema)
@@ -178,6 +177,8 @@ async def _update(
extra_fields=prepare_extra_fields_to_save(
update_data.extra_fields
),
+ retention_period=update_data.retention_period,
+ retention_type=update_data.retention_type,
),
)
return AppletMigratedFull.from_orm(schema)
diff --git a/src/apps/migrate/services/event_service.py b/src/apps/migrate/services/event_service.py
index 11c6eea5fd0..9660a3ff2ea 100644
--- a/src/apps/migrate/services/event_service.py
+++ b/src/apps/migrate/services/event_service.py
@@ -385,17 +385,22 @@ async def run_events_migration(self):
f"Migrate events {i}/{number_of_events_in_mongo}. Working on Event: {event.id}"
)
try:
- # Migrate data to PeriodicitySchema
- periodicity = await self._create_periodicity(event)
-
- # Migrate data to EventSchema
- pg_event = await self._create_event(event, periodicity)
-
- # Migrate data to ActivityEventsSchema or FlowEventsSchema
- if event.data.activity_id:
- await self._create_activity(event, pg_event)
- if event.data.activity_flow_id:
- await self._create_flow(event, pg_event)
+ if event.data.activity_id or event.data.activity_flow_id:
+ # Migrate data to PeriodicitySchema
+ periodicity = await self._create_periodicity(event)
+
+ # Migrate data to EventSchema
+ pg_event = await self._create_event(event, periodicity)
+
+ # Migrate data to ActivityEventsSchema or FlowEventsSchema
+ if event.data.activity_id:
+ await self._create_activity(event, pg_event)
+ if event.data.activity_flow_id:
+ await self._create_flow(event, pg_event)
+ else:
+ raise Exception(
+                        "Mongo event does not reference any activity or flow"
+ )
# Migrate data to NotificationSchema
if event.data.notifications:
@@ -414,19 +419,20 @@ async def run_events_migration(self):
user_ids: list = self._check_user_existence(event)
# add individual event for already created (on previous steps) event
- await self._create_user(event, pg_event, user_id[0])
+ await self._create_user(event, pg_event, user_ids[0])
# create new events for next users
new_events: list = []
- for user_id in user_ids[1:]:
+ for user_id in event.data.users[1:]:
e = copy.deepcopy(event)
e.id = ObjectId()
e.data.users = [user_id]
new_events.append(e)
- print(
- f"\nWill extend events list. Currents number of events is: {len(self.events)}. New number is: {len(self.events)+len(new_events)}\n"
+ migration_log.debug(
+                        f"Will extend events list. Current number of events: {len(self.events)}. New number: {len(self.events) + len(new_events)}"
)
+ number_of_events_in_mongo += len(new_events)
self.events.extend(new_events)
except Exception as e:
@@ -522,7 +528,9 @@ def _check_user_existence(self, event: dict) -> ObjectId:
for user in event.data.users:
profile = Profile().findOne(query={"_id": ObjectId(user)})
if not profile:
- print("Unable to find profile by event. Skip")
+ migration_log.debug(
+ "Unable to find profile by event. Skip"
+ )
continue
ids.append(profile["userId"])
diff --git a/src/apps/migrate/services/flow_history_service.py b/src/apps/migrate/services/flow_history_service.py
index 4db7a987021..7f7c34de272 100644
--- a/src/apps/migrate/services/flow_history_service.py
+++ b/src/apps/migrate/services/flow_history_service.py
@@ -2,7 +2,7 @@
from apps.activity_flows.crud import FlowsHistoryCRUD
from apps.activity_flows.db.schemas import ActivityFlowHistoriesSchema
-from apps.activity_flows.domain.flow_full import FlowFull
+from apps.migrate.domain.flow_full import FlowMigratedFull
from apps.migrate.domain.applet_full import AppletMigratedFull
from apps.migrate.services.flow_item_history_service import (
FlowItemHistoryMigrationService,
@@ -17,7 +17,7 @@ def __init__(self, session, applet: AppletMigratedFull, version: str):
self.applet_id_version = f"{applet.id}_{version}"
self.session = session
- async def add(self, flows: list[FlowFull]):
+ async def add(self, flows: list[FlowMigratedFull]):
flow_items = []
schemas = []
diff --git a/src/apps/migrate/services/flow_item_history_service.py b/src/apps/migrate/services/flow_item_history_service.py
index ec7cbbfbea8..261b956ebd6 100644
--- a/src/apps/migrate/services/flow_item_history_service.py
+++ b/src/apps/migrate/services/flow_item_history_service.py
@@ -1,10 +1,6 @@
-import uuid
-
from apps.activity_flows.crud import FlowItemHistoriesCRUD
from apps.activity_flows.db.schemas import ActivityFlowItemHistorySchema
-from apps.activity_flows.domain.flow_full import (
- ActivityFlowItemFull,
-)
+from apps.activity_flows.domain.flow_full import ActivityFlowItemFull
from apps.migrate.domain.applet_full import AppletMigratedFull
diff --git a/src/apps/migrate/services/flow_service.py b/src/apps/migrate/services/flow_service.py
index de53518710c..f1e727fb1ff 100644
--- a/src/apps/migrate/services/flow_service.py
+++ b/src/apps/migrate/services/flow_service.py
@@ -6,11 +6,8 @@
FlowCreate,
PreparedFlowItemCreate,
)
-from apps.activity_flows.domain.flow_full import FlowFull
-from apps.activity_flows.domain.flow_update import (
- FlowUpdate,
- PreparedFlowItemUpdate,
-)
+from apps.activity_flows.domain.flow_update import PreparedFlowItemUpdate
+from apps.migrate.domain.flow_full import FlowMigratedFull
from apps.migrate.domain.applet_full import AppletMigratedFull
from apps.migrate.services.flow_item_service import FlowItemMigrationService
from apps.migrate.utilities import prepare_extra_fields_to_save
@@ -25,7 +22,7 @@ async def create(
applet: AppletMigratedFull,
flows_create: list[FlowCreate],
activity_key_id_map: dict[uuid.UUID, uuid.UUID],
- ) -> list[FlowFull]:
+ ) -> list[FlowMigratedFull]:
schemas = list()
prepared_flow_items = list()
for index, flow_create in enumerate(flows_create):
@@ -67,7 +64,7 @@ async def create(
flow_id_map = dict()
for flow_schema in flow_schemas:
- flow = FlowFull.from_orm(flow_schema)
+ flow = FlowMigratedFull.from_orm(flow_schema)
flows.append(flow)
flow_id_map[flow.id] = flow
@@ -81,7 +78,7 @@ async def update_create(
applet: AppletMigratedFull,
flows_update: list[FlowCreate],
activity_key_id_map: dict[uuid.UUID, uuid.UUID],
- ) -> list[FlowFull]:
+ ) -> list[FlowMigratedFull]:
schemas = list()
prepared_flow_items = list()
@@ -124,7 +121,7 @@ async def update_create(
flow_id_map = dict()
for flow_schema in flow_schemas:
- flow = FlowFull.from_orm(flow_schema)
+ flow = FlowMigratedFull.from_orm(flow_schema)
flows.append(flow)
flow_id_map[flow.id] = flow
diff --git a/src/apps/migrate/services/mongo.py b/src/apps/migrate/services/mongo.py
index c15e270509a..0094a5d270f 100644
--- a/src/apps/migrate/services/mongo.py
+++ b/src/apps/migrate/services/mongo.py
@@ -53,7 +53,7 @@
uuid_to_mongoid,
)
from apps.shared.domain.base import InternalModel, PublicModel
-from apps.shared.encryption import encrypt, get_key
+from apps.shared.encryption import get_key
from apps.workspaces.domain.constants import Role
from apps.shared.version import INITIAL_VERSION
@@ -79,6 +79,29 @@ def decrypt(data):
def patch_broken_applet_versions(applet_id: str, applet_ld: dict) -> dict:
+ broken_conditional_date_item = [
+ "62a8d7d7b90b7f2ba9e1aa43",
+ ]
+ if applet_id in broken_conditional_date_item:
+ for property in applet_ld["reprolib:terms/order"][0]["@list"][0][
+ "reprolib:terms/addProperties"
+ ]:
+ if property["reprolib:terms/isAbout"][0]["@id"] == "EPDSMotherDOB":
+ property["reprolib:terms/isVis"][0]["@value"] = True
+
+ broken_item_flow_order = ["613f6eba6401599f0e495dc5"]
+ if applet_id in broken_item_flow_order:
+ for activity in applet_ld["reprolib:terms/order"][0]["@list"]:
+ for prop in activity["reprolib:terms/addProperties"]:
+ prop["reprolib:terms/isVis"][0]["@value"] = True
+ if applet_ld["schema:version"][0]["@value"] == "1.2.2":
+ applet_ld["reprolib:terms/order"][0]["@list"][0][
+ "reprolib:terms/order"
+ ][0]["@list"].pop(26)
+ applet_ld["reprolib:terms/order"][0]["@list"][0][
+ "reprolib:terms/addProperties"
+ ].pop(26)
+
broken_applet_versions = [
"6201cc26ace55b10691c0814",
"6202734eace55b10691c0fc4",
@@ -321,13 +344,42 @@ def patch_broken_applet_versions(applet_id: str, applet_ld: dict) -> dict:
def patch_broken_applets(
applet_id: str, applet_ld: dict, applet_mongo: dict
) -> tuple[dict, dict]:
+ broken_report_condition_item = [
+ "6358265b5cb700431121f033",
+ "6358267b5cb700431121f143",
+ "63696d4a52ea02101467671d",
+ "63696e7c52ea021014676784",
+ ]
+ if applet_id in broken_report_condition_item:
+ for report in applet_ld["reprolib:terms/order"][0]["@list"][0][
+ "reprolib:terms/reports"
+ ][0]["@list"]:
+ if report["@id"] == "sumScore_suicidalorselfinjury":
+ report["reprolib:terms/conditionals"][0]["@list"][1][
+ "reprolib:terms/printItems"
+ ][0]["@list"] = []
+ report["reprolib:terms/conditionals"][0]["@list"][0][
+ "reprolib:terms/printItems"
+ ][0]["@list"] = []
+
+ broken_conditional_date_item = [
+ "62a8d7d7b90b7f2ba9e1aa43",
+ "62a8d7e5b90b7f2ba9e1aab3",
+ ]
+ if applet_id in broken_conditional_date_item:
+ for property in applet_ld["reprolib:terms/order"][0]["@list"][0][
+ "reprolib:terms/addProperties"
+ ]:
+ if property["reprolib:terms/isAbout"][0]["@id"] == "EPDSMotherDOB":
+ property["reprolib:terms/isVis"][0]["@value"] = False
+
broken_item_flow = [
"6522a4753c36ce0d4d6cda4d",
]
if applet_id in broken_item_flow:
applet_ld["reprolib:terms/order"][0]["@list"][0][
"reprolib:terms/addProperties"
- ][5]["reprolib:terms/isVis"][0] = {"@value": True}
+ ][5]["reprolib:terms/isVis"][0] = {"@value": False}
broken_activity_order = [
"63d3d579b71996780cdf409a",
@@ -391,7 +443,7 @@ def patch_broken_applets(
property["reprolib:terms/isAbout"][0]["@id"]
== "IUQ_Wd_Social_Device"
):
- property["reprolib:terms/isVis"] = [{"@value": True}]
+ property["reprolib:terms/isVis"] = [{"@value": False}]
repo_replacements = [
(
@@ -720,11 +772,7 @@ def patch_broken_applets(
applet_ld = patch_prize_activity(applet_id, applet_ld)
- if (
- applet_id not in broken_applets
- and applet_id not in broken_applet_version
- ):
- patch_broken_visability_for_applet(applet_ld)
+ patch_broken_visability_for_applet(applet_ld)
return applet_ld, applet_mongo
@@ -819,6 +867,15 @@ def set_isvis(entity: dict, value: bool) -> None:
set_isvis(add_prop, acitivity_id_isvis_map[activity_id])
+def patch_library_version(applet_id: str, version: str) -> str:
+ if applet_id == "61f42e5c62485608c74c2a7e":
+ version = "4.2.42"
+ elif applet_id == "623b81c45197b9338bdaea22":
+ version = "2.11.39"
+
+ return version
+
+
class Mongo:
def __init__(self) -> None:
# Setup MongoDB connection
@@ -1223,15 +1280,18 @@ async def get_applet(self, applet_id: str) -> dict:
or applet["meta"]["applet"] == {}
):
raise EmptyAppletException()
-
+        # Backfill the applet version from the protocol document if missing
+ applet = self.fetch_applet_version(applet)
ld_request_schema = self.get_applet_repro_schema(applet)
ld_request_schema, applet = patch_broken_applets(
applet_id, ld_request_schema, applet
)
+ ld_request_schema = self.preprocess_performance_task(ld_request_schema)
converted = await self.get_converter_result(ld_request_schema)
converted.extra_fields["created"] = applet["created"]
converted.extra_fields["updated"] = applet["updated"]
+ converted.extra_fields["creator"] = str(applet.get("creatorId", None))
converted.extra_fields["version"] = applet["meta"]["applet"].get(
"version", INITIAL_VERSION
)
@@ -1343,11 +1403,17 @@ def resolve_arbitrary_client(profile: dict):
def get_answer_migration_queries(self, **kwargs):
db = self.get_main_or_arbitrary_db(kwargs["applet_id"])
query = {
- "meta.responses": {"$exists": True},
+ "meta.responses": {
+ "$exists": True,
+                # Some items have an empty dict as a response; don't migrate those
+ "$ne": {},
+ },
"meta.activity.@id": kwargs["activity_id"],
"meta.applet.@id": kwargs["applet_id"],
"meta.applet.version": kwargs["version"],
}
+ if kwargs.get("assessments_only"):
+ query["meta.reviewing"] = {"$exists": True}
item_collection = db["item"]
try:
creators_ids = item_collection.find(query).distinct("creatorId")
@@ -1412,6 +1478,9 @@ def docs_by_ids(
def get_user_nickname(self, user_profile: dict) -> str:
nick_name = decrypt(user_profile.get("nickName"))
if not nick_name:
+ # f_name = decrypt(user_profile.get("firstName"))
+ # l_name = decrypt(user_profile.get("lastName"))
+ # nick_name = f"{f_name} {l_name}" if f_name and l_name else f""
nick_name = ""
return nick_name
@@ -1513,7 +1582,8 @@ def get_anons(self, anon_id: uuid.UUID) -> List[AppletUserDAO]:
created_at=datetime.datetime.utcnow(),
updated_at=datetime.datetime.utcnow(),
meta={
- "nickname": "Mindlogger ChildMindInstitute",
+ # nickname is encrypted version of 'Mindlogger ChildMindInstitute'
+ "nickname": "hFywashKw+KlcDPazIy5QHz4AdkTOYkD28Q8+dpeDDA=",
"secretUserId": "Guest Account Submission",
"legacyProfileId": str(applet_profile["_id"]),
},
@@ -1532,6 +1602,11 @@ def get_user_roles(applet_profile: dict) -> list[str]:
return ["manager", "user"] if "user" in roles else ["manager"]
return roles
+    def has_manager_role(self, roles: list[str]) -> bool:
+ manager_roles = set(Role.managers())
+ exist = bool(set(roles).intersection(manager_roles))
+ return exist
+
def get_roles_mapping_from_applet_profile(
self, migrated_applet_ids: List[ObjectId]
):
@@ -1551,7 +1626,6 @@ def get_roles_mapping_from_applet_profile(
editor_count = 0
coordinator_count = 0
respondent_count = 0
- managerial_applets = []
for applet_profile in applet_profiles:
if applet_profile["userId"] in not_found_users:
@@ -1572,9 +1646,8 @@ def get_roles_mapping_from_applet_profile(
continue
roles = self.get_user_roles(applet_profile)
+ has_manager_role = self.has_manager_role(roles)
for role_name in set(roles):
- if role_name != "user":
- managerial_applets.append(applet_profile["appletId"])
meta = {}
if role_name == Role.REVIEWER:
meta["respondents"] = self.respondents_by_applet_profile(
@@ -1595,15 +1668,13 @@ def get_roles_mapping_from_applet_profile(
applet_profile
)
if data:
- if applet_profile["appletId"] in managerial_applets:
+ if has_manager_role:
if data["nick"] == "":
f_name = user["firstName"]
l_name = user["lastName"]
- meta["nickname"] = (
- f"{f_name} {l_name}"
- if f_name and l_name
- else f"- -"
- )
+ f_name = f_name if f_name else "-"
+ l_name = l_name if l_name else "-"
+ meta["nickname"] = f"{f_name} {l_name}"
else:
meta["nickname"] = data["nick"]
@@ -1615,6 +1686,12 @@ def get_roles_mapping_from_applet_profile(
else:
meta["nickname"] = data["nick"]
meta["secretUserId"] = data["secret"]
+ if "nickname" in meta:
+ nickname = meta.pop("nickname")
+ if nickname != "":
+ meta["nickname"] = enc.process_bind_param(
+ nickname, String
+ )
owner_id = self.get_owner_by_applet(applet_profile["appletId"])
if not owner_id:
@@ -1915,41 +1992,36 @@ def get_folder_mapping(
)
return set(folders_list), set(applets_list)
- def get_theme(
- self, key: str | ObjectId, applet_id: uuid.UUID
- ) -> ThemeDao | None:
- if not isinstance(key, ObjectId):
- try:
- theme_id = ObjectId(key)
- except Exception:
- return None
- theme_doc = self.db["folder"].find_one({"_id": theme_id})
- if theme_doc:
- meta = theme_doc.get("meta", {})
- return ThemeDao(
- id=mongoid_to_uuid(theme_doc["_id"]),
- creator_id=mongoid_to_uuid(theme_doc["creatorId"]),
- name=theme_doc["name"],
- logo=meta.get("logo"),
- small_logo=meta.get("smallLogo"),
- background_image=meta.get("backgroundImage"),
- primary_color=meta.get("primaryColor"),
- secondary_color=meta.get("secondaryColor"),
- tertiary_color=meta.get("tertiaryColor"),
- public=theme_doc["public"],
- allow_rename=True,
- created_at=theme_doc["created"],
- updated_at=theme_doc["updated"],
- is_default=False,
- applet_id=applet_id,
- )
- return None
+ def get_themes(self) -> list[ThemeDao]:
+ themes = []
+ theme_docs = self.db["folder"].find(
+ {"parentId": ObjectId("61323c0ff7102f0a6e9b3588")}
+ )
+ for theme_doc in theme_docs:
+ if theme_doc:
+ meta = theme_doc.get("meta", {})
+ themes.append(
+ ThemeDao(
+ id=mongoid_to_uuid(theme_doc["_id"]),
+ creator_id=mongoid_to_uuid(theme_doc["creatorId"]),
+ name=theme_doc["name"],
+ logo=meta.get("logo"),
+ small_logo=meta.get("smallLogo"),
+ background_image=meta.get("backgroundImage"),
+ primary_color=meta.get("primaryColor"),
+ secondary_color=meta.get("secondaryColor"),
+ tertiary_color=meta.get("tertiaryColor"),
+ public=theme_doc["public"],
+ allow_rename=True,
+ created_at=theme_doc["created"],
+ updated_at=theme_doc["updated"],
+ is_default=False,
+ )
+ )
+ return themes
- def get_library(
- self, applet_ids: list[ObjectId] | None
- ) -> (LibraryDao, ThemeDao):
+ def get_library(self, applet_ids: list[ObjectId] | None) -> LibraryDao:
lib_set = set()
- theme_set = set()
query = {}
if applet_ids:
query["appletId"] = {"$in": applet_ids}
@@ -1957,6 +2029,7 @@ def get_library(
for lib_doc in library:
applet_id = mongoid_to_uuid(lib_doc["appletId"])
version = lib_doc.get("version")
+ version = patch_library_version(str(lib_doc["appletId"]), version)
if version:
version_id = f"{applet_id}_{version}"
else:
@@ -1975,14 +2048,11 @@ def get_library(
migrated_date=now,
migrated_updated=now,
is_deleted=False,
+ name=lib_doc["name"],
+ display_name=lib_doc["displayName"],
)
- theme_id = lib_doc.get("themeId")
- if theme_id:
- theme = self.get_theme(theme_id, applet_id)
- if theme:
- theme_set.add(theme)
lib_set.add(lib)
- return lib_set, theme_set
+ return lib_set
def get_applets_by_workspace(self, workspace_id: str) -> list[str]:
items = Profile().find(query={"accountId": ObjectId(workspace_id)})
@@ -2015,7 +2085,7 @@ def get_public_link_mappings(
user_id = applet_profile["userId"]
if not isinstance(user_id, ObjectId):
user_id = ObjectId(user_id)
- if link_id and login:
+ if link_id is not None and login is not None:
result.append(
PublicLinkDao(
applet_bson=document["_id"],
@@ -2053,3 +2123,157 @@ def get_applet_theme_mapping(self) -> list[AppletTheme]:
)
result.append(mapper)
return result
+
+ def get_repro_order(self, schema: dict):
+ act_list = schema.get("reprolib:terms/order", [])
+ result = []
+ for act in act_list:
+ _list_attr = act.get("@list", [])
+ result += _list_attr
+ return result
+
+ @staticmethod
+ def is_has_item_types(
+ _types: list[str], activity_items: list[dict]
+ ) -> bool:
+ for item in activity_items:
+ _inputs = item.get("reprolib:terms/inputType", [])
+ for _input in _inputs:
+ if _input.get("@value") in _types:
+ return True
+ return False
+
+ def get_activity_names(self, activity_schemas: list[dict]) -> list[str]:
+ names = []
+ for activity in activity_schemas:
+            name_attr = activity.get(
+                "http://www.w3.org/2004/02/skos/core#prefLabel", []
+            )
+ name_attr = next(iter(name_attr), {})
+ name = name_attr.get("@value")
+ if name:
+ names.append(name)
+ return names
+
+ def _is_cst(self, activity_items: list[dict], cst_type: str):
+ def _filter_user_input_type(item: dict):
+ _type = next(iter(item.get("@type", [])), None)
+ if not _type or _type != "http://schema.org/Text":
+ return False
+ name = next(iter(item.get("schema:name", [])), {})
+ value = next(iter(item.get("schema:value", [])), {})
+ if (
+ name.get("@value") == "userInputType"
+ and value.get("@value") == cst_type
+ ):
+ return True
+
+        for item in activity_items:
+            _inputs = item.get("reprolib:terms/inputs", [])
+            flt_result = next(filter(_filter_user_input_type, _inputs), None)
+            if flt_result:
+                return self.is_has_item_types(
+                    ["stabilityTracker"], activity_items
+                )
+        return False
+
+ def is_cst(self, activity_items: list[dict]) -> bool:
+ return self._is_cst(activity_items, "touch")
+
+ def is_cst_gyro(self, activity_items: list[dict]) -> bool:
+ return self._is_cst(activity_items, "gyroscope")
+
+ def is_ab_trails(
+ self,
+ applet_schema: dict,
+ activity_items: list[dict],
+ activity_names: list[str],
+ ) -> bool:
+ # Check activity names
+ # Try to find 'Trails_iPad', 'Trails_Mobile' strings as activity name
+        # Try to find 'Trails_iPad' or 'Trails_Mobile' among the activity names
+ m = list(map(lambda name: name in ab_trails_act_names, activity_names))
+ if not any(m):
+ return False
+ # Check applet name
+        # Try to find 'A/B Trails' as the expected applet name
+ ab_trails_name = "A/B Trails"
+        applet_name = applet_schema.get(
+            "http://www.w3.org/2004/02/skos/core#prefLabel", []
+        )
+ applet_name = next(iter(applet_name), {})
+ if applet_name.get("@value") != ab_trails_name:
+ return False
+ # Check activity item types
+ # Try to find items with type 'trail'
+ return self.is_has_item_types(["trail"], activity_items)
+
+ def is_flanker(self, activity_items: list[dict]) -> bool:
+ return self.is_has_item_types(
+ ["visual-stimulus-response"], activity_items
+ )
+
+ def preprocess_performance_task(self, applet_schema) -> dict:
+        # Infer an activityType from activity items for activities missing one
+ activities = self.get_repro_order(applet_schema)
+ activity_names = self.get_activity_names(activities)
+ for activity in activities:
+ activity_type = activity.get("reprolib:terms/activityType")
+ if activity_type is not None:
+                # The activity already has an activityType; nothing to patch
+ continue
+ items = self.get_repro_order(activity)
+ if self.is_ab_trails(applet_schema, items, activity_names):
+                name_attr = activity.get(
+                    "http://www.w3.org/2004/02/skos/core#prefLabel", []
+                )
+ activity_name = next(iter(name_attr), {})
+ if activity_name.get("@value") == "Trails_Mobile":
+ name = "TRAILS_MOBILE"
+ else:
+ name = "TRAILS_IPAD"
+ activity["reprolib:terms/activityType"] = [
+ {
+ "@type": "http://www.w3.org/2001/XMLSchema#string",
+ "@value": name,
+ }
+ ]
+ continue
+ elif self.is_cst_gyro(items):
+ activity["reprolib:terms/activityType"] = [
+ {
+ "@type": "http://www.w3.org/2001/XMLSchema#string",
+ "@value": "CST_GYRO",
+ }
+ ]
+ elif self.is_cst(items):
+ activity["reprolib:terms/activityType"] = [
+ {
+ "@type": "http://www.w3.org/2001/XMLSchema#string",
+ "@value": "CST_TOUCH",
+ }
+ ]
+ continue
+ elif self.is_flanker(items):
+ activity["reprolib:terms/activityType"] = [
+ {
+ "@type": "http://www.w3.org/2001/XMLSchema#string",
+ "@value": "FLANKER",
+ }
+ ]
+ continue
+ return applet_schema
+
+ def fetch_applet_version(self, applet: dict):
+ if not applet["meta"]["applet"].get("version", None):
+ protocol = self.db["folder"].find_one(
+ {
+ "_id": ObjectId(
+ str(applet["meta"]["protocol"]["_id"]).split("/")[1]
+ )
+ }
+ )
+ applet["meta"]["applet"]["version"] = protocol["meta"]["protocol"][
+ "schema:version"
+ ][0]["@value"]
+ return applet
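
Review note: a self-contained sketch mirroring is_has_item_types above, which
drives the performance-task detection; the fixture data is invented, and
"visual-stimulus-response" is the inputType that marks a flanker activity.

    def has_item_types(types: list[str], activity_items: list[dict]) -> bool:
        for item in activity_items:
            for _input in item.get("reprolib:terms/inputType", []):
                if _input.get("@value") in types:
                    return True
        return False

    flanker_items = [
        {"reprolib:terms/inputType": [{"@value": "visual-stimulus-response"}]}
    ]
    assert has_item_types(["visual-stimulus-response"], flanker_items)
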
diff --git a/src/apps/migrate/services/postgres.py b/src/apps/migrate/services/postgres.py
index 522e1847df1..66b3f277a1d 100644
--- a/src/apps/migrate/services/postgres.py
+++ b/src/apps/migrate/services/postgres.py
@@ -881,3 +881,43 @@ def themes_slice(self) -> str:
for row in rows:
s += f"\t{row[0]}: {row[1]}\n"
return s
+
+ def update_applet_name(
+ self, applet_id: uuid.UUID, name: str, applet_id_version: str
+ ):
+ sql_applet_version = """
+ UPDATE applet_histories
+ SET display_name = %s
+ WHERE id_version = %s;
+ """
+
+ sql_applet = """
+ UPDATE applets
+ SET display_name = %s
+ WHERE id = %s AND version = %s;
+ """
+
+        try:
+            cursor = self.connection.cursor()
+
+            cursor.execute(
+                sql_applet_version,
+                (
+                    str(name),
+                    str(applet_id_version),
+                ),
+            )
+            cursor.execute(
+                sql_applet,
+                (
+                    str(name),
+                    str(applet_id),
+                    str(applet_id_version.split("_")[1]),
+                ),
+            )
+            self.connection.commit()
+            migration_log.debug(f"[LIBRARY] Name changed: {applet_id}")
+        except Exception as ex:
+            # Roll back the failed transaction instead of committing it.
+            self.connection.rollback()
+            migration_log.debug(f"[LIBRARY] Name cannot be changed: {ex}")
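
Review note: update_applet_name assumes the id_version format
f"{applet_id}_{version}" used elsewhere in this PR; a toy check with an
invented identifier:

    applet_id_version = "9f0e4953-0000-0000-0000-000000000000_2.11.39"
    assert applet_id_version.split("_")[1] == "2.11.39"
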
diff --git a/src/apps/migrate/utilities.py b/src/apps/migrate/utilities.py
index 4ed3628e6e3..3c97fe1f230 100644
--- a/src/apps/migrate/utilities.py
+++ b/src/apps/migrate/utilities.py
@@ -15,7 +15,9 @@ def mongoid_to_uuid(id_):
return uuid.UUID(str(id_) + "00000000")
-def uuid_to_mongoid(uid: uuid.UUID) -> None | ObjectId:
+def uuid_to_mongoid(uid: uuid.UUID | str) -> None | ObjectId:
+ if isinstance(uid, str):
+ uid = uuid.UUID(uid)
return ObjectId(uid.hex[:-8]) if uid.hex[-8:] == "0" * 8 else None
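
Review note: uuid_to_mongoid now also accepts string UUIDs. The round trip
only works for UUIDs minted by mongoid_to_uuid, which zero-pads the
24-hex-character ObjectId to 32 hex characters; the ObjectId below is taken
from this diff.

    import uuid

    from bson import ObjectId

    oid = ObjectId("6522a4753c36ce0d4d6cda4d")
    uid = uuid.UUID(str(oid) + "00000000")
    assert uid.hex[-8:] == "0" * 8
    assert ObjectId(uid.hex[:-8]) == oid
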
diff --git a/src/apps/schedule/api/schedule.py b/src/apps/schedule/api/schedule.py
index babd1ff14ce..f33d37e5381 100644
--- a/src/apps/schedule/api/schedule.py
+++ b/src/apps/schedule/api/schedule.py
@@ -1,11 +1,13 @@
import uuid
from copy import deepcopy
+from datetime import date, timedelta
from fastapi import Body, Depends
from firebase_admin.exceptions import FirebaseError
from apps.answers.errors import UserDoesNotHavePermissionError
-from apps.applets.crud import UserAppletAccessCRUD
+from apps.applets.crud import AppletsCRUD, UserAppletAccessCRUD
+from apps.applets.db.schemas import AppletSchema
from apps.applets.service import AppletService
from apps.authentication.deps import get_current_user
from apps.schedule.domain.schedule.filters import EventQueryParams
@@ -52,8 +54,8 @@ async def schedule_create(
try:
await applet_service.send_notification_to_applet_respondents(
applet_id,
- "Schedules are updated",
- "Schedules are updated",
+ "Your schedule has been changed, click to update.",
+ "Your schedule has been changed, click to update.",
FirebaseNotificationType.SCHEDULE_UPDATED,
respondent_ids=[schedule.respondent_id]
if schedule.respondent_id
@@ -145,8 +147,8 @@ async def schedule_delete_all(
try:
await applet_service.send_notification_to_applet_respondents(
applet_id,
- "Schedules are updated",
- "Schedules are updated",
+ "Your schedule has been changed, click to update.",
+ "Your schedule has been changed, click to update.",
FirebaseNotificationType.SCHEDULE_UPDATED,
)
except FirebaseError as e:
@@ -175,8 +177,8 @@ async def schedule_delete_by_id(
try:
await applet_service.send_notification_to_applet_respondents(
applet_id,
- "Schedules are updated",
- "Schedules are updated",
+ "Your schedule has been changed, click to update.",
+ "Your schedule has been changed, click to update.",
FirebaseNotificationType.SCHEDULE_UPDATED,
respondent_ids=[respondent_id] if respondent_id else None,
)
@@ -207,8 +209,8 @@ async def schedule_update(
try:
await applet_service.send_notification_to_applet_respondents(
applet_id,
- "Schedules are updated",
- "Schedules are updated",
+ "Your schedule has been changed, click to update.",
+ "Your schedule has been changed, click to update.",
FirebaseNotificationType.SCHEDULE_UPDATED,
respondent_ids=[schedule.respondent_id]
if schedule.respondent_id
@@ -256,8 +258,8 @@ async def schedule_delete_by_user(
try:
await applet_service.send_notification_to_applet_respondents(
applet_id,
- "Schedules are updated",
- "Schedules are updated",
+ "Your schedule has been changed, click to update.",
+ "Your schedule has been changed, click to update.",
FirebaseNotificationType.SCHEDULE_UPDATED,
respondent_ids=[respondent_id],
)
@@ -281,6 +283,45 @@ async def schedule_get_all_by_user(
return ResponseMulti(result=schedules, count=count)
+async def schedule_get_all_by_respondent_user(
+ user: User = Depends(get_current_user),
+ session=Depends(get_session),
+) -> ResponseMulti[PublicEventByUser]:
+ """Get all the respondent's schedules for the next 2 weeks."""
+ max_date_from_event_delta_days = 15
+ min_date_to_event_delta_days = 2
+ today: date = date.today()
+ max_start_date: date = today + timedelta(
+ days=max_date_from_event_delta_days
+ )
+ min_end_date: date = today - timedelta(days=min_date_to_event_delta_days)
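+    # keep events that start within the next 15 days or that ended no more
+    # than 2 days ago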
+
+ async with atomic(session):
+        # the applets returned here must match the ones returned by the
+        # /applets?roles=respondent endpoint
+ query_params: QueryParams = QueryParams(
+ filters={"roles": Role.RESPONDENT, "flat_list": False},
+ limit=10000,
+ )
+ applets: list[AppletSchema] = await AppletsCRUD(
+ session
+ ).get_applets_by_roles(
+ user_id=user.id,
+ roles=[Role.RESPONDENT],
+ query_params=query_params,
+ exclude_without_encryption=True,
+ )
+ applet_ids: list[uuid.UUID] = [applet.id for applet in applets]
+
+ schedules = await ScheduleService(session).get_upcoming_events_by_user(
+ user_id=user.id,
+ applet_ids=applet_ids,
+ min_end_date=min_end_date,
+ max_start_date=max_start_date,
+ )
+ return ResponseMulti(result=schedules, count=len(schedules))
+
+
async def schedule_get_by_user(
applet_id: uuid.UUID,
user: User = Depends(get_current_user),
@@ -315,8 +356,8 @@ async def schedule_remove_individual_calendar(
try:
await applet_service.send_notification_to_applet_respondents(
applet_id,
- "Schedules are updated",
- "Schedules are updated",
+ "Your schedule has been changed, click to update.",
+ "Your schedule has been changed, click to update.",
FirebaseNotificationType.SCHEDULE_UPDATED,
respondent_ids=[respondent_id],
)
@@ -368,8 +409,8 @@ async def schedule_create_individual(
try:
await applet_service.send_notification_to_applet_respondents(
applet_id,
- "Schedules are updated",
- "Schedules are updated",
+ "Your schedule has been changed, click to update.",
+ "Your schedule has been changed, click to update.",
FirebaseNotificationType.SCHEDULE_UPDATED,
respondent_ids=[respondent_id],
)
diff --git a/src/apps/schedule/commands/__init__.py b/src/apps/schedule/commands/__init__.py
new file mode 100644
index 00000000000..d5dcc72dd58
--- /dev/null
+++ b/src/apps/schedule/commands/__init__.py
@@ -0,0 +1,3 @@
+from apps.schedule.commands.remove_events import ( # noqa: F401
+ app as events_cli,
+)
diff --git a/src/apps/schedule/commands/remove_events.py b/src/apps/schedule/commands/remove_events.py
new file mode 100644
index 00000000000..c0399b38513
--- /dev/null
+++ b/src/apps/schedule/commands/remove_events.py
@@ -0,0 +1,52 @@
+import asyncio
+from functools import wraps
+
+import typer
+from sqlalchemy import select
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy.orm import Query
+
+from apps.activities.db.schemas.activity import ActivitySchema
+from apps.schedule.service import ScheduleService
+from infrastructure.database import atomic, session_manager
+
+app = typer.Typer()
+
+
+def coro(f):
+ @wraps(f)
+ def wrapper(*args, **kwargs):
+ return asyncio.run(f(*args, **kwargs))
+
+ return wrapper
+
+
+async def get_assessments(session: AsyncSession) -> list[ActivitySchema]:
+ query: Query = select(ActivitySchema)
+ query = query.where(ActivitySchema.is_reviewable.is_(True))
+ res = await session.execute(query)
+ return res.scalars().all() # noqa
+
+
+@app.command(short_help="Remove events for assessments")
+@coro
+async def remove_events():
+ session_maker = session_manager.get_session()
+ try:
+ async with session_maker() as session:
+ async with atomic(session):
+ try:
+ assessments = await get_assessments(session)
+ service = ScheduleService(session)
+ for activity in assessments:
+ print(
+ f"Applet: {activity.applet_id} "
+ f"Activity: {activity.id}"
+ )
+ await service.delete_by_activity_ids(
+ activity.applet_id, [activity.id]
+ )
+ except Exception as ex:
+ print(ex)
+ finally:
+ await session_maker.remove()
diff --git a/src/apps/schedule/crud/events.py b/src/apps/schedule/crud/events.py
index daa31d845a2..3f07a7b15bc 100644
--- a/src/apps/schedule/crud/events.py
+++ b/src/apps/schedule/crud/events.py
@@ -1,4 +1,5 @@
import uuid
+from datetime import date
from sqlalchemy.exc import IntegrityError, MultipleResultsFound
from sqlalchemy.orm import Query
@@ -219,6 +220,105 @@ async def get_all_by_applet_and_user(
)
return events
+ async def get_all_by_applets_and_user(
+ self,
+ applet_ids: list[uuid.UUID],
+ user_id: uuid.UUID,
+ min_end_date: date | None = None,
+ max_start_date: date | None = None,
+ ) -> tuple[dict[uuid.UUID, list[EventFull]], set[uuid.UUID]]:
+ """Get events by applet_ids and user_id
+ Return {applet_id: [EventFull]}"""
+
+ query: Query = select(
+ EventSchema,
+ PeriodicitySchema.start_date,
+ PeriodicitySchema.end_date,
+ PeriodicitySchema.selected_date,
+ PeriodicitySchema.type,
+ ActivityEventsSchema.activity_id,
+ FlowEventsSchema.flow_id,
+ )
+ query = query.join(
+ UserEventsSchema,
+ and_(
+ EventSchema.id == UserEventsSchema.event_id,
+ UserEventsSchema.user_id == user_id,
+ ),
+ )
+
+ query = query.join(
+ PeriodicitySchema,
+ PeriodicitySchema.id == EventSchema.periodicity_id,
+ )
+
+ query = query.join(
+ FlowEventsSchema,
+ FlowEventsSchema.event_id == EventSchema.id,
+ isouter=True,
+ )
+ query = query.join(
+ ActivityEventsSchema,
+ ActivityEventsSchema.event_id == EventSchema.id,
+ isouter=True,
+ )
+
+ query = query.where(EventSchema.applet_id.in_(applet_ids))
+ query = query.where(EventSchema.is_deleted == False) # noqa: E712
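+        # keep ALWAYS events unconditionally; recurring events must overlap
+        # the [min_end_date, max_start_date] window; ONCE events must have
+        # their selected_date inside that window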
+ if min_end_date and max_start_date:
+ query = query.where(
+ or_(
+ PeriodicitySchema.type == PeriodicityType.ALWAYS,
+ and_(
+ PeriodicitySchema.type != PeriodicityType.ONCE,
+ or_(
+ PeriodicitySchema.start_date.is_(None),
+ PeriodicitySchema.start_date <= max_start_date,
+ ),
+ or_(
+ PeriodicitySchema.end_date.is_(None),
+ PeriodicitySchema.end_date >= min_end_date,
+ ),
+ ),
+ and_(
+ PeriodicitySchema.type == PeriodicityType.ONCE,
+ PeriodicitySchema.selected_date <= max_start_date,
+ PeriodicitySchema.selected_date >= min_end_date,
+ ),
+ )
+ )
+
+ db_result = await self._execute(query)
+
+ events_map: dict[uuid.UUID, list[EventFull]] = dict()
+ event_ids: set[uuid.UUID] = set()
+ for row in db_result:
+ event_ids.add(row.EventSchema.id)
+ events_map.setdefault(row.EventSchema.applet_id, list())
+ events_map[row.EventSchema.applet_id].append(
+ EventFull(
+ id=row.EventSchema.id,
+ start_time=row.EventSchema.start_time,
+ end_time=row.EventSchema.end_time,
+ access_before_schedule=row.EventSchema.access_before_schedule, # noqa: E501
+ one_time_completion=row.EventSchema.one_time_completion,
+ timer=row.EventSchema.timer,
+ timer_type=row.EventSchema.timer_type,
+ user_id=user_id,
+ periodicity=Periodicity(
+ id=row.EventSchema.periodicity_id,
+ type=row.type,
+ start_date=row.start_date,
+ end_date=row.end_date,
+ selected_date=row.selected_date,
+ ),
+ activity_id=row.activity_id,
+ flow_id=row.flow_id,
+ )
+ )
+
+ return events_map, event_ids
+
async def delete_by_ids(self, ids: list[uuid.UUID]) -> None:
"""Delete event by event ids."""
query: Query = delete(EventSchema)
@@ -258,7 +358,8 @@ async def get_all_by_applet_and_activity(
)
query = query.where(EventSchema.applet_id == applet_id)
query = query.where(EventSchema.is_deleted == False) # noqa: E712
- query = query.where(UserEventsSchema.user_id == respondent_id)
+ if respondent_id:
+ query = query.where(UserEventsSchema.user_id == respondent_id)
result = await self._execute(query)
return result.scalars().all()
@@ -299,7 +400,8 @@ async def get_all_by_applet_and_flow(
query = query.where(EventSchema.applet_id == applet_id)
query = query.where(EventSchema.is_deleted == False) # noqa: E712
- query = query.where(UserEventsSchema.user_id == respondent_id)
+ if respondent_id:
+ query = query.where(UserEventsSchema.user_id == respondent_id)
result = await self._execute(query)
return result.scalars().all()
@@ -411,6 +513,144 @@ async def get_general_events_by_user(
)
return events
+ async def get_general_events_by_applets_and_user(
+ self,
+ applet_ids: list[uuid.UUID],
+ user_id: uuid.UUID,
+ min_end_date: date | None = None,
+ max_start_date: date | None = None,
+ ) -> tuple[dict[uuid.UUID, list[EventFull]], set[uuid.UUID]]:
+ """Get general events by applet_id and user_id"""
+ # select flow_ids to exclude
+ flow_ids = (
+ select(distinct(FlowEventsSchema.flow_id))
+ .select_from(FlowEventsSchema)
+ .join(
+ UserEventsSchema,
+ UserEventsSchema.event_id == FlowEventsSchema.event_id,
+ )
+ .join(
+ EventSchema,
+ EventSchema.id == FlowEventsSchema.event_id,
+ )
+ .where(UserEventsSchema.user_id == user_id)
+ .where(EventSchema.applet_id.in_(applet_ids))
+ )
+ activity_ids = (
+ select(distinct(ActivityEventsSchema.activity_id))
+ .select_from(ActivityEventsSchema)
+ .join(
+ UserEventsSchema,
+ UserEventsSchema.event_id == ActivityEventsSchema.event_id,
+ )
+ .join(
+ EventSchema,
+ EventSchema.id == ActivityEventsSchema.event_id,
+ )
+ .where(UserEventsSchema.user_id == user_id)
+ .where(EventSchema.applet_id.in_(applet_ids))
+ )
+
+ query: Query = select(
+ EventSchema,
+ PeriodicitySchema.start_date,
+ PeriodicitySchema.end_date,
+ PeriodicitySchema.selected_date,
+ PeriodicitySchema.type,
+ ActivityEventsSchema.activity_id,
+ FlowEventsSchema.flow_id,
+ )
+
+ query = query.join(
+ PeriodicitySchema,
+ PeriodicitySchema.id == EventSchema.periodicity_id,
+ )
+
+ query = query.join(
+ FlowEventsSchema,
+ FlowEventsSchema.event_id == EventSchema.id,
+ isouter=True,
+ )
+ query = query.join(
+ ActivityEventsSchema,
+ ActivityEventsSchema.event_id == EventSchema.id,
+ isouter=True,
+ )
+ query = query.join(
+ UserEventsSchema,
+ UserEventsSchema.event_id == EventSchema.id,
+ isouter=True,
+ )
+
+ query = query.where(EventSchema.applet_id.in_(applet_ids))
+ query = query.where(EventSchema.is_deleted == False) # noqa: E712
+ query = query.where(
+ or_(
+ FlowEventsSchema.flow_id.is_(None),
+ FlowEventsSchema.flow_id.not_in(flow_ids),
+ )
+ )
+ query = query.where(
+ or_(
+ ActivityEventsSchema.activity_id.is_(None),
+ ActivityEventsSchema.activity_id.not_in(activity_ids),
+ )
+ )
+ query = query.where(UserEventsSchema.user_id == None) # noqa: E711
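+        # same availability-window filter as in get_all_by_applets_and_user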
+ if min_end_date and max_start_date:
+ query = query.where(
+ or_(
+ PeriodicitySchema.type == PeriodicityType.ALWAYS,
+ and_(
+ PeriodicitySchema.type != PeriodicityType.ONCE,
+ or_(
+ PeriodicitySchema.start_date.is_(None),
+ PeriodicitySchema.start_date <= max_start_date,
+ ),
+ or_(
+ PeriodicitySchema.end_date.is_(None),
+ PeriodicitySchema.end_date >= min_end_date,
+ ),
+ ),
+ and_(
+ PeriodicitySchema.type == PeriodicityType.ONCE,
+ PeriodicitySchema.selected_date <= max_start_date,
+ PeriodicitySchema.selected_date >= min_end_date,
+ ),
+ )
+ )
+
+ db_result = await self._execute(query)
+
+ events_map: dict[uuid.UUID, list[EventFull]] = dict()
+ event_ids: set[uuid.UUID] = set()
+ for row in db_result:
+ event_ids.add(row.EventSchema.id)
+ events_map.setdefault(row.EventSchema.applet_id, list())
+ events_map[row.EventSchema.applet_id].append(
+ EventFull(
+ id=row.EventSchema.id,
+ start_time=row.EventSchema.start_time,
+ end_time=row.EventSchema.end_time,
+ access_before_schedule=row.EventSchema.access_before_schedule, # noqa: E501
+ one_time_completion=row.EventSchema.one_time_completion,
+ timer=row.EventSchema.timer,
+ timer_type=row.EventSchema.timer_type,
+ user_id=user_id,
+ periodicity=Periodicity(
+ id=row.EventSchema.periodicity_id,
+ type=row.type,
+ start_date=row.start_date,
+ end_date=row.end_date,
+ selected_date=row.selected_date,
+ ),
+ activity_id=row.activity_id,
+ flow_id=row.flow_id,
+ )
+ )
+
+ return events_map, event_ids
+
async def count_general_events_by_user(
self, applet_id: uuid.UUID, user_id: uuid.UUID
) -> int:
@@ -502,6 +742,13 @@ async def count_individual_events_by_user(
db_result = await self._execute(query)
return db_result.scalar()
+ async def get_all(self, applet_id: uuid.UUID) -> list[EventSchema]:
+ query: Query = select(EventSchema)
+ query = query.where(EventSchema.applet_id == applet_id)
+ query = query.where(EventSchema.is_deleted.is_(False))
+ result = await self._execute(query)
+ return result.scalars().all()
+
class UserEventsCRUD(BaseCRUD[UserEventsSchema]):
schema_class = UserEventsSchema
@@ -708,6 +955,22 @@ async def get_by_applet_and_user_id(
for activity_event in activity_events
]
+ async def get_missing_events(
+ self, activity_ids: list[uuid.UUID]
+ ) -> list[uuid.UUID]:
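+        """Return the ids from activity_ids that have no events yet
+        (only events of non-reviewable activities are counted)."""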
+ query: Query = select(ActivityEventsSchema.activity_id)
+        query = query.join(
+            ActivitySchema,
+            and_(
+                ActivitySchema.id == ActivityEventsSchema.activity_id,
+                ActivitySchema.is_reviewable.is_(False),
+            ),
+        )
+        query = query.where(
+            ActivityEventsSchema.activity_id.in_(activity_ids)
+        )
+ res = await self._execute(query)
+ db_result = res.scalars().all()
+ return list(set(activity_ids) - set(db_result))
+
class FlowEventsCRUD(BaseCRUD[FlowEventsSchema]):
schema_class = FlowEventsSchema
diff --git a/src/apps/schedule/crud/notification.py b/src/apps/schedule/crud/notification.py
index 576073a0585..a669480acb3 100644
--- a/src/apps/schedule/crud/notification.py
+++ b/src/apps/schedule/crud/notification.py
@@ -46,6 +46,26 @@ async def get_all_by_event_id(
for notification in result
]
+ async def get_all_by_event_ids(
+ self, event_ids: set[uuid.UUID]
+ ) -> dict[uuid.UUID, list[NotificationSetting]]:
+ """Return all notifications in map by event ids."""
+
+ query: Query = select(NotificationSchema)
+ query = query.where(NotificationSchema.event_id.in_(event_ids))
+ query = query.order_by(NotificationSchema.order.asc())
+ db_result = await self._execute(query)
+ result = db_result.scalars().all()
+
+ notifications_map: dict[uuid.UUID, list[NotificationSetting]] = dict()
+ for notification in result:
+ notifications_map.setdefault(notification.event_id, list())
+ notifications_map[notification.event_id].append(
+ NotificationSetting.from_orm(notification)
+ )
+
+ return notifications_map
+
async def delete_by_event_ids(self, event_ids: list[uuid.UUID]):
"""Delete all notifications by event id."""
query: Query = delete(NotificationSchema)
@@ -72,6 +92,24 @@ async def get_by_event_id(self, event_id: uuid.UUID) -> ReminderSchema:
return db_result.scalars().first()
+ async def get_by_event_ids(
+ self, event_ids: set[uuid.UUID]
+ ) -> dict[uuid.UUID, ReminderSchema]:
+ """Return all reminders in map by event ids."""
+
+ query: Query = select(ReminderSchema)
+ query = query.where(ReminderSchema.event_id.in_(event_ids))
+ query = query.order_by(ReminderSchema.id.asc())
+ db_result = await self._execute(query)
+
+ result = db_result.scalars().all()
+ reminders_map: dict[uuid.UUID, ReminderSchema] = dict()
+ for reminder in result:
+ if reminder.event_id not in reminders_map:
+ reminders_map[reminder.event_id] = reminder
+
+ return reminders_map
+
async def delete_by_event_ids(self, event_ids: list[uuid.UUID]):
"""Delete all reminders by event id."""
query: Query = delete(ReminderSchema)
diff --git a/src/apps/schedule/domain/schedule/requests.py b/src/apps/schedule/domain/schedule/requests.py
index 799e8726aa0..dacd20deae7 100644
--- a/src/apps/schedule/domain/schedule/requests.py
+++ b/src/apps/schedule/domain/schedule/requests.py
@@ -16,6 +16,7 @@
ActivityOrFlowRequiredError,
OneTimeCompletionCaseError,
StartEndTimeAccessBeforeScheduleCaseError,
+ StartEndTimeEqualError,
UnavailableActivityOrFlowError,
)
from apps.shared.domain import InternalModel, PublicModel
@@ -85,10 +86,10 @@ def validate_optional_fields(cls, values):
if (
notification.trigger_type
== NotificationTriggerType.FIXED
- and not (
- values.get("start_time")
- <= notification.at_time
- <= values.get("end_time") # noqa: E501
+ and (
+ values.get("start_time") is None
+ or values.get("end_time") is None
+ or notification.at_time is None # noqa: E501
)
):
raise UnavailableActivityOrFlowError()
@@ -96,21 +97,26 @@ def validate_optional_fields(cls, values):
if (
notification.trigger_type
== NotificationTriggerType.RANDOM
- and not (
- values.get("start_time")
- <= notification.from_time
- <= notification.to_time
- <= values.get("end_time") # noqa: E501
+ and (
+ values.get("start_time") is None
+ or values.get("end_time") is None
+ or notification.from_time is None
+ or notification.to_time is None # noqa: E501
)
):
raise UnavailableActivityOrFlowError()
if values.get("notification").reminder:
- if not (
- values.get("start_time")
- <= values.get("notification").reminder.reminder_time
- <= values.get("end_time")
+ if (
+ values.get("start_time") is None
+ or values.get("end_time") is None
+ or values.get("notification").reminder.reminder_time
+ is None
):
raise UnavailableActivityOrFlowError()
+
+ if values.get("start_time") == values.get("end_time"):
+ raise StartEndTimeEqualError()
+
return values
diff --git a/src/apps/schedule/errors.py b/src/apps/schedule/errors.py
index 058c328f19b..35aba85d5c0 100644
--- a/src/apps/schedule/errors.py
+++ b/src/apps/schedule/errors.py
@@ -10,14 +10,17 @@
class EventNotFoundError(NotFoundError):
+ message_is_template: bool = True
message = _("No such event with {key}={value}.")
class PeriodicityNotFoundError(NotFoundError):
+ message_is_template: bool = True
message = _("No such periodicity with {key}={value}.")
class AppletScheduleNotFoundError(NotFoundError):
+ message_is_template: bool = True
message = _("No schedules found for applet {applet_id}")
@@ -42,16 +45,19 @@ class EventError(InternalServerError):
class UserEventAlreadyExists(ValidationError):
+ message_is_template: bool = True
message = _("The event {event_id} for user {user_id} already exists.")
class ActivityEventAlreadyExists(ValidationError):
+ message_is_template: bool = True
message = _(
"The event {event_id} for activity {activity_id} already exists."
)
class FlowEventAlreadyExists(ValidationError):
+ message_is_template: bool = True
message = _("The event {event_id} for flow {flow_id} already exists.")
@@ -82,5 +88,9 @@ class StartEndTimeAccessBeforeScheduleCaseError(FieldError):
)
+class StartEndTimeEqualError(FieldError):
+ message = _("The start_time and end_time fields can't be equal.")
+
+
class UnavailableActivityOrFlowError(FieldError):
message = _("Activity/flow is unavailable at this time.")
diff --git a/src/apps/schedule/router.py b/src/apps/schedule/router.py
index 7b40b46fc92..d539a5f01fb 100644
--- a/src/apps/schedule/router.py
+++ b/src/apps/schedule/router.py
@@ -10,6 +10,7 @@
schedule_delete_by_id,
schedule_delete_by_user,
schedule_get_all,
+ schedule_get_all_by_respondent_user,
schedule_get_all_by_user,
schedule_get_by_id,
schedule_get_by_user,
@@ -207,3 +208,15 @@
**NO_CONTENT_ERROR_RESPONSES,
},
)(schedule_get_by_user)
+
+user_router.get(
+ "/me/respondent/current_events",
+ response_model=ResponseMulti[PublicEventByUser],
+ status_code=status.HTTP_200_OK,
+ responses={
+ status.HTTP_200_OK: {"model": ResponseMulti[PublicEventByUser]},
+ **AUTHENTICATION_ERROR_RESPONSES,
+ **DEFAULT_OPENAPI_RESPONSE,
+ **NO_CONTENT_ERROR_RESPONSES,
+ },
+)(schedule_get_all_by_respondent_user)
diff --git a/src/apps/schedule/service/schedule.py b/src/apps/schedule/service/schedule.py
index 5d040424aad..7a5a1542a30 100644
--- a/src/apps/schedule/service/schedule.py
+++ b/src/apps/schedule/service/schedule.py
@@ -1,4 +1,6 @@
+import asyncio
import uuid
+from datetime import date
from apps.activities.crud import ActivitiesCRUD
from apps.activity_flows.crud import FlowsCRUD
@@ -336,7 +338,7 @@ async def delete_all_schedules(self, applet_id: uuid.UUID):
event_schemas: list[EventSchema] = await EventCRUD(
self.session
- ).get_all_by_applet_id_with_filter(applet_id, None)
+ ).get_all(applet_id)
event_ids = [event_schema.id for event_schema in event_schemas]
periodicity_ids = [
event_schema.periodicity_id for event_schema in event_schemas
@@ -816,6 +818,73 @@ async def get_events_by_user(
return events
+ async def get_upcoming_events_by_user(
+ self,
+ user_id: uuid.UUID,
+ applet_ids: list[uuid.UUID],
+ min_end_date: date | None = None,
+ max_start_date: date | None = None,
+ ) -> list[PublicEventByUser]:
+ """Get all events for user in applets that user is respondent."""
+ user_events_map, user_event_ids = await EventCRUD(
+ self.session
+ ).get_all_by_applets_and_user(
+ applet_ids=applet_ids,
+ user_id=user_id,
+ min_end_date=min_end_date,
+ max_start_date=max_start_date,
+ )
+ general_events_map, general_event_ids = await EventCRUD(
+ self.session
+ ).get_general_events_by_applets_and_user(
+ applet_ids=applet_ids,
+ user_id=user_id,
+ min_end_date=min_end_date,
+ max_start_date=max_start_date,
+ )
+ full_events_map = self._sum_applets_events_map(
+ user_events_map, general_events_map
+ )
+
+ event_ids = user_event_ids | general_event_ids
+ notifications_map_c = NotificationCRUD(
+ self.session
+ ).get_all_by_event_ids(event_ids)
+ reminders_map_c = ReminderCRUD(self.session).get_by_event_ids(
+ event_ids
+ )
+ notifications_map, reminders_map = await asyncio.gather(
+ notifications_map_c, reminders_map_c
+ )
+
+ events: list[PublicEventByUser] = []
+ for applet_id, all_events in full_events_map.items():
+ events.append(
+ PublicEventByUser(
+ applet_id=applet_id,
+ events=[
+ self._convert_to_dto(
+ event=event,
+ notifications=notifications_map.get(event.id),
+ reminder=reminders_map.get(event.id),
+ )
+ for event in all_events
+ ],
+ )
+ )
+
+ return events
+
+ @staticmethod
+ def _sum_applets_events_map(m1: dict, m2: dict):
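+        """Merge two {applet_id: [EventFull]} maps, concatenating the
+        event lists of applet ids present in both."""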
+ result = dict()
+ for k, v in m1.items():
+ result[k] = v
+ for k, v in m2.items():
+ result.setdefault(k, list())
+ result[k] += v
+ return result
+
def _convert_to_dto(
self,
event: EventFull,
@@ -1102,6 +1171,16 @@ async def import_schedule(
"""Import schedule."""
events = []
for schedule in schedules:
+ if schedule.periodicity.type == PeriodicityType.ALWAYS:
+                # the new event is always available (AA), so delete any
+                # existing AA events for this activity or flow first
+ await self._delete_by_activity_or_flow(
+ applet_id=applet_id,
+ activity_id=schedule.activity_id,
+ flow_id=schedule.flow_id,
+ respondent_id=schedule.respondent_id,
+ only_always_available=True,
+ )
event = await self.create_schedule(
applet_id=applet_id, schedule=schedule
)
@@ -1116,7 +1195,7 @@ async def create_schedule_individual(
# get list of activity ids
activity_ids = []
activities = await ActivitiesCRUD(self.session).get_by_applet_id(
- applet_id
+ applet_id, is_reviewable=False
)
activity_ids = [
activity.id for activity in activities if not activity.is_hidden
@@ -1147,3 +1226,18 @@ async def create_schedule_individual(
applet_id,
QueryParams(filters={"respondent_id": respondent_id}),
)
+
+ async def create_default_schedules_if_not_exist(
+ self,
+ applet_id: uuid.UUID,
+ activity_ids: list[uuid.UUID],
+ ) -> None:
+ """Create default schedules for applet."""
+ activities_without_events = await ActivityEventsCRUD(
+ self.session
+ ).get_missing_events(activity_ids)
+ await self.create_default_schedules(
+ applet_id=applet_id,
+ activity_ids=activities_without_events,
+ is_activity=True,
+ )
diff --git a/src/apps/schedule/tests/test_schedule.py b/src/apps/schedule/tests/test_schedule.py
index 121ce519f92..284ded8bff4 100644
--- a/src/apps/schedule/tests/test_schedule.py
+++ b/src/apps/schedule/tests/test_schedule.py
@@ -27,6 +27,10 @@ class TestSchedule(BaseTest):
schedule_user_url = "users/me/events"
schedule_detail_user_url = f"{schedule_user_url}/{{applet_id}}"
+    respondent_schedules_user_two_weeks_url = (
+ "/users/me/respondent/current_events"
+ )
+
schedule_url = f"{applet_detail_url}/events"
schedule_import_url = f"{applet_detail_url}/events/import"
schedule_create_individual = (
@@ -46,6 +50,46 @@ class TestSchedule(BaseTest):
public_events_url = "public/applets/{key}/events"
+ @rollback
+ async def test_schedule_create_with_equal_start_end_time(self):
+ await self.client.login(
+ self.login_url, "tom@mindlogger.com", "Test1234!"
+ )
+ create_data = {
+ "start_time": "08:00:00",
+ "end_time": "08:00:00",
+ "access_before_schedule": False,
+ "one_time_completion": False,
+ "timer": "00:00:00",
+ "timer_type": "NOT_SET",
+ "periodicity": {
+ "type": "ONCE",
+ "start_date": "2021-09-01",
+ "end_date": "2021-09-01",
+ "selected_date": "2023-09-01",
+ },
+ "respondent_id": None,
+ "activity_id": "09e3dbf0-aefb-4d0e-9177-bdb321bf3611",
+ "flow_id": None,
+ "notification": {
+ "notifications": [
+ {"trigger_type": "FIXED", "at_time": "08:30:00"},
+ ],
+ "reminder": {
+ "activity_incomplete": 1,
+ "reminder_time": "08:30:00",
+ },
+ },
+ }
+
+ response = await self.client.post(
+ self.schedule_url.format(
+ applet_id="92917a56-d586-4613-b7aa-991f2c4b15b1"
+ ),
+ data=create_data,
+ )
+ assert response.status_code == 422
+
@rollback
async def test_schedule_create_with_activity(self):
await self.client.login(
@@ -365,6 +409,66 @@ async def test_schedule_delete_detail(self):
assert response.status_code == 204
+ @rollback
+ async def test_schedule_update_with_equal_start_end_time(self):
+ await self.client.login(
+ self.login_url, "tom@mindlogger.com", "Test1234!"
+ )
+ create_data = {
+ "start_time": "08:00:00",
+ "end_time": "09:00:00",
+ "access_before_schedule": True,
+ "one_time_completion": True,
+ "timer": "00:00:00",
+ "timer_type": "NOT_SET",
+ "periodicity": {
+ "type": "MONTHLY",
+ "start_date": "2021-09-01",
+ "end_date": "2021-09-01",
+ "selected_date": "2023-09-01",
+ },
+ "respondent_id": "7484f34a-3acc-4ee6-8a94-fd7299502fa2",
+ "activity_id": None,
+ "flow_id": "3013dfb1-9202-4577-80f2-ba7450fb5831",
+ "notification": {
+ "notifications": [
+ {"trigger_type": "FIXED", "at_time": "08:30:00"},
+ ],
+ "reminder": {
+ "activity_incomplete": 1,
+ "reminder_time": "08:30:00",
+ },
+ },
+ }
+
+ response = await self.client.post(
+ self.schedule_url.format(
+ applet_id="92917a56-d586-4613-b7aa-991f2c4b15b1"
+ ),
+ data=create_data,
+ )
+ event = response.json()["result"]
+
+ update_data = {
+ "start_time": "00:00:15",
+ "end_time": "00:00:15",
+ "periodicity": {
+ "type": "MONTHLY",
+ "start_date": "2021-09-01",
+ "end_date": "2021-09-01",
+ "selected_date": "2023-09-01",
+ },
+ }
+
+ response = await self.client.put(
+ self.schedule_detail_url.format(
+ applet_id="92917a56-d586-4613-b7aa-991f2c4b15b1",
+ event_id=event["id"],
+ ),
+ data=update_data,
+ )
+ assert response.status_code == 422
+
@rollback
async def test_schedule_update(self):
await self.client.login(
@@ -581,6 +685,80 @@ async def test_schedules_get_user_all(self):
assert response.status_code == 200
assert response.json()["count"] == 6
+ @rollback
+ async def test_respondent_schedules_get_user_two_weeks(self):
+ await self.client.login(
+ self.login_url, "tom@mindlogger.com", "Test1234!"
+ )
+
+ response = await self.client.get(
+            self.respondent_schedules_user_two_weeks_url
+ )
+
+ assert response.status_code == 200
+ assert response.json()["count"] == 2
+
+ data = sorted(response.json()["result"], key=lambda x: x["appletId"])
+        applet_0 = data[0]
+        applet_1 = data[1]
+        assert set(applet_0.keys()) == {
+ "appletId",
+ "events",
+ }
+
+ apppet_0["appletId"] = "92917a56-d586-4613-b7aa-991f2c4b15b1"
+ assert len(apppet_0["events"]) == 3
+ events_data = sorted(apppet_0["events"], key=lambda x: x["id"])
+ assert set(events_data[0].keys()) == {
+ "id",
+ "entityId",
+ "availability",
+ "selectedDate",
+ "timers",
+ "availabilityType",
+ "notificationSettings",
+ }
+ assert set(events_data[0]["availability"].keys()) == {
+ "oneTimeCompletion",
+ "periodicityType",
+ "timeFrom",
+ "timeTo",
+ "allowAccessBeforeFromTime",
+ "startDate",
+ "endDate",
+ }
+ events_data[0]["id"] = "04c93c4a-2cd4-45ce-9aec-b1912f330584"
+ events_data[0]["entityId"] = "09e3dbf0-aefb-4d0e-9177-bdb321bf3612"
+ events_data[1]["id"] = "04c93c4a-2cd4-45ce-9aec-b1912f330583"
+ events_data[1]["entityId"] = "09e3dbf0-aefb-4d0e-9177-bdb321bf3611"
+ events_data[2]["id"] = "04c93c4a-2cd4-45ce-9aec-b1912f330582"
+ events_data[2]["entityId"] = "3013dfb1-9202-4577-80f2-ba7450fb5832"
+
+ apppet_1["appletId"] = "92917a56-d586-4613-b7aa-991f2c4b15b2"
+ assert len(apppet_1["events"]) == 1
+ # events_data = sorted(apppet_1["events"], key=lambda x: x["id"])
+ events_data = apppet_1["events"]
+ assert set(events_data[0].keys()) == {
+ "id",
+ "entityId",
+ "availability",
+ "selectedDate",
+ "timers",
+ "availabilityType",
+ "notificationSettings",
+ }
+ assert set(events_data[0]["availability"].keys()) == {
+ "oneTimeCompletion",
+ "periodicityType",
+ "timeFrom",
+ "timeTo",
+ "allowAccessBeforeFromTime",
+ "startDate",
+ "endDate",
+ }
+ events_data[0]["id"] = "04c93c4a-2cd4-45ce-9aec-b1912f330584"
+ events_data[0]["entityId"] = "09e3dbf0-aefb-4d0e-9177-bdb321bf3612"
+
@rollback
async def test_schedule_get_user_by_applet(self):
await self.client.login(
diff --git a/src/apps/shared/changes_generator.py b/src/apps/shared/changes_generator.py
index 12929e44262..bc0b70956df 100644
--- a/src/apps/shared/changes_generator.py
+++ b/src/apps/shared/changes_generator.py
@@ -1,29 +1,24 @@
-from apps.activities.domain.activity_item_history import (
- ActivityItemHistoryChange,
-)
-from apps.shared.domain.base import to_camelcase
-
-__all__ = ["ChangeTextGenerator", "ChangeGenerator"]
-
"""
Dictionary to generate needed text in one format
"""
_DICTIONARY = dict(
en=dict(
- added='"{0}" is added.',
- removed='"{0}" is removed.',
- changed='"{0}" is changed to "{1}".',
- cleared='"{0}" is cleared.',
- filled='"{0}" is updated to "{1}".',
- updated='"{0}" is updated.',
- changed_dict='For {0} language "{1}" is changed to "{2}".',
- set_to='"{0}" is set to "{1}".',
- set_dict='For {0} language "{1}" is set to "{2}".',
- set_bool='"{0}" option was "{1}".',
+ added="{0} was added",
+ removed="{0} was removed",
+ changed="{0} was changed to {1}",
+ cleared="{0} was cleared",
+ filled="{0} was changed to {1}",
+ updated="{0} was updated",
+ changed_dict="For {0} language {1} was changed to {2}",
+ set_to="{0} was set to {1}",
+ set_dict="For {0} language {1} was set to {2}",
+ set_bool="{0} option was {1}",
+ bool_enabled="{0} was enabled",
+ bool_disabled="{0} was disabled",
)
)
-EMPY_VALUES: tuple = (None, "", 0, dict())
+EMPTY_VALUES: tuple = (None, "", 0, dict(), dict(en=""), [])
class ChangeTextGenerator:
@@ -36,7 +31,7 @@ def __init__(
@classmethod
def is_considered_empty(cls, value) -> bool:
- return value in EMPY_VALUES
+ return value in EMPTY_VALUES
def added_text(self, object_name: str) -> str:
"""
@@ -50,26 +45,25 @@ def removed_text(self, object_name: str) -> str:
"""
return self._dictionary["removed"].format(object_name)
- def changed_text(self, from_, to_) -> str:
- """
- Generates text for value updating.
- """
- return self._dictionary["changed"].format(str(from_), str(to_))
-
- def changed_dict(self, from_, to_) -> str:
- """
- Generates text of dicts for value updating.
- """
- changes = ""
-
- # get all keys from both dicts, in set
- keys = set(from_.keys()) | set(to_.keys())
- for key in keys:
- changes += self._dictionary["changed_dict"].format(
- key, from_.get(key, None), to_.get(key, None)
- )
-
- return changes
+ def changed_text(
+ self,
+ field: str,
+ value: str | dict[str, str] | list[str],
+ is_initial=False,
+ ) -> str:
+ """
+        Generates text for changing a value or, if it is the initial
+        value, setting it.
+ """
+ # We don't support translations yet
+ if isinstance(value, dict):
+ v = list(value.values())[0]
+ elif isinstance(value, list):
+ v = ", ".join(value)
+ else:
+ v = value
+ if is_initial:
+ return self._dictionary["set_to"].format(field, v)
+ return self._dictionary["filled"].format(field, v)
def cleared_text(self, field: str) -> str:
"""
@@ -77,12 +71,6 @@ def cleared_text(self, field: str) -> str:
"""
return self._dictionary["cleared"].format(field)
- def filled_text(self, field: str, value: str) -> str:
- """
- Generates text for setting value.
- """
- return self._dictionary["filled"].format(field, value)
-
def updated_text(self, field: str) -> str:
"""
Generates text for setting value.
@@ -95,599 +83,24 @@ def set_text(self, field: str, value: str) -> str:
"""
return self._dictionary["set_to"].format(field, value)
- def set_dict(self, field, value) -> str:
- """
- Generates text for setting value.
- """
- changes = ""
-
- # get all keys from both dicts, in set
- keys = set(value.keys())
- for key in keys:
- changes += self._dictionary["set_dict"].format(
- key, field, value.get(key, None)
- )
-
- return changes
-
- def set_bool(self, field: str, value: str) -> str:
+ def set_bool(self, field_name: str, value: bool) -> str:
"""
Generates text for setting value.
"""
- return self._dictionary["set_bool"].format(field, value)
+ if value:
+ return self._dictionary["bool_enabled"].format(field_name)
+ return self._dictionary["bool_disabled"].format(field_name)
-class ChangeGenerator:
+class BaseChangeGenerator:
def __init__(self):
self._change_text_generator = ChangeTextGenerator()
- def generate_applet_changes(self, new_applet, old_applet):
- changes = []
- for field, old_value in old_applet.dict().items():
- new_value = getattr(new_applet, field, None)
- if not any([old_value, new_value]):
- continue
- if new_value == old_value:
- continue
- if self._change_text_generator.is_considered_empty(new_value):
- changes.append(
- self._change_text_generator.cleared_text(
- to_camelcase(field)
- ),
- )
- elif self._change_text_generator.is_considered_empty(old_value):
- changes.append(
- self._change_text_generator.filled_text(
- to_camelcase(field), new_value
- ),
- )
- else:
- changes.append(
- self._change_text_generator.changed_text(
- f"Applet {field}", new_value
- )
- if field not in ["about", "description"]
- else f"Applet {to_camelcase(field)} updated: {self._change_text_generator.changed_dict(old_value, new_value)}." # noqa: E501
- )
-
- return changes
-
- def generate_activity_insert(self, new_activity):
- changes = list()
- for field, value in new_activity.dict().items():
- if field == "items":
- continue
- elif field == "name":
- changes.append(
- self._change_text_generator.set_text(
- f"Activity {to_camelcase(field)}", value
- )
- )
- elif field in [
- "id",
- "created_at",
- "id_version",
- "applet_id",
- ]:
- continue
- elif field in [
- "scores_and_reports",
- "subscale_setting",
- ]:
- if field == "scores_and_reports":
- if value:
- for key, val in value.items():
- if key in [
- "generate_report",
- "show_score_summary",
- ]:
- changes.append(
- self._change_text_generator.set_bool(
- f"Activity {to_camelcase(key)}",
- "enabled" if val else "disabled",
- )
- )
- elif key == "reports":
- for rep in val:
- text = ""
- if rep["type"] == "score":
- text = f"Activity score {rep['name']}"
- elif rep["type"] == "section":
- text = (
- f"Activity section {rep['name']}"
- )
- if text == "":
- continue
- self._change_text_generator.added_text(
- text
- )
-
- elif field == "subscale_setting":
- if value:
- for key, val in value.items():
- if key == "subscales":
- for v in val:
- changes.append(
- self._change_text_generator.added_text(
- f'Activity subscale {v["name"]}'
- )
- )
- elif key == "total_scores_table_data":
- changes.append(
- self._change_text_generator.added_text(
- f"Activity subscale {to_camelcase(key)}" # noqa: E501
- )
- )
-
- elif key == "calculate_total_score":
- changes.append(
- self._change_text_generator.set_text(
- f"Activity subscale {to_camelcase(key)}", # noqa: E501
- val,
- )
- )
-
- elif type(value) == bool:
- changes.append(
- self._change_text_generator.set_bool(
- f"Activity {to_camelcase(field)}",
- "enabled" if value else "disabled",
- ),
- )
- else:
- if value:
- changes.append(
- self._change_text_generator.set_text(
- f"Activity {to_camelcase(field)}", value
- )
- if field not in ["description"]
- else self._change_text_generator.set_dict(
- f"Activity {to_camelcase(field)}", value
- ),
- )
- return changes
-
- def generate_activity_update(self, new_activity, old_activity):
- changes = list()
-
- for field, value in new_activity.dict().items():
- old_value = getattr(old_activity, field, None)
- if field == "items":
- continue
- elif field in [
- "id",
- "created_at",
- "id_version",
- "applet_id",
- ]:
- continue
- elif field in [
- "scores_and_reports",
- "subscale_setting",
- ]:
- if field == "scores_and_reports":
- if value and value != old_value:
- for key, val in value.items():
- old_val = getattr(old_activity, key, None)
- if key in [
- "generate_report",
- "show_score_summary",
- ]:
- changes.append(
- self._change_text_generator.set_bool(
- f"Activity {to_camelcase(key)}",
- "enabled" if val else "disabled",
- )
- )
- elif key == "scores":
- if val:
- old_names = []
- if old_val:
- old_names = [
- old_v.name for old_v in old_val
- ]
- new_names = [v["name"] for v in val]
- deleted_names = list(
- set(old_names) - set(new_names)
- )
- for k, v in enumerate(val):
- if v["name"] not in old_names:
- changes.append(
- self._change_text_generator.added_text( # noqa: E501
- f'Activity score {v["name"]}' # noqa: E501
- )
- )
- else:
- if (
- getattr(
- old_val, k, None
- ).dict()
- != v.dict()
- ):
- changes.append(
- self._change_text_generator.changed_text( # noqa: E501
- f'Activity score {v["name"]}' # noqa: E501
- )
- )
-
- if deleted_names:
- changes.append(
- self._change_text_generator.removed_text( # noqa: E501
- f'Activity scores {", ".join(deleted_names)}' # noqa: E501
- )
- )
- else:
- if old_val:
- changes.append(
- self._change_text_generator.removed_text( # noqa: E501
- "Activity scores"
- )
- )
-
- elif key == "sections":
- if val:
- old_names = []
- if old_val:
- old_names = [
- old_v.name for old_v in old_val
- ]
- new_names = [v["name"] for v in val]
- deleted_names = list(
- set(old_names) - set(new_names)
- )
-
- for k, v in enumerate(val):
- if v["name"] not in old_names:
- changes.append(
- self._change_text_generator.added_text( # noqa: E501
- f'Activity section {v["name"]}' # noqa: E501
- )
- )
- else:
- if (
- getattr(
- old_val, k, None
- ).dict()
- != v.dict()
- ):
- changes.append(
- self._change_text_generator.changed_text( # noqa: E501
- f'Activity section {v["name"]}' # noqa: E501
- )
- )
-
- if deleted_names:
- changes.append(
- self._change_text_generator.removed_text( # noqa: E501
- f'Activity section {", ".join(deleted_names)}' # noqa: E501
- )
- )
- else:
- if old_val:
- changes.append(
- self._change_text_generator.removed_text( # noqa: E501
- "Activity sections"
- )
- )
- else:
- if old_value:
- changes.append(
- self._change_text_generator.removed_text(
- f"Activity {to_camelcase(field)}"
- )
- )
- elif field == "subscale_setting":
- if value and value != old_value:
- for key, val in value.items():
- old_val = getattr(old_activity, key, None)
-
- if key == "subscales":
- if val:
- old_names = []
- if old_val:
- old_names = [
- old_v.name for old_v in old_val
- ]
- new_names = [v["name"] for v in val]
- deleted_names = list(
- set(old_names) - set(new_names)
- )
- for k, v in enumerate(val):
- if v["name"] not in old_names:
- changes.append(
- self._change_text_generator.added_text( # noqa: E501
- f'Activity subscale {v["name"]}' # noqa: E501
- )
- )
- else:
- if (
- getattr(
- old_val, k, None
- ).dict()
- != v.dict()
- ):
- changes.append(
- self._change_text_generator.changed_text( # noqa: E501
- f'Activity subscale {v["name"]}' # noqa: E501
- )
- )
-
- if deleted_names:
- changes.append(
- self._change_text_generator.removed_text( # noqa: E501
- f'Activity subscale {", ".join(deleted_names)}' # noqa: E501
- )
- )
- else:
- if old_val:
- changes.append(
- self._change_text_generator.removed_text( # noqa: E501
- "Activity subscales"
- )
- )
- else:
- if val != old_val:
- if val and not old_val:
- changes.append(
- self._change_text_generator.set_text( # noqa: E501
- f"Activity subscale {to_camelcase(key)}", # noqa: E501
- val,
- )
- )
- elif not val and old_val:
- changes.append(
- self._change_text_generator.removed_text( # noqa: E501
- f"Activity subscale {to_camelcase(key)}" # noqa: E501
- )
- )
- else:
- changes.append(
- self._change_text_generator.changed_text( # noqa: E501
- f"Activity subscale {to_camelcase(key)}", # noqa: E501
- val,
- )
- )
- else:
- if old_value:
- changes.append(
- self._change_text_generator.removed_text(
- f"Activity {to_camelcase(field)}"
- )
- )
-
- elif type(value) == bool:
- if value and value != old_value:
- changes.append(
- self._change_text_generator.set_bool(
- f"Activity {to_camelcase(field)}",
- "enabled" if value else "disabled",
- ),
- )
- else:
- if value != old_value:
- if field == "description":
- desc_change = f"Activity {to_camelcase(field)} updated: {self._change_text_generator.changed_dict(old_value, value)}." # noqa: E501
- changes.append(desc_change)
-
- else:
- changes.append(
- self._change_text_generator.changed_text(
- f"Activity {to_camelcase(field)}", value
- )
- )
- return changes, bool(changes)
-
- def generate_activity_items_insert(self, items):
- change_items = []
- for item in items:
- change = ActivityItemHistoryChange(
- name=self._change_text_generator.added_text(
- f"Item {item.name}"
- )
- )
- changes = []
- for field, value in item.dict().items():
- if field == "name":
- changes.append(
- self._change_text_generator.set_text(
- f"Item {to_camelcase(field)}", value
- )
- )
- elif field in [
- "id",
- "created_at",
- "id_version",
- "activity_id",
- ]:
- continue
- elif type(value) == bool:
- changes.append(
- self._change_text_generator.set_bool(
- f"Item {to_camelcase(field)}",
- "enabled" if value else "disabled",
- ),
- )
-
- elif field in [
- "response_values",
- "config",
- "conditional_logic",
- ]:
- if field == "response_values":
- if value:
- changes.append(
- self._change_text_generator.added_text(
- f"Item {field}"
- )
- )
- elif field == "config":
- if value:
- for key, val in value.items():
- if type(val) == bool:
- changes.append(
- self._change_text_generator.set_bool(
- f"Item {to_camelcase(key)}",
- "enabled" if val else "disabled",
- )
- )
-
- elif type(val) == dict:
- for k, v in val.items():
- if type(v) == bool:
- changes.append(
- self._change_text_generator.set_bool( # noqa: E501
- f"Item {to_camelcase(k)}",
- "enabled"
- if v
- else "disabled",
- )
- )
- else:
- changes.append(
- self._change_text_generator.added_text( # noqa: E501
- f"Item {to_camelcase(k)}",
- )
- )
- else:
- changes.append(
- self._change_text_generator.added_text(
- f"Item {to_camelcase(key)}"
- )
- )
-
- else:
- if value:
- changes.append(
- self._change_text_generator.set_text(
- f"Item {to_camelcase(field)}", value
- )
- if field not in ["question"]
- else self._change_text_generator.set_dict(
- f"Item {to_camelcase(field)}", value
- ),
- )
-
- change.changes = changes
- change_items.append(change)
-
- return change_items
-
- def generate_activity_items_update(self, item_groups):
- change_items = []
-
- for _, (prev_item, new_item) in item_groups.items():
- if not prev_item and new_item:
- change_items.extend(
- self.generate_activity_items_insert(
- [
- new_item,
- ]
- )
- )
- elif not new_item and prev_item:
- change_items.append(
- ActivityItemHistoryChange(
- name=self._change_text_generator.removed_text(
- f"Item {prev_item.name}"
- )
- )
- )
- elif new_item and prev_item:
- changes, has_changes = self._generate_activity_item_update(
- new_item, prev_item
- )
- if has_changes:
- change_items.append(
- ActivityItemHistoryChange(
- name=self._change_text_generator.updated_text(
- f"Item {new_item.name}",
- ),
- changes=changes,
- )
- )
-
- return change_items, bool(change_items)
-
- def _generate_activity_item_update(self, new_item, prev_item):
- changes = list()
-
- for field, value in new_item.dict().items():
- old_value = getattr(prev_item, field, None)
- if field in [
- "id",
- "created_at",
- "id_version",
- "activity_id",
- ]:
- continue
- elif type(value) == bool:
- if value and value != old_value:
- changes.append(
- self._change_text_generator.set_bool(
- f"Item {to_camelcase(field)}",
- "enabled" if value else "disabled",
- ),
- )
-
- elif field in [
- "response_values",
- "config",
- "conditional_logic",
- ]:
- if field == "response_values":
- if value and value != old_value:
- changes.append(
- self._change_text_generator.added_text(
- f"Item {field}"
- )
- )
- elif field == "config":
- if value and value != old_value:
- for key, val in value.items():
- old_val = getattr(old_value, key, None)
- if val != old_val:
- if type(val) == bool:
- changes.append(
- self._change_text_generator.set_bool(
- f"Item {to_camelcase(key)}",
- "enabled" if val else "disabled",
- )
- )
-
- elif type(val) == dict:
- for k, v in val.items():
- old_v = getattr(old_val, k, None)
- if v != old_v:
- if type(v) == bool:
- changes.append(
- self._change_text_generator.set_bool( # noqa: E501
- f"Item {to_camelcase(k)}", # noqa: E501
- "enabled"
- if v
- else "disabled",
- )
- )
- else:
- changes.append(
- self._change_text_generator.added_text( # noqa: E501
- f"Item {to_camelcase(k)}", # noqa: E501
- )
- )
- else:
- changes.append(
- self._change_text_generator.added_text(
- f"Item {to_camelcase(key)}"
- )
- )
-
- else:
- if value and value != old_value:
- changes.append(
- self._change_text_generator.changed_text(
- f"Item {to_camelcase(field)}", value
- )
- if field not in ["question"]
- else f"Item {to_camelcase(field)} updated: {self._change_text_generator.changed_dict(old_value, value)}." # noqa: E501
- )
-
- return changes, bool(changes)
+ def _populate_bool_changes(
+ self, field_name: str, value: bool, changes: list[str]
+ ) -> None:
+        # the schema stores "hidden" flags, but the UI presents them as
+        # visibility, so invert the value when the field name mentions it
+ if "Visibility" in field_name:
+ value = not value
+ changes.append(self._change_text_generator.set_bool(field_name, value))
diff --git a/src/apps/shared/commands/__init__.py b/src/apps/shared/commands/__init__.py
new file mode 100644
index 00000000000..aec44ee4476
--- /dev/null
+++ b/src/apps/shared/commands/__init__.py
@@ -0,0 +1 @@
+from apps.shared.commands.patch_commands import app as patch # noqa: F401
diff --git a/src/apps/shared/commands/domain.py b/src/apps/shared/commands/domain.py
new file mode 100644
index 00000000000..04f8f448504
--- /dev/null
+++ b/src/apps/shared/commands/domain.py
@@ -0,0 +1,19 @@
+import os
+
+from pydantic import validator
+
+from apps.shared.domain import InternalModel
+
+
+class Patch(InternalModel):
+ file_path: str
+ task_id: str
+ description: str
+ manage_session: bool
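+    # When True, the patch's main() receives session makers and manages
+    # transactions itself; otherwise it receives ready-to-use sessions
+    # (see exec_patch in patch_commands.py).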
+
+ @validator("file_path")
+    def validate_file_existence(cls, v):
+ path = os.path.join(os.path.dirname(__file__), "patches", v)
+ if not os.path.exists(path):
+ raise ValueError("File does not exist")
+ return v
diff --git a/src/apps/shared/commands/patch.py b/src/apps/shared/commands/patch.py
new file mode 100644
index 00000000000..3360cf35b2c
--- /dev/null
+++ b/src/apps/shared/commands/patch.py
@@ -0,0 +1,44 @@
+from apps.shared.commands.domain import Patch
+
+
+class PatchRegister:
+ patches: list[Patch] | None = None
+
+    @classmethod
+    def register(
+        cls,
+        file_path: str,
+        task_id: str,
+        description: str,
+        manage_session: bool,
+    ):
+        cls.patches = cls.patches or []
+        # check if a patch with this task_id is already registered
+        found_patch = next(
+            (p for p in cls.patches if p.task_id == task_id), None
+        )
+        if found_patch:
+            raise ValueError(f"Patch with task_id {task_id} already exists")
+        cls.patches.append(
+            Patch(
+                file_path=file_path,
+                task_id=task_id,
+                description=description,
+                manage_session=manage_session,
+            )
+        )
+
+    @classmethod
+    def get_all(cls) -> list[Patch]:
+        return cls.patches or []
+
+    @classmethod
+    def get_by_task_id(cls, task_id: str) -> Patch | None:
+        if not cls.patches:
+            return None
+        # find the patch matching task_id
+        return next(
+            (p for p in cls.patches if p.task_id == task_id), None
+        )
diff --git a/src/apps/shared/commands/patch_commands.py b/src/apps/shared/commands/patch_commands.py
new file mode 100644
index 00000000000..8da68ce109c
--- /dev/null
+++ b/src/apps/shared/commands/patch_commands.py
@@ -0,0 +1,177 @@
+import asyncio
+import importlib
+import uuid
+from functools import wraps
+from pathlib import Path
+from typing import Optional
+
+import typer
+from rich import print
+from rich.style import Style
+from rich.table import Table
+
+from apps.shared.commands.domain import Patch
+from apps.shared.commands.patch import PatchRegister
+from apps.workspaces.errors import WorkspaceNotFoundError
+from apps.workspaces.service.workspace import WorkspaceService
+from infrastructure.database import atomic, session_manager
+
+PatchRegister.register(
+ file_path="slider_tickmark_label.py",
+ task_id="M2-3781",
+ description="Slider tick marks and labels fix patch",
+ manage_session=False,
+)
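+# Example usage, assuming this typer app is mounted as `patch` in the
+# project's CLI entrypoint (invocation shown is hypothetical):
+#   <cli> patch show
+#   <cli> patch exec M2-3781 --owner-id <workspace owner uuid>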
+
+
+app = typer.Typer()
+
+
+def coro(f):
+ @wraps(f)
+ def wrapper(*args, **kwargs):
+ return asyncio.run(f(*args, **kwargs))
+
+ return wrapper
+
+
+def print_data_table(data: list[Patch]):
+ table = Table(
+ "Task ID",
+ "Description",
+ "Manage session inside patch",
+ show_header=True,
+ title="Patches",
+ title_style=Style(bold=True),
+ )
+
+ for patch in data:
+ table.add_row(
+ f"[bold]{patch.task_id}[bold]",
+ str(patch.description),
+ str(patch.manage_session),
+ )
+ print(table)
+
+
+def wrap_error_msg(msg):
+ return f"[bold red]Error: \n{msg}[/bold red]"
+
+
+@app.command(short_help="Show list of registered patches.")
+@coro
+async def show():
+ data = PatchRegister.get_all()
+ if not data:
+ print("[bold green]Patches not registered[/bold green]")
+ return
+ print_data_table(data)
+
+
+@app.command(short_help="Execute registered patch.")
+@coro
+async def exec(
+ task_id: str = typer.Argument(..., help="Patch task id"),
+ owner_id: Optional[uuid.UUID] = typer.Option(
+ None,
+ "--owner-id",
+ "-o",
+ help="Workspace owner id",
+ ),
+):
+ patch = PatchRegister.get_by_task_id(task_id)
+ if not patch:
+ print(wrap_error_msg("Patch not registered"))
+ else:
+ await exec_patch(patch, owner_id)
+
+ return
+
+
+async def exec_patch(patch: Patch, owner_id: Optional[uuid.UUID]):
+ session_maker = session_manager.get_session()
+ arbitrary = None
+ try:
+ async with session_maker() as session:
+ async with atomic(session):
+ if owner_id:
+ try:
+ arbitrary = await WorkspaceService(
+ session, owner_id
+ ).get_arbitrary_info_by_owner_id(owner_id)
+ if not arbitrary:
+ raise WorkspaceNotFoundError("Workspace not found")
+
+ except WorkspaceNotFoundError as e:
+ print(wrap_error_msg(e))
+ raise
+ finally:
+ await session_maker.remove()
+
+ arbitrary_session_maker = None
+ if arbitrary:
+ arbitrary_session_maker = session_manager.get_session(
+ arbitrary.database_uri
+ )
+
+ session_maker = session_manager.get_session()
+
+ if patch.file_path.endswith(".sql"):
+ # execute sql file
+ try:
+ async with session_maker() as session:
+ async with atomic(session):
+ try:
+ with open(
+ (
+ str(Path(__file__).parent.resolve())
+ + "/patches/"
+ + patch.file_path
+ ),
+ "r",
+ ) as f:
+ sql = f.read()
+ await session.execute(sql)
+ await session.commit()
+ print(
+ f"[bold green]Patch {patch.task_id} executed[/bold green]" # noqa: E501
+ )
+ return
+ except Exception as e:
+ print(wrap_error_msg(e))
+ finally:
+ await session_maker.remove()
+ elif patch.file_path.endswith(".py"):
+ try:
+ # run main from the file
+ patch_file = importlib.import_module(
+ str(__package__)
+ + ".patches."
+ + patch.file_path.replace(".py", ""),
+ )
+
+ # if manage_session is True, pass sessions to patch_file main
+ if patch.manage_session:
+ await patch_file.main(session_maker, arbitrary_session_maker)
+ else:
+ try:
+ async with session_maker() as session:
+ async with atomic(session):
+ if arbitrary_session_maker:
+ async with arbitrary_session_maker() as arbitrary_session: # noqa: E501
+ async with atomic(arbitrary_session):
+ await patch_file.main(
+ session, arbitrary_session
+ )
+ else:
+ await patch_file.main(session)
+ finally:
+ await session_maker.remove()
+ if arbitrary_session_maker:
+ await arbitrary_session_maker.remove()
+
+ print(
+ f"[bold green]Patch {patch.task_id} executed[/bold green]" # noqa: E501
+ )
+ except Exception as e:
+ print(wrap_error_msg(e))
diff --git a/src/apps/shared/commands/patches/__init__.py b/src/apps/shared/commands/patches/__init__.py
new file mode 100644
index 00000000000..3abe530ad3e
--- /dev/null
+++ b/src/apps/shared/commands/patches/__init__.py
@@ -0,0 +1 @@
+from apps.shared.commands.patches.slider_tickmark_label import * # noqa: F401 F403 E501
diff --git a/src/apps/shared/commands/patches/sample.sql b/src/apps/shared/commands/patches/sample.sql
new file mode 100644
index 00000000000..e8c2e440887
--- /dev/null
+++ b/src/apps/shared/commands/patches/sample.sql
@@ -0,0 +1 @@
+update public.invitations set is_deleted = True where is_deleted = True;
\ No newline at end of file
diff --git a/src/apps/shared/commands/patches/sample_arbitrary.py b/src/apps/shared/commands/patches/sample_arbitrary.py
new file mode 100644
index 00000000000..2d115f53acb
--- /dev/null
+++ b/src/apps/shared/commands/patches/sample_arbitrary.py
@@ -0,0 +1,7 @@
+from sqlalchemy.ext.asyncio import AsyncSession
+
+
+async def main(
+ session: AsyncSession, arbitrary_session: AsyncSession, *args, **kwargs
+):
+ pass
diff --git a/src/apps/shared/commands/patches/sample_manage_session_arbitrary.py b/src/apps/shared/commands/patches/sample_manage_session_arbitrary.py
new file mode 100644
index 00000000000..d6342b3d25e
--- /dev/null
+++ b/src/apps/shared/commands/patches/sample_manage_session_arbitrary.py
@@ -0,0 +1,19 @@
+from infrastructure.database import atomic
+
+
+async def main(session_maker, arbitrary_session_maker, *args, **kwargs):
+ try:
+ async with session_maker() as session:
+ async with atomic(session):
+ pass
+
+ finally:
+ await session_maker.remove()
+
+ if arbitrary_session_maker is not None:
+ try:
+ async with arbitrary_session_maker() as arb_session:
+ async with atomic(arb_session):
+ pass
+ finally:
+ await arbitrary_session_maker.remove()
diff --git a/src/apps/shared/commands/patches/slider_tickmark_label.py b/src/apps/shared/commands/patches/slider_tickmark_label.py
new file mode 100644
index 00000000000..f93f37c5294
--- /dev/null
+++ b/src/apps/shared/commands/patches/slider_tickmark_label.py
@@ -0,0 +1,33 @@
+import uuid
+
+from sqlalchemy import select, update
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy.orm import Query
+
+from apps.activities.db.schemas import ActivityItemSchema, ActivitySchema
+from apps.activities.domain.response_type_config import ResponseType
+
+
+async def main(session: AsyncSession, *args, **kwargs):
+ query: Query = select(ActivityItemSchema)
+ query = query.join(
+ ActivitySchema, ActivityItemSchema.activity_id == ActivitySchema.id
+ )
+ query = query.where(
+ ActivitySchema.applet_id
+ == uuid.UUID("62d06045-acd3-5a10-54f1-06f600000000")
+ )
+ query = query.where(
+ ActivityItemSchema.response_type == ResponseType.SLIDER
+ )
+ res = await session.execute(query)
+ slider_items: list[ActivityItemSchema] = res.scalars().all()
+    for item in slider_items:
+        item.config["show_tick_marks"] = True
+        item.config["show_tick_labels"] = True
+        # persist the updated config for every slider item, not only the
+        # last one iterated
+        await session.execute(
+            update(ActivityItemSchema)
+            .where(ActivityItemSchema.id == item.id)
+            .values(config=item.config)
+        )
diff --git a/src/apps/shared/exception.py b/src/apps/shared/exception.py
index 3f3e12efab6..696032664a4 100644
--- a/src/apps/shared/exception.py
+++ b/src/apps/shared/exception.py
@@ -15,18 +15,24 @@ class ExceptionTypes(str, Enum):
class BaseError(Exception):
+ message_is_template: bool = False
message = _("Oops, something went wrong.")
fallback_language = Language.ENGLISH
status_code = status.HTTP_500_INTERNAL_SERVER_ERROR
type = ExceptionTypes.UNDEFINED
- def __init__(self, **kwargs):
+ def __init__(self, *args, **kwargs):
self.kwargs = kwargs
+ self.updated_message = None
+        if args and not self.message_is_template:
+ self.updated_message = args[0]
super().__init__(self.message.format(**kwargs))
@property
def error(self):
+ if self.updated_message:
+ return self.updated_message
return _(self.message).format(**self.kwargs)
diff --git a/src/apps/shared/test/client.py b/src/apps/shared/test/client.py
index a1be21aa9e7..7f9fc1ccde7 100644
--- a/src/apps/shared/test/client.py
+++ b/src/apps/shared/test/client.py
@@ -30,7 +30,7 @@ def _get_updated_headers(self, headers: dict | None = None) -> dict:
@staticmethod
def _get_body(data: dict | None = None):
if data:
- return json.dumps(data)
+ return json.dumps(data, default=str)
return None
async def post(
diff --git a/src/apps/test_data/service.py b/src/apps/test_data/service.py
index d9c7a6815af..eb7bb5060cb 100644
--- a/src/apps/test_data/service.py
+++ b/src/apps/test_data/service.py
@@ -230,6 +230,7 @@ def _generate_response_value_config(self, type_: ResponseType):
"tooltip": None,
"is_hidden": False,
"color": None,
+ "value": 0,
},
{
"id": str(uuid.uuid4()),
@@ -239,6 +240,7 @@ def _generate_response_value_config(self, type_: ResponseType):
"tooltip": None,
"is_hidden": False,
"color": None,
+ "value": 1,
},
]
}
@@ -269,6 +271,7 @@ def _generate_response_value_config(self, type_: ResponseType):
"tooltip": None,
"is_hidden": False,
"color": None,
+ "value": 0,
},
{
"id": str(uuid.uuid4()),
@@ -278,6 +281,7 @@ def _generate_response_value_config(self, type_: ResponseType):
"tooltip": None,
"is_hidden": False,
"color": None,
+ "value": 1,
},
]
}
diff --git a/src/apps/test_data/test_data.py b/src/apps/test_data/test_data.py
index 8f67a87877b..146c09b12b2 100644
--- a/src/apps/test_data/test_data.py
+++ b/src/apps/test_data/test_data.py
@@ -1,7 +1,5 @@
import uuid
-import pytest
-
from apps.shared.test import BaseTest
from infrastructure.database import rollback
@@ -10,6 +8,7 @@ class TestData(BaseTest):
fixtures = [
"users/fixtures/users.json",
"folders/fixtures/folders.json",
+ "themes/fixtures/themes.json",
]
login_url = "/auth/login"
@@ -17,7 +16,6 @@ class TestData(BaseTest):
generate_applet_url = f"{generating_url}/generate_applet"
applet_list_url = "applets"
- @pytest.mark.skip
@rollback
async def test_generate_applet(self):
await self.client.login(
diff --git a/src/apps/themes/db/schemas.py b/src/apps/themes/db/schemas.py
index 1522f4f0b38..7b8538692de 100644
--- a/src/apps/themes/db/schemas.py
+++ b/src/apps/themes/db/schemas.py
@@ -14,8 +14,6 @@ class ThemeSchema(Base):
tertiary_color = Column(String(length=100))
public = Column(Boolean(), default=False)
allow_rename = Column(Boolean(), default=False)
- creator_id = Column(
- ForeignKey("users.id", ondelete="RESTRICT"), nullable=False
- )
+ creator_id = Column(ForeignKey("users.id", ondelete="RESTRICT"))
small_logo = Column(Text())
is_default = Column(Boolean(), default=False, nullable=False)
diff --git a/src/apps/themes/domain.py b/src/apps/themes/domain.py
index 06c4483a161..aa2434ac537 100644
--- a/src/apps/themes/domain.py
+++ b/src/apps/themes/domain.py
@@ -68,7 +68,7 @@ def validate_color(cls, value):
class Theme(ThemeBase, InternalModel):
id: uuid.UUID
- creator_id: uuid.UUID
+ creator_id: uuid.UUID | None
public: bool
allow_rename: bool
@@ -79,6 +79,52 @@ class PublicTheme(ThemeBase, PublicModel):
allow_rename: bool
+class PublicThemeMobile(PublicModel):
+ id: uuid.UUID
+ name: str = Field(
+ ...,
+ description="Name of the theme",
+ example="My theme",
+ max_length=100,
+ )
+ logo: str | None = Field(
+ ...,
+ description="URL to logo image",
+ example="https://example.com/logo.png",
+ )
+ background_image: str | None = Field(
+ ...,
+ description="URL to background image",
+ example="https://example.com/background.png",
+ )
+ primary_color: Color = Field(
+ ...,
+ description="Primary color",
+ example="#FFFFFF",
+ )
+ secondary_color: Color = Field(
+ ...,
+ description="Secondary color",
+ example="#FFFFFF",
+ )
+ tertiary_color: Color = Field(
+ ...,
+ description="Tertiary color",
+ example="#FFFFFF",
+ )
+
+ def __str__(self) -> str:
+ return self.name
+
+ @validator("logo", "background_image")
+ def validate_image(cls, value):
+ return validate_image(value) if value else value
+
+ @validator("primary_color", "secondary_color", "tertiary_color")
+ def validate_color(cls, value):
+ return validate_color(value) if value else value
+
+
class ThemeRequest(ThemeBase, PublicModel):
pass
diff --git a/src/apps/themes/errors.py b/src/apps/themes/errors.py
index 646302779bd..3436f056ee2 100644
--- a/src/apps/themes/errors.py
+++ b/src/apps/themes/errors.py
@@ -8,6 +8,7 @@
class ThemeNotFoundError(NotFoundError):
+ message_is_template: bool = True
message = _("No such theme with {key}={value}.")
diff --git a/src/apps/transfer_ownership/constants.py b/src/apps/transfer_ownership/constants.py
new file mode 100644
index 00000000000..4f9d445c0ac
--- /dev/null
+++ b/src/apps/transfer_ownership/constants.py
@@ -0,0 +1,7 @@
+from enum import Enum
+
+
+class TransferOwnershipStatus(str, Enum):
+ PENDING = "pending"
+ APPROVED = "approved"
+ DECLINED = "declined"
diff --git a/src/apps/transfer_ownership/crud.py b/src/apps/transfer_ownership/crud.py
index 618b06d238d..feea6c6d2aa 100644
--- a/src/apps/transfer_ownership/crud.py
+++ b/src/apps/transfer_ownership/crud.py
@@ -1,7 +1,8 @@
import uuid
-from sqlalchemy import delete
+from sqlalchemy import select, update
+from apps.transfer_ownership.constants import TransferOwnershipStatus
from apps.transfer_ownership.db.schemas import TransferSchema
from apps.transfer_ownership.domain import Transfer
from apps.transfer_ownership.errors import TransferNotFoundError
@@ -17,17 +18,37 @@ async def create(self, transfer: Transfer) -> TransferSchema:
return await self._create(TransferSchema(**transfer.dict()))
async def get_by_key(self, key: uuid.UUID) -> TransferSchema:
- if not (instance := await self._get(key="key", value=key)):
+ query = select(self.schema_class)
+ query = query.where(self.schema_class.key == key)
+ query = query.where(
+ self.schema_class.status == TransferOwnershipStatus.PENDING
+ )
+ result = await self._execute(query)
+ instance = result.scalars().first()
+ if not instance:
raise TransferNotFoundError()
return instance
- async def delete_all_by_applet_id(self, applet_id: uuid.UUID) -> None:
- query = delete(self.schema_class)
+ async def decline_all_pending_by_applet_id(
+ self, applet_id: uuid.UUID
+ ) -> None:
+ query = update(self.schema_class)
query = query.where(TransferSchema.applet_id == applet_id)
+ query = query.where(
+ self.schema_class.status == TransferOwnershipStatus.PENDING
+ )
+ query = query.values(status=TransferOwnershipStatus.DECLINED)
+ await self._execute(query)
+
+ async def decline_by_key(self, key: uuid.UUID) -> None:
+ query = update(self.schema_class)
+ query = query.where(self.schema_class.key == key)
+ query = query.values(status=TransferOwnershipStatus.DECLINED)
await self._execute(query)
- async def delete_by_key(self, key: uuid.UUID) -> None:
- query = delete(self.schema_class)
+ async def approve_by_key(self, key: uuid.UUID) -> None:
+ query = update(self.schema_class)
query = query.where(self.schema_class.key == key)
+ query = query.values(status=TransferOwnershipStatus.APPROVED)
await self._execute(query)
diff --git a/src/apps/transfer_ownership/db/schemas.py b/src/apps/transfer_ownership/db/schemas.py
index d09a9f13c77..de74a280bc6 100644
--- a/src/apps/transfer_ownership/db/schemas.py
+++ b/src/apps/transfer_ownership/db/schemas.py
@@ -1,14 +1,18 @@
-from sqlalchemy import Column, ForeignKey, String
+from sqlalchemy import Column, ForeignKey, String, Unicode
from sqlalchemy.dialects.postgresql import UUID
+from sqlalchemy_utils import StringEncryptedType
+from apps.shared.encryption import get_key
+from apps.transfer_ownership.constants import TransferOwnershipStatus
from infrastructure.database import Base
class TransferSchema(Base):
__tablename__ = "transfer_ownership"
- email = Column(String())
+ email = Column(StringEncryptedType(Unicode, get_key))
applet_id = Column(
ForeignKey("applets.id", ondelete="RESTRICT"), nullable=False
)
key = Column(UUID(as_uuid=True))
+ status = Column(String(), server_default=TransferOwnershipStatus.PENDING)
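
StringEncryptedType encrypts at the column boundary: values are ciphered in process_bind_param on the way in and deciphered in process_result_value on the way out, which is why the transfers fixture further down stores an opaque base64 token instead of lucy@gmail.com. A standalone sketch with a dummy key (the real key comes from apps.shared.encryption.get_key):

    from sqlalchemy import Unicode
    from sqlalchemy_utils import StringEncryptedType

    col_type = StringEncryptedType(Unicode, lambda: "0" * 32)  # dummy key
    token = col_type.process_bind_param("lucy@gmail.com", dialect=None)
    assert token != "lucy@gmail.com"  # stored form is ciphertext
    assert col_type.process_result_value(token, dialect=None) == "lucy@gmail.com"
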
diff --git a/src/apps/transfer_ownership/domain.py b/src/apps/transfer_ownership/domain.py
index a90e86c86cc..15ac4d58412 100644
--- a/src/apps/transfer_ownership/domain.py
+++ b/src/apps/transfer_ownership/domain.py
@@ -3,6 +3,7 @@
from pydantic import EmailStr
from apps.shared.domain import InternalModel
+from apps.transfer_ownership.constants import TransferOwnershipStatus
__all__ = [
"Transfer",
@@ -16,6 +17,7 @@ class Transfer(InternalModel):
email: EmailStr
applet_id: uuid.UUID
key: uuid.UUID
+ status: TransferOwnershipStatus
class InitiateTransfer(InternalModel):
diff --git a/src/apps/transfer_ownership/fixtures/transfers.json b/src/apps/transfer_ownership/fixtures/transfers.json
index e71bbd7fe76..1c4f632d101 100644
--- a/src/apps/transfer_ownership/fixtures/transfers.json
+++ b/src/apps/transfer_ownership/fixtures/transfers.json
@@ -6,9 +6,12 @@
"created_at": "2023-01-05T15:49:51.752113",
"updated_at": "2023-01-05T15:49:51.752113",
"is_deleted": false,
- "email": "lucy@gmail.com",
+ "email": "VwkVdPjpEtq4bS35CpEtwg==",
"applet_id": "92917a56-d586-4613-b7aa-991f2c4b15b1",
"key": "6a3ab8e6-f2fa-49ae-b2db-197136677da7"
+ },
+ "note": {
+ "plain_email": "lucy@gmail.com"
}
}
]
\ No newline at end of file
diff --git a/src/apps/transfer_ownership/service.py b/src/apps/transfer_ownership/service.py
index 070f903f00b..01b3043c543 100644
--- a/src/apps/transfer_ownership/service.py
+++ b/src/apps/transfer_ownership/service.py
@@ -8,6 +8,7 @@
from apps.invitations.services import InvitationsService
from apps.mailing.domain import MessageSchema
from apps.mailing.services import MailingService
+from apps.transfer_ownership.constants import TransferOwnershipStatus
from apps.transfer_ownership.crud import TransferCRUD
from apps.transfer_ownership.domain import InitiateTransfer, Transfer
from apps.transfer_ownership.errors import TransferEmailError
@@ -39,6 +40,7 @@ async def initiate_transfer(
email=transfer_request.email,
applet_id=applet_id,
key=uuid.uuid4(),
+ status=TransferOwnershipStatus.PENDING,
)
await TransferCRUD(self.session).create(transfer)
try:
@@ -81,6 +83,7 @@ async def accept_transfer(self, applet_id: uuid.UUID, key: uuid.UUID):
"""Respond to a transfer of ownership of an applet."""
await AppletsCRUD(self.session).get_by_id(applet_id)
await AppletsCRUD(self.session).clear_encryption(applet_id)
+ await AppletsCRUD(self.session).clear_report_settings(applet_id)
transfer = await TransferCRUD(self.session).get_by_key(key=key)
if (
@@ -99,8 +102,8 @@ async def accept_transfer(self, applet_id: uuid.UUID, key: uuid.UUID):
await AnswersCRUD(self.session).delete_by_applet_user(
applet_id=transfer.applet_id
)
-
- await TransferCRUD(self.session).delete_all_by_applet_id(
+ await TransferCRUD(self.session).approve_by_key(key=key)
+ await TransferCRUD(self.session).decline_all_pending_by_applet_id(
applet_id=transfer.applet_id
)
@@ -119,8 +122,8 @@ async def accept_transfer(self, applet_id: uuid.UUID, key: uuid.UUID):
role=Role.RESPONDENT,
meta=dict(
secretUserId=str(uuid.uuid4()),
- nickname=f"{self._user.first_name} {self._user.last_name}",
),
+ nickname=f"{self._user.first_name} {self._user.last_name}",
**roles_data,
),
]
@@ -147,4 +150,4 @@ async def decline_transfer(self, applet_id: uuid.UUID, key: uuid.UUID):
raise PermissionsError()
# delete transfer
- await TransferCRUD(self.session).delete_by_key(key=key)
+ await TransferCRUD(self.session).decline_by_key(key=key)
diff --git a/src/apps/transfer_ownership/tests.py b/src/apps/transfer_ownership/tests.py
index e933d9750dd..647035d5d15 100644
--- a/src/apps/transfer_ownership/tests.py
+++ b/src/apps/transfer_ownership/tests.py
@@ -10,11 +10,13 @@ class TestTransfer(BaseTest):
"applets/fixtures/applets.json",
"applets/fixtures/applet_user_accesses.json",
"transfer_ownership/fixtures/transfers.json",
+ "themes/fixtures/themes.json",
]
login_url = "/auth/login"
transfer_url = "/applets/{applet_id}/transferOwnership"
response_url = "/applets/{applet_id}/transferOwnership/{key}"
+ applet_details_url = "/applets/{applet_id}"
@rollback
async def test_initiate_transfer(self):
@@ -140,3 +142,47 @@ async def test_re_accept_transfer(self):
)
assert response.status_code == 404
+
+ @rollback
+ async def test_accept_transfer_report_settings_are_cleared(self):
+ report_settings_keys = (
+ "reportServerIp",
+ "reportPublicKey",
+ "reportRecipients",
+ "reportEmailBody",
+ "reportIncludeUserId",
+ "reportIncludeCaseId",
+ )
+ await self.client.login(
+ self.login_url, "tom@mindlogger.com", "Test1234!"
+ )
+ resp = await self.client.get(
+ self.applet_details_url.format(
+ applet_id="92917a56-d586-4613-b7aa-991f2c4b15b1"
+ )
+ )
+ assert resp.status_code == 200
+ resp_data = resp.json()["result"]
+ # For this test, all report settings are set for the applet
+ for key in report_settings_keys:
+ assert resp_data[key]
+
+ await self.client.login(self.login_url, "lucy@gmail.com", "Test123")
+ response = await self.client.post(
+ self.response_url.format(
+ applet_id="92917a56-d586-4613-b7aa-991f2c4b15b1",
+ key="6a3ab8e6-f2fa-49ae-b2db-197136677da7",
+ ),
+ )
+ assert response.status_code == 200
+
+ resp = await self.client.get(
+ self.applet_details_url.format(
+ applet_id="92917a56-d586-4613-b7aa-991f2c4b15b1"
+ )
+ )
+ assert resp.status_code == 200
+ resp_data = resp.json()["result"]
+ # After accepting the transfer, all report settings must be cleared
+ for key in report_settings_keys:
+ assert not resp_data[key]
diff --git a/src/apps/users/api/password.py b/src/apps/users/api/password.py
index 9e1ad911f35..fcfb6261bb2 100644
--- a/src/apps/users/api/password.py
+++ b/src/apps/users/api/password.py
@@ -19,7 +19,11 @@
User,
UserChangePassword,
)
-from apps.users.errors import ReencryptionInProgressError, UserNotFound
+from apps.users.errors import (
+ PasswordHasSpacesError,
+ ReencryptionInProgressError,
+ UserNotFound,
+)
from apps.users.services import PasswordRecoveryCache, PasswordRecoveryService
from apps.users.tasks import reencrypt_answers
from config import settings
@@ -37,6 +41,9 @@ async def password_update(
session=Depends(get_session),
) -> Response[PublicUser]:
"""General endpoint for update password for signin."""
+ if " " in schema.password:
+ raise PasswordHasSpacesError()
+
reencryption_in_progress = await JobService(
session, user.id
).is_job_in_progress("reencrypt_answers")
diff --git a/src/apps/users/api/users.py b/src/apps/users/api/users.py
index 568a567c342..9c4cfb848b9 100644
--- a/src/apps/users/api/users.py
+++ b/src/apps/users/api/users.py
@@ -13,7 +13,7 @@
UserCreateRequest,
UserUpdateRequest,
)
-from apps.users.errors import EmailAddressNotValid
+from apps.users.errors import EmailAddressNotValid, PasswordHasSpacesError
from apps.workspaces.crud.workspaces import UserWorkspaceCRUD
from apps.workspaces.db.schemas import UserWorkspaceSchema
from infrastructure.database.core import atomic
@@ -24,6 +24,8 @@ async def user_create(
user_create_schema: UserCreateRequest = Body(...),
session=Depends(get_session),
) -> Response[PublicUser]:
+ if " " in user_create_schema.password:
+ raise PasswordHasSpacesError()
async with atomic(session):
email_hash = hash_sha224(user_create_schema.email)
user_schema = await UsersCRUD(session).save(
diff --git a/src/apps/users/cruds/user.py b/src/apps/users/cruds/user.py
index d248085866f..852b856bc3a 100644
--- a/src/apps/users/cruds/user.py
+++ b/src/apps/users/cruds/user.py
@@ -148,6 +148,11 @@ async def get_anonymous_respondent(self) -> UserSchema | None:
async def get_by_ids(self, ids: Collection[uuid.UUID]) -> List[UserSchema]:
query: Query = select(UserSchema)
- query.where(UserSchema.id.in_(ids))
+ query = query.where(UserSchema.id.in_(ids))
db_result = await self._execute(query)
return db_result.scalars().all() # noqa
+
+ async def get_user_or_none_by_email(self, email: str) -> UserSchema | None:
+ email_hash = hash_sha224(email)
+ user = await self._get("email", email_hash)
+ return user
diff --git a/src/apps/users/errors.py b/src/apps/users/errors.py
index 4ec053e2e02..9faa2ca774e 100644
--- a/src/apps/users/errors.py
+++ b/src/apps/users/errors.py
@@ -8,7 +8,9 @@ class UserNotFound(NotFoundError):
class UserAlreadyExistError(ValidationError):
- message = _("That email is already registered in the system.")
+ message = _(
+ "That email address is already associated with a MindLogger account."
+ )
class EmailAddressError(ValidationError):
@@ -16,6 +18,7 @@ class EmailAddressError(ValidationError):
class EmailAddressNotValid(ValidationError):
+ message_is_template: bool = True
message = _("Email address: {email} is not valid.")
@@ -23,6 +26,10 @@ class PasswordRecoveryKeyNotFound(NotFoundError):
message = _("Password recovery key not found.")
+class PasswordHasSpacesError(ValidationError):
+ message = _("Password should not contain blank spaces.")
+
+
class UserIsDeletedError(NotFoundError):
message = _("User is deleted.")
@@ -32,6 +39,7 @@ class UserDeviceNotFound(NotFoundError):
class UsersError(ValidationError):
+ message_is_template: bool = True
message = _("Can not make the looking up by {key} {value}.")
diff --git a/src/apps/users/services/user.py b/src/apps/users/services/user.py
index 79f4fbdaf71..a5967f0aec0 100644
--- a/src/apps/users/services/user.py
+++ b/src/apps/users/services/user.py
@@ -1,6 +1,9 @@
+import uuid
+
from apps.authentication.services import AuthenticationService
from apps.users import UserSchema, UsersCRUD
from apps.users.domain import User
+from apps.users.errors import UserNotFound
from apps.workspaces.crud.workspaces import UserWorkspaceCRUD
from apps.workspaces.db.schemas import UserWorkspaceSchema
from config import settings
@@ -88,3 +91,11 @@ async def create_anonymous_respondent(self):
async def get_by_email(self, email: str) -> User:
crud = UsersCRUD(self.session)
return await crud.get_by_email(email)
+
+ async def exists_by_id(self, user_id: uuid.UUID):
+ user_exist = await UsersCRUD(self.session).exist_by_id(id_=user_id)
+ if not user_exist:
+ raise UserNotFound()
+
+ async def get(self, user_id: uuid.UUID) -> User | None:
+ return await UsersCRUD(self.session).get_by_id(user_id)
diff --git a/src/apps/users/tests/test_password.py b/src/apps/users/tests/test_password.py
index afed084e6d4..dc965ece823 100644
--- a/src/apps/users/tests/test_password.py
+++ b/src/apps/users/tests/test_password.py
@@ -2,7 +2,6 @@
import datetime
from unittest.mock import patch
-import pytest
from asynctest import CoroutineMock
from httpx import Response as HttpResponse
from starlette import status
@@ -93,7 +92,6 @@ async def test_password_update(self, task_mock: CoroutineMock):
assert internal_response.status_code == status.HTTP_200_OK
task_mock.assert_awaited_once()
- @pytest.mark.skip
@rollback
async def test_password_recovery(
self,
@@ -118,7 +116,9 @@ async def test_password_recovery(
cache = RedisCache()
assert response.status_code == status.HTTP_201_CREATED
- keys = await cache.keys()
+ keys = await cache.keys(
+ key="PasswordRecoveryCache:tom2@mindlogger.com*"
+ )
assert len(keys) == 1
assert password_recovery_request.email in keys[0]
assert len(TestMail.mails) == 1
@@ -137,7 +137,9 @@ async def test_password_recovery(
assert response.status_code == status.HTTP_201_CREATED
- new_keys = await cache.keys()
+ new_keys = await cache.keys(
+ key="PasswordRecoveryCache:tom2@mindlogger.com*"
+ )
assert len(keys) == 1
assert keys[0] != new_keys[0]
assert len(TestMail.mails) == 2
@@ -145,7 +147,6 @@ async def test_password_recovery(
TestMail.mails[0].recipients[0] == password_recovery_request.email
)
- @pytest.mark.skip
@rollback
async def test_password_recovery_approve(
self,
@@ -171,8 +172,10 @@ async def test_password_recovery_approve(
data=password_recovery_request.dict(),
)
- assert response.status_code == status.HTTP_200_OK
- key = (await cache.keys())[0].split(":")[-1]
+ assert response.status_code == status.HTTP_201_CREATED
+ key = (
+ await cache.keys(key="PasswordRecoveryCache:tom2@mindlogger.com*")
+ )[0].split(":")[-1]
data = {
"email": self.create_request_user.dict()["email"],
@@ -185,14 +188,15 @@ async def test_password_recovery_approve(
data=data,
)
- keys = await cache.keys()
+ keys = await cache.keys(
+ key="PasswordRecoveryCache:tom2@mindlogger.com*"
+ )
assert response.status_code == status.HTTP_200_OK
assert response.json() == expected_result
assert len(keys) == 0
assert len(keys) == 0
- @pytest.mark.skip
@rollback
async def test_password_recovery_approve_expired(
self,
@@ -217,8 +221,10 @@ async def test_password_recovery_approve_expired(
data=password_recovery_request.dict(),
)
- assert response.status_code == status.HTTP_200_OK
- key = (await cache.keys())[0].split(":")[-1]
+ assert response.status_code == status.HTTP_201_CREATED
+ key = (
+ await cache.keys(key="PasswordRecoveryCache:tom2@mindlogger.com*")
+ )[0].split(":")[-1]
await asyncio.sleep(2)
data = {
@@ -232,7 +238,9 @@ async def test_password_recovery_approve_expired(
data=data,
)
- keys = await cache.keys()
+ keys = await cache.keys(
+ key="PasswordRecoveryCache:tom2@mindlogger.com*"
+ )
assert response.status_code == status.HTTP_404_NOT_FOUND
assert len(keys) == 0
diff --git a/src/apps/workspaces/api.py b/src/apps/workspaces/api.py
index 0cc65c5d538..cac57432194 100644
--- a/src/apps/workspaces/api.py
+++ b/src/apps/workspaces/api.py
@@ -3,10 +3,15 @@
from fastapi import Body, Depends, Query
+from apps.answers.deps.preprocess_arbitrary import (
+ get_answer_session_by_owner_id,
+)
+from apps.answers.service import AnswerService
from apps.applets.domain.applet_full import PublicAppletFull
from apps.applets.filters import AppletQueryParams
from apps.applets.service import AppletService
from apps.authentication.deps import get_current_user
+from apps.invitations.services import InvitationsService
from apps.shared.domain import Response, ResponseMulti
from apps.shared.query_params import (
BaseQueryParams,
@@ -14,6 +19,7 @@
parse_query_params,
)
from apps.users.domain import User
+from apps.users.services.user import UserService
# from apps.workspaces.crud.user_applet_access import UserAppletAccessCRUD
from apps.workspaces.domain.constants import Role, UserPinRole
@@ -23,6 +29,7 @@
RemoveManagerAccess,
RemoveRespondentAccess,
RespondentInfo,
+ RespondentInfoPublic,
)
from apps.workspaces.domain.workspace import (
PublicWorkspace,
@@ -253,6 +260,16 @@ async def workspace_remove_manager_access(
"""Remove manager access from a specific user."""
async with atomic(session):
await UserAccessService(session, user.id).remove_manager_access(schema)
+ # Remove manager invitations for applets where the user no longer has management access
+ ex_admin = await UserService(session).get(schema.user_id)
+ if ex_admin:
+ management_applets = await UserAccessService(
+ session, schema.user_id
+ ).get_management_applets(schema.applet_ids)
+ ids_to_remove = set(schema.applet_ids) - set(management_applets)
+ await InvitationsService(session, ex_admin).delete_for_managers(
+ list(ids_to_remove)
+ )
async def applet_remove_respondent_access(
@@ -264,6 +281,11 @@ async def applet_remove_respondent_access(
await UserAccessService(session, user.id).remove_respondent_access(
schema
)
+ ex_resp = await UserService(session).get(schema.user_id)
+ if ex_resp:
+ await InvitationsService(session, ex_resp).delete_for_respondents(
+ schema.applet_ids
+ )
async def workspace_respondents_list(
@@ -273,6 +295,7 @@ async def workspace_respondents_list(
parse_query_params(WorkspaceUsersQueryParams)
),
session=Depends(get_session),
+ answer_session=Depends(get_answer_session_by_owner_id),
) -> ResponseMulti[PublicWorkspaceRespondent]:
service = WorkspaceService(session, user.id)
await service.exists_by_owner_id(owner_id)
@@ -284,8 +307,10 @@ async def workspace_respondents_list(
data, total = await service.get_workspace_respondents(
owner_id, None, deepcopy(query_params)
)
-
- return ResponseMulti(result=data, count=total)
+ respondents = await AnswerService(
+ session=session, arbitrary_session=answer_session
+ ).fill_last_activity(data)
+ return ResponseMulti(result=respondents, count=total)
async def workspace_applet_respondents_list(
@@ -296,6 +321,7 @@ async def workspace_applet_respondents_list(
parse_query_params(WorkspaceUsersQueryParams)
),
session=Depends(get_session),
+ answer_session=Depends(get_answer_session_by_owner_id),
) -> ResponseMulti[PublicWorkspaceRespondent]:
service = WorkspaceService(session, user.id)
await service.exists_by_owner_id(owner_id)
@@ -307,8 +333,10 @@ async def workspace_applet_respondents_list(
data, total = await service.get_workspace_respondents(
owner_id, applet_id, deepcopy(query_params)
)
-
- return ResponseMulti(result=data, count=total)
+ respondents = await AnswerService(
+ session=session, arbitrary_session=answer_session
+ ).fill_last_activity(data, applet_id)
+ return ResponseMulti(result=respondents, count=total)
async def workspace_managers_list(
@@ -443,10 +471,34 @@ async def workspace_managers_applet_access_set(
):
async with atomic(session):
await WorkspaceService(session, user.id).exists_by_owner_id(owner_id)
+ await AppletService(session, user.id).exist_by_ids(
+ [access.applet_id for access in accesses.accesses]
+ )
await CheckAccessService(
session, user.id
).check_workspace_manager_accesses_access(owner_id)
+ await UserService(session).exists_by_id(manager_id)
await UserAccessService(session, user.id).set(
owner_id, manager_id, accesses
)
+
+
+async def workspace_applet_get_respondent(
+ owner_id: uuid.UUID,
+ applet_id: uuid.UUID,
+ respondent_id: uuid.UUID,
+ user: User = Depends(get_current_user),
+ session=Depends(get_session),
+) -> Response[RespondentInfoPublic]:
+ async with atomic(session):
+ await AppletService(session, user.id).exist_by_id(applet_id)
+ await WorkspaceService(session, user.id).exists_by_owner_id(owner_id)
+ await CheckAccessService(
+ session, user.id
+ ).check_applet_respondent_list_access(applet_id)
+
+ respondent_info = await UserAppletAccessService(
+ session, user.id, applet_id
+ ).get_respondent_info(respondent_id, applet_id, owner_id)
+ return Response(result=respondent_info)
diff --git a/src/apps/workspaces/commands/__init__.py b/src/apps/workspaces/commands/__init__.py
new file mode 100644
index 00000000000..916ed20411e
--- /dev/null
+++ b/src/apps/workspaces/commands/__init__.py
@@ -0,0 +1,3 @@
+from apps.workspaces.commands.arbitrary_server import ( # noqa: F401
+ app as arbitrary_server_cli,
+)
diff --git a/src/apps/workspaces/commands/arbitrary_server.py b/src/apps/workspaces/commands/arbitrary_server.py
new file mode 100644
index 00000000000..02bf4c2107d
--- /dev/null
+++ b/src/apps/workspaces/commands/arbitrary_server.py
@@ -0,0 +1,186 @@
+import asyncio
+import uuid
+from functools import wraps
+from typing import Optional
+
+import typer
+from pydantic import ValidationError
+from rich import print
+from rich.style import Style
+from rich.table import Table
+
+from apps.workspaces.constants import StorageType
+from apps.workspaces.domain.workspace import (
+ WorkspaceArbitraryCreate,
+ WorkspaceArbitraryFields,
+)
+from apps.workspaces.errors import (
+ ArbitraryServerSettingsError,
+ WorkspaceNotFoundError,
+)
+from apps.workspaces.service.workspace import WorkspaceService
+from infrastructure.database import atomic, session_manager
+
+app = typer.Typer()
+
+
+def coro(f):
+ @wraps(f)
+ def wrapper(*args, **kwargs):
+ return asyncio.run(f(*args, **kwargs))
+
+ return wrapper
+
+
+def print_data_table(data: WorkspaceArbitraryFields):
+ table = Table(
+ show_header=False,
+ title="Arbitrary server settings",
+ title_style=Style(bold=True),
+ )
+ for k, v in data.dict(by_alias=False).items():
+ table.add_row(f"[bold]{k}[/bold]", str(v))
+
+ print(table)
+
+
+def wrap_error_msg(msg):
+ return f"[bold red]Error: \n{msg}[/bold red]"
+
+
+@app.command(short_help="Add arbitrary server settings")
+@coro
+async def add(
+ owner_id: uuid.UUID = typer.Argument(..., help="Workspace owner id"),
+ database_uri: str = typer.Option(
+ ...,
+ "--db-uri",
+ "-d",
+ help="Arbitrary server database uri",
+ ),
+ storage_type: StorageType = typer.Option(
+ ...,
+ "--storage-type",
+ "-t",
+ help="Arbitrary server storage type",
+ ),
+ storage_url: str = typer.Option(
+ None,
+ "--storage-url",
+ "-u",
+ help="Arbitrary server storage url",
+ ),
+ storage_access_key: str = typer.Option(
+ None,
+ "--storage-access-key",
+ "-a",
+ help="Arbitrary server storage access key",
+ ),
+ storage_secret_key: str = typer.Option(
+ ...,
+ "--storage-secret-key",
+ "-s",
+ help="Arbitrary server storage secret key",
+ ),
+ storage_region: str = typer.Option(
+ None,
+ "--storage-region",
+ "-r",
+ help="Arbitrary server storage region",
+ ),
+ storage_bucket: str = typer.Option(
+ None,
+ "--storage-bucket",
+ "-b",
+ help="Arbitrary server storage bucket",
+ ),
+ use_arbitrary: bool = typer.Option(
+ True,
+ is_flag=True,
+ help="Use arbitrary server for workspace",
+ ),
+ force: bool = typer.Option(
+ False,
+ "--force",
+ "-f",
+ is_flag=True,
+ help="Rewrite existing settings",
+ ),
+):
+ try:
+ data = WorkspaceArbitraryCreate(
+ database_uri=database_uri,
+ storage_type=storage_type,
+ storage_url=storage_url,
+ storage_access_key=storage_access_key,
+ storage_secret_key=storage_secret_key,
+ storage_region=storage_region,
+ storage_bucket=storage_bucket,
+ use_arbitrary=use_arbitrary,
+ )
+ except ValidationError as e:
+ err = next(iter(e.errors()))
+ loc = err["loc"]
+ loc_str = ""
+ if isinstance(loc[-1], int) and len(loc) > 1:
+ loc_str = f"{loc[-2]}.{loc[-1]}: "
+ elif loc[-1] != "__root__":
+ loc_str = f"{loc[-1]}: "
+ print(wrap_error_msg(loc_str + err["msg"]))
+ return
+
+ session_maker = session_manager.get_session()
+ try:
+ async with session_maker() as session:
+ async with atomic(session):
+ try:
+ await WorkspaceService(
+ session, owner_id
+ ).set_arbitrary_server(data, rewrite=force)
+ except WorkspaceNotFoundError as e:
+ print(wrap_error_msg(e))
+ except ArbitraryServerSettingsError as e:
+ print(
+ wrap_error_msg(
+ "Arbitrary server is already set. "
+ "Use --force to rewrite."
+ )
+ )
+ print_data_table(e.data)
+ else:
+ print("[bold green]Success:[/bold green]")
+ print_data_table(data)
+ finally:
+ await session_maker.remove()
+
+
+@app.command(short_help="Show arbitrary server settings")
+@coro
+async def show(
+ owner_id: Optional[uuid.UUID] = typer.Argument(
+ None, help="Workspace owner id"
+ ),
+):
+ session_maker = session_manager.get_session()
+ try:
+ async with session_maker() as session:
+ if owner_id:
+ data = await WorkspaceService(
+ session, owner_id
+ ).get_arbitrary_info_by_owner_id(owner_id)
+ if not data:
+ print(
+ "[bold green]"
+ "Arbitrary server not configured"
+ "[/bold green]"
+ )
+ return
+ print_data_table(WorkspaceArbitraryFields.from_orm(data))
+ else:
+ workspaces = await WorkspaceService(
+ session, uuid.uuid4()
+ ).get_arbitrary_list()
+ for data in workspaces:
+ print_data_table(WorkspaceArbitraryFields.from_orm(data))
+ finally:
+ await session_maker.remove()
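
A sketch of how the new CLI could be driven from tests with Typer's CliRunner; the owner id and credentials are placeholders, and actually running it assumes a reachable database:

    from typer.testing import CliRunner

    from apps.workspaces.commands import arbitrary_server_cli

    runner = CliRunner()
    result = runner.invoke(
        arbitrary_server_cli,
        [
            "add",
            "00000000-0000-0000-0000-000000000000",  # placeholder owner id
            "--db-uri", "postgresql+asyncpg://user:pass@host:5432/db",
            "--storage-type", "aws",
            "--storage-secret-key", "secret",
            "--storage-access-key", "access",
            "--storage-region", "us-east-1",
        ],
    )
    print(result.stdout)  # expect the "Success" table on a configured workspace
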
diff --git a/src/apps/workspaces/constants.py b/src/apps/workspaces/constants.py
index 8732d7a1141..4a94e01d72b 100644
--- a/src/apps/workspaces/constants.py
+++ b/src/apps/workspaces/constants.py
@@ -1,4 +1,10 @@
-class StorageType:
+import enum
+
+
+class StorageType(str, enum.Enum):
AWS = "aws"
AZURE = "azure"
GCP = "gcp"
+
+ def __str__(self):
+ return self.value
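
Making StorageType a str-backed enum keeps existing comparisons against raw strings working while giving Typer a proper choice type; the __str__ override matters only for display:

    assert StorageType.AWS == "aws"       # str mixin: members equal their raw value
    assert str(StorageType.AWS) == "aws"  # without __str__ this would be "StorageType.AWS"
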
diff --git a/src/apps/workspaces/crud/applet_access.py b/src/apps/workspaces/crud/applet_access.py
index 71bd663b71f..bd337249633 100644
--- a/src/apps/workspaces/crud/applet_access.py
+++ b/src/apps/workspaces/crud/applet_access.py
@@ -55,7 +55,9 @@ async def check_export_access(
query = query.where(UserAppletAccessSchema.user_id == user_id)
query = query.where(
or_(
- UserAppletAccessSchema.role.in_([Role.OWNER, Role.MANAGER]),
+ UserAppletAccessSchema.role.in_(
+ [Role.OWNER, Role.MANAGER, Role.RESPONDENT]
+ ),
and_(
UserAppletAccessSchema.role == Role.REVIEWER,
func.json_array_length(
diff --git a/src/apps/workspaces/crud/user_applet_access.py b/src/apps/workspaces/crud/user_applet_access.py
index b368641d27f..4c42e9d49f0 100644
--- a/src/apps/workspaces/crud/user_applet_access.py
+++ b/src/apps/workspaces/crud/user_applet_access.py
@@ -6,6 +6,7 @@
from asyncpg.exceptions import UniqueViolationError
from pydantic import parse_obj_as
from sqlalchemy import (
+ Unicode,
and_,
any_,
case,
@@ -20,15 +21,22 @@
true,
update,
)
-from sqlalchemy.dialects.postgresql import UUID, aggregate_order_by, insert
+from sqlalchemy.dialects.postgresql import (
+ ARRAY,
+ UUID,
+ aggregate_order_by,
+ insert,
+)
from sqlalchemy.engine import Result
from sqlalchemy.exc import NoResultFound
from sqlalchemy.orm import Query
from sqlalchemy.sql.functions import count
+from sqlalchemy_utils import StringEncryptedType
from apps.applets.db.schemas import AppletSchema
from apps.folders.db.schemas import FolderAppletSchema
from apps.schedule.db.schemas import EventSchema, UserEventsSchema
+from apps.shared.encryption import get_key
from apps.shared.filtering import Comparisons, FilterField, Filtering
from apps.shared.ordering import Ordering
from apps.shared.paging import paging
@@ -103,14 +111,13 @@ class _AppletRespondentOrdering(Ordering):
class _WorkspaceRespondentSearch(Searching):
search_fields = [
- func.array_agg(UserAppletAccessSchema.meta["nickname"].astext),
+ func.array_agg(UserAppletAccessSchema.nickname),
func.array_agg(UserAppletAccessSchema.meta["secretUserId"].astext),
]
class _AppletRespondentSearch(Searching):
search_fields = [
- UserAppletAccessSchema.meta["nickname"].astext,
UserAppletAccessSchema.meta["secretUserId"].astext,
]
@@ -266,6 +273,17 @@ async def get_applet_role_by_user_id(
return db_result.scalars().first()
+ async def get_applet_role_by_user_id_exist(
+ self, applet_id: uuid.UUID, user_id: uuid.UUID, role: Role
+ ) -> UserAppletAccessSchema | None:
+ query: Query = select(UserAppletAccessSchema)
+ query = query.where(UserAppletAccessSchema.applet_id == applet_id)
+ query = query.where(UserAppletAccessSchema.user_id == user_id)
+ query = query.where(UserAppletAccessSchema.role == role)
+ db_result = await self._execute(query)
+
+ return db_result.scalars().first()
+
def user_applet_ids_query(self, user_id: uuid.UUID) -> Query:
query: Query = select(UserAppletAccessSchema.applet_id)
query = query.where(UserAppletAccessSchema.soft_exists())
@@ -356,6 +374,7 @@ async def upsert_user_applet_access(
"role": schema.role,
"is_deleted": schema.is_deleted,
"meta": schema.meta,
+ "nickname": schema.nickname,
}
stmt = insert(UserAppletAccessSchema).values(values)
stmt = stmt.on_conflict_do_update(
@@ -374,6 +393,7 @@ async def upsert_user_applet_access(
"created_at": datetime.utcnow(),
"updated_at": datetime.utcnow(),
"meta": stmt.excluded.meta,
+ "nickname": stmt.excluded.nickname,
},
where=where,
).returning(UserAppletAccessSchema)
@@ -399,6 +419,7 @@ async def upsert_user_applet_access_list(
"role": schema.role,
"is_deleted": schema.is_deleted,
"meta": schema.meta,
+ "nickname": schema.nickname,
}
for schema in schemas
]
@@ -411,11 +432,14 @@ async def upsert_user_applet_access_list(
UserAppletAccessSchema.role,
],
set_={
+ "invitor_id": stmt.excluded.invitor_id,
+ "owner_id": stmt.excluded.owner_id,
"user_id": stmt.excluded.user_id,
"applet_id": stmt.excluded.applet_id,
"role": stmt.excluded.role,
"is_deleted": stmt.excluded.is_deleted,
"meta": stmt.excluded.meta,
+ "nickname": stmt.excluded.nickname,
},
)
@@ -624,11 +648,12 @@ async def get_workspace_respondents(
UserSchema.last_seen_at, UserSchema.created_at
).label("last_seen"),
- func.array_agg(
- aggregate_order_by(
- func.distinct(field_nickname), field_nickname
- )
- ).label("nicknames"),
+ func.array_remove(
+ func.array_agg(
+ func.distinct(field_nickname)
+ ), None)
+ .cast(ARRAY(StringEncryptedType(Unicode, get_key)))
+ .label("nicknames"),
func.array_agg(
aggregate_order_by(
@@ -1022,6 +1047,7 @@ async def get_respondent_accesses_by_owner_id(
query: Query = select(
UserAppletAccessSchema.meta,
+ UserAppletAccessSchema.nickname,
AppletSchema.id,
AppletSchema.display_name,
AppletSchema.image,
@@ -1043,6 +1069,7 @@ async def get_respondent_accesses_by_owner_id(
results = db_result.all()
for (
meta,
+ nickname,
applet_id,
display_name,
image,
@@ -1055,7 +1082,7 @@ async def get_respondent_accesses_by_owner_id(
applet_name=display_name,
applet_image=image,
secret_user_id=meta.get("secretUserId", ""),
- nickname=meta.get("nickname", ""),
+ nickname=nickname,
has_individual_schedule=has_individual,
encryption=encryption,
)
@@ -1188,11 +1215,13 @@ async def remove_manager_accesses_by_user_id_in_workspace(
await self._execute(query)
- async def update_meta_by_access_id(self, access_id: uuid.UUID, meta: dict):
+ async def update_meta_by_access_id(
+ self, access_id: uuid.UUID, meta: dict, nickname: str
+ ):
query: Query = update(UserAppletAccessSchema)
query = query.where(UserAppletAccessSchema.soft_exists())
query = query.where(UserAppletAccessSchema.id == access_id)
- query = query.values(meta=meta)
+ query = query.values(meta=meta, nickname=nickname)
await self._execute(query)
@@ -1276,7 +1305,7 @@ async def get_responsible_persons(
async def get_user_nickname(
self, applet_id: uuid.UUID, user_id: uuid.UUID
) -> str | None:
- query: Query = select(UserAppletAccessSchema.meta)
+ query: Query = select(UserAppletAccessSchema.nickname)
query = query.where(
UserAppletAccessSchema.applet_id == applet_id,
UserAppletAccessSchema.user_id == user_id,
@@ -1284,4 +1313,38 @@ async def get_user_nickname(
)
db_result = await self._execute(query)
db_result = db_result.first()
- return db_result[0].get("nickname") if db_result else None
+ return db_result[0] if db_result else None
+
+ async def get_respondent_by_applet_and_owner(
+ self,
+ respondent_id: uuid.UUID,
+ applet_id: uuid.UUID,
+ owner_id: uuid.UUID,
+ ) -> UserAppletAccessSchema | None:
+ query: Query = select(UserAppletAccessSchema)
+ query = query.where(
+ UserAppletAccessSchema.owner_id == owner_id,
+ UserAppletAccessSchema.applet_id == applet_id,
+ UserAppletAccessSchema.user_id == respondent_id,
+ UserAppletAccessSchema.role == Role.RESPONDENT,
+ UserAppletAccessSchema.soft_exists(),
+ )
+ db_result = await self._execute(query)
+ db_result = db_result.first() # noqa
+ return db_result[0] if db_result else None
+
+ async def get_management_applets(
+ self,
+ user_id: uuid.UUID,
+ applet_ids: list[uuid.UUID],
+ ) -> list[uuid.UUID]:
+ query: Query = select(UserAppletAccessSchema.applet_id)
+ query = query.where(
+ UserAppletAccessSchema.applet_id.in_(applet_ids),
+ UserAppletAccessSchema.user_id == user_id,
+ UserAppletAccessSchema.role.in_(Role.managers()),
+ UserAppletAccessSchema.soft_exists(),
+ )
+ db_result = await self._execute(query)
+ db_result = db_result.scalars().all() # noqa
+ return db_result
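
One non-obvious detail in the get_workspace_respondents change above: array_agg over the encrypted nickname column yields raw ciphertexts, and SQLAlchemy only applies a type's result processing when the expression carries that type, hence the cast back to ARRAY(StringEncryptedType(...)) so each element is decrypted on read. (Relatedly, nickname was dropped from _AppletRespondentSearch, presumably because pattern matching against ciphertext cannot match plaintext.) The pattern in isolation:

    from sqlalchemy import Unicode, func
    from sqlalchemy.dialects.postgresql import ARRAY
    from sqlalchemy_utils import StringEncryptedType

    from apps.shared.encryption import get_key
    from apps.workspaces.db.schemas import UserAppletAccessSchema

    # aggregate, drop NULLs, then re-attach the encrypted type so result
    # processing decrypts every element of the returned array
    nicknames = (
        func.array_remove(
            func.array_agg(func.distinct(UserAppletAccessSchema.nickname)),
            None,
        )
        .cast(ARRAY(StringEncryptedType(Unicode, get_key)))
        .label("nicknames")
    )
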
diff --git a/src/apps/workspaces/crud/workspaces.py b/src/apps/workspaces/crud/workspaces.py
index 80c70cf3b8c..deb6febfb22 100644
--- a/src/apps/workspaces/crud/workspaces.py
+++ b/src/apps/workspaces/crud/workspaces.py
@@ -6,13 +6,12 @@
from sqlalchemy.orm import Query
from apps.applets.db.schemas import AppletSchema
-from apps.users import User
from apps.workspaces.db.schemas import (
UserAppletAccessSchema,
UserWorkspaceSchema,
)
from apps.workspaces.domain.constants import Role
-from apps.workspaces.domain.workspace import UserAnswersDBInfo, UserWorkspace
+from apps.workspaces.domain.workspace import UserAnswersDBInfo
from infrastructure.database.crud import BaseCRUD
__all__ = ["UserWorkspaceCRUD"]
@@ -48,23 +47,6 @@ async def save(self, schema: UserWorkspaceSchema) -> UserWorkspaceSchema:
"""Return UserWorkspace instance."""
return await self._create(schema)
- async def update(self, user: User, workspace_prefix: str) -> UserWorkspace:
- # Update UserWorkspace in database
- instance = await self._update_one(
- lookup="user_id",
- value=user.id,
- schema=UserWorkspaceSchema(
- user_id=user.id,
- workspace_name=workspace_prefix,
- is_modified=True,
- ),
- )
-
- # Create internal data model
- user_workspace = UserWorkspace.from_orm(instance)
-
- return user_workspace
-
async def update_by_user_id(
self, user_id: uuid.UUID, schema: UserWorkspaceSchema
) -> UserWorkspaceSchema:
@@ -85,14 +67,65 @@ async def get_by_applet_id(
UserAppletAccessSchema.applet_id == applet_id,
)
)
- access_subquery = access_subquery.subquery()
-
query: Query = select(UserWorkspaceSchema)
query = query.where(UserWorkspaceSchema.user_id.in_(access_subquery))
db_result = await self._execute(query)
res = db_result.scalars().first()
return res
+ async def get_arbitraries_map_by_applet_ids(
+ self, applet_ids: list[uuid.UUID]
+ ) -> dict[str | None, list[uuid.UUID]]:
+ """Returning map {"arbitrary_uri": [applet_ids]}"""
+ applet_owner_map = await self._get_applet_owners_map_by_applet_ids(
+ applet_ids
+ )
+ owner_ids = set(applet_owner_map.values())
+
+ query: Query = select(UserWorkspaceSchema)
+ query = query.where(UserWorkspaceSchema.user_id.in_(owner_ids))
+ db_result = await self._execute(query)
+ res = db_result.scalars().all()
+
+ user_arb_uri_map: dict[uuid.UUID, str] = dict()
+ for user_workspace in res:
+ user_arb_uri_map[user_workspace.user_id] = (
+ user_workspace.database_uri
+ if user_workspace.use_arbitrary
+ else None
+ )
+
+ arb_uri_applet_ids_map: dict[str | None, list[uuid.UUID]] = dict()
+ for applet_id in applet_ids:
+ user_id = applet_owner_map[applet_id]
+ arb_uri = user_arb_uri_map[user_id]
+ arb_uri_applet_ids_map.setdefault(arb_uri, list())
+ arb_uri_applet_ids_map[arb_uri].append(applet_id)
+
+ return arb_uri_applet_ids_map
+
+ async def _get_applet_owners_map_by_applet_ids(
+ self, applet_ids: list[uuid.UUID]
+ ) -> dict[uuid.UUID, uuid.UUID]:
+ """Returning map {"applet_id": owner_id(user_id)}"""
+ query: Query = select(UserAppletAccessSchema)
+ query = query.where(
+ and_(
+ UserAppletAccessSchema.role == Role.OWNER,
+ UserAppletAccessSchema.applet_id.in_(applet_ids),
+ )
+ )
+ db_result = await self._execute(query)
+ res = db_result.scalars().all()
+
+ applet_owner_map: dict[uuid.UUID, uuid.UUID] = dict()
+ for user_applet_access in res:
+ applet_owner_map[
+ user_applet_access.applet_id
+ ] = user_applet_access.owner_id
+
+ return applet_owner_map
+
async def get_bucket_info(self, applet_id: uuid.UUID):
query: Query = select(
UserWorkspaceSchema.storage_access_key,
@@ -147,3 +180,9 @@ async def get_user_answers_db_info(
res = db_result.all()
return parse_obj_as(list[UserAnswersDBInfo], res)
+
+ async def get_arbitrary_list(self) -> list[UserWorkspaceSchema]:
+ query: Query = select(UserWorkspaceSchema)
+ query = query.where(UserWorkspaceSchema.database_uri.isnot(None))
+ result: Result = await self._execute(query)
+ return result.scalars().all()
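
get_arbitraries_map_by_applet_ids groups applets by their owner workspace's arbitrary database URI (None for workspaces on the shared database), so callers can batch per-server work. A shape sketch with hypothetical ids:

    # given: applets a1, a3 owned by a workspace with an arbitrary DB,
    #        applet a2 owned by a workspace without one
    # result = await UserWorkspaceCRUD(session).get_arbitraries_map_by_applet_ids(
    #     [a1, a2, a3]
    # )
    # result == {"postgresql+asyncpg://arbitrary-host/db": [a1, a3], None: [a2]}
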
diff --git a/src/apps/workspaces/db/schemas/user_applet_access.py b/src/apps/workspaces/db/schemas/user_applet_access.py
index d8e8ada0b20..343aef7ef44 100644
--- a/src/apps/workspaces/db/schemas/user_applet_access.py
+++ b/src/apps/workspaces/db/schemas/user_applet_access.py
@@ -8,6 +8,7 @@
ForeignKey,
Index,
String,
+ Unicode,
UniqueConstraint,
case,
func,
@@ -16,7 +17,9 @@
)
from sqlalchemy.dialects.postgresql import JSONB, UUID
from sqlalchemy.ext.hybrid import hybrid_property
+from sqlalchemy_utils import StringEncryptedType
+from apps.shared.encryption import get_key
from apps.workspaces.domain.constants import UserPinRole
from infrastructure.database.base import Base
@@ -42,6 +45,8 @@ class UserAppletAccessSchema(Base):
ForeignKey("users.id", ondelete="RESTRICT"), nullable=False
)
meta = Column(JSONB())
+ nickname = Column(StringEncryptedType(Unicode, get_key))
+
is_pinned = Column(Boolean(), default=False)
__table_args__ = (
Index(
@@ -55,11 +60,11 @@ class UserAppletAccessSchema(Base):
@hybrid_property
def respondent_nickname(self):
- return self.meta.get("nickname")
+ return self.nickname
@respondent_nickname.expression # type: ignore[no-redef]
def respondent_nickname(cls):
- return cls.meta[text("'nickname'")].astext
+ return cls.nickname
@hybrid_property
def respondent_secret_id(self):
diff --git a/src/apps/workspaces/db/schemas/user_workspace.py b/src/apps/workspaces/db/schemas/user_workspace.py
index b89634d24c3..845b7741d08 100644
--- a/src/apps/workspaces/db/schemas/user_workspace.py
+++ b/src/apps/workspaces/db/schemas/user_workspace.py
@@ -1,4 +1,4 @@
-from sqlalchemy import Boolean, Column, ForeignKey, String, Unicode
+from sqlalchemy import Boolean, Column, ForeignKey, Unicode
from sqlalchemy_utils import StringEncryptedType
from apps.shared.encryption import get_key
@@ -18,11 +18,11 @@ class UserWorkspaceSchema(Base):
StringEncryptedType(Unicode, get_key), nullable=False, index=True
)
is_modified = Column(Boolean(), default=False)
- database_uri = Column(String())
- storage_type = Column(String())
- storage_access_key = Column(String())
- storage_secret_key = Column(String())
- storage_region = Column(String())
- storage_url = Column(String(), nullable=True, default=None)
- storage_bucket = Column(String(), nullable=True, default=None)
+ database_uri = Column(StringEncryptedType(Unicode, get_key))
+ storage_type = Column(StringEncryptedType(Unicode, get_key))
+ storage_access_key = Column(StringEncryptedType(Unicode, get_key))
+ storage_secret_key = Column(StringEncryptedType(Unicode, get_key))
+ storage_region = Column(StringEncryptedType(Unicode, get_key))
+ storage_url = Column(StringEncryptedType(Unicode, get_key))
+ storage_bucket = Column(StringEncryptedType(Unicode, get_key))
use_arbitrary = Column(Boolean(), default=False)
diff --git a/src/apps/workspaces/domain/user_applet_access.py b/src/apps/workspaces/domain/user_applet_access.py
index 7c42ad9c3ae..fd6471f6630 100644
--- a/src/apps/workspaces/domain/user_applet_access.py
+++ b/src/apps/workspaces/domain/user_applet_access.py
@@ -132,3 +132,8 @@ class RespondentExportData(InternalModel):
secret_id: str | None
legacy_profile_id: str | None
is_manager: bool
+
+
+class RespondentInfoPublic(PublicModel):
+ nickname: str | None
+ secret_user_id: str | None
diff --git a/src/apps/workspaces/domain/workspace.py b/src/apps/workspaces/domain/workspace.py
index d971ffaf1f9..6852dcec7e0 100644
--- a/src/apps/workspaces/domain/workspace.py
+++ b/src/apps/workspaces/domain/workspace.py
@@ -2,10 +2,14 @@
import uuid
from typing import Optional
-from pydantic import Field, validator
+from pydantic import Field, root_validator, validator
+from sqlalchemy import Unicode
+from sqlalchemy.dialects.postgresql.asyncpg import PGDialect_asyncpg
+from sqlalchemy_utils import StringEncryptedType
from apps.applets.domain.base import Encryption
from apps.shared.domain import InternalModel, PublicModel
+from apps.shared.encryption import get_key
__all__ = [
"PublicWorkspace",
@@ -17,8 +21,11 @@
"WorkspaceInfo",
"PublicWorkspaceInfo",
"WorkspaceArbitrary",
+ "WorkspaceArbitraryCreate",
+ "WorkspaceArbitraryFields",
]
+from apps.workspaces.constants import StorageType
from apps.workspaces.domain.constants import Role
@@ -67,13 +74,24 @@ class WorkspaceRespondentDetails(InternalModel):
has_individual_schedule: bool = False
encryption: WorkspaceAppletEncryption | None = None
+ @root_validator
+ def decrypt_nickname(cls, values):
+ nickname = values.get("respondent_nickname")
+ if nickname:
+ nickname = StringEncryptedType(
+ Unicode, get_key
+ ).process_result_value(nickname, dialect=PGDialect_asyncpg.name)
+ values["respondent_nickname"] = str(nickname)
+
+ return values
+
class WorkspaceRespondent(InternalModel):
id: uuid.UUID
nicknames: list[str] | None = None
secret_ids: list[str] | None = None
is_anonymous_respondent: bool
- last_seen: datetime.datetime
+ last_seen: datetime.datetime | None
is_pinned: bool = False
details: list[WorkspaceRespondentDetails] | None = None
@@ -133,14 +151,25 @@ def group_applets(cls, value):
return list(applets.values())
+class PublicWorkspaceRespondentDetails(PublicModel):
+ applet_id: uuid.UUID
+ applet_display_name: str
+ applet_image: str | None
+ access_id: uuid.UUID
+ respondent_nickname: str | None = None
+ respondent_secret_id: str | None = None
+ has_individual_schedule: bool = False
+ encryption: WorkspaceAppletEncryption | None = None
+
+
class PublicWorkspaceRespondent(PublicModel):
id: uuid.UUID
nicknames: list[str] | None
secret_ids: list[str] | None
is_anonymous_respondent: bool
- last_seen: datetime.datetime
+ last_seen: datetime.datetime | None
is_pinned: bool = False
- details: list[WorkspaceRespondentDetails] | None = None
+ details: list[PublicWorkspaceRespondentDetails] | None = None
class PublicWorkspaceManager(PublicModel):
@@ -235,17 +264,67 @@ class AppletRoles(InternalModel):
roles: list[Role]
-class WorkspaceArbitrary(InternalModel):
+class WorkspaceArbitraryFields(InternalModel):
+ database_uri: str | None = None
+ storage_type: str | None = None
+ storage_url: str | None = None
+ storage_access_key: str | None = None
+ storage_secret_key: str | None = None
+ storage_region: str | None = None
+ storage_bucket: str | None = None
+ use_arbitrary: bool
+
+ def is_arbitrary_empty(self):
+ return not any(
+ [
+ self.database_uri,
+ self.storage_access_key,
+ self.storage_secret_key,
+ self.storage_region,
+ self.storage_type,
+ self.storage_url,
+ self.storage_bucket,
+ self.use_arbitrary,
+ ]
+ )
+
+ @validator("use_arbitrary", always=True, pre=True)
+ def to_bool(cls, value):
+ if value is None:
+ return False
+
+ return value
+
+
+class WorkspaceArbitraryCreate(WorkspaceArbitraryFields):
+ database_uri: str
+ storage_secret_key: str
+ storage_type: StorageType
+
+ @root_validator()
+ def validate_storage_settings(cls, values):
+ storage_type = values["storage_type"]
+ required = []
+ if storage_type == StorageType.AWS:
+ required = ["storage_access_key", "storage_region"]
+ elif storage_type == StorageType.GCP:
+ required = ["storage_url", "storage_bucket", "storage_access_key"]
+
+ if required and not all((values[itm] is not None) for itm in required):
+ raise ValueError(
+ f"{', '.join(required)} are required "
+ f"for {storage_type} storage"
+ )
+
+ return values
+
+
+class WorkspaceArbitrary(WorkspaceArbitraryFields):
id: uuid.UUID
database_uri: str
- storage_access_key: str
storage_secret_key: str
- storage_region: str
storage_type: str
- storage_url: Optional[str] = None
- storage_bucket: Optional[str] = None
storage_bucket_answer: Optional[str] = None
- use_arbitrary: bool
class AnswerDbApplet(InternalModel):
diff --git a/src/apps/workspaces/errors.py b/src/apps/workspaces/errors.py
index 1246f501039..31b4bb2bf2d 100644
--- a/src/apps/workspaces/errors.py
+++ b/src/apps/workspaces/errors.py
@@ -15,8 +15,12 @@
"AccessDeniedToUpdateOwnAccesses",
"RemoveOwnPermissionAccessDenied",
"UserAccessAlreadyExists",
+ "ArbitraryServerSettingsError",
+ "WorkspaceNotFoundError",
]
+from apps.workspaces.domain.workspace import WorkspaceArbitraryFields
+
class WorkspaceDoesNotExistError(NotFoundError):
message = _("Workspace does not exist.")
@@ -39,6 +43,7 @@ class WorkspaceFolderManipulationAccessDenied(AccessDeniedError):
class UserAppletAccessesNotFound(NotFoundError):
+ message_is_template: bool = True
message = _("No such UserAppletAccess with id={id_}.")
@@ -112,7 +117,7 @@ class InvalidAppletIDFilter(FieldError):
class UserSecretIdAlreadyExists(ValidationError):
- message = _("Secret id already exists.")
+ message = _("Secret User ID already exists")
class UserSecretIdAlreadyExistsInInvitation(ValidationError):
@@ -125,3 +130,13 @@ class AnswerCheckAccessDenied(AccessDeniedError):
class UserAccessAlreadyExists(ValidationError):
message = _("User Access already exists.")
+
+
+class WorkspaceNotFoundError(Exception):
+ ...
+
+
+class ArbitraryServerSettingsError(Exception):
+ def __init__(self, data: WorkspaceArbitraryFields, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self.data = data
diff --git a/src/apps/workspaces/router.py b/src/apps/workspaces/router.py
index 4951e3b987b..27b564c8575 100644
--- a/src/apps/workspaces/router.py
+++ b/src/apps/workspaces/router.py
@@ -19,6 +19,7 @@
search_workspace_applets,
user_workspaces,
workspace_applet_detail,
+ workspace_applet_get_respondent,
workspace_applet_managers_list,
workspace_applet_respondent_update,
workspace_applet_respondents_list,
@@ -145,6 +146,15 @@
},
)(workspace_applet_respondent_update)
+router.get(
+ "/{owner_id}/applets/{applet_id}/respondents/{respondent_id}",
+ status_code=status.HTTP_200_OK,
+ responses={
+ **DEFAULT_OPENAPI_RESPONSE,
+ **AUTHENTICATION_ERROR_RESPONSES,
+ },
+)(workspace_applet_get_respondent)
+
router.post(
"/{owner_id}/applets",
description="""This endpoint is used to create a new applet""",
diff --git a/src/apps/workspaces/service/user_access.py b/src/apps/workspaces/service/user_access.py
index 251cc21deb6..4871b18534c 100644
--- a/src/apps/workspaces/service/user_access.py
+++ b/src/apps/workspaces/service/user_access.py
@@ -426,3 +426,10 @@ def raise_for_developer_access(email: str | None):
email_list = config.settings.logs.get_access_emails()
if email not in email_list:
raise AccessDeniedError()
+
+ async def get_management_applets(
+ self, applet_ids: list[uuid.UUID]
+ ) -> list[uuid.UUID]:
+ return await UserAppletAccessCRUD(self.session).get_management_applets(
+ self._user_id, applet_ids
+ )
diff --git a/src/apps/workspaces/service/user_applet_access.py b/src/apps/workspaces/service/user_applet_access.py
index 76863874f43..ecf12327edc 100644
--- a/src/apps/workspaces/service/user_applet_access.py
+++ b/src/apps/workspaces/service/user_applet_access.py
@@ -7,12 +7,16 @@
from apps.invitations.constants import InvitationStatus
from apps.invitations.crud import InvitationCRUD
from apps.invitations.domain import InvitationDetailGeneric
-from apps.users import User, UserNotFound, UsersCRUD
+from apps.shared.exception import NotFoundError
+from apps.users import UserNotFound, UsersCRUD
from apps.workspaces.db.schemas import UserAppletAccessSchema
__all__ = ["UserAppletAccessService"]
-from apps.workspaces.domain.user_applet_access import RespondentInfo
+from apps.workspaces.domain.user_applet_access import (
+ RespondentInfo,
+ RespondentInfoPublic,
+)
from apps.workspaces.errors import (
UserAppletAccessNotFound,
UserSecretIdAlreadyExists,
@@ -39,17 +43,11 @@ async def _get_default_role_meta(
return meta
- async def _get_default_role_meta_for_anonymous_respondent(
- self, user_id: uuid.UUID
- ) -> dict:
+ async def _get_default_role_meta_for_anonymous_respondent(self) -> dict:
meta: dict = {}
-
- user = await UsersCRUD(self.session).get_by_id(user_id)
meta.update(
secretUserId="Guest Account Submission",
- nickname=f"{user.first_name} {user.last_name}",
)
-
return meta
async def add_role(
@@ -62,6 +60,7 @@ async def add_role(
return UserAppletAccess.from_orm(access_schema)
meta = await self._get_default_role_meta(role, user_id)
+ nickname = meta.pop("nickname", None)
access_schema = await UserAppletAccessCRUD(self.session).save(
UserAppletAccessSchema(
@@ -71,6 +70,7 @@ async def add_role(
owner_id=self._user_id,
invitor_id=self._user_id,
meta=meta,
+ nickname=nickname,
)
)
return UserAppletAccess.from_orm(access_schema)
@@ -84,24 +84,29 @@ async def add_role_for_anonymous_respondent(
if anonymous_respondent:
access_schema = await UserAppletAccessCRUD(
self.session
- ).get_applet_role_by_user_id(
+ ).get_applet_role_by_user_id_exist(
self._applet_id, anonymous_respondent.id, Role.RESPONDENT
)
if access_schema:
+ if access_schema.is_deleted:
+ await UserAppletAccessCRUD(self.session).restore(
+ "id", access_schema.id
+ )
return UserAppletAccess.from_orm(access_schema)
- meta = await self._get_default_role_meta_for_anonymous_respondent(
- anonymous_respondent.id,
- )
-
+ meta = await self._get_default_role_meta_for_anonymous_respondent()
+ owner_access = await UserAppletAccessCRUD(
+ self.session
+ ).get_applet_owner(applet_id=self._applet_id)
access_schema = await UserAppletAccessCRUD(self.session).save(
UserAppletAccessSchema(
user_id=anonymous_respondent.id,
applet_id=self._applet_id,
role=Role.RESPONDENT,
- owner_id=self._user_id,
+ owner_id=owner_access.user_id,
invitor_id=self._user_id,
meta=meta,
+ nickname=None,
)
)
return UserAppletAccess.from_orm(access_schema)
@@ -129,6 +134,7 @@ async def add_role_by_invitation(
self.session
).get_applet_owner(invitation.applet_id)
meta: dict = dict()
+ respondent_nickname = invitation.dict().get("nickname", None)
if invitation.role in [Role.RESPONDENT, Role.REVIEWER]:
meta = invitation.meta.dict(by_alias=True) # type: ignore
@@ -138,15 +144,20 @@ async def add_role_by_invitation(
invitation.applet_id, self._user_id, manager_included_roles
)
- access_schema = await UserAppletAccessCRUD(self.session).save(
- UserAppletAccessSchema(
+ access_schema = await UserAppletAccessCRUD(
+ self.session
+ ).upsert_user_applet_access(
+ schema=UserAppletAccessSchema(
user_id=self._user_id,
applet_id=invitation.applet_id,
role=invitation.role,
owner_id=owner_access.user_id,
invitor_id=invitation.invitor_id,
meta=meta,
- )
+ nickname=respondent_nickname,
+ is_deleted=False,
+ ),
+ where=UserAppletAccessSchema.soft_exists(exists=False),
)
if invitation.role != Role.RESPONDENT:
@@ -157,6 +168,7 @@ async def add_role_by_invitation(
meta = await self._get_default_role_meta(
Role.RESPONDENT, self._user_id
)
+ nickname = meta.pop("nickname", None)
schema = UserAppletAccessSchema(
user_id=self._user_id,
applet_id=invitation.applet_id,
@@ -164,6 +176,7 @@ async def add_role_by_invitation(
owner_id=owner_access.user_id,
invitor_id=invitation.invitor_id,
meta=meta,
+ nickname=nickname,
is_deleted=False,
)
@@ -171,17 +184,16 @@ async def add_role_by_invitation(
self.session
).upsert_user_applet_access(schema)
- return UserAppletAccess.from_orm(access_schema)
+ return UserAppletAccess.from_orm(access_schema[0])
async def add_role_by_private_invitation(self, role: Role):
owner_access = await UserAppletAccessCRUD(
self.session
).get_applet_owner(self._applet_id)
- user: User = await UsersCRUD(self.session).get_by_id(self._user_id)
+
if role == Role.RESPONDENT:
meta = dict(
secretUserId=str(uuid.uuid4()),
- nickname=f"{user.first_name} {user.last_name}",
)
else:
meta = dict()
@@ -218,9 +230,11 @@ async def update_meta(
if not access:
raise UserAppletAccessNotFound()
await self._validate_secret_user_id(access.id, schema.secret_user_id)
- for key, val in schema.dict(by_alias=True).items():
- access.meta[key] = val
- await crud.update_meta_by_access_id(access.id, access.meta)
+ # nickname now lives in its own encrypted column; meta keeps only secretUserId
+ access.meta["secretUserId"] = schema.secret_user_id
+ await crud.update_meta_by_access_id(
+ access.id, access.meta, nickname=schema.nickname
+ )
async def _validate_secret_user_id(
self, exclude_id: uuid.UUID, secret_id: str
@@ -364,3 +378,40 @@ async def get_nickname(self) -> str | None:
return await UserAppletAccessCRUD(self.session).get_user_nickname(
self._applet_id, self._user_id
)
+
+ async def get_respondent_info(
+ self,
+ respondent_id: uuid.UUID,
+ applet_id: uuid.UUID,
+ owner_id: uuid.UUID,
+ ) -> RespondentInfoPublic:
+ crud = UserAppletAccessCRUD(self.session)
+ respondent_schema = await crud.get_respondent_by_applet_and_owner(
+ respondent_id, applet_id, owner_id
+ )
+ if not respondent_schema:
+ raise NotFoundError()
+
+ if respondent_schema.meta:
+ return RespondentInfoPublic(
+ nickname=respondent_schema.nickname,
+ secret_user_id=respondent_schema.meta.get("secretUserId"),
+ )
+ else:
+ return RespondentInfoPublic(
+ nickname=respondent_schema.nickname, secret_user_id=None
+ )
+
+ async def has_role(self, role: str) -> bool:
+ manager_roles = set(Role.managers())
+ is_manager = role in manager_roles
+ current_roles = await UserAppletAccessCRUD(
+ self.session
+ ).get_user_roles_to_applet(self._user_id, self._applet_id)
+ if not is_manager:
+ return role in current_roles
+ else:
+ user_roles = set(current_roles)
+ return bool(user_roles.intersection(manager_roles))
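Note on has_role above: for manager-level roles the check is deliberately coarse, holding any manager role satisfies a query for any other manager role. A minimal sketch of that rule in isolation (the exact membership of Role.managers() is an assumption here):

import enum

class Role(str, enum.Enum):
    OWNER = "owner"
    MANAGER = "manager"
    COORDINATOR = "coordinator"
    EDITOR = "editor"
    REVIEWER = "reviewer"
    RESPONDENT = "respondent"

    @classmethod
    def managers(cls) -> list["Role"]:
        # assumed to mirror Role.managers() in the codebase
        return [cls.OWNER, cls.MANAGER, cls.COORDINATOR, cls.EDITOR, cls.REVIEWER]

def has_role(current_roles: set[str], role: str) -> bool:
    manager_roles = set(Role.managers())
    if role not in manager_roles:
        return role in current_roles
    # any manager-level role grants any manager-level check
    return bool(current_roles & manager_roles)

assert has_role({"editor"}, "manager")
assert not has_role({"respondent"}, "manager")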
diff --git a/src/apps/workspaces/service/workspace.py b/src/apps/workspaces/service/workspace.py
index c2addb95eba..b0dee95e852 100644
--- a/src/apps/workspaces/service/workspace.py
+++ b/src/apps/workspaces/service/workspace.py
@@ -15,15 +15,19 @@
AnswerDbApplets,
WorkspaceApplet,
WorkspaceArbitrary,
+ WorkspaceArbitraryCreate,
+ WorkspaceArbitraryFields,
WorkspaceInfo,
WorkspaceManager,
WorkspaceRespondent,
WorkspaceSearchApplet,
)
from apps.workspaces.errors import (
+ ArbitraryServerSettingsError,
InvalidAppletIDFilter,
WorkspaceAccessDenied,
WorkspaceDoesNotExistError,
+ WorkspaceNotFoundError,
)
from apps.workspaces.service.check_access import CheckAccessService
from apps.workspaces.service.user_access import UserAccessService
@@ -92,9 +96,10 @@ async def update_workspace_name(
if not user_workspace:
user_workspace = await self.create_workspace_from_user(user)
if not user_workspace.is_modified and workspace_prefix:
- await UserWorkspaceCRUD(self.session).update(
- user,
- workspace_prefix,
+ user_workspace.workspace_name = workspace_prefix
+ await UserWorkspaceCRUD(self.session).update_by_user_id(
+ user.id,
+ user_workspace,
)
async def get_workspace_respondents(
@@ -301,6 +306,25 @@ async def get_arbitrary_info(
except ValidationError:
return None
+ async def get_arbitrary_info_by_owner_id(
+ self, owner_id: uuid.UUID
+ ) -> WorkspaceArbitrary | None:
+ schema = await UserWorkspaceCRUD(self.session).get_by_user_id(owner_id)
+ if not schema:
+ return None
+ try:
+ return WorkspaceArbitrary.from_orm(schema)
+ except ValidationError:
+ return None
+
+ async def get_arbitraries_map(
+ self, applet_ids: list[uuid.UUID]
+ ) -> dict[str | None, list[uuid.UUID]]:
+ """Returning map {"arbitrary_uri": [applet_ids]}"""
+ return await UserWorkspaceCRUD(
+ self.session
+ ).get_arbitraries_map_by_applet_ids(applet_ids)
+
async def get_user_answer_db_info(self) -> list[AnswerDbApplets]:
db_info = await UserWorkspaceCRUD(
self.session
@@ -328,3 +352,25 @@ async def get_user_answer_db_info(self) -> list[AnswerDbApplets]:
return [default_db_applets, *db_applets_map.values()]
return list(db_applets_map.values())
+
+ async def set_arbitrary_server(
+ self, data: WorkspaceArbitraryCreate, *, rewrite: bool = False
+ ):
+ repository = UserWorkspaceCRUD(self.session)
+ schema = await repository.get_by_user_id(self._user_id)
+ if not schema:
+ raise WorkspaceNotFoundError("Workspace not found")
+ arbitrary_data = WorkspaceArbitraryFields.from_orm(schema)
+ if not arbitrary_data.is_arbitrary_empty() and not rewrite:
+ raise ArbitraryServerSettingsError(
+ arbitrary_data, "Arbitrary settings are already set"
+ )
+ for k, v in data.dict(by_alias=False).items():
+ setattr(schema, k, v)
+ await repository.update_by_user_id(schema.user_id, schema)
+
+ async def get_arbitrary_list(self) -> list[WorkspaceArbitrary]:
+ schemas = await UserWorkspaceCRUD(self.session).get_arbitrary_list()
+ if not schemas:
+ return []
+ return [WorkspaceArbitrary.from_orm(schema) for schema in schemas]
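Usage sketch for set_arbitrary_server (not from this PR; the service constructor and the exact WorkspaceArbitraryCreate fields beyond those listed in the encryption migration below are assumptions):

data = WorkspaceArbitraryCreate(
    database_uri="postgresql+asyncpg://user:secret@arbitrary-host/db",
    storage_type="s3",
    storage_access_key="AKIA...",
    storage_secret_key="...",
    storage_region="us-east-1",
    storage_url=None,
    storage_bucket="answers-bucket",
)
service = WorkspaceService(session, owner_id)
await service.set_arbitrary_server(data)                # stores the settings
await service.set_arbitrary_server(data)                # raises ArbitraryServerSettingsError
await service.set_arbitrary_server(data, rewrite=True)  # overwrites existing settings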
diff --git a/src/apps/workspaces/test_workspaces.py b/src/apps/workspaces/test_workspaces.py
index 1526cfeb8bc..92e7ed69fce 100644
--- a/src/apps/workspaces/test_workspaces.py
+++ b/src/apps/workspaces/test_workspaces.py
@@ -1,7 +1,5 @@
from uuid import uuid4
-import pytest
-
from apps.shared.test import BaseTest
from apps.workspaces.domain.constants import Role
from infrastructure.database import rollback
@@ -63,6 +61,11 @@ class TestWorkspaces(BaseTest):
"/workspaces/{owner_id}/respondents/{user_id}/pin"
)
workspace_managers_pin = "/workspaces/{owner_id}/managers/{user_id}/pin"
+ workspace_get_applet_respondent = (
+ "/workspaces/{owner_id}"
+ "/applets/{applet_id}"
+ "/respondents/{respondent_id}"
+ )
@rollback
async def test_user_workspace_list(self):
@@ -253,12 +256,15 @@ async def test_workspace_applets_respondent_update(self):
role="respondent",
),
)
- assert response.json()["count"] == 3
- assert "New respondent" in response.json()["result"][1]["nicknames"]
- assert (
- "f0dd4996-e0eb-461f-b2f8-ba873a674710"
- in response.json()["result"][1]["secretIds"]
- )
+ payload = response.json()
+ assert payload["count"] == 4
+ nicknames = []
+ secret_ids = []
+ for respondent in payload["result"]:
+ nicknames += respondent.get("nicknames", [])
+ secret_ids += respondent.get("secretIds", [])
+ assert "New respondent" in nicknames
+ assert "f0dd4996-e0eb-461f-b2f8-ba873a674710" in secret_ids
@rollback
async def test_wrong_workspace_applets_list(self):
@@ -284,18 +290,16 @@ async def test_get_workspace_respondents(self):
assert response.status_code == 200, response.json()
data = response.json()
- assert data["count"] == 3
+ assert data["count"] == 5
assert data["result"][0]["nicknames"]
assert data["result"][0]["secretIds"]
# test search
search_params = {
"f0dd4996-e0eb-461f-b2f8-ba873a674788": [
- "jane",
"b2f8-ba873a674788",
],
"f0dd4996-e0eb-461f-b2f8-ba873a674789": [
- "john",
"f0dd4996-e0eb-461f-b2f8-ba873a674789",
],
}
@@ -332,18 +336,18 @@ async def test_get_workspace_applet_respondents(self):
assert response.status_code == 200, response.json()
data = response.json()
- assert data["count"] == 3
+ assert data["count"] == 4
assert data["result"][0]["nicknames"]
assert data["result"][0]["secretIds"]
# test search
search_params = {
"f0dd4996-e0eb-461f-b2f8-ba873a674788": [
- "jane",
+ # "jane",
"b2f8-ba873a674788",
],
"f0dd4996-e0eb-461f-b2f8-ba873a674789": [
- "john",
+ # "john",
"f0dd4996-e0eb-461f-b2f8-ba873a674789",
],
}
@@ -522,7 +526,6 @@ async def test_set_workspace_manager_accesses(self):
assert response.status_code == 200, response.json()
# TODO: check from database results
- @pytest.mark.skip
@rollback
async def test_pin_workspace_respondents(self):
await self.client.login(
@@ -596,7 +599,6 @@ async def test_pin_workspace_respondents(self):
)
assert response.json()["result"][-1]["id"] == user_id
- @pytest.mark.skip
@rollback
async def test_pin_workspace_managers(self):
await self.client.login(
@@ -808,3 +810,51 @@ async def test_applets_flat_list(self):
assert response.status_code == 200
assert response.json()["count"] == 1
assert response.json()["result"][0]["type"] == "applet"
+
+ @rollback
+ async def test_applet_get_respondent_success(self):
+ await self.client.login(
+ self.login_url, "tom@mindlogger.com", "Test1234!"
+ )
+ url = self.workspace_get_applet_respondent.format(
+ owner_id="7484f34a-3acc-4ee6-8a94-fd7299502fa1",
+ applet_id="92917a56-d586-4613-b7aa-991f2c4b15b2",
+ respondent_id="7484f34a-3acc-4ee6-8a94-fd7299502fa1",
+ )
+ res = await self.client.get(url)
+ assert res.status_code == 200
+ body = res.json()
+ respondent = body.get("result", {})
+ assert len(respondent) == 2
+ # the encrypted nickname "hFywashKw+KlcDPazIy5QHz4AdkTOYkD28Q8+dpeDDA="
+ # decrypts to 'Mindlogger ChildMindInstitute'
+ assert respondent["nickname"] == "Mindlogger ChildMindInstitute"
+ assert respondent["secretUserId"] == (
+ "f0dd4996-e0eb-461f-b2f8-ba873a674782"
+ )
+
+ @rollback
+ async def test_applet_get_respondent_not_found(self):
+ await self.client.login(
+ self.login_url, "tom@mindlogger.com", "Test1234!"
+ )
+ url = self.workspace_get_applet_respondent.format(
+ owner_id="7484f34a-3acc-4ee6-8a94-fd7299502fa1",
+ applet_id="92917a56-d586-4613-b7aa-991f2c4b15b2",
+ respondent_id="7484f34a-3acc-4ee6-8a94-fd7299502fa0",
+ )
+ res = await self.client.get(url)
+ assert res.status_code == 404
+
+ @rollback
+ async def test_applet_get_respondent_access_denied_for_respondent_role(
+ self,
+ ):
+ await self.client.login(self.login_url, "bob@gmail.com", "Test1234!")
+ url = self.workspace_get_applet_respondent.format(
+ owner_id="7484f34a-3acc-4ee6-8a94-fd7299502fa1",
+ applet_id="92917a56-d586-4613-b7aa-991f2c4b15b2",
+ respondent_id="7484f34a-3acc-4ee6-8a94-fd7299502fa0",
+ )
+ res = await self.client.get(url)
+ assert res.status_code == 403
diff --git a/src/broker.py b/src/broker.py
index dd66f181550..c7d15c9441c 100644
--- a/src/broker.py
+++ b/src/broker.py
@@ -1,10 +1,13 @@
import taskiq_fastapi
from taskiq import InMemoryBroker
from taskiq_aio_pika import AioPikaBroker
+from taskiq_redis import RedisAsyncResultBackend
from config import settings
-broker = AioPikaBroker(settings.rabbitmq.url)
+broker = AioPikaBroker(settings.rabbitmq.url).with_result_backend(
+ RedisAsyncResultBackend(settings.redis.url)
+)
if settings.env == "testing":
broker = InMemoryBroker()
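With the Redis result backend attached, a result produced by a worker can be awaited from the enqueuing process. A minimal round-trip sketch (the task itself is illustrative, not part of this PR):

from broker import broker

@broker.task
async def add(a: int, b: int) -> int:
    return a + b

async def demo() -> None:
    task = await add.kiq(1, 2)                    # enqueued via RabbitMQ
    result = await task.wait_result(timeout=10)   # fetched from Redis
    assert result.return_value == 3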
diff --git a/src/cli.py b/src/cli.py
new file mode 100644
index 00000000000..75331a7106f
--- /dev/null
+++ b/src/cli.py
@@ -0,0 +1,24 @@
+import os
+
+abspath = os.path.abspath(__file__)
+dname = os.path.dirname(os.path.dirname(abspath))
+os.chdir(dname)
+
+
+import typer # noqa: E402
+
+from apps.activities.commands import activities # noqa: E402
+from apps.answers.commands import convert_assessments # noqa: E402
+from apps.shared.commands import patch # noqa: E402
+from apps.workspaces.commands import arbitrary_server_cli # noqa: E402
+
+cli = typer.Typer()
+cli.add_typer(arbitrary_server_cli, name="arbitrary")
+cli.add_typer(convert_assessments, name="assessments")
+cli.add_typer(activities, name="activities")
+
+cli.add_typer(patch, name="patch")
+
+if __name__ == "__main__":
+ cli()
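The Typer app can be smoke-tested without a shell; sub-command names come from the add_typer calls above:

from typer.testing import CliRunner

from cli import cli

runner = CliRunner()
result = runner.invoke(cli, ["arbitrary", "--help"])
assert result.exit_code == 0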
diff --git a/src/config/__init__.py b/src/config/__init__.py
index f64c23608e7..5dfc8b55643 100644
--- a/src/config/__init__.py
+++ b/src/config/__init__.py
@@ -17,7 +17,7 @@
from config.sentry import SentrySettings
from config.service import JsonLdConverterSettings, ServiceSettings
from config.superuser import SuperAdmin
-from config.task import AnswerEncryption
+from config.task import AnswerEncryption, AudioFileConvert, ImageConvert
# NOTE: Settings powered by pydantic
@@ -27,6 +27,7 @@ class Settings(BaseSettings):
apps_dir: Path
locale_dir: Path
default_language: str = "en"
+ content_length_limit: int | None = 150 * 1024 * 1024
debug: bool = True
commit_id: str = "Not assigned"
@@ -79,9 +80,15 @@ class Settings(BaseSettings):
anonymous_respondent = AnonymousRespondent()
task_answer_encryption = AnswerEncryption()
+ task_audio_file_convert = AudioFileConvert()
+ task_image_convert = ImageConvert()
logs: Logs = Logs()
+ @property
+ def uploads_dir(self):
+ return self.root_dir.parent / "uploads"
+
class Config:
env_nested_delimiter = "__"
env_file = ".env"
diff --git a/src/config/cdn.py b/src/config/cdn.py
index 4c17ed2ec43..c92dbf02976 100644
--- a/src/config/cdn.py
+++ b/src/config/cdn.py
@@ -19,7 +19,11 @@ class CDNSettings(BaseModel):
ttl_signed_urls: int = 3600
gcp_endpoint_url = "https://storage.googleapis.com"
+ endpoint_url: str | None = None
+ storage_address: str | None = None
@property
def url(self):
- return f"https://{self.domain}/{{key}}"
+ if self.domain:
+ return f"https://{self.domain}/{{key}}"
+ return f"{self.storage_address}/{self.bucket}/{{key}}"
diff --git a/src/config/task.py b/src/config/task.py
index ec904b65dec..c0f7dec4bdc 100644
--- a/src/config/task.py
+++ b/src/config/task.py
@@ -5,3 +5,18 @@ class AnswerEncryption(BaseModel):
batch_limit: int = 1000
max_retries: int = 5
retry_timeout: int = 12 * 60 * 60
+
+
+class AudioFileConvert(BaseModel):
+ command: str = "ffmpeg -i {fin} -vn -ar 44100 -ac 2 -b:a 192k {fout}"
+ subprocess_timeout: int = 60 # sec
+ task_wait_timeout: int = 30 # sec
+
+
+class ImageConvert(BaseModel):
+ command: str = (
+ "convert -strip -interlace JPEG -sampling-factor 4:2:0 "
+ "-quality 85 -colorspace RGB {fin} {fout}"
+ )
+ subprocess_timeout: int = 20 # sec
+ task_wait_timeout: int = 10 # sec
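A sketch of how a worker might consume these templates (the helper itself is hypothetical; only the command and subprocess_timeout fields come from the config above):

import shlex
import subprocess

from config import settings

def convert_audio(fin: str, fout: str) -> None:
    # hypothetical helper: fill the template and enforce the timeout
    cmd = settings.task_audio_file_convert.command.format(fin=fin, fout=fout)
    subprocess.run(
        shlex.split(cmd),
        check=True,
        timeout=settings.task_audio_file_convert.subprocess_timeout,
    )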
diff --git a/src/infrastructure/app.py b/src/infrastructure/app.py
index a2daed520a8..88d3b02dfb7 100644
--- a/src/infrastructure/app.py
+++ b/src/infrastructure/app.py
@@ -64,6 +64,13 @@
# Declare your middlewares here
middlewares: Iterable[tuple[Type[middlewares_.Middleware], dict]] = (
+ (
+ middlewares_.ContentLengthLimitMiddleware,
+ dict(
+ content_length_limit=settings.content_length_limit,
+ methods=["POST"],
+ ),
+ ),
(middlewares_.InternalizationMiddleware, {}),
(middlewares_.CORSMiddleware, middlewares_.cors_options),
)
diff --git a/src/infrastructure/database/crud.py b/src/infrastructure/database/crud.py
index d26a495a06d..7131a147351 100644
--- a/src/infrastructure/database/crud.py
+++ b/src/infrastructure/database/crud.py
@@ -137,3 +137,12 @@ async def exist_by_key(self, key: str, val: typing.Any) -> bool:
query = query.exists()
db_result = await self._execute(select(query))
return db_result.scalars().first() or False
+
+ async def restore(self, key: str, val: typing.Any) -> None:
+ field = getattr(self.schema_class, key)
+ query: Query = update(self.schema_class)
+ query = query.where(field == val)
+ query = query.values(is_deleted=False)
+ await self._execute(query)
diff --git a/src/infrastructure/database/migrations/versions/2023_11_11_19_02-encrypt_workspace_arbitrary_fields.py b/src/infrastructure/database/migrations/versions/2023_11_11_19_02-encrypt_workspace_arbitrary_fields.py
new file mode 100644
index 00000000000..800b495c6b7
--- /dev/null
+++ b/src/infrastructure/database/migrations/versions/2023_11_11_19_02-encrypt_workspace_arbitrary_fields.py
@@ -0,0 +1,105 @@
+"""Encrypt workspace arbitrary fields
+
+Revision ID: 0242aa768e9d
+Revises: 8c59c7363c67
+Create Date: 2023-11-11 19:02:32.433001
+
+"""
+import sqlalchemy as sa
+from alembic import op
+from sqlalchemy import Unicode
+from sqlalchemy_utils import StringEncryptedType
+
+from apps.shared.encryption import get_key
+
+# revision identifiers, used by Alembic.
+revision = "0242aa768e9d"
+down_revision = "8c59c7363c67"
+branch_labels = None
+depends_on = None
+
+
+to_encrypt = [
+ "database_uri",
+ "storage_type",
+ "storage_access_key",
+ "storage_secret_key",
+ "storage_region",
+ "storage_url",
+ "storage_bucket",
+]
+table_name = "users_workspaces"
+
+
+def upgrade() -> None:
+ conn = op.get_bind()
+
+ _cnd = " or ".join([f"{col} is not null" for col in to_encrypt])
+ _cols = ", ".join(to_encrypt)
+ result = conn.execute(
+ sa.text(f"SELECT id, {_cols} FROM {table_name} WHERE {_cnd}")
+ ).all()
+
+ for column_name in to_encrypt:
+ # Changing the field type for encryption with db models
+ op.alter_column(
+ table_name,
+ column_name,
+ type_=StringEncryptedType(Unicode, get_key),
+ existing_type=sa.String(),
+ )
+
+ # Encrypt the existing plaintext values and write them back
+ for row in result:
+ w_id = row.id
+ data = {}
+ for col in to_encrypt:
+ if val := getattr(row, col):
+ encrypted_val = StringEncryptedType(
+ Unicode, get_key
+ ).process_bind_param(val, dialect=conn.dialect)
+ data[col] = encrypted_val
+ if data:
+ upd_cols = ", ".join([f"{col} = :{col}" for col in data.keys()])
+ data["id"] = w_id
+ conn.execute(
+ sa.text(f"UPDATE {table_name} SET {upd_cols} WHERE id = :id"),
+ data,
+ )
+
+
+def downgrade() -> None:
+ conn = op.get_bind()
+
+ _cnd = " or ".join([f"{col} is not null" for col in to_encrypt])
+ _cols = ", ".join(to_encrypt)
+ result = conn.execute(
+ sa.text(f"SELECT id, {_cols} FROM {table_name} WHERE {_cnd}")
+ ).all()
+
+ for column_name in to_encrypt:
+ # Revert the column type back to plain String
+ op.alter_column(
+ table_name,
+ column_name,
+ type_=sa.String(),
+ existing_type=StringEncryptedType(Unicode, get_key),
+ )
+
+ # Decrypt the existing values and write them back as plaintext
+ for row in result:
+ w_id = row.id
+ data = {}
+ for col in to_encrypt:
+ if encrypted_val := getattr(row, col):
+ val = StringEncryptedType(
+ Unicode, get_key
+ ).process_result_value(encrypted_val, dialect=conn.dialect)
+ data[col] = val
+ if data:
+ upd_cols = ", ".join([f"{col} = :{col}" for col in data.keys()])
+ data["id"] = w_id
+ conn.execute(
+ sa.text(f"UPDATE {table_name} SET {upd_cols} WHERE id = :id"),
+ data,
+ )
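The migration writes values encrypted with the same type object the ORM will use, so a bind/result round trip must be the identity. Quick check (any SQLAlchemy dialect works):

from sqlalchemy import Unicode
from sqlalchemy.dialects import postgresql
from sqlalchemy_utils import StringEncryptedType

from apps.shared.encryption import get_key

enc = StringEncryptedType(Unicode, get_key)
dialect = postgresql.dialect()
token = enc.process_bind_param("postgresql://u:p@host/db", dialect)
assert enc.process_result_value(token, dialect) == "postgresql://u:p@host/db"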
diff --git a/src/infrastructure/database/migrations/versions/2023_11_12_21_45-add_field_nickname_to_user_applet_.py b/src/infrastructure/database/migrations/versions/2023_11_12_21_45-add_field_nickname_to_user_applet_.py
new file mode 100644
index 00000000000..42ffaafd4b8
--- /dev/null
+++ b/src/infrastructure/database/migrations/versions/2023_11_12_21_45-add_field_nickname_to_user_applet_.py
@@ -0,0 +1,91 @@
+"""Add field nickname to user_applet_accesses
+
+Revision ID: a7faad5855cc
+Revises: 0242aa768e9d
+Create Date: 2023-11-12 21:45:42.636562
+
+"""
+import json
+
+import sqlalchemy as sa
+from alembic import op
+from sqlalchemy_utils.types.encrypted.encrypted_type import StringEncryptedType
+
+from apps.shared.encryption import get_key
+
+# revision identifiers, used by Alembic.
+revision = "a7faad5855cc"
+down_revision = "0242aa768e9d"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ conn = op.get_bind()
+ result = conn.execute(
+ sa.text(
+ "SELECT id, meta FROM user_applet_accesses WHERE role='respondent' and meta is NOT NULL"
+ )
+ )
+ op.add_column(
+ "user_applet_accesses",
+ sa.Column(
+ "nickname",
+ StringEncryptedType(sa.Unicode, get_key),
+ nullable=True,
+ ),
+ )
+ for row in result:
+ pk, meta = row
+ nickname = meta.get("nickname")
+ if nickname:
+ encrypted_field = StringEncryptedType(
+ sa.Unicode, get_key
+ ).process_bind_param(nickname, dialect=conn.dialect)
+ meta["nickname"] = None
+ conn.execute(
+ sa.text(
+ f"""
+ UPDATE user_applet_accesses
+ SET nickname = :encrypted_field, meta= :meta
+ WHERE id = :pk
+ """
+ ),
+ {
+ "encrypted_field": encrypted_field,
+ "meta": json.dumps(meta),
+ "pk": pk,
+ },
+ )
+
+ # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ conn = op.get_bind()
+ result = conn.execute(
+ sa.text(
+ "SELECT id, nickname, meta FROM user_applet_accesses WHERE role='respondent'"
+ )
+ )
+ op.drop_column("user_applet_accesses", "nickname")
+ for row in result:
+ pk, nickname, meta = row
+ if nickname is not None:
+ decrypted_field = StringEncryptedType(
+ sa.Unicode, get_key
+ ).process_result_value(nickname, dialect=conn.dialect)
+ meta["nickname"] = decrypted_field
+ conn.execute(
+ sa.text(
+ f"""
+ UPDATE user_applet_accesses
+ SET meta = :decrypted_field
+ WHERE id = :pk
+ """
+ ),
+ {"decrypted_field": json.dumps(meta), "pk": pk},
+ )
+ # ### end Alembic commands ###
diff --git a/src/infrastructure/database/migrations/versions/2023_11_16_16_26-add_nickname_encrypted_field_in_.py b/src/infrastructure/database/migrations/versions/2023_11_16_16_26-add_nickname_encrypted_field_in_.py
new file mode 100644
index 00000000000..03bceec3c32
--- /dev/null
+++ b/src/infrastructure/database/migrations/versions/2023_11_16_16_26-add_nickname_encrypted_field_in_.py
@@ -0,0 +1,91 @@
+"""Add nickname encrypted field in invitation
+
+Revision ID: 93087521e7ee
+Revises: a7faad5855cc
+Create Date: 2023-11-16 16:26:19.400694
+
+"""
+import json
+
+import sqlalchemy as sa
+from alembic import op
+from sqlalchemy_utils.types.encrypted.encrypted_type import StringEncryptedType
+
+from apps.shared.encryption import get_key
+
+# revision identifiers, used by Alembic.
+revision = "93087521e7ee"
+down_revision = "a7faad5855cc"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+
+ conn = op.get_bind()
+ result = conn.execute(
+ sa.text(
+ "SELECT id, meta FROM invitations WHERE role='respondent' and meta is NOT NULL"
+ )
+ )
+ op.add_column(
+ "invitations",
+ sa.Column(
+ "nickname",
+ StringEncryptedType(sa.Unicode, get_key),
+ nullable=True,
+ ),
+ )
+ for row in result:
+ pk, meta = row
+ nickname = meta.get("nickname")
+ if nickname:
+ encrypted_field = StringEncryptedType(
+ sa.Unicode, get_key
+ ).process_bind_param(nickname, dialect=conn.dialect)
+ meta.pop("nickname")
+ conn.execute(
+ sa.text(
+ f"""
+ UPDATE invitations
+ SET nickname = :encrypted_field, meta= :meta
+ WHERE id = :pk
+ """
+ ),
+ {
+ "encrypted_field": encrypted_field,
+ "meta": json.dumps(meta),
+ "pk": pk,
+ },
+ )
+ # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ conn = op.get_bind()
+ result = conn.execute(
+ sa.text(
+ "SELECT id, nickname, meta FROM invitations WHERE role='respondent'"
+ )
+ )
+ op.drop_column("invitations", "nickname")
+ for row in result:
+ pk, nickname, meta = row
+ if nickname is not None:
+ decrypted_field = StringEncryptedType(
+ sa.Unicode, get_key
+ ).process_result_value(nickname, dialect=conn.dialect)
+ meta["nickname"] = decrypted_field
+ conn.execute(
+ sa.text(
+ f"""
+ UPDATE invitations
+ SET meta = :decrypted_field
+ WHERE id = :pk
+ """
+ ),
+ {"decrypted_field": json.dumps(meta), "pk": pk},
+ )
+ # ### end Alembic commands ###
diff --git a/src/infrastructure/database/migrations/versions/2023_11_28_11_51-creator_id_to_applet_transfer_status_.py b/src/infrastructure/database/migrations/versions/2023_11_28_11_51-creator_id_to_applet_transfer_status_.py
new file mode 100644
index 00000000000..eeb23462eb7
--- /dev/null
+++ b/src/infrastructure/database/migrations/versions/2023_11_28_11_51-creator_id_to_applet_transfer_status_.py
@@ -0,0 +1,146 @@
+"""Creator id to applet, transfer status, email encryption
+
+Revision ID: 75c9ca1f506b
+Revises: 93087521e7ee
+Create Date: 2023-11-28 11:51:29.381770
+
+"""
+import uuid
+
+import sqlalchemy as sa
+from alembic import op
+from sqlalchemy.dialects import postgresql
+from sqlalchemy_utils.types.encrypted.encrypted_type import StringEncryptedType
+
+from apps.shared.encryption import get_key
+
+# revision identifiers, used by Alembic.
+revision = "75c9ca1f506b"
+down_revision = "93087521e7ee"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.add_column(
+ "applets",
+ sa.Column("creator_id", postgresql.UUID(as_uuid=True), nullable=True),
+ )
+ op.create_foreign_key(
+ op.f("fk_applets_creator_id_users"),
+ "applets",
+ "users",
+ ["creator_id"],
+ ["id"],
+ ondelete="RESTRICT",
+ )
+ conn = op.get_bind()
+ result = conn.execute(
+ sa.text(
+ f"""
+ SELECT DISTINCT a.id, uaa.user_id, a.extra_fields->>'creator' FROM applets a
+ JOIN user_applet_accesses uaa on a.id = uaa.applet_id
+ WHERE a.is_deleted is false and uaa.role='owner'
+ and uaa.is_deleted is false;
+ """
+ )
+ )
+ for row in result:
+ pk, owner_id, creator_id = row
+ if creator_id:
+ creator_id = uuid.UUID(str(creator_id) + "00000000")
+ conn.execute(
+ sa.text(
+ f"""
+ UPDATE applets
+ SET creator_id = :creator_id
+ WHERE id = :pk
+ """
+ ),
+ {"creator_id": creator_id, "pk": pk},
+ )
+ else:
+ if owner_id:
+ conn.execute(
+ sa.text(
+ f"""
+ UPDATE applets
+ SET creator_id = :owner_id
+ WHERE id = :pk
+ """
+ ),
+ {"owner_id": owner_id, "pk": pk},
+ )
+
+ op.add_column(
+ "transfer_ownership",
+ sa.Column(
+ "status", sa.String(), server_default="pending", nullable=True
+ ),
+ )
+
+ # encrypt email in transfer_ownership table
+ result_emails = conn.execute(
+ sa.text(
+ "SELECT id, email FROM transfer_ownership WHERE email IS NOT NULL"
+ )
+ )
+ op.alter_column(
+ "transfer_ownership",
+ "email",
+ type_=StringEncryptedType(sa.Unicode, get_key),
+ default=None,
+ )
+ for row in result_emails:
+ pk, email = row
+ encrypted_field = StringEncryptedType(
+ sa.Unicode, get_key
+ ).process_bind_param(email, dialect=conn.dialect)
+ conn.execute(
+ sa.text(
+ f"""
+ UPDATE transfer_ownership
+ SET email = :encrypted_field
+ WHERE id = :pk
+ """
+ ),
+ {"encrypted_field": encrypted_field, "pk": pk},
+ )
+ # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_column("transfer_ownership", "status")
+ op.drop_constraint(
+ op.f("fk_applets_creator_id_users"), "applets", type_="foreignkey"
+ )
+ op.drop_column("applets", "creator_id")
+
+ # decrypt email in transfer_ownership table
+ conn = op.get_bind()
+ result_emails = conn.execute(
+ sa.text(
+ "SELECT id, email FROM transfer_ownership WHERE email IS NOT NULL"
+ )
+ )
+ op.alter_column(
+ "transfer_ownership", "email", type_=sa.String(), default=None
+ )
+ for row in result_emails:
+ pk, email = row
+ decrypted_field = StringEncryptedType(
+ sa.Unicode, get_key
+ ).process_result_value(email, dialect=conn.dialect)
+ conn.execute(
+ sa.text(
+ f"""
+ UPDATE transfer_ownership
+ SET email = :decrypted_field
+ WHERE id = :pk
+ """
+ ),
+ {"decrypted_field": decrypted_field, "pk": pk},
+ )
+ # ### end Alembic commands ###
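The extra_fields->>'creator' value is a 24-character Mongo ObjectId in hex; appending eight zeros gives the 32 hex digits a UUID requires, presumably the same padding used when user ids were migrated. For example:

import uuid

creator = "507f1f77bcf86cd799439011"  # illustrative ObjectId
assert uuid.UUID(creator + "00000000") == uuid.UUID(
    "507f1f77-bcf8-6cd7-9943-901100000000"
)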
diff --git a/src/infrastructure/database/migrations/versions/2023_11_29_17_08-cron_removing_expired_blacklisted_tokens.py b/src/infrastructure/database/migrations/versions/2023_11_29_17_08-cron_removing_expired_blacklisted_tokens.py
new file mode 100644
index 00000000000..dddf69081ab
--- /dev/null
+++ b/src/infrastructure/database/migrations/versions/2023_11_29_17_08-cron_removing_expired_blacklisted_tokens.py
@@ -0,0 +1,42 @@
+"""Cron removing expired blacklisted tokens
+
+Revision ID: 69b1dfaf3c0d
+Revises: 75c9ca1f506b
+Create Date: 2023-11-29 17:08:41.800439
+
+"""
+from alembic import op
+from sqlalchemy import text
+
+from config import settings
+
+# revision identifiers, used by Alembic.
+revision = "69b1dfaf3c0d"
+down_revision = "75c9ca1f506b"
+branch_labels = None
+depends_on = None
+
+task_name = "clear_token_blacklist"
+schedule = "0 9 * * *"
+query = text(
+ "delete from token_blacklist " "where \"exp\" < now() at time zone 'utc'"
+)
+
+
+def upgrade() -> None:
+ if settings.env != "testing":
+ op.execute(
+ text(
+ f"SELECT cron.schedule(:task_name, :schedule, $${query}$$);"
+ ).bindparams(task_name=task_name, schedule=schedule)
+ )
+
+
+def downgrade() -> None:
+ if settings.env != "testing":
+ op.execute(
+ text(f"SELECT cron.unschedule(:task_name);").bindparams(
+ task_name=task_name
+ )
+ )
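This depends on the pg_cron extension; "0 9 * * *" fires daily at 09:00 in the database server's time zone. The registered job can be inspected through any connection, e.g. (cron.job's column set varies slightly across pg_cron versions):

rows = conn.execute(sa.text("SELECT jobname, schedule, command FROM cron.job")).all()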
diff --git a/src/infrastructure/database/migrations/versions/2023_12_04_15_45-remove_nickname_from_guest_account.py b/src/infrastructure/database/migrations/versions/2023_12_04_15_45-remove_nickname_from_guest_account.py
new file mode 100644
index 00000000000..50c076b27eb
--- /dev/null
+++ b/src/infrastructure/database/migrations/versions/2023_12_04_15_45-remove_nickname_from_guest_account.py
@@ -0,0 +1,35 @@
+"""Remove nickname from guest account
+
+Revision ID: 63a2a290c7e6
+Revises: 69b1dfaf3c0d
+Create Date: 2023-12-04 15:45:11.543448
+
+"""
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "63a2a290c7e6"
+down_revision = "69b1dfaf3c0d"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+ conn = op.get_bind()
+ conn.execute(
+ sa.text(
+ """
+ UPDATE user_applet_accesses SET nickname=NULL
+ WHERE user_id in (
+ SELECT id
+ FROM users
+ WHERE is_anonymous_respondent=TRUE
+ );
+ """
+ )
+ )
+
+
+def downgrade() -> None:
+ pass
diff --git a/src/infrastructure/database/migrations/versions/2023_12_06_13_47-add_performance_task_type_column.py b/src/infrastructure/database/migrations/versions/2023_12_06_13_47-add_performance_task_type_column.py
new file mode 100644
index 00000000000..29e4c8c11d4
--- /dev/null
+++ b/src/infrastructure/database/migrations/versions/2023_12_06_13_47-add_performance_task_type_column.py
@@ -0,0 +1,79 @@
+"""Add performance_task_type to the table
+
+Revision ID: 186481f0c0cc
+Revises: 63a2a290c7e6
+Create Date: 2023-12-06 13:47:49.694746
+
+"""
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "186481f0c0cc"
+down_revision = "63a2a290c7e6"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.add_column(
+ "activities",
+ sa.Column(
+ "performance_task_type", sa.String(length=255), nullable=True
+ ),
+ )
+ op.add_column(
+ "activity_histories",
+ sa.Column(
+ "performance_task_type", sa.String(length=255), nullable=True
+ ),
+ )
+ conn = op.get_bind()
+ conn.execute(
+ sa.text(
+ """
+ with
+ performance as (
+ select distinct
+ activity_id,
+ case when response_type in ('ABTrails', 'flanker') then response_type
+ else config->>'user_input_type'
+ end as performance_task_type
+ from activity_items
+ where response_type in ('ABTrails', 'flanker')
+ or response_type = 'stabilityTracker' and config->>'user_input_type' in ('touch', 'gyroscope')
+ )
+ update activities set performance_task_type = performance.performance_task_type
+ from performance
+ where id = performance.activity_id
+ """
+ )
+ )
+ conn.execute(
+ sa.text(
+ """
+ with
+ performance as (
+ select distinct
+ activity_id,
+ case when response_type in ('ABTrails', 'flanker') then response_type
+ else config->>'user_input_type'
+ end as performance_task_type
+ from activity_item_histories
+ where response_type in ('ABTrails', 'flanker')
+ or response_type = 'stabilityTracker' and config->>'user_input_type' in ('touch', 'gyroscope')
+ )
+ update activity_histories set performance_task_type = performance.performance_task_type
+ from performance
+ where id_version = performance.activity_id
+ """
+ )
+ )
+
+
+def downgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_column("activity_histories", "performance_task_type")
+ op.drop_column("activities", "performance_task_type")
+ # ### end Alembic commands ###
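In Python terms, the CASE expression implements this mapping (a paraphrase of the SQL above, not code from this PR):

def performance_task_type(response_type: str, config: dict) -> str | None:
    if response_type in ("ABTrails", "flanker"):
        return response_type
    if response_type == "stabilityTracker" and config.get(
        "user_input_type"
    ) in ("touch", "gyroscope"):
        return config["user_input_type"]
    return None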
diff --git a/src/infrastructure/database/migrations/versions/2023_12_08_19_55-add_assessment_activity_version_id_on_.py b/src/infrastructure/database/migrations/versions/2023_12_08_19_55-add_assessment_activity_version_id_on_.py
new file mode 100644
index 00000000000..36b0be1ba1d
--- /dev/null
+++ b/src/infrastructure/database/migrations/versions/2023_12_08_19_55-add_assessment_activity_version_id_on_.py
@@ -0,0 +1,40 @@
+"""add assessment activity version id on answers item
+
+Revision ID: 60528d410fd1
+Revises: 186481f0c0cc
+Create Date: 2023-11-13 19:55:57.797942
+
+"""
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "60528d410fd1"
+down_revision = "186481f0c0cc"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.add_column(
+ "answers_items",
+ sa.Column("assessment_activity_id", sa.Text(), nullable=True),
+ )
+ op.create_index(
+ op.f("ix_answers_items_assessment_activity_id"),
+ "answers_items",
+ ["assessment_activity_id"],
+ unique=False,
+ )
+ # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_index(
+ op.f("ix_answers_items_assessment_activity_id"),
+ table_name="answers_items",
+ )
+ op.drop_column("answers_items", "assessment_activity_id")
+ # ### end Alembic commands ###
diff --git a/src/infrastructure/database/migrations/versions/2023_12_13_07_14-update_created_at_from_updated_at.py b/src/infrastructure/database/migrations/versions/2023_12_13_07_14-update_created_at_from_updated_at.py
new file mode 100644
index 00000000000..6b4de7a7560
--- /dev/null
+++ b/src/infrastructure/database/migrations/versions/2023_12_13_07_14-update_created_at_from_updated_at.py
@@ -0,0 +1,79 @@
+"""Update created_at from updated_at
+
+Revision ID: 87d3c8a8de55
+Revises: 60528d410fd1
+Create Date: 2023-12-13 07:14:28.322481
+
+"""
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "87d3c8a8de55"
+down_revision = "60528d410fd1"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+ conn = op.get_bind()
+ conn.execute(
+ sa.text(
+ """
+ update activity_histories
+ set created_at = updated_at
+ where migrated_date is not null
+ """
+ )
+ )
+ conn.execute(
+ sa.text(
+ """
+ update activity_item_histories
+ set created_at = updated_at
+ where migrated_date is not null
+ """
+ )
+ )
+
+
+def downgrade() -> None:
+ conn = op.get_bind()
+ conn.execute(
+ sa.text(
+ """
+ with
+ applets_created_at as (
+ select
+ id,
+ created_at
+ from applets
+ where migrated_date is not null
+ )
+ update activity_histories
+ set created_at = applets_created_at.created_at
+ from applets_created_at
+ where applets_created_at.id::text = split_part(applet_id, '_', 1)
+ and migrated_date is not null
+ """
+ )
+ )
+ conn.execute(
+ sa.text(
+ """
+ with
+ activities_created_at as (
+ select distinct
+ id_version,
+ created_at
+ from activity_histories
+ where migrated_date is not null
+ )
+ update activity_item_histories
+ set created_at = activities_created_at.created_at
+ from activities_created_at
+ where activities_created_at.id_version = activity_id
+ and migrated_date is not null
+ """
+ )
+ )
diff --git a/src/infrastructure/database/migrations/versions/2023_12_21_10_30-create_default_theme.py b/src/infrastructure/database/migrations/versions/2023_12_21_10_30-create_default_theme.py
new file mode 100644
index 00000000000..3958cf97b76
--- /dev/null
+++ b/src/infrastructure/database/migrations/versions/2023_12_21_10_30-create_default_theme.py
@@ -0,0 +1,79 @@
+"""Create default theme
+
+Revision ID: b993457637ad
+Revises: 87d3c8a8de55
+Create Date: 2023-12-21 10:30:18.107063
+
+"""
+import uuid
+
+from alembic import op
+from sqlalchemy import Boolean, String, delete, select
+from sqlalchemy.dialects.postgresql import UUID
+from sqlalchemy.sql import column, table
+
+# revision identifiers, used by Alembic.
+revision = "b993457637ad"
+down_revision = "87d3c8a8de55"
+branch_labels = None
+depends_on = None
+
+THEMES_TABLE = table(
+ "themes",
+ column("id", UUID),
+ column("name", String),
+ column("primary_color", String),
+ column("secondary_color", String),
+ column("tertiary_color", String),
+ column("creator_id", UUID),
+ column("is_default", Boolean),
+ column("public", Boolean),
+ column("allow_rename", Boolean),
+)
+FIRST_DEFAULT_THEME = {
+ THEMES_TABLE.c.name: "First default theme",
+ THEMES_TABLE.c.primary_color: "#FFFFFF",
+ THEMES_TABLE.c.secondary_color: "#000000",
+ THEMES_TABLE.c.tertiary_color: "#AAAAAA",
+ THEMES_TABLE.c.is_default: True,
+ THEMES_TABLE.c.public: True,
+ THEMES_TABLE.c.allow_rename: True,
+}
+
+
+def upgrade() -> None:
+ op.alter_column(
+ "themes", "creator_id", existing_type=UUID(), nullable=True
+ )
+
+ conn = op.get_bind()
+ count_themes_query = select(
+ select(THEMES_TABLE).where(THEMES_TABLE.c.is_default == True).exists()
+ )
+ is_default_themes_exists = conn.execute(count_themes_query).first()[0]
+ if not is_default_themes_exists:
+ first_theme = {
+ f"{k.name}": v for (k, v) in FIRST_DEFAULT_THEME.items()
+ }
+ first_theme[f"{THEMES_TABLE.c.id.name}"] = f"{uuid.uuid4()}"
+ op.bulk_insert(THEMES_TABLE, rows=[first_theme])
+
+
+def downgrade() -> None:
+ conn = op.get_bind()
+ first_theme_where = [k == v for (k, v) in FIRST_DEFAULT_THEME.items()]
+ first_default_theme_in_db: tuple | None = conn.execute(
+ select(column("id", UUID))
+ .select_from(THEMES_TABLE)
+ .where(*first_theme_where)
+ ).first()
+
+ if first_default_theme_in_db:
+ theme_id = first_default_theme_in_db[0]
+ conn.execute(delete(THEMES_TABLE).where(THEMES_TABLE.c.id == theme_id))
+
+ op.alter_column(
+ "themes", "creator_id", existing_type=UUID(), nullable=False
+ )
diff --git a/src/infrastructure/database/migrations/versions/2023_12_21_17_25-userid_emails_to_userid_uuid.py b/src/infrastructure/database/migrations/versions/2023_12_21_17_25-userid_emails_to_userid_uuid.py
new file mode 100644
index 00000000000..353fb368a32
--- /dev/null
+++ b/src/infrastructure/database/migrations/versions/2023_12_21_17_25-userid_emails_to_userid_uuid.py
@@ -0,0 +1,44 @@
+"""UserId emails to UserId uuid
+
+Revision ID: 5130eba9f698
+Revises: b993457637ad
+Create Date: 2023-12-21 17:25:42.256018
+
+"""
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "5130eba9f698"
+down_revision = "b993457637ad"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+ conn = op.get_bind()
+ conn.execute(
+ sa.text(
+ """
+ with
+ uuid_email as (
+ select distinct
+ email,
+ id::text
+ from users
+ )
+ update notification_logs
+ set user_id = ue.id
+ from uuid_email ue
+ where ue.email = encode(sha224(user_id::bytea), 'hex');
+ """
+ )
+ )
+ # Drop rows whose raw-email user_id matched no existing user
+ conn.execute(
+ sa.text("""delete from notification_logs where user_id like '%@%'""")
+ )
+
+
+def downgrade() -> None:
+ pass
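The join condition implies that users.email stores the SHA-224 hex digest of the address while old notification_logs.user_id rows stored the raw address, so hashing the raw value reproduces the stored key:

import hashlib

raw = "jane@example.com"  # what notification_logs.user_id used to hold
digest = hashlib.sha224(raw.encode()).hexdigest()
# `digest` equals encode(sha224(user_id::bytea), 'hex') in the SQL above
# and is what gets matched against users.email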
diff --git a/src/infrastructure/database/migrations/versions/2023_12_26_14_51-add_user_id_column_to_invitations.py b/src/infrastructure/database/migrations/versions/2023_12_26_14_51-add_user_id_column_to_invitations.py
new file mode 100644
index 00000000000..2c1a6390595
--- /dev/null
+++ b/src/infrastructure/database/migrations/versions/2023_12_26_14_51-add_user_id_column_to_invitations.py
@@ -0,0 +1,42 @@
+"""Add user_id column to invitations
+
+Revision ID: 3fb536a58c94
+Revises: 5130eba9f698
+Create Date: 2023-12-26 14:51:42.568199
+
+"""
+import sqlalchemy as sa
+from alembic import op
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = "3fb536a58c94"
+down_revision = "5130eba9f698"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.add_column(
+ "invitations",
+ sa.Column("user_id", postgresql.UUID(as_uuid=True), nullable=True),
+ )
+ op.create_foreign_key(
+ op.f("fk_invitations_user_id_users"),
+ "invitations",
+ "users",
+ ["user_id"],
+ ["id"],
+ ondelete="RESTRICT",
+ )
+ # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_constraint(
+ op.f("fk_invitations_user_id_users"), "invitations", type_="foreignkey"
+ )
+ op.drop_column("invitations", "user_id")
+ # ### end Alembic commands ###
diff --git a/src/infrastructure/database/migrations_arbitrary/env.py b/src/infrastructure/database/migrations_arbitrary/env.py
index edf602d7ac8..449be84d145 100644
--- a/src/infrastructure/database/migrations_arbitrary/env.py
+++ b/src/infrastructure/database/migrations_arbitrary/env.py
@@ -1,13 +1,18 @@
import asyncio
+import logging
import os
+import uuid
+from logging import getLogger
from logging.config import fileConfig
from alembic import context
from alembic.config import Config
-from sqlalchemy import MetaData, engine_from_config, pool, text
+from sqlalchemy import MetaData, Unicode, engine_from_config, pool, text
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
+from sqlalchemy_utils import StringEncryptedType
+from apps.shared.encryption import get_key
from config import settings
from infrastructure.database.migrations.base import Base
@@ -20,47 +25,58 @@
# Override alembic.ini option
config.set_main_option("sqlalchemy.url", settings.database.url)
-arbitrary_urls = []
+arbitrary_data = []
+
+migration_log = getLogger("alembic.arbitrary")
+migration_log.level = logging.INFO
async def get_all_servers(connection):
try:
query = text(
"""
- SELECT uw.database_uri
+ SELECT uw.database_uri, uw.user_id
FROM users_workspaces as uw
- WHERE uw.database_uri is not null
+ WHERE uw.database_uri is not null and uw.database_uri <> ''
"""
)
rows = await connection.execute(query)
- urls = list(map(lambda r: r[0], rows.fetchall()))
+ rows = rows.fetchall()
+ data = []
+ for row in rows:
+ url = StringEncryptedType(Unicode, get_key).process_result_value(
+ row[0], dialect=connection.dialect
+ )
+ data.append((url, row[1]))
+
except Exception as ex:
print(ex)
- urls = []
+ data = []
if os.environ.get("PYTEST_APP_TESTING"):
arbitrary_db_name = os.environ["ARBITRARY_DB"]
url = settings.database.url.replace("/test", f"/{arbitrary_db_name}")
- urls.append(url)
- return urls
+ data.append((url, uuid.uuid4()))
+ return data
async def get_urls():
- global arbitrary_urls
+ global arbitrary_data
connectable = create_async_engine(url=settings.database.url)
async with connectable.connect() as connection:
- arbitrary_urls = await get_all_servers(connection)
+ arbitrary_data = await get_all_servers(connection)
await connectable.dispose()
async def migrate_arbitrary():
- global arbitrary_urls
+ global arbitrary_data
arbitrary_meta = MetaData()
arbitrary_tables = [
Base.metadata.tables["answers"],
Base.metadata.tables["answers_items"],
]
arbitrary_meta.tables = arbitrary_tables
- for url in arbitrary_urls:
+ for url, owner_id in arbitrary_data:
+ migration_log.info(f"Migrating server for owner: {owner_id}")
config.set_main_option("sqlalchemy.url", url)
connectable = AsyncEngine(
engine_from_config(
diff --git a/src/infrastructure/database/migrations_arbitrary/versions/2023_11_13_19_55-add_assessment_activity_version_id_on_.py b/src/infrastructure/database/migrations_arbitrary/versions/2023_11_13_19_55-add_assessment_activity_version_id_on_.py
new file mode 100644
index 00000000000..c10475d2b02
--- /dev/null
+++ b/src/infrastructure/database/migrations_arbitrary/versions/2023_11_13_19_55-add_assessment_activity_version_id_on_.py
@@ -0,0 +1,40 @@
+"""add assessment activity version id on answers item
+
+Revision ID: 60528d410fd1
+Revises: 8c59c7363c67
+Create Date: 2023-11-13 19:55:57.797942
+
+"""
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "60528d410fd1"
+down_revision = "016848d34c04"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.add_column(
+ "answers_items",
+ sa.Column("assessment_activity_id", sa.Text(), nullable=True),
+ )
+ op.create_index(
+ op.f("ix_answers_items_assessment_activity_id"),
+ "answers_items",
+ ["assessment_activity_id"],
+ unique=False,
+ )
+ # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_index(
+ op.f("ix_answers_items_assessment_activity_id"),
+ table_name="answers_items",
+ )
+ op.drop_column("answers_items", "assessment_activity_id")
+ # ### end Alembic commands ###
diff --git a/src/infrastructure/dependency/cdn.py b/src/infrastructure/dependency/cdn.py
index 0bf7cfee93e..404afdb11f6 100644
--- a/src/infrastructure/dependency/cdn.py
+++ b/src/infrastructure/dependency/cdn.py
@@ -18,6 +18,7 @@
async def get_media_bucket() -> CDNClient:
config = CdnConfig(
+ endpoint_url=settings.cdn.endpoint_url,
region=settings.cdn.region,
bucket=settings.cdn.bucket,
secret_key=settings.cdn.secret_key,
@@ -30,6 +31,9 @@ async def get_media_bucket() -> CDNClient:
async def get_log_bucket() -> CDNClient:
config = CdnConfig(
+ endpoint_url=settings.cdn.endpoint_url,
+ access_key=settings.cdn.access_key,
+ secret_key=settings.cdn.secret_key,
region=settings.cdn.region,
bucket=settings.cdn.bucket_answer,
ttl_signed_urls=settings.cdn.ttl_signed_urls,
diff --git a/src/infrastructure/http/execeptions.py b/src/infrastructure/http/execeptions.py
index b395dd9ffeb..ab9cd4703f9 100644
--- a/src/infrastructure/http/execeptions.py
+++ b/src/infrastructure/http/execeptions.py
@@ -1,4 +1,3 @@
-import logging
import traceback
from fastapi.encoders import jsonable_encoder
@@ -9,8 +8,7 @@
from apps.shared.domain import ErrorResponse, ErrorResponseMulti
from apps.shared.exception import BaseError
-
-logger = logging.getLogger("mindlogger_backend")
+from infrastructure.logger import logger
def custom_base_errors_handler(_: Request, error: BaseError) -> JSONResponse:
@@ -25,8 +23,6 @@ def custom_base_errors_handler(_: Request, error: BaseError) -> JSONResponse:
]
)
- logger.error(response)
-
return JSONResponse(
response.dict(by_alias=True),
status_code=error.status_code,
@@ -41,7 +37,11 @@ def python_base_error_handler(_: Request, error: Exception) -> JSONResponse:
result=[ErrorResponse(message=f"Unhandled error: {error_message}")]
)
- logger.error(response)
+ # NOTE: log at warning instead of error: the application can still
+ # operate, and it avoids sending a duplicate event to Sentry.
+ # (The default logging level for sending events to Sentry is ERROR,
+ # so every logger.error call emits an additional event.)
+ logger.warning(response)
return JSONResponse(
content=jsonable_encoder(response.dict(by_alias=True)),
@@ -64,8 +64,6 @@ def pydantic_validation_errors_handler(
]
)
- logger.error(response)
-
return JSONResponse(
content=jsonable_encoder(response.dict(by_alias=True)),
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
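Context for the NOTE above: sentry-sdk's logging integration turns records at ERROR and above into Sentry events by default, so a handled traceback logged at WARNING stays in the logs without producing a second event. The default is equivalent to this explicit configuration (a sketch; the DSN is a placeholder):

import logging

import sentry_sdk
from sentry_sdk.integrations.logging import LoggingIntegration

sentry_sdk.init(
    dsn="https://public@example.ingest.sentry.io/0",  # placeholder
    integrations=[
        # defaults: breadcrumbs from INFO, events from ERROR
        LoggingIntegration(level=logging.INFO, event_level=logging.ERROR),
    ],
)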
diff --git a/src/infrastructure/utility/cdn_client.py b/src/infrastructure/utility/cdn_client.py
index 3b337641fcd..2b7cadb3484 100644
--- a/src/infrastructure/utility/cdn_client.py
+++ b/src/infrastructure/utility/cdn_client.py
@@ -31,6 +31,7 @@ def configure_client(self, config):
if config.access_key and config.secret_key:
return boto3.client(
"s3",
+ endpoint_url=config.endpoint_url,
region_name=config.region,
aws_access_key_id=config.access_key,
aws_secret_access_key=config.secret_key,
diff --git a/src/infrastructure/utility/cdn_config.py b/src/infrastructure/utility/cdn_config.py
index dd4654cdb27..30b4c2742a4 100644
--- a/src/infrastructure/utility/cdn_config.py
+++ b/src/infrastructure/utility/cdn_config.py
@@ -2,6 +2,7 @@
class CdnConfig(BaseSettings):
+ endpoint_url: str | None = None
region: str | None
bucket: str | None
secret_key: str | None
diff --git a/src/middlewares/__init__.py b/src/middlewares/__init__.py
index 5b2f27e3764..9b1922cc1af 100644
--- a/src/middlewares/__init__.py
+++ b/src/middlewares/__init__.py
@@ -1,3 +1,6 @@
+from middlewares.content_length import ( # noqa: F401, F403
+ ContentLengthLimitMiddleware,
+)
from middlewares.cors import * # noqa: F401, F403
from middlewares.domain import * # noqa: F401, F403
from middlewares.internalization import * # noqa: F401, F403
diff --git a/src/middlewares/content_length.py b/src/middlewares/content_length.py
new file mode 100644
index 00000000000..222a65fe300
--- /dev/null
+++ b/src/middlewares/content_length.py
@@ -0,0 +1,49 @@
+from fastapi import HTTPException
+from starlette import status
+from starlette.types import ASGIApp
+
+
+class ContentLengthLimitMiddleware:
+ def __init__(
+ self,
+ app: ASGIApp,
+ content_length_limit: int | None = None,
+ methods: list | None = None,
+ ):
+ self.app = app
+ self.content_length_limit = content_length_limit
+ self.methods = methods
+
+ def method_matches(self, method):
+ if self.methods:
+ return method in self.methods
+ return True
+
+ async def __call__(self, scope, receive, send):
+ if not (
+ scope["type"] == "http"
+ and self.method_matches(scope.get("method"))
+ and self.content_length_limit is not None
+ ):
+ await self.app(scope, receive, send)
+ return
+
+ def _receiver():
+ read_length: int = 0
+
+ async def _receive():
+ nonlocal read_length
+
+ message = await receive()
+ if message["type"] == "http.request":
+ read_length += len(message.get("body", b""))
+ if read_length > self.content_length_limit:
+ raise HTTPException(
+ status_code=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE # noqa: E501
+ )
+ return message
+
+ return _receive
+
+ _receive = _receiver()
+ await self.app(scope, _receive, send)
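Because the limit is enforced while the body is being received, oversized uploads fail even when chunked, as soon as the handler reads the stream. Expected behaviour in miniature (a sketch; the limit is shrunk for the example):

from fastapi import FastAPI, Request
from fastapi.testclient import TestClient

from middlewares.content_length import ContentLengthLimitMiddleware

app = FastAPI()
app.add_middleware(
    ContentLengthLimitMiddleware, content_length_limit=10, methods=["POST"]
)

@app.post("/echo")
async def echo(request: Request) -> dict:
    body = await request.body()  # triggers the wrapped receive
    return {"size": len(body)}

client = TestClient(app)
assert client.post("/echo", content=b"x" * 5).status_code == 200
assert client.post("/echo", content=b"x" * 50).status_code == 413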
diff --git a/src/middlewares/exception.py b/src/middlewares/exception.py
deleted file mode 100644
index 3dbec1a59bc..00000000000
--- a/src/middlewares/exception.py
+++ /dev/null
@@ -1,75 +0,0 @@
-import gettext
-import logging
-import traceback
-
-from fastapi.encoders import jsonable_encoder
-from fastapi.responses import JSONResponse
-from pydantic import ValidationError
-from starlette import status
-from starlette.requests import Request
-
-from apps.shared.domain.response.errors import (
- ErrorResponse,
- ErrorResponseMulti,
-)
-from apps.shared.exception import BaseError
-from config import settings
-
-logger = logging.getLogger("mindlogger_backend")
-gettext.bindtextdomain(gettext.textdomain(), settings.locale_dir)
-
-
-def _custom_base_errors_handler(_: Request, error: BaseError) -> JSONResponse:
- """This function is called if the BaseError was raised."""
- response = ErrorResponseMulti(
- result=[
- ErrorResponse(
- message=error.error,
- type=error.type,
- path=getattr(error, "path", []),
- )
- ]
- )
-
- return JSONResponse(
- response.dict(by_alias=True),
- status_code=error.status_code,
- )
-
-
-def _python_base_error_handler(_: Request, error: Exception) -> JSONResponse:
- """This function is called if the Exception was raised."""
-
- error_message = "".join(traceback.format_tb(error.__traceback__))
- response = ErrorResponseMulti(
- result=[ErrorResponse(message=f"Unhandled error: {error_message}")]
- )
-
- logger.error(response)
-
- return JSONResponse(
- content=jsonable_encoder(response.dict(by_alias=True)),
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- )
-
-
-def _pydantic_validation_errors_handler(
- _: Request, error: ValidationError
-) -> JSONResponse:
- """This function is called if the Pydantic validation error was raised."""
-
- response = ErrorResponseMulti(
- result=[
- ErrorResponse(
- message=err["msg"],
- path=list(err["loc"]),
- type=err["type"],
- )
- for err in error.errors()
- ]
- )
-
- return JSONResponse(
- content=jsonable_encoder(response.dict(by_alias=True)),
- status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
- )
diff --git a/uploads/.gitkeep b/uploads/.gitkeep
new file mode 100644
index 00000000000..e69de29bb2d
|