Initial data portal endpoints #324

Draft · wants to merge 17 commits into `main` · changes from all commits
5 changes: 4 additions & 1 deletion .gitignore
@@ -160,4 +160,7 @@ repl.ipynb
 
 tests/nmdcdb/
 
-neon_cache.sqlite
+neon_cache.sqlite
+
+# output of changesheet generation
+nmdc_runtime/site/changesheets/changesheets_output/
6 changes: 3 additions & 3 deletions README.md
@@ -6,8 +6,8 @@ http://nmdcstatus.polyneme.xyz/
 
 ## How It Fits In
 
-* [nmdc-metadata](https://github.com/microbiomedata/nmdc-metadata)
-tracks issues related to NMDC metadata, which may necessitate work across multiple repos.
+* [issues](https://github.com/microbiomedata/issues)
+tracks issues related to NMDC, which may necessitate work across multiple repos.
 
 * [nmdc-schema](https://github.com/microbiomedata/nmdc-schema/)
 houses the LinkML schema specification, as well as generated artifacts (e.g. JSON Schema).
@@ -111,7 +111,7 @@ If you are connecting to resources that require an SSH tunnel—for example, a M
 The following command could be useful to you, either directly or as a template (see `Makefile`).
 
 ```shell
-make nersc-ssh-tunnel
+make nersc-mongo-tunnels
 ```
 
 Finally, spin up the Docker Compose stack.
4 changes: 3 additions & 1 deletion components/nmdc_runtime/workflow_execution_activity/core.py
@@ -94,7 +94,9 @@ def insert_into_keys(
     workflow: Workflow, data_objects: list[DataObject]
 ) -> dict[str, Any]:
     """Insert data object url into correct workflow input field."""
-    workflow_dict = workflow.dict()
+    workflow_dict = workflow.model_dump(
+        mode="json",
+    )
     for key in workflow_dict["inputs"]:
         for do in data_objects:
             if workflow_dict["inputs"][key] == str(do.data_object_type):
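This hunk is part of the pydantic v1-to-v2 migration that runs through the PR: v1's `.dict()` becomes v2's `.model_dump()`, and `mode="json"` additionally coerces values to JSON-native types. A minimal sketch of the difference, using a toy `Workflow` stand-in rather than the real model:

```python
# A minimal sketch of the v1 -> v2 change; this toy Workflow is a stand-in
# for the real model used in the diff above.
from datetime import datetime

from pydantic import BaseModel


class Workflow(BaseModel):
    id: str
    created_at: datetime


wf = Workflow(id="wf-1", created_at=datetime(2023, 9, 1))

# The default mode="python" keeps rich Python types, as v1's .dict() did...
assert isinstance(wf.model_dump()["created_at"], datetime)

# ...while mode="json" coerces every value to a JSON-native type, so the
# result can go straight to json.dumps() or into a MongoDB document.
assert wf.model_dump(mode="json")["created_at"] == "2023-09-01T00:00:00"
```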
3 changes: 2 additions & 1 deletion components/nmdc_runtime/workflow_execution_activity/spec.py
@@ -6,10 +6,11 @@
     WorkflowExecutionActivity,
     Database,
 )
-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict
 
 
 class ActivityTree(BaseModel):
+    model_config = ConfigDict(arbitrary_types_allowed=True)
     children: list["ActivityTree"] = []
     data: WorkflowExecutionActivity
     spec: Workflow
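`arbitrary_types_allowed=True`, which v2 moves from the inner `class Config` into `model_config = ConfigDict(...)`, is what lets fields be annotated with classes pydantic cannot build a validation schema for (here, presumably the generated `WorkflowExecutionActivity` class). A sketch with a hypothetical plain class in its place:

```python
# A sketch of why arbitrary_types_allowed is needed; Engine is a hypothetical
# plain class standing in for a non-pydantic field type.
from pydantic import BaseModel, ConfigDict


class Engine:
    """A plain class with no pydantic validation schema."""


class Tree(BaseModel):
    # Without this line, pydantic v2 raises PydanticSchemaGenerationError at
    # class-definition time for the `Engine` annotation; with it, the field
    # is validated by a simple isinstance() check.
    model_config = ConfigDict(arbitrary_types_allowed=True)
    engine: Engine


tree = Tree(engine=Engine())  # ok
```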
1 change: 1 addition & 0 deletions docker-compose.test.yml
@@ -36,6 +36,7 @@ services:
       DAGSTER_POSTGRES_DB: "postgres_db"
     depends_on:
       - dagster-postgresql
+    restart: on-failure
     volumes:
       - ./:/opt/dagster/lib
 
1 change: 1 addition & 0 deletions docker-compose.yml
@@ -36,6 +36,7 @@ services:
       DAGSTER_POSTGRES_DB: "postgres_db"
     depends_on:
       - dagster-postgresql
+    restart: on-failure
     volumes:
       - ./:/opt/dagster/lib
 
3 changes: 1 addition & 2 deletions docs/nb/get_data.ipynb
@@ -640,8 +640,7 @@
 "32,590.4511390833583,590.450730484585,,33051017.328779608,174529.72008540362,95.62528562170476,-1,,,,,,,,unassigned,,,,,,,,,\n",
 "33,574.4557295997837,574.4553249571502,,57270445.86539885,179389.47009937343,165.6984621406013,-1,,,,,,,,unassigned,,,,,,,,,\n",
 "34,509.29372586837684,509.2933397523243,,3016102.9919072534,115624.41149005273,8.726379197243881,-1,,,,,,,,unassigned,,,,,,,,,\n",
-"36,311.1007284631918,311.10042248349583,,2602658.391103224,220831.98006834995,7.530175230287296,-1,,,,,,,,unassigned,,,,,,,,,\n",
-"\n"
+"36,311.1007284631918,311.10042248349583,,2602658.391103224,220831.98006834995,7.530175230287296,-1,,,,,,,,unassigned,,,,,,,,,\n"
 ]
 }
 ],
Empty file added nmdc_runtime/__init__.py
23 changes: 20 additions & 3 deletions nmdc_runtime/api/core/auth.py
@@ -35,7 +35,7 @@ class TokenExpires(BaseModel):
 class Token(BaseModel):
     access_token: str
     token_type: str
-    expires: Optional[TokenExpires]
+    expires: Optional[TokenExpires] = None
 
 
 class TokenData(BaseModel):
@@ -95,8 +95,25 @@ def __init__(
         super().__init__(flows=flows, scheme_name=scheme_name, auto_error=auto_error)
 
     async def __call__(self, request: Request) -> Optional[str]:
-        authorization: str = request.headers.get("Authorization")
-        scheme, param = get_authorization_scheme_param(authorization)
+        header_authorization: str = request.headers.get("Authorization")
+        cookie_authorization: str = f"Bearer {request.cookies.get('session')}"
+        header_scheme, header_param = get_authorization_scheme_param(
+            header_authorization
+        )
+        cookie_scheme, cookie_param = get_authorization_scheme_param(
+            cookie_authorization
+        )
+        scheme, param = "", ""
+        if header_scheme.lower() == "bearer":
+            authorization = True
+            scheme = header_scheme
+            param = header_param
+        elif cookie_scheme.lower() == "bearer":
+            authorization = True
+            scheme = cookie_scheme
+            param = cookie_param
+        else:
+            authorization = False
         if not authorization or scheme.lower() != "bearer":
             if self.auto_error:
                 raise HTTPException(
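The rewritten `__call__` accepts a bearer token from the `Authorization` header or, failing that, from a `session` cookie, so logged-in browser sessions can call the API without setting headers. A standalone sketch of that precedence (a hypothetical helper, not code from this PR):

```python
# A standalone sketch (hypothetical helper, not part of the PR) of the
# header-first, cookie-fallback precedence the new __call__ implements.
from typing import Optional


def resolve_bearer_token(headers: dict, cookies: dict) -> Optional[str]:
    scheme, _, param = headers.get("Authorization", "").partition(" ")
    if scheme.lower() == "bearer" and param:
        return param  # an explicit Authorization header always wins
    session = cookies.get("session")
    if session:  # cookie fallback for browser sessions
        return session
    return None  # no credentials either way


assert resolve_bearer_token({"Authorization": "Bearer abc"}, {"session": "xyz"}) == "abc"
assert resolve_bearer_token({}, {"session": "xyz"}) == "xyz"
assert resolve_bearer_token({}, {}) is None
```

One quirk worth noting in the diff itself: when the cookie is absent, `f"Bearer {request.cookies.get('session')}"` evaluates to `"Bearer None"`, so `cookie_param` becomes the literal string `"None"` rather than an empty value; the sketch above checks the cookie before formatting to avoid that.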
2 changes: 1 addition & 1 deletion nmdc_runtime/api/core/util.py
@@ -98,6 +98,6 @@ def generate_secret(length=12):
 def json_clean(data, model, exclude_unset=False) -> dict:
     """Run data through a JSON serializer for a pydantic model."""
     if not isinstance(data, (dict, BaseModel)):
-        raise TypeError("`data` must be a pydantic model or its .dict()")
+        raise TypeError("`data` must be a pydantic model or its .model_dump()")
     m = model(**data) if isinstance(data, dict) else data
     return json.loads(m.json(exclude_unset=exclude_unset))
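Since the error message now names `.model_dump()`, a brief usage sketch of `json_clean` with a toy model (the `ObjectPatch` class is invented for illustration):

```python
# A usage sketch for json_clean, with a toy model invented for illustration;
# exclude_unset=True makes it suitable for PATCH payloads, since unset fields
# are dropped rather than serialized with their defaults.
from pydantic import BaseModel

from nmdc_runtime.api.core.util import json_clean


class ObjectPatch(BaseModel):
    name: str = "unnamed"
    description: str = ""


patch = ObjectPatch(description="new description")
assert json_clean(patch, ObjectPatch, exclude_unset=True) == {
    "description": "new description"
}
```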
4 changes: 2 additions & 2 deletions nmdc_runtime/api/endpoints/find.py
@@ -266,7 +266,7 @@ def pipeline_search(
     response_model=PipelineFindResponse,
     response_model_exclude_unset=True,
 )
-def pipeline_search(
+def pipeline_search_form(
     pipeline_spec: str = Form(...),
     description: str = Form(...),
     mdb: MongoDatabase = Depends(get_mongo_db),
@@ -276,7 +276,7 @@ def pipeline_search(
 
 
 @router.get("/pipeline_search", response_class=HTMLResponse)
-def pipeline_search(
+def pipeline_search_get(
     mdb: MongoDatabase = Depends(get_mongo_db),
 ):
     template = jinja_env.get_template("pipeline_search.html")
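The renames fix a module that defined `pipeline_search` more than once. Each decorator registers its route before the next `def` rebinds the name, so the endpoints all worked, but the later definitions shadow the earlier ones at module scope. A minimal sketch of the pattern being fixed:

```python
# A minimal sketch of the shadowing the rename fixes: both routes get
# registered at decoration time, but the second `def` rebinds the
# module-level name (flake8 flags this as F811, "redefinition of unused name").
from fastapi import FastAPI

app = FastAPI()


@app.post("/pipeline_search")
def pipeline_search():
    return {"via": "POST"}


@app.get("/pipeline_search")
def pipeline_search():  # noqa: F811 -- silently shadows the POST handler's name
    return {"via": "GET"}
```

Distinct names like `pipeline_search_form` and `pipeline_search_get` keep every handler addressable for tests and imports.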
6 changes: 4 additions & 2 deletions nmdc_runtime/api/endpoints/objects.py
@@ -78,7 +78,7 @@ def create_object(
 
     """
     id_supplied = supplied_object_id(
-        mdb, client_site, object_in.dict(exclude_unset=True)
+        mdb, client_site, object_in.model_dump(mode="json", exclude_unset=True)
     )
     drs_id = local_part(
         id_supplied if id_supplied is not None else generate_one_id(mdb, S3_ID_NS)
@@ -255,7 +255,9 @@ def update_object(
             status_code=status.HTTP_403_FORBIDDEN,
             detail=f"client authorized for different site_id than {object_mgr_site}",
         )
-    doc_object_patched = merge(doc, object_patch.dict(exclude_unset=True))
+    doc_object_patched = merge(
+        doc, object_patch.model_dump(mode="json", exclude_unset=True)
+    )
     mdb.operations.replace_one({"id": object_id}, doc_object_patched)
     return doc_object_patched
 
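`merge(doc, object_patch.model_dump(mode="json", exclude_unset=True))` implements shallow PATCH semantics. A sketch, assuming `merge` behaves like `toolz.dicttoolz.merge` (rightmost mapping wins, key by key, at the top level); the documents are invented:

```python
# A sketch of the shallow-PATCH semantics, assuming toolz-style merge;
# the doc and patch values are invented for illustration.
from toolz import merge

doc = {"id": "nmdc:obj-1", "name": "old name", "size_bytes": 100}
patch = {"name": "new name"}  # what model_dump(..., exclude_unset=True) yields

assert merge(doc, patch) == {
    "id": "nmdc:obj-1",
    "name": "new name",   # patched
    "size_bytes": 100,    # untouched fields survive
}
# Without exclude_unset=True, the patch model's defaults would clobber every
# field the client did not send.
```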
8 changes: 6 additions & 2 deletions nmdc_runtime/api/endpoints/operations.py
@@ -61,12 +61,16 @@ def update_operation(
             detail=f"client authorized for different site_id than {site_id_op}",
         )
     op_patch_metadata = merge(
-        op_patch.dict(exclude_unset=True).get("metadata", {}),
+        op_patch.model_dump(mode="json", exclude_unset=True).get("metadata", {}),
         pick(["site_id", "job", "model"], doc_op.get("metadata", {})),
     )
     doc_op_patched = merge(
         doc_op,
-        assoc(op_patch.dict(exclude_unset=True), "metadata", op_patch_metadata),
+        assoc(
+            op_patch.model_dump(mode="json", exclude_unset=True),
+            "metadata",
+            op_patch_metadata,
+        ),
     )
     mdb.operations.replace_one({"id": op_id}, doc_op_patched)
     return doc_op_patched
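Here the client's metadata updates are merged in, but server-owned keys (`site_id`, `job`, `model`) are re-asserted from the stored operation, so a patch cannot overwrite them. A sketch of that step, assuming toolz-style `merge`/`assoc`; the `pick` helper below is a hypothetical stand-in for the one imported in this module, and the documents are invented:

```python
# A sketch of the metadata-preservation step in update_operation.
from toolz import assoc, keyfilter, merge


def pick(keys, d):  # hypothetical stand-in for the helper used above
    return keyfilter(lambda k: k in keys, d)


doc_op = {"id": "op-1", "metadata": {"site_id": "site-a", "job": {"id": "j-1"}, "note": "old"}}
op_patch = {"done": True, "metadata": {"note": "new", "site_id": "evil-site"}}

# Client-sent metadata first, server-owned keys merged on top (right wins).
op_patch_metadata = merge(
    op_patch["metadata"], pick(["site_id", "job", "model"], doc_op["metadata"])
)
doc_op_patched = merge(doc_op, assoc(op_patch, "metadata", op_patch_metadata))

assert doc_op_patched["metadata"]["site_id"] == "site-a"  # overwrite attempt ignored
assert doc_op_patched["metadata"]["note"] == "new"        # client update kept
assert doc_op_patched["done"] is True
```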