Merge pull request #1420 from microbiomedata/upgrade-fastapi
Update dependencies and enable sentry tracing
naglepuff authored Oct 18, 2024
2 parents e81b583 + 9920a9f commit 7aa1c2d
Showing 24 changed files with 447 additions and 314 deletions.
2 changes: 1 addition & 1 deletion nmdc_server/api.py
@@ -47,7 +47,7 @@ async def get_version() -> schemas.VersionInfo:

# get the current user information
@router.get("/me", tags=["user"], name="Return the current user name")
async def me(user: User = Depends(get_current_user)) -> Optional[User]:
async def me(user: User = Depends(get_current_user)):
return user


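The only change to `api.py` drops the `-> Optional[User]` return annotation from the `/me` endpoint. A likely motivation (an assumption, not stated in the diff): since FastAPI 0.89 a return annotation on a path operation is treated as the response model and validated, which this endpoint may want to avoid. A minimal sketch of that behavior with a hypothetical dependency:

```python
from typing import Optional

from fastapi import Depends, FastAPI

app = FastAPI()


def get_current_user() -> Optional[dict]:
    # Hypothetical stand-in; the real dependency resolves the user from the session.
    return {"name": "example"}


# Since FastAPI 0.89, a return type annotation on a path operation is used as
# the response model and validated; omitting it returns the object unvalidated.
@app.get("/me")
async def me(user: Optional[dict] = Depends(get_current_user)):
    return user
```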
32 changes: 25 additions & 7 deletions nmdc_server/app.py
@@ -1,10 +1,14 @@
import logging
import typing
from contextlib import asynccontextmanager

import sentry_sdk
from debug_toolbar.middleware import DebugToolbarMiddleware
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import RedirectResponse
from fastapi.staticfiles import StaticFiles
from sentry_sdk.integrations.logging import LoggingIntegration
from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration
from starlette.middleware.sessions import SessionMiddleware

@@ -19,11 +23,27 @@ def attach_sentry(app: FastAPI):

sentry_sdk.init(
dsn=settings.sentry_dsn,
integrations=[SqlalchemyIntegration()],
integrations=[
LoggingIntegration(level=logging.INFO, event_level=logging.WARNING),
SqlalchemyIntegration(),
],
in_app_include=["nmdc_server"],
attach_stacktrace=True,
traces_sample_rate=settings.sentry_traces_sample_rate,
)


def create_app(env: typing.Mapping[str, str]) -> FastAPI:
def generate_and_mount_static_files():
static_path = initialize_static_directory(remove_existing=True)
generate_submission_schema_files(directory=static_path)
app.mount("/static", StaticFiles(directory=static_path), name="static")

@asynccontextmanager
async def lifespan(app: FastAPI):
generate_and_mount_static_files()
yield

app = FastAPI(
title="NMDC Data and Submission Portal API",
description="""
@@ -35,18 +55,16 @@ def create_app(env: typing.Mapping[str, str]) -> FastAPI:
version=__version__,
docs_url="/api/docs",
openapi_url="/api/openapi.json",
debug=settings.debug,
lifespan=lifespan,
)
if settings.environment == "development":
app.add_middleware(DebugToolbarMiddleware)

@app.get("/docs", response_class=RedirectResponse, status_code=301, include_in_schema=False)
async def redirect_docs():
return "/api/docs"

@app.on_event("startup")
async def generate_and_mount_static_files():
static_path = initialize_static_directory(remove_existing=True)
generate_submission_schema_files(directory=static_path)
app.mount("/static", StaticFiles(directory=static_path), name="static")

attach_sentry(app)
errors.attach_error_handlers(app)
app.include_router(api.router, prefix="/api")
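The startup hook moves from the deprecated `@app.on_event("startup")` decorator to a lifespan context manager passed to the `FastAPI` constructor, as the hunk above shows. A minimal, self-contained sketch of the same pattern:

```python
from contextlib import asynccontextmanager

from fastapi import FastAPI


@asynccontextmanager
async def lifespan(app: FastAPI):
    # Everything before `yield` runs once at startup (the real app generates
    # and mounts its static files here); everything after runs at shutdown.
    print("starting up")
    yield
    print("shutting down")


app = FastAPI(lifespan=lifespan)
```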
7 changes: 4 additions & 3 deletions nmdc_server/attribute_units.py
@@ -1,21 +1,22 @@
from typing import Dict, Optional

from pint import Quantity, Unit, UnitRegistry
from pint import Quantity, UnitRegistry
from pint.facets.plain.unit import PlainUnit

_registry = UnitRegistry()

# TODO: This information should come from the upstream schema. For now, we
# hard code relevant attributes here.


_unit_info: Dict[str, Dict[str, Unit]] = {
_unit_info: Dict[str, Dict[str, PlainUnit]] = {
"biosample": {
"depth": _registry("meter").units,
}
}


def get_attribute_units(table: str, attribute: str) -> Optional[Unit]:
def get_attribute_units(table: str, attribute: str) -> Optional[PlainUnit]:
return _unit_info.get(table, {}).get(attribute)


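The unit table's annotation switches from `pint.Unit` to `pint.facets.plain.unit.PlainUnit`, presumably to match the type the registry actually returns under pint's newer facet-based internals (an assumption). A small sketch of the registry calls behind `_unit_info` and `get_attribute_units`:

```python
from pint import UnitRegistry

_registry = UnitRegistry()

# The quantity "1 meter" carries a magnitude and a units object; `.units`
# is what the lookup table stores for each attribute.
depth_unit = _registry("meter").units
print(depth_unit)  # meter

# A caller can then convert a raw value into the canonical unit, e.g.:
value = _registry.Quantity(250.0, "centimeter").to(depth_unit)
print(value.magnitude)  # 2.5
```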
6 changes: 3 additions & 3 deletions nmdc_server/bulk_download_schema.py
@@ -2,6 +2,8 @@
from typing import List
from uuid import UUID

from pydantic import ConfigDict

from nmdc_server.data_object_filters import DataObjectFilter
from nmdc_server.query import ConditionSchema
from nmdc_server.schemas import FileDownloadMetadata
@@ -17,9 +19,7 @@ class BulkDownloadBase(FileDownloadMetadata):
class BulkDownload(BulkDownloadBase):
id: UUID
created: datetime

class Config:
orm_mode = True
model_config = ConfigDict(from_attributes=True)


class BulkDownloadCreate(BulkDownloadBase):
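This is the standard Pydantic v2 migration: the nested `class Config` with `orm_mode = True` becomes `model_config = ConfigDict(from_attributes=True)`. A minimal sketch, using a hypothetical `BulkDownloadRow` as a stand-in for an ORM row:

```python
from dataclasses import dataclass
from uuid import UUID, uuid4

from pydantic import BaseModel, ConfigDict


@dataclass
class BulkDownloadRow:
    # Stand-in for an ORM row; any object with matching attributes works.
    id: UUID
    zip_size: int


class BulkDownloadOut(BaseModel):
    # Pydantic v2 replacement for `class Config: orm_mode = True`.
    model_config = ConfigDict(from_attributes=True)

    id: UUID
    zip_size: int


row = BulkDownloadRow(id=uuid4(), zip_size=1024)
# In Pydantic v1 this was `BulkDownloadOut.from_orm(row)`.
print(BulkDownloadOut.model_validate(row))
```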
15 changes: 11 additions & 4 deletions nmdc_server/config.py
@@ -1,11 +1,14 @@
from __future__ import annotations

import os
from typing import Optional

from pydantic import BaseSettings
from pydantic_settings import BaseSettings, SettingsConfigDict


class Settings(BaseSettings):
environment: str = "production"
debug: bool = False

# Several different database urls are configured for different
# environments. In production, only database_uri and ingest_database_uri
@@ -67,6 +70,12 @@ def orcid_openid_config_url(self) -> str:

sentry_dsn: Optional[str] = None

# Enable/disable and configure tracing through environment
# variables to lessen friction when fine-tuning settings
# for useful tracing.
sentry_tracing_enabled: bool = False
sentry_traces_sample_rate: float = 0.0

print_sql: bool = False

# App settings related to UI behavior
@@ -96,9 +105,7 @@ def current_db_uri(self) -> str:
return self.testing_database_uri
return self.database_uri

class Config:
env_prefix = "nmdc_"
env_file = os.getenv("DOTENV_PATH", ".env")
model_config = SettingsConfigDict(env_prefix="nmdc_", env_file=os.getenv("DOTENV_PATH", ".env"))


settings = Settings()
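Two things happen in `config.py`: `BaseSettings` now comes from the separate `pydantic-settings` package with `SettingsConfigDict` replacing the nested `class Config`, and the new `sentry_tracing_enabled` / `sentry_traces_sample_rate` fields let tracing be tuned through environment variables. A minimal sketch of how the `nmdc_` prefix maps environment variables onto these fields (field names copied from the diff; the class name is illustrative):

```python
import os

from pydantic_settings import BaseSettings, SettingsConfigDict


class ExampleSettings(BaseSettings):
    # Pydantic v2 moves BaseSettings into the pydantic-settings package;
    # model_config replaces the old nested `class Config`.
    model_config = SettingsConfigDict(env_prefix="nmdc_")

    sentry_tracing_enabled: bool = False
    sentry_traces_sample_rate: float = 0.0


# With the "nmdc_" prefix, the fields are driven by environment variables:
os.environ["nmdc_sentry_traces_sample_rate"] = "0.25"
print(ExampleSettings().sentry_traces_sample_rate)  # 0.25
```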
2 changes: 1 addition & 1 deletion nmdc_server/crud.py
@@ -27,7 +27,7 @@ def get_or_create(
else:
params = dict(**kwargs)
params.update(defaults or {})
instance = model(**params) # type: ignore
instance = model(**params)
db.add(instance)
return instance, True

4 changes: 2 additions & 2 deletions nmdc_server/data_object_filters.py
@@ -84,5 +84,5 @@ def output_association(self):


class DataObjectFilter(BaseModel):
workflow: Optional[WorkflowActivityTypeEnum]
file_type: Optional[str]
workflow: Optional[WorkflowActivityTypeEnum] = None
file_type: Optional[str] = None
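In Pydantic v2 an `Optional[...]` annotation no longer implies a default of `None`; without the explicit `= None` these fields would become required. A tiny sketch of the rule:

```python
from typing import Optional

from pydantic import BaseModel


class FilterV2(BaseModel):
    # In Pydantic v2 an Optional annotation alone makes the field required
    # (it may be None, but it must be supplied), so an explicit default is
    # needed to keep `FilterV2()` valid.
    file_type: Optional[str] = None


print(FilterV2())  # file_type=None
```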
12 changes: 8 additions & 4 deletions nmdc_server/ingest/biosample.py
@@ -3,7 +3,8 @@
from datetime import datetime
from typing import Any, Dict

from pydantic import root_validator, validator
from pydantic import field_validator
from pydantic.v1 import root_validator, validator
from pymongo.cursor import Cursor
from sqlalchemy.orm import Session

@@ -32,20 +33,23 @@ def extract_extras(cls, values):
values["longitude"] = float(lon)
return extract_extras(cls, values)

@validator("depth", pre=True)
@field_validator("depth", mode="before")
@classmethod
def normalize_depth(cls, value):
value = extract_value(value)
if isinstance(value, str):
return float(value.split(" ")[0])
return value

@validator("add_date", "mod_date", pre=True)
@field_validator("add_date", "mod_date", mode="before")
@classmethod
def coerce_date(cls, v):
if isinstance(v, str) and date_fmt.match(v):
return datetime.strptime(v, "%d-%b-%y %I.%M.%S.%f000 %p").isoformat()
return v

@validator("collection_date", pre=True)
@field_validator("collection_date", mode="before")
@classmethod
def coerce_collection_date(cls, value):
# { "has_raw_value": ... }
raw_value = value["has_raw_value"]
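The v1 `@validator(..., pre=True)` decorators become v2 `@field_validator(..., mode="before")` with an explicit `@classmethod`, as the hunk shows. A self-contained sketch of the same depth normalization on a hypothetical model:

```python
from pydantic import BaseModel, field_validator


class DepthModel(BaseModel):
    depth: float

    # Pydantic v2 equivalent of `@validator("depth", pre=True)`: mode="before"
    # runs prior to type coercion, and the @classmethod is written explicitly.
    @field_validator("depth", mode="before")
    @classmethod
    def normalize_depth(cls, value):
        if isinstance(value, str):
            return float(value.split(" ")[0])
        return value


print(DepthModel(depth="3.5 m").depth)  # 3.5
```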
4 changes: 2 additions & 2 deletions nmdc_server/ingest/common.py
@@ -1,6 +1,6 @@
import logging
from datetime import datetime
from typing import Any, Dict, Set, Union
from typing import Any, Dict, Optional, Set, Union

from pydantic import BaseModel
from sqlalchemy.exc import IntegrityError
@@ -45,7 +45,7 @@ def extract_value(value: Any) -> Any:


def extract_extras(
cls: BaseModel, values: Dict[str, Any], exclude: Set[str] = None
cls: BaseModel, values: Dict[str, Any], exclude: Optional[Set[str]] = None
) -> Dict[str, Any]:
# Move unknown attributes into values['annotations']
fields = set(cls.__fields__.keys())
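Beyond the added `Optional` import, the signature fix reflects that recent mypy releases disable implicit `Optional`: a parameter defaulting to `None` must say so in its annotation. A one-function sketch:

```python
from typing import Optional, Set


# Newer mypy rejects "implicit Optional", so a parameter that defaults to
# None must be annotated Optional explicitly.
def extract(exclude: Optional[Set[str]] = None) -> Set[str]:
    return exclude or set()


print(extract())           # set()
print(extract({"depth"}))  # {'depth'}
```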
2 changes: 1 addition & 1 deletion nmdc_server/ingest/data_object.py
@@ -14,7 +14,7 @@

def load(db: Session, cursor: Cursor, file_types: List[Dict[str, Any]]):
logger = get_logger(__name__)
fields = set(DataObjectCreate.__fields__.keys()) | {"data_object_type"}
fields = set(DataObjectCreate.model_fields.keys()) | {"data_object_type"}
file_type_map: Dict[str, Tuple[str, str]] = {}

# Load descriptors from mongo collection.
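Pydantic v2 exposes field definitions through the `model_fields` class attribute; the old `__fields__` still exists but is deprecated. A tiny sketch with a hypothetical model (the real `DataObjectCreate` fields are not shown in this diff):

```python
from pydantic import BaseModel


class DataObjectExample(BaseModel):
    name: str
    file_size_bytes: int = 0


# Pydantic v2 keeps field definitions in `model_fields`, a dict keyed by
# field name, which replaces iterating over `__fields__`.
print(set(DataObjectExample.model_fields.keys()))  # {'name', 'file_size_bytes'}
```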
2 changes: 1 addition & 1 deletion nmdc_server/ingest/doi.py
@@ -3,9 +3,9 @@
import requests
from requests.adapters import HTTPAdapter
from requests.models import Response
from requests.packages.urllib3.util.retry import Retry
from sqlalchemy.dialects.postgresql import insert
from sqlalchemy.orm import Session
from urllib3.util.retry import Retry

from nmdc_server.logger import get_logger
from nmdc_server.models import DOIInfo, DOIType
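`requests.packages.urllib3` is only a compatibility alias, so `Retry` is now imported from `urllib3` directly. A small sketch of the retry-enabled session this module appears to build (URL and retry values are illustrative):

```python
import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

# `requests.packages.urllib3` is a re-export kept for backwards compatibility;
# importing Retry from urllib3 directly is the supported path.
retry = Retry(total=3, backoff_factor=0.5, status_forcelist=[502, 503, 504])
session = requests.Session()
session.mount("https://", HTTPAdapter(max_retries=retry))

# Placeholder URL; the real ingest code fetches DOI metadata.
response = session.get("https://example.com/")
print(response.status_code)
```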
6 changes: 4 additions & 2 deletions nmdc_server/ingest/omics_processing.py
@@ -3,7 +3,8 @@
from datetime import datetime
from typing import Any, Dict, Optional

from pydantic import root_validator, validator
from pydantic import field_validator
from pydantic.v1 import root_validator, validator
from pymongo.collection import Collection
from pymongo.cursor import Cursor
from pymongo.database import Database
@@ -39,7 +40,8 @@ class OmicsProcessing(OmicsProcessingCreate):
def extract_extras(cls, values):
return extract_extras(cls, values)

@validator("add_date", "mod_date", pre=True)
@field_validator("add_date", "mod_date", mode="before")
@classmethod
def coerce_date(cls, v):
if isinstance(v, str) and date_fmt.match(v):
return datetime.strptime(v, "%d-%b-%y %I.%M.%S.%f000 %p").isoformat()
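As in `biosample.py`, the v1-style `root_validator` and `validator` are now imported from the `pydantic.v1` compatibility namespace while `field_validator` comes from the v2 top-level package. A minimal sketch of the v1 shim with a hypothetical model:

```python
# Pydantic 2 ships the entire v1 API under the `pydantic.v1` namespace, so
# validators that still depend on v1 semantics (root_validator here) can keep
# working while new code uses the top-level v2 field_validator.
from pydantic.v1 import BaseModel, root_validator


class LegacyExtras(BaseModel):
    add_date: str = ""

    @root_validator(pre=True)
    def capture_extras(cls, values):
        # v1-style root validators receive the raw input dict before parsing.
        return values


print(LegacyExtras(add_date="2024-10-18"))
```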
2 changes: 1 addition & 1 deletion nmdc_server/ingest/study.py
@@ -2,7 +2,7 @@
from typing import Optional

import requests
from pydantic import root_validator, validator
from pydantic.v1 import root_validator, validator
from pymongo.cursor import Cursor
from sqlalchemy.orm import Session

2 changes: 1 addition & 1 deletion nmdc_server/migrations/env.py
@@ -13,7 +13,7 @@
# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.attributes.get("configure_logger", True):
fileConfig(config.config_file_name)
fileConfig(config.config_file_name) # type: ignore

# add your model's MetaData object here
# for 'autogenerate' support
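A likely reason for the added `# type: ignore` (an inference, not stated in the diff): Alembic types `config.config_file_name` as `Optional[str]`, while `logging.config.fileConfig` expects a concrete path, so mypy flags the call unless it is suppressed or guarded by an explicit `None` check.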
(The remaining changed files in this commit are not rendered here.)