Merge pull request #795 from MolSSI/rel_52
Release v0.52
bennybp authored Nov 29, 2023
2 parents 63ace1b + 57e9465 commit 1ca10b9
Showing 142 changed files with 53 additions and 317 deletions.
16 changes: 16 additions & 0 deletions docs/source/release_notes.rst
@@ -1,6 +1,22 @@
Release Notes
=============

0.52 / 2023-11-29
-----------------

Some improvements and bugfixes, but no breaking changes. Upgrading qcportal is recommended
due to fixes related to JWTs, but is not required. The same is true for compute managers.

Notable pull requests and features:

- (:pr:`781`) Fixes issues related to shutdown of snowflakes, particularly with Python 3.12
- (:pr:`783`, :pr:`793`) Fixes JWT refresh issues that cause errors in clients
- (:pr:`785`) Some cleanups related to Python 3.12 (including removing use of the `pkg_resources` module)
- (:pr:`787`) Pydantic v1/v2 dual compatibility (L. Naden :contrib:`lnaden`, M. Thompson :contrib:`mattwthompson`, L. Burns :contrib:`loriab`); see the import sketch after this list
- (:pr:`792`) Add ability to get status overview of child records (such as optimizations of a torsiondrive)
- (:pr:`794`) Remove use of the now-deprecated `utcnow` function and improve handling of timezones; see the timezone sketch after this list
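
A minimal sketch of the dual-compatibility import pattern referenced above (illustrative only, and not
necessarily the exact mechanism used in :pr:`787`; the `ExampleSpec` model below is hypothetical)::

    # Pydantic 2.x ships a pydantic.v1 compatibility namespace, while
    # pydantic 1.x exposes the same names at the top level, so one import
    # block can serve both major versions.
    try:
        from pydantic.v1 import BaseModel, Field, validator  # pydantic >= 2
    except ImportError:
        from pydantic import BaseModel, Field, validator  # pydantic < 2


    class ExampleSpec(BaseModel):
        """Hypothetical model, used only to illustrate the shared v1-style API."""

        program: str = Field(..., description="Program name")
        keywords: dict = Field(default_factory=dict)

        @validator("program")
        def _lowercase_program(cls, v):
            return v.lower()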

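For the timezone handling mentioned in :pr:`794`, the general pattern is to replace naive UTC
timestamps with timezone-aware ones (a generic standard-library sketch, not the project's actual code)::

    from datetime import datetime, timezone

    # datetime.utcnow() is deprecated in Python 3.12 and returns a naive
    # datetime; an explicit UTC timezone keeps timestamps unambiguous.
    now = datetime.now(timezone.utc)
    print(now.isoformat())  # e.g. 2023-11-29T12:34:56.789012+00:00
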

0.51 / 2023-10-19
-----------------

1 change: 0 additions & 1 deletion qcarchivetesting/qcarchivetesting/data_generator.py
@@ -34,7 +34,6 @@ def clean_conda_env(d: Dict[str, Any]):

class DataGeneratorManager(ComputeManager):
def __init__(self, qcf_config: FractalConfig, result_queue: queue.Queue, n_workers: int = 2):

self._qcf_config = qcf_config
self._result_queue = result_queue
self._record_id_map = {} # Maps task id to record id
1 change: 0 additions & 1 deletion qcarchivetesting/qcarchivetesting/helpers.py
@@ -190,7 +190,6 @@ def caplog_handler_at_level(caplog_fixture, level, logger=None):

def terminate_process(proc):
if proc.poll() is None:

# Interrupt (SIGINT)
if sys.platform.startswith("win"):
proc.send_signal(signal.CTRL_BREAK_EVENT)
1 change: 0 additions & 1 deletion qcarchivetesting/qcarchivetesting/test_full_reaction.py
@@ -23,7 +23,6 @@


def test_reaction_full_1(fulltest_client: PortalClient):

sp_spec = {"program": "psi4", "driver": "energy", "method": "b3lyp", "basis": "def2-tzvp", "keywords": {}}

rxn_keywords = {}
2 changes: 0 additions & 2 deletions qcarchivetesting/qcarchivetesting/testing_classes.py
@@ -33,7 +33,6 @@

class QCATestingPostgresHarness(PostgresHarness):
def __init__(self, config: DatabaseConfig):

PostgresHarness.__init__(self, config)
self.db_name = self.config.database_name
self.template_name = self.db_name + "_template"
@@ -108,7 +107,6 @@ def __init__(
log_access=True,
extra_config=None,
):

self.pg_harness = pg_harness
self.encoding = encoding

@@ -39,7 +39,6 @@


def transformer(old_data):

row = {}
for k, v in old_data.items():
if k == "id":
@@ -27,7 +27,6 @@


def transformer(old_data):

arr = old_data["return_result"]
if arr is None:
pass
@@ -26,7 +26,6 @@


def transformer(old_data):

extras = old_data["extras"]
extras.pop("_qcfractal_tags", None) # cleanup old tags

@@ -25,21 +25,18 @@


def transformer(old_data):

extras = old_data["extras"]
extras.pop("_qcfractal_tags", None) # cleanup old tags

return {"extras_": msgpackext_dumps(extras)}


def upgrade():

## Task Queue
table_name = "task_queue"
update_columns = {"spec"}

def transformer(old_data):

spec = old_data["spec"]

return {"spec_": msgpackext_dumps(spec)}
@@ -51,7 +48,6 @@ def transformer(old_data):
update_columns = {"extra"}

def transformer(old_data):

spec = old_data["extra"]

return {"extra_": msgpackext_dumps(spec)}
@@ -60,7 +56,6 @@ def transformer(old_data):


def downgrade():

## Task Queue
table_name = "task_queue"
update_columns = {"spec"}
@@ -22,7 +22,6 @@


def migrate_contributed_values_data():

bind = op.get_bind()
session = orm.Session(bind=bind)

@@ -41,7 +40,6 @@ def migrate_contributed_values_data():
continue

for key, dict_values in ds_contrib.items():

idx, vals = [], []
for key, value in dict_values["values"].items():
idx.append(key)
@@ -59,7 +57,6 @@ def migrate_contributed_values_data():


def upgrade():

# rename old column with data
op.alter_column("dataset", "contributed_values", new_column_name="contributed_values_data")
op.alter_column("reaction_dataset", "contributed_values", new_column_name="contributed_values_data")
@@ -17,7 +17,6 @@


def upgrade():

# Removes (harmless) duplicate rows
op.execute(
"DELETE FROM torsion_init_mol_association a USING \
@@ -83,7 +83,6 @@ def upgrade():
services = session.query(service_table).all()

for service in services:

if "torsiondrive_state" in service.extra:
# We have a torsiondrive
service_state = {}
@@ -314,7 +314,6 @@ def upgrade():
services = session.query(service_table).all()

for service in services:

if "torsiondrive_state" not in service.service_state:
continue

@@ -311,7 +311,6 @@ def upgrade():
services = session.query(service_table).all()

for service in services:

if "torsiondrive_state" in service.service_state:
continue

@@ -30,7 +30,6 @@


def create_reaction_record(conn, ds_id, created_on, modified_on, spec_id, stoichiometries, records):

# We do this allllll by hand. We don't want to depend on the real ORM code
# First, add a base record
r = conn.execute(
@@ -119,7 +118,6 @@ def create_reaction_record(conn, ds_id, created_on, modified_on, spec_id, stoich


def create_manybody_record(conn, ds_id, created_on, modified_on, spec_id, mol_id, record):

# We do this allllll by hand. We don't want to depend on the real ORM code
# First, add a base record
r = conn.execute(
@@ -33,7 +33,7 @@ def upgrade():
)
)

for (mol_hash, mol_ids, _) in duplicates:
for mol_hash, mol_ids, _ in duplicates:
mol_ids = sorted(mol_ids)

# Keep the first id, merge the rest
@@ -37,7 +37,6 @@ def decompress_old_string(compressed_data: bytes, compression_type: CompressionE


def upgrade():

# Table with output data
old_output_table = table(
"output_store",
@@ -23,7 +23,6 @@ def _get_colnames(columns):


def _intermediate_table(table_name, columns, read_columns=None):

column_pairs, old_names, new_names = _get_colnames(columns)
table_data = [table_name, sa.MetaData(), sa.Column("id", sa.Integer, primary_key=True)]
table_data.extend([sa.Column(x, old_type) for x in old_names])
@@ -41,7 +40,6 @@ def _intermediate_table(table_name, columns, read_columns=None):


def json_to_msgpack_table(table_name, block_size, update_columns, transformer, read_columns=None):

if read_columns is None:
read_columns = {}

@@ -67,7 +65,6 @@ def json_to_msgpack_table(table_name, block_size, update_columns, transformer, r

logger.info("Converting data, this may take some time...")
for block in tqdm.tqdm(range(0, num_records, block_size)):

# Pull chunk to migrate
data = connection.execute(
sa.select([*read_columns], order_by=table.c.id.asc(), offset=block, limit=block_size)
@@ -84,14 +81,12 @@


def json_to_msgpack_table_dropcols(table_name, block_size, update_columns):

column_pairs, old_names, new_names = _get_colnames(update_columns)
for col in new_names:
op.drop_column(table_name, col)


def json_to_msgpack_table_altercolumns(table_name, update_columns, nullable_true=None):

if nullable_true is None:
nullable_true = set()

5 changes: 3 additions & 2 deletions qcfractal/qcfractal/components/auth/auth_socket.py
@@ -27,7 +27,9 @@ def __init__(self, root_socket: SQLAlchemySocket):
self.unauth_read_permissions = self.root_socket.roles.get("read")["permissions"]
self.protected_resources = {"users", "roles", "me"}

def authenticate(self, username: str, password: str, *, session: Optional[Session] = None) -> Tuple[UserInfo, RoleInfo]:
def authenticate(
self, username: str, password: str, *, session: Optional[Session] = None
) -> Tuple[UserInfo, RoleInfo]:
"""
Authenticates a given username and password, returning info about the user and their role
@@ -166,7 +168,6 @@ def assert_authorized(
raise AuthorizationFailure(msg)

def allowed_actions(self, subject: Any, resources: Any, actions: Any, policies: Any) -> List[Tuple[str, str]]:

allowed: List[Tuple[str, str]] = []

# if no auth required, always allowed, except for protected endpoints
1 change: 0 additions & 1 deletion qcfractal/qcfractal/components/auth/user_socket.py
@@ -415,7 +415,6 @@ def get_owner_ids(
def assert_group_member(
self, user_id: Optional[int], group_id: Optional[int], *, session: Optional[Session] = None
):

# No user and group - ok
if user_id is None and group_id is None:
return
1 change: 0 additions & 1 deletion qcfractal/qcfractal/components/create_view.py
@@ -43,7 +43,6 @@ def _serialize_orm(orm, exclude=None):


def create_dataset_view(dataset_id: int, socket: SQLAlchemySocket, view_file_path: str):

if os.path.exists(view_file_path):
raise RuntimeError(f"File {view_file_path} exists - will not overwrite")

3 changes: 1 addition & 2 deletions qcfractal/qcfractal/components/dataset_routes.py
@@ -32,7 +32,6 @@ def list_dataset_v1():
@api_v1.route("/datasets/<int:dataset_id>", methods=["GET"])
@wrap_route("READ")
def get_general_dataset_v1(dataset_id: int, url_params: ProjURLParameters):

with storage_socket.session_scope(True) as session:
ds_type = storage_socket.datasets.lookup_type(dataset_id, session=session)
ds_socket = storage_socket.datasets.get_socket(ds_type)
@@ -73,13 +72,13 @@ def delete_dataset_v1(dataset_id: int, url_params: DatasetDeleteParams):
# are different
#################################################################


########################
# Adding a dataset
########################
@api_v1.route("/datasets/<string:dataset_type>", methods=["POST"])
@wrap_route("WRITE")
def add_dataset_v1(dataset_type: str, body_data: DatasetAddBody):

ds_socket = storage_socket.datasets.get_socket(dataset_type)
return ds_socket.add(
name=body_data.name,
3 changes: 0 additions & 3 deletions qcfractal/qcfractal/components/dataset_socket.py
@@ -400,7 +400,6 @@ def add(
)

with self.root_socket.optional_session(session) as session:

user_id, group_id = self.root_socket.users.get_owner_ids(owner_user, owner_group)
self.root_socket.users.assert_group_member(user_id, group_id, session=session)

@@ -452,7 +451,6 @@ def update_metadata(
raise MissingDataError(f"Could not find dataset with type={self.dataset_type} and id={dataset_id}")

if ds.name != new_metadata.name:

# If only change in case, no need to check if it already exists
if ds.name.lower() != new_metadata.name.lower():
stmt2 = select(self.dataset_orm.id)
@@ -1285,7 +1283,6 @@ def modify_records(
"""

with self.root_socket.optional_session(session) as session:

record_ids = self._lookup_record_ids(
session,
dataset_id,
@@ -23,7 +23,6 @@


class GridoptimizationDatasetSocket(BaseDatasetSocket):

# Used by the base class
dataset_orm = GridoptimizationDatasetORM
specification_orm = GridoptimizationDatasetSpecificationORM
@@ -47,7 +46,6 @@ def _add_specification(
def _create_entries(
self, session: Session, dataset_id: int, new_entries: Sequence[GridoptimizationDatasetNewEntry]
):

all_entries = []
for entry in new_entries:
meta, mol_ids = self.root_socket.molecules.add_mixed([entry.initial_molecule], session=session)
