Commit: Lint

mattwthompson committed Nov 14, 2022
1 parent f3ba738 commit 2be6ae9
Showing 9 changed files with 153 additions and 96 deletions.
53 changes: 27 additions & 26 deletions openff/qcsubmit/datasets/datasets.py
@@ -25,15 +25,11 @@
from qcportal.datasets.optimization import OptimizationDatasetNewEntry
from qcportal.datasets.singlepoint import SinglepointDatasetNewEntry
from qcportal.datasets.torsiondrive import TorsiondriveDatasetNewEntry
from qcportal.records.singlepoint import SinglepointDriver, QCSpecification
from qcportal.records.optimization import OptimizationSpecification
from qcportal.records.singlepoint import QCSpecification, SinglepointDriver
from typing_extensions import Literal

from openff.qcsubmit.common_structures import (
CommonBase,
Metadata,
MoleculeAttributes,
)
from openff.qcsubmit.common_structures import CommonBase, Metadata, MoleculeAttributes
from openff.qcsubmit.constraints import Constraints
from openff.qcsubmit.datasets.entries import (
DatasetEntry,
@@ -150,7 +146,6 @@ def _get_specifications(self) -> "OptimizationSpecification":
"""
raise NotImplementedError()


@abc.abstractmethod
def _get_entries(self) -> List[Any]:
"""Add entries to the Dataset's corresponding Collection.
@@ -167,7 +162,6 @@ def _get_entries(self) -> List[Any]:
"""
pass


@abc.abstractmethod
def to_tasks(self) -> Dict[str, List[Union[AtomicInput, OptimizationInput]]]:
"""
@@ -199,7 +193,6 @@ def submit(
"""


# pre submission checks
# make sure we have some QCSpec to submit
self._check_qc_specs()
@@ -738,7 +731,6 @@ def _molecules_to_inchikey(self) -> List[str]:
return inchikey



# TODO: SinglepointDataset
class BasicDataset(_BaseDataset):
"""
@@ -799,7 +791,9 @@ def __add__(self, other: "BasicDataset") -> "BasicDataset":

return new_dataset

def _generate_collection(self, client: "PortalClient") -> ptl.datasets.SinglepointDataset:
def _generate_collection(
self, client: "PortalClient"
) -> ptl.datasets.SinglepointDataset:

return client.add_dataset(
dataset_type="singlepoint",
@@ -817,7 +811,7 @@ def _get_specifications(self) -> Dict[str, QCSpecification]:
"""Needed for `submit` usage."""

ret = {}
for spec_name,spec in self.qc_specifications.items():
for spec_name, spec in self.qc_specifications.items():
ret[spec_name] = QCSpecification(
driver=self.driver,
method=spec.method,
@@ -829,7 +823,6 @@ def _get_specifications(self) -> Dict[str, QCSpecification]:

return ret


def _get_entries(self) -> List[SinglepointDatasetNewEntry]:

entries: List[SinglepointDatasetNewEntry] = []
@@ -844,13 +837,16 @@ def _get_entries(self) -> List[SinglepointDatasetNewEntry]:

for j, molecule in enumerate(entry.initial_molecules):
name = index + f"-{tag + j}"
entries.append(SinglepointDatasetNewEntry(name=name, molecule=molecule))
entries.append(
SinglepointDatasetNewEntry(name=name, molecule=molecule)
)
else:
entries.append(
SinglepointDatasetNewEntry(
name=entry_name,
molecule=entry.initial_molecules[0],
))
)
)

return entries
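
For context, the reformatted code above targets the new QCPortal API, in which a singlepoint collection is created with client.add_dataset(dataset_type="singlepoint", ...) and filled with one SinglepointDatasetNewEntry per conformer. A minimal standalone sketch of that pattern, assuming a reachable QCFractal server at the hypothetical address below; the name argument and the example molecule are illustrative, not taken from this commit:

from qcelemental.models import Molecule
from qcportal import PortalClient
from qcportal.datasets.singlepoint import SinglepointDatasetNewEntry

# Hypothetical server address; constructing the client requires a live server.
client = PortalClient("https://qcfractal.example.org")

# A single water conformer built directly with QCElemental.
water = Molecule.from_data(
    """
    0 1
    O  0.0000  0.0000  0.1173
    H  0.0000  0.7572 -0.4692
    H  0.0000 -0.7572 -0.4692
    """
)

# Create the collection, mirroring _generate_collection above; the name
# argument is an assumption (only dataset_type is visible in this diff).
dataset = client.add_dataset(
    dataset_type="singlepoint",
    name="example-singlepoint-dataset",
)

# One entry per conformer, mirroring the single-conformer branch of _get_entries.
entries = [SinglepointDatasetNewEntry(name="water", molecule=water)]
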

@@ -1008,7 +1004,7 @@ def _get_specifications(self) -> Dict[str, OptimizationSpecification]:

ret = {}

for spec_name,spec in self.qc_specifications.items():
for spec_name, spec in self.qc_specifications.items():
qc_spec = QCSpecification(
driver=self.driver,
method=spec.method,
@@ -1021,7 +1017,7 @@ def _get_specifications(self) -> Dict[str, OptimizationSpecification]:
ret[spec_name] = OptimizationSpecification(
program=self.optimization_procedure.program,
qc_specification=qc_spec,
keywords=opt_kw
keywords=opt_kw,
)

return ret
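
For context, the block above nests a QCSpecification inside an OptimizationSpecification, which is how the new API describes a geometry optimisation: the inner object defines the gradient calculation, the outer one the engine that drives it. A standalone sketch assuming a psi4 B3LYP/DZVP gradient spec run through geomeTRIC; the program and basis fields are not visible in this diff and are assumptions:

from qcportal.records.optimization import OptimizationSpecification
from qcportal.records.singlepoint import QCSpecification, SinglepointDriver

# The gradient specification the optimiser calls at every step.
qc_spec = QCSpecification(
    program="psi4",                     # assumed QC program
    driver=SinglepointDriver.gradient,  # optimisations assume a gradient driver
    method="b3lyp",
    basis="dzvp",
)

# The optimisation spec wraps the QC spec, as in _get_specifications above.
opt_spec = OptimizationSpecification(
    program="geometric",     # the optimisation engine
    qc_specification=qc_spec,
    keywords={},             # optimiser keywords; empty for defaults
)
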
@@ -1040,17 +1036,21 @@ def _get_entries(self) -> List[OptimizationDatasetNewEntry]:

for j, molecule in enumerate(entry.initial_molecules):
name = index + f"-{tag + j}"
entries.append(OptimizationDatasetNewEntry(name=name, initial_molecule=molecule))
entries.append(
OptimizationDatasetNewEntry(
name=name, initial_molecule=molecule
)
)
else:
entries.append(
OptimizationDatasetNewEntry(
name=entry_name,
initial_molecule=entry.initial_molecules[0],
))
)
)

return entries
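
Both _get_entries implementations above expand a multi-conformer entry into one dataset entry per conformer, suffixing the entry index with a counter. A pure-Python illustration of that naming pattern, with the tag offset assumed to start at 0:

# Hypothetical inputs: an entry index and three conformers.
index = "CCO"  # e.g. the molecule index used for the entry
conformers = ["conf_a", "conf_b", "conf_c"]  # stand-ins for QCElemental Molecules
tag = 0        # assumed starting offset

names = [index + f"-{tag + j}" for j, _ in enumerate(conformers)]
print(names)   # ['CCO-0', 'CCO-1', 'CCO-2']
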


def to_tasks(self) -> Dict[str, List[OptimizationInput]]:
"""
Build a list of QCEngine optimisation inputs organised by the optimisation engine which should be used to run the task.
@@ -1222,15 +1222,16 @@ def _get_entries(self) -> List[TorsiondriveDatasetNewEntry]:

td_keywords.update(entry.keywords.dict(exclude_defaults=True))

entries.append(TorsiondriveDatasetNewEntry(
name=entry_name,
initial_molecules=entry.initial_molecules,
torsiondrive_keywords=td_keywords
))
entries.append(
TorsiondriveDatasetNewEntry(
name=entry_name,
initial_molecules=entry.initial_molecules,
torsiondrive_keywords=td_keywords,
)
)

return entries
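
For context, a torsiondrive entry bundles all input conformers with the torsiondrive keywords, matching the constructor call above. A minimal sketch; the molecule and the option names inside torsiondrive_keywords are illustrative assumptions, not taken from this commit:

from qcelemental.models import Molecule
from qcportal.datasets.torsiondrive import TorsiondriveDatasetNewEntry

# Hydrogen peroxide with a single driven H-O-O-H torsion (hypothetical example).
hooh = Molecule.from_data(
    """
    0 1
    O  0.0000  0.7375 -0.0528
    O  0.0000 -0.7375 -0.0528
    H  0.8190  0.8170  0.4220
    H -0.8190 -0.8170  0.4220
    """
)

entry = TorsiondriveDatasetNewEntry(
    name="hooh",
    initial_molecules=[hooh],
    torsiondrive_keywords={
        # Assumed torsiondrive option names, not taken from this diff.
        "dihedrals": [(2, 0, 1, 3)],
        "grid_spacing": [15],
    },
)
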


def to_tasks(self) -> Dict[str, List[OptimizationInput]]:
"""Build a list of QCEngine procedure tasks which correspond to this dataset."""

2 changes: 1 addition & 1 deletion openff/qcsubmit/procedures.py
@@ -2,7 +2,7 @@
The procedure settings controllers
"""

from typing import Dict, Any
from typing import Any, Dict

from pydantic import BaseModel, Field, validator
from qcportal.records.optimization import OptimizationSpecification
18 changes: 12 additions & 6 deletions openff/qcsubmit/results/caching.py
@@ -10,8 +10,12 @@
from openff.units import unit
from qcportal import PortalClient
from qcportal.molecules import Molecule as QCMolecule
from qcportal.records import TorsiondriveRecord
from qcportal.records import OptimizationRecord, BaseRecord, SinglepointRecord
from qcportal.records import (
BaseRecord,
OptimizationRecord,
SinglepointRecord,
TorsiondriveRecord,
)

if TYPE_CHECKING:
from openff.qcsubmit.results.results import (
@@ -155,7 +159,7 @@ def cached_query_procedures(client_address: str, record_ids: List[str]) -> List[
client_address = client_address.rstrip("/")
client = cached_fractal_client(client_address)

query_limit = client.api_limits['get_records']
query_limit = client.api_limits["get_records"]

return _cached_client_query(
client_address,
@@ -182,7 +186,7 @@ def cached_query_molecules(
client_address = client_address.rstrip("/")
client = cached_fractal_client(client_address)

query_limit = client.api_limits['get_molecules']
query_limit = client.api_limits["get_molecules"]

return _cached_client_query(
client_address,
@@ -326,8 +330,10 @@ def cached_query_torsion_drive_results(

qc_record = qc_records[result.record_id]

qc_grid_molecules = [(grid_point, opt.final_molecule)
for grid_point, opt in qc_record.minimum_optimizations.items()]
qc_grid_molecules = [
(grid_point, opt.final_molecule)
for grid_point, opt in qc_record.minimum_optimizations.items()
]
grid_ids = [*qc_record.minimum_positions]
# order the ids so the conformers follow the torsiondrive scan range
grid_ids.sort(key=lambda s: tuple(float(x) for x in s.strip("[]").split(", ")))
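
The sort above exists because torsiondrive grid ids are stored as strings such as "[-165]" or "[15, 30]"; lexicographic order would scramble the scan, so the key converts each id to a numeric tuple first. A standalone illustration of that key function:

# Hypothetical grid ids as they appear on a 1-D torsiondrive record.
grid_ids = ["[15]", "[-165]", "[0]", "[-15]", "[180]"]

# Plain string sorting would place "[-15]" before "[-165]"; the numeric key fixes that.
grid_ids.sort(key=lambda s: tuple(float(x) for x in s.strip("[]").split(", ")))
print(grid_ids)  # ['[-165]', '[-15]', '[0]', '[15]', '[180]']
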
23 changes: 19 additions & 4 deletions openff/qcsubmit/results/filters.py
@@ -17,8 +17,8 @@
from pydantic import BaseModel, Field, PrivateAttr, root_validator, validator
from qcelemental.molutil import guess_connectivity
from qcportal.records import (
OptimizationRecord,
BaseRecord,
OptimizationRecord,
RecordStatusEnum,
SinglepointRecord,
)
@@ -230,7 +230,12 @@ class LowestEnergyFilter(SinglepointRecordGroupFilter):
def _filter_function(
self,
entries: List[
Tuple["_BaseResult", Union[SinglepointRecord, OptimizationRecord], Molecule, str]
Tuple[
"_BaseResult",
Union[SinglepointRecord, OptimizationRecord],
Molecule,
str,
]
],
) -> List[Tuple["_BaseResult", str]]:
"""Only return the lowest energy entry or final molecule."""
@@ -362,7 +367,12 @@ def _compute_rmsd_matrix(self, molecule: Molecule) -> numpy.ndarray:
def _filter_function(
self,
entries: List[
Tuple["_BaseResult", Union[SinglepointRecord, OptimizationRecord], Molecule, str]
Tuple[
"_BaseResult",
Union[SinglepointRecord, OptimizationRecord],
Molecule,
str,
]
],
) -> List[Tuple["_BaseResult", str]]:

@@ -436,7 +446,12 @@ class MinimumConformersFilter(SinglepointRecordGroupFilter):
def _filter_function(
self,
entries: List[
Tuple["_BaseResult", Union[SinglepointRecord, OptimizationRecord], Molecule, str]
Tuple[
"_BaseResult",
Union[SinglepointRecord, OptimizationRecord],
Molecule,
str,
]
],
) -> List[Tuple["_BaseResult", str]]:
