Skip to content

Commit

Permalink
Merge pull request #467 from dgasmith/v12
Browse files Browse the repository at this point in the history
Migration: Updates contrib values migration
  • Loading branch information
dgasmith committed Nov 6, 2019
2 parents 01defb5 + 332192b commit 50ed886
Show file tree
Hide file tree
Showing 9 changed files with 142 additions and 40 deletions.
18 changes: 18 additions & 0 deletions devtools/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -71,3 +71,21 @@ is installed by looking at the `git` tags and how many commits ahead this versio
If the version of this commit is the same as a `git` tag, the installed version is the same as the tag,
e.g. `qcfractal-0.1.2`, otherwise it will be appended with `+X` where `X` is the number of commits
ahead from the last tag, and then `-YYYYYY` where the `Y`'s are replaced with the `git` commit hash.


# Alembic Migration
Alembic migrations form the core of changing the database schema. A general guideline is below, but keep in
mind that there often may be required changes to this procedure. When in doubt, ask on the QCArchive Slack.

Initialize a server from the previous version:
- `git checkout 'previous version'`
- `qcfractal-server init --base-folder=tmp --db-port=9000`

Play upgrades on top of this server:
- `git checkout 'master or feature branch'`
- `qcfractal-server upgrade --base-folder=tmp`

Pull alembic command line string and create migration:
- Run the output of `qcfractal-server info --base-folder=tmp alembic | tail -1` with `revision --autogenerate` appended

This will create a new file in `qcfractal/alembic/versions` which should be reviewed and edited as needed.
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,8 @@ def migrate_contributed_values_data():

for ds in ds_ids_data:
(ds_id, ds_contrib) = ds
if ds_contrib is None:
continue

for key, dict_values in ds_contrib.items():

Expand Down Expand Up @@ -67,18 +69,21 @@ def upgrade():
sa.Column('name', sa.String(), nullable=False),
sa.Column('collection_id', sa.Integer(), nullable=False),
sa.Column('citations', sa.JSON(), nullable=True),
sa.Column('theory_level', sa.JSON(), nullable=True),
sa.Column('theory_level', sa.JSON(), nullable=False),
sa.Column('theory_level_details', sa.JSON(), nullable=True),
sa.Column('comments', sa.String(), nullable=True),
sa.Column('values', MsgpackExt(), nullable=True),
sa.Column('index', MsgpackExt(), nullable=True),
sa.Column('values', MsgpackExt(), nullable=False),
sa.Column('index', MsgpackExt(), nullable=False),
sa.Column('external_url', sa.String(), nullable=True),
sa.Column('doi', sa.String(), nullable=True),
sa.Column('units', sa.String(), nullable=True),
sa.Column('units', sa.String(), nullable=False),
sa.Column('values_structure', sa.JSON(), nullable=True, default=lambda: {}),
sa.ForeignKeyConstraint(['collection_id'], ['collection.id'], ondelete='cascade'),
sa.PrimaryKeyConstraint('name', 'collection_id')
)

op.alter_column('contributed_values', 'values_structure', server_default=None, nullable=False)

migrate_contributed_values_data()

op.drop_column('dataset', 'contributed_values_data')
Expand Down
56 changes: 56 additions & 0 deletions qcfractal/alembic/versions/4b27843a188a_v12_final.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
"""Final migration synchronization from v0.11 to v0.12
Revision ID: 4b27843a188a
Revises: 159ba85908fd
Create Date: 2019-11-06 13:48:39.716633
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = '4b27843a188a'
down_revision = '159ba85908fd'
branch_labels = None
depends_on = None


def upgrade():
    """Upgrade the schema: final synchronization step from v0.11 to v0.12.

    Tightens column constraints and changes the task-queue manager
    foreign key so that deleting a manager detaches (rather than
    blocks on) its tasks.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Cap the collection group label at 100 characters, matching
    # CollectionORM.group = Column(String(100), ...).
    op.alter_column('collection', 'group',
                    existing_type=sa.VARCHAR(),
                    type_=sa.String(length=100),
                    existing_nullable=False)
    # These msgpack-packed BYTEA payloads become mandatory (NOT NULL).
    op.alter_column('molecule', 'geometry',
                    existing_type=postgresql.BYTEA(),
                    nullable=False)
    op.alter_column('molecule', 'symbols',
                    existing_type=postgresql.BYTEA(),
                    nullable=False)
    op.alter_column('task_queue', 'spec',
                    existing_type=postgresql.BYTEA(),
                    nullable=False)
    # Recreate the manager FK with ON DELETE SET NULL so removing a
    # queue_manager row nulls task_queue.manager instead of failing.
    op.drop_constraint('task_queue_manager_fkey', 'task_queue', type_='foreignkey')
    op.create_foreign_key('task_queue_manager_fkey', 'task_queue', 'queue_manager', ['manager'], ['name'], ondelete='SET NULL')
    # ### end Alembic commands ###


def downgrade():
    """Downgrade the schema: revert the v0.11 -> v0.12 synchronization.

    Mirrors :func:`upgrade` in reverse order — restores the plain
    (no ON DELETE action) manager foreign key, relaxes the BYTEA
    columns back to nullable, and widens the collection group column.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Restore the original FK without ON DELETE SET NULL.
    op.drop_constraint('task_queue_manager_fkey', 'task_queue', type_='foreignkey')
    op.create_foreign_key('task_queue_manager_fkey', 'task_queue', 'queue_manager', ['manager'], ['name'])
    # Allow NULLs again in the msgpack-packed payload columns.
    op.alter_column('task_queue', 'spec',
                    existing_type=postgresql.BYTEA(),
                    nullable=True)
    op.alter_column('molecule', 'symbols',
                    existing_type=postgresql.BYTEA(),
                    nullable=True)
    op.alter_column('molecule', 'geometry',
                    existing_type=postgresql.BYTEA(),
                    nullable=True)
    # Drop the 100-character cap on the collection group label.
    op.alter_column('collection', 'group',
                    existing_type=sa.String(length=100),
                    type_=sa.VARCHAR(),
                    existing_nullable=False)
    # ### end Alembic commands ###
Original file line number Diff line number Diff line change
Expand Up @@ -22,10 +22,6 @@ def upgrade():
op.add_column('collection', sa.Column('group', sa.String(), nullable=False, server_default='default'))
op.alter_column('collection', 'group', server_default=None)

op.add_column('collection', sa.Column('metadata', sa.JSON(), nullable=True))
op.execute("UPDATE collection SET metadata = '{}'::json")
op.alter_column('collection', 'metadata', nullable=False)

op.add_column('collection', sa.Column('view_url_hdf5', sa.String(), nullable=True))
op.add_column('collection', sa.Column('view_url_plaintext', sa.String(), nullable=True))
op.add_column('collection', sa.Column('view_metadata', sa.JSON(), nullable=True))
Expand Down
26 changes: 13 additions & 13 deletions qcfractal/storage_sockets/models/collections_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,14 +35,14 @@ class CollectionORM(Base):

tags = Column(JSON)
tagline = Column(String)
description = Column(String, nullable=True)
description = Column(String)

group = Column(String(100), nullable=True)
group = Column(String(100), nullable=False)
visibility = Column(Boolean, nullable=False)

view_url_hdf5 = Column(String, nullable=True)
view_url_plaintext = Column(String, nullable=True)
view_metadata = Column(JSON, nullable=True)
view_url_hdf5 = Column(String)
view_url_plaintext = Column(String)
view_metadata = Column(JSON)
view_available = Column(Boolean, nullable=False)

provenance = Column(JSON)
Expand All @@ -68,13 +68,13 @@ class DatasetMixin:
Mixin class for common Dataset attributes.
"""

default_benchmark = Column(String, nullable=True)
default_keywords = Column(JSON, nullable=True)
default_benchmark = Column(String)
default_keywords = Column(JSON)

default_driver = Column(String, nullable=True)
default_units = Column(String, nullable=True)
alias_keywords = Column(JSON, nullable=True)
default_program = Column(String, nullable=True)
default_driver = Column(String)
default_units = Column(String)
alias_keywords = Column(JSON)
default_program = Column(String)

history_keys = Column(JSON)
history = Column(JSON)
Expand All @@ -91,7 +91,7 @@ class ContributedValuesORM(Base):
name = Column(String, nullable=False, primary_key=True)
values = Column(MsgpackExt, nullable=False)
index = Column(MsgpackExt, nullable=False)
values_structure = Column(JSON)
values_structure = Column(JSON, nullable=False)

theory_level = Column(JSON, nullable=False)
units = Column(String, nullable=False)
Expand Down Expand Up @@ -242,7 +242,7 @@ class ReactionDatasetORM(CollectionORM, DatasetMixin):

id = Column(Integer, ForeignKey('collection.id', ondelete="CASCADE"), primary_key=True)

ds_type = Column(String, nullable=True)
ds_type = Column(String)

records_obj = relationship(ReactionDatasetEntryORM,
lazy='selectin',
Expand Down
2 changes: 1 addition & 1 deletion qcfractal/storage_sockets/models/sql_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -210,7 +210,7 @@ class TaskQueueORM(Base):
procedure = Column(String)
status = Column(Enum(TaskStatusEnum), default=TaskStatusEnum.waiting)
priority = Column(Integer, default=int(PriorityEnum.NORMAL))
manager = Column(String, ForeignKey('queue_manager.name'), default=None)
manager = Column(String, ForeignKey('queue_manager.name', ondelete="SET NULL"), default=None)
error = Column(String) # TODO: tobe removed - should be in results

created_on = Column(DateTime, default=datetime.datetime.utcnow)
Expand Down
7 changes: 4 additions & 3 deletions qcfractal/tests/test_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -101,14 +101,16 @@ def test_collection_portal(test_server, encoding):
"something": "else",
"array": ["12345"],
"visibility": True,
"view_available": False
"view_available": False,
"group": "default"
}

client = ptl.FractalClient(test_server)
client._set_encoding(encoding)

# Test add
_ = client.add_collection(db)
ret = client.add_collection(db, full_return=True)
print(ret)

# Test get
get_db = client.get_collection(db["collection"], db["name"], full_return=True)
Expand All @@ -121,7 +123,6 @@ def test_collection_portal(test_server, encoding):
get_db.data[0].pop("view_url_hdf5", None)
get_db.data[0].pop("view_url_plaintext", None)
get_db.data[0].pop("view_metadata", None)
get_db.data[0].pop("group", None)
get_db.data[0].pop("description", None)

assert db == get_db.data[0]
Expand Down
2 changes: 1 addition & 1 deletion qcfractal/tests/test_server.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ def test_storage_socket(test_server):
"array": ["54321"],
"visibility": True,
"view_available": False,
"group": "default",
}
# Cast collection type to lower since the server-side does it anyways
storage['collection'] = storage['collection'].lower()
Expand Down Expand Up @@ -67,7 +68,6 @@ def test_storage_socket(test_server):
pdata["data"][0].pop("view_url_hdf5", None)
pdata["data"][0].pop("view_url_plaintext", None)
pdata["data"][0].pop("view_metadata", None)
pdata["data"][0].pop("group", None)
pdata["data"][0].pop("description", None)

assert pdata["data"][0] == storage
Expand Down
54 changes: 40 additions & 14 deletions qcfractal/tests/test_storage.py
Original file line number Diff line number Diff line change
Expand Up @@ -219,7 +219,8 @@ def test_collections_add(storage_socket):
"something": "else",
"array": ["54321"],
"visibility": True,
"view_available": False
"view_available": False,
"group": "default",
}

ret = storage_socket.add_collection(db)
Expand All @@ -239,6 +240,7 @@ def test_collections_add(storage_socket):
# assert len(ret["meta"]["missing"]) == 1
assert ret["meta"]["n_found"] == 0


def test_collections_overwrite(storage_socket):

collection = 'TorsionDriveRecord'
Expand All @@ -249,7 +251,8 @@ def test_collections_overwrite(storage_socket):
"something": "else",
"array": ["54321"],
"visibility": True,
"view_available": False
"view_available": False,
"group": "default",
}

ret = storage_socket.add_collection(db)
Expand All @@ -264,6 +267,7 @@ def test_collections_overwrite(storage_socket):
# "id": ret["data"][0]["id"],
"collection": "TorsionDriveRecord", # no need to include
"name": "Torsion123", # no need to include
"group": "default",
"something": "New",
"something2": "else",
"view_available": True,
Expand All @@ -290,6 +294,7 @@ def test_collections_overwrite(storage_socket):
ret = storage_socket.del_collection(collection, name)
assert ret == 1


def test_dataset_add_delete_cascade(storage_socket):

collection = 'dataset'
Expand All @@ -311,6 +316,8 @@ def test_dataset_add_delete_cascade(storage_socket):
True,
"view_available":
False,
"group":
"default",
"records": [{
"name": "He1",
"molecule_id": mol_insert["data"][0],
Expand All @@ -328,13 +335,15 @@ def test_dataset_add_delete_cascade(storage_socket):
"theory_level": 'PBE0',
"units": 'kcal/mol',
"values": [5, 10],
"index": ["He2", "He1"]
"index": ["He2", "He1"],
"values_structure": {},
}
}
}

ret = storage_socket.add_collection(db.copy())
assert ret["meta"]["n_inserted"] == 1
print(ret["meta"]["error_description"])
assert ret["meta"]["n_inserted"] == 1, ret["meta"]["error_description"]

ret = storage_socket.get_collections(collection=collection, name=name)
assert ret["meta"]["success"] is True
Expand All @@ -349,14 +358,16 @@ def test_dataset_add_delete_cascade(storage_socket):
"theory_level": 'PBE0 FHI-AIMS',
"units": 'kcal/mol',
"values": np.array([5, 10], dtype=np.int16),
"index": ["He2", "He1"]
"index": ["He2", "He1"],
"values_structure": {},
},
'contrib2': {
"name": 'contrib2',
"theory_level": 'PBE0 FHI-AIMS tight',
"units": 'kcal/mol',
"values": [np.random.rand(2, 3), np.random.rand(2, 3)],
"index": ["He2", "He1"]
"index": ["He2", "He1"],
"values_structure": {},
}
}

Expand All @@ -381,7 +392,6 @@ def test_dataset_add_delete_cascade(storage_socket):
assert len(ret['data'][0]['contributed_values'].keys()) == 2
assert len(ret['data'][0]['records']) == 0


# cleanup
# Can't delete molecule when datasets reference it (no cascade)
with pytest.raises(sqlalchemy.exc.IntegrityError):
Expand Down Expand Up @@ -1301,20 +1311,36 @@ def test_collections_include_exclude(storage_socket):
mol_insert = storage_socket.add_molecules([water, water2])

db = {
"collection": collection,
"name": name,
"visibility": True,
"view_available": False,
"records": [{"name": "He1", "molecule_id": mol_insert["data"][0], "comment": None, "local_results": {}},
{"name": "He2", "molecule_id": mol_insert["data"][1], "comment": None, "local_results": {}}]
"collection":
collection,
"name":
name,
"visibility":
True,
"view_available":
False,
"group":
"default",
"records": [{
"name": "He1",
"molecule_id": mol_insert["data"][0],
"comment": None,
"local_results": {}
}, {
"name": "He2",
"molecule_id": mol_insert["data"][1],
"comment": None,
"local_results": {}
}]
}

db2 = {
"collection": collection,
"name": name2,
"visibility": True,
"view_available": False,
"records": []
"records": [],
"group": "default"
}

ret = storage_socket.add_collection(db)
Expand Down

0 comments on commit 50ed886

Please sign in to comment.