Skip to content

Commit

Permalink
fix: correct the destination path for rebasing
Browse files — browse the repository at this point in the history
  • Loading branch information
migraf committed Apr 17, 2023
1 parent d527ce5 commit 3a9dd41
Show file tree
Hide file tree
Showing 4 changed files with 25 additions and 131 deletions.
18 changes: 16 additions & 2 deletions docker/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -15,9 +15,23 @@ RUN apt -y update && apt-get -y install software-properties-common && \
rm -rf /var/lib/apt/lists/* && \
rm -rf /var/cache/apt/archives/*


# Setup poetry environment
ENV POETRY_HOME="/opt/poetry" \
POETRY_VIRTUALENVS_CREATE=false \
POETRY_VIRTUALENVS_IN_PROJECT=false \
POETRY_NO_INTERACTION=1 \
POETRY_VERSION=1.4.2

RUN curl -sSL https://install.python-poetry.org | python3 -
ENV PATH="$PATH:$POETRY_HOME/bin"

COPY . /opt/protocol
# install train container library
RUN pip install --force-reinstall --no-cache-dir -v "pht-train-container-library==2.0.4"
COPY ./docker/entrypoint /opt/protocol/docker/entrypoint
WORKDIR /opt/protocol

RUN poetry install --without dev

RUN chmod +x /opt/protocol/docker/entrypoint/entrypoint.sh

ENTRYPOINT ["/opt/protocol/docker/entrypoint/entrypoint.sh"]
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "pht-train-container-library"
version = "2.0.4"
version = "2.0.5"
description = "Python library for handling containerized PHT trains"
authors = ["Michael Graf <[email protected]>"]
readme = "README.md"
Expand Down
116 changes: 3 additions & 113 deletions testing/sp_test.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,3 @@
import json
import pprint
import tarfile
import time
from io import BytesIO
from tarfile import TarInfo
from timeit import default_timer as timer
Expand All @@ -15,124 +11,18 @@
extract_train_config,
extract_query_json,
add_archive,
rebase_train_image,
)

from train_lib.security.protocol import SecurityProtocol

IMG = "staging-harbor.tada5hi.net/b0f0imz4srdr7bnl0z8en/b092b95e-7d81-403a-b504-3965d6877908"
IMG = "harbor.personalhealthtrain.de/3ke5ymdmovwot5ac6b09i/bbc2c4a2-0436-4d5e-ad73-1867d920bf4e:latest"


def main():
    """Smoke-test the pre-run security protocol against the image in IMG.

    Extracts the train config and query from the image, prints several
    representations, round-trips the query bytes through disk, and times
    the pre-run protocol. Purely side-effecting; returns None.
    """
    load_dotenv(find_dotenv())

    # Pull the train configuration out of the image and display it.
    config = extract_train_config(IMG)
    hasher = hashes.Hash(hashes.SHA512(), backend=default_backend())
    pprint.pp(config)

    # Extract the query payload and print it in a few forms.
    query = extract_query_json(IMG)
    print(json.loads(query))
    print(query.decode("utf-8"))

    hasher.update(query)
    print(hasher.finalize().hex())
    print(type(query))

    # Round-trip the query bytes through disk and confirm they survive.
    with open("query.json", "wb") as out_file:
        out_file.write(query)
    with open("query.json", "rb") as in_file:
        reread = in_file.read()
    print(reread == query)

    # Execute pre run protocol and report how long it took.
    protocol = SecurityProtocol("10fqi2nugnog5nak0ylec", config=config)
    t0 = timer()
    protocol.pre_run_protocol(img=IMG, private_key_path="test-key.pem")
    print(f"Pre run execution time: {timer() - t0}")

    # # Run the image
    # start = timer()
    # client = docker.from_env()
    # container = client.containers.run(IMG, detach=True)
    # container.wait()
    # print(container.logs())
    # repository, tag = IMG.split(":")
    # container.commit(repository=repository, tag=tag)
    # print(f"Train execution time: {timer() - start}")
    #
    # # Post run
    # start = timer()
    # sp.post_run_protocol(img=IMG, private_key_path=os.path.abspath("./keys/station_aachen_private_key.pem"))
    # print(f"Post run execution time: {timer() - start}")


def update_config_with_correct_signature():
    """Re-sign the train hash and write the updated config back into IMG.

    Loads the user's private key, signs the pre-hashed ``e_h`` value from
    the extracted train config with RSA-PSS/SHA-512, stores the signature
    under ``e_h_sig``, packs the config into an in-memory tar archive,
    adds it to the image under ``/opt`` and rebases the image onto
    ``ubuntu:latest``.
    """
    cfg = extract_train_config(IMG)
    prehashed = bytes.fromhex(cfg["e_h"])

    # Load the signing key (unencrypted PEM) and sign the pre-computed hash.
    with open("../test/keys/user_private_key.pem", "rb") as pk:
        key = serialization.load_pem_private_key(
            pk.read(), password=None, backend=default_backend()
        )
    pss_padding = padding.PSS(
        mgf=padding.MGF1(hashes.SHA512()), salt_length=padding.PSS.MAX_LENGTH
    )
    signature = key.sign(prehashed, pss_padding, utils.Prehashed(hashes.SHA512()))
    cfg["e_h_sig"] = signature.hex()

    # Serialize the updated config into an in-memory tar archive.
    buffer = BytesIO()
    payload = json.dumps(cfg, indent=2).encode("utf-8")
    entry = TarInfo(name="train_config.json")
    entry.size = len(payload)
    entry.mtime = time.time()
    with tarfile.open(fileobj=buffer, mode="w") as archive:
        archive.addfile(entry, BytesIO(payload))
    buffer.seek(0)

    # Push the archive into the image and rebase it onto a fresh base.
    add_archive(IMG, buffer, path="/opt")
    rebase_train_image("ubuntu:latest", IMG)


# Script entry point: only main() runs by default; the re-signing helper
# is kept commented out for manual one-off use.
if __name__ == "__main__":
    # update_config_with_correct_signature()
    main()

# img = "harbor.personalhealthtrain.de/pht_incoming/c1623f6a-e734-49e2-b1c1-a0237d5521b4:latest"
# config = extract_train_config(img)
# sp = SecurityProtocol(station_id="1", config=config)
# # files = sp._parse_files(train_dir)
# # print(files)
#
# sp.pre_run_protocol("harbor.personalhealthtrain.de/pht_incoming/c1623f6a-e734-49e2-b1c1-a0237d5521b4",
# "./keys/user_private_key.pem")
#
# file_order = ["final_train/auto_augment.py", "final_train/central_entrypoint.py", "final_train/entrypoint.py",
# "final_train/eval.py", "final_train/models.py", "final_train/test.py", "final_train/train.py",
# "final_train/utils.py", "final_train/__init__.py", "final_train/pc_cfgs/example.py",
# "final_train/pc_cfgs/__init__.py", "final_train/cfgs/effb6_central_multigpu.py"]
#
# session_id = "7ba497dbcb48111f22b406d4a026f6d22c1e71f52d49ac980ac897882124d6f4f6cb5dba326670b960278d6fbdb7369f49a2c2d20580c62c6740c0b5849d9e29"
# # files_hash = hash_immutable_files(
# # immutable_files=files,
# # user_id="3",
# # session_id=bytes.fromhex(session_id),
# # binary_files=False,
# # ordered_file_list=file_order)
# #
# # print("File hash", files_hash.hex())
# #
# archive = extract_archive(img="360be6e2e92a", extract_path="/opt/pht_train")
# # print(archive.getmembers())
#
# archive_files, names = files_from_archive(archive)
# file_info = zip(archive_files, names)
# for f in file_info:
# print(f)
# file_hash = hash_immutable_files(archive_files, user_id="3", session_id=bytes.fromhex(session_id),
# binary_files=True, ordered_file_list=file_order,
# immutable_file_names=names)
# print(file_hash.hex())
20 changes: 5 additions & 15 deletions train_lib/docker_util/docker_ops.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@
from loguru import logger

import docker
from docker.models.containers import Container
from train_lib.docker_util import TIMEOUT
from train_lib.security.constants import TrainTags
from train_lib.security.train_config import TrainConfig
Expand Down Expand Up @@ -152,23 +151,14 @@ def rebase_train_image(base_image: str, train_image: str):
:param latest_image: the image to rebase
"""
client = docker.from_env(timeout=TIMEOUT)

latest_container = client.containers.create(train_image)
base_container = client.containers.create(base_image)

def _copy(src: Container, dest: Container, path: str):
"""
Copy the given file from the src container to the dest container
:param src: the source container
:param dest: the destination container
"""
src_archive, stat = src.get_archive(path)
src.wait()
dest.put_archive(path, src_archive)
dest.wait()

# copy the archives from the PHT directories and commit the base image with under the latest tag
_copy(latest_container, base_container, "/opt")
src_archive, state = latest_container.get_archive("/opt")
logger.debug(f"Rebase copy state: {state}")
base_container.put_archive("/", src_archive)
base_container.wait()

repo = repository_from_image(train_image)
base_container.commit(repository=repo, tag=TrainTags.LATEST.value)
Expand Down

0 comments on commit 3a9dd41

Please sign in to comment.