Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

chore(ic): Update python formatting rules for the monorepo #1751

Open
wants to merge 1 commit into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
66 changes: 44 additions & 22 deletions .github/scripts/determine-initial-guest-os-versions.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,14 +4,15 @@
from typing import Any, Dict, List, Optional, TypedDict, cast
from urllib.request import urlopen

ROLLOUT_DASHBOARD_ENDPOINT='https://rollout-dashboard.ch1-rel1.dfinity.network/api/v1/rollouts'
PUBLIC_DASHBOARD_ENDPOINT='https://ic-api.internetcomputer.org/api/v3/subnets?format=json'
ROLLOUT_DASHBOARD_ENDPOINT = "https://rollout-dashboard.ch1-rel1.dfinity.network/api/v1/rollouts"
PUBLIC_DASHBOARD_ENDPOINT = "https://ic-api.internetcomputer.org/api/v3/subnets?format=json"

# Key definitions
EXECUTED_TIMESTAMP_SECONDS = 'executed_timestamp_seconds'
REPLICA_VERSIONS = 'replica_versions'
REPLICA_VERSION_ID = 'replica_version_id'
SUBNETS = 'subnets'
EXECUTED_TIMESTAMP_SECONDS = "executed_timestamp_seconds"
REPLICA_VERSIONS = "replica_versions"
REPLICA_VERSION_ID = "replica_version_id"
SUBNETS = "subnets"


# Minimal subset of API structure needed for rollout dashboard.
# Always keep me in sync with https://github.com/dfinity/dre-airflow/blob/main/rollout-dashboard/server/src/types.rs
Expand All @@ -28,11 +29,13 @@ class SubnetRolloutState(Enum):
complete = "complete"
unknown = "unknown"


class Subnet(TypedDict):
subnet_id: str
git_revision: str
state: SubnetRolloutState


class Batch(TypedDict):
subnets: List[Subnet]
    # The following three are dates, but they are ISO-8601 UTC "Z"-suffixed strings,
Expand All @@ -41,6 +44,7 @@ class Batch(TypedDict):
actual_start_time: Optional[str]
end_time: Optional[str]


class RolloutState(Enum):
complete = "complete"
failed = "failed"
Expand All @@ -50,43 +54,54 @@ class RolloutState(Enum):
waiting = "waiting"
problem = "problem"


class Rollout(TypedDict):
name: str
state: RolloutState
batches: Dict[str, Batch]


# Minimal subset of API structure needed for public dashboard.
# Swagger for the public dashboard API: https://ic-api.internetcomputer.org/api/v3/swagger .
class PDReplicaVersion(TypedDict):
executed_timestamp_seconds: Optional[int]
proposal_id: str # really an int
replica_version_id: str


class PDSubnet(TypedDict):
replica_versions: List[PDReplicaVersion]
subnet_id: str


class PDSubnetsResponse(TypedDict):
subnets: List[PDSubnet]


def eprint(*args, **kwargs):
    """Print the given arguments to standard error.

    Thin wrapper around print() that forces file=sys.stderr while
    forwarding every positional and keyword argument unchanged.
    """
    print(*args, **kwargs, file=sys.stderr)


def eprint_fmt(str, *args):
return # remove me to get some real action
return # remove me to get some real action
print((str % args) if args else str, file=sys.stderr)


def request_json(url: str) -> Any:
resp = urlopen(url, timeout=15)
if resp.status != 200:
try:
data = resp.read()
except Exception:
data = None
raise RuntimeError("Non-200 HTTP response (%s) from %s: %s" % (resp.status, url, data[:160] if data else "(no data in response)"))
raise RuntimeError(
"Non-200 HTTP response (%s) from %s: %s"
% (resp.status, url, data[:160] if data else "(no data in response)")
)
return json.load(resp)

def fetch_versions_from_rollout_dashboard(): # type: () -> list[str] | None

def fetch_versions_from_rollout_dashboard(): # type: () -> list[str] | None
"""
Fetch data from rollout dashboard

Expand All @@ -101,7 +116,7 @@ def fetch_versions_from_rollout_dashboard(): # type: () -> list[str] | None
return []

# The value of the dict entry is datestring, git revision.
subnet_to_revision = {} # type: dict[str, list[tuple[str, str]]]
subnet_to_revision = {} # type: dict[str, list[tuple[str, str]]]

for rollout in reversed(rollouts): # Oldest to newest
for batch_num_ignored, batch in rollout["batches"].items():
Expand All @@ -119,7 +134,7 @@ def fetch_versions_from_rollout_dashboard(): # type: () -> list[str] | None
subnet["git_revision"],
subnet["subnet_id"],
rollout["name"],
subnet["state"]
subnet["state"],
)
continue
else:
Expand All @@ -128,7 +143,7 @@ def fetch_versions_from_rollout_dashboard(): # type: () -> list[str] | None
subnet["git_revision"],
subnet["subnet_id"],
rollout["name"],
subnet["state"]
subnet["state"],
)
t = batch.get("end_time") or batch.get("actual_start_time") or batch["planned_start_time"]
if subnet["subnet_id"] not in subnet_to_revision:
Expand All @@ -139,12 +154,17 @@ def fetch_versions_from_rollout_dashboard(): # type: () -> list[str] | None
# Git revision coupled with the putative date or actual
# finish date for the revision. Let's fish the latest
# revision for each subnet, and get that.
return list(set([
[revision for unused_date, revision in sorted(datestring_revision_tuple)][-1]
for datestring_revision_tuple in subnet_to_revision.values()
]))
return list(
set(
[
[revision for unused_date, revision in sorted(datestring_revision_tuple)][-1]
for datestring_revision_tuple in subnet_to_revision.values()
]
)
)

def fetch_versions_from_public_dashboard(): # type: () -> list[str] | None

def fetch_versions_from_public_dashboard(): # type: () -> list[str] | None
"""
Fetch data from public dashboard

Expand All @@ -165,7 +185,7 @@ def fetch_versions_from_public_dashboard(): # type: () -> list[str] | None
latest_replica_version = list(
sorted(
[r for r in subnet["replica_versions"] if r.get("executed_timestamp_seconds")],
key=lambda rr: rr.get("executed_timestamp_seconds") or 0 # the or 0 to satisfy py3.8 typechecking
key=lambda rr: rr.get("executed_timestamp_seconds") or 0, # the or 0 to satisfy py3.8 typechecking
)
)[-1]
versions.add(latest_replica_version["replica_version_id"])
Expand All @@ -174,6 +194,7 @@ def fetch_versions_from_public_dashboard(): # type: () -> list[str] | None

return list(versions)


def main():
unique_versions = fetch_versions_from_rollout_dashboard()
if not unique_versions:
Expand All @@ -182,12 +203,13 @@ def main():

if not unique_versions:
# At this moment if we don't have any starting version we cannot proceed
raise RuntimeError(f"Didn't find any versions from:\n\t1. {ROLLOUT_DASHBOARD_ENDPOINT}\n\t2. {PUBLIC_DASHBOARD_ENDPOINT}")
raise RuntimeError(
f"Didn't find any versions from:\n\t1. {ROLLOUT_DASHBOARD_ENDPOINT}\n\t2. {PUBLIC_DASHBOARD_ENDPOINT}"
)
eprint(f"Will qualify, starting from versions: {json.dumps(unique_versions)}")
matrix = {
"version": unique_versions
}
matrix = {"version": unique_versions}
print(json.dumps(matrix))


if __name__ == "__main__":
main()
2 changes: 1 addition & 1 deletion .github/scripts/generate-ci.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ def main():
for file in os.listdir(workflows_source):
if file.endswith(".yaml") or file.endswith(".yml"):
input_file = workflows_source / file
output_file = workflows_output /file
output_file = workflows_output / file
transform_yaml(input_file, output_file)


Expand Down
23 changes: 10 additions & 13 deletions bazel/candid_integration_tests/candid_integration_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ def modify_file_contents(path, find, replacement):
f.write(new_contents)


def run_example_did_git_test(test_bin = "TEST_BIN"):
def run_example_did_git_test(test_bin="TEST_BIN"):
return subprocess.run(
[os.environ[test_bin]],
env={
Expand Down Expand Up @@ -71,7 +71,7 @@ def test_remove_variants_check_fails():


def test_adding_new_did_file_succeeds():
res = run_example_did_git_test(test_bin = "NEW_DID_TEST")
res = run_example_did_git_test(test_bin="NEW_DID_TEST")

message = "is a new file, skipping backwards compatibility check"
assert message in res.stdout.decode("utf-8")
Expand All @@ -95,7 +95,7 @@ def test_remove_required_field_from_input_check_fails():
replacement="// Blank.",
)

res = run_example_did_git_test(test_bin = "TEST_BIN_ALSO_REVERSE")
res = run_example_did_git_test(test_bin="TEST_BIN_ALSO_REVERSE")

error_message = "Method dance: func (DanceRequest) -> (DanceResponse) is not a subtype of func (DanceRequest/1) -> (DanceResponse/1)"
assert error_message in res.stderr.decode("utf-8")
Expand All @@ -110,7 +110,7 @@ def test_remove_required_field_from_output_check_fails():
replacement="// Blank.",
)

res = run_example_did_git_test(test_bin = "TEST_BIN_ALSO_REVERSE")
res = run_example_did_git_test(test_bin="TEST_BIN_ALSO_REVERSE")

error_message = "Method dance: func (DanceRequest) -> (DanceResponse) is not a subtype of func (DanceRequest/1) -> (DanceResponse/1)"
assert error_message in res.stderr.decode("utf-8")
Expand All @@ -124,7 +124,7 @@ def test_adding_a_required_field_to_input_check_fails():
replacement="new_required_int : int;",
)

res = run_example_did_git_test(test_bin = "TEST_BIN_ALSO_REVERSE")
res = run_example_did_git_test(test_bin="TEST_BIN_ALSO_REVERSE")

error_message = "Method dance: func (DanceRequest) -> (DanceResponse) is not a subtype of func (DanceRequest/1) -> (DanceResponse/1)"
assert error_message in res.stderr.decode("utf-8")
Expand All @@ -138,7 +138,7 @@ def test_adding_optional_field_succeeds():
replacement="new_optional_int : opt int;",
)

res = run_example_did_git_test(test_bin = "TEST_BIN_ALSO_REVERSE")
res = run_example_did_git_test(test_bin="TEST_BIN_ALSO_REVERSE")

message = "bazel/candid_integration_tests/example.did passed candid checks"
assert message in res.stdout.decode("utf-8")
Expand All @@ -152,7 +152,7 @@ def test_adding_optional_field_reverse_succeeds():
replacement="new_optional_int : opt int;",
)

res = run_example_did_git_test(test_bin = "TEST_BIN_ALSO_REVERSE")
res = run_example_did_git_test(test_bin="TEST_BIN_ALSO_REVERSE")

message = "bazel/candid_integration_tests/example.did passed candid checks"
assert message in res.stdout.decode("utf-8")
Expand All @@ -162,19 +162,16 @@ def test_adding_optional_field_reverse_succeeds():
def test_override_didc_checks_failing_check_succeeds():
modify_file_contents(path=did_file_path, find="happy; sad", replacement="happy")

res = run_example_did_git_test(test_bin = "TEST_BIN_ALSO_REVERSE")
res = run_example_did_git_test(test_bin="TEST_BIN_ALSO_REVERSE")

error_message = "Method do_stuff: func (Request) -> () is not a subtype of func (Request/1) -> ()"
assert error_message in res.stderr.decode("utf-8")
assert res.returncode == 101

with mock.patch.dict(os.environ, {"OVERRIDE_DIDC_CHECK": "true"}):
res = run_example_did_git_test(test_bin = "TEST_BIN_ALSO_REVERSE")
res = run_example_did_git_test(test_bin="TEST_BIN_ALSO_REVERSE")
assert res.returncode == 0
assert (
"Override didc check requested. Skipping didc_check."
in res.stdout.decode("utf-8")
)
assert "Override didc check requested. Skipping didc_check." in res.stdout.decode("utf-8")


if __name__ == "__main__":
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
stripped from both ends) contains the value that's supposed to be
associated with the aforementioned WASM custom section.
"""

import os
import subprocess
import unittest
Expand Down
1 change: 1 addition & 0 deletions ci/src/git_changes/git_changes.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
Find changed files in the `rs` directory:
python git_changes.py --changed-files-in-dirs rs
"""

import argparse
import functools
import logging
Expand Down
2 changes: 2 additions & 0 deletions ci/src/git_changes/git_changes_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
pytest
"""

import os
import shutil

Expand Down Expand Up @@ -68,6 +69,7 @@ def setup_repo(tmpdir, testcase, branch="feature_branch"):
if "CI_COMMIT_REF_NAME" in os.environ:
del os.environ["CI_COMMIT_REF_NAME"]


@pytest.mark.fails_on_merge_train
def test_change_one_file(tmpdir):
"""Tests that a commit has changed one crate."""
Expand Down
41 changes: 18 additions & 23 deletions ic-os/components/conformance_tests/check_file_references.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,14 +43,15 @@ def check_paths_in_source(source: str, partition_img_path: str) -> [str]:
return [f"File '{source}' is not a valid file."]

errors = []
allowlisted_unavailable_dependencies = ALLOWED_UNDECLARED_DEPENDENCIES.get(
source, {})
allowlisted_unavailable_dependencies = ALLOWED_UNDECLARED_DEPENDENCIES.get(source, {})
source_content = pathlib.Path(source).read_text()
for dependency in allowlisted_unavailable_dependencies:
if dependency not in source_content:
errors.append(f"{dependency} is in the allowlist of {source} even "
f"though {source} does not contain any references to "
f"it. Remove it from ALLOWED_UNDECLARED_DEPENDENCIES")
errors.append(
f"{dependency} is in the allowlist of {source} even "
f"though {source} does not contain any references to "
f"it. Remove it from ALLOWED_UNDECLARED_DEPENDENCIES"
)

for line_number, line in enumerate(source_content.splitlines(), start=1):
dependencies = re.findall(COMPONENT_FILE_PATTERN, line)
Expand All @@ -67,7 +68,8 @@ def check_paths_in_source(source: str, partition_img_path: str) -> [str]:
errors.append(
f"File '{source}' contains reference to "
f"unavailable file '{dependency}' on line {line_number}. "
f"Ensure that {dependency} is added to the image.")
f"Ensure that {dependency} is added to the image."
)
print()
return errors

Expand All @@ -78,43 +80,36 @@ def exists(path, partition_img_path):
`path`.
"""
debugfs_output = subprocess.run(
["/usr/sbin/debugfs", "-R", f"testi {path}", partition_img_path],
check=True,
capture_output=True).stdout
["/usr/sbin/debugfs", "-R", f"testi {path}", partition_img_path], check=True, capture_output=True
).stdout
return b"marked in use" in debugfs_output


def main():
parser = argparse.ArgumentParser(
description="Check files for allowed sources")
parser.add_argument('-f', '--files', required=True,
help="Comma-separated list of files to check")
parser.add_argument('-i', '--image', required=True,
help="Path to partition image")
parser = argparse.ArgumentParser(description="Check files for allowed sources")
parser.add_argument("-f", "--files", required=True, help="Comma-separated list of files to check")
parser.add_argument("-i", "--image", required=True, help="Path to partition image")
args = parser.parse_args()

files = args.files.split(',')
files = args.files.split(",")

tmpdir = tempfile.mkdtemp(prefix="icosbuild")
atexit.register(lambda: subprocess.run(["rm", "-rf", tmpdir], check=True))
partition_tar_path = os.path.join(tmpdir, "partition.tar")
subprocess.run(["zstd", "-q", "--threads=0", "-f", "-d", args.image, "-o",
partition_tar_path], check=True)
subprocess.run(["zstd", "-q", "--threads=0", "-f", "-d", args.image, "-o", partition_tar_path], check=True)

with tarfile.open(partition_tar_path) as tar:
partition_img = next(
(item for item in tar if item.path == "partition.img"), None)
partition_img = next((item for item in tar if item.path == "partition.img"), None)
if not partition_img:
return "partition.img not found in input image"
tar.extract(partition_img, path=tmpdir)

partition_img_path = os.path.join(tmpdir, "partition.img")

errors = list(itertools.chain.from_iterable(
check_paths_in_source(source, partition_img_path) for source in files))
errors = list(itertools.chain.from_iterable(check_paths_in_source(source, partition_img_path) for source in files))
if errors:
return "\nThe following problems were found:\n" + "\n".join(errors)


if __name__ == '__main__':
if __name__ == "__main__":
sys.exit(main())
Loading
Loading