Skip to content

Commit

Permalink
increase test coverage
Browse files Browse the repository at this point in the history
  • Loading branch information
quintenroets committed Apr 13, 2024
1 parent bccd449 commit f128d7f
Show file tree
Hide file tree
Showing 39 changed files with 83 additions and 264 deletions.
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
# Revnets
![Python version](https://img.shields.io/badge/python-3.10+-brightgreen)
![Operating system](https://img.shields.io/badge/os-linux%20%7c%20macOS-brightgreen)
![Coverage](https://img.shields.io/badge/coverage-92%25-brightgreen)
![Coverage](https://img.shields.io/badge/coverage-100%25-brightgreen)

Reverse engineer internal parameters of black box neural networks

Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ command_line = "-m pytest"

[tool.coverage.report]
precision = 4
fail_under = 80
fail_under = 100

[tool.mypy]
strict = true
Expand Down
22 changes: 3 additions & 19 deletions src/revnets/context/context.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,31 +3,15 @@
import torch
from package_utils.context import Context as Context_

from ..models import Config, HyperParameters, Options, Path
from ..models import Config, Options, Path


class Context(Context_[Options, Config, None]):
@property
def training(self) -> HyperParameters:
return (
self.config.reconstruction_training_debug
if self.config.debug
else self.config.reconstruction_training
)

@property
def number_of_epochs(self) -> int:
return self.training.epochs

@property
def batch_size(self) -> int:
return self.training.batch_size

@cached_property
def is_running_in_notebook(self) -> bool:
try:
get_ipython() # type: ignore[name-defined]
is_in_notebook = True
is_in_notebook = True # pragma: nocover
except NameError:
is_in_notebook = False
return is_in_notebook
Expand Down Expand Up @@ -56,7 +40,7 @@ def device(self) -> torch.device:

@property
def dtype(self) -> torch.dtype:
match self.config.precision:
match self.config.precision: # pragma: nocover
case 32:
dtype = torch.float32
case 64:
Expand Down
16 changes: 14 additions & 2 deletions src/revnets/data/mnist1d.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,12 +2,14 @@

import pickle
from dataclasses import dataclass
from typing import cast

import numpy as np
import requests
import torch
from numpy.typing import NDArray
from package_utils.dataclasses import SerializationMixin
from simple_classproperty import classproperty
from sklearn.preprocessing import StandardScaler
from torch.utils.data import TensorDataset

Expand Down Expand Up @@ -46,8 +48,18 @@ def extract_test(self) -> TensorDataset:

@dataclass
class DataModule(base.DataModule):
path: Path = Path.data / "mnist_1D"
raw_path: Path = Path.data / "mnist_1D.pkl"
@classmethod
@classproperty
def path(cls) -> Path:
path = Path.data / "mnist_1D"
return cast(Path, path)

@classmethod
@classproperty
def raw_path(cls) -> Path:
path = Path.data / "mnist_1D.pkl"
return cast(Path, path)

download_url: str = (
"https://github.com/greydanus/mnist1d/raw/master/mnist1d_data.pkl"
)
Expand Down
2 changes: 1 addition & 1 deletion src/revnets/evaluations/analysis/activations.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ def visualize_model_outputs(self, model: Module, name: str) -> None:
inputs = Reconstructor(self.pipeline).create_queries(self.n_inputs)
outputs = QueryDataSet(model).compute_targets(inputs)
if self.activation:
outputs = F.relu(outputs)
outputs = F.relu(outputs) # pragma: nocover
ActivationsVisualizer(outputs, name).run()


Expand Down
4 changes: 3 additions & 1 deletion src/revnets/evaluations/attack/network.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,8 +80,10 @@ def show_comparison(
) -> None:
length = len(inputs[0])
indices = np.flip(np.arange(length))
inputs_numpy = inputs.cpu().numpy()[:10]
adversarial_inputs_numpy = adversarial_inputs.cpu().numpy()

for image, adversarial in zip(inputs, adversarial_inputs):
for image, adversarial in zip(inputs_numpy, adversarial_inputs_numpy):
plt.plot(image, indices, color="green", label="original")
plt.plot(adversarial, indices, color="red", label="adversarial")
plt.legend()
Expand Down
2 changes: 1 addition & 1 deletion src/revnets/evaluations/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,4 +37,4 @@ def format_evaluation(
return result

def evaluate(self) -> Any:
raise NotImplementedError
raise NotImplementedError # pragma: nocover
12 changes: 7 additions & 5 deletions src/revnets/evaluations/evaluation.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,10 @@
from dataclasses import asdict, dataclass, fields
from typing import TYPE_CHECKING

import cli

if TYPE_CHECKING:
from ..utils.table import Table
from rich.table import Table # pragma: nocover


@dataclass
Expand Down Expand Up @@ -40,10 +42,9 @@ def dict(self) -> dict[str, str]:
def values(self) -> Iterator[str]:
yield from self.dict().values()

@property
def table(self) -> Table:
def create_table(self) -> Table:
# slow import
from ..utils.table import Table
from rich.table import Table

table = Table(show_lines=True)
table.add_column("Metric", style="cyan", max_width=20, overflow="fold")
Expand All @@ -54,4 +55,5 @@ def table(self) -> Table:
return table

def show(self) -> None:
self.table.show()
table = self.create_table()
cli.console.print(table)
2 changes: 1 addition & 1 deletion src/revnets/evaluations/weights/__init__.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
from . import layers_mae, mae, max_ae, mse, named_layers_mae, visualizer
from . import layers_mae, mae, max_ae, mse, named_layers_mae
2 changes: 1 addition & 1 deletion src/revnets/evaluations/weights/mse.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ def standardize_networks(self) -> bool:
standardized = self.has_same_architecture()
if standardized:
if context.config.evaluation.use_align:
align(self.original, self.reconstruction)
align(self.original, self.reconstruction) # pragma: nocover
else:
for network in (self.original, self.reconstruction):
Standardizer(network).standardize_scale()
Expand Down
2 changes: 1 addition & 1 deletion src/revnets/evaluations/weights/named_layers_mae.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,5 +30,5 @@ def format_evaluation(cls, value: dict[str, float], precision: int = 3) -> str:
}
formatted_value = json.dumps(values, indent=4)
else:
formatted_value = "/"
formatted_value = "/" # pragma: nocover
return formatted_value
9 changes: 7 additions & 2 deletions src/revnets/evaluations/weights/standardize/network.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,13 +8,15 @@
from revnets.models import InternalNeurons

from . import order, scale
from .utils import extract_linear_layer_weights

T = TypeVar("T")


@dataclass
class Standardizer:
model: Module
optimize_mae: bool = False

def run(self) -> None:
"""
Expand All @@ -23,12 +25,14 @@ def run(self) -> None:
self.standardize_scale()
for neurons in self.internal_neurons:
order.Standardizer(neurons).run()
if self.optimize_mae:
self.apply_optimize_mae()

def standardize_scale(self) -> None:
for neurons in self.internal_neurons:
scale.Standardizer(neurons).run()

def optimize_mae(self) -> None:
def apply_optimize_mae(self) -> None:
# optimize mae by distributing last layer scale factor over all layers
if all(neuron.has_norm_isomorphism for neuron in self.internal_neurons):
desired_scale = self.calculate_average_scale_per_layer()
Expand All @@ -37,7 +41,8 @@ def optimize_mae(self) -> None:
scale.Standardizer(neurons).run()

def calculate_average_scale_per_layer(self) -> float:
last_neuron_scales = self.internal_neurons[-1].outgoing.norm(dim=1, p=2)
weights = extract_linear_layer_weights(self.internal_neurons[-1].outgoing)
last_neuron_scales = weights.norm(dim=1, p=2)
last_neuron_scale = sum(last_neuron_scales) / len(last_neuron_scales)
num_internal_layers = len(self.internal_neurons)
average_scale = last_neuron_scale ** (1 / num_internal_layers)
Expand Down
85 changes: 0 additions & 85 deletions src/revnets/evaluations/weights/visualizer.py

This file was deleted.

2 changes: 1 addition & 1 deletion src/revnets/models/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from .config import Activation, Config, HyperParameters
from .config import Activation, Config, Evaluation, HyperParameters
from .experiment import Experiment
from .internal_neurons import InternalNeurons
from .options import Options
Expand Down
4 changes: 0 additions & 4 deletions src/revnets/models/experiment.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,10 +24,6 @@ def names(self) -> tuple[str, ...]:
pipeline = "_".join(self.pipeline)
return (reconstruction, pipeline, seeds)

@property
def name(self) -> str:
return "_".join(self.names)

@property
def title(self) -> str:
parts = self.generate_title_parts()
Expand Down
6 changes: 0 additions & 6 deletions src/revnets/models/path.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,12 +39,6 @@ def weights(cls: type[T]) -> T:
path = cls.assets / "weights"
return typing.cast(T, path)

@classmethod
@classproperty
def outputs(cls: type[T]) -> T:
path = cls.assets / "outputs"
return typing.cast(T, path)

@classmethod
@classproperty
def results(cls: type[T]) -> T:
Expand Down
12 changes: 0 additions & 12 deletions src/revnets/models/split.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,15 +6,3 @@ class Split(Enum):
valid = "valid"
test = "test"
train_val = "train_val"

@property
def is_train(self) -> bool:
return self == Split.train

@property
def is_valid(self) -> bool:
return self == Split.valid

@property
def is_train_or_valid(self) -> bool:
return self.is_train or self.is_valid
6 changes: 1 addition & 5 deletions src/revnets/networks/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,8 +34,4 @@ def create_network(self, seed: int | None = None) -> Sequential:
return Sequential(*layers)

def create_layers(self) -> Iterable[torch.nn.Module]:
raise NotImplementedError

@classmethod
def get_base_name(cls) -> str:
return NetworkFactory.__module__
raise NotImplementedError # pragma: nocover
4 changes: 2 additions & 2 deletions src/revnets/pipelines/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,10 @@

class Pipeline(NamedClass):
def create_target_network(self) -> Sequential:
raise NotImplementedError
raise NotImplementedError # pragma: nocover

def create_initialized_network(self) -> Sequential:
raise NotImplementedError
raise NotImplementedError # pragma: nocover

@classmethod
def get_base_name(cls) -> str:
Expand Down
2 changes: 1 addition & 1 deletion src/revnets/pipelines/mininet/mininet_untrained.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,4 +5,4 @@

class Pipeline(mininet.Pipeline):
def train(self, model: torch.nn.Module) -> None:
pass
pass # pragma: nocover
Loading

0 comments on commit f128d7f

Please sign in to comment.