Commit

Correct duplicated scaling (scale only once)
Correct kld_warmup in training_loop.py:
   - kld_w as a fraction of beta, no dependence on num_latent
mpielies committed Aug 7, 2024
1 parent 7045e57 commit 30bee60
Showing 2 changed files with 2 additions and 10 deletions.
src/move/tasks/encode_data.py: 0 additions & 6 deletions
@@ -59,12 +59,6 @@ def encode_data(config: DataConfig):
         )
         fig.savefig(fig_path)
 
-        values, mask_1d = preprocessing.scale(values)
-        names = names[mask_1d]
-        logger.debug(f"Columns with zero variance: {np.sum(~mask_1d)}")
-        io.dump_names(interim_data_path / f"{dataset_name}.txt", names)
-        np.save(interim_data_path / f"{dataset_name}.npy", values)
-
         # Plotting the value distribution for all continuous datasets:
         fig = plot_value_distributions(values)
         fig_path = str(output_path / f"Value_distribution_{dataset_name}.png")
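The deleted block above duplicated a scale-and-save step that already runs earlier in encode_data.py, so the values were scaled and written a second time; the fix keeps a single pass. Below is a minimal, self-contained sketch of that single pass in plain NumPy. The scale helper is a hypothetical stand-in for move.data.preprocessing.scale, assumed (from how its outputs are used in the diff) to z-score each column and return a mask that drops zero-variance columns.

    import numpy as np

    def scale(values: np.ndarray) -> tuple[np.ndarray, np.ndarray]:
        # Hypothetical stand-in: z-score each column, flag zero-variance ones.
        std = values.std(axis=0)
        mask_1d = std > 0                      # True for informative columns
        kept = values[:, mask_1d]
        scaled = (kept - kept.mean(axis=0)) / std[mask_1d]
        return scaled, mask_1d

    # One pass only: scale, drop constant columns, persist names and values.
    values = np.array([[1.0, 5.0, 2.0],
                       [2.0, 5.0, 4.0],
                       [3.0, 5.0, 6.0]])
    names = np.array(["feat_a", "feat_b", "feat_c"])   # illustrative names

    values, mask_1d = scale(values)
    names = names[mask_1d]
    print(f"Columns with zero variance: {np.sum(~mask_1d)}")  # -> 1 (feat_b)
    np.save("dataset.npy", values)                            # saved exactly once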
src/move/training/training_loop.py: 2 additions & 4 deletions
@@ -72,13 +72,11 @@ def training_loop(
     counter = 0
 
     kld_weight = 0.0
-    kld_rate = 20 / len(kld_warmup_steps)
-    kld_multiplier = 1 + kld_rate
 
+
     for epoch in range(1, num_epochs + 1):
        if epoch in kld_warmup_steps:
-            kld_weight = 0.05 * kld_multiplier
-            kld_multiplier += kld_rate
+            kld_weight += 1 / len(kld_warmup_steps)
 
         if epoch in batch_dilation_steps:
             train_dataloader = dilate_batch(train_dataloader)
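Net effect of the training_loop.py change: the KLD weight no longer grows via the 0.05 * kld_multiplier bookkeeping, which reached 0.05 * (1 + 20) = 1.05 after the last warmup step, but simply increases by 1 / len(kld_warmup_steps) at each warmup epoch, ramping linearly from 0 to exactly 1, i.e. to the full fraction of beta, with no dependence on num_latent as noted in the commit message. A short sketch of both schedules side by side, using an illustrative kld_warmup_steps list (the real steps come from MOVE's training configuration):

    # Comparison of the old and new KLD warmup schedules, using an
    # illustrative warmup list; the real values come from MOVE's config.
    kld_warmup_steps = [10, 20, 30, 40, 50]

    # Old schedule (removed): multiplicative bookkeeping around a 0.05 base.
    kld_weight_old = 0.0
    kld_rate = 20 / len(kld_warmup_steps)
    kld_multiplier = 1 + kld_rate

    # New schedule: linear ramp of 1 / len(kld_warmup_steps) per warmup epoch.
    kld_weight_new = 0.0

    for epoch in range(1, 51):
        if epoch in kld_warmup_steps:
            kld_weight_old = 0.05 * kld_multiplier
            kld_multiplier += kld_rate
            kld_weight_new += 1 / len(kld_warmup_steps)

    print(round(kld_weight_old, 2))  # 1.05 -> ends slightly above 1
    print(round(kld_weight_new, 2))  # 1.0  -> full KLD term, a fraction of beta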