From b34f1f477d427eb1395af8146330b1c7a89ddd5c Mon Sep 17 00:00:00 2001
From: jloveric
Date: Mon, 18 Dec 2023 16:13:11 -0800
Subject: [PATCH] Upgrade dependencies

---
 examples/invariant_mnist.py | 2 +-
 examples/mnist.py           | 2 +-
 pyproject.toml              | 4 ++--
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/examples/invariant_mnist.py b/examples/invariant_mnist.py
index 0b010d3..63be539 100644
--- a/examples/invariant_mnist.py
+++ b/examples/invariant_mnist.py
@@ -124,7 +124,7 @@ def eval_step(self, batch, batch_idx, name):
         logits = self(x_new)
         loss = F.cross_entropy(logits, y)
         preds = torch.argmax(logits, dim=1)
-        acc = accuracy(preds, y)
+        acc = accuracy(preds, y, task='multiclass', num_classes=10)
         self.log(f"{name}_loss", loss, prog_bar=True)
         self.log(f"{name}_acc", acc, prog_bar=True)
 
diff --git a/examples/mnist.py b/examples/mnist.py
index 4e8d6d6..5d84fd6 100644
--- a/examples/mnist.py
+++ b/examples/mnist.py
@@ -160,7 +160,7 @@ def eval_step(self, batch, batch_idx, name):
         logits = self(x)
         loss = F.cross_entropy(logits, y)
         preds = torch.argmax(logits, dim=1)
-        acc = accuracy(preds, y)
+        acc = accuracy(preds, y, task='multiclass', num_classes=10)
 
         # Calling self.log will surface up scalars for you in TensorBoard
         self.log(f"{name}_loss", loss, prog_bar=True)
diff --git a/pyproject.toml b/pyproject.toml
index c9adbb6..0298469 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "high-order-layers-torch"
-version = "2.2.4"
+version = "2.2.5"
 description = "High order layers in pytorch"
 authors = ["jloverich "]
 license = "MIT"
@@ -10,11 +10,11 @@ readme = "README.md"
 python = ">=3.9"
 torch-optimizer = "^0.3.0"
 hydra-core = "^1.2.0"
-torchmetrics = "^0.10.2"
 pytorch-lightning = "^2.0.0"
 torch = ">=2.1.1"
 torchvision = "^0.16.1"
 lion-pytorch = "^0.1.2"
+torchmetrics = "^1.2.1"

 [tool.poetry.dev-dependencies]
 coverage = "^6.3.3"
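
Why the accuracy() calls changed: torchmetrics 1.x made the 'task' argument
to the functional metrics mandatory, so the 0.x-style call accuracy(preds, y)
is no longer accepted, and for multiclass classification num_classes must be
supplied as well (10 here, for the MNIST digits). Below is a minimal,
self-contained sketch of the new call; the batch size of 8 and the random
tensors are illustrative assumptions, not taken from the examples:

    import torch
    from torchmetrics.functional import accuracy

    logits = torch.randn(8, 10)          # 8 samples, 10 classes (MNIST digits)
    target = torch.randint(0, 10, (8,))  # ground-truth class indices
    preds = torch.argmax(logits, dim=1)  # predicted class per sample

    # torchmetrics >= 1.x: 'task' is required, and task='multiclass' also
    # needs num_classes; the bare 0.x call accuracy(preds, target) errors out.
    acc = accuracy(preds, target, task="multiclass", num_classes=10)
    print(acc)  # scalar tensor in [0, 1]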