From fcbbc960f43137aa170b78ba0be2d89aec3bc766 Mon Sep 17 00:00:00 2001 From: ericmarin Date: Mon, 13 Apr 2026 19:42:39 +0200 Subject: New ONNX ops and tests New ops: Slice, Squeeze, Unsqueeze New tests based on papers: - Wide-to-Deep, Deep-to-Wide Transformation - Pruning of stably inactive (always negative) and active (always positive) ReLUs --- examples/mnist/mnist.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) (limited to 'examples/mnist/mnist.py') diff --git a/examples/mnist/mnist.py b/examples/mnist/mnist.py index 0a81878..a1706be 100644 --- a/examples/mnist/mnist.py +++ b/examples/mnist/mnist.py @@ -24,7 +24,7 @@ def train_model(name: str, dim): optimizer = torch.optim.Adam(net.parameters(), lr=0.5e-4) print(f"Training {name} ({dim} neurons)...") - for epoch in range(100): + for epoch in range(10): global loss for data in trainloader: inputs, targets = data @@ -33,8 +33,7 @@ def train_model(name: str, dim): loss = loss_fn(outputs, targets) loss.backward() optimizer.step() - if (epoch + 1) % 10 == 0: - print(f" Epoch {epoch+1}, Loss: {loss.item():.4f}") + print(f" Epoch {epoch+1}, Loss: {loss.item():.4f}") return net if __name__ == "__main__": -- cgit v1.2.3