aboutsummaryrefslogtreecommitdiff
path: root/examples/mnist/mnist.py
diff options
context:
space:
mode:
authorericmarin <maarin.eric@gmail.com>2026-04-13 19:42:39 +0200
committerericmarin <maarin.eric@gmail.com>2026-04-13 21:38:16 +0200
commitfcbbc960f43137aa170b78ba0be2d89aec3bc766 (patch)
tree15e0249bf429888d9b64f19eb0c6e2d9af0901e4 /examples/mnist/mnist.py
parent8f4f24523235965cfa2041ed00cc40fc0b4bd367 (diff)
downloadvein-master.tar.gz
vein-master.zip
New ONNX ops and testsHEADmaster
New ops: Slice, Squeeze, Unsqueeze New tests based on papers: - Wide-to-Deep, Deep-to-Wide Transformation - Pruning of stably inactive (always negative) and active (always positive) ReLUs
Diffstat (limited to '')
-rw-r--r--examples/mnist/mnist.py5
1 file changed, 2 insertions, 3 deletions
diff --git a/examples/mnist/mnist.py b/examples/mnist/mnist.py
index 0a81878..a1706be 100644
--- a/examples/mnist/mnist.py
+++ b/examples/mnist/mnist.py
@@ -24,7 +24,7 @@ def train_model(name: str, dim):
optimizer = torch.optim.Adam(net.parameters(), lr=0.5e-4)
print(f"Training {name} ({dim} neurons)...")
- for epoch in range(100):
+ for epoch in range(10):
global loss
for data in trainloader:
inputs, targets = data
@@ -33,8 +33,7 @@ def train_model(name: str, dim):
loss = loss_fn(outputs, targets)
loss.backward()
optimizer.step()
- if (epoch + 1) % 10 == 0:
- print(f" Epoch {epoch+1}, Loss: {loss.item():.4f}")
+ print(f" Epoch {epoch+1}, Loss: {loss.item():.4f}")
return net
if __name__ == "__main__":