author     ericmarin <maarin.eric@gmail.com>  2026-03-21 11:47:40 +0100
committer  ericmarin <maarin.eric@gmail.com>  2026-03-21 12:00:16 +0100
commit     e2abe9d9ec649b849cc39b516c1db1b4fa592003 (patch)
tree       d74dcc2e0691bb587d2a9a695639517d3aec9256 /xor/xor.py
parent     af4335cf47984576e7493a0eb6569d3f6ecc31c8 (diff)
download   vein-e2abe9d9ec649b849cc39b516c1db1b4fa592003.tar.gz
           vein-e2abe9d9ec649b849cc39b516c1db1b4fa592003.zip
created class
Diffstat (limited to 'xor/xor.py')
-rw-r--r--  xor/xor.py  |  40
1 file changed, 40 insertions(+), 0 deletions(-)
diff --git a/xor/xor.py b/xor/xor.py
new file mode 100644
index 0000000..ebc5477
--- /dev/null
+++ b/xor/xor.py
@@ -0,0 +1,40 @@
+import torch
+import torch.nn as nn
+import torch.onnx
+
+class xor_mlp(nn.Module):
+ def __init__(self, hidden_dim):
+ super().__init__()
+ self.layers = nn.Sequential(
+ nn.Linear(2, hidden_dim),
+ nn.ReLU(),
+ nn.Linear(hidden_dim, 1)
+ )
+ def forward(self, x):
+ return self.layers(x)
+
+def train_model(name: str, dim):
+ X = torch.tensor([[0,0], [0,1], [1,0], [1,1]], dtype=torch.float32)
+ Y = torch.tensor([[0], [1], [1], [0]], dtype=torch.float32)
+
+ net = xor_mlp(hidden_dim=dim)
+ loss_fn = nn.MSELoss()
+ optimizer = torch.optim.Adam(net.parameters(), lr=0.1)
+
+ print(f"Training {name}...")
+ for epoch in range(1000):
+ optimizer.zero_grad()
+ out = net(X)
+ loss = loss_fn(out, Y)
+ loss.backward()
+ optimizer.step()
+ if (epoch+1) % 100 == 0:
+ print(f" Epoch {epoch+1}, Loss: {loss.item():.4f}")
+ return net
+
+if __name__ == "__main__":
+ torch_net_a = train_model("Network A", 8).eval()
+ torch_net_b = train_model("Network B", 16).eval()
+
+ torch.onnx.export(torch_net_a, (torch.randn(1, 2),), "xor_a.onnx")
+ torch.onnx.export(torch_net_b, (torch.randn(1, 2),), "xor_b.onnx")
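
The script above exports the two trained networks to xor_a.onnx and xor_b.onnx. Below is a minimal sketch of how those files could be sanity-checked against the XOR truth table, assuming onnxruntime is installed (it is not part of this commit); since each model is exported with a fixed (1, 2) dummy input, rows are fed one at a time:

    import numpy as np
    import onnxruntime as ort

    # XOR truth table, same inputs the training script uses.
    X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=np.float32)

    for path in ("xor_a.onnx", "xor_b.onnx"):
        sess = ort.InferenceSession(path)
        input_name = sess.get_inputs()[0].name
        # The models were exported with a (1, 2) dummy input, so run one row per call.
        preds = [sess.run(None, {input_name: row.reshape(1, 2)})[0].item() for row in X]
        print(path, [round(p) for p in preds])  # expected: [0, 1, 1, 0]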