aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorericmarin <maarin.eric@gmail.com>2026-03-18 16:43:01 +0100
committerericmarin <maarin.eric@gmail.com>2026-03-18 17:47:15 +0100
commitaf4335cf47984576e7493a0eb6569d3f6ecc31c8 (patch)
tree31f6876cf3b4a14ea2bcb167fa301fcec69c02a5
parent0fca69965786db7deee2e976551b5156531e8ed5 (diff)
downloadvein-af4335cf47984576e7493a0eb6569d3f6ecc31c8.tar.gz
vein-af4335cf47984576e7493a0eb6569d3f6ecc31c8.zip
Replaced eval() with a proper parser. It now also handles multiple outputs
-rw-r--r--graph8
-rw-r--r--nneq/nneq.py40
-rw-r--r--notes.norg4
3 files changed, 46 insertions, 6 deletions
diff --git a/graph b/graph
new file mode 100644
index 0000000..c8b3a35
--- /dev/null
+++ b/graph
@@ -0,0 +1,8 @@
+Dup(Dup(Mul(v4, Concrete(-0.5899816751480103)), Mul(v6, Concrete(-0.6716732978820801))), Dup(Mul(v8, Concrete(1.088428258895874)), Mul(v10, Concrete(-1.7469627857208252)))) ~ Linear(Symbolic(X_0), 1.0, 0.0);
+Dup(Dup(Mul(v5, Concrete(-0.6210991740226746)), Mul(v7, Concrete(0.5620369911193848))), Dup(Mul(v9, Concrete(-1.8472174406051636)), Mul(v11, Concrete(1.2568973302841187)))) ~ Linear(Symbolic(X_1), 1.0, 0.0);
+Add(Add(ReLU(Mul(v3, Concrete(1.032384991645813))), v10), v11) ~ Concrete(-0.2882664203643799);
+Add(Add(ReLU(Mul(v2, Concrete(1.596307396888733))), v8), v9) ~ Concrete(-0.46198248863220215);
+Add(Add(ReLU(Mul(v1, Concrete(0.19464343786239624))), v6), v7) ~ Concrete(-0.5869930386543274);
+Add(Add(ReLU(Mul(v0, Concrete(0.5202669501304626))), v4), v5) ~ Concrete(-0.599494457244873);
+Add(Add(Add(Add(Materialize(result0), v0), v1), v2), v3) ~ Concrete(-6.825084134731065e-24);
+result0;
diff --git a/nneq/nneq.py b/nneq/nneq.py
index 4f46cbf..3f2106b 100644
--- a/nneq/nneq.py
+++ b/nneq/nneq.py
@@ -3,6 +3,7 @@ import re
import numpy as np
import subprocess
import onnx
+import ast
from onnx import numpy_helper
from typing import List, Dict
@@ -113,9 +114,15 @@ def inpla_export(model: onnx.ModelProto) -> inpla_str:
interactions[in_name][i].append(f"ReLU({nest_dups(interactions[out_name][i])})")
yield from []
+ def op_flatten(node):
+ out_name, in_name = node.output[0], node.input[0]
+ if out_name in interactions:
+ interactions[in_name] = interactions[out_name]
+ yield from []
+
graph, initializers, name_gen = model.graph, get_initializers(model.graph), NameGen()
interactions: Dict[str, List[List[str]]] = {}
- ops = {"Gemm": op_gemm, "Relu": op_relu}
+ ops = {"Gemm": op_gemm, "Relu": op_relu, "Flatten": op_flatten}
if graph.output:
out = graph.output[0].name
@@ -159,14 +166,39 @@ context = {
wrap = re.compile(r"Symbolic\((.*?)\)")
+
def z3_evaluate(model: z3_str):
- model = wrap.sub(r'Symbolic("\1")', model);
- return eval(model, context)
+ model = wrap.sub(r'Symbolic("\1")', model)
+
+ def evaluate_node(node: ast.AST):
+ if isinstance(node, ast.Expression):
+ return evaluate_node(node.body)
+ if isinstance(node, ast.Call):
+ if not isinstance(node.func, ast.Name):
+ raise ValueError(f"Unsupported function call type: {type(node.func)}")
+ func_name = node.func.id
+ func = context.get(func_name)
+ if not func:
+ raise ValueError(f"Unknown function: {func_name}")
+ return func(*[evaluate_node(arg) for arg in node.args])
+ if isinstance(node, ast.Constant):
+ return node.value
+ if isinstance(node, ast.UnaryOp) and isinstance(node.op, ast.USub):
+ val = evaluate_node(node.operand)
+ if hasattr(val, "__neg__"):
+ return -val
+ raise ValueError(f"Value does not support negation: {type(val)}")
+ raise ValueError(f"Unsupported AST node: {type(node)}")
+
+ lines = [line.strip() for line in model.splitlines() if line.strip()]
+ exprs = [evaluate_node(ast.parse(line, mode='eval')) for line in lines]
+
+ if not exprs: return None
+ return exprs[0] if len(exprs) == 1 else exprs
def net(model: onnx.ModelProto):
return z3_evaluate(inpla_run(inpla_export(model)))
-
def strict_equivalence(net_a, net_b):
solver = z3.Solver()
diff --git a/notes.norg b/notes.norg
index 4471531..96c0d6a 100644
--- a/notes.norg
+++ b/notes.norg
@@ -4,12 +4,12 @@ description: WIP tool to prove NNEQ using Interaction Nets as pre-processor fo m
authors: ericmarin
categories: research
created: 2026-03-14T09:21:24
-updated: 2026-03-18T13:18:15
+updated: 2026-03-18T17:46:56
version: 1.1.1
@end
* TODO
- - (?) Scalability %Maybe done? I have increased the limits of Inpla, but I have yet to test%
+ - (!) Scalability: %Impossible with Inpla, I would need to implement my own engine%
- (x) Soundness of translated NN: {:proof.norg:}[PROOF]
- ( ) Compatibility with other types of NN
 - ( ) Comparison with other tools ({https://github.com/NeuralNetworkVerification/Marabou}[Marabou], {https://github.com/guykatzz/ReluplexCav2017}[Reluplex])