From 9fb816496d392638fa6981e71800466d71434680 Mon Sep 17 00:00:00 2001 From: ericmarin Date: Wed, 8 Apr 2026 14:58:51 +0200 Subject: changed name --- .gitignore | 8 - LICENSE | 2 +- README.md | 6 +- docs/proof.md | 562 +++++++++++++++++++++++++++++++++++++++++++++++++++++ examples/verify.py | 4 +- nneq.py | 394 ------------------------------------- proof.md | 562 ----------------------------------------------------- proof.norg | 513 ------------------------------------------------ vein.py | 395 +++++++++++++++++++++++++++++++++++++ 9 files changed, 963 insertions(+), 1483 deletions(-) create mode 100644 docs/proof.md delete mode 100644 nneq.py delete mode 100644 proof.md delete mode 100644 proof.norg create mode 100644 vein.py diff --git a/.gitignore b/.gitignore index fd38b1c..6d0d093 100644 --- a/.gitignore +++ b/.gitignore @@ -5,18 +5,10 @@ /examples/fashion_mnist/fashion_mnist_a.onnx.data /examples/fashion_mnist/fashion_mnist_b.onnx /examples/fashion_mnist/fashion_mnist_b.onnx.data -/examples/fashion_mnist/script_argmax.fish -/examples/fashion_mnist/script_epsilon.fish -/examples/fashion_mnist/script_strict.fish /examples/xor/xor_a.onnx /examples/xor/xor_a.onnx.data /examples/xor/xor_b.onnx /examples/xor/xor_b.onnx.data -/examples/fashion_mnist/FashionMNIST/raw -/notes.norg -/examples/ACASXU/script_argmax.fish -/examples/ACASXU/script_epsilon.fish -/examples/ACASXU/script_strict.fish /examples/iris/iris_b.onnx /examples/iris/iris_b.onnx.data /examples/iris/iris_a.onnx.data diff --git a/LICENSE b/LICENSE index 0307b44..16d9c93 100644 --- a/LICENSE +++ b/LICENSE @@ -629,7 +629,7 @@ to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. 
- Neural Network Equivalence (NNEQ) + VErification via Interaction Nets (VEIN) Copyright (C) 2026 Eric Marin This program is free software: you can redistribute it and/or modify diff --git a/README.md b/README.md index 3f9c16d..be2b25e 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,12 @@ -# Neural Network Equivalence (NNEQ) +# VEIN: VErification via Interaction Nets Requires my [fork of Inpla](https://github.com/eric-marin/inpla). - Clone the fork - **make** the executable -- Copy the **inpla** executable in NNEQ/ +- Copy the **inpla** executable in **vein/** -[Soundness proof](./proof.md) +[Soundness proof](./docs/proof.md) ## License diff --git a/docs/proof.md b/docs/proof.md new file mode 100644 index 0000000..07717f9 --- /dev/null +++ b/docs/proof.md @@ -0,0 +1,562 @@ +# Soundness Proof +## Mathematical Definitions +![Linear(x, q, r) ~ out => out = q*x + r](https://latex.codecogs.com/svg.image?Linear(x,q,r)\sim&space;out\Rightarrow&space;out=q*x+r&space;) +![Concrete(k) ~ out => out = ](https://latex.codecogs.com/svg.image?Concrete(k)\sim&space;out\Rightarrow&space;out=k&space;) +![Add(out, b) ~ a => out = a + b](https://latex.codecogs.com/svg.image?Add(out,b)\sim&space;a\Rightarrow&space;out=a+b&space;) +![AddCheckLinear(out, x, q, r) ~ b => out = q*x + (r + b)](https://latex.codecogs.com/svg.image?AddCheckLinear(out,x,q,r)\sim&space;b\Rightarrow&space;out=q*x+(r+b)) +![AddCheckConcrete(out, k) ~ b => out = k + b](https://latex.codecogs.com/svg.image?AddCheckConcrete(out,k)\sim&space;b\Rightarrow&space;out=k+b&space;) +![Mul(out, b) ~ a => out = a * b](https://latex.codecogs.com/svg.image?Mul(out,b)\sim&space;a\Rightarrow&space;out=a*b&space;) +![MulCheckLinear(out, x, q, r) ~ b => out = q*b*x + r*b](https://latex.codecogs.com/svg.image?MulCheckLinear(out,x,q,r)\sim&space;b\Rightarrow&space;out=q*b*x+r*b&space;) +![MulCheckConcrete(out, k) ~ b => out = 
k*b](https://latex.codecogs.com/svg.image?MulCheckConcrete(out,k)\sim&space;b\Rightarrow&space;out=k*b&space;) +![ReLU(out) ~ x => out = IF (x > 0) THEN x ELSE 0](https://latex.codecogs.com/svg.image?ReLU(out)\sim&space;x\Rightarrow&space;out=\text{IF}\;(x>0)\;\text{THEN}\;x\;\text{ELSE}\;0) +![Materialize(out) ~ x => out = x](https://latex.codecogs.com/svg.image?Materialize(out)\sim&space;x\Rightarrow&space;out=x&space;) + +## Soundness of Translation +### ReLU +ONNX ReLU node is defined as: +![Y = X if X > 0 else 0](https://latex.codecogs.com/svg.image?Y=X\;if\;X>0\;else\;0&space;) + +The translation defines the interactions: +![x_i ~ ReLU(y_i)](https://latex.codecogs.com/svg.image?x_i\sim&space;ReLU(y_i)) + +By definition this interaction is equal to: +![y_i = IF (x_i > 0) THEN x_i ELSE 0](https://latex.codecogs.com/svg.image?y_i=\text{IF}\;(x_i>0)\;\text{THEN}\;x_i\;\text{ELSE}\;0) + +### Gemm +ONNX Gemm node is defined as: +![Y = alpha * A * B + beta * C](https://latex.codecogs.com/svg.image?Y=\alpha\cdot&space;A\cdot&space;B+\beta\cdot&space;C&space;) + +The translation defines the interactions: +![a_i ~ Mul(v_i, Concrete(alpha * b_i))](https://latex.codecogs.com/svg.image?a_i\sim&space;Mul(v_i,Concrete(\alpha*b_i))) +![Add(...(Add(y_i, v_1), ...), v_n) ~ Concrete(beta * c_i)](https://latex.codecogs.com/svg.image?Add(...(Add(y_i,v_1),...),v_n)\sim&space;Concrete(\beta*c_i)) + +By definition this interaction is equal to: +![v_i = alpha * a_i * b_i](https://latex.codecogs.com/svg.image?v_i=\alpha*a_i*b_i&space;) +![y_i = v_1 + v_2 + ... + v_n + beta * c_i](https://latex.codecogs.com/svg.image?y_i=v_1+v_2+...+v_n+\beta*c_i&space;) + +By grouping the operations we get: +![Y = alpha * A * B + beta * C](https://latex.codecogs.com/svg.image?Y=\alpha\cdot&space;A\cdot&space;B+\beta\cdot&space;C&space;) + +### Flatten +Just identity mapping because the wires are always Flatten. 
+![out_i ~ in_i](https://latex.codecogs.com/svg.image?out_i\sim&space;in_i) + +### MatMul +Equal to Gemm with ![alpha=1](https://latex.codecogs.com/svg.image?\inline&space;\alpha=1), ![beta=0](https://latex.codecogs.com/svg.image?\inline&space;\beta=0) and ![C=0](https://latex.codecogs.com/svg.image?\inline&space;&space;C=0). + +### Reshape +Just identity mapping because the wires always Flatten. +![out_i ~ in_i](https://latex.codecogs.com/svg.image?out_i\sim&space;in_i) + +### Add +ONNX Add node is defined as: +![C = A + B](https://latex.codecogs.com/svg.image?&space;C=A+B) + +The translation defines the interactions: +![Add(c_i, b_i) ~ a_i](https://latex.codecogs.com/svg.image?Add(c_i,b_i)\sim&space;a_i) + +By definition this interaction is equal to: +![c_i = a_i + b_i](https://latex.codecogs.com/svg.image?c_i=a_i+b_i) + +By grouping the operations we get: +![C = A + B](https://latex.codecogs.com/svg.image?C=A+B) + +### Sub +ONNX Sub node is defined as: +![C = A - B](https://latex.codecogs.com/svg.image?C=A-B) + +The translation defines the interactions: +![Add(c_i, neg_b_i) ~ a_i](https://latex.codecogs.com/svg.image?Add(c_i,neg_i)\sim&space;a_i) +![Mul(neg_b_i, Concrete(-1)) ~ b_i](https://latex.codecogs.com/svg.image?Mul(neg_i,Concrete(-1))\sim&space;b_i) + +By definition this interaction is equal to: +![c_i = a_i + neg_b_i](https://latex.codecogs.com/svg.image?c_i=a_i+neg_i) +![neg_b_i = -1 * b_i](https://latex.codecogs.com/svg.image?neg_i=-1*b_i) + +By grouping the operations we get: +![C = A - B](https://latex.codecogs.com/svg.image?C=A-B) + +## Soundness of Interaction Rules +### Materialize +The Materialize agent transforms a Linear agent into a tree of explicit mathematical operations +that are used as final representation for the solver. 
+In the Python module the terms are defined as: +```python +def TermAdd(a, b): + return a + b +def TermMul(a, b): + return a * b +def TermReLU(x): + return z3.If(x > 0, x, 0) +``` + +#### Linear >< Materialize +![Linear(x, q, r) >< Materialize(out) => (1), (2), (3), (4), (5)](https://latex.codecogs.com/svg.image?\inline&space;&space;Linear(x,q,r)> out ~ Concrete(r), x ~ Eraser](https://latex.codecogs.com/svg.image?q=0\Rightarrow&space;out\sim&space;Concrete(r),x\sim&space;Eraser) + +RHS: +![out = r](https://latex.codecogs.com/svg.image?out=r) + +EQUIVALENCE: +![0*x + r = r => r = r](https://latex.codecogs.com/svg.image?0*x+r=r\Rightarrow&space;r=r) + +##### Case 2: +![q = 1, r = 0 => out ~ x](https://latex.codecogs.com/svg.image?q=1,r=0\Rightarrow&space;out~x) + +RHS: +![x = out](https://latex.codecogs.com/svg.image?x=out) +![out = x](https://latex.codecogs.com/svg.image?out=x) + +EQUIVALENCE: +![1*x + 0 = x => x = x](https://latex.codecogs.com/svg.image?1*x+0=x\Rightarrow&space;x=x) + +##### Case 3: +![q = 1 => out ~ TermAdd(x, Concrete(r))](https://latex.codecogs.com/svg.image?q=1\Rightarrow&space;out\sim&space;TermAdd(x,Concrete(r))) + +RHS: +![out = x + r](https://latex.codecogs.com/svg.image?out=x+r) + +EQUIVALENCE: +![1*x + r = x + r => x + r = x + r](https://latex.codecogs.com/svg.image?1*x+r=x+r\Rightarrow&space;x+r=x+r) + +##### Case 4: +![r = 0 => out ~ TermMul(Concrete(q), x)](https://latex.codecogs.com/svg.image?r=0\Rightarrow&space;out\sim&space;TermMul(Concrete(q),x)) + +RHS: +![out = q*x](https://latex.codecogs.com/svg.image?out=q*x) + +EQUIVALENCE: +![q*x + 0 = q*x => q*x = q*x](https://latex.codecogs.com/svg.image?q*x+0=q*x\Rightarrow&space;q*x=q*x) + +##### Case 5: +![otherwise => out ~ TermAdd(TermMul(Concrete(q), x), Concrete(r))](https://latex.codecogs.com/svg.image?otherwise\Rightarrow&space;out\sim&space;TermAdd(TermMul(Concrete(q),x),Concrete(r))) + +RHS: +![out = q*x + r](https://latex.codecogs.com/svg.image?out=q*x+r) + +EQUIVALENCE: 
+![q*x + r = q*x + r](https://latex.codecogs.com/svg.image?q*x+r=q*x+r) + +#### Concrete >< Materialize +![Concrete(k) >< Materialize(out) => out ~ Concrete(k)](https://latex.codecogs.com/svg.image?Concrete(k)>< Add +![Linear(x, q, r) >< Add(out, b) => b ~ AddCheckLinear(out, x, q, r)](https://latex.codecogs.com/svg.image?Linear(x,q,r)> q*x + (r + b) = q*x + (r + b)](https://latex.codecogs.com/svg.image?q*x+r+b=q*x+(r+b)\Rightarrow&space;q*x+(r+b)=q*x+(r+b)) + +#### Concrete >< Add +![Concrete(k) >< Add(out, b) => (1), (2)](https://latex.codecogs.com/svg.image?Concrete(k)> out ~ b](https://latex.codecogs.com/svg.image?k=0\Rightarrow&space;out\sim&space;b) + +RHS: +![out = b](https://latex.codecogs.com/svg.image?out=b) + +EQUIVALENCE: +![0 + b = b => b = b](https://latex.codecogs.com/svg.image?0+b=b\Rightarrow&space;b=b) + +##### Case 2: +![otherwise => b ~ AddCheckConcrete(out, k)](https://latex.codecogs.com/svg.image?otherwise\Rightarrow&space;b\sim&space;AddCheckConcrete(out,k)) + +RHS: +![out = k + b](https://latex.codecogs.com/svg.image?out=k+b) + +EQUIVALENCE: +![k + b = k + b](https://latex.codecogs.com/svg.image?k+b=k+b) + +#### Linear >< AddCheckLinear +![Linear(y, s, t) >< AddCheckLinear(out, x, q, r) => (1), (2), (3), (4)](https://latex.codecogs.com/svg.image?Linear(y,s,t)> out ~ Concrete(0), x ~ Eraser, y ~ Eraser](https://latex.codecogs.com/svg.image?q,r,s,t=0\Rightarrow&space;out\sim&space;Concrete(0),x\sim&space;Eraser,y\sim&space;Eraser) + +RHS: +![out = 0](https://latex.codecogs.com/svg.image?out=0) + +EQUIVALENCE: +![0*x + (0 + 0*y + 0) = 0 => 0 = 0](https://latex.codecogs.com/svg.image?0*x+(0+0*y+0)=0\Rightarrow&space;0=0) + +##### Case 2: +![s,t = 0 => out ~ Linear(x, q, r), y ~ Eraser](https://latex.codecogs.com/svg.image?s,t=0\Rightarrow&space;out\sim&space;Linear(x,q,r),y\sim&space;Eraser) + +RHS: +![out = q*x + r](https://latex.codecogs.com/svg.image?out=q*x+r) + +EQUIVALENCE: +![q*x + (r + 0*y + 0) = q*x + r => q*x + r = q*x + 
r](https://latex.codecogs.com/svg.image?q*x+(r+0*y+0)=q*x+r\Rightarrow&space;q*x+r=q*x+r) + +##### Case 3: +![q, r = 0 => out ~ Linear(y, s, t), x ~ Eraser](https://latex.codecogs.com/svg.image?q,r=0\Rightarrow&space;out\sim&space;Linear(y,s,t),x\sim&space;Eraser) + +RHS: +![out = s*y + t](https://latex.codecogs.com/svg.image?out=s*y+t) + +EQUIVALENCE: +![0*x + (0 + s*y + t) = s*y + t => s*y + t = s*y + t](https://latex.codecogs.com/svg.image?0*x+(0+s*y+t)=s*y+t\Rightarrow&space;s*y+t=s*y+t) + +##### Case 4: +![otherwise => Linear(x, q, r) ~ Materialize(out_x), Linear(y, s, t) ~ Materialize(out_y), out ~ Linear(TermAdd(out_x, out_y), 1, 0)](https://latex.codecogs.com/svg.image?otherwise\Rightarrow&space;Linear(x,q,r)\sim&space;Materialize(out_x),Linear(y,s,t)\sim&space;Materialize(out_y),out\sim&space;Linear(TermAdd(out_x,out_y),1,0)) + +RHS: +![Linear(x, q, r) ~ wire_1](https://latex.codecogs.com/svg.image?Linear(x,q,r)\sim&space;wire_1) +![Materialize(out_x) ~ wire_1](https://latex.codecogs.com/svg.image?Materialize(out_x)\sim&space;wire_1) +![q*x + r = wire_1](https://latex.codecogs.com/svg.image?q*x+r=wire_1) +![out_x = wire_1](https://latex.codecogs.com/svg.image?out_x=wire_1) +![Linear(y, s, t) ~ wire_2](https://latex.codecogs.com/svg.image?Linear(y,s,t)\sim&space;wire_2) +![Materialize(out_y) ~ wire_2](https://latex.codecogs.com/svg.image?Materialize(out_y)\sim&space;wire_2) +![s*y + t = wire_2](https://latex.codecogs.com/svg.image?s*y+t=wire_2) +![out_y = wire_2](https://latex.codecogs.com/svg.image?out_y=wire_2) +![out = 1*TermAdd(out_x, out_y) + 0](https://latex.codecogs.com/svg.image?out=1*TermAdd(out_x,out_y)+0) +Because `TermAdd(a, b)` is defined as `a+b`: +![out = 1*(q*x + r + s*y + t) + 0](https://latex.codecogs.com/svg.image?out=1*(q*x+r+s*y+t)+0) + +EQUIVALENCE: +![q*x + (r + s*y + t) = 1*(q*x + r + s*y + t) + 0 => q*x + r + s*y + t = q*x + r + s*y + 
t](https://latex.codecogs.com/svg.image?q*x+(r+s*y+t)=1*(q*x+r+s*y+t)+0\Rightarrow&space;q*x+r+s*y+t=q*x+r+s*y+t) + +#### Concrete >< AddCheckLinear +![Concrete(j) >< AddCheckLinear(out, x, q, r) => out ~ Linear(x, q, r + j)](https://latex.codecogs.com/svg.image?Concrete(j)>< AddCheckConcrete +![Linear(y, s, t) >< AddCheckConcrete(out, k) => out ~ Linear(y, s, t + k)](https://latex.codecogs.com/svg.image?Linear(y,s,t)> s*y + (t + k) = s*y + (t + k)](https://latex.codecogs.com/svg.image?k+s*y+t=s*y+(t+k)\Rightarrow&space;s*y+(t+k)=s*y+(t+k)) + +#### Concrete >< AddCheckConcrete +![Concrete(j) >< AddCheckConcrete(out, k) => (1), (2)](https://latex.codecogs.com/svg.image?Concrete(j)> out ~ Concrete(k)](https://latex.codecogs.com/svg.image?j=0\Rightarrow&space;out\sim&space;Concrete(k)) + +RHS: +![out = k](https://latex.codecogs.com/svg.image?out=k) + +EQUIVALENCE: +![k + 0 = k => k = k](https://latex.codecogs.com/svg.image?k+0=k\Rightarrow&space;k=k) + +##### Case 2: +![otherwise => out ~ Concrete(k + j)](https://latex.codecogs.com/svg.image?otherwise\Rightarrow&space;out\sim&space;Concrete(k+j)) + +RHS: +![out = k + j](https://latex.codecogs.com/svg.image?out=k+j) + +EQUIVALENCE: +![k + j = k + j](https://latex.codecogs.com/svg.image?k+j=k+j) + +### Mul +#### Linear >< Mul +![Linear(x, q, r) >< Mul(out, b) => b ~ MulCheckLinear(out, x, q, r)](https://latex.codecogs.com/svg.image?Linear(x,q,r)> q*b*x + r*b = q*b*x + r*b](https://latex.codecogs.com/svg.image?(q*x+r)*b=q*b*x+r*b\Rightarrow&space;q*b*x+r*b=q*b*x+r*b) + +#### Concrete >< Mul +![Concrete(k) >< Mul(out, b) => (1), (2), (3)](https://latex.codecogs.com/svg.image?Concrete(k)> out ~ Concrete(0), b ~ Eraser](https://latex.codecogs.com/svg.image?k=0\Rightarrow&space;out\sim&space;Concrete(0),b\sim&space;Eraser) + +RHS: +![out = 0](https://latex.codecogs.com/svg.image?out=0) + +EQUIVALENCE: +![0 * b = 0 => 0 = 0](https://latex.codecogs.com/svg.image?0*b=0\Rightarrow&space;0=0) + +##### Case 2: +![k = 1 => out ~ 
b](https://latex.codecogs.com/svg.image?k=1\Rightarrow&space;out\sim&space;b) + +RHS: +![out = b](https://latex.codecogs.com/svg.image?out=b) + +EQUIVALENCE: +![1 * b = b => b = b](https://latex.codecogs.com/svg.image?1*b=b\Rightarrow&space;b=b) + +##### Case 3: +![otherwise => b ~ MulCheckConcrete(out, k)](https://latex.codecogs.com/svg.image?otherwise\Rightarrow&space;b\sim&space;MulCheckConcrete(out,k)) + +RHS: +![out = k * b](https://latex.codecogs.com/svg.image?out=k*b) + +EQUIVALENCE: +![k * b = k * b](https://latex.codecogs.com/svg.image?k*b=k*b) + +#### Linear >< MulCheckLinear +![Linear(y, s, t) >< MulCheckLinear(out, x, q, r) => (1), (2)](https://latex.codecogs.com/svg.image?Linear(y,s,t)> x ~ Eraser, y ~ Eraser, out ~ Concrete(0)](https://latex.codecogs.com/svg.image?(q,r=0)\lor(s,t=0)\Rightarrow&space;x\sim&space;Eraser,y\sim&space;Eraser,out\sim&space;Concrete(0)) + +RHS: +![out = 0](https://latex.codecogs.com/svg.image?out=0) + +EQUIVALENCE: +![0*(s*y + t)*x + 0*(s*y + t) = 0 => 0 = 0](https://latex.codecogs.com/svg.image?0*(s*y+t)*x+0*(s*y+t)=0\Rightarrow&space;0=0) +![or](https://latex.codecogs.com/svg.image?\lor) +![q*(0*y + 0)*x + r*(0*y + 0) = 0 => 0 = 0](https://latex.codecogs.com/svg.image?q*(0*y+0)*x+r*(0*y+0)=0\Rightarrow&space;0=0) + +##### Case 2: +![otherwise => Linear(x, q, r) ~ Materialize(out_x), Linear(y, s, t) ~ Materialize(out_y), out ~ Linear(TermMul(out_x, out_y), 1, 0)](https://latex.codecogs.com/svg.image?otherwise\Rightarrow&space;Linear(x,q,r)\sim&space;Materialize(out_x),Linear(y,s,t)\sim&space;Materialize(out_y),out\sim&space;Linear(TermMul(out_x,out_y),1,0)) + +RHS: +![Linear(x, q, r) ~ wire_1](https://latex.codecogs.com/svg.image?Linear(x,q,r)\sim&space;wire_1) +![Materialize(out_x) ~ wire_1](https://latex.codecogs.com/svg.image?Materialize(out_x)\sim&space;wire_1) +![q*x + r = wire_1](https://latex.codecogs.com/svg.image?q*x+r=wire_1) +![out_x = wire_1](https://latex.codecogs.com/svg.image?out_x=wire_1) +![Linear(y, s, t) 
~ wire_2](https://latex.codecogs.com/svg.image?Linear(y,s,t)\sim&space;wire_2) +![Materialize(out_y) ~ wire_2](https://latex.codecogs.com/svg.image?Materialize(out_y)\sim&space;wire_2) +![s*y + t = wire_2](https://latex.codecogs.com/svg.image?s*y+t=wire_2) +![out_y = wire_2](https://latex.codecogs.com/svg.image?out_y=wire_2) +![out = 1*TermMul(out_x, out_y) + 0](https://latex.codecogs.com/svg.image?out=1*TermMul(out_x,out_y)+0) +Because `TermMul(a, b)` is defined as `a*b`: +![out = 1*(q*x + r)*(s*y + t) + 0](https://latex.codecogs.com/svg.image?out=1*(q*x+r)*(s*y+t)+0) + +EQUIVALENCE: +![q*(s*y + t)*x + r*(s*y + t) = 1*(q*x + r)*(s*y + t) => +q*(s*y + t)*x + r*(s*y + t) = (q*x + r)*(s*y + t) => +q*(s*y + t)*x + r*(s*y + t) = q*(s*y + t)*x + r*(s*y + t)](https://latex.codecogs.com/svg.image?q*(s*y+t)*x+r*(s*y+t)=1*(q*x+r)*(s*y+t)\Rightarrow&space;q*(s*y+t)*x+r*(s*y+t)=(q*x+r)*(s*y+t)\Rightarrow&space;q*(s*y+t)*x+r*(s*y+t)=q*(s*y+t)*x+r*(s*y+t)) + + +#### Concrete >< MulCheckLinear +![Concrete(j) >< MulCheckLinear(out, x, q, r) => out ~ Linear(x, q * j, r * j)](https://latex.codecogs.com/svg.image?Concrete(j)>< MulCheckConcrete +![Linear(y, s, t) >< MulCheckConcrete(out, k) => out ~ Linear(y, s * k, t * k)](https://latex.codecogs.com/svg.image?Linear(y,s,t)> s*k*y + t*k = s*k*y + t*k](https://latex.codecogs.com/svg.image?k*(s*y+t)=s*k*y+t*k\Rightarrow&space;s*k*y+t*k=s*k*y+t*k) + + +#### Concrete >< MulCheckConcrete +![Concrete(j) >< MulCheckConcrete(out, k) => (1), (2), (3)](https://latex.codecogs.com/svg.image?Concrete(j)> out ~ Concrete(0)](https://latex.codecogs.com/svg.image?j=0\Rightarrow&space;out\sim&space;Concrete(0)) + +RHS: +![out = 0](https://latex.codecogs.com/svg.image?out=0) + +EQUIVALENCE: +![k * 0 = 0 => 0 = 0](https://latex.codecogs.com/svg.image?k*0=0\Rightarrow&space;0=0) + +##### Case 2: +![j = 1 => out ~ Concrete(k)](https://latex.codecogs.com/svg.image?j=1\Rightarrow&space;out\sim&space;Concrete(k)) + +RHS: +![out = 
k](https://latex.codecogs.com/svg.image?out=k) + +EQUIVALENCE: +![k * 1 = k => k = k](https://latex.codecogs.com/svg.image?k*1=k\Rightarrow&space;k=k) + +##### Case 3: +![otherwise => out ~ Concrete(k * j)](https://latex.codecogs.com/svg.image?otherwise\Rightarrow&space;out\sim&space;Concrete(k*j)) + +RHS: +![out = k * j](https://latex.codecogs.com/svg.image?out=k*j) + +EQUIVALENCE: +![k * j = k * j](https://latex.codecogs.com/svg.image?k*j=k*j) + +### ReLU +#### Linear >< ReLU +![Linear(x, q, r) >< ReLU(out) => Linear(x, q, r) ~ Materialize(out_x), out ~ Linear(TermReLU(out_x), 1, 0)](https://latex.codecogs.com/svg.image?Linear(x,q,r)> 0 THEN wire ELSE 0](https://latex.codecogs.com/svg.image?out=\text{IF}\;wire>0\;\text{THEN}\;wire\;\text{ELSE}\;0) +![out = IF (q*x + r) > 0 THEN (q*x + r) ELSE 0](https://latex.codecogs.com/svg.image?out=\text{IF}\;(q*x+r)>0\;\text{THEN}\;(q*x+r)\;\text{ELSE}\;0) + +RHS: +![Linear(x, q, r) ~ wire](https://latex.codecogs.com/svg.image?Linear(x,q,r)\sim&space;wire) +![Materialize(out_x) ~ wire](https://latex.codecogs.com/svg.image?Materialize(out_x)\sim&space;wire) +![q*x + r = wire](https://latex.codecogs.com/svg.image?q*x+r=wire) +![out_x = wire](https://latex.codecogs.com/svg.image?out_x=wire) +![out = 1*TermReLU(out_x) + 0](https://latex.codecogs.com/svg.image?out=1*TermReLU(out_x)+0) +Because `TermReLU(x)` is defined as `z3.If(x > 0, x, 0)`: +![out = 1*(IF (q*x + r) > 0 THEN (q*x + r) ELSE 0) + 0](https://latex.codecogs.com/svg.image?out=1*(\text{IF}\;(q*x+r)>0\;\text{THEN}\;(q*x+r)\;\text{ELSE}\;0)+0) + +EQUIVALENCE: +![IF (q*x + r) > 0 THEN (q*x + r) ELSE 0 = 1*(IF (q*x + r) > 0 THEN (q*x + r) ELSE 0) + 0 => +IF (q*x + r) > 0 THEN (q*x + r) ELSE 0 = IF (q*x + r) > 0 THEN (q*x + r) ELSE 
0](https://latex.codecogs.com/svg.image?\text{IF}\;(q*x+r)>0\;\text{THEN}\;(q*x+r)\;\text{ELSE}\;0=1*(\text{IF}\;(q*x+r)>0\;\text{THEN}\;(q*x+r)\;\text{ELSE}\;0)+0\Rightarrow\text{IF}\;(q*x+r)>0\;\text{THEN}\;(q*x+r)\;\text{ELSE}\;0=\text{IF}\;(q*x+r)>0\;\text{THEN}\;(q*x+r)\;\text{ELSE}\;0) + + +#### Concrete >< ReLU +![Concrete(k) >< ReLU(out) => (1), (2)](https://latex.codecogs.com/svg.image?Concrete(k)> 0 THEN wire ELSE 0](https://latex.codecogs.com/svg.image?out=\text{IF}\;wire>0\;\text{THEN}\;wire\;\text{ELSE}\;0) +![out = IF k > 0 THEN k ELSE 0](https://latex.codecogs.com/svg.image?out=\text{IF}\;k>0\;\text{THEN}\;k\;\text{ELSE}\;0) + +##### Case 1: +![k > 0 => out ~ Concrete(k)](https://latex.codecogs.com/svg.image?k>0\Rightarrow&space;out\sim&space;Concrete(k)) + +RHS: +![out = k](https://latex.codecogs.com/svg.image?out=k) + +EQUIVALENCE: +![IF true THEN k ELSE 0 = k => k = k](https://latex.codecogs.com/svg.image?\text{IF}\;true\;\text{THEN}\;k\;\text{ELSE}\;0=k\Rightarrow&space;k=k) + +##### Case 2: +![k <= 0 => out ~ Concrete(0)](https://latex.codecogs.com/svg.image?k\leq&space;0\Rightarrow&space;out\sim&space;Concrete(0)) + +RHS: +![out = 0](https://latex.codecogs.com/svg.image?out=0) + +EQUIVALENCE: +![IF false THEN k ELSE 0 = 0 => 0 = 0](https://latex.codecogs.com/svg.image?\text{IF}\;false\;\text{THEN}\;k\;\text{ELSE}\;0=0\Rightarrow&space;0=0) + +## Soundness of Reduction +Let ![IN_0](https://latex.codecogs.com/svg.image?\mathrm{IN_0}) be the Interaction Net translated from a Neural Network ![NN](https://latex.codecogs.com/svg.image?\inline&space;\mathrm{NN}). Let ![IN_n](https://latex.codecogs.com/svg.image?\inline&space;\mathrm{IN_n}) be the state of the net +after ![n](https://latex.codecogs.com/svg.image?\inline&space;n) reduction steps. Then ![forall n in N, [IN_n] = [NN]](https://latex.codecogs.com/svg.image?\inline&space;\forall&space;n\in\mathbb{N},[\mathrm{IN_n}]=[\mathrm{NN}]). 
+ +### Proof by Induction +- Base Case (![n = 0](https://latex.codecogs.com/svg.image?\inline&space;n=0)): By the [Soundness of Translation](#soundness-of-translation), the initial net ![IN_0](https://latex.codecogs.com/svg.image?\mathrm{IN_0}) is constructed such that +its semantics ![[IN_0]](https://latex.codecogs.com/svg.image?\inline&space;[\mathrm{IN_0}]) exactly match the mathematical definition of the ONNX nodes in ![NN](https://latex.codecogs.com/svg.image?\inline&space;\mathrm{NN}). +- Induction Step (![n -> n + 1](https://latex.codecogs.com/svg.image?\inline&space;n\to&space;n+1)): Assume ![[IN_n] = [NN]](https://latex.codecogs.com/svg.image?\inline&space;[\mathrm{IN_n}]=[\mathrm{NN}]). If ![IN_n](https://latex.codecogs.com/svg.image?\inline&space;\mathrm{IN_n}) is in normal form, the proof is complete. +Otherwise, there exists an active pair ![A](https://latex.codecogs.com/svg.image?\inline&space;A) that reduces ![IN_n to IN_{n+1}](https://latex.codecogs.com/svg.image?\inline&space;\mathrm{IN_n}\Rightarrow&space;\mathrm{IN_{n+1}}). +By the [Soundness of Interaction Rules](#soundness-of-interaction-rules), the mathematical definition is preserved after any reduction step, +so it follows that ![[IN_{n+1}] = [IN_n]](https://latex.codecogs.com/svg.image?\inline&space;[\mathrm{IN_{n+1}}]=[\mathrm{IN_n}]). By the inductive hypothesis, ![[IN_{n+1}] = [NN]](https://latex.codecogs.com/svg.image?\inline&space;[\mathrm{IN_{n+1}}]=[\mathrm{NN}]). + +By the principle of mathematical induction, the Interaction Net remains semantically equivalent to the original +Neural Network at every step of the reduction process. + +Since Interaction Nets are confluent, the reduced mathematical expression is unique regardless +of the order in which the rules are applied. 
diff --git a/examples/verify.py b/examples/verify.py index 046ec0b..65fb989 100644 --- a/examples/verify.py +++ b/examples/verify.py @@ -2,10 +2,10 @@ import sys, os sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) import z3 -import nneq +import vein def check_property(onnx_a, onnx_b, vnnlib): - solver = nneq.Solver() + solver = vein.Solver() print(f"--- Checking {vnnlib} ---") diff --git a/nneq.py b/nneq.py deleted file mode 100644 index 96de1ce..0000000 --- a/nneq.py +++ /dev/null @@ -1,394 +0,0 @@ -# Copyright (C) 2026 Eric Marin -# -# This program is free software: you can redistribute it and/or modify it under the terms of the -# GNU Affero General Public License as published by the Free Software Foundation, either version 3 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without -# even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License along with this program. -# If not, see . 
- -import z3 -import re -import numpy as np -import subprocess -import onnx -import onnx.shape_inference -from onnx import numpy_helper -from typing import List, Dict, Optional, Tuple -import os -import tempfile -import hashlib - -rules = """ - Linear(x, float q, float r) >< Add(out, b) => b ~ AddCheckLinear(out, x, q, r); - Concrete(float k) >< Add(out, b) - | k == 0 => out ~ b - | _ => b ~ AddCheckConcrete(out, k); - Linear(y, float s, float t) >< AddCheckLinear(out, x, float q, float r) - | (q == 0) && (r == 0) && (s == 0) && (t == 0) => out ~ Concrete(0), x ~ Eraser, y ~ Eraser - | (s == 0) && (t == 0) => out ~ Linear(x, q, r), y ~ Eraser - | (q == 0) && (r == 0) => out ~ (*L)Linear(y, s, t), x ~ Eraser - | _ => Linear(x, q, r) ~ Materialize(out_x), (*L)Linear(y, s, t) ~ Materialize(out_y), out ~ Linear(TermAdd(out_x, out_y), 1, 0); - Concrete(float j) >< AddCheckLinear(out, x, float q, float r) => out ~ Linear(x, q, r + j); - Linear(y, float s, float t) >< AddCheckConcrete(out, float k) => out ~ Linear(y, s, t + k); - Concrete(float j) >< AddCheckConcrete(out, float k) - | j == 0 => out ~ Concrete(k) - | _ => out ~ Concrete(k + j); - Linear(x, float q, float r) >< Mul(out, b) => b ~ MulCheckLinear(out, x, q, r); - Concrete(float k) >< Mul(out, b) - | k == 0 => b ~ Eraser, out ~ (*L)Concrete(0) - | k == 1 => out ~ b - | _ => b ~ MulCheckConcrete(out, k); - Linear(y, float s, float t) >< MulCheckLinear(out, x, float q, float r) - | ((q == 0) && (r == 0)) || ((s == 0) && (t == 0)) => out ~ Concrete(0), x ~ Eraser, y ~ Eraser - | _ => Linear(x, q, r) ~ Materialize(out_x), (*L)Linear(y, s, t) ~ Materialize(out_y), out ~ Linear(TermMul(out_x, out_y), 1, 0); - Concrete(float j) >< MulCheckLinear(out, x, float q, float r) => out ~ Linear(x, q * j, r * j); - Linear(y, float s, float t) >< MulCheckConcrete(out, float k) => out ~ Linear(y, s * k, t * k); - Concrete(float j) >< MulCheckConcrete(out, float k) - | j == 0 => out ~ Concrete(0) - | j == 1 => out ~ Concrete(k) 
- | _ => out ~ Concrete(k * j); - Linear(x, float q, float r) >< ReLU(out) => (*L)Linear(x, q, r) ~ Materialize(out_x), out ~ Linear(TermReLU(out_x), 1, 0); - Concrete(float k) >< ReLU(out) - | k > 0 => out ~ (*L)Concrete(k) - | _ => out ~ Concrete(0); - Linear(x, float q, float r) >< Materialize(out) - | (q == 0) => out ~ Concrete(r), x ~ Eraser - | (q == 1) && (r == 0) => out ~ x - | (q == 1) && (r != 0) => out ~ TermAdd(x, Concrete(r)) - | (q != 0) && (r == 0) => out ~ TermMul(Concrete(q), x) - | _ => out ~ TermAdd(TermMul(Concrete(q), x), Concrete(r)); - Concrete(float k) >< Materialize(out) => out ~ (*L)Concrete(k); -""" - -_INPLA_CACHE: Dict[Tuple[str, Optional[Tuple]], str] = {} - -def inpla_export(model: onnx.ModelProto, bounds: Optional[Dict[str, List[float]]] = None) -> str: - # TODO: Add Range agent - _ = bounds - class NameGen: - def __init__(self, prefix="v"): - self.counter = 0 - self.prefix = prefix - def next(self) -> str: - name = f"{self.prefix}{self.counter}" - self.counter += 1 - return name - - def get_initializers(graph) -> Dict[str, np.ndarray]: - initializers = {} - for init in graph.initializer: - initializers[init.name] = numpy_helper.to_array(init) - return initializers - - def get_attrs(node: onnx.NodeProto) -> Dict: - return {attr.name: onnx.helper.get_attribute_value(attr) for attr in node.attribute} - - def get_dim(name): - for i in list(graph.input) + list(graph.output) + list(graph.value_info): - if i.name == name: return i.type.tensor_type.shape.dim[-1].dim_value - return None - - def flatten_nest(agent_name: str, terms: List[str]) -> str: - if not terms: return "Eraser" - if len(terms) == 1: return terms[0] - current = terms[0] - for i in range(1, len(terms)): - wire = wire_gen.next() - script.append(f"{wire} ~ {agent_name}({current}, {terms[i]});") - current = wire - return current - - def balance_add(terms: List[str], sink: str): - if not terms: - script.append(f"{sink} ~ Eraser;") - return - if len(terms) == 1: - 
script.append(f"{sink} ~ {terms[0]};") - return - - nodes = terms - while len(nodes) > 1: - next_level = [] - for i in range(0, len(nodes), 2): - if i + 1 < len(nodes): - wire_out = wire_gen.next() - script.append(f"{nodes[i]} ~ Add({wire_out}, {nodes[i+1]});") - next_level.append(wire_out) - else: - next_level.append(nodes[i]) - nodes = next_level - script.append(f"{nodes[0]} ~ {sink};") - - def op_gemm(node, override_attrs=None): - attrs = override_attrs if override_attrs is not None else get_attrs(node) - - W = initializers[node.input[1]] - if not attrs.get("transB", 0): W = W.T - out_dim, in_dim = W.shape - - B = initializers[node.input[2]] if len(node.input) > 2 else np.zeros(out_dim) - alpha, beta = attrs.get("alpha", 1.0), attrs.get("beta", 1.0) - - if node.input[0] not in interactions: - interactions[node.input[0]] = [[] for _ in range(in_dim)] - - out_terms = interactions.get(node.output[0]) or [[f"Materialize(result{j})"] for j in range(out_dim)] - - for j in range(out_dim): - sink = flatten_nest("Dup", out_terms[j]) - neuron_terms = [] - for i in range(in_dim): - weight = float(alpha * W[j, i]) - if weight != 0: - v = wire_gen.next() - interactions[node.input[0]][i].append(f"Mul({v}, Concrete({weight}))") - neuron_terms.append(v) - - bias_val = float(beta * B[j]) - if bias_val != 0 or not neuron_terms: - neuron_terms.append(f"Concrete({bias_val})") - - balance_add(neuron_terms, sink) - - def op_matmul(node): - op_gemm(node, override_attrs={"alpha": 1.0, "beta": 0.0, "transB": 0}) - - def op_relu(node): - out_name, in_name = node.output[0], node.input[0] - dim = get_dim(out_name) or 1 - - if in_name not in interactions: - interactions[in_name] = [[] for _ in range(dim)] - - out_terms = interactions.get(node.output[0]) or [[f"Materialize(result{j})"] for j in range(dim)] - - for i in range(dim): - sink = flatten_nest("Dup", out_terms[i]) - v = wire_gen.next() - interactions[in_name][i].append(f"ReLU({v})") - script.append(f"{v} ~ {sink};") - - def 
op_flatten(node): - out_name, in_name = node.output[0], node.input[0] - if out_name in interactions: - interactions[in_name] = interactions[out_name] - - def op_reshape(node): - op_flatten(node) - - def op_add(node): - out_name = node.output[0] - in_a, in_b = node.input[0], node.input[1] - - dim = get_dim(out_name) or get_dim(in_a) or get_dim(in_b) or 1 - - if in_a not in interactions: interactions[in_a] = [[] for _ in range(dim)] - if in_b not in interactions: interactions[in_b] = [[] for _ in range(dim)] - - out_terms = interactions.get(out_name) or [[f"Materialize(result{j})"] for j in range(dim)] - - b_const = initializers.get(in_b) - a_const = initializers.get(in_a) - - for i in range(dim): - sink = flatten_nest("Dup", out_terms[i]) - if b_const is not None: - val = float(b_const.flatten()[i % b_const.size]) - interactions[in_a][i].append(f"Add({sink}, Concrete({val}))") - elif a_const is not None: - val = float(a_const.flatten()[i % a_const.size]) - interactions[in_b][i].append(f"Add({sink}, Concrete({val}))") - else: - v_b = wire_gen.next() - interactions[in_a][i].append(f"Add({sink}, {v_b})") - interactions[in_b][i].append(f"{v_b}") - - def op_sub(node): - out_name = node.output[0] - in_a, in_b = node.input[0], node.input[1] - - dim = get_dim(out_name) or get_dim(in_a) or get_dim(in_b) or 1 - - if out_name not in interactions: - interactions[out_name] = [[f"Materialize(result{i})"] for i in range(dim)] - - out_terms = interactions.get(out_name) or [[f"Materialize(result{j})"] for j in range(dim)] - - if in_a not in interactions: interactions[in_a] = [[] for _ in range(dim)] - if in_b not in interactions: interactions[in_b] = [[] for _ in range(dim)] - - b_const = initializers.get(in_b) - a_const = initializers.get(in_a) - - for i in range(dim): - sink = flatten_nest("Dup", out_terms[i]) - if b_const is not None: - val = float(b_const.flatten()[i % b_const.size]) - interactions[in_a][i].append(f"Add({sink}, Concrete({-val}))") - elif a_const is not None: - 
val = float(a_const.flatten()[i % a_const.size]) - interactions[in_b][i].append(f"Add({sink}, Concrete({-val}))") - else: - v_b = wire_gen.next() - interactions[in_a][i].append(f"Add({sink}, Mul({v_b}, Concrete(-1.0)))") - interactions[in_b][i].append(f"{v_b}") - - graph, initializers = model.graph, get_initializers(model.graph) - wire_gen = NameGen("w") - interactions: Dict[str, List[List[str]]] = {} - script = [] - ops = { - "Gemm": op_gemm, - "Relu": op_relu, - "Flatten": op_flatten, - "Reshape": op_reshape, - "MatMul": op_matmul, - "Add": op_add, - "Sub": op_sub - } - - if graph.output: - out = graph.output[0].name - dim = get_dim(out) - if dim: - interactions[out] = [[f"Materialize(result{i})"] for i in range(dim)] - - for node in reversed(graph.node): - if node.op_type in ops: - ops[node.op_type](node) - else: - raise RuntimeError(f"Unsupported ONNX operator: {node.op_type}") - - if graph.input: - input = "input" if "input" in interactions else graph.input[0].name - for i, terms in enumerate(interactions[input]): - sink = flatten_nest("Dup", terms) - script.append(f"{sink} ~ Linear(Symbolic(X_{i}), 1.0, 0.0);") - - result_lines = [f"result{i};" for i in range(len(interactions.get(graph.output[0].name, [])))] - return "\n".join(script + result_lines) - - -def inpla_run(model: str) -> str: - with tempfile.NamedTemporaryFile(mode="w", suffix=".inpla", delete=False) as f: - f.write(f"{rules}\n{model}") - temp_path = f.name - try: - res = subprocess.run(["./inpla", "-f", temp_path, "-foptimise-tail-calls"], capture_output=True, text=True) - if res.stderr: print(res.stderr) - return res.stdout - finally: - if os.path.exists(temp_path): - os.remove(temp_path) - - -def z3_evaluate(model: str, X): - def Symbolic(id): - if id not in X: - X[id] = z3.Real(id) - return X[id] - def Concrete(val): return z3.RealVal(val) - def TermAdd(a, b): return a + b - def TermMul(a, b): return a * b - def TermReLU(x): return z3.If(x > 0, x, 0) - - context = { - 'Concrete': Concrete, - 
'Symbolic': Symbolic, - 'TermAdd': TermAdd, - 'TermMul': TermMul, - 'TermReLU': TermReLU - } - lines = [line.strip() for line in model.splitlines() if line.strip()] - - def iterative_eval(expr_str): - tokens = re.findall(r'\(|\)|\,|[^(),\s]+', expr_str) - stack = [[]] - for token in tokens: - if token == '(': - stack.append([]) - elif token == ')': - args = stack.pop() - func_name = stack[-1].pop() - func = context.get(func_name) - if not func: - raise ValueError(f"Unknown function: {func_name}") - stack[-1].append(func(*args)) - elif token == ',': - continue - else: - if token in context: - stack[-1].append(token) - else: - try: - stack[-1].append(float(token)) - except ValueError: - stack[-1].append(token) - return stack[0][0] - - exprs = [iterative_eval(line) for line in lines] - return exprs - -def net(model: onnx.ModelProto, X, bounds: Optional[Dict[str, List[float]]] = None): - model_hash = hashlib.sha256(model.SerializeToString()).hexdigest() - bounds_key = tuple(sorted((k, tuple(v)) for k, v in bounds.items())) if bounds else None - cache_key = (model_hash, bounds_key) - - if cache_key not in _INPLA_CACHE: - exported = inpla_export(model, bounds) - reduced = inpla_run(exported) - _INPLA_CACHE[cache_key] = reduced - - res = z3_evaluate(_INPLA_CACHE[cache_key], X) - return res if res is not None else [] - -class Solver(z3.Solver): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.X = {} - self.Y = {} - self.bounds: Dict[str, List[float]] = {} - self.pending_nets: List[Tuple[onnx.ModelProto, Optional[str]]] = [] - - def load_vnnlib(self, file_path): - with open(file_path, "r") as f: - content = f.read() - - for match in re.finditer(r"\(assert\s+\((>=|<=)\s+(X_\d+)\s+([-+]?\d*\.?\d+(?:[eE][-+]?\d+)?)\)\)", content): - op, var, val = match.groups() - val = float(val) - if var not in self.bounds: self.bounds[var] = [float('-inf'), float('inf')] - if op == ">=": self.bounds[var][0] = val - else: self.bounds[var][1] = val - - content 
= re.sub(r"\(vnnlib-version.*?\)", "", content) - assertions = z3.parse_smt2_string(content) - self.add(assertions) - - def load_onnx(self, file, name=None): - model = onnx.load(file) - model = onnx.shape_inference.infer_shapes(model) - self.pending_nets.append((model, name)) - - def _process_nets(self): - y_count = 0 - for model, name in self.pending_nets: - z3_outputs = net(model, self.X, bounds=self.bounds) - if z3_outputs: - for _, out_expr in enumerate(z3_outputs): - y_var = z3.Real(f"Y_{y_count}") - self.add(y_var == out_expr) - if name: - if name not in self.Y: self.Y[name] = [] - self.Y[name].append(out_expr) - y_count += 1 - self.pending_nets = [] - - def check(self, *args): - self._process_nets() - return super().check(*args) diff --git a/proof.md b/proof.md deleted file mode 100644 index 9a1d2c9..0000000 --- a/proof.md +++ /dev/null @@ -1,562 +0,0 @@ -# Soundness Proof -## Mathematical Definitions -![Linear(x, q, r) ~ out => out = q*x + r](https://latex.codecogs.com/svg.image?Linear(x,q,r)\sim&space;out\Rightarrow&space;out=q*x+r&space;) -![Concrete(k) ~ out => out = ](https://latex.codecogs.com/svg.image?Concrete(k)\sim&space;out\Rightarrow&space;out=k&space;) -![Add(out, b) ~ a => out = a + b](https://latex.codecogs.com/svg.image?Add(out,b)\sim&space;a\Rightarrow&space;out=a+b&space;) -![AddCheckLinear(out, x, q, r) ~ b => out = q*x + (r + b)](https://latex.codecogs.com/svg.image?AddCheckLinear(out,x,q,r)\sim&space;b\Rightarrow&space;out=q*x+(r+b)) -![AddCheckConcrete(out, k) ~ b => out = k + b](https://latex.codecogs.com/svg.image?AddCheckConcrete(out,k)\sim&space;b\Rightarrow&space;out=k+b&space;) -![Mul(out, b) ~ a => out = a * b](https://latex.codecogs.com/svg.image?Mul(out,b)\sim&space;a\Rightarrow&space;out=a*b&space;) -![MulCheckLinear(out, x, q, r) ~ b => out = q*b*x + r*b](https://latex.codecogs.com/svg.image?MulCheckLinear(out,x,q,r)\sim&space;b\Rightarrow&space;out=q*b*x+r*b&space;) -![MulCheckConcrete(out, k) ~ b => out = 
k*b](https://latex.codecogs.com/svg.image?MulCheckConcrete(out,k)\sim&space;b\Rightarrow&space;out=k*b&space;) -![ReLU(out) ~ x => out = IF (x > 0) THEN x ELSE 0](https://latex.codecogs.com/svg.image?ReLU(out)\sim&space;x\Rightarrow&space;out=\text{IF}\;(x>0)\;\text{THEN}\;x\;\text{ELSE}\;0) -![Materialize(out) ~ x => out = x](https://latex.codecogs.com/svg.image?Materialize(out)\sim&space;x\Rightarrow&space;out=x&space;) - -## Soundness of Translation -### ReLU -ONNX ReLU node is defined as: -![Y = X if X > 0 else 0](https://latex.codecogs.com/svg.image?Y=X\;if\;X>0\;else\;0&space;) - -The translation defines the interactions: -![x_i ~ ReLU(y_i)](https://latex.codecogs.com/svg.image?x_i\sim&space;ReLU(y_i)) - -By definition this interaction is equal to: -![y_i = IF (x_i > 0) THEN x_i ELSE 0](https://latex.codecogs.com/svg.image?y_i=\text{IF}\;(x_i>0)\;\text{THEN}\;x_i\;\text{ELSE}\;0) - -### Gemm -ONNX Gemm node is defined as: -![Y = alpha * A * B + beta * C](https://latex.codecogs.com/svg.image?Y=\alpha\cdot&space;A\cdot&space;B+\beta\cdot&space;C&space;) - -The translation defines the interactions: -![a_i ~ Mul(v_i, Concrete(alpha * b_i))](https://latex.codecogs.com/svg.image?a_i\sim&space;Mul(v_i,Concrete(\alpha*b_i))) -![Add(...(Add(y_i, v_1), ...), v_n) ~ Concrete(beta * c_i)](https://latex.codecogs.com/svg.image?Add(...(Add(y_i,v_1),...),v_n)\sim&space;Concrete(\beta*c_i)) - -By definition this interaction is equal to: -![v_i = alpha * a_i * b_i](https://latex.codecogs.com/svg.image?v_i=\alpha*a_i*b_i&space;) -![y_i = v_1 + v_2 + ... + v_n + beta * c_i](https://latex.codecogs.com/svg.image?y_i=v_1+v_2+...+v_n+\beta*c_i&space;) - -By grouping the operations we get: -![Y = alpha * A * B + beta * C](https://latex.codecogs.com/svg.image?Y=\alpha\cdot&space;A\cdot&space;B+\beta\cdot&space;C&space;) - -### Flatten -Just identity mapping because the wires are always Flatten. 
-![out_i ~ in_i](https://latex.codecogs.com/svg.image?out_i\sim&space;in_i) - -### MatMul -Equal to Gemm with ![alpha=1](https://latex.codecogs.com/svg.image?\inline&space;\alpha=1), ![beta=0](https://latex.codecogs.com/svg.image?\inline&space;\beta=0) and ![C=0](https://latex.codecogs.com/svg.image?\inline&space;&space;C=0). - -### Reshape -Just identity mapping because the wires always Flatten. -![out_i ~ iin_i](https://latex.codecogs.com/svg.image?out_i\sim&space;in_i) - -### Add -ONNX Add node is defined as: -![C = A + B](https://latex.codecogs.com/svg.image?&space;C=A+B) - -The translation defines the interactions: -![Add(c_i, b_i) ~ a_i](https://latex.codecogs.com/svg.image?Add(c_i,b_i)\sim&space;a_i) - -By definition this interaction is equal to: -![c_i = a_i + b_i](https://latex.codecogs.com/svg.image?c_i=a_i+b_i) - -By grouping the operations we get: -![C = A + B](https://latex.codecogs.com/svg.image?C=A+B) - -### Sub -ONNX Sub node is defined as: -![C = A - B](https://latex.codecogs.com/svg.image?C=A-B) - -The translation defines the interactions: -![Add(c_i, neg_b_i) ~ a_i](https://latex.codecogs.com/svg.image?Add(c_i,neg_i)\sim&space;a_i) -![Mul(neg_b_i, Concrete(-1)) ~ b_i](https://latex.codecogs.com/svg.image?Mul(neg_i,Concrete(-1))\sim&space;b_i) - -By definition this interaction is equal to: -![c_i = a_i + neg_b_i](https://latex.codecogs.com/svg.image?c_i=a_i+neg_i) -![neg_b_i = -1 * b_i](https://latex.codecogs.com/svg.image?neg_i=-1*b_i) - -By grouping the operations we get: -![C = A - B](https://latex.codecogs.com/svg.image?C=A-B) - -## Soundness of Interaction Rules -### Materialize -The Materialize agent transforms a Linear agent into a tree of explicit mathematical operations -that are used as final representation for the solver. 
-In the Python module the terms are defined as: -```python -def TermAdd(a, b): - return a + b -def TermMul(a, b): - return a * b -def TermReLU(x): - return z3.If(x > 0, x, 0) -``` - -#### Linear >< Materialize -![Linear(x, q, r) >< Materialize(out) => (1), (2), (3), (4), (5)](https://latex.codecogs.com/svg.image?\inline&space;&space;Linear(x,q,r)> out ~ Concrete(r), x ~ Eraser](https://latex.codecogs.com/svg.image?q=0\Rightarrow&space;out\sim&space;Concrete(r),x\sim&space;Eraser) - -RHS: -![out = r](https://latex.codecogs.com/svg.image?out=r) - -EQUIVALENCE: -![0*x + r = r => r = r](https://latex.codecogs.com/svg.image?0*x+r=r\Rightarrow&space;r=r) - -##### Case 2: -![q = 1, r = 0 => out ~ x](https://latex.codecogs.com/svg.image?q=1,r=0\Rightarrow&space;out~x) - -RHS: -![x = out](https://latex.codecogs.com/svg.image?x=out) -![out = x](https://latex.codecogs.com/svg.image?out=x) - -EQUIVALENCE: -![1*x + 0 = x => x = x](https://latex.codecogs.com/svg.image?1*x+0=x\Rightarrow&space;x=x) - -##### Case 3: -![q = 1 => out ~ TermAdd(x, Concrete(r))](https://latex.codecogs.com/svg.image?q=1\Rightarrow&space;out\sim&space;TermAdd(x,Concrete(r))) - -RHS: -![out = x + r](https://latex.codecogs.com/svg.image?out=x+r) - -EQUIVALENCE: -![1*x + r = x + r => x + r = x + r](https://latex.codecogs.com/svg.image?1*x+r=x+r\Rightarrow&space;x+r=x+r) - -##### Case 4: -![r = 0 => out ~ TermMul(Concrete(q), x)](https://latex.codecogs.com/svg.image?r=0\Rightarrow&space;out\sim&space;TermMul(Concrete(q),x)) - -RHS: -![out = q*x](https://latex.codecogs.com/svg.image?out=q*x) - -EQUIVALENCE: -![q*x + 0 = q*x => q*x = q*x](https://latex.codecogs.com/svg.image?q*x+0=q*x\Rightarrow&space;q*x=q*x) - -##### Case 5: -![otherwise => out ~ TermAdd(TermMul(Concrete(q), x), Concrete(r))](https://latex.codecogs.com/svg.image?otherwise\Rightarrow&space;out\sim&space;TermAdd(TermMul(Concrete(q),x),Concrete(r))) - -RHS: -![out = q*x + r](https://latex.codecogs.com/svg.image?out=q*x+r) - -EQUIVALENCE: 
-![q*x + r = q*x + r](https://latex.codecogs.com/svg.image?q*x+r=q*x+r) - -#### Concrete >< Materialize -![Concrete(k) >< Materialize(out) => out ~ Concrete(k)](https://latex.codecogs.com/svg.image?Concrete(k)>< Add -![Linear(x, q, r) >< Add(out, b) => b ~ AddCheckLinear(out, x, q, r)](https://latex.codecogs.com/svg.image?Linear(x,q,r)> q*x + (r + b) = q*x + (r + b)](https://latex.codecogs.com/svg.image?q*x+r+b=q*x+(r+b)\Rightarrow&space;q*x+(r+b)=q*x+(r+b)) - -#### Concrete >< Add -![Concrete(k) >< Add(out, b) => (1), (2)](https://latex.codecogs.com/svg.image?Concrete(k)> out ~ b](https://latex.codecogs.com/svg.image?k=0\Rightarrow&space;out\sim&space;b) - -RHS: -![out = b](https://latex.codecogs.com/svg.image?out=b) - -EQUIVALENCE: -![0 + b = b => b = b](https://latex.codecogs.com/svg.image?0+b=b\Rightarrow&space;b=b) - -##### Case 2: -![otherwise => b ~ AddCheckConcrete(out, k)](https://latex.codecogs.com/svg.image?otherwise\Rightarrow&space;b\sim&space;AddCheckConcrete(out,k)) - -RHS: -![out = k + b](https://latex.codecogs.com/svg.image?out=k+b) - -EQUIVALENCE: -![k + b = k + b](https://latex.codecogs.com/svg.image?k+b=k+b) - -#### Linear >< AddCheckLinear -![Linear(y, s, t) >< AddCheckLinear(out, x, q, r) => (1), (2), (3), (4)](https://latex.codecogs.com/svg.image?Linear(y,s,t)> out ~ Concrete(0), x ~ Eraser, y ~ Eraser](https://latex.codecogs.com/svg.image?q,r,s,t=0\Rightarrow&space;out\sim&space;Concrete(0),x\sim&space;Eraser,y\sim&space;Eraser) - -RHS: -![out = 0](https://latex.codecogs.com/svg.image?out=0) - -EQUIVALENCE: -![0*x + (0 + 0*y + 0) = 0 => 0 = 0](https://latex.codecogs.com/svg.image?0*x+(0+0*y+0)=0\Rightarrow&space;0=0) - -##### Case 2: -![s,t = 0 => out ~ Linear(x, q, r), y ~ Eraser](https://latex.codecogs.com/svg.image?s,t=0\Rightarrow&space;out\sim&space;Linear(x,q,r),y\sim&space;Eraser) - -RHS: -![out = q*x + r](https://latex.codecogs.com/svg.image?out=q*x+r) - -EQUIVALENCE: -![q*x + (r + 0*y + 0) = q*x + r => q*x + r = q*x + 
r](https://latex.codecogs.com/svg.image?q*x+(r+0*y+0)=q*x+r\Rightarrow&space;q*x+r=q*x+r) - -##### Case 3: -![q, r = 0 => out ~ Linear(y, s, t), x ~ Eraser](https://latex.codecogs.com/svg.image?q,r=0\Rightarrow&space;out\sim&space;Linear(y,s,t),x\sim&space;Eraser) - -RHS: -![out = s*y + t](https://latex.codecogs.com/svg.image?out=s*y+t) - -EQUIVALENCE: -![0*x + (0 + s*y + t) = s*y + t => s*y + t = s*y + t](https://latex.codecogs.com/svg.image?0*x+(0+s*y+t)=s*y+t\Rightarrow&space;s*y+t=s*y+t) - -##### Case 4: -![otherwise => Linear(x, q, r) ~ Materialize(out_x), Linear(y, s, t) ~ Materialize(out_y), out ~ Linear(TermAdd(out_x, out_y), 1, 0)](https://latex.codecogs.com/svg.image?otherwise\Rightarrow&space;Linear(x,q,r)\sim&space;Materialize(out_x),Linear(y,s,t)\sim&space;Materialize(out_y),out\sim&space;Linear(TermAdd(out_x,out_y),1,0)) - -RHS: -![Linear(x, q, r) ~ wire_1](https://latex.codecogs.com/svg.image?Linear(x,q,r)\sim&space;wire_1) -![Materialize(out_x) ~ wire_1](https://latex.codecogs.com/svg.image?Materialize(out_x)\sim&space;wire_1) -![q*x + r = wire_1](https://latex.codecogs.com/svg.image?q*x+r=wire_1) -![out_x = wire_1](https://latex.codecogs.com/svg.image?out_x=wire_1) -![Linear(y, s, t) ~ wire_2](https://latex.codecogs.com/svg.image?Linear(y,s,t)\sim&space;wire_2) -![Materialize(out_y) ~ wire_2](https://latex.codecogs.com/svg.image?Materialize(out_y)\sim&space;wire_2) -![s*y + t = wire_2](https://latex.codecogs.com/svg.image?s*y+t=wire_2) -![out_y = wire_2](https://latex.codecogs.com/svg.image?out_y=wire_2) -![out = 1*TermAdd(out_x, out_y) + 0](https://latex.codecogs.com/svg.image?out=1*TermAdd(out_x,out_y)+0) -Because `TermAdd(a, b)` is defined as `a+b`: -![out = 1*(q*x + r + s*y + t) + 0](https://latex.codecogs.com/svg.image?out=1*(q*x+r+s*y+t)+0) - -EQUIVALENCE: -![q*x + (r + s*y + t) = 1*(q*x + r + s*y + t) + 0 => q*x + r + s*y + t = q*x + r + s*y + 
t](https://latex.codecogs.com/svg.image?q*x+(r+s*y+t)=1*(q*x+r+s*y+t)+0\Rightarrow&space;q*x+r+s*y+t=q*x+r+s*y+t) - -#### Concrete >< AddCheckLinear -![Concrete(j) >< AddCheckLinear(out, x, q, r) => out ~ Linear(x, q, r + j)](https://latex.codecogs.com/svg.image?Concrete(j)>< AddCheckConcrete -![Linear(y, s, t) >< AddCheckConcrete(out, k) => out ~ Linear(y, s, t + k)](https://latex.codecogs.com/svg.image?Linear(y,s,t)> s*y + (t + k) = s*y + (t + k)](https://latex.codecogs.com/svg.image?k+s*y+t=s*y+(t+k)\Rightarrow&space;s*y+(t+k)=s*y+(t+k)) - -#### Concrete >< AddCheckConcrete -![Concrete(j) >< AddCheckConcrete(out, k) => (1), (2)](https://latex.codecogs.com/svg.image?Concrete(j)> out ~ Concrete(k)](https://latex.codecogs.com/svg.image?j=0\Rightarrow&space;out\sim&space;Concrete(k)) - -RHS: -![out = k](https://latex.codecogs.com/svg.image?out=k) - -EQUIVALENCE: -![k + 0 = k => k = k](https://latex.codecogs.com/svg.image?k+0=k\Rightarrow&space;k=k) - -##### Case 2: -![otherwise => out ~ Concrete(k + j)](https://latex.codecogs.com/svg.image?otherwise\Rightarrow&space;out\sim&space;Concrete(k+j)) - -RHS: -![out = k + j](https://latex.codecogs.com/svg.image?out=k+j) - -EQUIVALENCE: -![k + j = k + j](https://latex.codecogs.com/svg.image?k+j=k+j) - -### Mul -#### Linear >< Mul -![Linear(x, q, r) >< Mul(out, b) => b ~ MulCheckLinear(out, x, q, r)](https://latex.codecogs.com/svg.image?Linear(x,q,r)> q*b*x + r*b = q*b*x + r*b](https://latex.codecogs.com/svg.image?(q*x+r)*b=q*b*x+r*b\Rightarrow&space;q*b*x+r*b=q*b*x+r*b) - -#### Concrete >< Mul -![Concrete(k) >< Mul(out, b) => (1), (2), (3)](https://latex.codecogs.com/svg.image?Concrete(k)> out ~ Concrete(0), b ~ Eraser](https://latex.codecogs.com/svg.image?k=0\Rightarrow&space;out\sim&space;Concrete(0),b\sim&space;Eraser) - -RHS: -![out = 0](https://latex.codecogs.com/svg.image?out=0) - -EQUIVALENCE: -![0 * b = 0 => 0 = 0](https://latex.codecogs.com/svg.image?0*b=0\Rightarrow&space;0=0) - -##### Case 2: -![k = 1 => out ~ 
b](https://latex.codecogs.com/svg.image?k=1\Rightarrow&space;out\sim&space;b) - -RHS: -![out = b](https://latex.codecogs.com/svg.image?out=b) - -EQUIVALENCE: -![1 * b = b => b = b](https://latex.codecogs.com/svg.image?1*b=b\Rightarrow&space;b=b) - -##### Case 3: -![otherwise => b ~ MulCheckConcrete(out, k)](https://latex.codecogs.com/svg.image?otherwise\Rightarrow&space;b\sim&space;MulCheckConcrete(out,k)) - -RHS: -![out = k * b](https://latex.codecogs.com/svg.image?out=k*b) - -EQUIVALENCE: -![k * b = k * b](https://latex.codecogs.com/svg.image?k*b=k*b) - -#### Linear >< MulCheckLinear -![Linear(y, s, t) >< MulCheckLinear(out, x, q, r) => (1), (2)](https://latex.codecogs.com/svg.image?Linear(y,s,t)> x ~ Eraser, y ~ Eraser, out ~ Concrete(0)](https://latex.codecogs.com/svg.image?(q,r=0)\lor(s,t=0)\Rightarrow&space;x\sim&space;Eraser,y\sim&space;Eraser,out\sim&space;Concrete(0)) - -RHS: -![out = 0](https://latex.codecogs.com/svg.image?out=0) - -EQUIVALENCE: -![0*(s*y + t)*x + 0*(s*y + t) = 0 => 0 = 0](https://latex.codecogs.com/svg.image?0*(s*y+t)*x+0*(s*y+t)=0\Rightarrow&space;0=0) -![or](https://latex.codecogs.com/svg.image?\lor) -![q*(0*y + 0)*x + r*(0*y + 0) = 0 => 0 = 0](https://latex.codecogs.com/svg.image?q*(0*y+0)*x+r*(0*y+0)=0\Rightarrow&space;0=0) - -##### Case 2: -![otherwise => Linear(x, q, r) ~ Materialize(out_x), Linear(y, s, t) ~ Materialize(out_y), out ~ Linear(TermMul(out_x, out_y), 1, 0)](https://latex.codecogs.com/svg.image?otherwise\Rightarrow&space;Linear(x,q,r)\sim&space;Materialize(out_x),Linear(y,s,t)\sim&space;Materialize(out_y),out\sim&space;Linear(TermMul(out_x,out_y),1,0)) - -RHS: -![Linear(x, q, r) ~ wire_1](https://latex.codecogs.com/svg.image?Linear(x,q,r)\sim&space;wire_1) -![Materialize(out_x) ~ wire_1](https://latex.codecogs.com/svg.image?Materialize(out_x)\sim&space;wire_1) -![q*x + r = wire_1](https://latex.codecogs.com/svg.image?q*x+r=wire_1) -![out_x = wire_1](https://latex.codecogs.com/svg.image?out_x=wire_1) -![Linear(y, s, t) 
~ wire_2](https://latex.codecogs.com/svg.image?Linear(y,s,t)\sim&space;wire_2) -![Materialize(out_y) ~ wire_2](https://latex.codecogs.com/svg.image?Materialize(out_y)\sim&space;wire_2) -![s*y + t = wire_2](https://latex.codecogs.com/svg.image?s*y+t=wire_2) -![out_y = wire_2](https://latex.codecogs.com/svg.image?out_y=wire_2) -![out = 1*TermMul(out_x, out_y) + 0](https://latex.codecogs.com/svg.image?out=1*TermMul(out_x,out_y)+0) -Because `TermMul(a, b)` is defined as `a*b`: -![out = 1*(q*x + r)*(s*y + t) + 0](https://latex.codecogs.com/svg.image?out=1*(q*x+r)*(s*y+t)+0) - -EQUIVALENCE: -![q*(s*y + t)*x + r*(s*y + t) = 1*(q*x + r)*(s*y + t) => -q*(s*y + t)*x + r*(s*y + t) = (q*x + r)*(s*y + t) => -q*(s*y + t)*x + r*(s*y + t) = q*(s*y + t)*x + r*(s*y + t)](https://latex.codecogs.com/svg.image?q*(s*y+t)*x+r*(s*y+t)=1*(q*x+r)*(s*y+t)\Rightarrow&space;q*(s*y+t)*x+r*(s*y+t)=(q*x+r)*(s*y+t)\Rightarrow&space;q*(s*y+t)*x+r*(s*y+t)=q*(s*y+t)*x+r*(s*y+t)) - - -#### Concrete >< MulCheckLinear -![Concrete(j) >< MulCheckLinear(out, x, q, r) => out ~ Linear(x, q * j, r * j)](https://latex.codecogs.com/svg.image?Concrete(j)>< MulCheckConcrete -![Linear(y, s, t) >< MulCheckConcrete(out, k) => out ~ Linear(y, s * k, t * k)](https://latex.codecogs.com/svg.image?Linear(y,s,t)> s*k*y + t*k = s*k*y + t*k](https://latex.codecogs.com/svg.image?k*(s*y+t)=s*k*y+t*k\Rightarrow&space;s*k*y+t*k=s*k*y+t*k) - - -#### Concrete >< MulCheckConcrete -![Concrete(j) >< MulCheckConcrete(out, k) => (1), (2), (3)](https://latex.codecogs.com/svg.image?Concrete(j)> out ~ Concrete(0)](https://latex.codecogs.com/svg.image?j=0\Rightarrow&space;out\sim&space;Concrete(0)) - -RHS: -![out = 0](https://latex.codecogs.com/svg.image?out=0) - -EQUIVALENCE: -![k * 0 = 0 => 0 = 0](https://latex.codecogs.com/svg.image?k*0=0\Rightarrow&space;0=0) - -##### Case 2: -![j = 1 => out ~ Concrete(k)](https://latex.codecogs.com/svg.image?j=1\Rightarrow&space;out\sim&space;Concrete(k)) - -RHS: -![out = 
k](https://latex.codecogs.com/svg.image?out=k) - -EQUIVALENCE: -![k * 1 = k => k = k](https://latex.codecogs.com/svg.image?k*1=k\Rightarrow&space;k=k) - -##### Case 3: -![otherwise => out ~ Concrete(k * j)](https://latex.codecogs.com/svg.image?otherwise\Rightarrow&space;out\sim&space;Concrete(k*j)) - -RHS: -![out = k * j](https://latex.codecogs.com/svg.image?out=k*j) - -EQUIVALENCE: -![k * j = k * j](https://latex.codecogs.com/svg.image?k*j=k*j) - -### ReLU -#### Linear >< ReLU -![Linear(x, q, r) >< ReLU(out) => Linear(x, q, r) ~ Materialize(out_x), out ~ Linear(TermReLU(out_x), 1, 0)](https://latex.codecogs.com/svg.image?Linear(x,q,r)> 0 THEN wire ELSE 0](https://latex.codecogs.com/svg.image?out=\text{IF}\;wire>0\;\text{THEN}\;wire\;\text{ELSE}\;0) -![out = IF (q*x + r) > 0 THEN (q*x + r) ELSE 0](https://latex.codecogs.com/svg.image?out=\text{IF}\;(q*x+r)>0\;\text{THEN}\;(q*x+r)\;\text{ELSE}\;0) - -RHS: -![Linear(x, q, r) ~ wire](https://latex.codecogs.com/svg.image?Linear(x,q,r)\sim&space;wire) -![Materialize(out_x) ~ wire](https://latex.codecogs.com/svg.image?Materialize(out_x)\sim&space;wire) -![q*x + r = wire](https://latex.codecogs.com/svg.image?q*x+r=wire) -![out_x = wire](https://latex.codecogs.com/svg.image?out_x=wire) -![out = 1*TermReLU(out_x) + 0](https://latex.codecogs.com/svg.image?out=1*TermReLU(out_x)+0) -Because `TermReLU(x)` is defined as `z3.If(x > 0, x, 0)`: -![out = 1*(IF (q*x + r) > 0 THEN (q*x + r) ELSE 0) + 0](https://latex.codecogs.com/svg.image?out=1*(\text{IF}\;(q*x+r)>0\;\text{THEN}\;(q*x+r)\;\text{ELSE}\;0)+0) - -EQUIVALENCE: -![IF (q*x + r) > 0 THEN (q*x + r) ELSE 0 = 1*(IF (q*x + r) > 0 THEN (q*x + r) ELSE 0) + 0 => -IF (q*x + r) > 0 THEN (q*x + r) ELSE 0 = IF (q*x + r) > 0 THEN (q*x + r) ELSE 
0](https://latex.codecogs.com/svg.image?\text{IF}\;(q*x+r)>0\;\text{THEN}\;(q*x+r)\;\text{ELSE}\;0=1*(\text{IF}\;(q*x+r)>0\;\text{THEN}\;(q*x+r)\;\text{ELSE}\;0)+0\Rightarrow\text{IF}\;(q*x+r)>0\;\text{THEN}\;(q*x+r)\;\text{ELSE}\;0=\text{IF}\;(q*x+r)>0\;\text{THEN}\;(q*x+r)\;\text{ELSE}\;0) - - -#### Concrete >< ReLU -![Concrete(k) >< ReLU(out) => (1), (2)](https://latex.codecogs.com/svg.image?Concrete(k)> 0 THEN wire ELSE 0](https://latex.codecogs.com/svg.image?out=\text{IF}\;wire>0\;\text{THEN}\;wire\;\text{ELSE}\;0) -![out = IF k > 0 THEN k ELSE 0](https://latex.codecogs.com/svg.image?out=\text{IF}\;k>0\;\text{THEN}\;k\;\text{ELSE}\;0) - -##### Case 1: -![k > 0 => out ~ Concrete(k)](https://latex.codecogs.com/svg.image?k>0\Rightarrow&space;out\sim&space;Concrete(k)) - -RHS: -![out = k](https://latex.codecogs.com/svg.image?out=k) - -EQUIVALENCE: -![IF true THEN k ELSE 0 = k => k = k](https://latex.codecogs.com/svg.image?\text{IF}\;true\;\text{THEN}\;k\;\text{ELSE}\;0=k\Rightarrow&space;k=k) - -##### Case 2: -![k <= 0 => out ~ Concrete(0)](https://latex.codecogs.com/svg.image?k\leq&space;0\Rightarrow&space;out\sim&space;Concrete(0)) - -RHS: -![out = 0](https://latex.codecogs.com/svg.image?out=0) - -EQUIVALENCE: -![IF false THEN k ELSE 0 = 0 => 0 = 0](https://latex.codecogs.com/svg.image?\text{IF}\;false\;\text{THEN}\;k\;\text{ELSE}\;0=0\Rightarrow&space;0=0) - -## Soundness of Reduction -Let ![IN_0](https://latex.codecogs.com/svg.image?\mathrm{IN_0}) be the Interaction Net translated from a Neural Network ![NN](https://latex.codecogs.com/svg.image?\inline&space;\mathrm{NN}). Let ![IN_n](https://latex.codecogs.com/svg.image?\inline&space;\mathrm{IN_n}) be the state of the net -after ![n](https://latex.codecogs.com/svg.image?\inline&space;n) reduction steps. Then ![forall n in N, [IN_n] = [NN]](https://latex.codecogs.com/svg.image?\inline&space;\forall&space;n\in\mathbb{N},[\mathrm{IN_n}]=[\mathrm{NN}]). 
- -### Proof by Induction -- Base Case (![n = 0](https://latex.codecogs.com/svg.image?\inline&space;n=0)): By the [Soundness of Translation](#soundness-of-translation), the initial net ![IN_0](https://latex.codecogs.com/svg.image?\mathrm{IN_0}) is constructed such that -its semantics ![[IN_0]](https://latex.codecogs.com/svg.image?\inline&space;[\mathrm{IN_0}]) exactly match the mathematical definition of the ONNX nodes in ![NN](https://latex.codecogs.com/svg.image?\inline&space;\mathrm{NN}). -- Induction Step (![n -> n + 1](https://latex.codecogs.com/svg.image?\inline&space;n\to&space;n+1)): Assume ![[IN_n] = [NN]](https://latex.codecogs.com/svg.image?\inline&space;[\mathrm{IN_n}]=[\mathrm{NN}]). If ![IN_n](https://latex.codecogs.com/svg.image?\inline&space;\mathrm{IN_n}) is in normal form, the proof is complete. -Otherwise, there exists an active pair ![A](https://latex.codecogs.com/svg.image?\inline&space;A) that reduces ![IN_n to IN_{n+1}](https://latex.codecogs.com/svg.image?\inline&space;\mathrm{IN_n}\Rightarrow&space;\mathrm{IN_{n+1}}). -By the [Soundness of Interaction Rules](#soundness-of-interaction-rules), the mathematical definition is preserved after any reduction step, -it follows that ![[IN_{n+1}] = [IN_n]](https://latex.codecogs.com/svg.image?\inline&space;[\mathrm{IN_{n+1}}]=[\mathrm{IN_n}]). By the inductive hypothesis, ![[IN_{n+1}] = [NN]](https://latex.codecogs.com/svg.image?\inline&space;[\mathrm{IN_{n+1}}]=[\mathrm{NN}]). - -By the principle of mathematical induction, the Interaction Net remains semantically equivalent to the original -Neural Network at every step of the reduction process. - -Since Interaction Nets are confluent, the reduced mathematical expression is unique regardless -of order in which rules are applied. 
diff --git a/proof.norg b/proof.norg deleted file mode 100644 index 7bd9e25..0000000 --- a/proof.norg +++ /dev/null @@ -1,513 +0,0 @@ -@document.meta -title: proof -description: -authors: ericmarin -categories: research -created: 2026-03-16T11:34:52 -updated: 2026-03-28T17:37:48 -version: 1.1.1 -@end - -* Soundness Proof -** Mathematical Definitions - - $Linear(x, q, r) ~ out => out = q*x + r$ - - $Concrete(k) ~ out => out = k$ - - $Add(out, b) ~ a => out = a + b$ - - $AddCheckLinear(out, x, q, r) ~ b => out = q*x + (r + b)$ - - $AddCheckConcrete(out, k) ~ b => out = k + b$ - - $Mul(out, b) ~ a => out = a * b$ - - $MulCheckLinear(out, x, q, r) ~ b => out = q*b*x + r*b$ - - $MulCheckConcrete(out, k) ~ b => out = k*b$ - - $ReLU(out) ~ x => out = IF (x > 0) THEN x ELSE 0$ - - $Materialize(out) ~ x => out = x$ - -** Soundness of Translation -*** ReLU - ONNX ReLU node is defined as: - $Y = X if X > 0 else 0$ - - The translation defines the interactions: - $x_i ~ ReLU(y_i)$ - - By definition this interaction is equal to: - $y_i = IF (x_i > 0) THEN x_i ELSE 0$ - -*** Gemm - ONNX Gemm node is defined as: - $Y = alpha * A * B + beta * C$ - - The translation defines the interactions: - $a_i ~ Mul(v_i, Concrete(alpha * b_i))$ - $Add(...(Add(y_i, v_1), ...), v_n) ~ Concrete(beta * c_i)$ - - By definition this interaction is equal to: - $v_i = alpha * a_i * b_i$ - $y_i = v_1 + v_2 + ... + v_n + beta * c_i$ - - By grouping the operations we get: - $Y = alpha * A * B + beta * C$ - -*** Flatten - Just identity mapping because the wires are always Flatten. - $o_i ~ i_i$ - -*** MatMul - Equal to Gemm with $alpha=1$, $beta=0$ and $C=0$. - -*** Reshape - Just identity mapping because the wires always Flatten. 
- $o_i ~ i_i$ - -*** Add - ONNX Add node is defined as: - $C = A + B$ - - The translation defines the interactions: - $Add(c_i, b_i) ~ a_i$ - - By definition this interaction is equal to: - $c_i = a_i + b_i$ - - By grouping the operations we get: - $C = A + B$ - -*** Sub - ONNX Sub node is defined as: - $C = A - B$ - - The translation defines the interactions: - $Add(c_i, neg_b_i) ~ a_i$ - $Mul(neg_b_i, Concrete(-1)) ~ b_i$ - - By definition this interaction is equal to: - $c_i = a_i + neg_b_i$ - $neg_b_i = -1 * b_i$ - - By grouping the operations we get: - $C = A - B$ - -** Soundness of Interaction Rules -*** Materialize - The Materialize agent transforms a Linear agent into a tree of explicit mathematical operations - that are used as final representation for the solver. - In the Python module the terms are defined as: - @code python - def TermAdd(a, b): - return a + b - def TermMul(a, b): - return a * b - def TermReLU(x): - return z3.If(x > 0, x, 0) - @end -**** $Linear(x, q, r) >< Materialize(out) => (1), (2), (3), (4), (5)$ - LHS: - $Linear(x, q, r) ~ wire$ - $Materialize(out) ~ wire$ - $q*x + r = wire$ - $out = wire$ - $out = q*x + r$ - - $$ Case 1: $q = 0 => out ~ Concrete(r), x ~ Eraser$ - RHS: - $out = r$ - - EQUIVALENCE: - $0*x + r = r => r = r$ - $$ - - $$ Case 2: $q = 1, r = 0 => out ~ x$ - RHS: - $x = out$ - $out = x$ - - EQUIVALENCE: - $1*x + 0 = x => x = x$ - $$ - - $$ Case 3: $q = 1 => out ~ TermAdd(x, Concrete(r))$ - RHS: - $out = x + r$ - - EQUIVALENCE: - $1*x + r = x + r => x + r = x + r$ - $$ - - $$ Case 4: $r = 0 => out ~ TermMul(Concrete(q), x)$ - RHS: - $out = q*x$ - - EQUIVALENCE: - $q*x + 0 = q*x => q*x = q*x$ - $$ - - $$ Case 5: $otherwise => out ~ TermAdd(TermMul(Concrete(q), x), Concrete(r))$ - RHS: - $out = q*x + r$ - - EQUIVALENCE: - $q*x + r = q*x + r$ - $$ - -**** $Concrete(k) >< Materialize(out) => out ~ Concrete(k)$ - LHS: - $Concrete(k) ~ wire$ - $Materialize(out) ~ wire$ - $k = wire$ - $out = wire$ - $out = k$ - - RHS: - $out = 
k$ - - EQUIVALENCE: - $k = k$ - -*** Add -**** $Linear(x, q, r) >< Add(out, b) => b ~ AddCheckLinear(out, x, q, r)$ - LHS: - $Linear(x, q, r) ~ wire$ - $Add(out, b) ~ wire$ - $q*x + r = wire$ - $out = wire + b$ - $out = q*x + r + b$ - - RHS: - $out = q*x + (r + b)$ - - EQUIVALENCE: - $q*x + r + b = q*x + (r + b) => q*x + (r + b) = q*x + (r + b)$ - -**** $Concrete(k) >< Add(out, b) => (1), (2)$ - LHS: - $Concrete(k) ~ wire$ - $Add(out, b) ~ wire$ - $k = wire$ - $out = wire + b$ - $out = k + b$ - - $$ Case 1: $k = 0 => out ~ b$ - RHS: - $out = b$ - - EQUIVALENCE: - $0 + b = b => b = b$ - $$ - - $$ Case 2: $otherwise => b ~ AddCheckConcrete(out, k)$ - RHS: - $out = k + b$ - - EQUIVALENCE: - $k + b = k + b$ - $$ - -**** $Linear(y, s, t) >< AddCheckLinear(out, x, q, r) => (1), (2), (3), (4)$ - LHS: - $Linear(y, s, t) ~ wire$ - $AddCheckLinear(out, x, q, r) ~ wire$ - $s*y + t = wire$ - $out = q*x + (r + wire)$ - $out = q*x + (r + s*y + t)$ - - $$ Case 1: $q,r,s,t = 0 => out ~ Concrete(0), x ~ Eraser, y ~ Eraser$ - RHS: - $out = 0$ - - EQUIVALENCE: - $0*x + (0 + 0*y + 0) = 0 => 0 = 0$ - $$ - - $$ Case 2: $s,t = 0 => out ~ Linear(x, q, r), y ~ Eraser$ - RHS: - $out = q*x + r$ - - EQUIVALENCE: - $q*x + (r + 0*y + 0) = q*x + r => q*x + r = q*x + r$ - $$ - - $$ Case 3: $q, r = 0 => out ~ Linear(y, s, t), x ~ Eraser$ - RHS: - $out = s*y + t$ - - EQUIVALENCE: - $0*x + (0 + s*y + t) = s*y + t => s*y + t = s*y + t$ - $$ - - $$ Case 4: $otherwise => Linear(x, q, r) ~ Materialize(out_x), Linear(y, s, t) ~ Materialize(out_y), out ~ Linear(TermAdd(out_x, out_y), 1, 0)$ - RHS: - $Linear(x, q, r) ~ wire1$ - $Materialize(out_x) ~ wire1$ - $q*x + r = wire1$ - $out_x = wire1$ - $Linear(y, s, t) ~ wire2$ - $Materialize(out_y) ~ wire2$ - $s*y + t = wire2$ - $out_y = wire2$ - $out = 1*TermAdd(out_x, out_y) + 0$ - Because $TermAdd(a, b)$ is defined as "a+b$: - $out = 1*(q*x + r + s*y + t) + 0$ - - EQUIVALENCE: - $q*x + (r + s*y + t) = 1*(q*x + r + s*y + t) + 0 => q*x + r + s*y + t = q*x + r + 
s*y + t - $$ - -**** $Concrete(j) >< AddCheckLinear(out, x, q, r) => out ~ Linear(x, q, r + j)$ - LHS: - $Concrete(j) ~ wire$ - $AddCheckLinear(out, x, q, r) ~ wire$ - $j = wire$ - $out = q*x + (r + wire)$ - $out = q*x + (r + j)$ - - RHS: - $out = q*x + (r + j)$ - - EQUIVALENCE: - $q*x + (r + j) = q*x + (r + j)$ - -**** $Linear(y, s, t) >< AddCheckConcrete(out, k) => out ~ Linear(y, s, t + k)$ - LHS: - $Linear(y, s, t) ~ wire$ - $AddCheckConcrete(out, k) ~ wire$ - $s*y + t = wire$ - $out = k + wire$ - $out = k + s*y + t$ - - RHS: - $out = s*y + (t + k)$ - - EQUIVALENCE: - $k + s*y + t = s*y + (t + k) => s*y + (t + k) = s*y + (t + k)$ - -**** $Concrete(j) >< AddCheckConcrete(out, k) => (1), (2)$ - LHS: - $Concrete(j) ~ wire$ - $AddCheckConcrete(out, k) ~ wire$ - $j = wire$ - $out = k + wire$ - $out = k + j$ - - $$ Case 1: $j = 0 => out ~ Concrete(k)$ - RHS: - $out = k$ - - EQUIVALENCE: - $k + 0 = k => k = k$ - $$ - - $$ Case 2: $otherwise => out ~ Concrete(k + j)$ - RHS: - $out = k + j$ - - EQUIVALENCE: - $k + j = k + j$ - $$ - -*** Mul -**** $Linear(x, q, r) >< Mul(out, b) => b ~ MulCheckLinear(out, x, q, r)$ - LHS: - $Linear(x, q, r) ~ wire$ - $Mul(out, b) ~ wire$ - $q*x + r = wire$ - $out = wire * b$ - $out = (q*x + r) * b$ - - RHS: - $out = q*b*x + r*b$ - - EQUIVALENCE: - $(q*x + r) * b = q*b*x + r*b => q*b*x + r*b = q*b*x + r*b$ - -**** $Concrete(k) >< Mul(out, b) => (1), (2), (3)$ - LHS: - $Concrete(k) ~ wire$ - $Mul(out, b) ~ wire$ - $k = wire$ - $out = wire * b$ - $out = k * b$ - - $$ Case 1: $k = 0 => out ~ Concrete(0), b ~ Eraser$ - RHS: - $out = 0$ - - EQUIVALENCE: - $0 * b = 0 => 0 = 0$ - $$ - - $$ Case 2: $k = 1 => out ~ b$ - RHS: - $out = b$ - - EQUIVALENCE: - $1 * b = b => b = b$ - $$ - - $$ Case 3: $otherwise => b ~ MulCheckConcrete(out, k)$ - RHS: - $out = k * b$ - - EQUIVALENCE: - $k * b = k * b$ - $$ - -**** $Linear(y, s, t) >< MulCheckLinear(out, x, q, r) => (1), (2)$ - LHS: - $Linear(y, s, t) ~ wire$ - $MulCheckLinear(out, x, q, r) ~ wire$ - 
$s*y + t = wire$ - $out = q*wire*x + r*wire$ - $out = q*(s*y + t)*x + r*(s*y + t)$ - - $$ Case 1: $(q,r = 0) or (s,t = 0) => x ~ Eraser, y ~ Eraser, out ~ Concrete(0)$ - RHS: - $out = 0$ - - EQUIVALENCE: - $0*(s*y + t)*x + 0*(s*y + t) = 0 => 0 = 0$ - or - $q*(0*y + 0)*x + r*(0*y + 0) = 0 => 0 = 0$ - $$ - - $$ Case 2: $otherwise => Linear(x, q, r) ~ Materialize(out_x), Linear(y, s, t) ~ Materialize(out_y), out ~ Linear(TermMul(out_x, out_y), 1, 0)$ - RHS: - $Linear(x, q, r) ~ wire1$ - $Materialize(out_x) ~ wire1$ - $q*x + r = wire1$ - $out_x = wire1$ - $Linear(y, s, t) ~ wire2$ - $Materialize(out_y) ~ wire2$ - $s*y + t = wire2$ - $out_y = wire2$ - $out = 1*TermMul(out_x, out_y) + 0$ - Because $TermMul(a, b)$ is defined as $a*b$: - $out = 1*(q*x + r)*(s*y + t) + 0$ - - EQUIVALENCE: - $q*(s*y + t)*x + r*(s*y + t) = 1*(q*x + r)*(s*y + t) => - q*(s*y + t)*x + r*(s*y + t) = (q*x + r)*(s*y + t) => - q*(s*y + t)*x + r*(s*y + t) = q*(s*y + t)*x + r*(s*y + t)$ - $$ - -**** $Concrete(j) >< MulCheckLinear(out, x, q, r) => out ~ Linear(x, q * j, r * j)$ - LHS: - $Concrete(j) ~ wire$ - $MulCheckLinear(out, x, q, r) ~ wire$ - $j = wire$ - $out = q*wire*x + r*wire$ - $out = q*j*x + r*j$ - - RHS: - $out = q*j*x + r*j$ - - EQUIVALENCE: - $q*j*x + r*j = q*j*x + r*j$ - -**** $Linear(y, s, t) >< MulCheckConcrete(out, k) => out ~ Linear(y, s * k, t * k)$ - LHS: - $Linear(y, s, t) ~ wire$ - $MulCheckConcrete(out, k) ~ wire$ - $s*y + t = wire$ - $out = k * wire$ - $out = k * (s*y + t)$ - - RHS: - $out = s*k*y + t*k$ - - EQUIVALENCE: - $k * (s*y + t) = s*k*y + t*k => s*k*y + t*k = s*k*y + t*k$ - -**** $Concrete(j) >< MulCheckConcrete(out, k) => (1), (2), (3)$ - LHS: - $Concrete(j) ~ wire$ - $MulCheckConcrete(out, k) ~ wire$ - $j = wire$ - $out = k * wire$ - $out = k * j$ - - $$ Case 1: $j = 0 => out ~ Concrete(0)$ - RHS: - $out = 0$ - - EQUIVALENCE: - $k * 0 = 0 => 0 = 0$ - $$ - - $$ Case 2: $j = 1 => out ~ Concrete(k)$ - RHS: - $out = k$ - - EQUIVALENCE: - $k * 1 = k => k = k$ - $$ - - $$ 
Case 3: $otherwise => out ~ Concrete(k * j)$ - RHS: - $out = k * j$ - - EQUIVALENCE: - $k * j = k * j$ - -*** ReLU -**** $Linear(x, q, r) >< ReLU(out) => Linear(x, q, r) ~ Materialize(out_x), out ~ Linear(TermReLU(out_x), 1, 0)$ - LHS: - $Linear(x, q, r) ~ wire$ - $ReLU(out) ~ wire$ - $q*x + r = wire$ - $out = IF wire > 0 THEN wire ELSE 0$ - $out = IF (q*x + r) > 0 THEN (q*x + r) ELSE 0$ - - RHS: - $Linear(x, q, r) ~ wire$ - $Materialize(out_x) ~ wire$ - $q*x + r = wire$ - $out_x = wire$ - $out = 1*TermReLU(out_x) + 0$ - Because $TermReLU(x)$ is defined as $z3.If(x > 0, x, 0)$: - $out = 1*(IF (q*x + r) > 0 THEN (q*x + r) ELSE 0) + 0$ - - EQUIVALENCE: - $IF (q*x + r) > 0 THEN (q*x + r) ELSE 0 = 1*(IF (q*x + r) > 0 THEN (q*x + r) ELSE 0) + 0 => - IF (q*x + r) > 0 THEN (q*x + r) ELSE 0 = IF (q*x + r) > 0 THEN (q*x + r) ELSE 0$ - -**** $Concrete(k) >< ReLU(out) => (1), (2)$ - LHS: - $Concrete(k) ~ wire$ - $ReLU(out) ~ wire$ - $k = wire$ - $out = IF wire > 0 THEN wire ELSE 0$ - $out = IF k > 0 THEN k ELSE 0$ - - $$ Case 1: $k > 0 => out ~ Concrete(k)$ - RHS: - $out = k$ - - EQUIVALENCE: - $IF true THEN k ELSE 0 = k => k = k$ - $$ - - $$ Case 2: $k <= 0 => out ~ Concrete(0)$ - RHS: - $out = 0$ - - EQUIVALENCE: - $IF false THEN k ELSE 0 = 0 => 0 = 0$ - $$ - -** Soundness of Reduction - Let $IN_0$ be the Interaction Net translated from a Neural Network $NN$. Let $IN_n$ be the state of the net - after $n$ reduction steps. Then $\forall n \in N$, $[IN_n] = [NN]$. - -*** Proof by Induction - - Base Case ($n = 0$): By the {* Soundness of Translation}, the initial net $IN_0$ is constructed such that - its semantics $[IN_0]$ exactly match the mathematical definition of the ONNX nodes in $NN$. - - Induction Step ($n -> n + 1$): Assume $[IN_n] = [NN]$. If $IN_n$ is in normal form, the proof is complete. - Otherwise, there exists an active pair $A$ that reduces $IN_n$ to $IN_{n+1}$. 
- By the {* Soundness of Interaction Rules}, the mathematical definition is preserved after any reduction step, - it follows that $[IN_{n+1}] = [IN_n]$. By the inductive hypothesis, $[IN_{n+1}] = [NN]$. - --- - - By the principle of mathematical induction, the Interaction Net remains semantically equivalent to the original - Neural Network at every step of the reduction process. - - Since Interaction Nets are confluent, the reduced mathematical expression is unique regardless - of order in which rules are applied. diff --git a/vein.py b/vein.py new file mode 100644 index 0000000..1aa86ae --- /dev/null +++ b/vein.py @@ -0,0 +1,395 @@ +# VErification via interaction Nets (VEIN) +# Copyright (C) 2026 Eric Marin +# +# This program is free software: you can redistribute it and/or modify it under the terms of the +# GNU Affero General Public License as published by the Free Software Foundation, either version 3 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without +# even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License along with this program. +# If not, see . 
import z3
import re
import numpy as np
import subprocess
import onnx
import onnx.shape_inference
from onnx import numpy_helper
from typing import List, Dict, Optional, Tuple
import os
import tempfile
import hashlib

# Inpla rewrite rules implementing symbolic affine arithmetic over interaction nets.
# Agents (semantics proved in docs/proof.md):
#   Linear(x, q, r)  -- the symbolic affine form q*x + r
#   Concrete(k)      -- the constant k
#   Add/Mul(out, b)  -- binary arithmetic; the *Check* variants dispatch on the
#                       second operand once the first is known
#   Materialize(out) -- flattens a Linear/Concrete into a Term* expression tree
#                       that z3_evaluate below can parse
# NOTE(review): `(*L)` looks like an Inpla annotation from the forked runtime
# (README requires the eric-marin/inpla fork) -- semantics not visible here.
rules = """
    Linear(x, float q, float r) >< Add(out, b) => b ~ AddCheckLinear(out, x, q, r);
    Concrete(float k) >< Add(out, b)
    | k == 0 => out ~ b
    | _ => b ~ AddCheckConcrete(out, k);
    Linear(y, float s, float t) >< AddCheckLinear(out, x, float q, float r)
    | (q == 0) && (r == 0) && (s == 0) && (t == 0) => out ~ Concrete(0), x ~ Eraser, y ~ Eraser
    | (s == 0) && (t == 0) => out ~ Linear(x, q, r), y ~ Eraser
    | (q == 0) && (r == 0) => out ~ (*L)Linear(y, s, t), x ~ Eraser
    | _ => Linear(x, q, r) ~ Materialize(out_x), (*L)Linear(y, s, t) ~ Materialize(out_y), out ~ Linear(TermAdd(out_x, out_y), 1, 0);
    Concrete(float j) >< AddCheckLinear(out, x, float q, float r) => out ~ Linear(x, q, r + j);
    Linear(y, float s, float t) >< AddCheckConcrete(out, float k) => out ~ Linear(y, s, t + k);
    Concrete(float j) >< AddCheckConcrete(out, float k)
    | j == 0 => out ~ Concrete(k)
    | _ => out ~ Concrete(k + j);
    Linear(x, float q, float r) >< Mul(out, b) => b ~ MulCheckLinear(out, x, q, r);
    Concrete(float k) >< Mul(out, b)
    | k == 0 => b ~ Eraser, out ~ (*L)Concrete(0)
    | k == 1 => out ~ b
    | _ => b ~ MulCheckConcrete(out, k);
    Linear(y, float s, float t) >< MulCheckLinear(out, x, float q, float r)
    | ((q == 0) && (r == 0)) || ((s == 0) && (t == 0)) => out ~ Concrete(0), x ~ Eraser, y ~ Eraser
    | _ => Linear(x, q, r) ~ Materialize(out_x), (*L)Linear(y, s, t) ~ Materialize(out_y), out ~ Linear(TermMul(out_x, out_y), 1, 0);
    Concrete(float j) >< MulCheckLinear(out, x, float q, float r) => out ~ Linear(x, q * j, r * j);
    Linear(y, float s, float t) >< MulCheckConcrete(out, float k) => out ~ Linear(y, s * k, t * k);
    Concrete(float j) >< MulCheckConcrete(out, float k)
    | j == 0 => out ~ Concrete(0)
    | j == 1 => out ~ Concrete(k)
    | _ => out ~ Concrete(k * j);
    Linear(x, float q, float r) >< ReLU(out) => (*L)Linear(x, q, r) ~ Materialize(out_x), out ~ Linear(TermReLU(out_x), 1, 0);
    Concrete(float k) >< ReLU(out)
    | k > 0 => out ~ (*L)Concrete(k)
    | _ => out ~ Concrete(0);
    Linear(x, float q, float r) >< Materialize(out)
    | (q == 0) => out ~ Concrete(r), x ~ Eraser
    | (q == 1) && (r == 0) => out ~ x
    | (q == 1) && (r != 0) => out ~ TermAdd(x, Concrete(r))
    | (q != 0) && (r == 0) => out ~ TermMul(Concrete(q), x)
    | _ => out ~ TermAdd(TermMul(Concrete(q), x), Concrete(r));
    Concrete(float k) >< Materialize(out) => out ~ (*L)Concrete(k);
"""

# Cache of reduced Inpla output, keyed by (sha256 of the serialized ONNX model,
# normalized bounds).  Avoids re-running the external `inpla` binary when the
# same network is loaded twice (see net()).
_INPLA_CACHE: Dict[Tuple[str, Optional[Tuple]], str] = {}

def inpla_export(model: onnx.ModelProto, bounds: Optional[Dict[str, List[float]]] = None) -> str:
    """Translate an ONNX model into an Inpla interaction-net program.

    The graph is walked in *reverse* topological order: each op handler
    registers, per element of its input tensor, the list of agents that will
    consume that element (the `interactions` map), and appends net equations
    to `script`.  Finally the graph inputs are wired to symbolic
    `Linear(Symbolic(X_i), 1, 0)` sources and the outputs to `result{i}`
    free names.

    Supported ops: Gemm, MatMul, Relu, Flatten, Reshape, Add, Sub.
    Raises RuntimeError on anything else.

    `bounds` is currently ignored (see TODO) but kept in the signature so
    net()'s cache key stays meaningful once Range agents are implemented.
    """
    # TODO: Add Range agent
    _ = bounds
    class NameGen:
        """Generates fresh wire names: v0, v1, ... (prefix configurable)."""
        def __init__(self, prefix="v"):
            self.counter = 0
            self.prefix = prefix
        def next(self) -> str:
            name = f"{self.prefix}{self.counter}"
            self.counter += 1
            return name

    def get_initializers(graph) -> Dict[str, np.ndarray]:
        """Map initializer name -> numpy array for every constant tensor."""
        initializers = {}
        for init in graph.initializer:
            initializers[init.name] = numpy_helper.to_array(init)
        return initializers

    def get_attrs(node: onnx.NodeProto) -> Dict:
        """Decode a node's attribute list into a plain name -> value dict."""
        return {attr.name: onnx.helper.get_attribute_value(attr) for attr in node.attribute}

    def get_dim(name):
        # Last-axis dimension of a named value, searched across graph
        # inputs, outputs and shape-inferred value_info; None if unknown.
        # (Only the innermost axis matters: tensors are treated as flat
        # vectors of neurons.)
        for i in list(graph.input) + list(graph.output) + list(graph.value_info):
            if i.name == name: return i.type.tensor_type.shape.dim[-1].dim_value
        return None

    def flatten_nest(agent_name: str, terms: List[str]) -> str:
        """Chain `terms` pairwise through `agent_name` agents (e.g. Dup).

        Returns the single wire that stands for all of them; "Eraser" when
        a value has no consumers at all.  Side effect: appends equations to
        `script`.
        """
        if not terms: return "Eraser"
        if len(terms) == 1: return terms[0]
        current = terms[0]
        for i in range(1, len(terms)):
            wire = wire_gen.next()
            script.append(f"{wire} ~ {agent_name}({current}, {terms[i]});")
            current = wire
        return current

    def balance_add(terms: List[str], sink: str):
        """Sum `terms` into `sink` using a balanced binary tree of Add agents.

        A balanced tree keeps the reduction depth logarithmic, which matters
        for wide Gemm rows.  Empty sums erase the sink; singletons connect
        directly.
        """
        if not terms:
            script.append(f"{sink} ~ Eraser;")
            return
        if len(terms) == 1:
            script.append(f"{sink} ~ {terms[0]};")
            return

        nodes = terms
        while len(nodes) > 1:
            next_level = []
            for i in range(0, len(nodes), 2):
                if i + 1 < len(nodes):
                    wire_out = wire_gen.next()
                    script.append(f"{nodes[i]} ~ Add({wire_out}, {nodes[i+1]});")
                    next_level.append(wire_out)
                else:
                    # Odd element is promoted unchanged to the next level.
                    next_level.append(nodes[i])
            nodes = next_level
        script.append(f"{nodes[0]} ~ {sink};")

    def op_gemm(node, override_attrs=None):
        """Gemm: out[j] = sum_i alpha*W[j,i]*in[i] + beta*B[j].

        Zero weights/biases are simply omitted from the sum.  `override_attrs`
        lets op_matmul reuse this handler with fixed alpha/beta/transB.
        """
        attrs = override_attrs if override_attrs is not None else get_attrs(node)

        W = initializers[node.input[1]]
        # ONNX Gemm: transB=1 means B (the weight here) is already (out, in).
        if not attrs.get("transB", 0): W = W.T
        out_dim, in_dim = W.shape

        B = initializers[node.input[2]] if len(node.input) > 2 else np.zeros(out_dim)
        alpha, beta = attrs.get("alpha", 1.0), attrs.get("beta", 1.0)

        if node.input[0] not in interactions:
            interactions[node.input[0]] = [[] for _ in range(in_dim)]

        # Consumers registered by the (already processed) downstream node,
        # or fresh Materialize sinks if this is the graph output.
        out_terms = interactions.get(node.output[0]) or [[f"Materialize(result{j})"] for j in range(out_dim)]

        for j in range(out_dim):
            sink = flatten_nest("Dup", out_terms[j])
            neuron_terms = []
            for i in range(in_dim):
                weight = float(alpha * W[j, i])
                if weight != 0:
                    v = wire_gen.next()
                    interactions[node.input[0]][i].append(f"Mul({v}, Concrete({weight}))")
                    neuron_terms.append(v)

            bias_val = float(beta * B[j])
            # Keep a zero bias only when the row is otherwise empty, so the
            # sink still receives a well-defined constant.
            if bias_val != 0 or not neuron_terms:
                neuron_terms.append(f"Concrete({bias_val})")

            balance_add(neuron_terms, sink)

    def op_matmul(node):
        # MatMul is Gemm with alpha=1, beta=0 and an untransposed weight.
        op_gemm(node, override_attrs={"alpha": 1.0, "beta": 0.0, "transB": 0})

    def op_relu(node):
        """Elementwise ReLU: route each input element through a ReLU agent."""
        out_name, in_name = node.output[0], node.input[0]
        dim = get_dim(out_name) or 1

        if in_name not in interactions:
            interactions[in_name] = [[] for _ in range(dim)]

        out_terms = interactions.get(node.output[0]) or [[f"Materialize(result{j})"] for j in range(dim)]

        for i in range(dim):
            sink = flatten_nest("Dup", out_terms[i])
            v = wire_gen.next()
            interactions[in_name][i].append(f"ReLU({v})")
            script.append(f"{v} ~ {sink};")

    def op_flatten(node):
        # Flatten is a no-op on our flat representation: the input simply
        # inherits the output's consumer lists.
        out_name, in_name = node.output[0], node.input[0]
        if out_name in interactions:
            interactions[in_name] = interactions[out_name]

    def op_reshape(node):
        # Reshape is likewise identity over flattened neurons.
        op_flatten(node)

    def op_add(node):
        """Elementwise Add; constant operands fold into Add(sink, Concrete(v)).

        Broadcasting of a smaller constant is approximated by cycling its
        flattened values (`i % const.size`) -- NOTE(review): this matches
        last-axis broadcast only; verify for higher-rank constants.
        """
        out_name = node.output[0]
        in_a, in_b = node.input[0], node.input[1]

        dim = get_dim(out_name) or get_dim(in_a) or get_dim(in_b) or 1

        if in_a not in interactions: interactions[in_a] = [[] for _ in range(dim)]
        if in_b not in interactions: interactions[in_b] = [[] for _ in range(dim)]

        out_terms = interactions.get(out_name) or [[f"Materialize(result{j})"] for j in range(dim)]

        b_const = initializers.get(in_b)
        a_const = initializers.get(in_a)

        for i in range(dim):
            sink = flatten_nest("Dup", out_terms[i])
            if b_const is not None:
                val = float(b_const.flatten()[i % b_const.size])
                interactions[in_a][i].append(f"Add({sink}, Concrete({val}))")
            elif a_const is not None:
                val = float(a_const.flatten()[i % a_const.size])
                interactions[in_b][i].append(f"Add({sink}, Concrete({val}))")
            else:
                # Both operands symbolic: b feeds the Add agent through a wire.
                v_b = wire_gen.next()
                interactions[in_a][i].append(f"Add({sink}, {v_b})")
                interactions[in_b][i].append(f"{v_b}")

    def op_sub(node):
        """Elementwise Sub, encoded as a + (-b): constants are negated,
        a symbolic subtrahend is multiplied by Concrete(-1.0)."""
        out_name = node.output[0]
        in_a, in_b = node.input[0], node.input[1]

        dim = get_dim(out_name) or get_dim(in_a) or get_dim(in_b) or 1

        if out_name not in interactions:
            interactions[out_name] = [[f"Materialize(result{i})"] for i in range(dim)]

        out_terms = interactions.get(out_name) or [[f"Materialize(result{j})"] for j in range(dim)]

        if in_a not in interactions: interactions[in_a] = [[] for _ in range(dim)]
        if in_b not in interactions: interactions[in_b] = [[] for _ in range(dim)]

        b_const = initializers.get(in_b)
        a_const = initializers.get(in_a)

        for i in range(dim):
            sink = flatten_nest("Dup", out_terms[i])
            if b_const is not None:
                val = float(b_const.flatten()[i % b_const.size])
                interactions[in_a][i].append(f"Add({sink}, Concrete({-val}))")
            elif a_const is not None:
                # NOTE(review): constant minuend also negates here, giving
                # b - a rather than a - b for the (const - symbolic) case
                # unless the sign is handled elsewhere -- confirm intent.
                val = float(a_const.flatten()[i % a_const.size])
                interactions[in_b][i].append(f"Add({sink}, Concrete({-val}))")
            else:
                v_b = wire_gen.next()
                interactions[in_a][i].append(f"Add({sink}, Mul({v_b}, Concrete(-1.0)))")
                interactions[in_b][i].append(f"{v_b}")

    # --- shared translation state (closed over by the handlers above) ---
    graph, initializers = model.graph, get_initializers(model.graph)
    wire_gen = NameGen("w")
    interactions: Dict[str, List[List[str]]] = {}
    script = []
    ops = {
        "Gemm": op_gemm,
        "Relu": op_relu,
        "Flatten": op_flatten,
        "Reshape": op_reshape,
        "MatMul": op_matmul,
        "Add": op_add,
        "Sub": op_sub
    }

    # Seed the graph output with Materialize sinks so the final symbolic
    # expressions surface on the free names result0, result1, ...
    if graph.output:
        out = graph.output[0].name
        dim = get_dim(out)
        if dim:
            interactions[out] = [[f"Materialize(result{i})"] for i in range(dim)]

    # Reverse order: consumers are registered before their producers run.
    for node in reversed(graph.node):
        if node.op_type in ops:
            ops[node.op_type](node)
        else:
            raise RuntimeError(f"Unsupported ONNX operator: {node.op_type}")

    # Wire every input element to a fresh symbolic affine source X_i.
    if graph.input:
        input = "input" if "input" in interactions else graph.input[0].name  # NOTE: shadows builtins.input (local only)
        for i, terms in enumerate(interactions[input]):
            sink = flatten_nest("Dup", terms)
            script.append(f"{sink} ~ Linear(Symbolic(X_{i}), 1.0, 0.0);")

    # Trailing `resultN;` lines ask Inpla to print each normal form.
    result_lines = [f"result{i};" for i in range(len(interactions.get(graph.output[0].name, [])))]
    return "\n".join(script + result_lines)


def inpla_run(model: str) -> str:
    """Run the `./inpla` binary (the required fork, see README) on
    `rules` + the exported net and return its stdout (the reduced terms).

    Uses a delete=False temp file so the path survives the `with` block on
    all platforms; the file is removed in `finally`.  Any stderr from the
    reducer is echoed for debugging but does not abort.
    """
    with tempfile.NamedTemporaryFile(mode="w", suffix=".inpla", delete=False) as f:
        f.write(f"{rules}\n{model}")
        temp_path = f.name
    try:
        res = subprocess.run(["./inpla", "-f", temp_path, "-foptimise-tail-calls"], capture_output=True, text=True)
        if res.stderr: print(res.stderr)
        return res.stdout
    finally:
        if os.path.exists(temp_path):
            os.remove(temp_path)


def z3_evaluate(model: str, X):
    """Parse Inpla's reduced output into z3 Real expressions.

    `model` is the reducer's stdout: one Term*/Concrete/Symbolic expression
    per non-empty line.  `X` is a shared mutable dict mapping symbolic names
    (e.g. "X_0") to their z3 variables; it is populated on first use so the
    same variables are reused across several networks (equivalence checks).

    Returns the list of parsed z3 expressions, one per line.
    """
    def Symbolic(id):
        # Memoize z3 variables by name in the caller-owned dict X.
        if id not in X:
            X[id] = z3.Real(id)
        return X[id]
    def Concrete(val): return z3.RealVal(val)
    def TermAdd(a, b): return a + b
    def TermMul(a, b): return a * b
    def TermReLU(x): return z3.If(x > 0, x, 0)

    context = {
        'Concrete': Concrete,
        'Symbolic': Symbolic,
        'TermAdd': TermAdd,
        'TermMul': TermMul,
        'TermReLU': TermReLU
    }
    lines = [line.strip() for line in model.splitlines() if line.strip()]

    def iterative_eval(expr_str):
        """Stack-based (non-recursive) evaluator for nested f(a, b) syntax;
        avoids Python recursion limits on deeply nested reduced terms."""
        tokens = re.findall(r'\(|\)|\,|[^(),\s]+', expr_str)
        stack = [[]]
        for token in tokens:
            if token == '(':
                stack.append([])
            elif token == ')':
                # Close a call: pop its args, the name below is the function.
                args = stack.pop()
                func_name = stack[-1].pop()
                func = context.get(func_name)
                if not func:
                    raise ValueError(f"Unknown function: {func_name}")
                stack[-1].append(func(*args))
            elif token == ',':
                continue
            else:
                if token in context:
                    stack[-1].append(token)
                else:
                    # Bare token: numeric literal if possible, otherwise a
                    # symbol name (e.g. X_0) passed through to Symbolic.
                    try:
                        stack[-1].append(float(token))
                    except ValueError:
                        stack[-1].append(token)
        return stack[0][0]

    exprs = [iterative_eval(line) for line in lines]
    return exprs

def net(model: onnx.ModelProto, X, bounds: Optional[Dict[str, List[float]]] = None):
    """Export, reduce and z3-parse an ONNX model; returns z3 output exprs.

    The expensive export+reduce step is cached in _INPLA_CACHE, keyed by the
    model's content hash and the (order-normalized) bounds, since bounds may
    eventually influence the export (Range agents, see inpla_export TODO).
    """
    model_hash = hashlib.sha256(model.SerializeToString()).hexdigest()
    bounds_key = tuple(sorted((k, tuple(v)) for k, v in bounds.items())) if bounds else None
    cache_key = (model_hash, bounds_key)

    if cache_key not in _INPLA_CACHE:
        exported = inpla_export(model, bounds)
        reduced = inpla_run(exported)
        _INPLA_CACHE[cache_key] = reduced

    res = z3_evaluate(_INPLA_CACHE[cache_key], X)
    return res if res is not None else []

class Solver(z3.Solver):
    """z3.Solver that can load ONNX networks and VNN-LIB properties.

    Networks are queued by load_onnx() and only reduced/encoded lazily on
    the first check(), so bounds from load_vnnlib() are known beforehand.
    Attributes:
        X      -- shared symbolic input variables (name -> z3.Real)
        Y      -- per-network lists of output expressions, keyed by the
                  optional `name` given to load_onnx()
        bounds -- per-input [lower, upper] parsed from the VNN-LIB file
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.X = {}
        self.Y = {}
        self.bounds: Dict[str, List[float]] = {}
        self.pending_nets: List[Tuple[onnx.ModelProto, Optional[str]]] = []

    def load_vnnlib(self, file_path):
        """Read a VNN-LIB property file: record X_i bounds and assert the
        remaining SMT-LIB2 content into this solver."""
        with open(file_path, "r") as f:
            content = f.read()

        # Simple (assert (>=|<= X_i const)) bounds are also mirrored into
        # self.bounds for future use by the exporter.
        for match in re.finditer(r"\(assert\s+\((>=|<=)\s+(X_\d+)\s+([-+]?\d*\.?\d+(?:[eE][-+]?\d+)?)\)\)", content):
            op, var, val = match.groups()
            val = float(val)
            if var not in self.bounds: self.bounds[var] = [float('-inf'), float('inf')]
            if op == ">=": self.bounds[var][0] = val
            else: self.bounds[var][1] = val

        # z3's SMT-LIB2 parser rejects the vnnlib-version pragma; strip it.
        content = re.sub(r"\(vnnlib-version.*?\)", "", content)
        assertions = z3.parse_smt2_string(content)
        self.add(assertions)

    def load_onnx(self, file, name=None):
        """Queue an ONNX file (with inferred shapes) for encoding at check().
        `name` optionally labels its outputs in self.Y."""
        model = onnx.load(file)
        model = onnx.shape_inference.infer_shapes(model)
        self.pending_nets.append((model, name))

    def _process_nets(self):
        # Encode every queued network: each output expression is bound to a
        # fresh Y_k variable via an equality assertion.  Y_k numbering is
        # global across all queued networks.
        y_count = 0
        for model, name in self.pending_nets:
            z3_outputs = net(model, self.X, bounds=self.bounds)
            if z3_outputs:
                for _, out_expr in enumerate(z3_outputs):
                    y_var = z3.Real(f"Y_{y_count}")
                    self.add(y_var == out_expr)
                    if name:
                        if name not in self.Y: self.Y[name] = []
                        self.Y[name].append(out_expr)
                    y_count += 1
        self.pending_nets = []

    def check(self, *args):
        """Encode any pending networks, then delegate to z3.Solver.check."""
        self._process_nets()
        return super().check(*args)