Commit
OMR_AVS 002 command-line based prover
opnun-z committed May 8, 2024
1 parent 8f5d371 commit 9e831b1
Showing 20 changed files with 1,735 additions and 75 deletions.
Binary file modified .DS_Store
8 changes: 3 additions & 5 deletions .gitignore
@@ -27,9 +27,7 @@ logs.txt
# just for example
id_rsa

*/python/venv
.venv
__pycache__

operator/python/*.ezkl
operator/python/*.json
operator/python/*.pk
operator/python/*.vk
.DS_Store
1 change: 1 addition & 0 deletions contracts/abi/ZKVerifier.abi.json
@@ -0,0 +1 @@
[{"type":"function","name":"verifyProof","inputs":[{"internalType":"bytes","name":"proof","type":"bytes"},{"internalType":"uint256[]","name":"instances","type":"uint256[]"}],"outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"nonpayable"}]
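The generated verifier ABI exposes a single verifyProof(bytes proof, uint256[] instances) -> bool function. Below is a minimal sketch, not part of this commit, of how it might be called with web3.py; the RPC URL, verifier address, and the proof/instance placeholders are assumptions — in practice they come from your deployment and from the two strings printed by python/prove.py.

import json
from web3 import Web3

# Hypothetical deployment details -- substitute a real RPC endpoint and the
# address where ZKVerifier.sol has been deployed.
RPC_URL = "http://localhost:8545"
VERIFIER_ADDRESS = "0x0000000000000000000000000000000000000000"

w3 = Web3(Web3.HTTPProvider(RPC_URL))
with open("contracts/abi/ZKVerifier.abi.json") as f:
    abi = json.load(f)
verifier = w3.eth.contract(address=VERIFIER_ADDRESS, abi=abi)

# Placeholders standing in for the two values printed by python/prove.py:
# the hex proof string and the big-endian field elements of the instances.
proof_hex = "0x00"
instance_felts = ["0x0000000000000000000000000000000000000000000000000000000000000010"]

proof_bytes = bytes.fromhex(proof_hex.removeprefix("0x"))
instances = [int(felt, 16) for felt in instance_felts]

# eth_call is enough to check the result even though verifyProof is nonpayable.
print("verified:", verifier.functions.verifyProof(proof_bytes, instances).call())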
1,522 changes: 1,522 additions & 0 deletions contracts/src/ZKVerifier.sol

Large diffs are not rendered by default.

Empty file added python/__init__.py
Empty file.
9 changes: 9 additions & 0 deletions python/model.py
@@ -0,0 +1,9 @@
import torch
from torch import nn

class Model(nn.Module):
    def __init__(self):
        super(Model, self).__init__()

    def forward(self, x):
        return [torch.max(x)]
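
For context: Model has no trainable parameters and its forward pass just reduces the input to its maximum element. A quick sanity check, assuming it is run from the python/ directory so model.py is importable:

import torch
from model import Model

model = Model()
model.eval()

# Five inputs matching cal_data.json below; the forward pass returns the max.
x = torch.Tensor([1.0, 10.0, 1.0, 1.0, 2.0]).reshape(1, 5)
print(model(x))  # expected: [tensor(10.)]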
11 changes: 11 additions & 0 deletions python/model_data/cal_data.json
@@ -0,0 +1,11 @@
{
  "input_data": [
    [
      1.0,
      10.0,
      1.0,
      1.0,
      2.0
    ]
  ]
}
Binary file added python/model_data/network.ezkl
File renamed without changes.
73 changes: 73 additions & 0 deletions python/model_data/settings.json
@@ -0,0 +1,73 @@
{
  "run_args": {
    "tolerance": {
      "val": 0.0,
      "scale": 1.0
    },
    "input_scale": 2,
    "param_scale": 2,
    "scale_rebase_multiplier": 10,
    "lookup_range": [
      0,
      64
    ],
    "logrows": 7,
    "num_inner_cols": 2,
    "variables": [
      [
        "batch_size",
        1
      ]
    ],
    "input_visibility": "Public",
    "output_visibility": "Public",
    "param_visibility": "Fixed",
    "div_rebasing": false,
    "rebase_frac_zero_constants": false,
    "check_mode": "UNSAFE",
    "commitment": "KZG"
  },
  "num_rows": 14,
  "total_assignments": 28,
  "total_const_size": 2,
  "total_dynamic_col_size": 0,
  "num_dynamic_lookups": 0,
  "num_shuffles": 1,
  "total_shuffle_col_size": 5,
  "model_instance_shapes": [
    [
      1,
      5
    ],
    [
      1
    ]
  ],
  "model_output_scales": [
    2
  ],
  "model_input_scales": [
    2
  ],
  "module_sizes": {
    "polycommit": [],
    "poseidon": [
      0,
      [
        0
      ]
    ]
  },
  "required_lookups": [
    {
      "GreaterThanEqual": {
        "a": 0.0
      }
    }
  ],
  "required_range_checks": [],
  "check_mode": "UNSAFE",
  "version": "10.4.2",
  "num_blinding_factors": null,
  "timestamp": 1715191033368
}
Binary file added python/model_data/test.pk
Binary file added python/model_data/test.vk
1 change: 1 addition & 0 deletions python/proof/inputs.json
@@ -0,0 +1 @@
{"input_data": [[0.4000000059604645, 2.0, 3.0, 3.0, 4.0]]}
1 change: 1 addition & 0 deletions python/proof/proof.json

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions python/proof/witness.json
@@ -0,0 +1 @@
{"inputs":[["0200000000000000000000000000000000000000000000000000000000000000","0800000000000000000000000000000000000000000000000000000000000000","0c00000000000000000000000000000000000000000000000000000000000000","0c00000000000000000000000000000000000000000000000000000000000000","1000000000000000000000000000000000000000000000000000000000000000"]],"pretty_elements":{"rescaled_inputs":[["0.5","2","3","3","4"]],"inputs":[["0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000008","0x000000000000000000000000000000000000000000000000000000000000000c","0x000000000000000000000000000000000000000000000000000000000000000c","0x0000000000000000000000000000000000000000000000000000000000000010"]],"processed_inputs":[],"processed_params":[],"processed_outputs":[],"rescaled_outputs":[["4"]],"outputs":[["0x0000000000000000000000000000000000000000000000000000000000000010"]]},"outputs":[["1000000000000000000000000000000000000000000000000000000000000000"]],"processed_inputs":null,"processed_params":null,"processed_outputs":null,"max_lookup_inputs":6,"min_lookup_inputs":0,"max_range_size":0}
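
The hex strings above follow from the fixed-point encoding implied by settings.json: with input_scale = 2, each float appears to be quantized to round(value * 2**2) before being embedded as a little-endian field element, which is why 0.4000000059604645 becomes 0x02 and is reported back as the rescaled 0.5. A small sketch of that inferred rule (the exact rounding behaviour inside ezkl is an assumption here):

SCALE = 2  # input_scale from settings.json

def to_felt(value: float) -> int:
    # Quantization rule inferred from this witness, not from ezkl internals.
    return round(value * 2**SCALE)

inputs = [0.4000000059604645, 2.0, 3.0, 3.0, 4.0]
print([hex(to_felt(v)) for v in inputs])
# -> ['0x2', '0x8', '0xc', '0xc', '0x10'], matching the witness inputs above,
#    while the single output 0x10 decodes back to 16 / 2**2 = 4.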
68 changes: 68 additions & 0 deletions python/prove.py
@@ -0,0 +1,68 @@
import ezkl
import os
import argparse
from utils import relative_file_path
import json
import torch
from torch.autograd import Variable

parser = argparse.ArgumentParser(
    prog='Omron AVS ezkl operator engine')

parser.add_argument('-i','--input', nargs='+', help='input data to run on', required=True)

args = parser.parse_args()

input = Variable(torch.Tensor([float(i) for i in args.input]))

data_array = ((input).detach().numpy()).reshape([-1]).tolist()

data_path = relative_file_path("proof/inputs.json")
witness_path = relative_file_path("proof/witness.json")
compiled_model_path = relative_file_path("model_data/network.ezkl")
pk_path = relative_file_path("model_data/test.pk")
vk_path = relative_file_path("model_data/test.vk")
settings_path = relative_file_path("model_data/settings.json")
proof_path = relative_file_path('proof/proof.json')

# Serialize data into file:
data = dict(input_data = [data_array])
json.dump(data, open(data_path, 'w'))

# Generate the Witness for the proof
ezkl.gen_witness(data_path, compiled_model_path, witness_path)

# Generate the proof
proof = ezkl.prove(
    witness_path,
    compiled_model_path,
    pk_path,
    proof_path,
    "single",
)
assert ezkl.verify(
    proof_path,
    settings_path,
    vk_path,
) == True

onchain_input_array = []

# using a loop
# avoiding printing last comma
formatted_output = "["
for i, value in enumerate(proof["instances"]):
    for j, field_element in enumerate(value):
        onchain_input_array.append(ezkl.felt_to_big_endian(field_element))
        formatted_output += '"' + str(onchain_input_array[-1]) + '"'
        if j != len(value) - 1:
            formatted_output += ", "
    if i != len(proof["instances"]) - 1:
        formatted_output += ", "
formatted_output += "]"

# These are the values you use onchain.
# Copy them over to Remix and see if they verify.
# What happens when you change a value?
print("Inputs: ", formatted_output)
print("Proof: ", proof["proof"])
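
For context, an assumed invocation (the commit itself does not document one): since --input accepts one or more values, running e.g. `python prove.py --input 0.4 2 3 3 4` from the python/ directory reproduces the five values recorded in proof/inputs.json above, regenerates proof/witness.json and proof/proof.json, and prints the instance array and proof hex in the form expected by ZKVerifier.verifyProof.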
Empty file added python/scripts/__init__.py
Empty file.
20 changes: 20 additions & 0 deletions python/scripts/create_sol_verifier.py
@@ -0,0 +1,20 @@
import ezkl
import os

vk_path = os.path.join("./model_data/test.vk")
settings_path = os.path.join("./model_data/settings.json")

sol_code_path = os.path.join('../contracts/src/ZKVerifier.sol')
abi_path = os.path.join('../contracts/abi/ZKVerifier.abi.json')

print ("INFO: Generating solidity contract")

res = ezkl.create_evm_verifier(
    vk_path,
    settings_path,
    sol_code_path,
    abi_path
)

print("SUCCESS: Solidity contract generated at", sol_code_path)
print("SUCCESS: Solidity contract abi now at", abi_path)
90 changes: 20 additions & 70 deletions operator/python/main.py → python/scripts/setup.py
@@ -1,55 +1,37 @@
from torch import nn
from torch.autograd import Variable
import ezkl
import torch
import os
import argparse
import numpy as np
from torch.autograd import Variable
import json
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "../"))

parser = argparse.ArgumentParser(
prog='Omron AVS ezkl runner')
parser.add_argument('-i', '--input')

args = parser.parse_args()

print (args.input)



class Model(nn.Module):
def __init__(self):
super(Model, self).__init__()

def forward(self, x):
return [torch.max(x)]

from utils import relative_file_path
from model import Model

model = Model()

# Specify all the files we need
model_path = relative_file_path('model_data/network.onnx')
compiled_model_path = relative_file_path('model_data/network.ezkl')
pk_path = relative_file_path('model_data/test.pk')
vk_path = relative_file_path('model_data/test.vk')
settings_path = relative_file_path('model_data/settings.json')

model_path = os.path.join('network.onnx')
compiled_model_path = os.path.join('network.ezkl')
pk_path = os.path.join('test.pk')
vk_path = os.path.join('test.vk')
settings_path = os.path.join('settings.json')

witness_path = os.path.join('witness.json')
data_path = os.path.join('input.json')
cal_data_path = os.path.join('cal_data.json')
witness_path = relative_file_path('model_data/witness.json')
data_path = relative_file_path('model_data/input.json')
cal_data_path = relative_file_path('model_data/cal_data.json')

# Flips the neural net into inference mode
model.eval()

example_input = Variable(torch.Tensor([1,567,1,1,2]))
example_input = Variable(torch.Tensor([1,10,1,1,2]))

x = example_input.reshape(1, 5)
example_input = example_input.reshape(1, 5)


# Export the model
torch.onnx.export(model, # model being run
x, # model input (or a tuple for multiple inputs)
example_input, # model input (or a tuple for multiple inputs)
model_path, # where to save the model (can be a file or file-like object)
export_params=True, # store the trained parameter weights inside the model file
opset_version=10, # the ONNX version to export the model to
@@ -59,13 +41,9 @@ def forward(self, x):
dynamic_axes={'input' : {0 : 'batch_size'}, # variable length axes
'output' : {0 : 'batch_size'}})

data_array = ((x).detach().numpy()).reshape([-1]).tolist()

data = dict(input_data = [data_array])

# Serialize data into file:
json.dump(data, open(data_path, 'w'))
data_array = ((example_input).detach().numpy()).reshape([-1]).tolist()

# Edit settings
py_run_args = ezkl.PyRunArgs()
py_run_args.input_visibility = "public"
py_run_args.output_visibility = "public"
@@ -77,7 +55,7 @@ def forward(self, x):
cal_data = dict(input_data = example_input.tolist())

# Serialize calibration data into file:
json.dump(data, open(cal_data_path, 'w'))
json.dump(cal_data, open(cal_data_path, 'w'))



@@ -94,32 +72,4 @@ def forward(self, x):
compiled_model_path,
vk_path,
pk_path,
)

# Generate the Witness for the proof

# now generate the witness file
witness_path = os.path.join('witness.json')

res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)

# Generate the proof

proof_path = os.path.join('proof.json')

proof = ezkl.prove(
witness_path,
compiled_model_path,
pk_path,
proof_path,
"single",
)

print(proof)

res = ezkl.verify(
proof_path,
settings_path,
vk_path,
)
print("res", res)
)
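
For context (not spelled out in the commit message): after the rename, this script only exports network.onnx, prepares the calibration data, compiles the circuit, and runs ezkl setup to produce test.pk / test.vk under python/model_data/, while witness generation, proving, and verification now live in python/prove.py above. Running it once before prove.py, e.g. `python scripts/setup.py` from the python/ directory, is an assumed workflow based on the sys.path line at the top of the file.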
5 changes: 5 additions & 0 deletions python/utils.py
@@ -0,0 +1,5 @@
import os

# Makes sure the files will end up in the same place relative to the base python path
def relative_file_path (file_path : str) :
    return os.path.join(os.path.dirname(__file__), file_path)
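
For illustration, relative_file_path anchors every path at the python/ directory (where utils.py lives), so the scripts resolve their artifacts regardless of the working directory they are launched from:

from utils import relative_file_path

# Resolves to <repo>/python/model_data/settings.json no matter where the
# interpreter was started.
print(relative_file_path("model_data/settings.json"))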
