# Coretex
# inference.py
1 # Copyright (C) 2023 Coretex LLC
2 
3 # This file is part of Coretex.ai
4 
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License as
7 # published by the Free Software Foundation, either version 3 of the
8 # License, or (at your option) any later version.
9 
10 # This program is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # GNU Affero General Public License for more details.
14 
15 # You should have received a copy of the GNU Affero General Public License
16 # along with this program. If not, see <https://www.gnu.org/licenses/>.
17 
18 from typing import Tuple, Optional, Union
19 from pathlib import Path
20 
21 import json
22 import uuid
23 import asyncio
24 
25 from onnxruntime import InferenceSession
26 
27 import ezkl
28 import numpy as np
29 
30 from .._folder_manager import folder_manager
31 
32 
async def genWitness(inputPath: Path, circuit: Path, witnessPath: Path) -> None:
    """
        Generates the witness file for the given input and compiled circuit.
        Thin async wrapper around "ezkl.gen_witness" so callers can drive it
        with "asyncio.run". The witness is written to "witnessPath"; nothing
        is returned.
    """

    # Delegate directly to ezkl; the library handles file I/O itself
    await ezkl.gen_witness(inputPath, circuit, witnessPath)
35 
36 
async def getSrs(settings: Path) -> None:
    """
        Fetches the structured reference string (SRS) for the provided
        settings file. Thin async wrapper around "ezkl.get_srs" so callers
        can drive it with "asyncio.run". Nothing is returned.
    """

    # Delegate directly to ezkl; the SRS is cached by the library
    await ezkl.get_srs(settings)
39 
40 
def runOnnxInference(
    data: np.ndarray,
    onnxPath: Path,
    compiledModelPath: Optional[Path] = None,
    proveKey: Optional[Path] = None,
    settingsPath: Optional[Path] = None
) -> Union[np.ndarray, Tuple[np.ndarray, Path]]:

    """
        Performs inference on the provided onnx model with the provided data and also generates
        a zero knowledge proof if a compiled model and key are passed.
        This can be used to verify that the result was gained by
        combining this specific model and input data.

        Parameters
        ----------
        data : ndarray
            data which will be directly fed to the model
        onnxPath : Path
            path to the onnx model
        compiledModelPath : Optional[Path]
            path to the compiled model
        proveKey : Optional[Path]
            path to the proving key file of the model
        settingsPath : Optional[Path]
            path to the settings.json file

        Returns
        -------
        Union[np.ndarray, Tuple[np.ndarray, Path]]
            output of the model or, if compiledModelPath and proveKey are passed, output of the model and path to the proof

        Raises
        ------
        ValueError
            if only some of compiledModelPath, proveKey and settingsPath are passed
    """

    # Unique id used to name the temp folder holding this inference's proof artifacts
    inferenceId = str(uuid.uuid1())

    # str() for compatibility with older onnxruntime versions which
    # require a string model path rather than a PathLike
    session = InferenceSession(str(onnxPath))
    inputName = session.get_inputs()[0].name
    result = np.array(session.run(None, {inputName: data}))

    # Regular (unverified) inference - no proof artifacts requested
    if compiledModelPath is None and proveKey is None and settingsPath is None:
        return result

    # Verified inference requires all three artifacts; a partial set is a caller error
    if compiledModelPath is None or proveKey is None or settingsPath is None:
        raise ValueError(">> [Coretex] Parameters compiledModelPath, proveKey and settingsPath have to either all be passed (for verified inference) or none of them (for regular inference)")

    inferenceDir = folder_manager.createTempFolder(inferenceId)
    witnessPath = inferenceDir / "witness.json"
    inputPath = inferenceDir / "input.json"
    proofPath = inferenceDir / "proof.pf"

    # ezkl expects the input as a flat list wrapped in {"input_data": [...]}
    flattenedData = np.array(data).reshape(-1).tolist()
    inputData = dict(input_data = [flattenedData])
    with inputPath.open("w") as file:
        json.dump(inputData, file)

    # gen_witness and get_srs are async in the ezkl API, prove is not
    asyncio.run(genWitness(inputPath, compiledModelPath, witnessPath))
    asyncio.run(getSrs(settingsPath))
    ezkl.prove(
        witnessPath,
        compiledModelPath,
        proveKey,
        proofPath,
        "single"
    )

    return result, proofPath