UnityGiles committed on
Commit d6bd9e7 · 1 Parent(s): 05abce4

update to inference engine

README.md CHANGED
@@ -2,26 +2,26 @@
  license: mit
  library_name: unity-sentis
  pipeline_tag: image-classification
  ---

- ## MobileNet V2 in Unity Sentis Format (Version 1.4.0-pre.2*)
- *Version 1.3.0 Sentis files are not compatible with 1.4.0 and need to be recreated/downloaded

- This is a small image classification model that works in Unity 2023. It is based on [MobileNet V2](https://arxiv.org/abs/1801.04381)

  ## How to Use
- * Create a new scene in Unity 2023
- * Install `com.unity.sentis` version `1.4.0-pre.2` from the package manager
- * Add the C# script to the Main Camera
- * Drag the `mobilenet_v2.sentis` model onto the `modelAsset` field
- * Drag the `class_desc.txt` on to the `labelsAsset` field
- * Drag one of the sample images on to the inputImage field in the inspector.
- * Press play and the result of the prediction will print to the console window.

- # Example input
- ![bee](Images/Bee.jpg)
- # Example output
- `Bee (100%)`

- ## Unity Sentis
- Unity Sentis is the inference engine for Unity 3D. You can find more about it [here](https://unity.com/products/sentis)
 
  license: mit
  library_name: unity-sentis
  pipeline_tag: image-classification
+ tags:
+ - unity-inference-engine
  ---


+ # MobileNet V2 in Unity 6 with Inference Engine
+
+ This is the [MobileNet V2](https://arxiv.org/abs/1801.04381) model, a small image classification model, running in Unity 6 with Inference Engine.

  ## How to Use

+ * Create a new scene in Unity 6;
+ * Install `com.unity.ai.inference` from the package manager;
+ * Add the `RunMobileNet.cs` script to the Main Camera;
+ * Drag the `mobilenet_v2.onnx` asset from the `models` folder into the `Model Asset` field;
+ * Drag the `class_desc.txt` asset from the `data` folder into the `Labels Asset` field;
+ * Drag an image, such as the `Bee.jpg` asset from the `images` folder, into the `Input Image` field.
+
+ ## Preview
+ Enter play mode. If everything is working correctly, the predicted class is logged to the console.

+ ## Inference Engine
+ Inference Engine is a neural network inference library for Unity. Find out more [here](https://docs.unity3d.com/Packages/com.unity.ai.inference@latest).
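For orientation, here is a condensed sketch of the inference flow that the updated `RunMobileNet.cs` (diffed below) implements with `com.unity.ai.inference`. It is illustrative only: the class name is invented, the no-argument `PeekOutput()` call is assumed to return the model's first output, and the real script additionally normalises the RGB input and appends `ReduceMax`/`ArgMax` through a `FunctionalGraph`.

```csharp
using Unity.InferenceEngine;
using UnityEngine;

// Sketch of the Inference Engine workflow used by RunMobileNet.cs (simplified).
public class MobileNetSketch : MonoBehaviour
{
    public ModelAsset modelAsset;   // models/mobilenet_v2.onnx
    public Texture2D inputImage;    // e.g. images/Bee.jpg

    Worker worker;
    Tensor<float> input = new Tensor<float>(new TensorShape(1, 3, 224, 224));

    void Start()
    {
        // Load the ONNX model and create a worker on the GPU compute backend
        var model = ModelLoader.Load(modelAsset);
        worker = new Worker(model, BackendType.GPUCompute);

        // Fill the 1x3x224x224 input tensor from the texture and run the model
        TextureConverter.ToTensor(inputImage, input);
        worker.Schedule(input);

        // Copy the (assumed first) output back to the CPU and read it
        using var output = (worker.PeekOutput() as Tensor<float>).ReadbackAndClone();
        Debug.Log(output[0]);
    }

    void OnDestroy()
    {
        input?.Dispose();
        worker?.Dispose();
    }
}
```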
RunMobileNet.cs CHANGED
@@ -1,71 +1,50 @@
- using System.Collections.Generic;
- using Unity.Sentis;
  using UnityEngine;
- using System.IO;
- using FF = Unity.Sentis.Functional;
- /*
-  * MovileNetV2 Inference Script
-  * ============================
-  *
-  * Place this script on the Main Camera
-  *
-  * Drag an image to the inputImage field
-  *
-  * When run the prediction of what the image is will output to the console window.
-  * You can modify the script to make it do something more interesting.
-  *
-  */
-

  public class RunMobileNet : MonoBehaviour
  {
-     //draw the sentis file here:
      public ModelAsset modelAsset;

-     const string modelName = "mobilenet_v2.sentis";
-
      //The image to classify here:
      public Texture2D inputImage;

      //Link class_desc.txt here:
      public TextAsset labelsAsset;

-     //All images are resized to these values to go into the model
-     const int imageHeight = 224;
-     const int imageWidth = 224;

      const BackendType backend = BackendType.GPUCompute;

-     private IWorker engine;
-     private string[] labels;

      //Used to normalise the input RGB values
-     TensorFloat mulRGB = new TensorFloat(new TensorShape(1, 3, 1, 1), new float[] { 1 / 0.229f, 1 / 0.224f, 1 / 0.225f });
-     TensorFloat shiftRGB = new TensorFloat(new TensorShape(1, 3, 1, 1), new float[] { 0.485f, 0.456f, 0.406f });

      void Start()
      {
-
          //Parse neural net labels
          labels = labelsAsset.text.Split('\n');

-         //Load model from file or asset
-         //var model = ModelLoader.Load(Path.Join(Application.streamingAssetsPath, modelName));
          var model = ModelLoader.Load(modelAsset);

          //We modify the model to normalise the input RGB values and select the highest prediction
          //probability and item number
-         var model2 = FF.Compile(
-             input =>
-             {
-                 var probability = model.Forward(NormaliseRGB(input))[0];
-                 return (FF.ReduceMax(probability, 1), FF.ArgMax(probability, 1));
-             },
-             model.inputs[0]
-         );
-
-         //Setup the engine to run the model
-         engine = WorkerFactory.CreateWorker(backend, model2);

          //Execute inference
          ExecuteML();
@@ -74,20 +53,18 @@ public class RunMobileNet : MonoBehaviour
      public void ExecuteML()
      {
          //Preprocess image for input
-         using var input = TextureConverter.ToTensor(inputImage, imageWidth, imageHeight, 3);
-
-         //Execute neural net
-         engine.Execute(input);

-         //Read output tensor
-         var probability = engine.PeekOutput("output_0") as TensorFloat;
-         var item = engine.PeekOutput("output_1") as TensorInt;
-         item.CompleteOperationsAndDownload();
-         probability.CompleteOperationsAndDownload();

          //Select the best output class and print the results
-         var ID = item[0];
-         var accuracy = probability[0];

          //The result is output to the console window
          int percent = Mathf.FloorToInt(accuracy * 100f + 0.5f);
@@ -97,16 +74,11 @@ public class RunMobileNet : MonoBehaviour
          Resources.UnloadUnusedAssets();
      }

-     //This scales and shifts the RGB values for input into the model
-     FunctionalTensor NormaliseRGB(FunctionalTensor image)
-     {
-         return (image - FunctionalTensor.FromTensor(shiftRGB)) * FunctionalTensor.FromTensor(mulRGB);
-     }
-
-     private void OnDestroy()
      {
          mulRGB?.Dispose();
          shiftRGB?.Dispose();
-         engine?.Dispose();
      }
  }
 
+ using Unity.InferenceEngine;
  using UnityEngine;

  public class RunMobileNet : MonoBehaviour
  {
      public ModelAsset modelAsset;

      //The image to classify here:
      public Texture2D inputImage;

      //Link class_desc.txt here:
      public TextAsset labelsAsset;

+     //The input tensor
+     Tensor<float> input = new Tensor<float>(new TensorShape(1, 3, 224, 224));

      const BackendType backend = BackendType.GPUCompute;

+     Worker worker;
+     string[] labels;

      //Used to normalise the input RGB values
+     Tensor<float> mulRGB = new Tensor<float>(new TensorShape(1, 3, 1, 1), new[] { 1 / 0.229f, 1 / 0.224f, 1 / 0.225f });
+     Tensor<float> shiftRGB = new Tensor<float>(new TensorShape(1, 3, 1, 1), new[] { 0.485f, 0.456f, 0.406f });

      void Start()
      {
          //Parse neural net labels
          labels = labelsAsset.text.Split('\n');

+         //Load model from asset
          var model = ModelLoader.Load(modelAsset);

          //We modify the model to normalise the input RGB values and select the highest prediction
          //probability and item number
+         var graph = new FunctionalGraph();
+         var image = graph.AddInput(model, 0);
+         var normalizedInput = (image - Functional.Constant(shiftRGB)) * Functional.Constant(mulRGB);
+         var probability = Functional.Forward(model, normalizedInput)[0];
+         var value = Functional.ReduceMax(probability, 1);
+         var index = Functional.ArgMax(probability, 1);
+         graph.AddOutput(value, "value");
+         graph.AddOutput(index, "index");
+         var model2 = graph.Compile();
+
+         //Set up the worker to run the model
+         worker = new Worker(model2, backend);

          //Execute inference
          ExecuteML();

      public void ExecuteML()
      {
          //Preprocess image for input
+         TextureConverter.ToTensor(inputImage, input);

+         //Schedule neural net
+         worker.Schedule(input);
+
+         //Read output tensors
+         using var value = (worker.PeekOutput("value") as Tensor<float>).ReadbackAndClone();
+         using var index = (worker.PeekOutput("index") as Tensor<int>).ReadbackAndClone();

          //Select the best output class and print the results
+         var accuracy = value[0];
+         var ID = index[0];

          //The result is output to the console window
          int percent = Mathf.FloorToInt(accuracy * 100f + 0.5f);

          Resources.UnloadUnusedAssets();
      }

+     void OnDestroy()
      {
+         input?.Dispose();
          mulRGB?.Dispose();
          shiftRGB?.Dispose();
+         worker?.Dispose();
      }
  }
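The most structural change in this script is how the model is edited before inference: Sentis's `FF.Compile` is replaced by a `FunctionalGraph`. Below is a minimal sketch of that pattern, reusing only calls that appear in the hunk above; the helper name and static-class wrapper are invented for illustration.

```csharp
using Unity.InferenceEngine;

// Illustrative helper: appends RGB normalisation plus ReduceMax/ArgMax
// onto a loaded classification model, as RunMobileNet.cs does above.
public static class GraphEditSketch
{
    public static Model BuildClassifierGraph(Model model, Tensor<float> shiftRGB, Tensor<float> mulRGB)
    {
        var graph = new FunctionalGraph();

        // Expose the model's first input as a functional tensor
        var image = graph.AddInput(model, 0);

        // Normalise the RGB values, then run the original model on the result
        var normalized = (image - Functional.Constant(shiftRGB)) * Functional.Constant(mulRGB);
        var probability = Functional.Forward(model, normalized)[0];

        // Publish the best probability and its class index as named outputs
        graph.AddOutput(Functional.ReduceMax(probability, 1), "value");
        graph.AddOutput(Functional.ArgMax(probability, 1), "index");

        return graph.Compile();
    }
}
```

Doing the normalisation and argmax inside the compiled graph keeps the per-frame work in `ExecuteML` down to `Schedule` plus two `PeekOutput` reads.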
class_desc.txt β†’ data/class_desc.txt RENAMED
File without changes
{Images β†’ images}/Bee.jpg RENAMED
File without changes
{Images β†’ images}/Coffee mug.jpg RENAMED
File without changes
{Images β†’ images}/Radiator.jpg RENAMED
File without changes
{Images β†’ images}/Rottweiler.jpg RENAMED
File without changes
{Images β†’ images}/Tailed frog.jpg RENAMED
File without changes
{Images β†’ images}/decoded.png RENAMED
File without changes
info.json CHANGED
@@ -3,12 +3,12 @@
  "RunMobileNet.cs"
  ],
  "models": [
- "mobilenet_v2.sentis"
  ],
  "data":[
- "class_desc.txt"
  ],
- "version":[
- "1.4.0"
  ]
  }
 
  "RunMobileNet.cs"
  ],
  "models": [
+ "models/mobilenet_v2.onnx"
  ],
  "data":[
+ "data/class_desc.txt"
  ],
+ "version": [
+ "2.2.0"
  ]
  }
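For reference, this is what the hunk leaves in `info.json` after the commit. Only the lines covered by the hunk are shown; the file's opening lines fall outside the diff, and the nesting is illustrative.

```json
    "RunMobileNet.cs"
  ],
  "models": [
    "models/mobilenet_v2.onnx"
  ],
  "data":[
    "data/class_desc.txt"
  ],
  "version": [
    "2.2.0"
  ]
}
```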
mobilenet_v2.sentis DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:907d42cf325f7d2457b8ccdd12fabb5bea882d2757c3bb5bc57042e5ec6533bc
- size 13989036
mobilenet_v2.onnx β†’ models/mobilenet_v2.onnx RENAMED
File without changes