-
-
Notifications
You must be signed in to change notification settings - Fork 483
/
Copy path: OfficialDemoGPU.cs
114 lines (89 loc) · 3.6 KB
/
OfficialDemoGPU.cs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
using Mediapipe;
using System;
using System.Linq;
using UnityEngine;
// Runs an official MediaPipe demo graph on the GPU. On Android the graph renders
// its result directly into the destination texture via a DESTINATION side packet;
// on all other platforms the "output_video" stream is polled and its GpuBuffer is
// read back into an ImageFrame for rendering on screen.
public class OfficialDemoGPU : DemoGraph {
// Name of the graph's output stream polled on non-Android platforms.
private const string outputStream = "output_video";
private OutputStreamPoller<GpuBuffer> outputStreamPoller;
private GpuBufferPacket outputPacket;
private SidePacket sidePacket;
// Side-packet name generated for the Android destination buffer (see Initialize).
private string destinationBufferName;
// Parses the graph config text and builds the CalculatorGraph.
// On Android, appends a "DESTINATION:<name>" input side packet to the LAST node of
// the config so MediaPipe writes output directly into the texture supplied later
// in StartRun(Texture). Throws InvalidOperationException if no config is set.
public override void Initialize() {
if (config == null) {
throw new InvalidOperationException("config is missing");
}
var calculatorGraphConfig = CalculatorGraphConfig.Parser.ParseFromTextFormat(config.text);
#if UNITY_ANDROID
// NOTE(review): assumes the sink node is the last node in the config — confirm
// this holds for every graph this class is expected to load.
var sinkNode = calculatorGraphConfig.Node.Last();
destinationBufferName = Tool.GetUnusedSidePacketName(calculatorGraphConfig, "destination_buffer");
sinkNode.InputSidePacket.Add($"DESTINATION:{destinationBufferName}");
#endif
graph = new CalculatorGraph(calculatorGraphConfig);
}
// This graph cannot start without a destination texture; use StartRun(Texture).
public override Status StartRun() {
throw new NotSupportedException();
}
// Starts the graph. Non-Android: installs an output-stream poller and an empty
// packet to receive results. Android: wraps the given Unity texture's native GL
// texture as a GpuBuffer and passes it to the graph as the destination side packet,
// so MediaPipe renders into it directly. Always emplaces "num_hands" = 2.
public override Status StartRun(Texture texture) {
Debug.Log("This graph is for testing official examples. You can customize the graph by editing `official_demo_gpu.txt` (default is `hand_tracking_mobile.pbtxt`)");
#if !UNITY_ANDROID
outputStreamPoller = graph.AddOutputStreamPoller<GpuBuffer>(outputStream).ConsumeValueOrDie();
outputPacket = new GpuBufferPacket();
#endif
sidePacket = new SidePacket();
sidePacket.Emplace("num_hands", new IntPacket(2));
#if UNITY_ANDROID
// Capture texture properties outside the GL-context lambda so it does not touch
// Unity objects from the GL thread.
var glTextureName = texture.GetNativeTexturePtr();
var textureWidth = texture.width;
var textureHeight = texture.height;
GpuBuffer gpuBuffer = null;
gpuHelper.RunInGlContext(() => {
var glContext = GlContext.GetCurrent();
// OnReleaseDestinationTexture is invoked by MediaPipe when it is done with the
// buffer, carrying a GL sync token to wait on.
var glTextureBuffer = new GlTextureBuffer((UInt32)glTextureName, textureWidth, textureHeight,
GpuBufferFormat.kBGRA32, OnReleaseDestinationTexture, glContext);
gpuBuffer = new GpuBuffer(glTextureBuffer);
return Status.Ok();
}).AssertOk();
outputPacket = new GpuBufferPacket(gpuBuffer);
sidePacket.Emplace(destinationBufferName, outputPacket);
#endif
return graph.StartRun(sidePacket);
}
// Renders the graph output. Android: no-op, MediaPipe has already rendered into
// the destination texture. Other platforms: polls the next output packet, reads
// the GpuBuffer's pixels back into a CPU-side ImageFrame, and draws it; falls back
// to drawing the raw input frame if polling fails.
public override void RenderOutput(WebCamScreenController screenController, TextureFrame textureFrame) {
#if UNITY_ANDROID
// MediaPipe renders to the texture directly.
return;
#else
if (!outputStreamPoller.Next(outputPacket)) {
Debug.LogWarning("Failed to fetch an output packet, rendering the input image");
screenController.DrawScreen(textureFrame);
return;
}
using (var gpuBuffer = outputPacket.Get()) {
ImageFrame imageFrame = null;
gpuHelper.RunInGlContext(() => {
var gpuBufferFormat = gpuBuffer.Format();
var sourceTexture = gpuHelper.CreateSourceTexture(gpuBuffer);
imageFrame = new ImageFrame(
gpuBufferFormat.ImageFormatFor(), gpuBuffer.Width(), gpuBuffer.Height(), ImageFrame.kGlDefaultAlignmentBoundary);
gpuHelper.BindFramebuffer(sourceTexture);
var info = gpuBufferFormat.GlTextureInfoFor(0);
// Read the framebuffer contents directly into the ImageFrame's pixel buffer.
// NOTE(review): if ReadPixels throws, sourceTexture.Release() is skipped —
// consider a try/finally; verify whether these wrappers can throw here.
Gl.ReadPixels(0, 0, sourceTexture.width, sourceTexture.height, info.glFormat, info.glType, imageFrame.MutablePixelData());
Gl.Flush();
sourceTexture.Release();
// NOTE(review): Status.Ok(false) — presumably the bool marks the status as
// not owning/garbage-collectable; confirm against the Status wrapper API.
return Status.Ok(false);
}).AssertOk();
if (imageFrame != null) { // always true
// NOTE(review): imageFrame is native-backed; assumes DrawScreen takes
// ownership and disposes it — confirm, otherwise this leaks per frame.
screenController.DrawScreen(imageFrame);
}
}
#endif
}
#if UNITY_ANDROID
// Callback invoked by MediaPipe when the destination GlTextureBuffer is released.
// Waits on the provided GL sync token so the texture is not reused before the GPU
// has finished writing to it.
static void OnReleaseDestinationTexture(UInt64 name, IntPtr tokenPtr) {
// TODO: release outputPacket
using (var token = new GlSyncPoint(tokenPtr)) {
token.Wait();
}
}
#endif
}