//======= Copyright (c) Valve Corporation, All rights reserved. ===============
//
// Purpose: Provides access to video feed and poses of tracked cameras.
//
// Usage:
// var source = SteamVR_TrackedCamera.Distorted();
// var source = SteamVR_TrackedCamera.Undistorted();
// or
// var undistorted = true; // or false
// var source = SteamVR_TrackedCamera.Source(undistorted);
//
// - Distorted feeds are the decoded images from the camera.
// - Undistorted feeds correct for the camera lens distortion (a.k.a. fisheye)
// to make straight lines straight.
//
// VideoStreamTexture objects must be symmetrically Acquired and Released to
// ensure the video stream is activated and shut down properly once there are
// no more consumers. You only need to Acquire once when starting to use a
// stream, and Release when you are done using it (as opposed to every frame).
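//
// Example (an illustrative sketch using the API defined below):
//
//   var source = SteamVR_TrackedCamera.Source(undistorted: true);
//   source.Acquire();                        // once, e.g. when enabling
//   material.mainTexture = source.texture;   // per frame, as needed
//   source.Release();                        // once, e.g. when disabling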
//
//=============================================================================

using UnityEngine;
using Valve.VR;

namespace Valve.VR
{
    public class SteamVR_TrackedCamera
    {
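        // Wraps a tracked camera's video stream as a Unity Texture2D, along
        // with the camera pose reported for the current frame.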
        public class VideoStreamTexture
        {
            public VideoStreamTexture(uint deviceIndex, bool undistorted)
            {
                this.undistorted = undistorted;
                videostream = Stream(deviceIndex);
            }

            public bool undistorted { get; private set; }
            public uint deviceIndex { get { return videostream.deviceIndex; } }
            public bool hasCamera { get { return videostream.hasCamera; } }
            public bool hasTracking { get { Update(); return header.trackedDevicePose.bPoseIsValid; } }

            public uint frameId { get { Update(); return header.nFrameSequence; } }
            public VRTextureBounds_t frameBounds { get; private set; }
            public EVRTrackedCameraFrameType frameType { get { return undistorted ? EVRTrackedCameraFrameType.Undistorted : EVRTrackedCameraFrameType.Distorted; } }

            Texture2D _texture;
            public Texture2D texture { get { Update(); return _texture; } }

            public SteamVR_Utils.RigidTransform transform { get { Update(); return new SteamVR_Utils.RigidTransform(header.trackedDevicePose.mDeviceToAbsoluteTracking); } }
            public Vector3 velocity { get { Update(); var pose = header.trackedDevicePose; return new Vector3(pose.vVelocity.v0, pose.vVelocity.v1, -pose.vVelocity.v2); } }
            public Vector3 angularVelocity { get { Update(); var pose = header.trackedDevicePose; return new Vector3(-pose.vAngularVelocity.v0, -pose.vAngularVelocity.v1, pose.vAngularVelocity.v2); } }
            public TrackedDevicePose_t GetPose() { Update(); return header.trackedDevicePose; }
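
            // Acquire/Release forward to the shared per-device VideoStream;
            // the stream starts on the first Acquire and shuts down when the
            // last consumer calls Release.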
            public ulong Acquire()
            {
                return videostream.Acquire();
            }

            public ulong Release()
            {
                var result = videostream.Release();

                if (videostream.handle == 0)
                {
                    Object.Destroy(_texture);
                    _texture = null;
                }

                return result;
            }
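
            // prevFrameCount guards Update() so the texture is refreshed at
            // most once per Unity frame; the property getters above call it
            // lazily.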
            int prevFrameCount = -1;

            void Update()
            {
                if (Time.frameCount == prevFrameCount)
                    return;

                prevFrameCount = Time.frameCount;

                if (videostream.handle == 0)
                    return;

                var vr = SteamVR.instance;
                if (vr == null)
                    return;

                var trackedCamera = OpenVR.TrackedCamera;
                if (trackedCamera == null)
                    return;

                var nativeTex = System.IntPtr.Zero;
                var deviceTexture = (_texture != null) ? _texture : new Texture2D(2, 2);
                var headerSize = (uint)System.Runtime.InteropServices.Marshal.SizeOf(header.GetType());
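
                // Grab the native texture for the latest frame via whichever
                // graphics API is active (an OpenGL texture id, or a D3D11
                // texture provided by the runtime).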
                if (vr.textureType == ETextureType.OpenGL)
                {
                    if (glTextureId != 0)
                        trackedCamera.ReleaseVideoStreamTextureGL(videostream.handle, glTextureId);

                    if (trackedCamera.GetVideoStreamTextureGL(videostream.handle, frameType, ref glTextureId, ref header, headerSize) != EVRTrackedCameraError.None)
                        return;

                    nativeTex = (System.IntPtr)glTextureId;
                }
                else if (vr.textureType == ETextureType.DirectX)
                {
                    if (trackedCamera.GetVideoStreamTextureD3D11(videostream.handle, frameType, deviceTexture.GetNativeTexturePtr(), ref nativeTex, ref header, headerSize) != EVRTrackedCameraError.None)
                        return;
                }
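
                // First valid frame: wrap the native texture in an external
                // Texture2D and cache the (v-flipped) frame bounds; on
                // subsequent frames just repoint the existing texture.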
                if (_texture == null)
                {
                    _texture = Texture2D.CreateExternalTexture((int)header.nWidth, (int)header.nHeight, TextureFormat.RGBA32, false, false, nativeTex);

                    uint width = 0, height = 0;
                    var frameBounds = new VRTextureBounds_t();
                    if (trackedCamera.GetVideoStreamTextureSize(deviceIndex, frameType, ref frameBounds, ref width, ref height) == EVRTrackedCameraError.None)
                    {
                        // Account for textures being upside-down in Unity.
                        frameBounds.vMin = 1.0f - frameBounds.vMin;
                        frameBounds.vMax = 1.0f - frameBounds.vMax;
                        this.frameBounds = frameBounds;
                    }
                }
                else
                {
                    _texture.UpdateExternalTexture(nativeTex);
                }
            }

            uint glTextureId;
            VideoStream videostream;
            CameraVideoStreamFrameHeader_t header;
        }

        #region Top level accessors.
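
        // Each accessor lazily creates and caches one VideoStreamTexture per
        // tracked device index, so repeated calls share the same instance.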
        public static VideoStreamTexture Distorted(int deviceIndex = (int)OpenVR.k_unTrackedDeviceIndex_Hmd)
        {
            if (distorted == null)
                distorted = new VideoStreamTexture[OpenVR.k_unMaxTrackedDeviceCount];
            if (distorted[deviceIndex] == null)
                distorted[deviceIndex] = new VideoStreamTexture((uint)deviceIndex, false);
            return distorted[deviceIndex];
        }

        public static VideoStreamTexture Undistorted(int deviceIndex = (int)OpenVR.k_unTrackedDeviceIndex_Hmd)
        {
            if (undistorted == null)
                undistorted = new VideoStreamTexture[OpenVR.k_unMaxTrackedDeviceCount];
            if (undistorted[deviceIndex] == null)
                undistorted[deviceIndex] = new VideoStreamTexture((uint)deviceIndex, true);
            return undistorted[deviceIndex];
        }

        public static VideoStreamTexture Source(bool undistorted, int deviceIndex = (int)OpenVR.k_unTrackedDeviceIndex_Hmd)
        {
            return undistorted ? Undistorted(deviceIndex) : Distorted(deviceIndex);
        }

        private static VideoStreamTexture[] distorted, undistorted;

        #endregion

        #region Internal class to manage lifetime of video streams (per device).

        class VideoStream
        {
            public VideoStream(uint deviceIndex)
            {
                this.deviceIndex = deviceIndex;
                var trackedCamera = OpenVR.TrackedCamera;
                if (trackedCamera != null)
                    trackedCamera.HasCamera(deviceIndex, ref _hasCamera);
            }

            public uint deviceIndex { get; private set; }

            ulong _handle;
            public ulong handle { get { return _handle; } }

            bool _hasCamera;
            public bool hasCamera { get { return _hasCamera; } }

            ulong refCount;
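
            // Reference-counted access to the OpenVR streaming service: the
            // service is started on the first successful Acquire and released
            // when the count returns to zero.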
            public ulong Acquire()
            {
                if (_handle == 0 && hasCamera)
                {
                    var trackedCamera = OpenVR.TrackedCamera;
                    if (trackedCamera != null)
                        trackedCamera.AcquireVideoStreamingService(deviceIndex, ref _handle);
                }
                return ++refCount;
            }

            public ulong Release()
            {
                if (refCount > 0 && --refCount == 0 && _handle != 0)
                {
                    var trackedCamera = OpenVR.TrackedCamera;
                    if (trackedCamera != null)
                        trackedCamera.ReleaseVideoStreamingService(_handle);
                    _handle = 0;
                }
                return refCount;
            }
        }

        static VideoStream Stream(uint deviceIndex)
        {
            if (videostreams == null)
                videostreams = new VideoStream[OpenVR.k_unMaxTrackedDeviceCount];
            if (videostreams[deviceIndex] == null)
                videostreams[deviceIndex] = new VideoStream(deviceIndex);
            return videostreams[deviceIndex];
        }

        static VideoStream[] videostreams;

        #endregion
    }
}
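
// ---------------------------------------------------------------------------
// Usage sketch (an illustrative addition, not part of the original plugin
// file): a minimal MonoBehaviour that displays the HMD camera feed on the
// attached Renderer's material. The class name and component layout are
// assumptions for the example; the SteamVR_TrackedCamera API is the one above.
// ---------------------------------------------------------------------------
namespace Valve.VR
{
    [RequireComponent(typeof(Renderer))]
    public class SteamVR_TrackedCameraFeedExample : MonoBehaviour
    {
        public bool undistorted = true;

        private SteamVR_TrackedCamera.VideoStreamTexture source;

        void OnEnable()
        {
            // Acquire once; the stream stays active until the matching Release.
            source = SteamVR_TrackedCamera.Source(undistorted);
            source.Acquire();
        }

        void OnDisable()
        {
            // Release once; the stream shuts down when no consumers remain.
            GetComponent<Renderer>().material.mainTexture = null;
            source.Release();
            source = null;
        }

        void Update()
        {
            // The texture getter refreshes the camera frame at most once per
            // Unity frame.
            var texture = source.texture;
            if (texture != null)
                GetComponent<Renderer>().material.mainTexture = texture;
        }
    }
}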