ReFlex Library: How to add a new sensor
Table of Contents
- Table of Contents
- General approach
- The `IDepthCamera` Interface
- Example Implementation: `AzureKinectCamera`
- Registering the Sensor with `CameraManager`
General approach
- Depth sensors must implement the interface `IDepthCamera` in the namespace `ReFlex.Core.Tracking.Interfaces`
- By convention Sensor Implementations should be placed in a separate project in
Library/src/Sensor
(.NET Solution Structure: ReFlex/Sensor/) - That project should contain the references to sensor-related libraries / packages
- To preserve modularity, these references should be only added to the camera project
-
for more details: cf. IDepthCamera interface
- The implementation of the `IDepthCamera` needs to be registered with the `CameraManager` in the `TrackingServer.Model` namespace (project: `ReFlex.TrackingServer`
) - camera reference needs to be placed inside the
!NO_EXTERNAL_SENSORS
preprocessor switch. this ensures that tests can be run when sensors and related drivers/references are not available (e.g. in CI Pipeline). - The same is valid for registering the sensor in the constructor of the
CameraManager
- sensors that do not require external dependencies (e.g. software-emulator, … ) can ignore this rule
- if a sensor should be constrained to be available on a specific platform only, the check
RuntimeInformation.IsOSPlatform()
can be used - for more information, see Registering the Sensor
The IDepthCamera
Interface
Method / Property | Description |
---|---|
Id | unique identifier, used to select the sensor. (string , readonly) |
CameraType | Enumeration of different Camera types, used to distinguish between physical camera and software-emulated |
ModelDescription | description of the sensor for server frontend (string , readonly) |
State | DepthCameraState for checking current state of the sensor (readonly, should be set by the sensor itself) |
StreamParameter | description for currently active sensor mode (resolution, framerate, image format) |
StateChanged | event which should be triggered, when the camera state changes |
FrameReady | event which should be triggered, to broadcast an updated point cloud |
DepthImageReady | event which should be triggered, to broadcast the raw depth image as byte array |
Initialize() | Code for initial setup of the sensor (checking availability, etc.) |
GetPossibleConfigurations() | returns the list of available sensor modes |
EnableStream(StreamParameter parameter) | sets the sensor configuration to use when the stream is started |
StartStream() | starts tracking with the previously set configuration |
StopStream() | stops tracking |
Dispose() | cleanup code when the app is stopped (e.g. stop sensor and free resources) |
Example Implementation: AzureKinectCamera
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
/// <summary>
/// Example implementation of <see cref="IDepthCamera"/> for the Microsoft Azure Kinect sensor.
/// Registered as a plugin so the tracking server can discover it.
/// </summary>
[Plugin(PluginType = typeof(IDepthCamera))]
public class AzureKinectCamera : IDepthCamera, IDisposable
{
    // private fields

    // Bytes per depth channel: the Azure Kinect delivers 16-bit depth values.
    private const int BytesPerChannel = sizeof(ushort);

    // x / y / z channels per point.
    private const int NumChannels = 3;

    private readonly Device _device;
    private DepthCameraState _state;

    // Flag read by the background capture loop; cleared to stop streaming.
    private bool _queryDepth;

    private Transformation _transform;
    private byte[] _transformedPixels;
    private Point3[] _convertedVertices;

    // Properties

    /// <summary>Unique identifier: the device serial number (null while no device is open).</summary>
    public string Id => _device?.SerialNum;

    /// <summary>Camera type used to distinguish sensors in the server.</summary>
    public CameraType CameraType => CameraType.AzureKinect;

    /// <summary>Human-readable description shown in the server frontend.</summary>
    public string ModelDescription => "Microsoft\u00A9 Azure Kinect";

    /// <summary>
    /// Current state of the sensor. Assigning a different value raises <see cref="StateChanged"/>
    /// via <c>OnStateChanged</c>; assigning the same value is a no-op.
    /// </summary>
    public DepthCameraState State
    {
        get => _state;
        private set
        {
            if (_state == value) return;
            _state = value;
            OnStateChanged(this, _state);
        }
    }

    /// <summary>Currently selected stream configuration (set via <see cref="EnableStream"/>).</summary>
    public StreamParameter StreamParameter { get; private set; }

    // events

    /// <summary>Raised whenever <see cref="State"/> changes.</summary>
    public event EventHandler<DepthCameraState> StateChanged;

    /// <summary>Raised to broadcast the raw depth image as a byte array.</summary>
    public event EventHandler<ImageByteArray> DepthImageReady;

    /// <summary>Raised to broadcast an updated point cloud.</summary>
    public event EventHandler<DepthCameraFrame> FrameReady;

    // Constructor

    /// <summary>
    /// Opens the device and derives the initial state from the number of installed devices.
    /// NOTE(review): Device.Open() is called even when no device is installed — confirm it
    /// does not throw in that case; otherwise guard it with the count check.
    /// </summary>
    public AzureKinectCamera()
    {
        var numDevicesAvailable = Device.GetInstalledCount();
        _device = Device.Open();
        State = numDevicesAvailable > 0
            ? DepthCameraState.Connected
            : DepthCameraState.Disconnected;
    }

    // Methods

    /// <summary>Checks sensor availability; flags an error state if no device is installed.</summary>
    public void Initialize()
    {
        var numDevicesAvailable = Device.GetInstalledCount();
        if (numDevicesAvailable <= 0) {
            State = DepthCameraState.Error;
        }
    }

    /// <summary>Stores the configuration to be applied by <see cref="StartStream"/>.</summary>
    public void EnableStream(StreamParameter parameter)
    {
        StreamParameter = parameter;
    }

    /// <summary>Returns the stream configurations supported by this sensor.</summary>
    public IList<StreamParameter> GetPossibleConfigurations()
    {
        return AzureKinectStreamParameterConverter.GetSupportedConfigurations();
    }

    /// <summary>
    /// Starts the cameras with the previously selected <see cref="StreamParameter"/>,
    /// allocates the conversion buffers and launches the background capture loop.
    /// </summary>
    public void StartStream()
    {
        var deviceConfiguration = new DeviceConfiguration
        {
            CameraFPS = AzureKinectStreamParameterConverter.GetFps(StreamParameter),
            ColorResolution = ColorResolution.Off,
            DepthMode = AzureKinectStreamParameterConverter.GetDepthMode(StreamParameter)
        };
        _device.StartCameras(deviceConfiguration);
        _transform = _device.GetCalibration().CreateTransformation();
        // One 16-bit value (BytesPerChannel bytes) per channel and pixel.
        // Fixed: use the declared constant instead of the magic number 2
        // (sizeof(ushort) == 2, so the computed size is unchanged).
        ArrayUtils.InitializeArray(out _transformedPixels, StreamParameter.Width * StreamParameter.Height * NumChannels * BytesPerChannel);
        ArrayUtils.InitializeArray(out _convertedVertices, StreamParameter.Width * StreamParameter.Height);
        State = DepthCameraState.Streaming;
        _queryDepth = true;
        Task.Run(QueryDepthStream);
    }

    /// <summary>Stops the capture loop and the cameras; state returns to Connected.</summary>
    public void StopStream()
    {
        _queryDepth = false;
        if (_device == null)
            return;
        _device.StopCameras();
        State = DepthCameraState.Connected;
    }

    /// <summary>
    /// Releases the transformation and the device handle.
    /// NOTE(review): does not call StopCameras() — confirm StopStream() is always
    /// invoked before disposal, or that Device.Dispose() stops the cameras itself.
    /// </summary>
    public void Dispose()
    {
        _transform?.Dispose();
        _device?.Dispose();
    }

    // ... implementation details omitted
}
Registering the Sensor with CameraManager
To make the camera available in the server, the module must be registered in the CameraManager
of ReFlex.TrackingServer
.
Example Code:
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
using NLog;
using ReFlex.Core.Tracking.Interfaces;
using ReFlex.Sensor.EmulatorModule;
#if !NO_EXTERNAL_SENSORS
using System.Runtime.InteropServices;
using ReFlex.Sensor.AzureKinectModule;
using ReFlex.Sensor.Kinect2Module;
using ReFlex.Sensor.RealSenseD435Module;
using ReFlex.Sensor.RealSenseL515Module;
using ReFlex.Sensor.RealSenseR2Module;
// place reference to new camera module here
#endif
namespace TrackingServer.Model
{
public class CameraManager
// ...
// Constructor: creates the camera list and registers the available sensor
// implementations. Registrations with external driver dependencies live inside
// the NO_EXTERNAL_SENSORS preprocessor switch so tests/CI can build and run
// when the sensors and their native drivers are not installed.
public CameraManager()
{
_depthCameras = new List<IDepthCamera>();
#if !NO_EXTERNAL_SENSORS
try
{
// register your new camera:
var myCustomCam = new CustomDepthSensor();
_depthCameras.Add(myCustomCam);
Logger.Info($"Successfully loaded {myCustomCam.ModelDescription} camera.");
}
catch (Exception exception)
{
// construction may fail when hardware/drivers are missing — log and
// continue so the remaining sensors can still be registered
Logger.Error(exception);
}
// ...
#endif
}
}