/*******************************************************************************
* *
* PrimeSense NiTE 2.0 *
* Copyright (C) 2012 PrimeSense Ltd. *
* *
*******************************************************************************/
#ifndef _NITE_H_
#define _NITE_H_
#include "NiteCAPI.h"
#include "OpenNI.h"
// Summary of use cases, modules, facades
namespace nite {
#include "NiteEnums.h"
// General
_NITE_DECLARE_VERSION(Version);
/**
Encapsulates a single point in 3D space, storing the x/y/z coordinates as floating point numbers.
Can also be used to represent a three dimensional vector.
*/
class Point3f : public NitePoint3f
{
public:
/**
Default Constructor, creates a point with all three coordinate values set to 0.0
*/
Point3f()
{
x = y = z = 0.0f;
}
/**
Constructor. Creates a point from three given coordinates.
@param [in] x X coordinate of point to be created
@param [in] y Y coordinate of point to be created
@param [in] z Z coordinate of point to be created
*/
Point3f(float x, float y, float z)
{
this->set(x, y, z);
}
/**
Copy Constructor. Creates a new point that has the same coordinates as an existing point.
@param [in] other An existing point to be copied.
*/
Point3f(const Point3f& other)
{
*this = other;
}
/**
Setter function for all coordinates of the point.
@param [in] x Desired new X coordinate of the point.
@param [in] y Desired new Y coordinate of the point.
@param [in] z Desired new Z coordinate of the point.
*/
void set(float x, float y, float z)
{
this->x = x;
this->y = y;
this->z = z;
}
/**
Assignment operator. Sets all coordinates of the point on the left to be equal to the
coordinate values of the point on the right.
@param [in] other Point to copy coordinates from
@returns Reference to this point, containing a copy of the input point's coordinates.
*/
Point3f& operator=(const Point3f& other)
{
set(other.x, other.y, other.z);
return *this;
}
/**
Equality operator. Tests two points to determine whether they have matching coordinate values.
@param [in] other One of the points to be tested against
@returns True if all three coordinate values of left point match all three coordinate values of right point.
*/
bool operator==(const Point3f& other) const
{
return x == other.x && y == other.y && z == other.z;
}
/**
Inequality operator. Tests two points to see if they are different.
@param [in] other One of two points to test
@returns True if either x, y, or z coordinate of first point differs from the matching coordinate of the second point.
*/
bool operator!=(const Point3f& other) const
{
return !operator==(other);
}
};
/**
Encapsulates a geometrical plane. The plane is defined by a single point on that plane, and a normal vector to the plane.
The normal vector is represented as a Point3f.
*/
class Plane : public NitePlane
{
public:
/**
Default constructor. Creates a plane that passes through the origin, and has a normal vector set to (0,0,0). Note
that the default normal vector will make the plane generated by this function invalid for many mathematical operations,
as it is a degenerate vector and has no direction.
*/
Plane()
{
this->point = Point3f();
this->normal = Point3f();
}
/**
Constructor. Creates a new plane, given a single point on that plane and a normal vector.
@param [in] point Any point on the plane
@param [in] normal The normal vector of the plane, represented as a Point3f
*/
Plane(const Point3f& point, const Point3f& normal)
{
this->point = point;
this->normal = normal;
}
};
/**
Represents a Quaternion. The Quaternion is stored as four floating point numbers. (The quaternions
are a number system that extends the complex number system from two dimensions to four.)
*/
class Quaternion : public NiteQuaternion
{
public:
/**
Default Constructor. Creates a new Quaternion with the value of all four components
set to zero.
*/
Quaternion()
{
x = y = z = w = 0;
}
/**
Constructor. Creates a new Quaternion with each component specified.
@param [in] w Desired first quaternion component value.
@param [in] x Desired second quaternion component value.
@param [in] y Desired third quaternion component value.
@param [in] z Desired fourth quaternion component value.
*/
Quaternion(float w, float x, float y, float z)
{
this->x = x;
this->y = y;
this->z = z;
this->w = w;
}
};
/**
Represents a box in three dimensional space. The box is represented as two points,
containing respectively its minimum and maximum x, y and z coordinate values.
*/
class BoundingBox : public NiteBoundingBox
{
public:
/**
Default constructor. Does nothing.
*/
BoundingBox()
{}
/**
Constructor. Creates a bounding box from two points in space, which represent the
minimum and maximum values of its coordinates.
@param [in] min A point containing the minimum x, y and z values of the bounding box.
@param [in] max A point containing the maximum x, y and z values of the bounding box.
*/
BoundingBox(const Point3f& min, const Point3f& max)
{
this->min = min;
this->max = max;
}
};
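/*
 Illustrative sketch (not part of the original header): constructing the basic
 geometric types defined above. The values used here are arbitrary.

	nite::Point3f point(100.0f, 0.0f, 1500.0f);           // x, y, z coordinates
	nite::Point3f normal(0.0f, 1.0f, 0.0f);               // an "up" vector
	nite::Plane floorGuess(point, normal);                // plane through point with that normal
	nite::BoundingBox box(nite::Point3f(-200.0f, -200.0f, 1000.0f),
	                      nite::Point3f( 200.0f,  200.0f, 2000.0f));
*/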
/**
Provides a simple array class used throughout the API. Wraps a primitive array
of objects, holding the elements and their count.
*/
template <class T>
class Array
{
public:
/**
Default constructor. Creates an empty Array and sets the element count to zero.
*/
Array() : m_size(0), m_data(NULL) {}
/**
Setter function for data. Causes this Array to wrap an existing primitive array
of the specified type.
@param [in] size Number of objects in the list
@param [in] data Pointer to the first object in the list
*/
void setData(int size, T* data) {m_data = data; m_size = size;}
/**
Implements the array indexing operator for the Array class
*/
const T& operator[](int index) const {return m_data[index];}
/**
Getter function for the Array size.
@returns Current number of elements in the Array.
*/
int getSize() const {return m_size;}
/**
Check if there are any elements in the Array
@returns true if there are elements in the Array, false otherwise.
*/
bool isEmpty() const {return m_size == 0;}
private:
Array(const Array&);
Array& operator=(const Array&);
int m_size;
T* m_data;
};
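/*
 Illustrative sketch (not part of the original header): a typical read-only pass
 over an Array, for example the user list returned by
 UserTrackerFrameRef::getUsers(). The "frame" variable is an assumption here: a
 valid UserTrackerFrameRef previously obtained from UserTracker::readFrame().

	const nite::Array<nite::UserData>& users = frame.getUsers();
	for (int i = 0; i < users.getSize(); ++i)
	{
		const nite::UserData& user = users[i];    // operator[] gives read-only access
		printf("User %d\n", user.getId());
	}
*/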
// UserTracker
/**
UserId is a persistent ID for a specific user detected by the UserTracker algorithm.
While the user is known, it will have the same ID.
*/
typedef short int UserId;
/**
This class wraps the output from the NiTE Pose Detection algorithms.
NiTE provides the ability for applications to detect whether a user is in a specific pose.
This is most commonly used for skeleton calibration. The algorithm passes through a number
of different states during the process of Pose Detection.
For each user being analyzed, this class stores the pose being detected and the state of
that detection.
*/
class PoseData : protected NitePoseData
{
public:
/**
Get the type of this pose.
@returns The type of pose being detected, formatted as a @ref PoseType.
*/
PoseType getType() const {return (PoseType)type;}
/**
Checks if the user is currently in the pose indicated by the @ref PoseData::getType() function.
@returns True if the user is in the pose indicated, False otherwise.
*/
bool isHeld() const {return (state & NITE_POSE_STATE_IN_POSE) != 0;}
/**
Checks whether this is the first frame in which the user has assumed the pose indicated by @ref PoseData::getType()
@returns True if the user is in the pose indicated AND the user was not in that pose on the previous frame. Returns false otherwise.
*/
bool isEntered() const {return (state & NITE_POSE_STATE_ENTER) != 0;}
/**
Check if the user has exited the pose in this frame.
@returns True if the user is NOT currently in the pose indicated, but was in that pose in the previous frame. Returns false otherwise.
*/
bool isExited() const {return (state & NITE_POSE_STATE_EXIT) != 0;}
};
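/*
 Illustrative sketch (not part of the original header): reacting to the three
 pose states exposed above. "user" is assumed to be a nite::UserData taken from
 the current frame, with detection of nite::POSE_PSI (from NiteEnums.h) already
 started via UserTracker::startPoseDetection().

	const nite::PoseData& pose = user.getPose(nite::POSE_PSI);
	if (pose.isEntered())
		printf("User %d just struck the Psi pose\n", user.getId());
	else if (pose.isHeld())
		printf("User %d is still holding the Psi pose\n", user.getId());
	else if (pose.isExited())
		printf("User %d just left the Psi pose\n", user.getId());
*/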
/**
This class stores the output from the @ref nite::UserTracker algorithm.
The basic purpose of the User Tracker algorithm is to analyze a depth frame and to locate all users in
the scene, to indicate which pixels belong to which user, and which pixels belong to the background.
The output is in the form of a two dimensional array with the same dimensions
as the depth frame that was used as the input to UserTracker. Each element of the
array corresponds to a single pixel from the depth map, and indicates the UserId
of the user which occupies that pixel.
The UserId value 0 is used to indicate pixels that contain no users (ie background).
@see UserTracker for more information about this algorithm.
*/
class UserMap : private NiteUserMap
{
public:
/**
Returns a direct pointer to the array containing the UserMap
data. The array elements have type @ref UserId, and the array's size in bytes is equal to
getHeight() * getStride(). If your application needs to calculate the size directly, see
@ref UserMap::getHeight() and @ref UserMap::getStride().
@returns Pointer directly to the array data
*/
const UserId* getPixels() const {return pixels;}
/**
Gets the width of the UserMap array. This value will match the X resolution of the
depth map that was used to create the UserMap.
@returns Width of the UserMap, measured in pixels.
*/
int getWidth() const {return width;}
/**
Gets the height of the UserMap array. This value will match the Y resolution of the
depth map that was used to create the UserMap. This value can be used, along with the
value returned by @ref getStride() to determine the size of the array in bytes.
@returns Height of the UserMap, measured in pixels.
*/
int getHeight() const {return height;}
/**
This function returns the stride of the array returned by getPixels. The stride of the
array is defined as the width of a single row of that array, measured in bytes. This value
can be multiplied by the value returned by @ref getHeight() to calculate the size of the array.
@returns Stride of the array, measured in bytes.
*/
int getStride() const {return stride;}
friend class UserTrackerFrameRef;
};
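/*
 Illustrative sketch (not part of the original header): scanning the UserMap for
 occupied pixels. "frame" is assumed to be a valid UserTrackerFrameRef. Because
 getStride() is measured in bytes, the row pointer below is advanced in bytes
 rather than in UserId elements.

	const nite::UserMap& userMap = frame.getUserMap();
	for (int y = 0; y < userMap.getHeight(); ++y)
	{
		const nite::UserId* row = (const nite::UserId*)
			((const char*)userMap.getPixels() + y * userMap.getStride());
		for (int x = 0; x < userMap.getWidth(); ++x)
		{
			if (row[x] != 0)       // 0 marks background pixels
			{
				// pixel (x, y) belongs to the user whose UserId is row[x]
			}
		}
	}
*/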
/**
This class provides a data structure to store the output of the Skeleton algorithm.
The basic purpose of the skeleton algorithm is to locate each of a user's joints, and
to estimate the position and orientation of each joint in space. The output from the Skeleton algorithm is
stored as a series of SkeletonJoint objects.
@see nite::Skeleton for more information about the Skeleton algorithm.
*/
class SkeletonJoint : private NiteSkeletonJoint
{
public:
/**
Gets the type of the joint represented by this particular object.
@returns Type of the joint.
@see JointType enumeration for a list of all possible joint types.
*/
JointType getType() const {return (JointType)jointType;}
/**
Get the current position of the joint. This will be represented as a point in space.
The coordinates used are "real world" coordinates. See the OpenNI2 documentation for
more information on coordinate systems.
A confidence value is also available via the getPositionConfidence() function to
indicate how sure the algorithm is of this value.
@returns Position in space of the joint.
*/
const Point3f& getPosition() const {return (Point3f&)position;}
/**
Indicates how sure the NiTE skeleton algorithm is about the position data stored about
this joint. The value is between 0 and 1, with increasing value indicating
increasing confidence.
@returns Confidence value between 0 and 1
*/
float getPositionConfidence() const {return positionConfidence;}
/**
Get the current orientation of the joint represented by this object. This orientation
will be represented by a Quaternion.
A confidence value is also available via the getOrientationConfidence() function to
indicate how sure the algorithm is of this value.
@returns Joint orientation
*/
const Quaternion& getOrientation() const {return (Quaternion&)orientation;}
/**
Indicates how sure the NiTE skeleton algorithm is about the orientation data stored about
this joint. The value is between 0 and 1, with increasing value indicating
increasing confidence.
@returns Confidence value between 0 and 1
*/
float getOrientationConfidence() const {return orientationConfidence;}
};
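/*
 Illustrative sketch (not part of the original header): gating on the joint
 confidence before using a position, as suggested above. "skeleton" is assumed
 to be a nite::Skeleton that is currently being tracked; JOINT_HEAD comes from
 the JointType enumeration in NiteEnums.h.

	const nite::SkeletonJoint& head = skeleton.getJoint(nite::JOINT_HEAD);
	if (head.getPositionConfidence() > 0.5f)
	{
		const nite::Point3f& pos = head.getPosition();   // real-world coordinates
		printf("Head at (%.1f, %.1f, %.1f)\n", pos.x, pos.y, pos.z);
	}
*/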
/**
This is the main class for the skeleton algorithm.
The purpose of the skeleton algorithm is to analyze a user outline supplied by the User Tracker
algorithm, and to locate the position of that user's joints in space (eg knees, elbows, head, etc).
The orientation of the user's joints is also calculated. Where joints are not visible, the algorithm
will make a best guess about the joint. For all data calculated, confidence values are also
created to help an application understand if the algorithm is sure about the data, or if it is "guessing".
NiTE offers two types of skeleton algorithms. One requires the user to assume a specific "calibration pose"
before calibration can be attempted. The other does not require a specific pose from the user, but will require
a few seconds to settle down as it learns a user's body shape. The algorithm used sometimes depends on
the platform being used -- for example, the ARM version of NiTE currently only offers the version that
requires a calibration pose.
All skeleton joint positions are given in "real world" data coordinates, using the same conventions as
OpenNI 2.0. See the online documentation for OpenNI 2.0 for more information on the coordinate systems
used with the skeleton.
@see SkeletonJoint for the output data format of the skeleton.
@see JointType enumeration for a list of all joints detected by the skeleton.
@see UserTracker to understand where the input data to the skeleton comes from.
@see SkeletonState enumeration for a list of possible skeleton states.
*/
class Skeleton : private NiteSkeleton
{
public:
/**
Get a specific joint of the skeleton. The object returned by this function can be
queried to determine position, orientation, and confidence values for the joint.
@param [in] type The type of the joint to retrieve from the skeleton.
@returns Pointer to a SkeletonJoint containing the requested data.
@see JointType for a list of possible joints to request from the skeleton.
*/
const SkeletonJoint& getJoint(JointType type) const {return (SkeletonJoint&)joints[type];}
/**
Get the state of the skeleton. This is useful for determining whether an active skeleton
exists, and whether it is calibrating. Some of the states are also error conditions that attempt
to provide a hint about why the skeleton is not calibrating. This can be useful for providing feedback
to a user about where they need to stand, assuming a calibration pose, etc.
@return Current state of the skeleton.
@see @ref SkeletonState enumeration for a list of possible states
*/
SkeletonState getState() const {return (SkeletonState)state;}
};
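/*
 Illustrative sketch (not part of the original header): a typical state check
 before reading joints. SKELETON_TRACKED and SKELETON_NONE are assumed to come
 from the SkeletonState enumeration in NiteEnums.h; "user" is a nite::UserData.

	const nite::Skeleton& skeleton = user.getSkeleton();
	switch (skeleton.getState())
	{
	case nite::SKELETON_TRACKED:
		// Joints are valid; read them as shown in the SkeletonJoint sketch above.
		break;
	case nite::SKELETON_NONE:
		// Tracking was never started (or has been stopped) for this user.
		break;
	default:
		// Calibrating, or a calibration error state; report it to the user.
		break;
	}
*/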
/**
Provides the current information available about a specific user detected by the UserTracker.
Note that some data is provided in depth ("projective") coordinates, and other data is provided
in world ("real world") coordinates. See the OpenNI 2.0 documentation for information on the two
coordinate systems used and how to convert between them.
@see UserTracker for more information on how this data is generated.
@see Skeleton for more information on a user's skeleton
@see UserMap for information on locating a specific user in the scene.
*/
class UserData : private NiteUserData
{
public:
/**
Get the ID of the user. This ID is persistent -- it will not change as long as the user
is being continuously tracked.
@returns @ref UserId of the user being tracked.
*/
UserId getId() const {return id;}
/**
Get a bounding box around the user. This provides a minimum volume cube that completely
contains the user. Coordinates are "projective", so they are suitable for direct use with
a depthmap.
@returns @ref BoundingBox that surrounds the user in the @ref UserMap.
*/
const BoundingBox& getBoundingBox() const {return (const BoundingBox&)boundingBox;}
/**
Find the geometric center of mass of the user. This value is given in "real world" coordinates,
so it must be converted before being superimposed over a raw depthmap. It is suitable for direct
use with skeleton and hand point output.
@returns @ref Point3f indicating position in space of the user.
*/
const Point3f& getCenterOfMass() const {return (const Point3f&)centerOfMass;}
/**
Checks whether this user has been newly detected.
@returns True if this is the first frame this user appears in, False otherwise.
*/
bool isNew() const {return (state & NITE_USER_STATE_NEW) != 0;}
/**
Check if the user is currently visible in the field of view.
@returns True if the user is visible and detected, false otherwise.
*/
bool isVisible() const {return (state & NITE_USER_STATE_VISIBLE) != 0;}
/**
Check if the user is lost. This will happen once, in the first frame in which the user was declared lost.
This user will not be provided in future frames, and his UserId may be assigned to a new user that appears
in view.
@returns True if the user is no longer being tracked, False otherwise.
*/
bool isLost() const {return (state & NITE_USER_STATE_LOST) != 0;}
/**
Get the full skeleton of this user. This will only be available if Skeleton tracking has been
enabled for this user, and the skeleton is calibrated and being actively tracked.
@returns @ref Skeleton of the user.
@see UserTracker for information on enabling skeleton tracking.
*/
const Skeleton& getSkeleton() const {return (const Skeleton&)skeleton;}
/**
Get all information about a specific pose for this user. This will only be available if pose detection is
enabled on this user for the @ref PoseType specified.
@param [in] type The @ref PoseType to retrieve data for.
@returns @ref PoseData for this pose type and this user.
@see UserTracker for more information on enabling pose detection.
*/
const PoseData& getPose(PoseType type) const {return (const PoseData&)poses[type];}
};
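/*
 Illustrative sketch (not part of the original header): the usual per-frame
 bookkeeping driven by the state flags above. "frame" and "userTracker" are
 assumptions: a valid UserTrackerFrameRef and the nite::UserTracker it came from.

	for (int i = 0; i < frame.getUsers().getSize(); ++i)
	{
		const nite::UserData& user = frame.getUsers()[i];
		if (user.isNew())
			userTracker.startSkeletonTracking(user.getId());   // first frame this user appears
		else if (user.isLost())
			printf("Lost user %d\n", user.getId());            // last frame this ID is reported
	}
*/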
/**
This class stores a snapshot of a single frame of output from the UserTracker algorithm. This frame will
correspond to a single input depth frame. It holds all info on users, as well as information on the floor plane.
@see @ref UserData For detailed info on specific users
@see @ref Skeleton For user skeleton data
@see @ref UserMap For user positions in the frame
@see @ref UserTracker For information on starting the user tracker algorithms to detect all of this data.
@see @ref HandTrackerFrameRef for the equivalent information when hand tracking.
*/
class UserTrackerFrameRef
{
public:
/**
Default Constructor. Creates an empty @ref UserTrackerFrameRef.
*/
UserTrackerFrameRef() : m_pFrame(NULL), m_userTrackerHandle(NULL)
{}
/**
Destructor. Frees the resources used by a @ref UserTrackerFrameRef.
*/
~UserTrackerFrameRef()
{
release();
}
/**
Copy Constructor. Creates a new @ref UserTrackerFrameRef identical to an
existing one.
*/
UserTrackerFrameRef(const UserTrackerFrameRef& other) : m_pFrame(NULL)
{
*this = other;
}
/**
Implements the assignment operator for the @ref UserTrackerFrameRef.
@param[in] other A UserTrackerFrameRef to be assigned to this one.
*/
UserTrackerFrameRef& operator=(const UserTrackerFrameRef& other)
{
setReference(other.m_userTrackerHandle, other.m_pFrame);
niteUserTrackerFrameAddRef(m_userTrackerHandle, m_pFrame);
return *this;
}
/**
Indicates whether this @ref UserTrackerFrameRef points to valid frame data.
When first constructed, the UserTrackerFrameRef will be invalid until assigned
a frame handle.
@returns True if this object contains valid data, False otherwise.
*/
bool isValid() const
{
return m_pFrame != NULL;
}
/**
Properly releases all resources used by this object. This will be automatically
called by the destructor, but it is good practice to call this as soon as the object
is no longer required. Calling this function multiple times on the same object is
safe.
*/
void release()
{
if (m_pFrame != NULL)
{
niteUserTrackerFrameRelease(m_userTrackerHandle, m_pFrame);
}
m_pFrame = NULL;
m_userTrackerHandle = NULL;
}
/**
Provides access to the @ref UserData object for a specific user, indexed by @ref UserId.
@param [in] id The @ref UserId of the user you would like data from.
@returns Pointer to the corresponding @ref UserData object.
@see UserData for the information contained in this object.
@see UserMap if what you really want is to know where in the image the user is.
*/
const UserData* getUserById(UserId id) const
{
for (int i = 0; i < m_users.getSize(); ++i)
{
if (m_users[i].getId() == id)
{
return &m_users[i];
}
}
return NULL;
}
/**
Get an Array of all the users available in this frame.
@returns Array of @ref UserData with one entry for each user in this frame.
*/
const Array<UserData>& getUsers() const {return m_users;}
/**
NiTE generates a confidence value between 0 and 1 for the floor plane calculation.
Lower values indicate less confidence.
@returns Value between 0 and 1 indicating confidence in the accuracy of the floor plane
@see getFloor() to obtain the actual plane.
*/
float getFloorConfidence() const {return m_pFrame->floorConfidence;}
/**
Obtains an estimate of the floor plane of the scene currently in view.
@returns @ref Plane that indicates a best guess of where the floor of the scene is.
@see getFloorConfidence() for a confidence value in this calculation.
*/
const Plane& getFloor() const {return (const Plane&)m_pFrame->floor;}
/**
Get the raw depth frame that originated this output. Each UserTracker frame is generated
to correspond with a single depth frame. This function gives you access to the raw frame data.
See the OpenNI 2.0 documentation for information on manipulating VideoFrameRef data.
@returns VideoFrameRef with the raw depth that generated this frame.
*/
openni::VideoFrameRef getDepthFrame() {return m_depthFrame;}
/**
Get the segmentation of the scene. The UserTracker algorithm locates each user in the field of view,
and figures out which pixels correspond to which user, as well as which pixels correspond to the
background (ie, not to any user).
@returns @ref UserMap showing which pixels are occupied by what users.
*/
const UserMap& getUserMap() const {return static_cast<const UserMap&>(m_pFrame->userMap);}
/**
Get the timestamp in which this frame was processed. See the OpenNI 2.0 documentation for more
information on how OpenNI assigns time stamps to depth frames.
@returns Timestamp in microseconds
*/
uint64_t getTimestamp() const {return m_pFrame->timestamp;}
/**
Gets the frame index of the raw depth frame that generated this UserTracker frame. Frame
indexes are consecutive integer numbers assigned to frames by OpenNI. See the OpenNI
documentation for more information.
@returns FrameIndex of the depth frame used to generate this User Tracker frame.
*/
int getFrameIndex() const {return m_pFrame->frameIndex;}
private:
friend class User;
friend class UserTracker;
Array<UserData> m_users;
void setReference(NiteUserTrackerHandle userTrackerHandle, NiteUserTrackerFrame* pFrame)
{
release();
m_userTrackerHandle = userTrackerHandle;
m_pFrame = pFrame;
m_depthFrame._setFrame(pFrame->pDepthFrame);
m_users.setData(m_pFrame->userCount, (UserData*)m_pFrame->pUser);
}
NiteUserTrackerFrame* m_pFrame;
NiteUserTrackerHandle m_userTrackerHandle;
openni::VideoFrameRef m_depthFrame;
};
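/*
 Illustrative sketch (not part of the original header): pulling one frame of
 user-tracker output and inspecting it. The static initialization mentioned
 further below (nite::NiTE::initialize() in NiTE 2.x) is assumed to have
 succeeded already.

	nite::UserTracker userTracker;
	if (userTracker.create() != nite::STATUS_OK)
		return;                                      // no depth-capable device available

	nite::UserTrackerFrameRef frame;
	if (userTracker.readFrame(&frame) == nite::STATUS_OK && frame.isValid())
	{
		printf("frame %d, %d user(s), floor confidence %.2f\n",
			frame.getFrameIndex(),
			frame.getUsers().getSize(),
			frame.getFloorConfidence());
	}
	frame.release();                                 // also happens in the destructor
*/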
/**
This is the main object of the User Tracker algorithm. It provides access to one half of the
algorithms provided by NiTE. Scene segmentation, skeleton, floor plane detection, and pose
detection are all provided by this class.
The first purpose of the User Tracker algorithm is to find all of the active users in a specific scene.
It individually tracks each human it finds, and provides the means to separate their outlines from
each other and from the background. Once the scene has been segmented, the User Tracker is also used to initiate Skeleton
Tracking and Pose Detection algorithms.
Each user is provided an ID as they are detected. The user ID remains constant as long as the
user remains in the frame. If a user leaves the field of view of the camera, or tracking of that
user is otherwise lost, the user may have a different ID when he is detected again. There is
currently no mechanism that provides persistent recognition of individuals when they are not being
actively tracked. If this functionality is desired, it will need to be implemented at the
application level.
A listener class is provided to allow event based interaction with this algorithm.
@see UserMap for the output format of the User Tracker algorithm
@see UserData for additional data output by this format
@see Skeleton if you are also interested in tracking a user's skeleton
@see NiTE for a couple of static functions that must be run before User Tracker can be used
@see @ref HandTracker for Gesture and Hand tracking algorithms.
*/
class UserTracker
{
public:
/**
This is a listener class that is used to react to events generated by the @ref UserTracker class.
To use this class, you must derive a class from it that implements the @ref onNewFrame() function. This
is the function that will be called when an event is generated.
Create a new instance of your derived class. Then, use the @ref UserTracker::addNewFrameListener()
function to add the listener to the @ref UserTracker. When that @ref UserTracker generates an onNewFrame event,
the specified callback function will be called.
The onNewFrame event is currently the only event type that this listener is designed to work with.
@see @ref UserTracker for the source of this listener's events.
*/
class NewFrameListener
{
public:
/**
Default Constructor. Creates a new object of this type and configures it to correctly receive
events.
*/
NewFrameListener() : m_pUserTracker(NULL)
{
m_userTrackerCallbacks.readyForNextFrame = newFrameCallback;
}
/**
This is the callback function for the event. It should be implemented in a class derived from NewFrameListener.
This function will automatically be called when the OnNewFrame event is triggered.
@param [in] A reference to the UserTracker that triggered the event is provided.
*/
virtual void onNewFrame(UserTracker&) = 0;
private:
NiteUserTrackerCallbacks m_userTrackerCallbacks;
NiteUserTrackerCallbacks& getCallbacks() {return m_userTrackerCallbacks;}
static void ONI_CALLBACK_TYPE newFrameCallback(void* pCookie)
{
NewFrameListener* pListener = (NewFrameListener*)pCookie;
pListener->onNewFrame(*pListener->m_pUserTracker);
}
friend class UserTracker;
void setUserTracker(UserTracker* pUserTracker)
{
m_pUserTracker = pUserTracker;
}
UserTracker* m_pUserTracker;
};
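/*
 Illustrative sketch (not part of the original header): the derivation pattern
 described above. The class and variable names here are examples, not part of
 the NiTE API.

	class MyListener : public nite::UserTracker::NewFrameListener
	{
	public:
		virtual void onNewFrame(nite::UserTracker& tracker)
		{
			nite::UserTrackerFrameRef frame;
			if (tracker.readFrame(&frame) == nite::STATUS_OK)
			{
				// Process the frame here.
			}
		}
	};

	// Registration (see addNewFrameListener() further below):
	//     MyListener listener;
	//     userTracker.addNewFrameListener(&listener);
*/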
/**
Default constructor. Creates an empty @ref UserTracker with a NULL handle. This object will not be useful
until the @ref create() function is called.
@see @ref UserTracker::create() for a function to create and activate the algorithm.
@see @ref UserTracker::isValid() to determine whether @ref create() has already been called.
*/
UserTracker() : m_userTrackerHandle(NULL)
{}
/**
Destructor. Automatically calls the provided @ref destroy() function.
*/
~UserTracker()
{
destroy();
}
/**
Creates and initializes an empty User Tracker. This function should be the first one called when
a new UserTracker object is constructed.
An OpenNI device with depth capabilities is required for this algorithm to work. See the OpenNI 2.0
documentation for more information about using an OpenNI 2.0 compliant hardware device and creating
a Device object.
@param [in] pDevice A pointer to an initialized OpenNI 2.0 Device object that provides depth streams.
@returns A status code to indicate success/failure. Since this relies on an external hardware
device, it is important for applications to check this value.
@see Status enumeration for a list of all possible status values generated by this call.
*/
Status create(openni::Device* pDevice = NULL)
{
if (isValid())
{
// tracker already active
return STATUS_OUT_OF_FLOW;
}
if (pDevice == NULL)
{
return (Status)niteInitializeUserTracker(&m_userTrackerHandle);
}
return (Status)niteInitializeUserTrackerByDevice(pDevice, &m_userTrackerHandle);
}
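/*
 Illustrative sketch (not part of the original header): the creation sequence
 with the status check the documentation above recommends. OpenNI and NiTE
 initialization (openni::OpenNI::initialize() and the static functions mentioned
 elsewhere in this header) are assumed to have been called first.

	nite::UserTracker userTracker;
	nite::Status rc = userTracker.create();   // NULL device: let NiTE open one itself
	if (rc != nite::STATUS_OK)
	{
		printf("UserTracker::create() failed (%d)\n", (int)rc);
		return;
	}
*/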
/**
Shuts down the user tracker and releases all resources used by it.
This is the opposite of create(). This function is called automatically
by the destructor in the current implementation, but it is good practice to run it manually when the algorithm
is no longer required. Running this function more than once is safe -- it simply exits if called on a
non-valid UserTracker.
*/
void destroy()
{
if (isValid())
{
niteShutdownUserTracker(m_userTrackerHandle);
m_userTrackerHandle = NULL;
}
}
/**
Gets the next snapshot of the algorithm. This causes all data to be generated for the next frame of the
algorithm -- algorithm frames correspond to the input depth frames used to generate them.
@param pFrame [out] A pointer that will be set to point to the next frame of data.
@returns Status code indicating whether this operation was successful.
*/
Status readFrame(UserTrackerFrameRef* pFrame)
{
NiteUserTrackerFrame *pNiteFrame = NULL;
Status rc = (Status)niteReadUserTrackerFrame(m_userTrackerHandle, &pNiteFrame);
pFrame->setReference(m_userTrackerHandle, pNiteFrame);
return rc;
}
/**
Indicates whether the UserTracker is valid.
When a new UserTracker is first constructed, this function will indicate that it is invalid (ie return False). Once
the create() function has been successfully called, then this function will return True. If the destroy() function
is called, this function will again indicate invalid.
It is safe to run create() and destroy() without calling this function -- both of those functions already check this
value and return without doing anything if no action is required.
@returns True if the UserTracker object is correctly initialized, False otherwise.
@see create() function -- causes the UserTracker to become initialized.
@see destroy() function -- causes the UserTracker to become uninitialized.
*/
bool isValid() const
{
return m_userTrackerHandle != NULL;
}
/**
Control the smoothing factor of the skeleton joints. Factor should be between 0 (no smoothing at all) and 1 (no movement at all).
Experimenting with this factor should allow you to fine tune the skeleton performance. Higher values will produce smoother operation
of the skeleton, but may make the skeleton feel less responsive to the user.
@param [in] factor The smoothing factor.
@returns Status code indicating success or failure of this operation.
*/
Status setSkeletonSmoothingFactor(float factor)
{
return (Status)niteSetSkeletonSmoothing(m_userTrackerHandle, factor);
}
/**
Queries the current skeleton smoothing factor.
@returns Current skeleton smoothing factor.
@see setSkeletonSmoothingFactor for more information on the smoothing factor, and the means to change it.
*/
float getSkeletonSmoothingFactor() const
{
float factor;
Status rc = (Status)niteGetSkeletonSmoothing(m_userTrackerHandle, &factor);
if (rc != STATUS_OK)
{
factor = 0;
}
return factor;
}
/**
Requests that the Skeleton algorithm starts tracking a specific user. Once started, the skeleton will
provide information on the joint position and orientation for that user during each new frame of the
UserTracker.
Note that the computational requirements of calculating a skeleton increase linearly with the number of
users tracked. Tracking too many users may result in poor performance and high CPU utilization. If
performance slows to the point where the skeleton is not calculated at the full frame rate of the depth
data used to generate it, the algorithm tends to perform poorly.
@param [in] id The @ref UserId of the user to calculate a skeleton for.
@returns Status code indicating success or failure of this operation.
@see nite::Skeleton for more information on the skeleton algorithm.
*/
Status startSkeletonTracking(UserId id)
{
return (Status)niteStartSkeletonTracking(m_userTrackerHandle, id);
}
/**
Stops skeleton tracking for a specific user. If multiple users are being tracked, this will only stop
tracking for the user specified -- skeleton calculation will continue for remaining users.
@param [in] id The @ref UserId of the user to stop tracking.
@see nite::Skeleton for more information on the skeleton algorithm.
*/
void stopSkeletonTracking(UserId id)
{
niteStopSkeletonTracking(m_userTrackerHandle, id);
}
/**
This function commands the @ref UserTracker to start detecting specific poses for a specific user.
@param [in] user The @ref UserId of the user that you would like to detect a pose for.
@param [in] type The type of pose you would like to detect.
@returns @ref Status code indicating success or failure of this operation.
@see @ref PoseData For more information on pose detection and the output it generates.
@see @ref PoseType enumeration for a list of the available poses that can be detected.
*/
Status startPoseDetection(UserId user, PoseType type)
{
return (Status)niteStartPoseDetection(m_userTrackerHandle, (NiteUserId)user, (NitePoseType)type);
}
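/*
 Illustrative sketch (not part of the original header): combining pose detection
 with the PoseData output documented earlier. POSE_CROSSED_HANDS is assumed to
 come from the PoseType enumeration in NiteEnums.h; "user" and "userTracker" are
 assumed to be a current nite::UserData and the tracker that produced it.

	// When a user first appears:
	userTracker.startPoseDetection(user.getId(), nite::POSE_CROSSED_HANDS);

	// On each following frame:
	if (user.getPose(nite::POSE_CROSSED_HANDS).isEntered())
	{
		userTracker.stopPoseDetection(user.getId(), nite::POSE_CROSSED_HANDS);
		// React to the pose here.
	}
*/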
/**
This function commands the pose detection algorithm to stop detecting a specific pose for a specific
user. Since it is possible to detect multiple poses from multiple users, it is possible that detection
of a different pose on the same user (or the same pose on a different user) may continue after this function
is called.
@param [in] user The @ref UserId of the user to stop detecting a specific pose for.
@param [in] type The @ref PoseType of the pose to stop detecting.
*/
void stopPoseDetection(UserId user, PoseType type)
{
niteStopPoseDetection(m_userTrackerHandle, (NiteUserId)user, (NitePoseType)type);
}
/**
Adds a @ref NewFrameListener object to this @ref UserTracker so that it will respond when a new frame
is generated.