I’m trying to make a Kinect game in Unity using the MS-SDK wrapper from the Asset Store. In the KinectManager script, the Hand Cursor field is where you set the GameObject you want to act as a cursor.
The example provided by the MS-SDK gesture demo uses a GUITexture as the hand cursor. However, to be able to detect collisions, the box collider component attached to the hand cursor needs a transform scale value of at least 0.1 — but setting that scale value causes the texture to appear very large.
I would prefer to use sprites over GUITextures, since most of the GameObjects will be sprites, but the movement of a sprite object is very limited — the object doesn’t really move as much as the hand does.
The same issue occurs if I create a sprite and add a GUITexture component; and if I just use a sprite on its own, it doesn’t move with my hand properly (very little translation).
In short, I want to be able to use a hand cursor and detect collision with other sprites in the game.
To understand a bit about what’s going on in the code, here’s a snippet.
RightHandCursor Gesture detection:
// check for RightHandCursor
// Perpetual cursor gesture: while the right hand is tracked and held at or
// above roughly hip level, keep refreshing the cursor's screen position.
case Gestures.RightHandCursor:
switch(gestureData.state)
{
case 0: // gesture detection - phase 1 (perpetual)
// The hand counts as "raised" while it is no more than 0.1 units below the
// right hip (Kinect skeleton coordinates — presumably meters; confirm).
if(jointsTracked[rightHandIndex] && jointsTracked[rightHipIndex] &&
(jointsPos[rightHandIndex].y - jointsPos[rightHipIndex].y) > -0.1f)
{
gestureData.joint = rightHandIndex;
gestureData.timestamp = timestamp;
//gestureData.jointPos = jointsPos[rightHandIndex];
// Map the hand's world position into normalized screen coordinates
// (written into gestureData.screenPos by SetScreenPos below).
SetScreenPos(userId, ref gestureData, ref jointsPos, ref jointsTracked);
// NOTE(review): progress is pinned at 0.7, never 1.0 — presumably so the
// gesture stays "in progress" forever rather than completing; confirm
// against the gesture-completion logic elsewhere in this file.
gestureData.progress = 0.7f;
}
else
{
// cancel the gesture
//SetGestureCancelled(ref gestureData);
// Hand dropped below the hip or lost tracking: reset progress only,
// leaving the rest of the gesture state intact (full cancel is disabled).
gestureData.progress = 0f;
}
break;
}
break;
The SetScreenPos function:
/// <summary>
/// Maps the tracked hand's world position into normalized [0,1] screen
/// coordinates, written to <c>gestureData.screenPos</c>. The mapping box is
/// sized from the user's torso: width = shoulder-to-shoulder distance,
/// height = hip-center-to-shoulder-center distance. The box origin and size
/// persist in <c>gestureData.tagVector</c>/<c>tagVector2</c> across frames,
/// so a frame with untracked torso joints reuses the previous box.
/// </summary>
private static void SetScreenPos(uint userId, ref GestureData gestureData, ref Vector3[] jointsPos, ref bool[] jointsTracked)
{
    // Resolve which hand drives the cursor; bail out early when that hand
    // is not tracked this frame (no screen-position update).
    Vector3 cursorPos;
    if(gestureData.joint == rightHandIndex && jointsTracked[rightHandIndex])
    {
        cursorPos = jointsPos[rightHandIndex];
    }
    else if(gestureData.joint == leftHandIndex && jointsTracked[leftHandIndex])
    {
        cursorPos = jointsPos[leftHandIndex];
    }
    else
    {
        return;
    }

    // Refresh the movement box whenever all four torso joints are tracked.
    if(jointsTracked[hipCenterIndex] && jointsTracked[shoulderCenterIndex] &&
       jointsTracked[leftShoulderIndex] && jointsTracked[rightShoulderIndex])
    {
        float boxWidth = jointsPos[rightShoulderIndex].x - jointsPos[leftShoulderIndex].x;
        float boxHeight = jointsPos[shoulderCenterIndex].y - jointsPos[hipCenterIndex].y;
        gestureData.tagVector2.x = boxWidth; // * 1.2f;
        gestureData.tagVector2.y = boxHeight; // * 1.2f;

        // Box origin: centered on the active hand's shoulder horizontally,
        // anchored at hip-center height vertically.
        float shoulderX = (gestureData.joint == rightHandIndex)
            ? jointsPos[rightShoulderIndex].x
            : jointsPos[leftShoulderIndex].x;
        gestureData.tagVector.x = shoulderX - boxWidth / 2;
        gestureData.tagVector.y = jointsPos[hipCenterIndex].y;
    }

    // Normalize the hand's offset from the box origin into [0,1] on each
    // axis; skipped until a valid (non-degenerate) box has been seen.
    if(gestureData.tagVector2.x != 0 && gestureData.tagVector2.y != 0)
    {
        Vector3 offset = cursorPos - gestureData.tagVector;
        gestureData.screenPos.x = Mathf.Clamp01(offset.x / gestureData.tagVector2.x);
        gestureData.screenPos.y = Mathf.Clamp01(offset.y / gestureData.tagVector2.y);
    }
}
