
In the figure shown above, I have the layout of touch controls that I want to use for my game. I am wondering how I would code it so that these colored areas are isolated, as opposed to the screen taking generic touch input. How would I code for these sections independently, and as a group? For example, if I want to use both the yellow and green buttons, how would I detect when I'm hitting both areas at the same time, or when I press them separately within a certain amount of time, etc.? The red section would primarily be used for 3D movement along the x and z axes. I plan to use the accelerometer to rotate along the Y-axis, as well as to look up in the air and down at the floor.
I am hoping for either basic code, or to be pointed in the right direction where these issues have been discussed, because I have not found appropriate information on them.
Thank you for your help on this issue.
This can be done on multi-touch devices (which I suppose means 100% of the touch devices produced nowadays). A simple solution is to use a GUITexture for each button and HitTest to check whether a touch occurred in the button's area - for instance:
// drag the button textures to these fields:
public var guiUp: GUITexture;
public var guiDown: GUITexture;
public var guiLeft: GUITexture;
public var guiRight: GUITexture;
// button states:
var buttonUp: boolean;
var buttonDown: boolean;
var buttonLeft: boolean;
var buttonRight: boolean;

function ReadButtons(){ // check buttons
    buttonUp = false;
    buttonDown = false;
    buttonLeft = false;
    buttonRight = false;
    var count: int = Input.touchCount;
    for (var i = 0; i < count; i++){ // verify all touches
        var touch: Touch = Input.GetTouch(i);
        // if the touch is inside some button GUITexture, set the corresponding flag:
        if (guiUp.HitTest(touch.position)) buttonUp = true;
        if (guiDown.HitTest(touch.position)) buttonDown = true;
        if (guiLeft.HitTest(touch.position)) buttonLeft = true;
        if (guiRight.HitTest(touch.position)) buttonRight = true;
    }
}

function Update(){
    ReadButtons();
    if (buttonUp){
        // button up is pressed
    }
    if (buttonLeft){
        // button left is pressed
    }
    ...
}
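About the red area mentioned in the question (movement along the X and Z axes): the same HitTest idea works - stretch a GUITexture over the red region and convert the touch position into an offset from the area's center. The sketch below is only a starting point, not a finished control: moveArea, moveSpeed and turnSpeed are placeholder names, the script is assumed to be attached to the object you want to move, and the last line shows just one possible way to use the accelerometer to rotate around Y:

// drag the red area texture here:
public var moveArea: GUITexture;
public var moveSpeed: float = 5.0;  // movement speed (placeholder value)
public var turnSpeed: float = 90.0; // degrees per second per unit of tilt (placeholder value)

function Update(){
    for (var i = 0; i < Input.touchCount; i++){
        var touch: Touch = Input.GetTouch(i);
        if (moveArea.HitTest(touch.position)){
            // pixel rect occupied by the area, in screen coordinates:
            var r: Rect = moveArea.GetScreenRect();
            // offset of the touch from the area center, roughly -1..1:
            var dx: float = (touch.position.x - r.center.x) / (r.width * 0.5);
            var dz: float = (touch.position.y - r.center.y) / (r.height * 0.5);
            // move along X and Z proportionally to the offset:
            transform.Translate(dx * moveSpeed * Time.deltaTime, 0, dz * moveSpeed * Time.deltaTime);
        }
    }
    // tilt the device to rotate around the Y axis:
    transform.Rotate(0, Input.acceleration.x * turnSpeed * Time.deltaTime, 0);
}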
EDITED: You may also use a GUITexture for each area and define Rects in screen coordinates that specify the space occupied by each button:
// drag the GUITextures for each area here:
public var area1: GUITexture;
public var area2: GUITexture;
// define the Rect occupied by each button here:
public var rButUp: Rect;
public var rButDown: Rect;
public var rButLeft: Rect;
public var rButRight: Rect;
public var rButA: Rect;
public var rButB: Rect;
// button states:
var buttonUp: boolean;
var buttonDown: boolean;
var buttonLeft: boolean;
var buttonRight: boolean;
var buttonA: boolean;
var buttonB: boolean;

function ReadButtons(){ // check buttons
    buttonUp = false;
    buttonDown = false;
    buttonLeft = false;
    buttonRight = false;
    buttonA = false;
    buttonB = false;
    var count: int = Input.touchCount;
    for (var i = 0; i < count; i++){ // verify all touches
        var touch: Touch = Input.GetTouch(i);
        // if the touch is inside one of the areas, check that area's button rects:
        if (area1.HitTest(touch.position)){
            if (rButUp.Contains(touch.position)) buttonUp = true;
            if (rButDown.Contains(touch.position)) buttonDown = true;
            if (rButLeft.Contains(touch.position)) buttonLeft = true;
            if (rButRight.Contains(touch.position)) buttonRight = true;
        }
        if (area2.HitTest(touch.position)){
            if (rButA.Contains(touch.position)) buttonA = true;
            if (rButB.Contains(touch.position)) buttonB = true;
        }
    }
}
...
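Once buttonA and buttonB are being updated every frame by ReadButtons, detecting the yellow and green areas hit together, or one shortly after the other, is just boolean logic plus timestamps. Here is a minimal sketch of one way to do it - the names comboWindow, lastATime and lastBTime are my own, and 0.3 seconds is only an example value:

public var comboWindow: float = 0.3;   // max time, in seconds, between the two presses (example value)
private var lastATime: float = -999.0; // when button A was last seen pressed
private var lastBTime: float = -999.0; // when button B was last seen pressed

function Update(){
    ReadButtons();

    // both areas touched during the same frame:
    if (buttonA && buttonB){
        // simultaneous press of the two buttons
    }

    // remember when each button was last pressed:
    if (buttonA) lastATime = Time.time;
    if (buttonB) lastBTime = Time.time;

    // both buttons pressed within comboWindow seconds of each other:
    if (Time.time - lastATime <= comboWindow && Time.time - lastBTime <= comboWindow){
        // timed combination of the two buttons
    }
}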