I have this lookAt script that makes the object look at the mouse; how do I get it to work with touch input, though? Basically, when the player is touching the screen, I want the object to look toward where they're touching.
function lookAt () {
    var hit : RaycastHit;
    var ray = Camera.main.ScreenPointToRay(Input.mousePosition);
    if (Physics.Raycast(ray, hit)) {
        transform.LookAt(hit.point);
    }
}
if (Input.touchCount > 0) {
    var touch : Touch = Input.GetTouch(0);
    // build the ray from touch.position here
}

Everything else should be fine, but Input.mousePosition should be your touch.position instead.
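Putting the two together, a rough sketch of the whole thing (assuming it runs from Update and that Camera.main is the camera you're raycasting from):

function Update () {
    if (Input.touchCount > 0) {
        var touch : Touch = Input.GetTouch(0);
        var hit : RaycastHit;
        // same raycast as before, just starting from the touch position
        var ray = Camera.main.ScreenPointToRay(touch.position);
        if (Physics.Raycast(ray, hit)) {
            transform.LookAt(hit.point);
        }
    }
}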
Let me know how that works.
That works perfectly. Is there a way to do that with OnMouseDown, but for GUI?
you could use OnGUI.
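For example, a minimal OnGUI sketch (the Rect values, label, and level index are just placeholders, not from your project); GUI.Button should respond to taps on the device the same way it responds to clicks in the editor:

function OnGUI () {
    // GUI.Button returns true on the frame it gets clicked or tapped
    if (GUI.Button(Rect(10, 10, 120, 40), "Start")) {
        Application.LoadLevel(1);
    }
}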
I prefer 3D menus and GUITextures for iPhone myself; here is my script for GUITextures, which should answer any more questions:
// Uses GUITextures; put this script on each texture in the menu system. Be sure they are in sync and test often.
var activate : GameObject[];       // objects to turn on when this texture is tapped
var deactivate : GameObject[];     // objects to turn off when this texture is tapped
var Deactivate : boolean = true;
var Activate : boolean = true;
var StartLevel : boolean = false;  // load level 1 when tapped
var begin : boolean = true;        // does it start activated

function Start () {
    // scale the pixelInset so a layout authored for a 320-pixel-high screen fits other resolutions
    guiTexture.pixelInset = Rect(guiTexture.pixelInset.x * Screen.height / 320,
                                 guiTexture.pixelInset.y * Screen.height / 320,
                                 guiTexture.pixelInset.width * Screen.height / 320,
                                 guiTexture.pixelInset.height * Screen.height / 320);
    if (!begin) {
        gameObject.active = false;
    }
}

function Update () {
    if (Input.touchCount > 0) {
        var touch : Touch = Input.GetTouch(0);
        if (touch.phase == TouchPhase.Began && guiTexture.HitTest(touch.position)) {
            print("hit " + guiTexture.name);
            if (Activate) {
                for (var ao : GameObject in activate) {
                    ao.active = true;
                }
            }
            if (Deactivate) {
                for (var deo : GameObject in deactivate) {
                    deo.active = false;
                }
            }
            if (StartLevel) {
                Application.LoadLevel(1);
            }
        }
    }
}
Edit: the OnMouse events (OnMouseDown and the like) work in the iPhone simulator, but I don't think they work once the project is built in Xcode.
I used basically the same thing, with one exception:
var touch: iPhoneTouch = iPhoneInput.touches[0];
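For completeness, a sketch using that older Unity iPhone API (assuming your version exposes iPhoneInput.touchCount and the iPhoneTouchPhase enum; check against your docs):

if (iPhoneInput.touchCount > 0) {
    var touch : iPhoneTouch = iPhoneInput.touches[0];
    if (touch.phase == iPhoneTouchPhase.Began && guiTexture.HitTest(touch.position)) {
        print("hit " + guiTexture.name);
    }
}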
What’s the difference between touches and GetTouch?
touches is an array with one entry per finger currently touching the screen (its length is Input.touchCount).
I use Input.GetTouch(x) to get the phase and position of each individual touch.
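To make the difference concrete, here's a sketch that reads the same data both ways (purely illustrative; the Debug.Log lines just show what each call gives you):

function Update () {
    // Input.touches: an array with one Touch struct per finger on the screen
    for (var t : Touch in Input.touches) {
        Debug.Log("touch at " + t.position + ", phase " + t.phase);
    }
    // Input.GetTouch(i): the same data fetched by index, from 0 to Input.touchCount - 1
    for (var i = 0; i < Input.touchCount; i++) {
        var touch : Touch = Input.GetTouch(i);
        Debug.Log("touch " + i + " at " + touch.position);
    }
}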
OK thanks, that's what I thought. But does it matter if I only care whether one finger is touching it? Will it do the same thing?
mmmmm not sure. try it out
Ok thanks. Right now I actually don’t even have the iPhone version of Unity (because I’m on a PC) so I’ll test it when I get a Mac. Thanks for all your help.