I don't think the UI (uGUI) and legacy GUI systems support multitouch input out of the box.
I followed the MultiTouch Input tutorial (code):
using UnityEngine;
using System.Collections;

public class TouchTest : MonoBehaviour
{
    void Update()
    {
        // Input.touches holds every active touch this frame
        Touch[] myTouches = Input.touches;
        for (int i = 0; i < Input.touchCount; i++)
        {
            // Do something with the touches
        }
    }
}
I would like to use Rect.Contains(myTouches[i].position), but I need to convert the local rect from the RectTransform component to screen space.
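Here is a minimal sketch of that check (assuming the target RectTransform is assigned in the Inspector; the field names and the OnTouchInside helper are placeholders, not from the original code). RectTransformUtility.RectangleContainsScreenPoint does the local-rect to screen-space test for you:

using UnityEngine;

public class TouchRectCheck : MonoBehaviour
{
    public RectTransform targetRect;   // the uGUI element to test against
    public Camera uiCamera;            // leave null for a Screen Space - Overlay canvas

    void Update()
    {
        for (int i = 0; i < Input.touchCount; i++)
        {
            Touch touch = Input.GetTouch(i);
            // Handles the RectTransform-to-screen-space conversion internally
            if (RectTransformUtility.RectangleContainsScreenPoint(targetRect, touch.position, uiCamera))
            {
                OnTouchInside(touch);
            }
        }
    }

    void OnTouchInside(Touch touch)
    {
        // placeholder: react to the touch here
    }
}

For a Screen Space - Camera or World Space canvas, pass the canvas camera; for Screen Space - Overlay, null is fine.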
In the end, I substituted the uButton with a uImage and change its color (appearance) manually.
To handle multitouch input yourself: a Canvas has a GraphicRaycaster attached by default, which lets EventSystem.current.RaycastAll hit uGUI elements that are Graphics (Image, Text, etc.).
See also: Raycasting in new GUI and (Unity 4.6) How to raycast against uGUI objects from an arbitrary screen/canvas position.
Example Code:
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.EventSystems;
using System.Collections.Generic;

public class uImageRaycast : MonoBehaviour {
    public GameObject uImage;   // the uGUI Image acting as the "button"
    Image image;
    Color colorOn, colorOff;

    void Start(){
        this.image = this.uImage.GetComponent<Image>();
        // colorOff is the normal appearance; colorOn is a half-alpha "pressed" appearance
        this.colorOff = this.image.color;
        this.colorOn = new Color(this.colorOff.r, this.colorOff.g, this.colorOff.b, this.colorOff.a * 0.5f);
    }

    void Update(){
        this.image.color = this.colorOff;
        PointerEventData pointer = new PointerEventData(EventSystem.current);
        List<RaycastResult> raycastResult = new List<RaycastResult>();
        // Raycast every active touch through the uGUI event system
        foreach (Touch touch in Input.touches){
            pointer.position = touch.position;
            EventSystem.current.RaycastAll(pointer, raycastResult);
            foreach (RaycastResult result in raycastResult){
                if (result.gameObject == this.uImage){
                    this.image.color = this.colorOn;
                    // Do stuff
                }
            }
            raycastResult.Clear();
        }
    }
}