I’m making a FNAF-like game where you can click on the screen of an in-game computer to perform actions. The screen has its own UI. When I’m in the Unity editor everything works properly, but when I build the game it doesn’t work.
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.EventSystems;
/// <summary>
/// Forwards the player's gaze onto an in-world computer screen: raycasts from the
/// main camera into the scene, converts the hit's UV coordinate into a position on
/// the screen's UI canvas, and dispatches pointer events to whatever UI element
/// sits under that position.
/// </summary>
public class RenderTextureRaycast : MonoBehaviour
{
    public RectTransform UICanvas;          // canvas rendered onto the screen's RenderTexture
    public GraphicRaycaster screenCaster;   // raycaster belonging to that canvas
    public LayerMask screenMask;            // layer(s) of the physical screen collider
    GameObject mainCamera;
    PointerEventData pointerEventData;
    public Text debugText;                  // optional on-screen diagnostics

    private void Start()
    {
        mainCamera = Camera.main.gameObject;
        // NOTE(review): EventSystem.current can still be null this early in a build's
        // scene load order; the PointerEventData itself tolerates that, but verify an
        // EventSystem exists in the built scene.
        pointerEventData = new PointerEventData(EventSystem.current);
    }

    private void Update()
    {
        // Look straight ahead from the camera; 3 units is the interaction range.
        Ray ray = new Ray(mainCamera.transform.position, mainCamera.transform.forward);
        RaycastHit hit;
        if (Physics.Raycast(ray, out hit, 3, screenMask))
        {
            // NOTE(review): RaycastHit.textureCoord only works against a MeshCollider
            // whose mesh has "Read/Write Enabled" in its import settings; otherwise it
            // returns (0,0). The editor always keeps mesh data CPU-readable, so this
            // is the usual cause of "works in the editor but not in a build" for this
            // exact pattern — confirm the screen mesh's import settings.
            List<RaycastResult> results = new List<RaycastResult>();

            // Map the hit's UV (0..1) onto the canvas's pixel dimensions.
            Vector2 virtualPosition = new Vector3(hit.textureCoord.x, hit.textureCoord.y);
            virtualPosition.x *= UICanvas.sizeDelta.x;
            virtualPosition.y *= UICanvas.sizeDelta.y;
            pointerEventData.position = virtualPosition;
            screenCaster.Raycast(pointerEventData, results);

            // Fix: the original source had this literal split across physical lines,
            // which is not valid C# — use explicit newline escapes instead.
            if (debugText != null)
            {
                debugText.text = "Texture coordinate: " + hit.textureCoord +
                    "\nScreen position: " + virtualPosition +
                    "\nEvent system: " + pointerEventData;
            }

            // Replay mouse button state as pointer events on every UI element hit.
            foreach (RaycastResult result in results)
            {
                if (Input.GetMouseButtonDown(0))
                {
                    ExecuteEvents.Execute(result.gameObject, pointerEventData, ExecuteEvents.pointerDownHandler);
                    ExecuteEvents.Execute(result.gameObject, pointerEventData, ExecuteEvents.pointerClickHandler);
                }
                if (Input.GetMouseButtonUp(0))
                {
                    ExecuteEvents.Execute(result.gameObject, pointerEventData, ExecuteEvents.pointerUpHandler);
                }
            }
        }
    }
}
Does anyone know how I can solve this problem? Thanks!