Okay, this is so weird.
I want a GUITexture to fade in and out when ordered to, so I wrote this script and attached it to it.
If fades in nicely, but it REFUSES to fade out properly.
When debugging it seems like the alpha value of the colour REFUSES to decrease, either with Lerp or with just regular subtraction.
I’ve kept all my methods of fading commented out so you guys can see what I’ve already tried.
What the hell is going on?
public class GUITextureController : MonoBehaviour {
private float alpha;
public bool isOn;
public bool isOff;
public float fadeSpeed;
// Use this for initialization
void Start () {
guiTexture.color = new Color(0.5f,0.5f,0.5f, 0);
}
// Update is called once per frame
void Update () {
if (isOn == true) {
OnGUI ();
if(guiTexture.color.a >= 0.45f) {
isOn = false;
isOff = true;
Debug.Log ("Switchar från på till av");
}
}
else if (isOff == true) {
OffGUI();
if(guiTexture.color.a <= 0.05f){
guiTexture.enabled = false;
Debug.Log ("Fadade ut, stänger av");
}
}
}
public void TurnOn() {
guiTexture.enabled = true;
isOn = true;
isOff = false;
}
void OnGUI()
{
//alpha = alpha + fadeSpeed; //Mathf.Lerp(alpha, 0.5f, 0.4f * Time.deltaTime);
//guiTexture.color = new Color(0.5f,0.5f,0.5f, alpha);
guiTexture.color = Color.Lerp(guiTexture.color, Color.gray, fadeSpeed * Time.deltaTime);
}
void OffGUI() {
Debug.Log ("OffGUI körs");
guiTexture.color = Color.Lerp(guiTexture.color, Color.clear, fadeSpeed * Time.deltaTime);
//alpha = Mathf.Lerp (alpha, 0, 0.1f * Time.deltaTime);
// aplha = alpha - fadeSpeed;
if (alpha <= 0) {
alpha = 0;
}
//guiTexture.color = new Color (0.5f, 0.5f, 0.5f, alpha);
}
}