I created a program that takes audio from the mic, sends it to a server, and gets back an answer from the server. I managed to make everything work, but I am seeing a GC.Alloc that makes about 23 million calls and eventually causes the whole program to freeze for approximately 10 seconds, sometimes more, before it continues.
I believe my problem is in this section:
void OnGUI()
{
GC.Collect(0);
if (this.isMicrophoneStart)
{
this.currentUpdateTime += Time.deltaTime;
if (!isSpeaking)
{
if (this.time != this.goAudioSource.timeSamples)
{
this.time1 = this.time;
}
if (this.timeStart1 != this.goAudioSource.timeSamples)
{
this.timeStart = this.timeStart1;
}
this.time = this.goAudioSource.time;
this.timeStart1 = this.goAudioSource.timeSamples;
float timeSamplesPerSecond = this.timeStart1 / this.time;
float offsetFloat = timeSamplesPerSecond * (int)this.time1;
this.offset = (int)offsetFloat;
}
// if (this.currentUpdateTime >= this.updateStep)
// {
this.CheckLoudness();
if (this.clipLoudness > db)
{
if (timer != null)
{
timer.Stop();
}
//Thread t = new Thread(new ParameterizedThreadStart(this.ChangeText));
//t.Start(this.text);
//this.text.text = "Speaking";
// Debug.Log("Speaking");
this.isSpeaking = true;
this.isWaitToResponse = false;
this.isStartTalking = false;
this.isFirstTime = false;
}
if(!Microphone.IsRecording(selectedDevice))
{
// Debug.Log("not Recording");
}
if (isSpeaking && clipLoudness < 0.01 && !isFirstTime && (timer == null || !timer.IsRunning))
{
this.isFirstTime = true;
timer = new System.Diagnostics.Stopwatch();
timer.Start();
//Thread.Sleep(2000);
}
if (isSpeaking && clipLoudness < 0.01 && timer != null && timer.IsRunning && timer.ElapsedMilliseconds > 2000)
{
Debug.Log("1");
// this.text.text = "stop";
//this.text.Rebuild(new CanvasUpdate());
timer.Stop();
this.timeEnd = this.goAudioSource.timeSamples;
this.isSpeaking = false;
this.isWaitToResponse = true;
isFirstTime = false;
Debug.Log("stop Speaking");
this.SaveVoice();
this.isMicrophoneStart = false;
this.SendVoice();
}
//else
//{
// if(isSpeaking && clipLoudness > 0.01 && isFirstTime)
// {
// this.isFirstTime = false;
// }
//}
// }
}
//if (!this.InternetAccess)
//{
// this.webCamTexture.Stop();
// Microphone.End(this.selectedDevice);
// this.finishTakingPicture = true;
// GUI.contentColor = Color.red;
// GUI.Label(new Rect(Screen.width / 2 - 100, Screen.height / 2 + 100, 200, 50), "No Internet Access...");
//}
if (this.isSpeaking)
{
GUI.Label(new Rect(Screen.width / 2 - 100, Screen.height / 2 + 100, 200, 50), "Recording in progress...");
}
if (this.isWaitToResponse)
{
GUI.Label(new Rect(Screen.width / 2 - 100, Screen.height / 2 + 100, 200, 50), "Wait To Response...");
}
if (!this.finishTakingPicture)
{
GUI.Label(new Rect(Screen.width / 2 - 100, Screen.height / 2 + 100, 200, 50), "Wait...");
}
if (this.isStartTalking && this.finishTakingPicture)
{
//&& this.InternetAccess
GUI.Label(new Rect(Screen.width / 2 - 100, Screen.height / 2 + 100, 200, 50), "You Can Start Talking");
}
if (!micConnected)
{
GUI.contentColor = Color.red;
GUI.Label(new Rect(Screen.width / 2 - 100, Screen.height / 2 - 25, 200, 50), "Microphone not connected!");
}
//if (timer.ElapsedMilliseconds > 3000)
//{
// Debug.Log("milliseconds=" + timer.ElapsedMilliseconds);
// timer.Stop();
// this.StartPhoto();
//}
if (!this.isSpeaking && this.finishTakingPicture && this.username == "Unknown")
{
//&& this.sentPhoto <= this.maxSentPhotos
GUI.Label(new Rect(Screen.width / 2 - 100, Screen.height / 2 + 100, 200, 50), "Wait To Response...");
this.sentPhoto++;
this.finishTakingPicture = false;
Thread.Sleep(500);
//this.StartPhoto(true);
//if (this.sentPhoto == this.maxSentPhotos)
//{
// this.StartPhoto(true);
//}
//else
//{
// this.StartPhoto(false);
//}
}
if (this.username == "")
{
this.InternetAccess = false;
}
//if(this.username != "" && this.username != "Unknown")
//{
// this.webCamTexture.Stop();
//}
}
//private void ChangeText(object text)
//{
// Text textout = (Text)text;
// textout.text = "proccess";
//}
private void StartPhoto(bool fromlip)
{
WebCamTexture webCamTexture = new WebCamTexture();
Renderer renderer = this.GetComponent<Renderer>();
renderer.material.mainTexture = webCamTexture;
webCamTexture.Play();
//Thread.Sleep(2000);
finishTakingPicture = false;
StartCoroutine(this.TakePhoto(fromlip, webCamTexture));
}
private void StopTakingPhoto(WebCamTexture webCamTexture)
{
webCamTexture.Stop();
// Debug.Log("After Web Cam:" + webCamTexture);
this.finishTakingPicture = true;
}
private void SaveVoice()
{
// this.text.text = "Wait To Response...";
int overlapSamples;
Microphone.End(this.selectedDevice);
int len2 = this.goAudioSource.clip.samples - (this.timeStart);
overlapSamples = (int)(this.overlap * this.goAudioSource.clip.frequency);
this.newClip = AudioClip.Create("cut2", timeEnd, goAudioSource.clip.channels, goAudioSource.clip.frequency, false, false);
float[] smp2 = new float[(len2 + overlapSamples) * this.goAudioSource.clip.channels];
this.goAudioSource.clip.GetData(smp2, this.offset);
this.newClip.SetData(smp2, 0);
//this.newClip.
this.saveWav.Save("Assets/Audio/sentRecord" + this.sentVoiceNumber, this.newClip);
this.sentVoiceNumber++;
}
private void CheckLoudness()
{
this.currentUpdateTime = 0f;
this.goAudioSource.clip.GetData(this.clipSampleData, this.goAudioSource.timeSamples); //I read 1024 samples, which is about 80 ms on a 44khz stereo clip, beginning at the current sample position of the clip.
this.clipLoudness = 0f;
foreach (var sample in clipSampleData)
{
this.clipLoudness += Mathf.Abs(sample);
}
this.clipLoudness /= this.sampleDataLength;
}
private void SendVoice()
{
StartCoroutine(call.postWWW(lip, lipSync, sentVoiceNumber - 1));
do
{
// Debug.Log(call.wwwPostRequest.isDone);
} while (!call.wwwPostRequest.isDone);
if (call.wwwPostRequest.isDone)
{
string tempPath = Path.Combine(Application.persistentDataPath, "Audio");
tempPath = Path.Combine(tempPath, "returnAudio.wav");
//var filepath = Path.Combine(System.IO.Directory.GetCurrentDirectory(), "Assets/Audio/returnAudio.wav");
//try
//{
// using (FileStream fs = File.Create(filepath))
// {
// fs.Write(call.wwwPostRequest.bytes, 0, call.wwwPostRequest.bytes.Length);
// fs.Close();
// }
//}
//catch
//{
// Thread.Sleep(1000);
//}
if (lip != null && lipSync != null)
{
AudioClip aClip = call.wwwPostRequest.GetAudioClip(false, false, AudioType.WAV);
StartCoroutine(this.lipsynctalk(lip, lipSync, aClip));
}
}
}
private IEnumerator lipsynctalk(LipSync lip, LipSyncData lipSync, AudioClip clip)
{
SavWav saveWav = new SavWav();
if (lip != null && lipSync != null && clip.frequency != 0)
{
LipSyncData lipSyncdata = new LipSyncData(clip, lipSync.phonemeData,lipSync.emotionData,lipSync.gestureData);
this.isWaitToResponse = false;
lip.Play(lipSyncdata);
yield return new WaitForSeconds(clip.length + 2);
this.StartNewVoice();
//if (this.username == “Unknown”)
//{
// this.StartPhoto(true);
//}
//else
//{
// this.StartNewVoice();
//}
}
else
{
if (clip.frequency == 0)
{
this.isWaitToResponse = false;
AudioClip dontUnderstandClip = Resources.Load<AudioClip>("didntUnderstand");
LipSyncData lipSyncdata = new LipSyncData(dontUnderstandClip,lipSync.phonemeData, lipSync.emotionData, lipSync.gestureData);
lip.Play(lipSyncdata);
yield return new WaitForSeconds(clip.length + 2);
this.StartNewVoice();
//if (this.username == “Unknown” || this.username == “”)
//{
// this.StartPhoto(true);
//}
//else
//{
// this.StartNewVoice();
//}
}
}
}
private void SendFace(string picture, WebCamTexture webCamTexture)
{
StartCoroutine(call.faceRecognition(picture));
do
{
} while (!call.wwwPostFaceRecognition.isDone);
if (call.wwwPostFaceRecognition.isDone)
{
//this.finishTakingPicture = true;
this.username = call.wwwPostFaceRecognition.text;
// Debug.Log("time" + timerphoto.ElapsedMilliseconds);
if (this.username != "Unknown")
{
// btn.image.rectTransform.sizeDelta = new Vector2(0, 0);
StartCoroutine(call.login(this.username));
}
if (this.username != "Unknown" || timerphoto.ElapsedMilliseconds > 2000)
{
Debug.Log("GFinish Photos");
//StartCoroutine(call.login(this.username));
this.finishTakingPicture = true;
this.StopTakingPhoto(webCamTexture);
this.StartNewVoice();
timerphoto.Stop();
timerphoto = new System.Diagnostics.Stopwatch();
//btn.SetActive(false);
}
else
{
StartCoroutine(this.TakePhoto(false, webCamTexture));
}
}
// Debug.Log(this.username);
}
private IEnumerator TakePhoto(bool fromLipSync, WebCamTexture webCamTexture)
{
yield return new WaitForEndOfFrame();
timerphoto = new System.Diagnostics.Stopwatch();
timerphoto.Start();
//Debug.Log("webcam=" + webCamTexture);
//this.finishTakingPicture = false;
Thread.Sleep(1000);
Texture2D photo = new Texture2D(webCamTexture.width, webCamTexture.height);
photo.SetPixels(webCamTexture.GetPixels());
photo.Apply();
byte[] bytes = photo.EncodeToPNG();
var your_path = Path.Combine(System.IO.Directory.GetCurrentDirectory(), "Assets/");
File.WriteAllBytes(your_path + "photo2.png", bytes);
string base64String = Convert.ToBase64String(bytes);
this.SendFace(base64String, webCamTexture);
//this.StartNewVoice();
//if (fromLipSync)
//{
// this.StartNewVoice();
//}
//if (!fromLipSync && this.username != “Unknown”)
//{
// this.StopTakingPhoto(webCamTexture);
// StartCoroutine(call.login(this.username));
// this.StartNewVoice();
//}
}
private float[] ConvertByteToFloat(byte[] array)
{
float[] floatArr = new float[array.Length / 4];
for (int i = 0; i < floatArr.Length; i++)
{
if (BitConverter.IsLittleEndian)
Array.Reverse(array, i * 4, 4);
floatArr[i] = BitConverter.ToSingle(array, i * 4) / 0x16000000;
}
return floatArr;
}
private void StartNewVoice()
{
this.goAudioSource.clip = Microphone.Start(selectedDevice, false, 600, 44100);
this.isMicrophoneStart = true;
this.isStartTalking = true;
while (!(Microphone.GetPosition(null) > 0)) { }
this.goAudioSource.mute = true;
this.goAudioSource.Play();
}
private string ReadString()
{
string path = "Assets/talk.txt";
StreamReader reader = new StreamReader(path);
string read = reader.ReadToEnd();
reader.Close();
return read;
}
}
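
For what it's worth, one change I am considering is replacing the blocking do/while wait in SendVoice with a coroutine that yields a frame at a time instead of spinning on the main thread. Below is an untested sketch (SendVoiceNonBlocking is just a name I made up; it assumes, like my current code does, that call.wwwPostRequest is a WWW object that call.postWWW assigns when it starts). Would something like this get rid of the freeze, or is the GC.Alloc coming from somewhere else in OnGUI?

// Untested sketch: a non-blocking replacement for SendVoice().
// Assumes call.wwwPostRequest is a WWW assigned inside call.postWWW.
private IEnumerator SendVoiceNonBlocking()
{
    // Kick off the upload exactly as before.
    StartCoroutine(call.postWWW(lip, lipSync, sentVoiceNumber - 1));

    // Yield one frame at a time until the response arrives,
    // instead of the do/while loop that blocks the main thread.
    while (call.wwwPostRequest == null || !call.wwwPostRequest.isDone)
    {
        yield return null;
    }

    if (lip != null && lipSync != null)
    {
        AudioClip aClip = call.wwwPostRequest.GetAudioClip(false, false, AudioType.WAV);
        StartCoroutine(this.lipsynctalk(lip, lipSync, aClip));
    }
}

I would then call it with StartCoroutine(this.SendVoiceNonBlocking()) at the point where I currently call this.SendVoice().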