Question: What is this script?

Discussion in 'Scripting' started by LetmeDwight, Jan 28, 2021.

  1. LetmeDwight

    Joined:
    Apr 9, 2020
    Posts:
    125
    A friend quickly translated this script for me from one of his JS projects. The idea is that the script should move the mouth of a humanoid model based on the voice in the audio file. The code itself has 5 errors, and I don't know what some things are for, like "Mathf" and the like. Could someone please explain this to me?
    Code (CSharp):
    using System.Collections;
    using System.Collections.Generic;
    using UnityEngine;

    public class LipSyncV2 : MonoBehaviour
    {
        public SkinnedMeshRenderer Character;
        public AudioSource AudioLocation;
        public int TalkingMouthNumber;
        // TalkingMouthNumber = your mouth talking blendshape number

        public float volume = 40f;
        public float frqLow = 200;
        public float frqHigh = 800;

        float[] freqData;
        int nSamples = 1024;
        int fMax = 24000;

        float BandVol(float fLow, float fHigh)
        {
            fLow = Mathf.Clamp(fLow, 20, fMax); // limit low...
            fHigh = Mathf.Clamp(fHigh, fLow, fMax); // and high frequencies
            AudioLocation.GetSpectrumData(freqData, 0, FFTWindow.BlackmanHarris);
            int n1 = Mathf.Floor(fLow * nSamples / fMax);
            int n2 = Mathf.Floor(fHigh * nSamples / fMax);
            float sum = 0;

            // average the volumes of frequencies fLow to fHigh
            for (int i = n1; i <= n2; i++)
            {
                sum += freqData[i];
            }

            return sum * (n2 - n1 + 1);
        }

        void Start()
        {
            if (!AudioLocation)
                AudioLocation = GetComponent<AudioSource>();

            freqData = new float[nSamples];
        }

        void Update()
        {
            if (Character)
            {
                float DATAREADA = Mathf.Clamp((BandVol(frqLow, frqHigh) * volume * 2), 0, 100);
                DATAREADA = Mathf.Lerp(0, DATAREADA, Time.time * 0.1);

                //Character.SendMessage("FaceTalking", DATAREAD, SendMessageOptions.DontRequireReceiver);
                FaceTalking(DATAREADA);
            }
        }

        void FaceTalking(float TalkingNow)
        {
            if (SkinnedMeshRenderer)
            {
                SkinnedMeshRenderer.SetBlendShapeWeight(TalkingMouthNumber, TalkingNow);
            }
        }
    }
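    For reference, a sketch of the same script with the compile errors addressed is below (assuming the intent is to average the spectrum band and drive a single mouth blendshape). The likely five errors are the two Mathf.Floor results assigned to ints, the double literal 0.1 passed to Mathf.Lerp, and the two places where the type name SkinnedMeshRenderer is used instead of the Character field:
    Code (CSharp):
    using UnityEngine;

    public class LipSyncV2 : MonoBehaviour
    {
        public SkinnedMeshRenderer Character;   // mesh that owns the mouth blendshape
        public AudioSource AudioLocation;       // voice audio source
        public int TalkingMouthNumber;          // index of the talking blendshape

        public float volume = 40f;
        public float frqLow = 200f;
        public float frqHigh = 800f;

        float[] freqData;
        int nSamples = 1024;
        int fMax = 24000;

        // Average loudness of the spectrum between fLow and fHigh (in Hz).
        float BandVol(float fLow, float fHigh)
        {
            fLow = Mathf.Clamp(fLow, 20, fMax);
            fHigh = Mathf.Clamp(fHigh, fLow, fMax);
            AudioLocation.GetSpectrumData(freqData, 0, FFTWindow.BlackmanHarris);

            // Mathf.Floor returns a float; FloorToInt gives the int bin indices we need.
            int n1 = Mathf.FloorToInt(fLow * nSamples / fMax);
            int n2 = Mathf.FloorToInt(fHigh * nSamples / fMax);

            float sum = 0f;
            for (int i = n1; i <= n2; i++)
                sum += freqData[i];

            // Divide rather than multiply so this actually averages, as the original comment says.
            return sum / (n2 - n1 + 1);
        }

        void Start()
        {
            if (!AudioLocation)
                AudioLocation = GetComponent<AudioSource>();
            freqData = new float[nSamples];
        }

        void Update()
        {
            if (!Character)
                return;

            float mouthOpen = Mathf.Clamp(BandVol(frqLow, frqHigh) * volume * 2f, 0f, 100f);
            mouthOpen = Mathf.Lerp(0f, mouthOpen, Time.time * 0.1f); // float literals need the f suffix
            FaceTalking(mouthOpen);
        }

        void FaceTalking(float talkingNow)
        {
            // Use the Character field here, not the SkinnedMeshRenderer type name.
            Character.SetBlendShapeWeight(TalkingMouthNumber, talkingNow);
        }
    }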
     
  2. Kurt-Dekker

    Joined:
    Mar 16, 2013
    Posts:
    38,727
    How to understand errors in general:

    https://forum.unity.com/threads/ass...3-syntax-error-expected.1039702/#post-6730855

    All classes have documentation. Start there.

    https://docs.unity3d.com/ScriptReference/Mathf.html
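
    For example, the Mathf calls in the posted script are just static helper methods from that class (the comments show what each call returns):
    Code (CSharp):
    float clamped = Mathf.Clamp(250f, 20f, 24000f); // 250, kept inside the 20..24000 range
    int   bin     = Mathf.FloorToInt(3.9f);         // 3, rounded down to an int
    float blended = Mathf.Lerp(0f, 100f, 0.25f);    // 25, a quarter of the way from 0 to 100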

    This sounds intuitively like something that is gonna rely on a LOT more detail than the code itself. Code in Unity is only a tiny fraction of the problem. The rest is the scene, model, and prefab setup.

    Engineering is accomplished generally by defining and understanding a problem, then creating a solution. "Finding" a script is generally not a useful way to accomplish anything. I would recommend starting with tutorials to do what you want. Voice mouth modeling is a very complex topic in general.
     
  3. LetmeDwight

    Joined:
    Apr 9, 2020
    Posts:
    125
    I solved it in another, easier way, without a tutorial.
    I just made a 1-second animation of the mouth opening and closing, and wrote this small script that plays that animation on a second Animator layer whenever an MP3 is playing in the AudioSource for the voice lines:
    Code (CSharp):
    using System.Collections;
    using System.Collections.Generic;
    using UnityEngine;

    public class IsVoicelinePlaying_Lipsync : MonoBehaviour
    {
        private AudioSource obj_Speechblendaudiosource;
        Animator YunoAnimator;

        private void Start()
        {
            // Cache the references once; there is no need to call GameObject.Find every frame.
            obj_Speechblendaudiosource = GameObject.Find("obj_SpeechBlendUNDVoiceManager").GetComponent<AudioSource>();
            YunoAnimator = GameObject.Find("YunoIK_withRibbonV7").GetComponent<Animator>();
        }

        void Update()
        {
            // Turn the talking animation on while a voice line is playing, off otherwise.
            if (obj_Speechblendaudiosource.isPlaying)
            {
                YunoAnimator.SetInteger("SpeechLayer", 1);
            }
            else
            {
                YunoAnimator.SetInteger("SpeechLayer", 0);
            }

            Debug.Log("obj_Speechblendaudiosource.isPlaying = " + obj_Speechblendaudiosource.isPlaying);
        }
    }
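    A possible alternative, if the second layer only contains the talking loop, is to drive that layer's weight directly instead of switching an int parameter. A sketch, assuming the talking layer has index 1, that could replace the Update() above:
    Code (CSharp):
    void Update()
    {
        // Fade the talking layer in while a voice line is playing, and back out when it stops.
        float target = obj_Speechblendaudiosource.isPlaying ? 1f : 0f;
        float current = YunoAnimator.GetLayerWeight(1);
        YunoAnimator.SetLayerWeight(1, Mathf.MoveTowards(current, target, Time.deltaTime * 5f));
    }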