@alankent
Created September 25, 2021 20:35
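// ============================================================================
// AlansBlendShapeClip (MonoBehaviour)
// Exposes one 0..1 slider per VRM blendshape clip on the character and, each
// frame, accumulates the weights into a BlendShapeMerger built from a folder
// of BlendShapeClip assets.
// ============================================================================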
using System.Collections;
using System.Collections.Generic;
using UniGLTF;
using UnityEngine;
using UnityEditor;
using VRM;
[ExecuteAlways]
public class AlansBlendShapeClip : MonoBehaviour
{
public Object blendShapeClipDirectory;
private Object m_lastBlendShapeClipDirectory = null;
private string m_blendShapeClipDirectoryPath = null;
private List<BlendShapeClip> m_clips = new List<BlendShapeClip>();
private AlansBlendShapeClipVowels m_vowels;
BlendShapeMerger m_merger;
// Force deferred loading of blendshape clips (cannot be done in constructor).
public bool ForceReload = true;
#if false
// This works, but it would need a custom editor to look nice and introduces another layer of nesting when animating the weights, which would break all my existing animation clips.
[System.Serializable]
public class BlendShapeClipGroup
{
public bool Loaded = false;
[Range(0f, 1f)] public float Weight = 0;
}
//public BlendShapeClip EyesClosedHappyGroup = new();
#endif
[Range(0f, 1f)] public float Neutral = 0;
[Range(0f, 1f)] public float A = 0;
[Range(0f, 1f)] public float E = 0;
[Range(0f, 1f)] public float I = 0;
[Range(0f, 1f)] public float O = 0;
[Range(0f, 1f)] public float U = 0;
[Range(0f, 1f)] public float Angry = 0;
[Range(0f, 1f)] public float AngryVein = 0;
[Range(0f, 1f)] public float BagStraps = 0;
[Range(0f, 1f)] public float Blank = 0;
[Range(0f, 1f)] public float Blink = 0;
[Range(0f, 1f)] public float BlinkLeft = 0;
[Range(0f, 1f)] public float BlinkRight = 0;
[Range(0f, 1f)] public float Blush = 0;
[Range(0f, 1f)] public float Cry = 0;
[Range(0f, 1f)] public float Dark = 0;
[Range(0f, 1f)] public float Dirty = 0;
[Range(0f, 1f)] public float Dizzy = 0;
[Range(0f, 1f)] public float Extra = 0;
[Range(0f, 1f)] public float Flushed = 0;
[Range(0f, 1f)] public float Fun = 0;
[Range(0f, 1f)] public float Joy = 0;
[Range(0f, 1f)] public float Scary = 0;
[Range(0f, 1f)] public float Sorrow = 0;
[Range(0f, 1f)] public float Surprised = 0;
[Range(0f, 1f)] public float Sweat = 0;
[Range(0f, 1f)] public float Weep = 0;
[Range(0f, 1f)] public float EyesWide = 0;
[Range(0f, 1f)] public float EyesClosedHappy = 0;
[Range(0f, 1f)] public float MouthGrin = 0;
[Range(0f, 1f)] public float IrisShrink = 0;
public GameObject Bag;
// Warning messages about clips that were not found (read only).
[Tooltip("Blendshape clips that could not be found in the blend shape clip directory for this character")]
//[TextArea(5, 10)]
public string ClipsNotLoaded = "";
public AlansBlendShapeClip()
{
}
void Awake()
{
ForceReload = true;
UpdateBlendShapeClips();
}
void OnValidate()
{
UpdateBlendShapeClips();
}
public void LateUpdate()
{
UpdateBlendShapeClips();
}
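// Recompute every clip weight and apply them through the BlendShapeMerger.
// Called from Awake/OnValidate/LateUpdate so changes show up immediately in
// the editor as well as in play mode ([ExecuteAlways]).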
public void UpdateBlendShapeClips()
{
// If not initialized yet or directory has changed, force it to reload.
if (ForceReload || m_merger == null || blendShapeClipDirectory != m_lastBlendShapeClipDirectory)
{
ForceReload = false;
m_lastBlendShapeClipDirectory = blendShapeClipDirectory;
m_blendShapeClipDirectoryPath = null;
LoadAllBlendShapeClips();
m_vowels = GetComponent<AlansBlendShapeClipVowels>();
}
Accumulate(new BlendShapeKey(BlendShapePreset.Neutral), Neutral);
// If the current object has an AlansBlendShapeClipVowels component attached (used when talking), take the max with its vowel weights.
Accumulate(new BlendShapeKey(BlendShapePreset.A), (m_vowels == null) ? A : Mathf.Max(A, m_vowels.A));
Accumulate(new BlendShapeKey(BlendShapePreset.E), (m_vowels == null) ? E : Mathf.Max(E, m_vowels.E));
Accumulate(new BlendShapeKey(BlendShapePreset.I), (m_vowels == null) ? I : Mathf.Max(I, m_vowels.I));
Accumulate(new BlendShapeKey(BlendShapePreset.O), (m_vowels == null) ? O : Mathf.Max(O, m_vowels.O));
Accumulate(new BlendShapeKey(BlendShapePreset.U), (m_vowels == null) ? U : Mathf.Max(U, m_vowels.U));
Accumulate(new BlendShapeKey(BlendShapePreset.Angry), Angry);
Accumulate(new BlendShapeKey("AngryVein"), AngryVein);
Accumulate(new BlendShapeKey("BagStraps"), BagStraps);
if (Bag != null) Bag.SetActive(BagStraps > 0.5f);
Accumulate(new BlendShapeKey("Blank"), Blank);
Accumulate(new BlendShapeKey(BlendShapePreset.Blink), Blink);
Accumulate(new BlendShapeKey(BlendShapePreset.Blink_L), BlinkLeft);
Accumulate(new BlendShapeKey(BlendShapePreset.Blink_R), BlinkRight);
FourLevels(Blush, "Blush25", "Blush50", "Blush75", "Blush99");
Accumulate(new BlendShapeKey("Cry"), Cry);
Accumulate(new BlendShapeKey("Dark"), Dark);
Accumulate(new BlendShapeKey("Dirty"), Dirty);
Accumulate(new BlendShapeKey("Dizzy"), Dizzy);
Accumulate(new BlendShapeKey("Extra"), Extra);
FourLevels(Flushed, "Flushed25", "Flushed50", "Flushed75", "Flushed99");
Accumulate(new BlendShapeKey(BlendShapePreset.Fun), Fun);
Accumulate(new BlendShapeKey(BlendShapePreset.Joy), Joy);
Accumulate(new BlendShapeKey("Scary"), Scary);
Accumulate(new BlendShapeKey(BlendShapePreset.Sorrow), Sorrow);
Accumulate(new BlendShapeKey("Surprised"), Surprised);
Accumulate(new BlendShapeKey("Sweat"), Sweat);
Accumulate(new BlendShapeKey("Weep"), Weep);
Accumulate(new BlendShapeKey("EyesWide"), EyesWide);
Accumulate(new BlendShapeKey("EyesClosedHappy"), EyesClosedHappy);
Accumulate(new BlendShapeKey("MouthGrin"), MouthGrin);
Accumulate(new BlendShapeKey("IrisShrink"), IrisShrink);
m_merger.Apply();
}
private void Accumulate(BlendShapeKey key, float value)
{
m_merger.AccumulateValue(key, value);
}
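// Map a continuous 0..1 weight onto four mutually exclusive on/off clips
// (e.g. Blush25/50/75/99): only the band containing the value is switched on.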
private void FourLevels(float value, string level25, string level50, string level75, string level100)
{
float v25 = 0f, v50 = 0f, v75 = 0f, v100 = 0f;
if (value > 0.875f) v100 = 1f;
else if (value > 0.625f) v75 = 1f;
else if (value > 0.375f) v50 = 1f;
else if (value > 0.125f) v25 = 1f;
m_merger.AccumulateValue(new BlendShapeKey(level25), v25);
m_merger.AccumulateValue(new BlendShapeKey(level50), v50);
m_merger.AccumulateValue(new BlendShapeKey(level75), v75);
m_merger.AccumulateValue(new BlendShapeKey(level100), v100);
}
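// (Re)load every known BlendShapeClip asset by file name from the configured
// directory and rebuild the merger. Assets that cannot be found are listed in
// ClipsNotLoaded rather than raising errors.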
private void LoadAllBlendShapeClips()
{
m_clips.Clear();
ClipsNotLoaded = "";
LoadClip("BlendShape.Neutral.asset");
LoadClip("BlendShape.A.asset");
LoadClip("BlendShape.E.asset");
LoadClip("BlendShape.I.asset");
LoadClip("BlendShape.O.asset");
LoadClip("BlendShape.U.asset");
LoadClip("BlendShape.Angry.asset");
LoadClip("FaceTile.AngryVein.asset");
LoadClip("ClothesTile.BagStraps.asset");
LoadClip("EyeTile.Blank.asset");
LoadClip("BlendShape.Blink.asset");
LoadClip("BlendShape.Blink_L.asset");
LoadClip("BlendShape.Blink_R.asset");
LoadClip("FaceTile.Blush25.asset");
LoadClip("FaceTile.Blush50.asset");
LoadClip("FaceTile.Blush75.asset");
LoadClip("FaceTile.Blush99.asset");
LoadClip("FaceTile.Cry.asset");
LoadClip("FaceTile.Dark.asset");
LoadClip("FaceTile.Dirty.asset");
LoadClip("EyeTile.Dizzy.asset");
LoadClip("BlendShape.Extra.asset");
LoadClip("FaceTile.Flushed25.asset");
LoadClip("FaceTile.Flushed50.asset");
LoadClip("FaceTile.Flushed75.asset");
LoadClip("FaceTile.Flushed99.asset");
LoadClip("BlendShape.Fun.asset");
LoadClip("BlendShape.Joy.asset");
LoadClip("FaceTile.Scary.asset");
LoadClip("BlendShape.Sorrow.asset");
LoadClip("Blendshape.Surprised.asset");
LoadClip("FaceTile.Weep.asset");
LoadClip("FaceTile.Sweat.asset");
LoadClip("Face.EyesWide.asset");
LoadClip("Face.EyesClosedHappy.asset");
LoadClip("Face.MouthGrin.asset");
LoadClip("Face.IrisShrink.asset");
m_merger = new BlendShapeMerger(m_clips, transform);
}
private BlendShapeClip LoadClip(string name)
{
if (blendShapeClipDirectory == null)
{
Debug.Log("Blend Shape Clip Directory for " + name + " of " + gameObject.name + " - must be set to Asset folder holding blendshape clips");
return null;
}
if (m_blendShapeClipDirectoryPath == null)
{
m_blendShapeClipDirectoryPath = AssetDatabase.GetAssetPath(blendShapeClipDirectory);
if (m_blendShapeClipDirectoryPath == null)
{
Debug.Log("Failed to determine path for " + name + " from asset folder in Blend Shape Clip Directory");
return null;
}
}
string path = m_blendShapeClipDirectoryPath + "/" + name;
var clip = AssetDatabase.LoadAssetAtPath<BlendShapeClip>(path);
if (clip == null)
{
//Debug.Log("Warning: Blendshape clip '" + path + "' not found or could not be loaded.");
ClipsNotLoaded += name + " ";
return null;
}
m_clips.Add(clip);
return clip;
}
}
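// ============================================================================
// AlansBlendShapeClipVowels (MonoBehaviour)
// Holds the A/E/I/O/U vowel weights written by the talking Timeline track.
// AlansBlendShapeClip, on the same GameObject, takes the max of these values
// and its own vowel sliders when applying the mouth shapes.
// ============================================================================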
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class AlansBlendShapeClipVowels : MonoBehaviour
{
[Range(0f, 1f)] public float A;
[Range(0f, 1f)] public float E;
[Range(0f, 1f)] public float I;
[Range(0f, 1f)] public float O;
[Range(0f, 1f)] public float U;
private AlansBlendShapeClip absc;
// Start is called before the first frame update
void Start()
{
absc = GetComponent<AlansBlendShapeClip>();
}
// Gets called from talk track when it adjusts weights.
public void UpdateBlendShapeClips()
{
absc?.UpdateBlendShapeClips();
}
}
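// ============================================================================
// TalkingClipEditor (editor only)
// Custom Timeline clip drawer for TalkingClip: highlights the clip while the
// playhead is over it.
// ============================================================================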
using UnityEngine;
using UnityEditor.Timeline;
using UnityEngine.Sequences.Timeline;
using UnityEngine.Timeline;
namespace UnityEditor.Sequences.Timeline
{
[CustomTimelineEditor(typeof(TalkingClip))]
public class TalkingClipEditor : ClipEditor
{
/// <inheritdoc cref="ClipEditor.GetClipOptions"/>
public override ClipDrawOptions GetClipOptions(TimelineClip clip)
{
var options = base.GetClipOptions(clip);
// Brighten the clip's highlight colour while the playhead is over it.
var director = TimelineEditor.inspectedDirector;
if (director != null && director.time >= clip.start && director.time <= clip.end)
{
options.highlightColor = options.highlightColor * 1.5f;
}
return options;
}
#if false
public override void OnCreate(TimelineClip clip, TrackAsset track, TimelineClip clonedFrom)
{
var talkingTrack = track as TalkingTrackAsset;
if (talkingTrack != null)
{
clip.duration = talkingTrack.defaultFrameDuration;
// TODO: Maybe we could set the clip length based on amount of text.
// For now I let user scale length to control duration.
}
}
#endif
}
}
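// ============================================================================
// TalkingTrackEditor (editor only)
// Custom Timeline track drawer for TalkingTrackAsset (currently just the
// default drawing options).
// ============================================================================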
using UnityEditor.Timeline;
using UnityEngine;
using UnityEngine.Sequences.Timeline;
using UnityEngine.Timeline;
namespace UnityEditor.Sequences.Timeline
{
[CustomTimelineEditor(typeof(TalkingTrackAsset))]
public class TalkingTrackEditor : TrackEditor
{
public override TrackDrawOptions GetTrackOptions(TrackAsset track, Object binding)
{
var options = base.GetTrackOptions(track, binding);
//options.minimumHeight = 40;
return options;
}
}
}
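// ============================================================================
// TalkingBehaviour (PlayableBehaviour)
// Runtime behaviour for a TalkingClip: each frame it steps through the viseme
// string over the clip duration, linearly blending between the current and
// next viseme, and writes the resulting A/E/I/O/U weights to the bound
// AlansBlendShapeClipVowels component.
// ============================================================================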
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Playables;
// GREAT VIDEO! https://www.youtube.com/watch?v=UEuM-Fckx5w
public class TalkingBehaviour : PlayableBehaviour
{
public string Visemes;
public float Weight;
private AlansBlendShapeClipVowels m_abscv = null;
class VisemeWeights
{
public float A;
public float E;
public float I;
public float O;
public float U;
public VisemeWeights(float A, float E, float I, float O, float U)
{
this.A = A;
this.E = E;
this.I = I;
this.O = O;
this.U = U;
}
}
public override void ProcessFrame(Playable playable, FrameData info, object playerData)
{
// We have the viseme string in Visemes
// We have AlansBlendShapeClipVowels in playerData
// We have the current frame to display in info
// Time in seconds since start of clip and the duration of the clip.
float time = (float) playable.GetTime();
float clipDuration = (float) playable.GetDuration();
// Guard: nothing to animate if the viseme string was never filled in.
if (string.IsNullOrEmpty(Visemes)) return;
// Work out duration per viseme.
float visemeDuration = clipDuration / (Visemes.Length - 1);
// Work out the viseme index (clamped so rounding at the clip end cannot overrun the string).
int vi = Mathf.Clamp((int)(time / visemeDuration), 0, Visemes.Length - 1);
char vc1 = Visemes[vi];
char vc2 = (vi + 1 < Visemes.Length) ? Visemes[vi + 1] : Visemes[vi];
VisemeWeights vw1 = CharToViseme(vc1);
VisemeWeights vw2 = CharToViseme(vc2);
// Offset into current viseme.
float lerp = (time / visemeDuration) - vi;
AlansBlendShapeClipVowels abscv = (AlansBlendShapeClipVowels)playerData;
if (abscv != null)
{
m_abscv = abscv;
m_abscv.A = (vw1.A + (vw2.A - vw1.A) * lerp) * Weight;
m_abscv.E = (vw1.E + (vw2.E - vw1.E) * lerp) * Weight;
m_abscv.I = (vw1.I + (vw2.I - vw1.I) * lerp) * Weight;
m_abscv.O = (vw1.O + (vw2.O - vw1.O) * lerp) * Weight;
m_abscv.U = (vw1.U + (vw2.U - vw1.U) * lerp) * Weight;
m_abscv.UpdateBlendShapeClips();
}
}
public override void OnBehaviourPause(Playable playable, FrameData info)
{
if (m_abscv != null)
{
m_abscv.A = 0;
m_abscv.E = 0;
m_abscv.I = 0;
m_abscv.O = 0;
m_abscv.U = 0;
m_abscv.UpdateBlendShapeClips();
}
}
private static VisemeWeights AWeights = new(1, 0, 0, 0, 0);
private static VisemeWeights EWeights = new(0, 1, 0, 0, 0);
private static VisemeWeights IWeights = new(0, 0.5f, 0.5f, 0, 0); // "i" does not open mouth much
private static VisemeWeights OWeights = new(0, 0, 0, 1, 0);
private static VisemeWeights UWeights = new(0, 0, 0, 0, 1);
private static VisemeWeights SWeights = new(0, 0, 1, 0, 1);
private static VisemeWeights CWeights = new(0.3f, 0.3f, 0.3f, 0, 0);
private static VisemeWeights ZeroWeights = new(0, 0, 0, 0, 0);
private VisemeWeights CharToViseme(char c)
{
switch (c)
{
case 'A': return AWeights;
case 'E': return EWeights;
case 'I': return IWeights;
case 'O': return OWeights;
case 'U': return UWeights;
case 'C': return CWeights;
case 'S': return SWeights;
default: return ZeroWeights;
}
}
}
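// ============================================================================
// TalkingClip (PlayableAsset)
// Timeline clip asset holding the text to "speak"; converts the text to a
// crude viseme string and hands it to TalkingBehaviour at play time.
// ============================================================================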
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Playables;
using UnityEngine.Timeline;
// Contains clip data which is passed to the behavior at run time.
public class TalkingClip : PlayableAsset, ITimelineClipAsset
{
[Tooltip("Text to be spoken")]
[TextArea(5, 10)]
[SerializeField]
public string Text;
public bool ConvertTextToVisemes = true;
[Tooltip("Visemes computed from text")]
[TextArea(5, 10)]
[SerializeField]
public string Visemes;
[Range(0f, 1f)] public float VisemeWeight = 1f;
// We don't support blending or anything fancy.
public ClipCaps clipCaps => ClipCaps.None;
public override Playable CreatePlayable(PlayableGraph graph, GameObject owner)
{
// GetComponent<PlayableDirector>() can be called on 'owner' if needed.
var playable = ScriptPlayable<TalkingBehaviour>.Create(graph);
if (playable.GetBehaviour() != null)
SetAttributes(playable.GetBehaviour());
return playable;
}
void SetAttributes(TalkingBehaviour talkingBehavior)
{
if (ConvertTextToVisemes)
{
UpdateVisemesFromText();
}
talkingBehavior.Visemes = Visemes;
talkingBehavior.Weight = VisemeWeight;
}
// Convert text into a viseme sequence
public void UpdateVisemesFromText()
{
// Really crude lipsync algorithm.
// Space = mouth closed, A = open mouth (ah), E = wide open a bit (ee), I = wide not open (it), O = wider version of A, U = narrow
string text = Text ?? "";
string visemes = " ";
for (int i = 0; i < text.Length; i++)
{
char c = char.ToLower(text[i]);
char nextc = (i + 1 < text.Length) ? char.ToLower(text[i + 1]) : ' ';
switch (c)
{
// A, E, I, O, U, Y, EE, AI, OO, EA, OA, IR, OW, OU, AW, EW, OI, OY, AR, OR, AY
case 'a':
if (nextc == 'y' || nextc == 'i')
{
visemes += "AE";
i++;
}
else if (nextc == 'w')
{
visemes += "U";
i++;
}
else if (nextc == 'r')
{
visemes += "A";
i++;
}
else
{
visemes += "A";
}
break;
case 'e':
if (nextc == 'y')
{
visemes += "AE";
i++;
}
else if (nextc == 'e' || nextc == 'a')
{
visemes += "E";
i++;
}
else if (nextc == 'r')
{
visemes += "A";
i++;
}
else
{
visemes += "E";
}
break;
case 'i':
case '|':
if (nextc == 'y')
{
visemes += "IE";
i++;
}
else
{
visemes += "I";
}
break;
case 'o':
if (nextc == 'i')
{
visemes += "OE";
i++;
}
else if (nextc == 'o' || nextc == 'r')
{
visemes += "U";
}
else if (nextc == 'u')
{
visemes += "AU";
}
else
{
visemes += "O";
}
break;
case 'u':
visemes += "U";
break;
case 'y':
visemes += "E";
break;
case 'w':
visemes += "U";
if (nextc == 'h') i++;
break;
case 's':
visemes += "S";
if (nextc == 's' || nextc == 't' || nextc == 'h') i++;
break;
case 'c':
case 'k':
visemes += "C";
if (nextc == 'k' || nextc == 'h') i++;
break;
default:
// Anything else (a consonant, or the space between words) means the mouth should close.
if (visemes[visemes.Length - 1] != ' ')
{
visemes += " ";
}
break;
}
}
if (visemes[visemes.Length - 1] != ' ')
{
visemes += " ";
}
Visemes = visemes;
}
}
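// ============================================================================
// TalkingTrackAsset (TrackAsset)
// Timeline track that accepts TalkingClip clips and binds to an
// AlansBlendShapeClipVowels component on the character.
// ============================================================================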
using System;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using UnityEngine;
using UnityEngine.Timeline;
[Serializable]
[DisplayName("Talking Mouth Animation Track")]
[TrackColor(2 / 255f, 126 / 255f, 234 / 255f)] // To ask UX
[TrackClipType(typeof(TalkingClip))]
[TrackBindingType(typeof(AlansBlendShapeClipVowels))]
public class TalkingTrackAsset : TrackAsset
{
}
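// Rough usage, inferred from the code above: add AlansBlendShapeClip and
// AlansBlendShapeClipVowels to the VRM character, point Blend Shape Clip
// Directory at the folder of BlendShapeClip assets, then in a Timeline add a
// "Talking Mouth Animation Track", bind it to the AlansBlendShapeClipVowels
// component, and create TalkingClip clips containing the text to speak.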