├── lib ├── male │ ├── aeiou.mp3 │ ├── count.mp3 │ ├── example.mp3 │ ├── lipsync.mp3 │ └── speech.mp3 ├── network_image └── female │ ├── aeiou.mp3 │ ├── count.mp3 │ ├── example.mp3 │ ├── lipsync.mp3 │ └── speech.mp3 ├── .gitignore ├── src ├── avatar3D │ ├── core │ │ ├── BoneParameter.as │ │ ├── bone │ │ │ ├── BoneMov.as │ │ │ └── BoneRot.as │ │ └── AvatarFeature.as │ ├── face │ │ ├── neck │ │ │ └── AvatarNeck.as │ │ ├── eyes │ │ │ └── AvatarEye.as │ │ └── mouth │ │ │ └── AvatarMouth.as │ ├── expression │ │ ├── setting │ │ │ └── ExpressionParameter.as │ │ ├── AvatarExpression.as │ │ └── ExpressionsCollection.as │ ├── AvatarAnimator.as │ ├── AvatarBuilder.as │ └── AvatarCore.as ├── lipsync │ ├── training │ │ ├── TrainingPattern.as │ │ ├── generator │ │ │ ├── ProviderEvent.as │ │ │ ├── SampleProvider.as │ │ │ └── TrainingPatternGenerator.as │ │ ├── LipsyncCreator.mxml │ │ └── LipsyncTrainer.as │ ├── player │ │ ├── LipsyncBufferItem.as │ │ ├── LipsyncEvent.as │ │ └── LipsyncPlayer.as │ └── core │ │ ├── LipsyncSettings.as │ │ ├── phoneme │ │ ├── Phoneme.as │ │ └── PhonemeCollection.as │ │ ├── network │ │ ├── Neuron.as │ │ └── NeuralNetwork.as │ │ └── lpc │ │ └── LP.as ├── util │ ├── AvatarXMLProvider.as │ ├── AvatarDebugger.as │ ├── LookAtPoint.as │ ├── DAECompressor.as │ ├── Label.as │ └── NeuralNetworkProvider.as ├── scenes │ ├── LipsyncTestScene.as │ ├── SetupVisemeScene.as │ └── ColladaTestScene.as ├── generic3D │ ├── AvatarScene.as │ └── collada │ │ └── AvatarModelProvider.as └── editor │ └── LipsyncEditorWindow.as ├── README.md ├── Lipsync.lxml ├── obj ├── LipsyncConfig.old ├── LipsyncConfig.xml └── Lipsync.flex.compc.xml ├── LICENSE └── Lipsync.as3proj /lib/male/aeiou.mp3: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s-soltys/LipSync/HEAD/lib/male/aeiou.mp3 -------------------------------------------------------------------------------- /lib/male/count.mp3: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s-soltys/LipSync/HEAD/lib/male/count.mp3 -------------------------------------------------------------------------------- /lib/network_image: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s-soltys/LipSync/HEAD/lib/network_image -------------------------------------------------------------------------------- /lib/female/aeiou.mp3: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s-soltys/LipSync/HEAD/lib/female/aeiou.mp3 -------------------------------------------------------------------------------- /lib/female/count.mp3: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s-soltys/LipSync/HEAD/lib/female/count.mp3 -------------------------------------------------------------------------------- /lib/male/example.mp3: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s-soltys/LipSync/HEAD/lib/male/example.mp3 -------------------------------------------------------------------------------- /lib/male/lipsync.mp3: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s-soltys/LipSync/HEAD/lib/male/lipsync.mp3 -------------------------------------------------------------------------------- /lib/male/speech.mp3: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/s-soltys/LipSync/HEAD/lib/male/speech.mp3 -------------------------------------------------------------------------------- /lib/female/example.mp3: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s-soltys/LipSync/HEAD/lib/female/example.mp3 -------------------------------------------------------------------------------- /lib/female/lipsync.mp3: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s-soltys/LipSync/HEAD/lib/female/lipsync.mp3 -------------------------------------------------------------------------------- /lib/female/speech.mp3: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s-soltys/LipSync/HEAD/lib/female/speech.mp3 -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Build and Release Folders 2 | bin/ 3 | bin-debug/ 4 | bin-release/ 5 | 6 | # Other files and folders 7 | .settings/ 8 | 9 | # Project files, i.e. `.project`, `.actionScriptProperties` and `.flexProperties` 10 | # should NOT be excluded as they contain compiler settings and other important 11 | # information for Eclipse / Flash Builder. 12 | -------------------------------------------------------------------------------- /src/avatar3D/core/BoneParameter.as: -------------------------------------------------------------------------------- 1 | package avatar3D.core 2 | { 3 | /** 4 | * ... 5 | * @author S 6 | */ 7 | 8 | public interface BoneParameter { 9 | function set value(value:Number):void; 10 | function get value():Number; 11 | 12 | function refreshValue(change:Number):void; 13 | 14 | function setValueTween(value:Number, time:Number, delay:Number = 0.0, transition:String = "linear"):void; 15 | 16 | } 17 | 18 | } -------------------------------------------------------------------------------- /src/lipsync/training/TrainingPattern.as: -------------------------------------------------------------------------------- 1 | package lipsync.training 2 | { 3 | import flash.geom.Vector3D; 4 | public class TrainingPattern 5 | { 6 | public var input:Vector.; 7 | public var output:Vector.; 8 | 9 | public function TrainingPattern(inputPattern:Vector. = null, outputPattern:Vector. = null) { 10 | this.input = inputPattern; 11 | this.output = outputPattern; 12 | } 13 | } 14 | } -------------------------------------------------------------------------------- /src/lipsync/player/LipsyncBufferItem.as: -------------------------------------------------------------------------------- 1 | package lipsync.player 2 | { 3 | import lipsync.core.phoneme.Phoneme; 4 | /** 5 | * ... 6 | * @author S 7 | */ 8 | public class LipsyncBufferItem 9 | { 10 | public var phoneme:Phoneme; 11 | public var position:int; 12 | public var energy:Number; 13 | public var samples:Vector.; 14 | 15 | public function LipsyncBufferItem() { 16 | phoneme = Phoneme.NULL; 17 | energy = 0; 18 | } 19 | 20 | } 21 | 22 | } -------------------------------------------------------------------------------- /src/util/AvatarXMLProvider.as: -------------------------------------------------------------------------------- 1 | package util 2 | { 3 | import flash.utils.ByteArray; 4 | /** 5 | * ... 
6 | * @author S 7 | */ 8 | public class AvatarXMLProvider 9 | { 10 | [Embed(source="../../lib/xml/avatar_default.xml", mimeType="application/octet-stream")] 11 | private var defaultXMLFile:Class; 12 | 13 | public var xml:XML; 14 | 15 | public function AvatarXMLProvider() { 16 | var xmlString:String = new defaultXMLFile(); 17 | 18 | xml = new XML(xmlString); 19 | } 20 | 21 | } 22 | } -------------------------------------------------------------------------------- /src/util/AvatarDebugger.as: -------------------------------------------------------------------------------- 1 | package util 2 | { 3 | /** 4 | * ... 5 | * @author S 6 | */ 7 | public class AvatarDebugger 8 | { 9 | 10 | public function AvatarDebugger() { 11 | 12 | } 13 | 14 | public static function log(log:String):void { 15 | trace("[LOG]: " + log); 16 | } 17 | 18 | public static function debug(debug:String):void { 19 | trace("[DEBUG]: " + debug); 20 | } 21 | 22 | public static function error(error:String):void { 23 | trace("[ERROR]: " + error); 24 | } 25 | 26 | } 27 | } -------------------------------------------------------------------------------- /src/lipsync/core/LipsyncSettings.as: -------------------------------------------------------------------------------- 1 | package lipsync.core 2 | { 3 | /** 4 | * ... 5 | * @author S 6 | */ 7 | public class LipsyncSettings { 8 | public static var outputCount:int = 6; 9 | 10 | public static var samplingDecimate:int = 6; 11 | 12 | public static const samplingRate:int = 44100; 13 | public static const samplingRateMS:Number = 44.1; 14 | 15 | public static var windowLength:int = 18; 16 | 17 | public static var recognizePhonemeDelay:int = 20; 18 | 19 | public static var activationEnergy:Number = 0.025; 20 | } 21 | 22 | } -------------------------------------------------------------------------------- /src/lipsync/training/generator/ProviderEvent.as: -------------------------------------------------------------------------------- 1 | package lipsync.training.generator 2 | { 3 | import flash.events.Event; 4 | import lipsync.core.phoneme.Phoneme; 5 | /** 6 | * ... 7 | * @author S 8 | */ 9 | public class ProviderEvent extends Event 10 | { 11 | internal static const TRAINING_SEQ:String = "training_seq"; 12 | 13 | internal var phoneme:Phoneme; 14 | internal var sampleArraySet:Array; 15 | 16 | public function ProviderEvent(type:String, bubbles:Boolean = false, cancelable:Boolean = false){ 17 | super(type, bubbles, cancelable); 18 | } 19 | 20 | } 21 | 22 | } -------------------------------------------------------------------------------- /src/util/LookAtPoint.as: -------------------------------------------------------------------------------- 1 | package util 2 | { 3 | /** 4 | * ... 
5 | * @author S 6 | */ 7 | public class LookAtPoint 8 | { 9 | public var x:Number; 10 | public var y:Number; 11 | 12 | private var nextWeight:Number; 13 | private var currentWeight:Number; 14 | 15 | public function LookAtPoint(inertia:Number) { 16 | currentWeight = inertia; 17 | nextWeight = 1.0 - inertia; 18 | } 19 | 20 | public function lookAt(x:Number, y:Number):void { 21 | this.x = this.x * currentWeight + x * nextWeight; 22 | this.y = this.y * currentWeight + y * nextWeight; 23 | } 24 | 25 | } 26 | } -------------------------------------------------------------------------------- /src/util/DAECompressor.as: -------------------------------------------------------------------------------- 1 | package util 2 | { 3 | import flash.display.Sprite; 4 | import flash.net.FileReference; 5 | import flash.utils.ByteArray; 6 | /** 7 | * ... 8 | * @author S 9 | */ 10 | public class DAECompressor extends Sprite 11 | { 12 | [Embed(source = "../../lib/model/model.dae", mimeType = "application/octet-stream")] 13 | private var model:Class; 14 | 15 | public function DAECompressor() { 16 | var modelByteArray:ByteArray = new model; 17 | modelByteArray.deflate(); 18 | 19 | var file:FileReference = new FileReference(); 20 | file.save(modelByteArray, "model"); 21 | } 22 | 23 | } 24 | 25 | } -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # LipSync 2 | 3 | This application generates real-time facial animations for 3D models based on input sound files. 4 | 5 | A short presentation can be seen on my YouTube channel: 6 | 7 | https://youtu.be/I08ZYTYpRis 8 | 9 | 10 | ## How it works 11 | The application works in several steps: 12 | 13 | 1. Load speech samples in windows up to 30 ms 14 | 2. Extract features from the sample windows using linear prediction 15 | 3. Classify extracted feature blocks to visemes using neural networks 16 | 4. Animate the 3D model using viseme data 17 | 18 | 19 | ## Further reading 20 | 21 | https://en.wikipedia.org/wiki/Linear_prediction 22 | 23 | https://en.wikipedia.org/wiki/Viseme 24 | -------------------------------------------------------------------------------- /src/avatar3D/face/neck/AvatarNeck.as: -------------------------------------------------------------------------------- 1 | package avatar3D.face.neck 2 | { 3 | import avatar3D.core.AvatarFeature; 4 | import away3d.containers.ObjectContainer3D; 5 | 6 | /** 7 | * ... 8 | * @author S 9 | */ 10 | public class AvatarNeck 11 | { 12 | private var neckLow:AvatarFeature; 13 | private var neckHigh:AvatarFeature; 14 | 15 | public function AvatarNeck(avatar:ObjectContainer3D, xml:XMLList) { 16 | neckLow = new AvatarFeature(avatar, xml.neck_low); 17 | neckHigh = new AvatarFeature(avatar, xml.neck_high); 18 | } 19 | 20 | public function lookAt(posX:Number, posY:Number):void { 21 | neckHigh.rotZ.value = posX; 22 | neckHigh.rotX.value = -posY; 23 | neckHigh.rotY.value = posX; 24 | 25 | neckLow.rotZ.value = posX; 26 | neckLow.rotX.value = -posY; 27 | } 28 | 29 | } 30 | } -------------------------------------------------------------------------------- /src/util/Label.as: -------------------------------------------------------------------------------- 1 | package util 2 | { 3 | import flash.display.MovieClip; 4 | import flash.events.Event; 5 | import flash.events.MouseEvent; 6 | import flash.text.TextField; 7 | import flash.text.TextFormat; 8 | /** 9 | * ... 
10 | * @author S 11 | */ 12 | public class Label extends MovieClip { 13 | 14 | public function Label(text:String) { 15 | var lbl:TextField = new TextField(); 16 | lbl.selectable = false; 17 | this.addChild(lbl); 18 | 19 | var tf:TextFormat = new TextFormat("Lucida Console", 20, 0x555555, true); 20 | lbl.text = text; 21 | lbl.setTextFormat(tf); 22 | 23 | lbl.width = 1.2 * lbl.textWidth; 24 | } 25 | 26 | public function onMouseClick(onMouseClick:Function):void { 27 | this.addEventListener(MouseEvent.CLICK, onMouseClick, false, 0, false); 28 | } 29 | 30 | 31 | } 32 | 33 | } -------------------------------------------------------------------------------- /src/lipsync/player/LipsyncEvent.as: -------------------------------------------------------------------------------- 1 | package lipsync.player 2 | { 3 | import flash.events.Event; 4 | import flash.utils.ByteArray; 5 | import lipsync.core.phoneme.Phoneme; 6 | /** 7 | * ... 8 | * @author Szymon 9 | */ 10 | public class LipsyncEvent extends Event 11 | { 12 | public static const AMPLITUDE_SAMPLE:String = "soundev_amplitude_sample"; 13 | public static const PHONEME:String = "soundev_phoneme"; 14 | public static const PLAYING_COMPLETE:String = "soundev_complete"; 15 | public static const PLAYING_ERROR:String = "soundev_error"; 16 | public static const PLAYING_START:String = "soundev_start"; 17 | 18 | public var amplitude:Number; 19 | public var phoneme:Phoneme = Phoneme.NULL; 20 | 21 | public function LipsyncEvent(type:String, bubbles:Boolean = false, cancelable:Boolean = false){ 22 | super(type, bubbles, cancelable); 23 | } 24 | } 25 | } -------------------------------------------------------------------------------- /src/scenes/LipsyncTestScene.as: -------------------------------------------------------------------------------- 1 | package scenes 2 | { 3 | import flash.display.Sprite; 4 | import lipsync.core.LipsyncSettings; 5 | import lipsync.player.LipsyncEvent; 6 | import lipsync.player.LipsyncPlayer; 7 | import util.NeuralNetworkProvider; 8 | /** 9 | * ... 10 | * @author S 11 | */ 12 | public class LipsyncTestScene extends Sprite { 13 | private var soundPlayer:LipsyncPlayer; 14 | 15 | public function LipsyncTestScene() { 16 | soundPlayer = new LipsyncPlayer(100, 1.0); 17 | soundPlayer.setupNeuralNetwork(NeuralNetworkProvider.getNetwork()); 18 | soundPlayer.addEventListener(LipsyncEvent.PHONEME, onGetPhoneme); 19 | 20 | soundPlayer.playSound("../lib/final/female/aeiou.mp3"); 21 | } 22 | 23 | private function onGetPhoneme(event:LipsyncEvent):void { 24 | if(event.phoneme.id != 0) 25 | trace(event.phoneme.visemeId); 26 | } 27 | 28 | } 29 | 30 | } -------------------------------------------------------------------------------- /src/generic3D/AvatarScene.as: -------------------------------------------------------------------------------- 1 | package generic3D 2 | { 3 | import avatar3D.AvatarAnimator; 4 | import away3d.cameras.Camera3D; 5 | import away3d.containers.ObjectContainer3D; 6 | import away3d.containers.Scene3D; 7 | import away3d.containers.View3D; 8 | import flash.events.Event; 9 | /** 10 | * ... 
11 | * @author S 12 | */ 13 | public class AvatarScene extends View3D 14 | { 15 | 16 | public function AvatarScene() { 17 | camera = new Camera3D(); 18 | scene = new Scene3D(); 19 | 20 | this.addEventListener(Event.ENTER_FRAME, onEnterFrame); 21 | } 22 | 23 | public function addAvatar(avatar:AvatarAnimator):void { 24 | scene.addChild(avatar.getAvatarObject3D()); 25 | } 26 | 27 | public function removeAvatar(avatar:AvatarAnimator):void { 28 | scene.removeChild(avatar.getAvatarObject3D()); 29 | } 30 | 31 | private function onEnterFrame(event:Event):void { 32 | this.render(); 33 | } 34 | } 35 | 36 | } -------------------------------------------------------------------------------- /src/util/NeuralNetworkProvider.as: -------------------------------------------------------------------------------- 1 | package util 2 | { 3 | import flash.utils.ByteArray; 4 | import lipsync.core.network.NeuralNetwork; 5 | /** 6 | * ... 7 | * @author S 8 | */ 9 | public class NeuralNetworkProvider 10 | { 11 | [Embed(source="../../lib/lipsync/network_image_m", mimeType="application/octet-stream")] public static var networkImageMale:Class; 12 | [Embed(source="../../lib/lipsync/network_image_f", mimeType="application/octet-stream")] public static var networkImageFemale:Class; 13 | 14 | public static function getNetwork():NeuralNetwork { 15 | var image:ByteArray = new networkImageMale(); 16 | 17 | var network:NeuralNetwork = new NeuralNetwork(); 18 | network.load(image); 19 | 20 | return network; 21 | } 22 | 23 | public static function build(imageClass:Class):NeuralNetwork { 24 | var image:ByteArray = new imageClass(); 25 | 26 | var network:NeuralNetwork = new NeuralNetwork(); 27 | network.load(image); 28 | 29 | return network; 30 | } 31 | 32 | } 33 | } -------------------------------------------------------------------------------- /src/avatar3D/expression/setting/ExpressionParameter.as: -------------------------------------------------------------------------------- 1 | package avatar3D.expression.setting 2 | { 3 | /** 4 | * ... 
5 | * @author S 6 | */ 7 | public class ExpressionParameter 8 | { 9 | public var rotation:Boolean; 10 | public var rot_x:Number = 0.0; 11 | public var rot_y:Number = 0.0; 12 | public var rot_z:Number = 0.0; 13 | 14 | public var movement:Boolean; 15 | public var mov_x:Number = 0.0; 16 | public var mov_y:Number = 0.0; 17 | public var mov_z:Number = 0.0; 18 | 19 | 20 | public function ExpressionParameter(xml:XMLList) { 21 | if ((xml.attribute("rot_x").length() + xml.attribute("rot_y").length() + xml.attribute("rot_z").length()) > 0) { 22 | rotation = true; 23 | } 24 | rot_x = xml.@rot_x; 25 | rot_y = xml.@rot_y; 26 | rot_z = xml.@rot_z; 27 | 28 | 29 | if ((xml.attribute("mov_x").length() + xml.attribute("mov_y").length() + xml.attribute("mov_z").length()) > 0) { 30 | movement = true; 31 | } 32 | mov_x = xml.@mov_x; 33 | mov_y = xml.@mov_y; 34 | mov_z = xml.@mov_z; 35 | } 36 | 37 | } 38 | } -------------------------------------------------------------------------------- /Lipsync.lxml: -------------------------------------------------------------------------------- 1 | 2 | 3 | C:\Users\Szymon\Desktop\AS3\Lipsync\bin\Lipsync.swc 4 | .\bin\Lipsync.flash.swc 5 | false 6 | false 7 | false 8 | true 9 | false 10 | 11 | false 12 | 13 | 14 | 15 | 16 | src\pl\lipsync\training\lipsynctrainer.as 17 | src\pl\lipsync\training\lipsynccreator.mxml 18 | 19 | 20 | src\pl\lipsync\training\lipsynctrainer.as 21 | src\pl\lipsync\training\lipsynccreator.mxml 22 | 23 | 24 | 25 | 26 | 27 | 28 | None 29 | 30 | -------------------------------------------------------------------------------- /obj/LipsyncConfig.old: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 10.0.0 6 | 7 | 8 | CONFIG::debug 9 | false 10 | 11 | 12 | CONFIG::release 13 | true 14 | 15 | 16 | CONFIG::timeStamp 17 | '2011-08-24' 18 | 19 | 20 | C:\Users\Szymon\Desktop\AS3\Lipsync\src 21 | D:\Program Files (x86)\FlashDevelop\Library\AS3\classes 22 | 23 | 24 | 25 | C:\Users\Szymon\Desktop\AS3\Lipsync\src\pl\lipsync\training\LipsyncCreator.mxml 26 | 27 | #FFFFFF 28 | 30 29 | 30 | 850 31 | 550 32 | 33 | -------------------------------------------------------------------------------- /obj/LipsyncConfig.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 10.0.0 6 | 7 | 8 | CONFIG::debug 9 | false 10 | 11 | 12 | CONFIG::release 13 | true 14 | 15 | 16 | CONFIG::timeStamp 17 | '2011-08-24' 18 | 19 | 20 | C:\Users\Szymon\Desktop\AS3\Lipsync\src 21 | D:\Program Files (x86)\FlashDevelop\Library\AS3\classes 22 | 23 | 24 | 25 | C:\Users\Szymon\Desktop\AS3\Lipsync\src\pl\lipsync\training\LipsyncCreator.mxml 26 | 27 | #FFFFFF 28 | 30 29 | 30 | 850 31 | 550 32 | 33 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | This is free and unencumbered software released into the public domain. 2 | 3 | Anyone is free to copy, modify, publish, use, compile, sell, or 4 | distribute this software, either in source code form or as a compiled 5 | binary, for any purpose, commercial or non-commercial, and by any 6 | means. 7 | 8 | In jurisdictions that recognize copyright laws, the author or authors 9 | of this software dedicate any and all copyright interest in the 10 | software to the public domain. We make this dedication for the benefit 11 | of the public at large and to the detriment of our heirs and 12 | successors. 
We intend this dedication to be an overt act of 13 | relinquishment in perpetuity of all present and future rights to this 14 | software under copyright law. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 17 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 18 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 19 | IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR 20 | OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, 21 | ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 22 | OTHER DEALINGS IN THE SOFTWARE. 23 | 24 | For more information, please refer to 25 | 26 | -------------------------------------------------------------------------------- /src/lipsync/core/phoneme/Phoneme.as: -------------------------------------------------------------------------------- 1 | package lipsync.core.phoneme 2 | { 3 | /** 4 | * ... 5 | * @author S 6 | */ 7 | public class Phoneme { 8 | public static const NULL:Phoneme = new Phoneme("", 0, 0); 9 | 10 | public static const v1a:Phoneme = new Phoneme("v1", 2, 1); 11 | public static const v1b:Phoneme = new Phoneme("v1", 3, 1); 12 | public static const v2a:Phoneme = new Phoneme("v2", 6, 2); 13 | public static const v2b:Phoneme = new Phoneme("v2", 7, 2); 14 | public static const v3a:Phoneme = new Phoneme("v3", 10, 3); 15 | public static const v3b:Phoneme = new Phoneme("v3", 11, 3); 16 | public static const v4a:Phoneme = new Phoneme("v4", 14, 4); 17 | public static const v4b:Phoneme = new Phoneme("v4", 15, 4); 18 | public static const v5a:Phoneme = new Phoneme("v5", 20, 5); 19 | public static const v5b:Phoneme = new Phoneme("v5", 21, 5); 20 | public static const v6a:Phoneme = new Phoneme("v6", 30, 6); 21 | public static const v6b:Phoneme = new Phoneme("v6", 31, 6); 22 | public static const v7a:Phoneme = new Phoneme("v7", 40, 7); 23 | public static const v7b:Phoneme = new Phoneme("v7", 41, 7); 24 | public static const v8a:Phoneme = new Phoneme("v8", 52, 8); 25 | public static const v8b:Phoneme = new Phoneme("v8", 53, 8); 26 | public static const v9a:Phoneme = new Phoneme("v9", 62, 9); 27 | public static const v9b:Phoneme = new Phoneme("v9", 63, 9); 28 | 29 | public var id:int; 30 | public var symbol:String; 31 | public var visemeId:int; 32 | 33 | public function Phoneme(symbol:String, id:int, visemeId:int) { 34 | this.symbol = symbol; 35 | this.id = id; 36 | this.visemeId = visemeId; 37 | } 38 | 39 | } 40 | } -------------------------------------------------------------------------------- /obj/Lipsync.flex.compc.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | C:\Users\Szymon\Desktop\AS3\Lipsync\bin\Lipsync.swc 4 | true 5 | 10 6 | true 7 | false 8 | 9 | false 10 | false 11 | true 12 | true 13 | false 14 | true 15 | true 16 | true 17 | true 18 | false 19 | 20 | c:\users\szymon\desktop\as3\lipsync\src 21 | 22 | 23 | 24 | pl.lipsync.core.LipsyncSettings 25 | pl.lipsync.core.lpc.LP 26 | pl.lipsync.core.network.NeuralNetwork 27 | pl.lipsync.core.network.Neuron 28 | pl.lipsync.core.phoneme.Phoneme 29 | pl.lipsync.core.phoneme.PhonemeCollection 30 | pl.lipsync.player.LipsyncBufferItem 31 | pl.lipsync.player.LipsyncEvent 32 | pl.lipsync.player.LipsyncPlayer 33 | pl.lipsync.training.TrainingPattern 34 | pl.lipsync.training.generator.ProviderEvent 35 | pl.lipsync.training.generator.SampleProvider 36 | pl.lipsync.training.generator.TrainingPatternGenerator 37 | 38 | 
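The phoneme ids defined in Phoneme.as above are chosen so that each fits into LipsyncSettings.outputCount (6) bits: PhonemeCollection.phonemeToArray encodes an id as a 6-element binary vector, which is the target pattern the neural network is trained against, and arrayToPhoneme thresholds a network output vector at 0.5 per entry to map it back to a Phoneme. A minimal sketch of that round trip, assuming the default outputCount of 6; the network output values here are invented for illustration:

    // Encode Phoneme.v3a (id = 10) into the 6-bit target vector used for training.
    var target:Vector.<Number> = PhonemeCollection.phonemeToArray(Phoneme.v3a);
    trace(target); // 0,0,1,0,1,0  (10 in binary, most significant bit first)

    // Decode a made-up, noisy network response: entries >= 0.5 become 1-bits.
    var output:Vector.<Number> = Vector.<Number>([0.1, 0.2, 0.9, 0.1, 0.8, 0.3]);
    var recognized:Phoneme = PhonemeCollection.arrayToPhoneme(output);
    trace(recognized.symbol, recognized.visemeId); // v3 3
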
-------------------------------------------------------------------------------- /src/avatar3D/expression/AvatarExpression.as: -------------------------------------------------------------------------------- 1 | package avatar3D.expression 2 | { 3 | import avatar3D.expression.setting.ExpressionParameter; 4 | /** 5 | * ... 6 | * @author S 7 | */ 8 | public class AvatarExpression 9 | { 10 | public var alias:String; 11 | public var id:int; 12 | 13 | public var jaw:ExpressionParameter; 14 | public var tongue:ExpressionParameter; 15 | public var mouth_r:ExpressionParameter; 16 | public var mouth_l:ExpressionParameter; 17 | public var lip_down_r:ExpressionParameter; 18 | public var lip_down_m:ExpressionParameter; 19 | public var lip_down_l:ExpressionParameter; 20 | public var lip_top_r:ExpressionParameter; 21 | public var lip_top_m:ExpressionParameter; 22 | public var lip_top_l:ExpressionParameter; 23 | public var cheek_r:ExpressionParameter; 24 | public var cheek_l:ExpressionParameter; 25 | public var cheekb_r:ExpressionParameter; 26 | public var cheekb_l:ExpressionParameter; 27 | 28 | 29 | public function AvatarExpression(xml:Object) { 30 | this.alias = xml.@alias; 31 | this.id = parseInt(xml.@id); 32 | 33 | jaw = new ExpressionParameter(xml.jaw); 34 | tongue = new ExpressionParameter(xml.tongue); 35 | mouth_r = new ExpressionParameter(xml.mouth_r); 36 | mouth_l = new ExpressionParameter(xml.mouth_l); 37 | lip_down_r = new ExpressionParameter(xml.lip_down_r); 38 | lip_down_m = new ExpressionParameter(xml.lip_down_m); 39 | lip_down_l = new ExpressionParameter(xml.lip_down_l); 40 | lip_top_r = new ExpressionParameter(xml.lip_top_r); 41 | lip_top_m = new ExpressionParameter(xml.lip_top_m); 42 | lip_top_l = new ExpressionParameter(xml.lip_top_l); 43 | cheek_r = new ExpressionParameter(xml.cheek_r); 44 | cheek_l = new ExpressionParameter(xml.cheek_l); 45 | cheekb_r = new ExpressionParameter(xml.cheekb_r); 46 | cheekb_l = new ExpressionParameter(xml.cheekb_l); 47 | } 48 | 49 | } 50 | } -------------------------------------------------------------------------------- /src/avatar3D/face/eyes/AvatarEye.as: -------------------------------------------------------------------------------- 1 | package avatar3D.face.eyes 2 | { 3 | import avatar3D.core.AvatarFeature; 4 | import away3d.containers.ObjectContainer3D; 5 | import flash.events.Event; 6 | import flash.events.TimerEvent; 7 | import flash.utils.Timer; 8 | 9 | /** 10 | * ... 
11 | * @author S 12 | */ 13 | public class AvatarEye 14 | { 15 | private var blinkTime:Number; 16 | private var blinkPause:Number; 17 | 18 | private var blinkTimer:Timer; 19 | 20 | private var eyeball:AvatarFeature; 21 | private var eyelid:AvatarFeature; 22 | private var eyebrow_i:AvatarFeature; 23 | private var eyebrow_o:AvatarFeature; 24 | private var eyebrow:AvatarFeature; 25 | 26 | public function AvatarEye(avatar:ObjectContainer3D, xml:XMLList) { 27 | eyelid = new AvatarFeature(avatar, xml.eyelid); 28 | eyeball = new AvatarFeature(avatar, xml.eyeball); 29 | 30 | eyebrow_i = new AvatarFeature(avatar, xml.eyebrow_i); 31 | eyebrow_o = new AvatarFeature(avatar, xml.eyebrow_o); 32 | } 33 | 34 | public function setupMotionParameters(blinkTime:Number, blinkPause:Number):void { 35 | this.blinkTime = blinkTime; 36 | this.blinkPause = blinkPause; 37 | } 38 | 39 | public function blink():void { 40 | eyelid.rotX.setValueTween(1.0, blinkTime, 0.0, "easeInOutSine"); 41 | eyelid.rotX.setValueTween(0.0, blinkTime, blinkPause, "easeInOutSine"); 42 | } 43 | 44 | public function close():void { 45 | blinkTimer.stop(); 46 | eyelid.rotX.setValueTween(1.0, blinkTime, 0.0, "easeOutCubic"); 47 | } 48 | 49 | public function open():void { 50 | blinkTimer.reset(); 51 | blinkTimer.start(); 52 | } 53 | 54 | public function lookAt(posX:Number, posY:Number):void { 55 | eyeball.rotX.value = posY; 56 | eyeball.rotY.value = posX; 57 | 58 | eyebrow_i.movY.value = posY / 2.75; 59 | eyebrow_o.movY.value = posY / 2.0; 60 | } 61 | 62 | } 63 | } -------------------------------------------------------------------------------- /src/avatar3D/expression/ExpressionsCollection.as: -------------------------------------------------------------------------------- 1 | package avatar3D.expression 2 | { 3 | /** 4 | * ... 
5 | * @author S 6 | */ 7 | public class ExpressionsCollection 8 | { 9 | public static var visemes:Array; 10 | 11 | public static var NEUTRAL:AvatarExpression; 12 | public static var JOY:AvatarExpression; 13 | public static var SADNESS:AvatarExpression; 14 | public static var ANGER:AvatarExpression; 15 | public static var FEAR:AvatarExpression; 16 | public static var DISGUST:AvatarExpression; 17 | public static var SURPRISE:AvatarExpression; 18 | 19 | 20 | public static function initCollection(xml:XMLList):void { 21 | NEUTRAL = new AvatarExpression(xml.emotions.neutral); 22 | JOY = new AvatarExpression(xml.emotions.joy); 23 | SADNESS = new AvatarExpression(xml.emotions.sadness); 24 | ANGER = new AvatarExpression(xml.emotions.anger); 25 | FEAR = new AvatarExpression(xml.emotions.fear); 26 | DISGUST = new AvatarExpression(xml.emotions.disgust); 27 | SURPRISE = new AvatarExpression(xml.emotions.surprise); 28 | 29 | visemes = new Array(); 30 | var visemesXML:XMLList = xml.visemes; 31 | for each(var v:XML in visemesXML.children()) { 32 | var viseme:AvatarExpression = new AvatarExpression(v); 33 | visemes.push(viseme); 34 | } 35 | } 36 | 37 | public static function combine(viseme:AvatarExpression, expression:AvatarExpression):AvatarExpression { 38 | var combined:AvatarExpression = null; 39 | 40 | return combined; 41 | } 42 | 43 | public static function getVisemeByAlias(alias:String):AvatarExpression { 44 | for each(var viseme:AvatarExpression in visemes) { 45 | if (viseme.alias == alias) return viseme; 46 | } 47 | return NEUTRAL; 48 | } 49 | 50 | public static function getVisemeById(id:int):AvatarExpression { 51 | for each(var viseme:AvatarExpression in visemes) { 52 | if (viseme.id == id) return viseme; 53 | } 54 | return NEUTRAL; 55 | } 56 | 57 | } 58 | } -------------------------------------------------------------------------------- /src/avatar3D/AvatarAnimator.as: -------------------------------------------------------------------------------- 1 | package avatar3D 2 | { 3 | import avatar3D.expression.AvatarExpression; 4 | import avatar3D.expression.ExpressionsCollection; 5 | import flash.events.Event; 6 | import flash.text.TextField; 7 | import lipsync.core.LipsyncSettings; 8 | import lipsync.core.network.NeuralNetwork; 9 | import lipsync.player.LipsyncEvent; 10 | import lipsync.player.LipsyncPlayer; 11 | import util.NeuralNetworkProvider; 12 | /** 13 | * ... 
14 | * @author S 15 | */ 16 | public class AvatarAnimator extends AvatarCore 17 | { 18 | private var lipsync:LipsyncPlayer; 19 | private var i:int; 20 | 21 | public function AvatarAnimator() { 22 | lipsync = new LipsyncPlayer(100, 1.0); 23 | lipsync.setupNeuralNetwork(NeuralNetworkProvider.getNetwork()); 24 | LipsyncSettings.recognizePhonemeDelay = 50; 25 | lipsync.addEventListener(LipsyncEvent.PHONEME, onLipsyncEvent); 26 | lipsync.addEventListener(LipsyncEvent.PLAYING_COMPLETE, onLipsyncComplete); 27 | } 28 | 29 | public function initAvatar():void { 30 | initAvatarCore(); 31 | } 32 | 33 | public function saySentence(soundFile:String):void { 34 | lipsync.playSound(soundFile); 35 | } 36 | 37 | public function saySentences(soundFiles:Array):void { 38 | lipsync.playSounds(soundFiles); 39 | } 40 | 41 | public function isSpeaking():Boolean { 42 | return lipsync.isSoundPlaying(); 43 | } 44 | 45 | public function saySentencesUsingNetwork(soundFiles:Array, nnetowrk:NeuralNetwork):void { 46 | lipsync.setupNeuralNetwork(nnetowrk); 47 | lipsync.playSounds(soundFiles); 48 | } 49 | 50 | private function onLipsyncEvent(event:LipsyncEvent):void { 51 | //var expression:AvatarExpression = ExpressionsCollection.getVisemeByAlias(event.phoneme.symbol); 52 | var expression:AvatarExpression = ExpressionsCollection.getVisemeById(event.phoneme.visemeId); 53 | setViseme(expression, event.amplitude * 13.5); 54 | } 55 | 56 | private function onLipsyncComplete(event:Event):void { 57 | mouth.setNeutral(0.5); 58 | } 59 | 60 | } 61 | 62 | } -------------------------------------------------------------------------------- /src/lipsync/core/phoneme/PhonemeCollection.as: -------------------------------------------------------------------------------- 1 | package lipsync.core.phoneme 2 | { 3 | import lipsync.core.LipsyncSettings; 4 | /** 5 | * ... 6 | * @author S 7 | */ 8 | public class PhonemeCollection 9 | { 10 | public static var phonemes:Vector. = new Vector.(); 11 | 12 | private static function initCollection():void { 13 | if (phonemes.length == 0) { 14 | phonemes.push(Phoneme.v1a); 15 | phonemes.push(Phoneme.v1b); 16 | phonemes.push(Phoneme.v2a); 17 | phonemes.push(Phoneme.v2b); 18 | phonemes.push(Phoneme.v3a); 19 | phonemes.push(Phoneme.v3b); 20 | phonemes.push(Phoneme.v4a); 21 | phonemes.push(Phoneme.v4b); 22 | phonemes.push(Phoneme.v5a); 23 | phonemes.push(Phoneme.v5b); 24 | phonemes.push(Phoneme.v6a); 25 | phonemes.push(Phoneme.v6b); 26 | phonemes.push(Phoneme.v7a); 27 | phonemes.push(Phoneme.v7b); 28 | phonemes.push(Phoneme.v8a); 29 | phonemes.push(Phoneme.v8b); 30 | phonemes.push(Phoneme.v9a); 31 | phonemes.push(Phoneme.v9b); 32 | } 33 | } 34 | 35 | public static function getById(id:int):Phoneme { 36 | initCollection(); 37 | 38 | for each(var phoneme:Phoneme in phonemes) { 39 | if (phoneme.id == id) 40 | return phoneme; 41 | } 42 | 43 | return Phoneme.NULL; 44 | } 45 | 46 | public static function phonemeToArray(phoneme:Phoneme):Vector. { 47 | var id:int = phoneme.id; 48 | var digits:int = LipsyncSettings.outputCount; 49 | 50 | var result:Vector. 
= new Vector.(); 51 | for (var i:Number = 0; i <= digits - 1; i++) result[i] = 0; 52 | for (i = digits - 1; i >= 0; i--) { 53 | if ((id - Math.pow(2, i)) >= 0) { 54 | result[digits - 1 - i] = 1; 55 | id -= Math.pow(2,i); 56 | } 57 | } 58 | return result; 59 | } 60 | 61 | public static function arrayToPhoneme(array:Vector.):Phoneme { 62 | if (isNaN(array[0])) return Phoneme.NULL; 63 | 64 | var result:int = 0; 65 | var mult:int = 1; 66 | for (var i:int = array.length - 1; i >= 0; i--) { 67 | var value:Number = array[i]; 68 | 69 | if (array[i] >= 0.5) { 70 | result += mult; 71 | } 72 | mult *= 2; 73 | } 74 | return PhonemeCollection.getById(result); 75 | } 76 | 77 | } 78 | } -------------------------------------------------------------------------------- /src/avatar3D/core/bone/BoneMov.as: -------------------------------------------------------------------------------- 1 | package avatar3D.core.bone 2 | { 3 | import avatar3D.core.BoneParameter; 4 | import away3d.containers.Bone; 5 | import caurina.transitions.Tweener; 6 | /** 7 | * ... 8 | * @author S 9 | */ 10 | public class BoneMov implements BoneParameter 11 | { 12 | private var bone:Bone; 13 | 14 | private var min:Number; 15 | private var def:Number; 16 | private var max:Number; 17 | 18 | private var currentWeight:Number; 19 | private var nextWeight:Number; 20 | 21 | 22 | public function BoneMov(bone:Bone, xml:XMLList) { 23 | this.bone = bone; 24 | 25 | if (xml.length() > 0) { 26 | var num:Number = xml.@def; 27 | this.def = bone.x + num; 28 | this.min = xml.@min; 29 | this.max = xml.@max; 30 | 31 | this.currentWeight = xml.@inertia; 32 | this.nextWeight = 1.0 - this.currentWeight; 33 | 34 | bone.x = this.def; 35 | } else { 36 | def = max = min = bone.x; 37 | this.currentWeight = 0.0; 38 | this.nextWeight = 1.0; 39 | } 40 | } 41 | 42 | 43 | public function set value(value:Number):void { 44 | bone.x *= currentWeight; 45 | 46 | if (value > 0) { 47 | if (value > 1.0) value = 1.0; 48 | 49 | bone.x += nextWeight * (def + max * value); 50 | } else if (value < 0) { 51 | value = -value; 52 | if (value > 1.0) value = 1.0; 53 | 54 | bone.x += nextWeight * (def + min * value); 55 | } else { 56 | bone.x += nextWeight * def; 57 | } 58 | } 59 | 60 | public function get value():Number { 61 | return bone.x; 62 | } 63 | 64 | 65 | public function refreshValue(change:Number):void { 66 | bone.x *= (nextWeight * change + currentWeight); 67 | } 68 | 69 | public function setValueTween(value:Number, time:Number, delay:Number = 0.0, transition:String = "linear"):void { 70 | var target:Number = 0.0; 71 | if (value > 0) { 72 | if (value > 1.0) value = 1.0; 73 | 74 | target = def + max * value; 75 | } else if (value < 0) { 76 | value = -value; 77 | if (value > 1.0) value = 1.0; 78 | 79 | target = def + min * value; 80 | } else { 81 | target = def; 82 | } 83 | 84 | Tweener.addTween(bone, { x:target, time:time, delay:delay, transition:transition } ); 85 | } 86 | 87 | } 88 | } -------------------------------------------------------------------------------- /src/avatar3D/core/bone/BoneRot.as: -------------------------------------------------------------------------------- 1 | package avatar3D.core.bone 2 | { 3 | import avatar3D.core.BoneParameter; 4 | import away3d.containers.Bone; 5 | import caurina.transitions.Tweener; 6 | /** 7 | * ... 
8 | * @author S 9 | */ 10 | public class BoneRot implements BoneParameter 11 | { 12 | private var bone:Bone; 13 | 14 | private var min:Number; 15 | private var def:Number; 16 | private var max:Number; 17 | 18 | private var currentWeight:Number; 19 | private var nextWeight:Number; 20 | 21 | 22 | public function BoneRot(bone:Bone, xml:XMLList) { 23 | this.bone = bone; 24 | 25 | if (xml.length() > 0) { 26 | this.def = xml.@def; 27 | this.min = xml.@min; 28 | this.max = xml.@max; 29 | 30 | this.currentWeight = xml.@inertia; 31 | this.nextWeight = 1.0 - this.currentWeight; 32 | 33 | bone.rotationX = def; 34 | } else { 35 | def = max = min = bone.rotationX; 36 | this.currentWeight = 0.0; 37 | this.nextWeight = 1.0; 38 | } 39 | } 40 | 41 | 42 | public function set value(value:Number):void { 43 | bone.rotationX *= currentWeight; 44 | 45 | if (value > 0) { 46 | if (value > 1.0) value = 1.0; 47 | 48 | bone.rotationX += nextWeight * (def + max * value); 49 | } else if (value < 0) { 50 | value = -value; 51 | if (value > 1.0) value = 1.0; 52 | 53 | bone.rotationX += nextWeight * (def + min * value); 54 | } else { 55 | bone.rotationX += nextWeight * def; 56 | } 57 | } 58 | 59 | public function get value():Number { 60 | return bone.rotationX; 61 | } 62 | 63 | 64 | public function refreshValue(change:Number):void { 65 | bone.rotationX *= (nextWeight * change + currentWeight); 66 | } 67 | 68 | public function setValueTween(value:Number, time:Number, delay:Number = 0.0, transition:String = "linear"):void { 69 | var target:Number = 0.0; 70 | if (value > 0) { 71 | if (value > 1.0) value = 1.0; 72 | 73 | target = def + max * value; 74 | } else if (value < 0) { 75 | value = -value; 76 | if (value > 1.0) value = 1.0; 77 | 78 | target = def + min * value; 79 | } else { 80 | target = def; 81 | } 82 | 83 | Tweener.addTween(bone, { rotationX:target, time:time, delay:delay, transition:transition } ); 84 | } 85 | 86 | } 87 | } -------------------------------------------------------------------------------- /src/lipsync/core/network/Neuron.as: -------------------------------------------------------------------------------- 1 | package lipsync.core.network 2 | { 3 | public class Neuron 4 | { 5 | internal var value:Number; 6 | internal var bias:Number; 7 | internal var momentum:Number; 8 | 9 | internal var size:int; 10 | internal var inputs:Vector.; 11 | internal var weights:Vector.; 12 | internal var momentums:Vector.; 13 | 14 | internal function createNeuron(inputsCount:int, bias:Number, weightRange:Number = 1):void { 15 | this.size = inputsCount; 16 | this.bias = bias; 17 | this.momentum = 0; 18 | 19 | this.inputs = new Vector.(size); 20 | this.weights = new Vector.(size); 21 | this.momentums = new Vector.(size); 22 | 23 | for (var i:int = 0; i < size; i++) { 24 | this.inputs[i] = NaN; 25 | this.weights[i] = (Math.random() * (weightRange + weightRange)) - weightRange; 26 | this.momentums[i] = 0; 27 | } 28 | } 29 | 30 | internal function adjustWeights(nError:Number, learningRate:Number, globalMomentum:Number, error:Array):void { 31 | var delta:Number = nError * this.value * (1 - this.value); 32 | 33 | for (var i:int = 0; i < size; i++) { 34 | var weightChange:Number = delta * inputs[i] * learningRate + momentums[i] * globalMomentum; 35 | momentums[i] = weightChange; 36 | weights[i] += weightChange; 37 | error[i] += delta * weights[i]; 38 | } 39 | 40 | var biasChange:Number = delta * learningRate + this.momentum * globalMomentum; 41 | this.momentum = biasChange; 42 | this.bias += biasChange; 43 | } 44 | 45 | /* 46 | 
internal function adjustN(nError:Number, learningRate:Number, globalMomentum:Number, error:Array):void { 47 | var delta:Number = nError * this.value * (1 - this.value); 48 | 49 | for (var i:int = 0; i < size; i++) { 50 | var weightChange:Number = delta * inputs[i] * learningRate + momentums[i] * globalMomentum; 51 | error[i] += delta * weights[i]; 52 | } 53 | } 54 | */ 55 | 56 | internal function calculateValue(inputsArray:Vector.):Number { 57 | var sum:Number = 0; 58 | 59 | for (var i:int = 0; i < size; i++) { 60 | inputs[i] = inputsArray[i]; 61 | sum += weights[i] * inputs[i]; 62 | } 63 | 64 | value = 1 / (1 + Math.exp( -1 * (sum + this.bias))); 65 | return value; 66 | } 67 | 68 | } 69 | } -------------------------------------------------------------------------------- /src/avatar3D/AvatarBuilder.as: -------------------------------------------------------------------------------- 1 | package avatar3D 2 | { 3 | import avatar3D.expression.ExpressionsCollection; 4 | import avatar3D.face.mouth.AvatarMouth; 5 | import util.AvatarXMLProvider; 6 | import avatar3D.face.eyes.AvatarEye; 7 | import avatar3D.face.neck.AvatarNeck; 8 | import away3d.containers.ObjectContainer3D; 9 | import avatar3D.AvatarCore; 10 | import avatar3D.AvatarAnimator; 11 | 12 | /** 13 | * ... 14 | * @author S 15 | */ 16 | public class AvatarBuilder 17 | { 18 | private var avatarObject:ObjectContainer3D; 19 | private var avatarXML:XML; 20 | 21 | private var avatar:AvatarAnimator; 22 | 23 | public function AvatarBuilder(avatarObject:ObjectContainer3D) { 24 | this.avatarObject = avatarObject; 25 | 26 | avatar = new AvatarAnimator(); 27 | avatar.setAvatarObject3D(avatarObject); 28 | 29 | var avatarXMLProvider:AvatarXMLProvider = new AvatarXMLProvider(); 30 | avatarXML = avatarXMLProvider.xml; 31 | 32 | initExpressions(); 33 | } 34 | 35 | private function initExpressions():void { 36 | ExpressionsCollection.initCollection(avatarXML.expressions); 37 | } 38 | 39 | public function buildAvatar():AvatarAnimator { 40 | setupAvatarEyes(); 41 | setupAvatarNeck(); 42 | setupAvatarMouth(); 43 | 44 | avatar.initAvatar(); 45 | 46 | return avatar; 47 | } 48 | 49 | private function setupAvatarEyes():void { 50 | var eyesXML:XMLList = avatarXML.avatar.face_features.eyes; 51 | var blinkDelay:Number = eyesXML.@blink_delay; 52 | var blinkTime:Number = eyesXML.@blink_time; 53 | var blinkPause:Number = eyesXML.@blink_pause; 54 | 55 | avatar.blinkDelay = blinkDelay * 1000; 56 | 57 | avatar.left_eye = new AvatarEye(avatarObject, eyesXML.left_eye); 58 | avatar.right_eye = new AvatarEye(avatarObject, eyesXML.right_eye); 59 | 60 | avatar.left_eye.setupMotionParameters(blinkTime, blinkPause); 61 | avatar.right_eye.setupMotionParameters(blinkTime, blinkPause); 62 | } 63 | 64 | private function setupAvatarMouth():void { 65 | var mouthXML:XMLList = avatarXML.avatar.face_features.mouth; 66 | 67 | avatar.mouth = new AvatarMouth(avatarObject, mouthXML); 68 | } 69 | 70 | private function setupAvatarNeck():void { 71 | var neckXML:XMLList = avatarXML.avatar.face_features.neck; 72 | 73 | avatar.neck = new AvatarNeck(avatarObject, neckXML); 74 | } 75 | 76 | } 77 | } -------------------------------------------------------------------------------- /src/lipsync/training/generator/SampleProvider.as: -------------------------------------------------------------------------------- 1 | package lipsync.training.generator { 2 | import flash.events.Event; 3 | import flash.events.EventDispatcher; 4 | import flash.media.Sound; 5 | import flash.net.URLRequest; 6 | import 
flash.utils.ByteArray; 7 | import lipsync.core.LipsyncSettings; 8 | import lipsync.core.lpc.LP; 9 | import lipsync.core.phoneme.Phoneme; 10 | import lipsync.training.generator.ProviderEvent; 11 | /** 12 | * ... 13 | * @author S 14 | */ 15 | public class SampleProvider extends EventDispatcher 16 | { 17 | private var sound:Sound; 18 | 19 | private var phoneme:Phoneme; 20 | private var phonemeList:Array; 21 | 22 | 23 | internal function readTrainingSequence(fileName:String, phoneme:Phoneme, phonemeList:Array):void { 24 | this.phoneme = phoneme; 25 | this.phonemeList = phonemeList; 26 | 27 | sound = new Sound(); 28 | sound.load(new URLRequest(fileName)); 29 | sound.addEventListener(Event.COMPLETE, fileLoaded); 30 | } 31 | 32 | private function fileLoaded(e:Event):void { 33 | var sampleArraySet:Array = new Array(); 34 | 35 | for each(var samplePos:int in phonemeList) { 36 | sampleArraySet.push(getPhonemes(samplePos)); 37 | } 38 | 39 | var event:ProviderEvent = new ProviderEvent(ProviderEvent.TRAINING_SEQ); 40 | event.sampleArraySet = sampleArraySet; 41 | event.phoneme = phoneme; 42 | 43 | dispatchEvent(event); 44 | } 45 | 46 | internal function getPhonemes(position:int):Vector. { 47 | var samples:Vector. = extractSound(position); 48 | var lpcParam:Vector. = LP.analyze(samples); 49 | 50 | var output:Vector. = new Vector.(); 51 | 52 | for each(var param:Number in lpcParam) { 53 | output.push(param); 54 | } 55 | 56 | return output; 57 | } 58 | 59 | private function extractSound(position:int):Vector. { 60 | var buffer:ByteArray = new ByteArray(); 61 | 62 | sound.extract(buffer, LipsyncSettings.windowLength * LipsyncSettings.samplingRateMS, position); 63 | 64 | buffer.position = 0; 65 | var array:Vector. = new Vector.(); 66 | 67 | var offset:int = 4 * (LipsyncSettings.samplingDecimate + 1); 68 | while (buffer.bytesAvailable > 0) { 69 | array.push(buffer.readFloat()); 70 | buffer.position += offset; 71 | } 72 | 73 | return array; 74 | } 75 | 76 | } 77 | 78 | } -------------------------------------------------------------------------------- /src/avatar3D/core/AvatarFeature.as: -------------------------------------------------------------------------------- 1 | package avatar3D.core 2 | { 3 | import avatar3D.core.bone.BoneRot; 4 | import avatar3D.core.bone.BoneMov; 5 | import avatar3D.core.BoneParameter; 6 | import avatar3D.expression.ExpressionsCollection; 7 | import avatar3D.expression.setting.ExpressionParameter; 8 | import away3d.containers.Bone; 9 | import away3d.containers.ObjectContainer3D; 10 | import away3d.core.base.Object3D; 11 | import util.AvatarDebugger; 12 | 13 | /** 14 | * ... 
15 | * @author S 16 | */ 17 | public class AvatarFeature 18 | { 19 | public var rotX:BoneParameter; 20 | public var rotY:BoneParameter; 21 | public var rotZ:BoneParameter; 22 | 23 | public var movX:BoneParameter; 24 | public var movY:BoneParameter; 25 | public var movZ:BoneParameter; 26 | 27 | 28 | public function AvatarFeature(avatar:ObjectContainer3D, parameter:XMLList) { 29 | var boneName:String = parameter.@name; 30 | var bone:Bone = avatar.getBoneByName(boneName); 31 | 32 | if (bone) { 33 | rotX = new BoneRot(bone, parameter.rot_X); 34 | rotY = new BoneRot(bone, parameter.rot_Y); 35 | rotZ = new BoneRot(bone, parameter.rot_Z); 36 | 37 | movX = new BoneMov(bone, parameter.mov_X); 38 | movY = new BoneMov(bone, parameter.mov_Y); 39 | movZ = new BoneMov(bone, parameter.mov_Z); 40 | 41 | AvatarDebugger.log("avatar bone " + boneName + " created."); 42 | } else { 43 | AvatarDebugger.error("avatar bone " + boneName + " not present"); 44 | } 45 | } 46 | 47 | public function setParameter(value:Number, parameter:ExpressionParameter):void { 48 | if (parameter.rotation == true) { 49 | rotX.value = value * parameter.rot_x; 50 | rotY.value = value * parameter.rot_y; 51 | rotZ.value = value * parameter.rot_z; 52 | } 53 | 54 | if (parameter.movement == true) { 55 | movX.value = value * parameter.mov_x; 56 | movY.value = value * parameter.mov_y; 57 | movZ.value = value * parameter.mov_z; 58 | } 59 | } 60 | 61 | public function setParameterTween(value:Number, parameter:ExpressionParameter, time:Number):void { 62 | if (parameter.rotation == true) { 63 | rotX.setValueTween(value * parameter.rot_x, time); 64 | rotY.setValueTween(value * parameter.rot_y, time); 65 | rotZ.setValueTween(value * parameter.rot_z, time); 66 | } 67 | 68 | if (parameter.movement == true) { 69 | movX.setValueTween(value * parameter.mov_x, time); 70 | movY.setValueTween(value * parameter.mov_y, time); 71 | movZ.setValueTween(value * parameter.mov_z, time); 72 | } 73 | } 74 | 75 | } 76 | } -------------------------------------------------------------------------------- /src/lipsync/training/generator/TrainingPatternGenerator.as: -------------------------------------------------------------------------------- 1 | package lipsync.training.generator 2 | { 3 | import flash.events.Event; 4 | import flash.events.EventDispatcher; 5 | import flash.geom.Vector3D; 6 | import lipsync.core.phoneme.Phoneme; 7 | import lipsync.core.phoneme.PhonemeCollection; 8 | import lipsync.training.TrainingPattern; 9 | import lipsync.training.generator.SampleProvider; 10 | import lipsync.training.generator.ProviderEvent; 11 | /** 12 | * ... 13 | * @author S 14 | */ 15 | public class TrainingPatternGenerator extends EventDispatcher 16 | { 17 | private var soundTrainer:SampleProvider; 18 | private var samplingQueue:Array; 19 | private var filesDirectory:String; 20 | 21 | private var sampleOffset:Number = 0.2; 22 | 23 | private var patternArray:Vector.; 24 | 25 | public function TrainingPatternGenerator(dir:String) { 26 | this.soundTrainer = new SampleProvider(); 27 | this.filesDirectory = dir; 28 | samplingQueue = new Array(); 29 | 30 | patternArray = new Vector.(); 31 | 32 | soundTrainer.addEventListener(ProviderEvent.TRAINING_SEQ, getTrainingSeq); 33 | } 34 | 35 | public function addSequence(fileName:String, phoneme:Phoneme, start:Number, stop:Number, count:Number = 30):void { 36 | samplingQueue.push(new Array(fileName, phoneme, generateArray(start * 1000, stop * 1000, count))); 37 | } 38 | 39 | public function getSamples():Vector. 
{ 40 | return patternArray; 41 | } 42 | 43 | public function start():void { 44 | readNext(); 45 | } 46 | 47 | private function readNext():void { 48 | if (samplingQueue.length == 0) { 49 | var event:Event = new Event(Event.COMPLETE); 50 | dispatchEvent(event); 51 | return; 52 | } 53 | 54 | var list:Array = samplingQueue.pop(); 55 | 56 | var soundFile:String = filesDirectory + list.shift(); 57 | var phoneme:Phoneme = list.shift(); 58 | 59 | soundTrainer.readTrainingSequence(soundFile, phoneme, list.shift()); 60 | } 61 | 62 | 63 | private function getTrainingSeq(event:ProviderEvent):void { 64 | var sampleSet:Array = event.sampleArraySet; 65 | var phoneme:Phoneme = event.phoneme; 66 | 67 | for each(var samples:Vector. in sampleSet) { 68 | var pattern:TrainingPattern = new TrainingPattern(); 69 | 70 | pattern.output = PhonemeCollection.phonemeToArray(phoneme); 71 | pattern.input = samples; 72 | 73 | patternArray.push(pattern); 74 | } 75 | 76 | readNext(); 77 | } 78 | 79 | private function generateArray(start:int, stop:int, steps:int):Array { 80 | var array:Array = new Array(); 81 | 82 | var dist:int = (stop - start) / steps; 83 | for (var i:int = start; i < stop; ) { 84 | array.push(i); 85 | i += (dist + sampleOffset * dist * (0.5 - Math.random())); 86 | } 87 | 88 | return array; 89 | } 90 | 91 | } 92 | 93 | } -------------------------------------------------------------------------------- /src/scenes/SetupVisemeScene.as: -------------------------------------------------------------------------------- 1 | package scenes 2 | { 3 | import avatar3D.AvatarAnimator; 4 | import avatar3D.AvatarBuilder; 5 | import avatar3D.expression.ExpressionsCollection; 6 | import away3d.animators.*; 7 | import away3d.cameras.*; 8 | import away3d.containers.*; 9 | import away3d.core.utils.*; 10 | import away3d.events.*; 11 | import away3d.loaders.*; 12 | import away3d.materials.*; 13 | import away3d.primitives.*; 14 | import flash.display.*; 15 | import flash.events.*; 16 | import flash.net.URLLoader; 17 | import flash.net.URLRequest; 18 | import flash.utils.*; 19 | import generic3D.AvatarScene; 20 | import generic3D.collada.AvatarModelProvider; 21 | 22 | 23 | [SWF(backgroundColor="#000000", frameRate="20", quality="LOW", width="800", height="800")] 24 | public class SetupVisemeScene extends Sprite { 25 | private var avatarScene:AvatarScene; 26 | private var modelProvider:AvatarModelProvider; 27 | private var avatar:AvatarAnimator; 28 | 29 | private var pos:Number; 30 | 31 | public function SetupVisemeScene(){ 32 | Debug.active = true; 33 | 34 | avatarScene = new AvatarScene(); 35 | this.addChild(avatarScene); 36 | 37 | modelProvider = new AvatarModelProvider(); 38 | modelProvider.addEventListener(Event.COMPLETE, onComplete); 39 | modelProvider.readModel(); 40 | } 41 | 42 | 43 | private function onComplete(e:Event):void { 44 | var avatarBuilder:AvatarBuilder = new AvatarBuilder(modelProvider.getModel()); 45 | avatar = avatarBuilder.buildAvatar(); 46 | 47 | avatarScene.addAvatar(avatar); 48 | 49 | avatar.getAvatarObject3D().moveTo(0, -25, 0); 50 | avatarScene.camera.moveTo( -50, 50, -50); 51 | //avatarScene.camera.lookAt(avatar.getAvatarObject3D().position); 52 | 53 | avatar.getAvatarObject3D().rotationY += 15; 54 | 55 | onLoadXML(null); 56 | 57 | addRefresh(); 58 | 59 | addEventListener(Event.ENTER_FRAME, onEnterFrame); 60 | } 61 | 62 | private function onEnterFrame(event:Event):void { 63 | //avatar.getAvatarObject3D().rotationY += 1; 64 | 65 | avatar.lookAt(this.mouseX, this.mouseY); 66 | } 67 | 68 | 69 | 70 | 
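// addRefresh() below polls ../lib/xml/avatar_default.xml once per second and re-runs
// ExpressionsCollection.initCollection(), so viseme settings can be tweaked in the XML
// and previewed on the running avatar without recompiling.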
private function addRefresh():void { 71 | var timer:Timer = new Timer(1000); 72 | timer.addEventListener(TimerEvent.TIMER, reloadExpressions); 73 | timer.start(); 74 | } 75 | 76 | private function reloadExpressions(e:Event):void { 77 | var loader:URLLoader = new URLLoader(); 78 | loader.addEventListener(Event.COMPLETE, onLoadXML); 79 | loader.load(new URLRequest("../lib/xml/avatar_default.xml")); 80 | } 81 | 82 | private function onLoadXML(e:Event):void { 83 | if (e != null) { 84 | var xml:XML = new XML(e.target.data); 85 | ExpressionsCollection.initCollection(xml.expressions); 86 | } 87 | 88 | for (var i:int = 0; i < 10; i++) 89 | //avatar.setViseme(ExpressionsCollection.JOY, 1.0); 90 | avatar.setViseme(ExpressionsCollection.getVisemeByAlias("v7"), 1.0); 91 | 92 | } 93 | 94 | } 95 | } -------------------------------------------------------------------------------- /src/scenes/ColladaTestScene.as: -------------------------------------------------------------------------------- 1 | package scenes 2 | { 3 | import avatar3D.AvatarAnimator; 4 | import avatar3D.AvatarBuilder; 5 | import avatar3D.expression.ExpressionsCollection; 6 | import away3d.animators.*; 7 | import away3d.cameras.*; 8 | import away3d.containers.*; 9 | import away3d.core.utils.*; 10 | import away3d.events.*; 11 | import away3d.loaders.*; 12 | import away3d.materials.*; 13 | import away3d.primitives.*; 14 | import flash.display.*; 15 | import flash.events.*; 16 | import flash.net.URLLoader; 17 | import flash.net.URLRequest; 18 | import flash.utils.*; 19 | import generic3D.AvatarScene; 20 | import generic3D.collada.AvatarModelProvider; 21 | 22 | 23 | [SWF(backgroundColor="#000000", frameRate="20", quality="LOW", width="800", height="800")] 24 | public class ColladaTestScene extends Sprite 25 | { 26 | private var avatarScene:AvatarScene; 27 | private var modelProvider:AvatarModelProvider; 28 | private var avatar:AvatarAnimator; 29 | 30 | private var pos:Number; 31 | 32 | public function ColladaTestScene(){ 33 | Debug.active = true; 34 | 35 | avatarScene = new AvatarScene(); 36 | this.addChild(avatarScene); 37 | 38 | modelProvider = new AvatarModelProvider(); 39 | modelProvider.addEventListener(Event.COMPLETE, onComplete); 40 | modelProvider.readModel(); 41 | 42 | } 43 | 44 | 45 | private function onComplete(e:Event):void { 46 | var avatarBuilder:AvatarBuilder = new AvatarBuilder(modelProvider.getModel()); 47 | avatar = avatarBuilder.buildAvatar(); 48 | 49 | avatarScene.addAvatar(avatar); 50 | 51 | avatar.getAvatarObject3D().moveTo(0, -25, 0); 52 | avatarScene.camera.moveTo( -50, 50, -50); 53 | //avatarScene.camera.lookAt(avatar.getAvatarObject3D().position); 54 | 55 | avatar.getAvatarObject3D().rotationY += 15; 56 | 57 | avatar.saySentences(["../lib/amy_aeiou.mp3"]); 58 | //addRefresh(); 59 | 60 | onLoadXML(null); 61 | 62 | addEventListener(Event.ENTER_FRAME, onEnterFrame); 63 | } 64 | 65 | private function onEnterFrame(event:Event):void { 66 | //avatar.getAvatarObject3D().rotationY += 1; 67 | 68 | avatar.lookAt(this.mouseX, this.mouseY); 69 | } 70 | 71 | 72 | 73 | private function addRefresh():void { 74 | var timer:Timer = new Timer(1000); 75 | timer.addEventListener(TimerEvent.TIMER, reloadExpressions); 76 | timer.start(); 77 | } 78 | 79 | private function reloadExpressions(e:Event):void { 80 | var loader:URLLoader = new URLLoader(); 81 | loader.addEventListener(Event.COMPLETE, onLoadXML); 82 | loader.load(new URLRequest("../lib/xml/avatar_default.xml")); 83 | } 84 | 85 | private function onLoadXML(e:Event):void { 86 | if (e 
!= null) { 87 | var xml:XML = new XML(e.target.data); 88 | ExpressionsCollection.initCollection(xml.expressions); 89 | } 90 | 91 | for (var i:int = 0; i < 10; i++) 92 | //avatar.setViseme(ExpressionsCollection.JOY, 1.0); 93 | avatar.setViseme(ExpressionsCollection.getVisemeByAlias("U"), 1.0); 94 | 95 | } 96 | 97 | } 98 | } -------------------------------------------------------------------------------- /Lipsync.as3proj: -------------------------------------------------------------------------------- [FlashDevelop project file; its XML markup was stripped by this text export and is not recoverable here] -------------------------------------------------------------------------------- /src/avatar3D/AvatarCore.as: -------------------------------------------------------------------------------- 1 | package avatar3D 2 | { 3 | import avatar3D.expression.AvatarExpression; 4 | import avatar3D.expression.ExpressionsCollection; 5 | import avatar3D.face.neck.AvatarNeck; 6 | import away3d.containers.ObjectContainer3D; 7 | import avatar3D.face.eyes.AvatarEye; 8 | import avatar3D.face.mouth.AvatarMouth; 9 | import flash.events.Event; 10 | import flash.events.TimerEvent; 11 | import flash.utils.Timer; 12 | /** 13 | * ... 14 | * @author S 15 | */ 16 | public class AvatarCore 17 | { 18 | protected var avatar:ObjectContainer3D; 19 | 20 | internal var left_eye:AvatarEye; 21 | internal var right_eye:AvatarEye; 22 | internal var mouth:AvatarMouth; 23 | internal var neck:AvatarNeck; 24 | 25 | internal var blinkDelay:Number; 26 | private var blinkTimer:Timer; 27 | 28 | private var visemeValue:Number; 29 | private var viseme:AvatarExpression; 30 | private var emotionValue:Number; 31 | private var emotion:AvatarExpression; 32 | 33 | 34 | public function AvatarCore() { 35 | 36 | } 37 | 38 | protected function initAvatarCore():void { 39 | blinkTimer = new Timer(blinkDelay, 0); 40 | blinkTimer.addEventListener(TimerEvent.TIMER, onBlinkTimer); 41 | blinkTimer.start(); 42 | } 43 | 44 | 45 | public function setAvatarObject3D(avatar:ObjectContainer3D):void { 46 | this.avatar = avatar; 47 | } 48 | 49 | public function getAvatarObject3D():ObjectContainer3D { 50 | return this.avatar; 51 | } 52 | 53 | 54 | public function setEmotion(emotion:AvatarExpression, value:Number = NaN):void { 55 | this.emotion = emotion; 56 | 57 | if (isNaN(value)) { 58 | value = emotionValue; 59 | } else { 60 | emotionValue = value; 61 | } 62 | 63 | mouth.setViseme(value, emotion); 64 | } 65 | 66 | public function setViseme(viseme:AvatarExpression, value:Number = NaN):void { 67 | this.viseme = viseme; 68 | 69 | if (isNaN(value)) { 70 | value = visemeValue; 71 | } else { 72 | visemeValue = value; 73 | } 74 | 75 | mouth.setViseme(value, viseme); 76 | } 77 | 78 | public function setVisemeValue(value:Number):void { 79 | var change:Number = value / visemeValue; 80 | 81 | visemeValue = value; 82 | 83 | //mouth.setExpression(value, viseme, emotion); 84 | //mouth.refreshExpression(change); 85 | } 86 | 87 | public function setEmotionValue(value:Number):void { 88 | var change:Number = value / emotionValue; 89 | 90 | //trace(change + " * " + emotionValue); 91 | 92 | emotionValue = value; 93 | if (isNaN(change)) change = 0.001; 94 | 95 | mouth.setViseme(emotionValue, emotion); 96 | //mouth.refreshExpression(change); 97 | } 98 | 99 | 100 | private function
onBlinkTimer(event:Event):void { 101 | left_eye.blink(); 102 | right_eye.blink(); 103 | } 104 | 105 | public function lookAt(mouseX:Number, mouseY:Number):void { 106 | var posX:Number = (400 - mouseX) / 400; 107 | var posY:Number = (400 - mouseY) / 400; 108 | 109 | neck.lookAt(posX / 2, posY / 2); 110 | 111 | left_eye.lookAt(posX, posY); 112 | right_eye.lookAt(posX, posY); 113 | } 114 | 115 | public function lookAtTween():void { 116 | 117 | } 118 | 119 | public function openMouth(value:Number):void { 120 | mouth.jaw.rotX.value = value; 121 | } 122 | 123 | } 124 | } -------------------------------------------------------------------------------- /src/generic3D/collada/AvatarModelProvider.as: -------------------------------------------------------------------------------- 1 | package generic3D.collada 2 | { 3 | import away3d.containers.ObjectContainer3D; 4 | import away3d.core.base.Object3D; 5 | import away3d.core.utils.Cast; 6 | import away3d.events.ParserEvent; 7 | import away3d.loaders.Collada; 8 | import away3d.materials.BitmapMaterial; 9 | import flash.events.Event; 10 | import flash.events.EventDispatcher; 11 | import flash.utils.ByteArray; 12 | /** 13 | * ... 14 | * @author S 15 | */ 16 | public class AvatarModelProvider extends EventDispatcher 17 | { 18 | [Embed(source = "../../../lib/model/eye.jpg")] 19 | private var eye_texture:Class; 20 | 21 | [Embed(source = "../../../lib/model/teeth.jpg")] 22 | private var teeth_texture:Class; 23 | 24 | [Embed(source = "../../../lib/model/texture_m.jpg")] private var model_texture_m:Class; 25 | [Embed(source = "../../../lib/model/texture.jpg")] private var model_texture_f:Class; 26 | 27 | [Embed(source = "../../../lib/model/model.dae", mimeType = "application/octet-stream")] private var model_f:Class; 28 | [Embed(source = "../../../lib/model/model_man.dae", mimeType = "application/octet-stream")] private var model_m:Class; 29 | 30 | private var modelMaterial:BitmapMaterial; 31 | private var eyeMaterial:BitmapMaterial; 32 | private var teethMaterial:BitmapMaterial; 33 | 34 | private var colladaParser:Collada; 35 | private var avatarModel:ObjectContainer3D; 36 | 37 | 38 | public static const MALE:String = "MALE"; 39 | public static const FEMALE:String = "FEMALE"; 40 | public var sex:String = MALE; 41 | 42 | public function AvatarModelProvider(sex:String = FEMALE) { 43 | this.sex = sex; 44 | } 45 | 46 | public function readModel():void { 47 | if (sex == MALE) modelMaterial = new BitmapMaterial(Cast.bitmap(model_texture_m)); 48 | else if (sex == FEMALE) modelMaterial = new BitmapMaterial(Cast.bitmap(model_texture_f)); 49 | 50 | eyeMaterial = new BitmapMaterial(Cast.bitmap(eye_texture)); 51 | teethMaterial = new BitmapMaterial(Cast.bitmap(teeth_texture)); 52 | 53 | colladaParser = new Collada(); 54 | colladaParser.scaling = 10.5; 55 | colladaParser.addEventListener(ParserEvent.PARSE_SUCCESS, onParseCollada); 56 | 57 | var modelByteArray:ByteArray; 58 | if (sex == MALE) modelByteArray = new model_m; 59 | else if (sex == FEMALE) modelByteArray = new model_f; 60 | 61 | colladaParser.parseGeometry(modelByteArray); 62 | } 63 | 64 | private function onParseCollada(event:ParserEvent):void { 65 | avatarModel = (colladaParser.container as ObjectContainer3D); 66 | 67 | if (sex == MALE) avatarModel.materialLibrary.getMaterial("texture_jpg").material = modelMaterial; 68 | else if(sex == FEMALE) avatarModel.materialLibrary.getMaterial("texture_jpg").material = modelMaterial; 69 | 70 | avatarModel.materialLibrary.getMaterial("eye_jpg").material = eyeMaterial; 71 | 
avatarModel.materialLibrary.getMaterial("teeth_jpg_001").material = teethMaterial; 72 | 73 | setupObjectBones(avatarModel.children); 74 | 75 | dispatchEvent(new Event(Event.COMPLETE)); 76 | } 77 | 78 | private function setupObjectBones(model:Object):void { 79 | for each(var obj:Object in model) { 80 | obj.rotationX = obj.rotationX; 81 | 82 | try { 83 | var array:Vector. = obj.children; 84 | setupObjectBones(array); 85 | } catch (e:Error) { } 86 | } 87 | } 88 | 89 | public function getModel():ObjectContainer3D { 90 | return avatarModel; 91 | } 92 | 93 | } 94 | } -------------------------------------------------------------------------------- /src/lipsync/core/lpc/LP.as: -------------------------------------------------------------------------------- 1 | package lipsync.core.lpc 2 | { 3 | /** 4 | * ... 5 | * @author S 6 | */ 7 | public class LP 8 | { 9 | public static var order:int = 9; 10 | 11 | private static function createWindow(length:int):Vector. { 12 | var w:Vector. = new Vector.(); 13 | 14 | for (var n:int = 0; n < length; n++) { 15 | var x:Number = 0.0; 16 | 17 | var arg:Number = (2 * Math.PI * n) / (length - 1); 18 | x = 0.54 - 0.46 * Math.cos( arg ); // hamming 19 | //x = 0.5 * (1 + Math.cos(arg)); // hanning 20 | //x = 0.42 - 0.5 * Math.cos(arg) + 0.08 * Math.cos(2 * arg); // blackman 21 | //x = 0.35875 - 0.48829*Math.cos(arg) + 0.14128*Math.cos(2*arg) + 0.01168*Math.cos(3*arg); // blackman-harris 22 | 23 | w.push(x); 24 | } 25 | 26 | return w; 27 | } 28 | 29 | private static function computeAutocorrelation(x:Vector.):Vector. { 30 | var dl:Vector. = new Vector.(); 31 | var Rt:Vector. = new Vector.(); 32 | var R:Vector. = new Vector.(); 33 | var r1:Number, r2:Number, r1t:Number; 34 | var L:int = x.length; 35 | var lambda:Number = 0.0; 36 | var P:int = order; 37 | 38 | for (var z:int = 0; z < L; z++) { 39 | dl.push(0.0); 40 | Rt.push(0.0); 41 | } 42 | 43 | R[0] = Rt[0] = 0; 44 | r1 = r2 = r1t = 0; 45 | 46 | for (var k:int = 0; k < L; k++) { 47 | Rt[0] += (Number)(x[k]) * (Number)(x[k]); 48 | dl[k] = r1 - (Number)(lambda) * (Number)(x[k] - r2); 49 | r1 = x[k]; 50 | r2 = dl[k]; 51 | } 52 | for (var i:int = 1; i <= P; i++) { 53 | Rt[i] = 0; 54 | r1 = r2 = 0; 55 | for(k=0; k):Vector. { 71 | var km:Number, Em1:Number, Em:Number; 72 | var k:int, s:int, m:int; 73 | 74 | var A:Vector. = new Vector.(); 75 | var Am:Vector. = new Vector.(); 76 | var K:Vector. = new Vector.(); 77 | 78 | for (var j:int = 0; j <= order; j++) { 79 | K.push(0.0); 80 | A.push(0.0); 81 | Am.push(0.0); 82 | } 83 | 84 | Em1 = R[0]; 85 | A[0] = Am[0] = 1; 86 | km = 0; 87 | 88 | for (m = 1; m <= order; m++) { 89 | var err:Number = 0.0; 90 | 91 | for (k = 1; k <= m - 1; k++) { 92 | err += Am[k] * R[m - k]; 93 | } 94 | 95 | km = (R[m] - err) / Em1; 96 | K[m - 1] = -(Number)(km); 97 | A[m] = km; 98 | 99 | for (k = 1; k <= m - 1; k++) { 100 | A[k] = (Number)(Am[k] - km * Am[m - k]); 101 | } 102 | 103 | Em = (1 - km * km) * Em1; 104 | 105 | for (s = 0; s <= order; s++) { 106 | Am[s] = A[s]; 107 | } 108 | 109 | Em1 = Em; 110 | } 111 | 112 | return K; 113 | } 114 | 115 | public static function analyze(samples:Vector.):Vector. { 116 | var R:Vector. = computeAutocorrelation(samples); 117 | var output:Vector. 
= computeCoef(R); 118 | 119 | // NORMALISATION 120 | //for (var i:int = 0; i < output.length; i++) output[i] = (output[i] + 1) / 2; 121 | 122 | output.pop(); 123 | 124 | return output; 125 | } 126 | 127 | } 128 | 129 | } -------------------------------------------------------------------------------- /src/avatar3D/face/mouth/AvatarMouth.as: -------------------------------------------------------------------------------- 1 | package avatar3D.face.mouth 2 | { 3 | import avatar3D.core.AvatarFeature; 4 | import avatar3D.expression.AvatarExpression; 5 | import avatar3D.expression.ExpressionsCollection; 6 | import away3d.containers.ObjectContainer3D; 7 | /** 8 | * ... 9 | * @author S 10 | */ 11 | public class AvatarMouth 12 | { 13 | public var jaw:AvatarFeature; 14 | public var tongue:AvatarFeature; 15 | public var mouth_r:AvatarFeature; 16 | public var mouth_l:AvatarFeature; 17 | public var lip_down_r:AvatarFeature; 18 | public var lip_down_m:AvatarFeature; 19 | public var lip_down_l:AvatarFeature; 20 | public var lip_top_r:AvatarFeature; 21 | public var lip_top_m:AvatarFeature; 22 | public var lip_top_l:AvatarFeature; 23 | public var cheek_r:AvatarFeature; 24 | public var cheek_l:AvatarFeature; 25 | public var cheekb_r:AvatarFeature; 26 | public var cheekb_l:AvatarFeature; 27 | 28 | public function AvatarMouth(avatar:ObjectContainer3D, xml:XMLList) { 29 | jaw = new AvatarFeature(avatar, xml.jaw); 30 | tongue = new AvatarFeature(avatar, xml.tongue); 31 | mouth_r = new AvatarFeature(avatar, xml.mouth_r); 32 | mouth_l = new AvatarFeature(avatar, xml.mouth_l); 33 | lip_down_r = new AvatarFeature(avatar, xml.lip_down_r); 34 | lip_down_m = new AvatarFeature(avatar, xml.lip_down_m); 35 | lip_down_l = new AvatarFeature(avatar, xml.lip_down_l); 36 | lip_top_r = new AvatarFeature(avatar, xml.lip_top_r); 37 | lip_top_m = new AvatarFeature(avatar, xml.lip_top_m); 38 | lip_top_l = new AvatarFeature(avatar, xml.lip_top_l); 39 | cheek_r = new AvatarFeature(avatar, xml.cheek_r); 40 | cheek_l = new AvatarFeature(avatar, xml.cheek_l); 41 | cheekb_r = new AvatarFeature(avatar, xml.cheekb_r); 42 | cheekb_l = new AvatarFeature(avatar, xml.cheekb_l); 43 | } 44 | 45 | public function smile(value:Number):void { 46 | mouth_r.movY.value = -value; 47 | mouth_l.movY.value = -value; 48 | } 49 | 50 | public function setViseme(value:Number, viseme:AvatarExpression):void { 51 | jaw.setParameter(value, viseme.jaw); 52 | tongue.setParameter(value, viseme.tongue); 53 | mouth_l.setParameter(value, viseme.mouth_l); 54 | mouth_r.setParameter(value, viseme.mouth_r); 55 | lip_down_l.setParameter(value, viseme.lip_down_l); 56 | lip_down_m.setParameter(value, viseme.lip_down_m); 57 | lip_down_r.setParameter(value, viseme.lip_down_r); 58 | lip_top_l.setParameter(value, viseme.lip_top_l); 59 | lip_top_m.setParameter(value, viseme.lip_top_m); 60 | lip_top_r.setParameter(value, viseme.lip_top_r); 61 | cheek_l.setParameter(value, viseme.cheek_l); 62 | cheek_r.setParameter(value, viseme.cheek_r); 63 | cheekb_l.setParameter(value, viseme.cheekb_l); 64 | cheekb_r.setParameter(value, viseme.cheekb_r); 65 | } 66 | 67 | public function setNeutral(time:Number):void { 68 | var value:Number = 1.0; 69 | var viseme:AvatarExpression = ExpressionsCollection.NEUTRAL; 70 | 71 | jaw.setParameterTween(value, viseme.jaw, time); 72 | tongue.setParameterTween(value, viseme.tongue, time); 73 | mouth_l.setParameterTween(value, viseme.mouth_l, time); 74 | mouth_r.setParameterTween(value, viseme.mouth_r, time); 75 | lip_down_l.setParameterTween(value, 
viseme.lip_down_l, time); 76 | lip_down_m.setParameterTween(value, viseme.lip_down_m, time); 77 | lip_down_r.setParameterTween(value, viseme.lip_down_r, time); 78 | lip_top_l.setParameterTween(value, viseme.lip_top_l, time); 79 | lip_top_m.setParameterTween(value, viseme.lip_top_m, time); 80 | lip_top_r.setParameterTween(value, viseme.lip_top_r, time); 81 | cheek_l.setParameterTween(value, viseme.cheek_l, time); 82 | cheek_r.setParameterTween(value, viseme.cheek_r, time); 83 | cheekb_l.setParameterTween(value, viseme.cheekb_l, time); 84 | cheekb_r.setParameterTween(value, viseme.cheekb_r, time); 85 | } 86 | 87 | } 88 | } -------------------------------------------------------------------------------- /src/lipsync/training/LipsyncCreator.mxml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | @namespace spark "library://ns.adobe.com/flex/spark"; 6 | @namespace "library://ns.adobe.com/flex/mx"; 7 | 8 | global { 9 | fontSize:12; 10 | } 11 | 12 | Button { 13 | } 14 | 15 | .trainingPanelStyle { 16 | padding-left: 10; 17 | padding-top: 10; 18 | } 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | female/aeiou.mp3 82 | female/count.mp3 83 | female/example.mp3 84 | female/lipsync.mp3 85 | female/speech.mp3 86 | male/aeiou.mp3 87 | male/count.mp3 88 | male/example.mp3 89 | male/lipsync.mp3 90 | male/speech.mp3 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | -------------------------------------------------------------------------------- /src/editor/LipsyncEditorWindow.as: -------------------------------------------------------------------------------- 1 | package editor { 2 | import avatar3D.AvatarAnimator; 3 | import avatar3D.AvatarBuilder; 4 | import flash.display.Bitmap; 5 | import flash.display.BitmapData; 6 | import flash.display.Sprite; 7 | import flash.events.Event; 8 | import flash.filters.BlurFilter; 9 | import flash.filters.GlowFilter; 10 | import flash.geom.Matrix; 11 | import flash.geom.Rectangle; 12 | import generic3D.AvatarScene; 13 | import generic3D.collada.AvatarModelProvider; 14 | import util.Label; 15 | import lipsync.core.network.NeuralNetwork; 16 | import util.NeuralNetworkProvider; 17 | /** 18 | * ... 
19 | * @author S 20 | */ 21 | 22 | [SWF(backgroundColor="#ffffff", frameRate="18", quality="LOW", width="500", height="700")] 23 | public class LipsyncEditorWindow extends Sprite { 24 | private var avatarScene:AvatarScene; 25 | private var modelProvider:AvatarModelProvider; 26 | 27 | private var currentAvatar:AvatarAnimator; 28 | private var avatarMale:AvatarAnimator; 29 | private var avatarFemale:AvatarAnimator; 30 | 31 | private var magnify:Bitmap; 32 | 33 | public static const MALE:String = "MALE"; 34 | public static const FEMALE:String = "FEMALE"; 35 | public var sex:String = FEMALE; 36 | 37 | private var voiceNN:NeuralNetwork; 38 | private var voiceUrl:String = "../lib/final/female/"; 39 | 40 | 41 | public function LipsyncEditorWindow() { 42 | avatarScene = new AvatarScene(); 43 | this.addChild(avatarScene); 44 | avatarScene.scaleX = avatarScene.scaleY = 1.2; 45 | avatarScene.y = -80; 46 | avatarScene.x = -70; 47 | avatarScene.camera.moveTo( -50, 50, -50); 48 | 49 | modelProvider = new AvatarModelProvider(FEMALE); 50 | modelProvider.addEventListener(Event.COMPLETE, onCompleteFemale); 51 | modelProvider.readModel(); 52 | 53 | addEventListener(Event.ENTER_FRAME, onEnterFrame); 54 | 55 | createForeground(); 56 | createLinks(); 57 | 58 | magnify = new Bitmap(); 59 | magnify.bitmapData = new BitmapData(420, 260); 60 | this.addChild(magnify); 61 | magnify.filters = [new BlurFilter(3, 3), new GlowFilter(0x000000, 0.5, 20, 20)]; 62 | magnify.x = 50; 63 | magnify.y = 380; 64 | } 65 | 66 | private function onCompleteFemale(e:Event):void { 67 | var avatarBuilder:AvatarBuilder = new AvatarBuilder(modelProvider.getModel()); 68 | avatarFemale = avatarBuilder.buildAvatar(); 69 | 70 | avatarFemale.getAvatarObject3D().moveTo(0, -25, 0); 71 | avatarFemale.getAvatarObject3D().rotationY += 15; 72 | avatarFemale.getAvatarObject3D().rotationX -= 5; 73 | avatarFemale.getAvatarObject3D().rotationZ += 5; 74 | 75 | if (sex == FEMALE) 76 | avatarScene.addAvatar(avatarFemale); 77 | 78 | modelProvider = new AvatarModelProvider(MALE); 79 | modelProvider.addEventListener(Event.COMPLETE, onCompleteMale); 80 | modelProvider.readModel(); 81 | } 82 | private function onCompleteMale(e:Event):void { 83 | var avatarBuilder:AvatarBuilder = new AvatarBuilder(modelProvider.getModel()); 84 | avatarMale = avatarBuilder.buildAvatar(); 85 | 86 | avatarMale.getAvatarObject3D().moveTo(0, -25, 0); 87 | avatarMale.getAvatarObject3D().rotationY += 15; 88 | avatarMale.getAvatarObject3D().rotationX -= 5; 89 | avatarMale.getAvatarObject3D().rotationZ += 5; 90 | 91 | if (sex == MALE) { 92 | avatarScene.addAvatar(avatarMale); 93 | currentAvatar = avatarMale; 94 | voiceNN = NeuralNetworkProvider.build(NeuralNetworkProvider.networkImageMale); 95 | } else { 96 | currentAvatar = avatarFemale; 97 | voiceNN = NeuralNetworkProvider.build(NeuralNetworkProvider.networkImageFemale); 98 | } 99 | 100 | } 101 | 102 | public function changeSex():void { 103 | if (currentAvatar == avatarFemale) { 104 | sex = MALE; 105 | voiceNN = NeuralNetworkProvider.build(NeuralNetworkProvider.networkImageMale); 106 | voiceUrl = "../lib/final/male/"; 107 | currentAvatar = avatarMale; 108 | avatarScene.addAvatar(avatarMale); 109 | avatarScene.removeAvatar(avatarFemale); 110 | } else if (currentAvatar == avatarMale) { 111 | sex = FEMALE; 112 | voiceNN = NeuralNetworkProvider.build(NeuralNetworkProvider.networkImageFemale); 113 | voiceUrl = "../lib/final/female/"; 114 | currentAvatar = avatarFemale; 115 | avatarScene.addAvatar(avatarFemale); 116 | 
avatarScene.removeAvatar(avatarMale); 117 | } 118 | } 119 | 120 | private function onEnterFrame(event:Event):void { 121 | drawMagnify(); 122 | currentAvatar.lookAt(this.mouseX * 0.5 + 250, this.mouseY * 0.5 + 250); 123 | } 124 | 125 | private function createForeground():void { 126 | var s:Sprite = new Sprite(); 127 | s.graphics.beginFill(0xffffff); 128 | s.graphics.drawRect(0, 0, 500, 300); 129 | s.graphics.endFill(); 130 | 131 | this.addChild(s); 132 | s.x = 80; 133 | s.y = 350; 134 | } 135 | 136 | private function drawMagnify():void { 137 | var m:Matrix = new Matrix(); 138 | m.translate( -245, -230); 139 | m.scale(3.5, 3.5); 140 | magnify.bitmapData.draw(this, m, null, null, new Rectangle(0, 0, 420, 260)); 141 | } 142 | 143 | private function createLinks():void { 144 | var link:Label; 145 | 146 | link = createChangeSex("AWATAR"); 147 | link.y = 20; 148 | this.addChild(link); 149 | 150 | link = createLink("aeiou", "aeiou.mp3"); 151 | link.y = 80; 152 | this.addChild(link); 153 | 154 | link = createLink("count", "count.mp3"); 155 | link.y = 110; 156 | this.addChild(link); 157 | 158 | link = createLink("example", "example.mp3"); 159 | link.y = 140; 160 | this.addChild(link); 161 | 162 | link = createLink("lipsync", "lipsync.mp3"); 163 | link.y = 170; 164 | this.addChild(link); 165 | 166 | link = createLink("speech", "speech.mp3"); 167 | link.y = 200; 168 | this.addChild(link); 169 | 170 | } 171 | 172 | private function createLink(text:String, url:String):Label { 173 | var label:Label = new Label(text); 174 | label.x = 30; 175 | label.onMouseClick(createLinkAction(url)); 176 | return label; 177 | } 178 | 179 | private function createLinkAction(file:String):Function { 180 | return function(e:Event):void { 181 | currentAvatar.saySentencesUsingNetwork([voiceUrl + file], voiceNN); 182 | currentAvatar.lookAt(360, 180); 183 | }; 184 | } 185 | 186 | private function createChangeSex(text:String):Label { 187 | var label:Label = new Label(text); 188 | label.x = 20; 189 | label.onMouseClick(function(e:Event):void { changeSex(); } ); 190 | return label; 191 | } 192 | 193 | 194 | } 195 | } -------------------------------------------------------------------------------- /src/lipsync/core/network/NeuralNetwork.as: -------------------------------------------------------------------------------- 1 | package lipsync.core.network 2 | { 3 | import flash.utils.ByteArray; 4 | import lipsync.core.LipsyncSettings; 5 | import lipsync.core.lpc.LP; 6 | import lipsync.training.TrainingPattern; 7 | 8 | public class NeuralNetwork 9 | { 10 | private var layers:Array; 11 | 12 | private var momentum:Number = 0.5; 13 | private var neuronalBias:Number = 1; 14 | private var initialWeightRange:Number = 1; 15 | private var realLearningRate:Number = NaN; 16 | private var hiddenLayers:int = 2; 17 | 18 | 19 | public function createNetwork(inputs:int, outputs:int, neuronsPerLayer:int):void { 20 | this.layers = []; 21 | 22 | this.layers[0] = createLayer(neuronsPerLayer, inputs, neuronalBias, initialWeightRange); 23 | for (var i:int = 1; i < hiddenLayers; i++) { 24 | this.layers[i] = createLayer(neuronsPerLayer, inputs, neuronalBias, initialWeightRange); 25 | } 26 | this.layers[hiddenLayers] = createLayer(outputs, neuronsPerLayer, neuronalBias, initialWeightRange); 27 | } 28 | 29 | private function createLayer(neurons:int, inputs:int, bias:Number, weightRange:Number):Vector. { 30 | var newLayer:Vector. 
= new Vector.<Neuron>(); 31 | for (var i:int = 0; i < neurons; i++) { 32 | var neuron:Neuron = new Neuron(); 33 | neuron.createNeuron(inputs, bias, weightRange); 34 | 35 | newLayer.push(neuron); 36 | } 37 | return newLayer; 38 | } 39 | 40 | 41 | public function run(inputArray:Vector.<Number>):Vector.<Number> { 42 | var layerOutputs:Array = new Array(layers.length + 1); 43 | for (var l:int = 0; l <= layers.length; l++) { 44 | layerOutputs[l] = new Vector.<Number>(); 45 | } 46 | 47 | var inputs:Vector.<Number> = inputArray; 48 | for (l = 0; l < layers.length; l++) { 49 | var output:Vector.<Number> = layerOutputs[l + 1]; 50 | 51 | for each (var neuron:Neuron in layers[l]) { 52 | output.push(neuron.calculateValue(inputs)); 53 | } 54 | 55 | inputs = output; 56 | } 57 | 58 | return layerOutputs[layerOutputs.length - 1]; 59 | } 60 | 61 | public function train(patterns:Vector.<TrainingPattern>, epochs:int, learningRate:Number, targetMSE:Number = 0.02):Number { 62 | if (isNaN(realLearningRate)) { 63 | realLearningRate = learningRate; 64 | } 65 | 66 | var MSE:Number = 0; 67 | for (var r:int = 0; r < epochs; r++) { 68 | patterns = shufflePatterns(patterns); 69 | 70 | MSE = 0; 71 | for (var i:int = 0; i < patterns.length; i++) { 72 | var input:Vector.<Number> = (patterns[i] as TrainingPattern).input; 73 | var output:Vector.<Number> = (patterns[i] as TrainingPattern).output; 74 | 75 | this.run(input); 76 | MSE += this.adjust(output, realLearningRate); 77 | } 78 | 79 | MSE = MSE / patterns.length; 80 | realLearningRate = learningRate * MSE; 81 | 82 | if (MSE <= targetMSE) { 83 | break; 84 | } 85 | } 86 | 87 | return MSE; 88 | } 89 | 90 | private function adjust(outputArray:Vector.<Number>, learningRate:Number):Number { 91 | var MSEsum:Number = 0; 92 | var layerCount:int = this.layers.length - 1; 93 | var error:Array = []; 94 | 95 | for (var l:int = layerCount; l >= 0; --l) { 96 | var layer:Vector.<Neuron> = layers[l]; 97 | 98 | error[l] = []; 99 | for (var i:int = 0; i < layer[0].size; i++) { 100 | error[l].push(0); 101 | } 102 | 103 | var nError:Number = 0; 104 | for (var n:int = 0; n < layer.length; n++) { 105 | var neuron:Neuron = layer[n]; 106 | 107 | if (l == layerCount) { 108 | nError = outputArray[n] - neuron.value; 109 | MSEsum += nError * nError; 110 | } else { 111 | nError = error[l + 1][n]; 112 | } 113 | trace(nError); 114 | neuron.adjustWeights(nError, learningRate, momentum, error[l]); 115 | } 116 | } 117 | trace("MSESum " + MSEsum); 118 | return MSEsum / (layers[layerCount].length); 119 | } 120 | 121 | 122 | public static function shufflePatterns(array_arr:Vector.<TrainingPattern>):Vector.<TrainingPattern> { 123 | for(var i:Number = 0; i < array_arr.length; i++){ 124 | var randomNum_num:int = Math.floor(Math.random() * array_arr.length) 125 | var arrayIndex:TrainingPattern = array_arr[i]; 126 | array_arr[i] = array_arr[randomNum_num]; 127 | array_arr[randomNum_num] = arrayIndex; 128 | } 129 | return array_arr; 130 | } 131 | 132 | public function save():ByteArray { 133 | var output:ByteArray = new ByteArray(); 134 | 135 | output.writeInt(LP.order); 136 | output.writeInt(LipsyncSettings.outputCount); 137 | output.writeInt(LipsyncSettings.samplingDecimate); 138 | output.writeInt(LipsyncSettings.windowLength); 139 | 140 | output.writeDouble(momentum); 141 | output.writeDouble(realLearningRate); 142 | output.writeInt(layers.length); 143 | 144 | for (var l:int = 0; l < layers.length; l++) { 145 | var layer:Vector.<Neuron>
= layers[l]; 146 | 147 | output.writeInt(layer.length); 148 | for (var n:int = 0; n < layer.length; n++) { 149 | var neuron:Neuron = layer[n]; 150 | 151 | output.writeDouble(neuron.value); 152 | output.writeDouble(neuron.bias); 153 | output.writeDouble(neuron.momentum); 154 | 155 | output.writeInt(neuron.size); 156 | for (var s:int = 0; s < neuron.size; s++) { 157 | output.writeDouble(neuron.inputs[s]); 158 | output.writeDouble(neuron.weights[s]); 159 | output.writeDouble(neuron.momentums[s]); 160 | } 161 | } 162 | } 163 | 164 | output.compress(); 165 | output.position = 0; 166 | 167 | return output; 168 | } 169 | 170 | public function load(input:ByteArray):void { 171 | input.uncompress(); 172 | input.position = 0; 173 | 174 | LP.order = input.readInt(); 175 | LipsyncSettings.outputCount = input.readInt(); 176 | LipsyncSettings.samplingDecimate = input.readInt(); 177 | LipsyncSettings.windowLength = input.readInt(); 178 | 179 | this.momentum = input.readDouble(); 180 | this.realLearningRate = input.readDouble(); 181 | 182 | var layersLength:int = input.readInt(); 183 | layers = new Array(layersLength); 184 | 185 | for (var l:int = 0; l < layersLength; l++) { 186 | var layerLength:int = input.readInt(); 187 | var layer:Vector. = new Vector.(); 188 | 189 | for (var n:int = 0; n < layerLength; n++) { 190 | var neuron:Neuron = new Neuron(); 191 | 192 | neuron.value = input.readDouble(); 193 | neuron.bias = input.readDouble(); 194 | neuron.momentum = input.readDouble(); 195 | 196 | neuron.size = input.readInt(); 197 | neuron.inputs = new Vector.(neuron.size); 198 | neuron.weights = new Vector.(neuron.size); 199 | neuron.momentums = new Vector.(neuron.size); 200 | 201 | for (var s:int = 0; s < neuron.size; s++) { 202 | neuron.inputs[s] = input.readDouble(); 203 | neuron.weights[s] = input.readDouble(); 204 | neuron.momentums[s] = input.readDouble(); 205 | } 206 | 207 | layer[n] = neuron; 208 | 209 | 210 | } 211 | 212 | layers[l] = layer; 213 | } 214 | 215 | } 216 | 217 | } 218 | } -------------------------------------------------------------------------------- /src/lipsync/training/LipsyncTrainer.as: -------------------------------------------------------------------------------- 1 | import flash.display.Sprite; 2 | import flash.events.Event; 3 | import flash.events.TimerEvent; 4 | import flash.utils.Timer; 5 | import mx.rpc.events.ResultEvent; 6 | import mx.controls.Alert; 7 | import flash.display.Sprite; 8 | import flash.events.Event; 9 | import flash.events.MouseEvent; 10 | import flash.net.FileReference; 11 | import flash.net.URLLoader; 12 | import flash.net.URLLoaderDataFormat; 13 | import flash.net.URLRequest; 14 | import flash.text.TextField; 15 | import flash.utils.ByteArray; 16 | import lipsync.core.LipsyncSettings; 17 | import lipsync.core.lpc.LP; 18 | import lipsync.core.network.NeuralNetwork; 19 | import lipsync.core.phoneme.Phoneme; 20 | import lipsync.core.phoneme.PhonemeCollection; 21 | import lipsync.player.LipsyncPlayer; 22 | import lipsync.player.LipsyncEvent; 23 | import lipsync.training.generator.TrainingPatternGenerator; 24 | import lipsync.training.TrainingPattern; 25 | 26 | internal var targetMSE:Number; 27 | internal var epochsToRun:int; 28 | internal var learningRate:Number; 29 | internal var hiddenLayers:int; 30 | internal var hiddenNeuronsPerLayer:int; 31 | 32 | internal var network:NeuralNetwork; 33 | internal var trainer:TrainingPatternGenerator; 34 | internal var trainingSequence:Vector.; 35 | internal var soundPlayer:LipsyncPlayer; 36 | internal var 
totalTraining:Boolean = false; 37 | 38 | 39 | internal function init():void { 40 | setVariablesFunction(); 41 | 42 | var inputNeurons:int = LP.order; 43 | var outputNeurons:int = LipsyncSettings.outputCount; 44 | 45 | network = new NeuralNetwork(); 46 | network.createNetwork(inputNeurons, outputNeurons, hiddenNeuronsPerLayer); 47 | 48 | soundPlayer = new LipsyncPlayer(100, 1); 49 | soundPlayer.setupNeuralNetwork(network); 50 | soundPlayer.addEventListener(LipsyncEvent.PHONEME, onGetPhoneme); 51 | 52 | output.text = ""; 53 | log.text = ""; 54 | } 55 | 56 | internal function setVariablesFunction():void { 57 | targetMSE = new Number(targetMSEField.text); 58 | epochsToRun = new int(epochsField.text); 59 | learningRate = new Number(learningRateField.text); 60 | hiddenNeuronsPerLayer = new int(neuronsCountField.text); 61 | LipsyncSettings.recognizePhonemeDelay = new int(eventDelayField.text); 62 | LipsyncSettings.windowLength = new int(windowLengthField.text); 63 | LipsyncSettings.outputCount = new int(outputCountField.text); 64 | LipsyncSettings.samplingDecimate = new int(samplingDecimateField.text); 65 | LP.order = new int(lpcOrderField.text); 66 | 67 | /* 68 | trace(LipsyncSettings.lipsyncEventDelay); 69 | trace(LipsyncSettings.windowLength); 70 | trace(LipsyncSettings.outputCount); 71 | trace(LipsyncSettings.samplingDecimate); 72 | trace(LP.order); 73 | 74 | trace(targetMSE); 75 | trace(epochsToRun); 76 | trace(learningRate); 77 | trace(hiddenLayers); 78 | trace(hiddenNeuronsPerLayer); 79 | */ 80 | } 81 | 82 | 83 | internal function loadSamples():void { 84 | setVariablesFunction(); 85 | 86 | var count:int = 40; 87 | var file:String; 88 | 89 | // FEMALE 90 | /* 91 | trainer = new TrainingPatternGenerator("../lib/female/"); 92 | trainer.addEventListener(Event.COMPLETE, onTrainingSequence); 93 | 94 | file = "aeiou.mp3"; 95 | trainer.addSequence(file, Phoneme.v3a, 8, 13, count); 96 | trainer.addSequence(file, Phoneme.v3b, 13, 20, count); 97 | trainer.addSequence(file, Phoneme.v7a, 42, 48, count); 98 | trainer.addSequence(file, Phoneme.v7b, 48, 54, count); 99 | trainer.addSequence(file, Phoneme.v2a, 78, 83, count); 100 | trainer.addSequence(file, Phoneme.v2b, 83, 89, count); 101 | trainer.addSequence(file, Phoneme.v6a, 111, 115, count); 102 | trainer.addSequence(file, Phoneme.v6b, 115, 122, count); 103 | trainer.addSequence(file, Phoneme.v5a, 145, 150, count); 104 | trainer.addSequence(file, Phoneme.v5b, 150, 158, count); 105 | 106 | file = "example.mp3"; 107 | trainer.addSequence(file, Phoneme.v1a, 46, 50, count); 108 | trainer.addSequence(file, Phoneme.v1b, 50, 56, count); 109 | trainer.addSequence(file, Phoneme.v4a, 83, 85, count); 110 | trainer.addSequence(file, Phoneme.v4b, 85, 89, count); 111 | */ 112 | 113 | // MALE 114 | trainer = new TrainingPatternGenerator("../lib/male/"); 115 | trainer.addEventListener(Event.COMPLETE, onTrainingSequence); 116 | 117 | file = "aeiou.mp3"; 118 | trainer.addSequence(file, Phoneme.v3a, 9, 15, count); 119 | trainer.addSequence(file, Phoneme.v3b, 15, 24, count); 120 | trainer.addSequence(file, Phoneme.v7a, 50, 55, count); 121 | trainer.addSequence(file, Phoneme.v7b, 55, 62, count); 122 | trainer.addSequence(file, Phoneme.v1a, 87, 95, count); 123 | trainer.addSequence(file, Phoneme.v1b, 95, 102, count); 124 | trainer.addSequence(file, Phoneme.v6a, 127, 135, count); 125 | trainer.addSequence(file, Phoneme.v6b, 135, 144, count); 126 | trainer.addSequence(file, Phoneme.v5a, 167, 175, count); 127 | trainer.addSequence(file, Phoneme.v5b, 175, 187, count); 128 | 
129 | file = "example.mp3"; 130 | trainer.addSequence(file, Phoneme.v2a, 48, 53, count); 131 | trainer.addSequence(file, Phoneme.v2b, 53, 59, count); 132 | trainer.addSequence(file, Phoneme.v4a, 211, 218, count); 133 | trainer.addSequence(file, Phoneme.v4b, 218, 224, count); 134 | 135 | 136 | trainer.start(); 137 | } 138 | 139 | internal function onTrainingSequence(evt:Event):void { 140 | trainingSequence = trainer.getSamples(); 141 | 142 | if (totalTraining == false) { 143 | for each(var seq:TrainingPattern in trainingSequence) { 144 | //writeTo(output, seq.output + " - " + seq.input); 145 | } 146 | writeTo(log, "Training samples loaded", true); 147 | } else { 148 | partialTraining(); 149 | } 150 | } 151 | 152 | 153 | internal function runNetwork(file:String = ""):void { 154 | output.text = ""; 155 | 156 | setVariablesFunction(); 157 | 158 | writeTo(log, "Run network. Test file: " + file, true); 159 | soundPlayer.playSound("../lib/"+file); 160 | 161 | } 162 | 163 | internal function trainNetwork():void { 164 | setVariablesFunction(); 165 | 166 | writeTo(log, "Train network start", true); 167 | 168 | totalTraining = false; 169 | var mse:Number = network.train(trainingSequence, epochsToRun, learningRate); 170 | 171 | writeTo(log, "Training end. MSE: " + mse, true); 172 | } 173 | 174 | internal function completeTraining():void { 175 | setVariablesFunction(); 176 | init(); 177 | 178 | writeTo(log, "Complete training start", true); 179 | 180 | totalTraining = true; 181 | 182 | loadSamples(); 183 | } 184 | 185 | internal function partialTraining():void { 186 | var mse:Number = network.train(trainingSequence, epochsToRun, learningRate, targetMSE); 187 | 188 | if (mse <= targetMSE) { 189 | writeTo(log, "Total training complete. MSE: " + mse, true); 190 | } else { 191 | writeTo(log, ""+mse, false); 192 | loadSamples(); 193 | } 194 | } 195 | 196 | 197 | internal function saveNetwork():void { 198 | var fr:FileReference = new FileReference(); 199 | fr.save(network.save(), "network_image"); 200 | writeTo(log, "Save neural network", true); 201 | } 202 | 203 | internal function loadNetwork():void { 204 | function onOpen(evt:Event):void { 205 | var ba:ByteArray = evt.target.data 206 | network.load(ba); 207 | writeTo(log, "Neural network loaded", true); 208 | 209 | windowLengthField.text = "" + LipsyncSettings.windowLength; 210 | outputCountField.text = "" + LipsyncSettings.outputCount; 211 | lpcOrderField.text = "" + LP.order; 212 | samplingDecimateField.text = "" + LipsyncSettings.samplingDecimate; 213 | } 214 | 215 | var loader:URLLoader = new URLLoader(); 216 | loader.addEventListener(Event.COMPLETE, onOpen); 217 | loader.dataFormat = URLLoaderDataFormat.BINARY; 218 | loader.load(new URLRequest("../lib/network_image")); 219 | } 220 | 221 | internal function clearNetwork():void { 222 | init(); 223 | output.text = ""; 224 | log.text = ""; 225 | } 226 | 227 | 228 | internal function onGetPhoneme(event:LipsyncEvent):void { 229 | var phoneme:Phoneme = event.phoneme; 230 | 231 | writeTo(output, " " + phoneme.symbol, false, false); 232 | phonemeLabel.text = phoneme.symbol; 233 | } 234 | 235 | internal function writeTo(obj:mx.controls.TextArea, text:String, date:Boolean = false, ln:Boolean = true):void { 236 | if (date) { 237 | var split:Array = (new Date().toTimeString().split(" ")[0]).split(":"); 238 | obj.text += "[" + split[1] + ":" + split[2] + "] - "; 239 | } 240 | 241 | obj.text += text; 242 | 243 | if (ln) { 244 | obj.text += "\n"; 245 | } 246 | } 247 | 248 | 
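Note: the trainer script above is wired to the Flex form declared in LipsyncCreator.mxml, so the *Field names it reads come from that UI. As a rough, self-contained sketch of the same training workflow (illustrative only: the hidden-layer size, epoch count and learning rate below are placeholder values, the anonymous listener is for brevity, and imports/UI wiring are omitted):

    var network:NeuralNetwork = new NeuralNetwork();
    network.createNetwork(LP.order, LipsyncSettings.outputCount, 16); // inputs = LPC order, outputs = phoneme classes

    var trainer:TrainingPatternGenerator = new TrainingPatternGenerator("../lib/male/");
    trainer.addEventListener(Event.COMPLETE, function(e:Event):void {
        var patterns:Vector.<TrainingPattern> = trainer.getSamples();
        var mse:Number = network.train(patterns, 100, 0.5, 0.02); // epochs, learning rate, target MSE
        trace("training finished, MSE = " + mse);
        new FileReference().save(network.save(), "network_image"); // same image file the editor later loads
    });
    trainer.addSequence("aeiou.mp3", Phoneme.v3a, 9, 15, 40); // label a time span of the clip with one phoneme
    trainer.start();
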
-------------------------------------------------------------------------------- /src/lipsync/player/LipsyncPlayer.as: -------------------------------------------------------------------------------- 1 | package lipsync.player 2 | { 3 | import flash.events.Event; 4 | import flash.events.EventDispatcher; 5 | import flash.events.IOErrorEvent; 6 | import flash.events.TimerEvent; 7 | import flash.media.Sound; 8 | import flash.media.SoundChannel; 9 | import flash.media.SoundTransform; 10 | import flash.net.URLRequest; 11 | import flash.utils.ByteArray; 12 | import flash.utils.Timer; 13 | import lipsync.core.LipsyncSettings; 14 | import lipsync.core.lpc.LP; 15 | import lipsync.core.network.NeuralNetwork; 16 | import lipsync.core.phoneme.Phoneme; 17 | import lipsync.core.phoneme.PhonemeCollection; 18 | 19 | /** 20 | * ... 21 | * @author Szymon 22 | */ 23 | 24 | public class LipsyncPlayer extends EventDispatcher { 25 | private var network:NeuralNetwork; 26 | 27 | private var phonemePositionStep:int; 28 | private var phonemeBufferArray:Array; 29 | 30 | private var soundVolume:Number; 31 | private var soundEnabled:Boolean; 32 | private var soundIsPlaying:Boolean; 33 | 34 | private var soundsArray:Array; 35 | private var sound:Sound; 36 | private var soundChannel:SoundChannel; 37 | private var soundTransform:SoundTransform; 38 | 39 | private var dispatchEventTimer:Timer; 40 | private var dispatchEventDelay:int; 41 | 42 | private var recognizePhonemeTimer:Timer; 43 | 44 | 45 | // SETUP 46 | public function LipsyncPlayer(dispatchDelay:int, init_volume:Number = 1.0) { 47 | this.dispatchEventDelay = dispatchDelay; 48 | 49 | soundEnabled = true; 50 | soundIsPlaying = false; 51 | 52 | soundTransform = new SoundTransform(1.00); 53 | 54 | this.setSoundVolume(init_volume); 55 | } 56 | 57 | public function setupNeuralNetwork(network:NeuralNetwork):void { 58 | this.network = network; 59 | } 60 | 61 | 62 | // VOLUME 63 | public function isSoundEnabled():Boolean { 64 | return soundEnabled; 65 | } 66 | 67 | public function isSoundPlaying():Boolean { 68 | return soundIsPlaying; 69 | } 70 | 71 | public function setSoundVolume(volume:Number):void { 72 | if (volume != 0.00) { 73 | volume = Math.max(volume, 0.00); 74 | volume = Math.min(volume, 1.00); 75 | 76 | soundTransform.volume = volume; 77 | 78 | soundEnabled = true; 79 | } 80 | else { 81 | soundTransform.volume = 0.00; 82 | soundEnabled = false; 83 | } 84 | 85 | 86 | if ( soundIsPlaying == false ) return; 87 | else try{ 88 | var soundPosition:Number = soundChannel.position; 89 | soundChannel.stop(); 90 | soundChannel = sound.play(soundPosition, 0, soundTransform); 91 | } 92 | catch(e:Error){ trace("Error - turning on sound"); } 93 | 94 | } 95 | 96 | public function getSoundVolume():Number { 97 | return soundTransform.volume; 98 | } 99 | 100 | 101 | // BASE 102 | public function playSounds(soundsArray:Array):void { 103 | this.soundsArray = soundsArray; 104 | 105 | if (soundIsPlaying == true) { 106 | soundPlayComplete(); 107 | } 108 | 109 | playNextSound(); 110 | } 111 | 112 | public function playSound(url:String):void { 113 | playSounds(new Array(url)); 114 | } 115 | 116 | public function stopPlaying():void { 117 | soundsArray = []; 118 | 119 | if( soundIsPlaying == true ) 120 | soundPlayComplete(null); 121 | } 122 | 123 | 124 | // SOUND HANDLING 125 | private function playNextSound():void { 126 | if (soundsArray.length == 0) return; 127 | 128 | var url:String = soundsArray[0]; 129 | soundsArray.splice(0, 1); 130 | 131 | try { 132 | var urlReq:URLRequest = new 
URLRequest(url); 133 | 134 | sound = new Sound(); 135 | sound.addEventListener(Event.COMPLETE, soundLoaded); 136 | sound.addEventListener(IOErrorEvent.IO_ERROR, soundLoadError); 137 | sound.load(urlReq); 138 | } catch (error:Error){ 139 | return; 140 | } 141 | } 142 | 143 | private function soundLoaded(event:Event):void { 144 | sound.removeEventListener(Event.COMPLETE, soundLoaded); 145 | 146 | preparePhonemeBuffer(); 147 | 148 | try { 149 | soundChannel = sound.play(0,0, soundTransform); 150 | } catch(error:Error){ 151 | soundChannel.stop(); 152 | return; 153 | } 154 | 155 | soundIsPlaying = true; 156 | 157 | dispatchEventTimer = new Timer(dispatchEventDelay, 0); 158 | dispatchEventTimer.addEventListener(TimerEvent.TIMER, dispatchSoundEvent); 159 | dispatchEventTimer.start(); 160 | 161 | recognizePhonemeTimer = new Timer(LipsyncSettings.recognizePhonemeDelay, 0); 162 | recognizePhonemeTimer.addEventListener(TimerEvent.TIMER, recognizeLipsyncEvent); 163 | recognizePhonemeTimer.start(); 164 | 165 | soundChannel.addEventListener(Event.SOUND_COMPLETE, soundPlayComplete); 166 | 167 | var soundEvent:LipsyncEvent = new LipsyncEvent(LipsyncEvent.PLAYING_START); 168 | dispatchEvent(soundEvent); 169 | 170 | dispatchSoundEvent(null); 171 | } 172 | 173 | private function soundLoadError(event:Event):void { 174 | sound.removeEventListener(IOErrorEvent.IO_ERROR, soundLoadError); 175 | playNextSound(); 176 | } 177 | 178 | private function soundPlayComplete(event:Event = null):void { 179 | soundChannel.removeEventListener(Event.SOUND_COMPLETE, soundPlayComplete); 180 | dispatchEventTimer.removeEventListener(TimerEvent.TIMER, dispatchSoundEvent); 181 | recognizePhonemeTimer.removeEventListener(TimerEvent.TIMER, recognizeLipsyncEvent); 182 | 183 | dispatchEventTimer.stop(); 184 | 185 | soundChannel.stop(); 186 | sound = null; 187 | 188 | if (soundsArray.length != 0) 189 | playNextSound(); 190 | else { 191 | soundIsPlaying = false; 192 | 193 | var soundEvent:LipsyncEvent = new LipsyncEvent(LipsyncEvent.PLAYING_COMPLETE); 194 | dispatchEvent(soundEvent); 195 | } 196 | } 197 | 198 | private function dispatchSoundEvent(event:Event):void { 199 | var soundEvent:LipsyncEvent = new LipsyncEvent(LipsyncEvent.AMPLITUDE_SAMPLE); 200 | soundEvent.amplitude = 0.5 * (soundChannel.leftPeak + soundChannel.rightPeak); 201 | dispatchEvent(soundEvent); 202 | } 203 | 204 | 205 | // LIPSYNC 206 | private function preparePhonemeBuffer():void { 207 | phonemePositionStep = LipsyncSettings.recognizePhonemeDelay * LipsyncSettings.samplingRateMS; 208 | 209 | phonemeBufferArray = new Array(); 210 | for (var position:int = phonemePositionStep; position < (sound.length * LipsyncSettings.samplingRateMS); position+=phonemePositionStep) { 211 | var phoneme:LipsyncBufferItem = recognizePhoneme(position); 212 | phonemeBufferArray.push(phoneme); 213 | } 214 | 215 | setupBuffer(); 216 | } 217 | 218 | private function setupBuffer():void { 219 | var f:LipsyncBufferItem; 220 | var c:LipsyncBufferItem; 221 | var s:LipsyncBufferItem; 222 | 223 | for (var i:int = 1; i < phonemeBufferArray.length - 1; i++) { 224 | f = phonemeBufferArray[i - 1]; 225 | c = phonemeBufferArray[i]; 226 | s = phonemeBufferArray[i + 1]; 227 | 228 | if (c.phoneme != f.phoneme && c.phoneme != s.phoneme) { 229 | if (s.phoneme != Phoneme.NULL) { 230 | c.phoneme = s.phoneme; 231 | } else if (f.phoneme != Phoneme.NULL) { 232 | c.phoneme = f.phoneme; 233 | } 234 | } 235 | } 236 | 237 | } 238 | 239 | private function recognizeLipsyncEvent(event:Event):void { 240 | var position:int 
= (soundChannel.position * LipsyncSettings.samplingRateMS); 241 | var item:LipsyncBufferItem = null; 242 | 243 | do { 244 | item = phonemeBufferArray.shift(); 245 | if (item == null) item = new LipsyncBufferItem(); 246 | } while ((item.position + phonemePositionStep) < position); 247 | 248 | //item = recognizePhoneme(position); 249 | 250 | var soundEvent:LipsyncEvent = new LipsyncEvent(LipsyncEvent.PHONEME); 251 | soundEvent.phoneme = item.phoneme; 252 | soundEvent.amplitude = item.energy; 253 | dispatchEvent(soundEvent); 254 | } 255 | 256 | private function recognizePhoneme(position:int):LipsyncBufferItem { 257 | var item:LipsyncBufferItem = extractSound(position); 258 | 259 | if (item.energy >= LipsyncSettings.activationEnergy) { 260 | var K:Vector.<Number> = LP.analyze(item.samples); 261 | var result:Vector.<Number> = network.run(K); 262 | item.phoneme = PhonemeCollection.arrayToPhoneme(result); 263 | } 264 | 265 | return item; 266 | } 267 | 268 | private function extractSound(position:int):LipsyncBufferItem { 269 | var buffer:ByteArray = new ByteArray(); 270 | sound.extract(buffer, LipsyncSettings.windowLength * LipsyncSettings.samplingRateMS, position); 271 | buffer.position = 0; 272 | 273 | var offset:int = 4 * (LipsyncSettings.samplingDecimate + 1); 274 | var samples:Vector.<Number> = new Vector.<Number>(); 275 | 276 | var energy:Number = 0.0; 277 | var sample:Number = 0.0; 278 | while (buffer.bytesAvailable > 0) { 279 | sample = buffer.readFloat(); 280 | samples.push(sample); 281 | buffer.position += offset; 282 | 283 | if (sample < 0) energy -= sample; 284 | else energy += sample; 285 | 286 | } 287 | energy /= samples.length; 288 | 289 | var item:LipsyncBufferItem = new LipsyncBufferItem(); 290 | item.position = position; 291 | item.energy = energy; 292 | item.samples = samples; 293 | 294 | return item; 295 | } 296 | 297 | } 298 | } --------------------------------------------------------------------------------
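Note: a minimal playback sketch showing how LipsyncPlayer can be used on its own, assuming a previously trained network image is available through NeuralNetworkProvider (as in LipsyncEditorWindow.as); imports and error handling are omitted and the listener bodies are illustrative:

    var network:NeuralNetwork = NeuralNetworkProvider.build(NeuralNetworkProvider.networkImageMale);

    var player:LipsyncPlayer = new LipsyncPlayer(100, 1.0); // amplitude events every 100 ms, full volume
    player.setupNeuralNetwork(network);

    player.addEventListener(LipsyncEvent.PHONEME, function(e:LipsyncEvent):void {
        trace(e.phoneme.symbol + " @ " + e.amplitude); // recognized phoneme plus its window energy
    });
    player.addEventListener(LipsyncEvent.PLAYING_COMPLETE, function(e:LipsyncEvent):void {
        trace("playback finished");
    });

    player.playSound("../lib/male/speech.mp3"); // the phoneme buffer is precomputed once the MP3 has loaded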