multimedial
9 years ago
Record and playback avatar animations
Hello,
Since I finally got some time today and the topic is pretty straightforward, here is my first take on recording and playing back (canned) avatar animations in Unity.
The script is super simple and should be added to the RemoteLoopback Unity sample scene. It writes out a file with animation data (the default is "C:\temp\Recording.avs", see the script parameters). A boolean variable called "Recording" switches between recording (=True) and playback (=False).
I want to flesh this out in order to use Avatars as trainers and holograms.
The next step is to record voice data from the microphone to a WAV file (code for that is available on the net), ideally with cue points added to the WAV so the animation frames can be synced up with the sound.
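For the microphone part, here is a rough, untested sketch of the direction I have in mind: capture audio with Unity's Microphone API and write it out as 16-bit PCM WAV. The class name, the SaveWav helper and the output path are just placeholders, and the cue points aren't handled yet.

using UnityEngine;
using System.IO;

public class MicRecorder : MonoBehaviour {

    public string outputPath = "C:\\temp\\Recording.wav"; // placeholder path
    public int frequency = 44100;
    public int maxLengthSec = 60;

    AudioClip clip;

    public void StartRecording() {
        // null = default microphone device
        clip = Microphone.Start(null, false, maxLengthSec, frequency);
    }

    public void StopRecording() {
        int samplesRecorded = Microphone.GetPosition(null);
        Microphone.End(null);
        SaveWav(outputPath, clip, samplesRecorded);
    }

    // writes the first sampleCount sample frames of the clip as 16-bit PCM WAV
    static void SaveWav(string path, AudioClip clip, int sampleCount) {
        var samples = new float[sampleCount * clip.channels];
        clip.GetData(samples, 0);

        using (var writer = new BinaryWriter(new FileStream(path, FileMode.Create))) {
            int dataSize = samples.Length * 2; // 2 bytes per sample (16 bit)

            // RIFF/WAVE header
            writer.Write(System.Text.Encoding.ASCII.GetBytes("RIFF"));
            writer.Write(36 + dataSize);
            writer.Write(System.Text.Encoding.ASCII.GetBytes("WAVE"));
            writer.Write(System.Text.Encoding.ASCII.GetBytes("fmt "));
            writer.Write(16);                                  // fmt chunk size
            writer.Write((short)1);                            // PCM
            writer.Write((short)clip.channels);
            writer.Write(clip.frequency);
            writer.Write(clip.frequency * clip.channels * 2);  // byte rate
            writer.Write((short)(clip.channels * 2));          // block align
            writer.Write((short)16);                           // bits per sample
            writer.Write(System.Text.Encoding.ASCII.GetBytes("data"));
            writer.Write(dataSize);

            // convert float samples [-1, 1] to 16-bit PCM
            foreach (float s in samples)
                writer.Write((short)(Mathf.Clamp(s, -1f, 1f) * short.MaxValue));
        }
    }
}

The idea would then be to store the recorded frame times as cue points in the WAV (or in a small sidecar file), but that part still needs research.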
It would also be nice to change the default avatar without having to provide the avatar ID of a recorded profile, and to make everything nicer and smoother in general. Eventually I'd also like to add LZ4 compression to make the animation frames smaller.
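I haven't settled on an LZ4 library yet, so as a placeholder here is roughly what per-frame compression could look like with the built-in DeflateStream; the frame layout on disk would stay [size][time][payload], just with the payload compressed before it is written and decompressed after it is read. Swapping in LZ4 later would only change these two helpers (they could also live directly in RemoteLoopbackManager):

using System.IO;
using System.IO.Compression;

static class FrameCompression {

    public static byte[] Compress(byte[] data) {
        using (var output = new MemoryStream()) {
            using (var deflate = new DeflateStream(output, CompressionMode.Compress))
                deflate.Write(data, 0, data.Length);
            return output.ToArray();
        }
    }

    public static byte[] Decompress(byte[] data) {
        using (var input = new MemoryStream(data))
        using (var deflate = new DeflateStream(input, CompressionMode.Decompress))
        using (var output = new MemoryStream()) {
            var buffer = new byte[4096];
            int read;
            while ((read = deflate.Read(buffer, 0, buffer.Length)) > 0)
                output.Write(buffer, 0, read);
            return output.ToArray();
        }
    }
}

In OnLocalAvatarPacketRecorded the dataBytes would be compressed before writing (with the compressed length stored as the frame size), and in ReadAnimationFrame the frameContent would be decompressed before it is handed to PlaybackPacketData.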
For those who don't want to record anything, I provided a sample demo file which can be downloaded at
http://old.multimedial.de/share/Recording.avs (580Kb)
Download it, then either place it in the default directory (C:\temp\) or point the property called "F Name" (for fileName, I should rename that...) to the file's new location.
Enjoy, feedback is much appreciated.
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using System;
using System.IO;

public class RemoteLoopbackManager : MonoBehaviour {

    public OvrAvatar LocalAvatar;
    public OvrAvatar LoopbackAvatar;
    public String fName = "C:\\temp\\Recording.avs";
    public Boolean recording = true;

    int offset = 0;
    float startTime;
    FileStream file;
    Dictionary<string, object> animationFrame;
    int packetSequence = 0;

    // Each frame on disk is laid out as:
    // [int frameSize][double frameTime][frameSize bytes of avatar packet data]

    // Use this for initialization
    void Start () {
        LocalAvatar.RecordPackets = recording;

        if (recording) {
            // recording mode
            LocalAvatar.PacketRecorded += OnLocalAvatarPacketRecorded;
            File.Delete(fName);
            file = new FileStream(fName, FileMode.CreateNew);
        } else {
            // playback mode
            file = new FileStream(fName, FileMode.Open);
        }

        initPlayback();
    }

    void Update() {
        if (recording)
            return; // recording is driven by the packet callback, nothing to do per frame

        // if we are still here, then we are playing back!
        if (animationFrame == null)
            animationFrame = ReadAnimationFrame();

        if (animationFrame == null)
            return; // we just wrapped around to the start of the file; try again next frame

        var currentTime = Time.time - startTime;
        var animationTime = (double)animationFrame["time"];

        if (animationTime > currentTime) {
            Debug.Log("Not playing yet " + animationTime + " " + currentTime);
            return;
        }

        PlaybackPacketData((byte[])animationFrame["data"]);

        // reset frame
        animationFrame = null;
    }

    Dictionary<string, object> ReadAnimationFrame() {
        // wrap around once the whole file has been consumed
        if (offset >= file.Length) {
            initPlayback();
            return null;
        }

        // read frameSize
        var frameSize = new byte[sizeof(int)];
        offset += file.Read(frameSize, 0, frameSize.Length);

        // read frameTime
        var frameTime = new byte[sizeof(double)];
        offset += file.Read(frameTime, 0, frameTime.Length);
        var animationTime = BitConverter.ToDouble(frameTime, 0);

        // read frameSize bytes of packet data
        var frameContent = new byte[BitConverter.ToInt32(frameSize, 0)];
        offset += file.Read(frameContent, 0, frameContent.Length);

        var frame = new Dictionary<string, object>();
        frame.Add("time", animationTime);
        frame.Add("data", frameContent);
        return frame;
    }

    void initPlayback() {
        // reset filepointer to the beginning of the file
        file.Position = offset = 0;
        // reset time
        startTime = Time.time;
        // reset animationFrame
        animationFrame = null;
    }

    void OnLocalAvatarPacketRecorded(object sender, OvrAvatar.PacketEventArgs args) {
        using (MemoryStream outputStream = new MemoryStream()) {
            BinaryWriter writer = new BinaryWriter(outputStream);
            writer.Write(packetSequence++); // advance the sequence number for the remote driver
            args.Packet.Write(outputStream);
            var dataBytes = outputStream.ToArray();

            // write frameSize
            var frameSize = BitConverter.GetBytes(dataBytes.Length);
            file.Write(frameSize, 0, frameSize.Length);

            // write frameTime, relative to the start of the recording
            var diffTime = (double)(Time.time - startTime);
            var currentTime = BitConverter.GetBytes(diffTime);
            file.Write(currentTime, 0, currentTime.Length);
            Debug.Log(diffTime);

            // write frame data
            file.Write(dataBytes, 0, dataBytes.Length);
            file.Flush();

            SendPacketData(dataBytes);
        }
    }

    void SendPacketData(byte[] data) {
        // Loopback by just "receiving" the data
        ReceivePacketData(data);
    }

    void ReceivePacketData(byte[] data) {
        using (MemoryStream inputStream = new MemoryStream(data)) {
            BinaryReader reader = new BinaryReader(inputStream);
            int sequence = reader.ReadInt32();
            OvrAvatarPacket packet = OvrAvatarPacket.Read(inputStream);
            LoopbackAvatar.GetComponent<OvrAvatarRemoteDriver>().QueuePacket(sequence, packet);
        }
    }

    void PlaybackPacketData(byte[] data) {
        using (MemoryStream inputStream = new MemoryStream(data)) {
            BinaryReader reader = new BinaryReader(inputStream);
            LoopbackAvatar.GetComponent<OvrAvatarRemoteDriver>().QueuePacket(reader.ReadInt32(), OvrAvatarPacket.Read(inputStream));
        }
    }

    void OnDestroy() {
        // release the file handle when the scene stops
        if (file != null)
            file.Close();
    }
}