cancel
Showing results for 
Search instead for 
Did you mean: 

Record and playback avatar animations

multimedial
Explorer
Hello, 

so since I finally got some time today and since the topic is pretty straightforward, I hereby post my first take on recording and playing back (canned) avatar animations in Unity. 

The script is super simple, and should be added to the RemoteLoopback Unity Sample scene. It writes out a file with animation data (default is "C:\temp\Recording.avs", see the script parameters). A boolean variable called "Recording" switches between recording (=True) and playback (=False). 

I want to flesh this out in order to use Avatars as trainers and holograms.

The next stop is to be able to record voice data from the microphone to a WAV file (code is available on the net), but ideally with added cue-points in the WAV in order to sync up the animation frames with the sound. 

Also changing the default avatar would be nice, without having to provide an avatar ID of a recorded profile. Also, getting everything nicer and smoother, and so on and so forth, eventually I'd also like to add LZ4 compression to make the animation frames smaller. 

For those who don't want to record anything, I provided a sample demo file which can be downloaded at 

http://old.multimedial.de/share/Recording.avs (580Kb)

Download it, then either place it in the default directory (C:\temp\) or point the property called "F Name" (for fileName, I should rename that...) to the new place of the file. 

Enjoy, feedback is much appreciated. 

using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using System;
using System.IO;


public class RemoteLoopbackManager : MonoBehaviour {

    // Avatar whose packets are recorded (source of the animation data).
    public OvrAvatar LocalAvatar;
    // Avatar that replays the recorded/looped-back packets.
    public OvrAvatar LoopbackAvatar;
    // Path of the animation file to write (recording) or read (playback).
    public String fName = "C:\\temp\\Recording.avs";
    // True = record LocalAvatar packets to fName; false = play fName back.
    public Boolean recording = true;

    int offset = 0;             // bytes consumed from the playback file so far
    float startTime;            // Time.time when recording/playback (re)started
    FileStream file;            // open handle on fName
    // Pending playback frame: "time" -> double timestamp, "data" -> byte[] packet.
    Dictionary<string, object> animationFrame;

    int packetSequence = 0;     // monotonically increasing packet id

    // Open the file in the requested mode and hook up recording if needed.
    void Start () {

        LocalAvatar.RecordPackets = recording;

        if (recording)
        {   // recording mode: always start from a fresh file
            LocalAvatar.PacketRecorded += OnLocalAvatarPacketRecorded;
            File.Delete(fName);
            file = new FileStream(fName, FileMode.CreateNew);
        }
        else
        {   // playback mode
            file = new FileStream(fName, FileMode.Open);
        }

        initPlayback();

    }

    // Release the file handle when the component is destroyed.
    // (BUGFIX: the original never closed the stream.)
    void OnDestroy()
    {
        if (file != null)
        {
            file.Dispose();
            file = null;
        }
    }

    void Update()
    {
        if (recording) return; // packets arrive via the event, nothing per-frame

        //////////////////////////////////////////////////////
        // if we are still here, then we are playing back!
        //////////////////////////////////////////////////////
        if (animationFrame == null)
            animationFrame = ReadAnimationFrame();

        // ReadAnimationFrame returns null when the file wrapped around;
        // try again next frame instead of dereferencing null.
        // (BUGFIX: the original crashed with a NullReferenceException here.)
        if (animationFrame == null)
            return;

        var currentTime = Time.time - startTime;
        var animationTime = (double)animationFrame["time"];

        // Hold this frame until its recorded timestamp has been reached.
        if (animationTime > currentTime)
        {
            return;
        }

        PlaybackPacketData((byte[])animationFrame["data"]);
        // frame consumed; read a fresh one on the next Update
        animationFrame = null;

    }

    // Reads one frame from the file, laid out as
    //   [int32 frameSize][double frameTime][frameSize bytes of packet data]
    // Returns null (after rewinding to the start) when the end of the file
    // has been reached, so playback loops forever.
    Dictionary<string, object> ReadAnimationFrame()
    {
        // Wrap around BEFORE reading, so the last frame in the file is
        // actually played.  (BUGFIX: the original checked after the reads
        // and therefore always discarded the final recorded frame.)
        if (offset >= file.Length)
        {
            initPlayback();
            return null;
        }

        // read frameSize
        var frameSize = new byte[sizeof(int)];
        offset += file.Read(frameSize, 0, frameSize.Length);

        // read frameTime (seconds since recording started)
        var frameTime = new byte[sizeof(double)];
        offset += file.Read(frameTime, 0, frameTime.Length);

        var animationTime = BitConverter.ToDouble(frameTime, 0);

        // read in frameSize bytes of packet data
        var frameContent = new byte[BitConverter.ToInt32(frameSize, 0)];
        offset += file.Read(frameContent, 0, frameContent.Length);

        var animationFrame = new Dictionary<string, object>();
        animationFrame.Add("time", animationTime);
        animationFrame.Add("data", frameContent);

        return animationFrame;

    }

    // Rewind to the start of the file and restart the playback clock.
    void initPlayback()
    {
        // reset filepointer to the beginning of file
        file.Position = offset = 0;
        // reset time
        startTime = Time.time;
        // reset animationFrame
        animationFrame = null;
    }

    // Recording path: serialize the packet, append it to the file as
    // [frameSize][relative time][frame data], and loop it back locally.
    void OnLocalAvatarPacketRecorded(object sender, OvrAvatar.PacketEventArgs args)
    {
        using (MemoryStream outputStream = new MemoryStream())
        {

            BinaryWriter writer = new BinaryWriter(outputStream);

            // BUGFIX: the original wrote packetSequence but never incremented
            // it, so every packet carried sequence 0.
            writer.Write(packetSequence++);
            args.Packet.Write(outputStream);

            var dataBytes = outputStream.ToArray();

            ////////////////////////////////////////////////////////////
            // write frameSize
            ////////////////////////////////////////////////////////////
            var frameSize = BitConverter.GetBytes(dataBytes.Length);
            file.Write(frameSize, 0, frameSize.Length);

            ////////////////////////////////////////////////////////////
            // write currentTime (seconds relative to recording start)
            ////////////////////////////////////////////////////////////
            var diffTime = (double)(Time.time - startTime);
            var currentTime = BitConverter.GetBytes(diffTime);
            file.Write(currentTime, 0, currentTime.Length);

            ////////////////////////////////////////////////////////////
            // write framedata
            ////////////////////////////////////////////////////////////
            file.Write(dataBytes, 0, dataBytes.Length);

            file.Flush();

            // reuse dataBytes instead of a second ToArray() copy
            SendPacketData(dataBytes);

        }
    }

    void SendPacketData(byte[] data)
    {
        // Loopback by just "receiving" the data
        ReceivePacketData(data);
    }

    // Deserialize [int32 sequence][packet] and feed it to the loopback avatar.
    void ReceivePacketData(byte[] data)
    {
        using (MemoryStream inputStream = new MemoryStream(data))
        {
            BinaryReader reader = new BinaryReader(inputStream);
            int sequence = reader.ReadInt32();
            OvrAvatarPacket packet = OvrAvatarPacket.Read(inputStream);
            LoopbackAvatar.GetComponent<OvrAvatarRemoteDriver>().QueuePacket(sequence, packet);
        }
    }

    // Same wire format as ReceivePacketData, used for canned frames from disk.
    void PlaybackPacketData(byte[] data)
    {
        using (MemoryStream inputStream = new MemoryStream(data))
        {
            BinaryReader reader = new BinaryReader(inputStream);
            LoopbackAvatar.GetComponent<OvrAvatarRemoteDriver>().QueuePacket(reader.ReadInt32(), OvrAvatarPacket.Read(inputStream));
        }
    }
}
10 REPLIES 10

multimedial
Explorer
Still looking forward to your feedback... I am looking forward to lip and face animations for the avatar sdk. Any news when this will be coming? I am looking into recording and playback sound. 

Also, I would like to be able to show other avatars other than the default one if possible. Right now, one can only show preconfigured avatars....

multimedial
Explorer
Hi innerspaceapp, 

would you care to elaborate what changes you did? I haven't looked into the new SDK yet, this is just a side project for me. We would like to turn it into an asset for the Unity Asset Store if possible. 

I am mostly interested in getting other avatars to work other than the standard one. We also got a lot of different ideas on how to flesh this out, like encrypting the performance and multiplexing the whole data. 

As far as I can tell, there also seems to be audio support in the works?

I'd love to chat with someone at Oculus about the opportunity that this offers - my feeling is that there are only a few modifications necessary to turn this into a valuable training tool in VR.

KevinDerksen199
Honored Guest
Hi @innerspaceapp,

I wanted to thank you for this amazing contribution, it is exactly what I am looking for. I am working on games, and I need a tool to explain some mechanics simply to the user; by recording the required movements I am saving a lot of time.

Thanks! 

owenwp
Expert Protege
I was planning to implement something just like this since my app will also allow you to play with people who do not have VR, plus local AI, and I wanted to do an acted out tutorial sequence as well.  Having a recorded avatar agent that can physically point things out to you seems like a great feature.

multimedial
Explorer
This is an on and off side project for me. We have audio sound recording in the works, with embedded cue points that allow for triggering events during playback. As for facial animations, I haven't looked into it yet.

hazelshi
Honored Guest
Hi!
Thanks for your contribution, I'm currently doing pretty much the same thing.

to meet the new SDK, I changed my playbackPacketData into:

// Replays one serialized packet laid out as
//   [int32 sequence][int32 size][size bytes of SDK packet data]
void PlaybackPacketData(byte[] data)
{
    using (MemoryStream inputStream = new MemoryStream(data))
    {
        Debug.Log("Playing");
        BinaryReader reader = new BinaryReader(inputStream);
        int sequence = reader.ReadInt32();
        int size = reader.ReadInt32();
        byte[] sdkData = reader.ReadBytes(size);
        // BUGFIX: the native reader must be given the SDK payload length
        // ("size"), not data.Length, which also counts the 8 header bytes
        // consumed above.  Passing the oversized length makes
        // ovrAvatarPacket_Read misparse the buffer, which is the likely
        // reason the avatar won't move.
        IntPtr packet = CAPI.ovrAvatarPacket_Read((UInt32)size, sdkData);
        LoopbackAvatar.GetComponent<OvrAvatarRemoteDriver>().QueuePacket(sequence, new OvrAvatarPacket { ovrNativePacket = packet });
    }
}

But I don't know for what reason, the avatar won't move — could you help?

GabrielVA
Honored Guest
Working code as of Unity 2017.1.1f1 + SDK 3.4.1 (I think, at least it's what's written on LICENSE.txt). This is not optimized or organized at all. It's the bare minimum I wrote to make it work.

Made on top of RemoteLoopbackManager and inspired by OP.

To record: start the scene with the "record" box toggled and untoggle it before stopping the scene
To playback: start the scene with the "playback" box toggled or toggle it after starting

using UnityEngine;
using System.Collections;
using System;
using System.IO;
using Oculus.Avatar;
using System.Runtime.InteropServices;
using System.Collections.Generic;

public class Playback : MonoBehaviour
{
    // On-disk container for a recorded packet stream (BinaryFormatter payload).
    [Serializable]
    class PacketsFile
    {
        public LinkedList<Packet> packetList;
    };

    // One serialized packet: [int32 sequence][int32 size][size bytes of SDK data].
    [Serializable]
    class Packet
    {
        public byte[] PacketData;
    };

    public OvrAvatar LocalAvatar;
    public OvrAvatar LoopbackAvatar;

    private int PacketSequence = 0;

    // Packets waiting to be delivered to the loopback avatar in Update().
    LinkedList<Packet> packetQueue = new LinkedList<Packet>();

    public bool record = false;     // toggle on to capture; untoggle to flush to disk
    bool _lastRecord = false;       // previous value of record, to detect the falling edge
    public bool playback = false;   // when true, replay fileName instead of live packets
    public string fileName = "C:\\temp\\Recording.avs";
    LinkedList<Packet> _recordedQueue = new LinkedList<Packet>();


    void Start()
    {
        LocalAvatar.RecordPackets = true;
        LocalAvatar.PacketRecorded += OnLocalAvatarPacketRecorded;
        _lastRecord = record;
        if (playback)
        {
            ReadFile();
        }
    }

    // Called once per SDK-recorded packet.  In playback mode the live packet is
    // ignored and the next recorded one is substituted (this keeps replay in
    // step with the SDK's own packet cadence); otherwise the live packet is
    // serialized and queued.
    void OnLocalAvatarPacketRecorded(object sender, OvrAvatar.PacketEventArgs args)
    {
        if (playback)
        {
            LinkedListNode<Packet> packet = _recordedQueue.First;
            if (packet == null)
            {
                // Recording exhausted: loop from the start of the file.
                ReadFile();
                packet = _recordedQueue.First;
                // BUGFIX: an empty/corrupt recording left packet null and
                // crashed with a NullReferenceException below.
                if (packet == null) return;
            }
            SendPacketData(packet.Value.PacketData);
            _recordedQueue.RemoveFirst();
        }
        else
        {
            using (MemoryStream outputStream = new MemoryStream())
            {
                BinaryWriter writer = new BinaryWriter(outputStream);

                // Flatten the native packet into a byte buffer via the C API.
                var size = CAPI.ovrAvatarPacket_GetSize(args.Packet.ovrNativePacket);
                byte[] data = new byte[size];
                CAPI.ovrAvatarPacket_Write(args.Packet.ovrNativePacket, size, data);

                writer.Write(PacketSequence++);
                writer.Write(size);
                writer.Write(data);

                SendPacketData(outputStream.ToArray());
            }
        }
    }

    void Update()
    {
        // Falling edge of the "record" checkbox: persist what was captured.
        if (!record && _lastRecord)
        {
            WriteToFile();
            _lastRecord = record;
        }

        // Drain the per-frame queue into the loopback avatar.  Nothing adds to
        // packetQueue during this loop, so a plain Clear() replaces the
        // original's deadList copy-then-remove dance.
        if (packetQueue.Count > 0)
        {
            foreach (Packet packet in packetQueue)
            {
                ReceivePacketData(packet.PacketData);
            }
            packetQueue.Clear();
        }
    }

    void SendPacketData(byte[] data)
    {
        Packet packet = new Packet();
        packet.PacketData = data;

        packetQueue.AddLast(packet);
        // Only live packets are archived; replayed packets must not be re-recorded.
        if (record && !playback) _recordedQueue.AddLast(packet);
    }

    // Deserialize one packet and hand it to the loopback avatar's remote driver.
    void ReceivePacketData(byte[] data)
    {
        using (MemoryStream inputStream = new MemoryStream(data))
        {
            BinaryReader reader = new BinaryReader(inputStream);
            int sequence = reader.ReadInt32();

            int size = reader.ReadInt32();
            byte[] sdkData = reader.ReadBytes(size);

            // BUGFIX: pass the SDK payload length ("size") to the native
            // reader, not data.Length, which also counts the 8 header bytes
            // read above and makes the native parser over-read.
            IntPtr packet = CAPI.ovrAvatarPacket_Read((UInt32)size, sdkData);
            LoopbackAvatar.GetComponent<OvrAvatarRemoteDriver>().QueuePacket(sequence, new OvrAvatarPacket { ovrNativePacket = packet });
        }
    }

    void WriteToFile()
    {
        // SECURITY NOTE: BinaryFormatter is insecure against tampered input and
        // is obsolete/removed in modern .NET.  For files that may come from an
        // untrusted source, switch to a hand-rolled binary format or
        // System.Text.Json.
        using (Stream stream = File.Open(fileName, FileMode.Create))
        {
            new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter().Serialize(stream, new PacketsFile { packetList = _recordedQueue });
        }
        Debug.Log("File written");
    }

    void ReadFile()
    {
        // See the security note in WriteToFile regarding BinaryFormatter.
        using (Stream stream = File.Open(fileName, FileMode.Open))
        {
            _recordedQueue = (new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter().Deserialize(stream) as PacketsFile).packetList;
        }
        Debug.Log("File read");
    }
}

kidnee
Protege
This works great. Now I need to work out a clean way to record objects the avatar interacts with!

zapfanatic
Honored Guest
Hi guys,

in case someone needs avatar recording functionality in 2019, you are welcome to check out our (VR Labs) free tool for recording and playing back Oculus avatars:

https://medium.com/telerik-ar-vr/motion-recording-for-oculus-avatars-game-objects-in-unity-with-vr-l...