Forum Discussion
chndevti
3 years agoHonored Guest
Assigning Texture to OVROverlay during runtime not working in build
Hi everyone.
I am working on a VR app that has dynamic text content fetched from an API. There is a loading screen in the app that uses the OVROverlay (Quad) to assign a texture (RenderTexture) created from a TextMeshPro component in runtime. This works fine in PCVR while I am testing using Quest Link, but doesn't work in an actual build. I have tried few things to understand why it doesn't work. I tried converting the RenderTexture to a Texture2D, but even that doesn't work. I verified that the texture is indeed correct by saving it as PNG to the PersistentPath. I also tried using the OverrideOverlayTextureInfo method (in Update), but it didn't work (not sure if I did it correctly). I have attached a screenshot of the OVROverlay Component from the inspector and you can find the code I used to achieve this below (Commented lines include some of the stuff I tried to fix this issue)
Thanks in advance!
Device: Quest 2
using System;
using System.Collections;
using UnityEngine;
using TMPro;
namespace Utility
{
[RequireComponent(typeof(OVROverlay))]
public class OVRText2Texture : MonoBehaviour
{
private static RenderTexture rt;
private static IntPtr rtPointer;
[SerializeField] private TextMeshPro text;
[SerializeField] private Vector2 textureSize = new Vector2(1024, 1024);
[SerializeField] private TMP_FontAsset hebrewFontAsset;
[SerializeField] private TMP_FontAsset arabicFontAsset;
[SerializeField] private TMP_FontAsset defaultFontAsset;
private void OnEnable() => StartCoroutine(ExecuteCoroutine());
private IEnumerator ExecuteCoroutine()
{
yield return new WaitForEndOfFrame();
Execute();
}
[ContextMenu("Execute")]
public void Execute()
{
var size = new Vector2(textureSize.x, textureSize.y);
rt = RenderTexture.GetTemporary((int)size.x, (int)size.y, 24, RenderTextureFormat.ARGB32);
var proj = Matrix4x4.Ortho(-size.x / 2.0f, size.x / 2.0f, -size.x / 2.0f, size.y / 2.0f, -1, 100);
GL.LoadProjectionMatrix(proj);
RenderTexture currentActiveRT = RenderTexture.active;
Graphics.SetRenderTarget(rt);
GL.Clear(true, true, new UnityEngine.Color(0f, 0f, 0f, 0f));
text.renderer.sharedMaterial.SetPass(0);
var camTransform = Camera.main.transform;
Graphics.DrawMeshNow(text.mesh, Matrix4x4.TRS(camTransform.position, camTransform.rotation, camTransform.localScale));
RenderTexture.active = currentActiveRT;
rtPointer = rt.GetNativeTexturePtr();
// var t2D = new Texture2D(rt.width, rt.height, rt.graphicsFormat, 0, UnityEngine.Experimental.Rendering.TextureCreationFlags.None);
// RenderTexture.active = rt;
// t2D.ReadPixels(new Rect(0, 0, rt.width, rt.height), 0, 0);
// t2D.Apply();
// RenderTexture.active = null;
// File.WriteAllBytes(Application.persistentDataPath + "loading.png", t2D.EncodeToPNG());
GetComponent<OVROverlay>().textures = new Texture[2] { rt, rt };
// GetComponent<OVROverlay>().UpdateTextureRectMatrix();
// GetComponent<OVROverlay>().OverrideOverlayTextureInfo(rt, rt.GetNativeTexturePtr(), UnityEngine.XR.XRNode.LeftEye);
}
}
}
No RepliesBe the first to reply
Quick Links
- Horizon Developer Support
- Quest User Forums
- Troubleshooting Forum for problems with a game or app
- Quest Support for problems with your device
Other Meta Support
Related Content
- 2 months ago
- 10 months ago
- 9 months ago
- 5 months ago