EasyAR 3.1.0 worked properly in older versions of Unity: the webcam loaded as expected. But since I updated to Unity 2021.1.18f1, the webcam only shows a black screen. I tested with the samples provided here: https://www.easyar.com/view/downloadHistory.html and got the same result: the webcam texture stays black.
To make sure it's not a problem with my webcam, I tried this simple example: https://community.theta360.guide/t/simplest-webcam-test-in-unity/516/3 (a minimal version of that test is sketched after this paragraph) and was able to load the webcam texture properly.
I also tried using another webcam with EasyAR, but the EasyAR webcam texture stays black.
I have no idea what the problem is, since it worked fine before. Has anything changed between Unity 2020.2 and 2021.1 in how the webcam is accessed?
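For reference, this is roughly the minimal WebCamTexture test that does work on my machine (a sketch along the lines of the linked example, not the exact code; the class name SimpleWebcamTest is just a placeholder):

using UnityEngine;

// Minimal standalone webcam test: attach to an object with a Renderer
// (e.g. a Quad) and the default camera device's feed should appear on
// its material.
public class SimpleWebcamTest : MonoBehaviour
{
    private WebCamTexture webcamTexture;

    void Start()
    {
        // Use the first available camera device.
        webcamTexture = new WebCamTexture();
        GetComponent<Renderer>().material.mainTexture = webcamTexture;
        webcamTexture.Play();
    }

    void OnDestroy()
    {
        // Stop the device so it is released when the object is destroyed.
        if (webcamTexture != null && webcamTexture.isPlaying)
        {
            webcamTexture.Stop();
        }
    }
}

With this approach the webcam feed renders correctly, so the camera and its drivers seem fine; only the EasyAR rendering stays black.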
**Here is the CameraImageRenderer.cs script:**
//================================================================================================================================
//
// Copyright (c) 2015-2019 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//================================================================================================================================
using System;
using UnityEngine;
using UnityEngine.Rendering;
namespace easyar
{
[RequireComponent(typeof(RenderCameraController))]
public class CameraImageRenderer : MonoBehaviour
{
private RenderCameraController controller;
private CommandBuffer commandBuffer;
private CameraImageMaterial arMaterial;
private Material material;
private CameraParameters cameraParameters;
private bool renderImageHFlip;
private UserRequest request;
public event Action<Material, Vector2> OnFrameRenderUpdate;
private event Action<Camera, RenderTexture> TargetTextureChange;
protected virtual void Awake()
{
controller = GetComponent<RenderCameraController>();
arMaterial = new CameraImageMaterial();
}
protected virtual void OnEnable()
{
UpdateCommandBuffer(controller ? controller.TargetCamera : null, material);
}
protected virtual void OnDisable()
{
RemoveCommandBuffer(controller ? controller.TargetCamera : null);
}
protected virtual void OnDestroy()
{
arMaterial.Dispose();
if (request != null) { request.Dispose(); }
if (cameraParameters != null) { cameraParameters.Dispose(); }
}
public void RequestTargetTexture(Action<Camera, RenderTexture> targetTextureEventHandler)
{
if (request == null)
{
request = new UserRequest();
}
TargetTextureChange += targetTextureEventHandler;
RenderTexture texture;
request.UpdateTexture(controller ? controller.TargetCamera : null, material, out texture);
if (TargetTextureChange != null && texture)
{
TargetTextureChange(controller.TargetCamera, texture);
}
}
public void DropTargetTexture(Action<Camera, RenderTexture> targetTextureEventHandler)
{
if (controller)
{
targetTextureEventHandler(controller.TargetCamera, null);
}
TargetTextureChange -= targetTextureEventHandler;
if (TargetTextureChange == null && request != null)
{
request.RemoveCommandBuffer(controller ? controller.TargetCamera : null);
request.Dispose();
request = null;
}
}
public void OnAssemble(ARSession session)
{
session.FrameChange += OnFrameChange;
session.FrameUpdate += OnFrameUpdate;
}
public void SetHFilp(bool hFlip)
{
renderImageHFlip = hFlip;
}
private void OnFrameChange(OutputFrame outputFrame, Matrix4x4 displayCompensation)
{
if (outputFrame == null)
{
material = null;
UpdateCommandBuffer(controller ? controller.TargetCamera : null, material);
if (request != null)
{
request.UpdateCommandBuffer(controller ? controller.TargetCamera : null, material);
RenderTexture texture;
if (TargetTextureChange != null && request.UpdateTexture(controller.TargetCamera, material, out texture))
{
TargetTextureChange(controller.TargetCamera, texture);
}
}
return;
}
if (!enabled && request == null && OnFrameRenderUpdate == null)
{
return;
}
using (var frame = outputFrame.inputFrame())
{
using (var image = frame.image())
{
var materialUpdated = arMaterial.UpdateByImage(image);
if (material != materialUpdated)
{
material = materialUpdated;
UpdateCommandBuffer(controller ? controller.TargetCamera : null, material);
if (request != null) { request.UpdateCommandBuffer(controller ? controller.TargetCamera : null, material); }
}
}
if (cameraParameters != null)
{
cameraParameters.Dispose();
}
cameraParameters = frame.cameraParameters();
}
}
private void OnFrameUpdate(OutputFrame outputFrame)
{
if (!controller || (!enabled && request == null && OnFrameRenderUpdate == null))
{
return;
}
if (request != null)
{
RenderTexture texture;
if (TargetTextureChange != null && request.UpdateTexture(controller.TargetCamera, material, out texture))
{
TargetTextureChange(controller.TargetCamera, texture);
}
}
if (!material)
{
return;
}
bool cameraFront = cameraParameters.cameraDeviceType() == CameraDeviceType.Front;
var imageProjection = cameraParameters.imageProjection(controller.TargetCamera.aspect, EasyARController.Instance.Display.Rotation, false, cameraFront? !renderImageHFlip : renderImageHFlip).ToUnityMatrix();
if (renderImageHFlip)
{
var translateMatrix = Matrix4x4.identity;
translateMatrix.m00 = -1;
imageProjection = translateMatrix * imageProjection;
}
material.SetMatrix("_TextureRotation", imageProjection);
if (OnFrameRenderUpdate != null)
{
OnFrameRenderUpdate(material, new Vector2(Screen.width * controller.TargetCamera.rect.width, Screen.height * controller.TargetCamera.rect.height));
}
}
private void UpdateCommandBuffer(Camera cam, Material material)
{
RemoveCommandBuffer(cam);
if (!cam || !material)
{
return;
}
if (enabled)
{
commandBuffer = new CommandBuffer();
commandBuffer.Blit(null, BuiltinRenderTextureType.CameraTarget, material);
cam.AddCommandBuffer(CameraEvent.BeforeForwardOpaque, commandBuffer);
}
}
private void RemoveCommandBuffer(Camera cam)
{
if (commandBuffer != null)
{
if (cam)
{
cam.RemoveCommandBuffer(CameraEvent.BeforeForwardOpaque, commandBuffer);
}
commandBuffer.Dispose();
commandBuffer = null;
}
}
private class UserRequest : IDisposable
{
private RenderTexture texture;
private CommandBuffer commandBuffer;
~UserRequest()
{
if (commandBuffer != null) { commandBuffer.Dispose(); }
if (texture) { Destroy(texture); }
}
public void Dispose()
{
if (commandBuffer != null) { commandBuffer.Dispose(); }
if (texture) { Destroy(texture); }
GC.SuppressFinalize(this);
}
public bool UpdateTexture(Camera cam, Material material, out RenderTexture tex)
{
tex = texture;
if (!cam || !material)
{
if (texture)
{
Destroy(texture);
tex = texture = null;
return true;
}
return false;
}
int w = (int)(Screen.width * cam.rect.width);
int h = (int)(Screen.height * cam.rect.height);
if (texture && (texture.width != w || texture.height != h))
{
Destroy(texture);
}
if (texture)
{
return false;
}
else
{
texture = new RenderTexture(w, h, 0);
UpdateCommandBuffer(cam, material);
tex = texture;
return true;
}
}
public void UpdateCommandBuffer(Camera cam, Material material)
{
RemoveCommandBuffer(cam);
if (!cam || !material)
{
return;
}
if (texture)
{
commandBuffer = new CommandBuffer();
commandBuffer.Blit(null, texture, material);
cam.AddCommandBuffer(CameraEvent.BeforeForwardOpaque, commandBuffer);
}
}
public void RemoveCommandBuffer(Camera cam)
{
if (commandBuffer != null)
{
if (cam)
{
cam.RemoveCommandBuffer(CameraEvent.BeforeForwardOpaque, commandBuffer);
}
commandBuffer.Dispose();
commandBuffer = null;
}
}
}
}
}
**Here is the VideoCameraDevice.cs script:**
//================================================================================================================================
//
// Copyright (c) 2015-2019 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//================================================================================================================================
using System;
using UnityEngine;
namespace easyar
{
public class VideoCameraDevice : CameraSource
{
/// <summary>
/// EasyAR Sense API. Accessible between DeviceCreated and DeviceClosed event if available.
/// </summary>
public CameraDevice Device { get; private set; }
public CameraDeviceFocusMode FocusMode = CameraDeviceFocusMode.Continousauto;
public Vector2 CameraSize = new Vector2(1280, 960);
public CameraDeviceOpenMethod CameraOpenMethod = CameraDeviceOpenMethod.DeviceType;
[HideInInspector, SerializeField]
public CameraDeviceType CameraType = CameraDeviceType.Back;
[HideInInspector, SerializeField]
public int CameraIndex = 0;
[HideInInspector, SerializeField]
private CameraDevicePreference cameraPreference = CameraDevicePreference.PreferObjectSensing;
private CameraParameters parameters = null;
private bool willOpen;
public event Action DeviceCreated;
public event Action DeviceOpened;
public event Action DeviceClosed;
public enum CameraDeviceOpenMethod
{
DeviceType,
DeviceIndex,
}
public override int BufferCapacity
{
get
{
if (Device != null)
{
return Device.bufferCapacity();
}
return bufferCapacity;
}
set
{
bufferCapacity = value;
if (Device != null)
{
Device.setBufferCapacity(value);
}
}
}
public override bool HasSpatialInformation
{
get { return false; }
}
public CameraDevicePreference CameraPreference
{
get { return cameraPreference; }
// Switches to the preferred FocusMode when CameraPreference is changed.
// You can set another FocusMode afterwards, but the tracking results may differ.
set
{
cameraPreference = value;
switch (cameraPreference)
{
case CameraDevicePreference.PreferObjectSensing:
FocusMode = CameraDeviceFocusMode.Continousauto;
break;
case CameraDevicePreference.PreferSurfaceTracking:
FocusMode = CameraDeviceFocusMode.Medium;
break;
default:
break;
}
}
}
public CameraParameters Parameters
{
get
{
if (Device != null)
{
return Device.cameraParameters();
}
return parameters;
}
set
{
parameters = value;
}
}
protected override void OnEnable()
{
base.OnEnable();
if (Device != null)
{
Device.start();
}
}
protected override void Start()
{
if (!CameraDevice.isAvailable())
{
throw new UIPopupException(typeof(CameraDevice) + " not available");
}
base.Start();
}
protected override void OnDisable()
{
base.OnDisable();
if (Device != null)
{
Device.stop();
}
}
public override void Open()
{
willOpen = true;
CameraDevice.requestPermissions(EasyARController.Scheduler, (Action<PermissionStatus, string>)((status, msg) =>
{
if (!willOpen)
{
return;
}
if (status != PermissionStatus.Granted)
{
throw new UIPopupException("Camera permission not granted");
}
Close();
Device = CameraDeviceSelector.createCameraDevice(CameraPreference);
if (DeviceCreated != null)
{
DeviceCreated();
}
bool openResult = false;
switch (CameraOpenMethod)
{
case CameraDeviceOpenMethod.DeviceType:
openResult = Device.openWithPreferredType(CameraType);
break;
case CameraDeviceOpenMethod.DeviceIndex:
openResult = Device.openWithIndex(CameraIndex);
break;
default:
break;
}
if (!openResult)
{
Debug.LogError("Camera open failed");
Device.Dispose();
Device = null;
return;
}
Device.setFocusMode(FocusMode);
Device.setSize(new Vec2I((int)CameraSize.x, (int)CameraSize.y));
if (parameters != null)
{
Device.setCameraParameters(parameters);
}
if (bufferCapacity != 0)
{
Device.setBufferCapacity(bufferCapacity);
}
if (sink != null) { Device.inputFrameSource().connect(sink); }
if (DeviceOpened != null)
{
DeviceOpened();
}
if (enabled)
{
OnEnable();
}
}));
}
public override void Close()
{
willOpen = false;
if (Device != null)
{
OnDisable();
Device.close();
Device.Dispose();
if (DeviceClosed != null)
{
DeviceClosed();
}
Device = null;
}
}
public override void Connect(InputFrameSink val)
{
base.Connect(val);
if (Device != null)
{
Device.inputFrameSource().connect(val);
}
}
}
}
**Here is the RenderCameraController.cs script:**
//================================================================================================================================
//
// Copyright (c) 2015-2019 VisionStar Information Technology (Shanghai) Co., Ltd. All Rights Reserved.
// EasyAR is the registered trademark or trademark of VisionStar Information Technology (Shanghai) Co., Ltd in China
// and other countries for the augmented reality technology developed by VisionStar Information Technology (Shanghai) Co., Ltd.
//
//================================================================================================================================
using UnityEngine;
namespace easyar
{
public class RenderCameraController : MonoBehaviour
{
public Camera TargetCamera;
public RenderCameraParameters ExternalParameters;
private CameraImageRenderer cameraRenderer;
private Matrix4x4 currentDisplayCompensation = Matrix4x4.identity;
private CameraParameters cameraParameters;
private bool projectHFilp;
private ARSession arSession;
protected virtual void OnEnable()
{
if (arSession)
{
arSession.FrameChange += OnFrameChange;
arSession.FrameUpdate += OnFrameUpdate;
}
}
protected virtual void OnDisable()
{
if (arSession)
{
arSession.FrameChange -= OnFrameChange;
arSession.FrameUpdate -= OnFrameUpdate;
}
}
protected virtual void OnDestroy()
{
if (cameraParameters != null)
{
cameraParameters.Dispose();
}
if (ExternalParameters)
{
ExternalParameters.Dispose();
}
}
internal void OnAssemble(ARSession session)
{
arSession = session;
if (!TargetCamera)
{
TargetCamera = session.Assembly.Camera;
}
if (enabled)
{
arSession.FrameChange += OnFrameChange;
arSession.FrameUpdate += OnFrameUpdate;
}
cameraRenderer = GetComponent<CameraImageRenderer>();
if (cameraRenderer)
{
cameraRenderer.OnAssemble(session);
}
}
internal void SetProjectHFlip(bool hFlip)
{
projectHFilp = hFlip;
}
internal void SetRenderImageHFilp(bool hFlip)
{
if (cameraRenderer)
{
cameraRenderer.SetHFilp(hFlip);
}
}
private void OnFrameChange(OutputFrame outputFrame, Matrix4x4 displayCompensation)
{
if (outputFrame == null)
{
return;
}
currentDisplayCompensation = displayCompensation.inverse;
using (var frame = outputFrame.inputFrame())
{
if (cameraParameters != null)
{
cameraParameters.Dispose();
}
cameraParameters = frame.cameraParameters();
if (ExternalParameters)
{
ExternalParameters.Build(cameraParameters);
}
}
}
private void OnFrameUpdate(OutputFrame outputFrame)
{
var camParameters = ExternalParameters ? ExternalParameters.Parameters : cameraParameters;
var projection = camParameters.projection(TargetCamera.nearClipPlane, TargetCamera.farClipPlane, TargetCamera.aspect, EasyARController.Instance.Display.Rotation, false, false).ToUnityMatrix();
if (ExternalParameters)
{
projection *= ExternalParameters.Transform;
}
projection *= currentDisplayCompensation;
if (projectHFilp)
{
var translateMatrix = Matrix4x4.identity;
translateMatrix.m00 = -1;
projection = translateMatrix * projection;
}
TargetCamera.projectionMatrix = projection;
GL.invertCulling = projectHFilp;
}
}
}
I am grateful for any tip or help, because I am getting really desperate about how to solve this problem.