Unity3D can render a webcam stream as a texture using the WebCamTexture class. While this is useful for rendering raw webcam data, my application required doing some OpenCV processing on the webcam data before rendering it in the scene.
The default data type for storing image data with EmguCV is Image&lt;TColor, TDepth&gt;. In Unity land, however, textures have no parsing capability for Image&lt;TColor, TDepth&gt; and only allow Color32[] data to be set as textures. A quick Google search did not help much in finding ways of converting Image&lt;TColor, TDepth&gt; to Color32[]. I found some code that uses Bitmap and MemoryStream but couldn't get it to work.
Eventually, I found Texture2D.LoadRawTextureData() but couldn't find any good reference for using it with Image&lt;TColor, TDepth&gt;. After much playing around and a few coffees later, I got it to work. Here is my code:
using UnityEngine;
using System.Collections;
using Emgu.CV;
using Emgu.Util;
using Emgu.CV.Structure;
using System;

/// <summary>
/// Grabs frames from the default webcam via EmguCV, converts them to raw BGRA
/// bytes, and uploads them to a Texture2D rendered on this GameObject.
/// EmguCV raises <c>ImageGrabbed</c> on its own capture thread; Unity textures
/// may only be touched on the main thread, so frames are handed over via a
/// lock-protected buffer and consumed in <c>Update</c>.
/// </summary>
public class WebcamCaptureCV : MonoBehaviour
{
    private int frameWidth;
    private int frameHeight;
    private Capture cvCapture;
    private Image<Bgr, byte> currentFrameBgr;
    private Image<Rgb, byte> currentFrameRgb;
    private Image<Bgra, byte> currentFrameBgra;
    private Texture2D tex;

    // Guards currentFrameBgra and frameReady: ProcessFrame runs on the EmguCV
    // capture thread while Update runs on the Unity main thread.
    private readonly object frameLock = new object();
    private bool frameReady; // true when a new frame is waiting to be uploaded

    void Start()
    {
        cvCapture = new Capture(0);
        cvCapture.FlipVertical = true; // the image from the webcam arrives vertically flipped
        cvCapture.ImageGrabbed += ProcessFrame;

        frameWidth = (int)cvCapture.GetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_FRAME_WIDTH);
        frameHeight = (int)cvCapture.GetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_FRAME_HEIGHT);

        tex = new Texture2D(frameWidth, frameHeight, TextureFormat.BGRA32, false);

        // The gameObject.renderer shortcut was removed in Unity 5;
        // GetComponent<Renderer>() works on all Unity versions.
        GetComponent<Renderer>().material.mainTexture = tex;

        currentFrameBgr = new Image<Bgr, byte>(frameWidth, frameHeight);
        currentFrameRgb = new Image<Rgb, byte>(frameWidth, frameHeight);
        currentFrameBgra = new Image<Bgra, byte>(frameWidth, frameHeight);

        cvCapture.Start();
    }

    /// <summary>
    /// Capture-thread callback: retrieves the latest BGR frame, reinterprets
    /// its bytes as RGB (this swaps the R and B channels so the final BGRA
    /// buffer matches the texture's expected channel order), and publishes the
    /// BGRA result for Update() to upload.
    /// </summary>
    private void ProcessFrame(object sender, EventArgs arg)
    {
        currentFrameBgr = cvCapture.RetrieveBgrFrame();
        // Raw byte copy, not a color conversion: deliberately reinterprets BGR
        // data as RGB so the subsequent Convert produces the channel order the
        // texture needs.
        currentFrameRgb.Bytes = currentFrameBgr.Bytes;

        lock (frameLock)
        {
            currentFrameBgra = currentFrameRgb.Convert<Bgra, byte>();
            frameReady = true;
        }
        // showImageDebug<Rgb, Byte>(currentFrameRgb);
    }

    void Update()
    {
        byte[] pendingBytes = null;
        lock (frameLock)
        {
            if (frameReady)
            {
                pendingBytes = currentFrameBgra.Bytes;
                frameReady = false;
            }
        }

        // Only re-upload when a new frame actually arrived; also avoids
        // touching the buffer before the first frame is captured.
        if (pendingBytes != null)
        {
            tex.LoadRawTextureData(pendingBytes);
            tex.Apply();
        }
    }

    void OnDestroy()
    {
        if (cvCapture != null)
        {
            // Unsubscribe first so no callback fires during teardown.
            cvCapture.ImageGrabbed -= ProcessFrame;
            cvCapture.Stop();
            cvCapture.Dispose(); // Capture holds native resources
            cvCapture = null;
        }
    }

    /// <summary>
    /// Debug helper: shows an EmguCV image in a native highgui window and
    /// blocks until a key is pressed. NOTE(review): blocks the calling thread;
    /// only use from throwaway debugging code.
    /// </summary>
    private void showImageDebug<TColor, TDepth>(Image<TColor, TDepth> img, string windowName = "Debug Window")
        where TColor : struct, IColor
        where TDepth : struct
    {
        CvInvoke.cvShowImage(windowName, img);
        CvInvoke.cvWaitKey(0);
        CvInvoke.cvDestroyWindow(windowName);
    }
}