// Unity: capture webcam frames and display detected face information in front of the camera.

using UnityEngine;
using System.Collections;
using OpenCvSharp;
using UnityEngine.UI;
using System.IO;

public class FaceDetection : MonoBehaviour
{
    public WebCamTexture cameraTexture;   // live webcam feed
    public string cameraName = "";
    Texture2D rt;                         // holds the annotated detection result for on-screen display

    [Header("五官模型")] // facial-feature display objects (RawImage holders)
    public GameObject FacePhoto;
    public GameObject EyeLeft;
    public GameObject EyeRight;
    public GameObject Nose;
    public GameObject Mouth;

    [Header("五官")] // facial-feature textures
    public Texture2D FaceTexture;
    public Texture2D EyeLeftTexture;
    public Texture2D EyeRightTexture;
    public Texture2D NoseTexture;
    public Texture2D MouthTexture;

    bool DisplayRID = false;              // true once the camera is playing and frames may be drawn
    static int mPreviewWidth = 320;       // capture width in pixels
    static int mPreviewHeight = 240;      // capture height in pixels
    bool state = false;                   // true while face detection is active
    bool isPlay = false;                  // tracks whether the camera is playing (reset by StopCam)
    CascadeClassifier haarCascade;        // OpenCV Haar cascade used for face detection
    WebCamDevice[] devices;               // descriptions of the attached webcam devices
    Texture2D temp;                       // scratch texture reused every frame by GetTexture2D
    // initialize to 
    void the Start () 
    { 
        RT = new new the Texture2D (mPreviewWidth, mPreviewHeight, TextureFormat.RGB565, to false); 
        TEMP = the Texture2D new new (mPreviewWidth, mPreviewHeight, TextureFormat.RGB565, to false); 
        StartCoroutine (the Test () ); // 

        // CascadeClassifier cascade classifier, can be used to detect the object
        = new new CascadeClassifier haarCascade (Application.streamingAssetsPath + "/haarcascades/haarcascade_frontalface_alt.xml");//haarcascade_frontalface_alt2.xml//haarcascade_frontalface_alt.xml 
        the yield return Application.RequestUserAuthorization (UserAuthorization.WebCam); // wait for user allow access
        
    } 

    // Enables per-frame face detection (consumed by OnGUI).
    public void StartDetectionFace()
    {
        this.state = true;
    }

    // Disables face detection; OnGUI will then call StopCam and release the camera.
    public void StopDetectionFace()
    {
        this.state = false;
    }

    // Update is called once per frame. Intentionally empty: all per-frame work
    // happens in OnGUI (kept so Unity's lifecycle hook remains visible).
    void Update()
    {

    }


    // Open coroutine, acquired camera images data 
    the IEnumerator the Test () 
    { 
        IF (Application.HasUserAuthorization (UserAuthorization.WebCam)) // if the user is allowed to access, begins acquiring image    
        { 
            devices = WebCamTexture.devices; // first acquisition apparatus 
            cameraName devices = [0] .name; 
            the Debug. log ( "Camera:" + cameraName );
            cameraTexture = new WebCamTexture (cameraName, mPreviewWidth , mPreviewHeight, 15); // image is then acquired 
            cameraTexture.Play (); // Get started 
            DisplayRID = to true; 
        } 
    } 
    Mat haarResult; 
    byte [] BS; 

    // Per-frame display: while detection is enabled, run the Haar cascade on the
    // current camera frame and load the annotated result into rt; otherwise stop the camera.
    void OnGUI()
    {
        if (DisplayRID)
        {
            //GUI.DrawTexture(new UnityEngine.Rect(0, 0, mPreviewWidth, mPreviewHeight), cameraTexture, ScaleMode.ScaleToFit);

            if (state)
            {
                // Run detection on the current frame.
                haarResult = DetectFace(haarCascade, GetTexture2D(cameraTexture));

                // Encode as PNG (".png" — the garbled source had "PNG.", which OpenCV rejects)
                // and load it into the display texture. LoadImage decodes PNG/JPG bytes.
                bs = haarResult.ToBytes(".png");
                rt.LoadImage(bs);
                rt.Apply();

                //System.IO.File.WriteAllBytes(@"d:\temp\1.jpg", bs);
                //GUI.DrawTexture(new UnityEngine.Rect(mPreviewWidth, 0, mPreviewWidth, mPreviewHeight), rt, ScaleMode.StretchToFill);
            }
            else
            {
                StopCam();
            }
        }
    }

    Mat result;                       // annotated copy of the input frame (returned to caller)
    OpenCvSharp.Rect[] faces;         // rectangles of the faces found in the last frame
    Mat src;                          // scratch Mat for the decoded input frame
    Mat gray = new Mat();             // reused grayscale buffer
    Size axes = new Size();           // half-extents of the current face rectangle
    Point center = new Point();       // center of the current face rectangle

    /// <summary>
    /// Detects faces in the given texture with the supplied Haar cascade, pushes a
    /// cropped face image onto FacePhoto's RawImage, and returns an annotated Mat.
    /// </summary>
    /// <param name="cascade">Loaded Haar cascade classifier.</param>
    /// <param name="t">Current camera frame as a readable Texture2D.</param>
    /// <returns>A clone of the input frame (caller displays it via rt).</returns>
    private Mat DetectFace(CascadeClassifier cascade, Texture2D t)
    {
        src = Mat.FromImageData(t.EncodeToPNG(), ImreadModes.Color); // decode frame into a Mat
        result = src.Clone();                                        // work on a copy
        Cv2.CvtColor(src, gray, ColorConversionCodes.BGR2GRAY);      // color space conversion
        src.Dispose();  // release native memory (setting to null alone leaks the native Mat)
        src = null;

        #region detect face
        // DetectMultiScale finds all faces in the image and returns each face's
        // position and size as a rectangle. Called on the CascadeClassifier object.
        // NOTE(review): scaleFactor was garbled in the source (". 2F") — could have been 1.2f; confirm.
        faces = cascade.DetectMultiScale(gray, 2f, 2, HaarDetectionType.DoCannyPruning, new Size(80, 80));
        Debug.Log("face count:" + faces.Length);

        // Process every detected face.
        for (int i = 0; i < faces.Length; i++)
        {
            center.X = (int)(faces[i].X + faces[i].Width * 0.5);
            center.Y = (int)(faces[i].Y + faces[i].Height * 0.5);
            axes.Width = (int)(faces[i].Width * 0.5);
            axes.Height = (int)(faces[i].Height * 0.5);
            //Cv2.Ellipse(result, center, axes, 0, 0, 360, new Scalar(255, 0, 255), 4);

            Debug.Log("width:" + faces[i].Width + " Height:" + faces[i].Height);

            // Crop the face region and hand it to the UI as a texture.
            OpenCvSharp.Rect rect = new OpenCvSharp.Rect(faces[i].X, faces[i].Y, faces[i].Width, faces[i].Height);
            using (Mat faceMat = new Mat(result, rect)) // dispose the ROI view (leaked in the original)
            {
                byte[] bytes = faceMat.ToBytes(".png");

                Texture2D screenShot = new Texture2D((int)axes.Width, (int)axes.Height, TextureFormat.RGB24, false);
                screenShot.LoadImage(bytes, false); // LoadImage decodes the PNG bytes into the texture
                screenShot.Apply();

                //screenShot = SetSharp(screenShot);
                //System.IO.File.WriteAllBytes(@"d:\temp\Screenshot.png", bytes);

                FacePhoto.GetComponent<RawImage>().texture = screenShot;
            }
        }
        #endregion

        return result;
    }
    
    // Copies the current WebCamTexture frame into the reused 'temp' Texture2D.
    // SetPixels writes a pixel block; GetPixels reads the whole frame's colors.
    // NOTE: returns the shared 'temp' instance, so the result is overwritten on the next call.
    Texture2D GetTexture2D(WebCamTexture wct)
    {
        temp.SetPixels(wct.GetPixels());
        temp.Apply(); // upload the changed pixels to the GPU
        return temp;
    }
  public void StopCam () 
    { 
        IF (cameraTexture.isPlaying) 
        {
            cameraTexture.Stop();
            isPlay = false;
            state = false;
        }
    }

  

// Origin: www.cnblogs.com/clhxxlcj/p/10972631.html