当我的 SurfaceView 与相机视图一起运行时,它闪烁一次然后就消失了

问题描述:

我目前正在研究眼动跟踪方法,作为应用程序的输出,我需要一个实时图形来跟踪眼睛中心的移动。在 Fabian Timm 算法的帮助下,我已经成功完成了眼睛中心定位部分。但是当我尝试将 SurfaceView 添加到布局中时,它会闪烁一下,然后突然消失。下面我给出了我的 Java 代码和 .xml 文件。

package org.opencv.samples.facedetect; 

import org.opencv.android.BaseLoaderCallback; 
import org.opencv.android.CameraBridgeViewBase; 
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame; 
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2; 
import org.opencv.android.LoaderCallbackInterface; 
import org.opencv.android.OpenCVLoader; 
import org.opencv.core.*; 
import org.opencv.objdetect.CascadeClassifier; 
import org.opencv.imgproc.Imgproc; 

import java.io.File; 
import java.io.FileOutputStream; 
import java.io.IOException; 
import java.io.InputStream; 
import java.util.ArrayList; 
import java.util.List; 

import android.os.Bundle; 
import android.app.Activity; 
import android.content.Context; 
import android.graphics.Canvas; 
import android.graphics.Color; 
import android.graphics.Paint; 
import android.graphics.Path; 
import android.util.Log; 
import android.view.Display; 
import android.view.SurfaceHolder; 
import android.view.SurfaceView; 
import android.view.View; 
import android.view.View.OnTouchListener; 
import android.view.WindowManager; 
import android.widget.Button; 

/**
 * Eye-tracking demo activity.
 *
 * <p>Pipeline per camera frame: grab the grayscale frame from the OpenCV
 * camera bridge, detect faces with a Haar cascade, hand the face ROI to a
 * native (JNI) implementation of Fabian Timm's gradient-based eye-centre
 * localisation, and draw the detected pupil centres back onto the frame.
 *
 * <p>A second, small {@link SurfaceView} at the bottom of the layout is used
 * as an overlay for plotting. Because it overlaps the full-screen camera
 * {@code SurfaceView}, it must be placed on the media-overlay layer (see
 * {@link #onCreate}) or it loses the z-order fight and "flickers then
 * disappears".
 */
public class EyeTrackingActivity extends Activity implements CvCameraViewListener2, SurfaceHolder.Callback {

    /**
     * Native (JNI) eye-centre localisation (Timm's algorithm).
     *
     * @param mFace native address ({@code Mat.getNativeObjAddr()}) of the
     *              grayscale face ROI
     * @param mEye  eye region inside that ROI as {x, y, width, height}
     * @return {x, y} of the pupil centre, relative to the eye region
     */
    public native int[] findEyeCenter(long mFace, int[] mEye);

    private static final String TAG = "OCVSample::NDK";

    /** Colour for both the face rectangle and the pupil markers (opaque white). */
    private static final Scalar FACE_RECT_COLOR = new Scalar(255, 255, 255, 255);

    private Mat mRgba;
    private Mat mGray;
    private Mat mGrayNew;
    private Mat mretVal;
    private Mat scaledMatrix;
    private Mat tempMatrix;
    private Mat invertcolormatrix;

    private File mCascadeFile;
    private CascadeClassifier face_cascade;
    private CameraBridgeViewBase mOpenCvCameraView;

    // Minimum detectable face size, as a fraction of the frame height;
    // resolved to pixels lazily in onCameraFrame().
    private float mRelativeFaceSize = 0.5f;
    private int mAbsoluteFaceSize = 0;

    int leftEyePoint[] = new int[2];
    int rightEyePoint[] = new int[2];

    Point[] calibrationArray = new Point[4];

    int screen_width, screen_height;
    static double scale_factor;
    Point leftPupil, rightPupil;

    private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            switch (status) {
                case LoaderCallbackInterface.SUCCESS:
                    // OpenCV is ready: load our JNI library, materialise the
                    // Haar cascade from res/raw, then start the camera preview.
                    System.loadLibrary("example");
                    Log.i(TAG, "OpenCV loaded successfully");
                    loadFaceCascade();
                    mOpenCvCameraView.enableView();
                    break;
                default:
                    super.onManagerConnected(status);
                    break;
            }
        }
    };

    /**
     * Copies the Haar cascade from res/raw into private storage (the native
     * loader needs a real file path) and loads it into {@link #face_cascade}.
     * On failure {@code face_cascade} stays {@code null} and face detection is
     * simply skipped in {@link #onCameraFrame}.
     */
    private void loadFaceCascade() {
        InputStream is = null;
        FileOutputStream os = null;
        try {
            is = getResources().openRawResource(R.raw.haarcascade_frontalface_alt);
            File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
            mCascadeFile = new File(cascadeDir, "haarcascade_frontalface_alt.xml");
            os = new FileOutputStream(mCascadeFile);

            byte[] buffer = new byte[4096];
            int bytesRead;
            while ((bytesRead = is.read(buffer)) != -1) {
                os.write(buffer, 0, bytesRead);
            }

            face_cascade = new CascadeClassifier(mCascadeFile.getAbsolutePath());
            if (face_cascade.empty()) {
                Log.e(TAG, "Failed to load cascade classifier");
                face_cascade = null;
            } else {
                Log.i(TAG, "Loaded cascade classifier from " + mCascadeFile.getAbsolutePath());
            }
            cascadeDir.delete();
        } catch (IOException e) {
            Log.e(TAG, "face cascade not found", e);
        } finally {
            // BUGFIX: the streams were previously leaked when an IOException
            // interrupted the copy loop; always close them.
            if (is != null) {
                try { is.close(); } catch (IOException ignored) { /* best effort */ }
            }
            if (os != null) {
                try { os.close(); } catch (IOException ignored) { /* best effort */ }
            }
        }
    }

    public EyeTrackingActivity() {
        Log.i(TAG, "Instantiated new " + this.getClass());
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        Log.i(TAG, "called onCreate");
        super.onCreate(savedInstanceState);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

        setContentView(R.layout.face_detect_surface_view);

        // Cache the physical display size; used when scaling eye coordinates.
        Display display = getWindowManager().getDefaultDisplay();
        android.graphics.Point size = new android.graphics.Point();
        display.getSize(size);
        screen_width = size.x;
        screen_height = size.y;
        Log.i(TAG, "W: " + screen_width + " - H: " + screen_height);

        mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.fd_activity_surface_view);
        mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
        mOpenCvCameraView.setCameraIndex(CameraBridgeViewBase.CAMERA_ID_FRONT);
        mOpenCvCameraView.enableFpsMeter();
        mOpenCvCameraView.setCvCameraViewListener(this);

        SurfaceView surfaceView = (SurfaceView) findViewById(R.id.surface_view);
        // BUGFIX (the reported flicker): this SurfaceView overlaps the
        // full-screen camera SurfaceView. Without an explicit z-order the
        // graph surface is composited below the camera surface as soon as the
        // preview starts — it flashes once and vanishes. Putting it on the
        // media-overlay layer keeps it visible above the camera preview.
        surfaceView.setZOrderMediaOverlay(true);
        surfaceView.getHolder().addCallback(this);
    }

    @Override
    public void onPause() {
        super.onPause();
        // Release the camera while we are in the background.
        if (mOpenCvCameraView != null)
            mOpenCvCameraView.disableView();
    }

    @Override
    public void onResume() {
        super.onResume();
        // Re-initialise OpenCV asynchronously; the loader callback re-enables
        // the camera view once the native libraries are available.
        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_9, this, mLoaderCallback);
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        if (mOpenCvCameraView != null)
            mOpenCvCameraView.disableView();
    }

    public void onCameraViewStarted(int width, int height) {
        mRgba = new Mat();
        mGray = new Mat();
        mGrayNew = new Mat();
        scaledMatrix = new Mat();
        tempMatrix = new Mat();
        invertcolormatrix = new Mat();
    }

    public void onCameraViewStopped() {
        // BUGFIX: mRgba was allocated in onCameraViewStarted but never released.
        mRgba.release();
        mGray.release();
        mGrayNew.release();
        scaledMatrix.release();
        tempMatrix.release();
        invertcolormatrix.release();
    }

    /**
     * Per-frame callback: detects faces, localises eye centres on the first
     * detected face, and returns the annotated grayscale frame for display.
     */
    public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
        mGray = inputFrame.gray();

        // Lazily derive the minimum face size in pixels from the frame height.
        if (mAbsoluteFaceSize == 0) {
            int height = mGray.rows();
            if (Math.round(height * mRelativeFaceSize) > 0) {
                mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
            }
        }

        MatOfRect faces = new MatOfRect();
        if (face_cascade != null) {
            face_cascade.detectMultiScale(mGray, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                    new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
        }

        Rect[] facesArray = faces.toArray();
        faces.release();

        for (int i = 0; i < facesArray.length; i++) {
            Core.rectangle(mGray, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);
        }
        // Eye localisation is only run on the first face (previously it was
        // redundantly re-run on face 0 once per detected face).
        if (facesArray.length > 0) {
            findEyes(mGray, facesArray[0]);
        }

        // BUGFIX: returning null here (the old no-face early-exit) makes
        // CameraBridgeViewBase render nothing, causing black/flashing frames.
        // Always hand back the frame to display.
        return mGray;
    }

    /**
     * Locates both pupil centres inside {@code face} via the native Timm
     * implementation and draws them onto {@link #mGray}.
     *
     * @param frame_gray full grayscale frame
     * @param face       face bounding box within that frame
     * @return the annotated frame ({@code mGray})
     */
    private Mat findEyes(Mat frame_gray, Rect face) {
        Mat faceROI = frame_gray.submat(face);

        // Eye-region geometry as fixed fractions of the face box (Timm's
        // demo heuristics).
        int eye_region_width = (int) (face.width * 0.35);
        int eye_region_height = (int) (face.width * 0.30);
        int eye_region_top = (int) (face.height * 0.25);
        int leftEyeRegion_x = (int) (face.width * 0.13);
        int rightEyeRegion_x = face.width - eye_region_width - leftEyeRegion_x;

        Rect leftEyeRegion = new Rect(leftEyeRegion_x, eye_region_top, eye_region_width, eye_region_height);
        int[] leftEyeArray = {leftEyeRegion_x, eye_region_top, eye_region_width, eye_region_height};
        Rect rightEyeRegion = new Rect(rightEyeRegion_x, eye_region_top, eye_region_width, eye_region_height);
        int[] rightEyeArray = {rightEyeRegion_x, eye_region_top, eye_region_width, eye_region_height};

        // Native eye-centre localisation; results are relative to the eye region.
        leftEyePoint = findEyeCenter(faceROI.getNativeObjAddr(), leftEyeArray);
        rightEyePoint = findEyeCenter(faceROI.getNativeObjAddr(), rightEyeArray);
        leftPupil = new Point(leftEyePoint[0], leftEyePoint[1]);
        rightPupil = new Point(rightEyePoint[0], rightEyePoint[1]);

        // Translate region-relative centres into full-frame coordinates.
        // (All operands are ints; the old Math.round() calls were no-ops.)
        rightPupil.x += rightEyeRegion.x + face.x;
        rightPupil.y += rightEyeRegion.y + face.y;
        leftPupil.x += leftEyeRegion.x + face.x;
        leftPupil.y += leftEyeRegion.y + face.y;

        // Draw the pupil centres on the frame that will be displayed.
        Core.circle(mGray, rightPupil, 3, FACE_RECT_COLOR);
        Core.circle(mGray, leftPupil, 3, FACE_RECT_COLOR);

        // Drops only the submat header; the pixel data is owned by frame_gray.
        faceROI.release();
        return mGray;
    }

    /**
     * Maps a point from the scaled face matrix back to original coordinates.
     * NOTE(review): only y is divided by scale_factor while x is merely
     * truncated, and the -50 offset mirrors the +50 shift in the commented-out
     * scaling code — looks inconsistent; confirm before relying on it.
     */
    private static Point unscalePoint(Point p) {
        int x = (int) (p.x);
        int y = (int) Math.round(p.y / scale_factor);
        return new Point(x, y - 50);
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        // Nothing to do; drawing happens in surfaceChanged().
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        // Placeholder plot: paints the overlay surface once with a test line.
        // Real-time coordinate plotting would redraw here per update.
        Paint paint = new Paint();
        paint.setStyle(Paint.Style.STROKE);
        paint.setStrokeWidth(3);
        paint.setColor(Color.WHITE);
        Path path = new Path();
        path.moveTo(0, 0);
        path.lineTo(150, 150);

        Canvas canvas = holder.lockCanvas();
        canvas.drawRGB(255, 128, 128);
        canvas.drawPath(path, paint);
        holder.unlockCanvasAndPost(canvas);
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        // Nothing to release; the holder owns the surface.
    }

}

我的 .xml 文件如下:

<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android" 
xmlns:tools="http://schemas.android.com/tools" 
android:layout_width="match_parent" 
android:layout_height="match_parent" 
tools:context=".EyeTrackingActivity" > 

<org.opencv.android.JavaCameraView 
    android:id="@+id/fd_activity_surface_view" 
    android:layout_width="match_parent" 
    android:layout_height="match_parent" /> 

<SurfaceView 
    android:id="@+id/surface_view" 
    android:layout_width="fill_parent" 
    android:layout_height="100dip" 
    android:layout_alignParentBottom="true"/> 

+1

你的 logcat 日志输出是什么? – Ivan

+0

该问题已经解决,但现在我基于同一份代码有一个新任务:我需要在 Canvas 的帮助下实时绘制我的 x-y 坐标。你能帮我吗? –

+0

评论区不是用来提另一个问题的。请阅读[评论](http://*.com/help/privileges/comment)的使用说明。如果问题已经解决,请自己写出答案并接受它,这样其他人就知道这个问题不再需要帮助了。 – Ivan

我发现答案就在 .xml 文件中。修改后的 .xml 如下所示:

<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android" 
xmlns:tools="http://schemas.android.com/tools" 
android:layout_width="match_parent" 
android:layout_height="match_parent" 
android:gravity="fill" 
tools:context=".EyeTrackingActivity" > 

<org.opencv.android.JavaCameraView 
    android:id="@+id/fd_activity_surface_view" 
    android:layout_width="fill_parent" 
    android:layout_height="match_parent" 
    android:layout_above="@+id/surface_view" /> 

<SurfaceView 
    android:id="@+id/surface_view" 
    android:layout_width="fill_parent" 
    android:layout_height="100dip" 
    android:layout_alignParentBottom="true"/>