
Android RakNet Series, Part 5: Video Communication with OpenCV4Android


Introduction

The goal of bringing in OpenCV4Android is to solve the video-communication part of the RakNet framework. This already works under Ubuntu; this article ports it to the Android platform.

OpenCV is an open-source, cross-platform computer-vision library that runs on Windows, Android, Maemo, FreeBSD, OpenBSD, iOS, Linux and Mac OS. It is lightweight and efficient, consisting of a set of C functions and a small number of C++ classes, and it also offers Python, Ruby and MATLAB bindings, implementing many common image-processing and computer-vision algorithms. OpenCV targets real-world, real-time applications: its optimized C code gives it considerable speed, and it can run even faster with Intel's IPP (Integrated Performance Primitives) high-performance multimedia library.

Related links

http://sourceforge.net/projects/opencvlibrary/files/opencv-android/ — latest OpenCV4Android package downloads
http://www.opencv.org.cn/ — OpenCV Chinese forum
http://opencv.org/?s=+android&x=0&y=0 — OpenCV official site
http://www.code.opencv.org/projects/opencv/issues — OpenCV issue tracker
http://docs.opencv.org/platforms/android/service/doc/index.html or http://docs.opencv.org/trunk/ — official documentation

http://www.jayrambhia.com/android/ — a related blog

Details

Project overview

The official OpenCV4Android SDK ships one library project (OpenCV Library - 2.4.10), the manager project (OpenCV Manager - 2.4.10) and nine demo projects (OpenCV Tutorial 1 - Camera Preview, OpenCV Tutorial 2 - Mixed Processing, OpenCV Tutorial 3 - Camera Control, OpenCV Sample - 4 puzzle, OpenCV Sample - camera-calibration, OpenCV Sample - color-blob-detection, OpenCV Sample - face-detection, OpenCV Sample - image-manipulations, OpenCV Sample - native-activity) — eleven projects in total. (Workspace screenshot omitted.)


OpenCV Manager - 2.4.10

The official demos need the OpenCV Manager app in order to run. It is responsible for matching the handset's hardware and loading the right native library files, and it communicates with client apps through a bound service.

If it is not installed, running a demo brings up the prompt: "OpenCV Manager package was not found! Try to install it?" If the demo instead links the library directly, the prompt never appears — this project does nothing but load the libraries.
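The standard way a demo deals with this is through OpenCVLoader. Below is a minimal sketch of the two initialization paths — asynchronous loading through OpenCV Manager versus static loading of a library bundled with the APK; the Activity name and log tag are illustrative, and any OPENCV_VERSION_* constant (or a plain version string) can be passed to initAsync():

import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;

import android.app.Activity;
import android.util.Log;

public class InitDemoActivity extends Activity {

    // Invoked once the Manager (or the static loader) has finished loading the native libraries.
    private final BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            if (status == LoaderCallbackInterface.SUCCESS) {
                Log.i("InitDemo", "OpenCV loaded, safe to call OpenCV APIs now");
            } else {
                super.onManagerConnected(status); // default handling shows the install dialog
            }
        }
    };

    @Override
    protected void onResume() {
        super.onResume();
        // Path 1: ask OpenCV Manager to load the libraries (pops the install dialog if absent).
        // OPENCV_VERSION_2_4_10 is assumed to exist in the 2.4.10 SDK; "2.4.10" also works.
        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_10, this, mLoaderCallback);

        // Path 2 (alternative): load a library bundled with the APK, no Manager needed.
        // if (OpenCVLoader.initDebug()) mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
    }
}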




The official documentation explains this arrangement. (Figures omitted.)


The library-loading code is as follows:

public class BinderConnector { // loads the engine's native libraries and exposes its IBinder
	private static boolean mIsReady = false;
	private MarketConnector mMarket;

	static {
		try {
			System.loadLibrary("OpenCVEngine");
			System.loadLibrary("OpenCVEngine_jni");
			mIsReady = true;
		} catch (UnsatisfiedLinkError localUnsatisfiedLinkError) {
			mIsReady = false;
			localUnsatisfiedLinkError.printStackTrace();
		}
	}

	public BinderConnector(MarketConnector paramMarketConnector) {
		this.mMarket = paramMarketConnector;
	}

	private native void Final();

	private native boolean Init(MarketConnector paramMarketConnector);

	public native IBinder Connect();

	public boolean Disconnect() {
		if (mIsReady)
			Final();
		return mIsReady;
	}

	public boolean Init() {
		boolean bool = false;
		if (mIsReady)
			bool = Init(this.mMarket);
		return bool;
	}
}
public class HardwareDetector { // detects the device's CPU architecture, features and platform
	public static final int ARCH_ARMv5 = 0x4000000;
	public static final int ARCH_ARMv6 = 0x8000000;
	public static final int ARCH_ARMv7 = 0x10000000;
	public static final int ARCH_ARMv8 = 0x20000000;
	public static final int ARCH_MIPS = 0x40000000;
	public static final int ARCH_UNKNOWN = -1;
	public static final int ARCH_X64 = 0x2000000;
	public static final int ARCH_X86 = 0x1000000;
	public static final int FEATURES_HAS_GPU = 65536;
	public static final int FEATURES_HAS_NEON = 8;
	public static final int FEATURES_HAS_NEON2 = 22;
	public static final int FEATURES_HAS_SSE = 1;
	public static final int FEATURES_HAS_SSE2 = 2;
	public static final int FEATURES_HAS_SSE3 = 4;
	public static final int FEATURES_HAS_VFPv3 = 2;
	public static final int FEATURES_HAS_VFPv3d16 = 1;
	public static final int FEATURES_HAS_VFPv4 = 4;
	public static final int PLATFORM_TEGRA = 1;
	public static final int PLATFORM_TEGRA2 = 2;
	public static final int PLATFORM_TEGRA3 = 3;
	public static final int PLATFORM_TEGRA4 = 5;
	public static final int PLATFORM_TEGRA4i = 4;
	public static final int PLATFORM_TEGRA5 = 6;
	public static final int PLATFORM_UNKNOWN = -1;
	public static boolean mIsReady = false;

	static {
		try {
			System.loadLibrary("OpenCVEngine");
			System.loadLibrary("OpenCVEngine_jni");
			mIsReady = true;
		} catch (UnsatisfiedLinkError localUnsatisfiedLinkError) {
			mIsReady = false;
			localUnsatisfiedLinkError.printStackTrace();
		}
	}

	public static native int DetectKnownPlatforms();

	public static native int GetCpuID();

	public static native String GetPlatformName();

	public static native int GetProcessorCount();
}
public class OpenCVLibraryInfo {
	private String mLibraryList;
	private long mNativeObj;
	private String mPackageName;
	private String mVersionName;

	public OpenCVLibraryInfo(String paramString) {
		mNativeObj = open(paramString + "/libopencv_info.so");

		if (this.mNativeObj == 0L)
			return;
		this.mPackageName = getPackageName(this.mNativeObj);
		this.mLibraryList = getLibraryList(this.mNativeObj);
		this.mVersionName = getVersionName(this.mNativeObj);
		close(this.mNativeObj);
	}

	private native void close(long paramLong);

	private native String getLibraryList(long paramLong);

	private native String getPackageName(long paramLong);

	private native String getVersionName(long paramLong);

	private native long open(String paramString);

	public String libraryList() {
		return this.mLibraryList;
	}

	public String packageName() {
		return this.mPackageName;
	}

	public boolean status() {
		if (mNativeObj == 0L || mLibraryList.length() == 0)
			return false;

		return true;
	}

	public String versionName() {
		return this.mVersionName;
	}
}
// The key service: this is the service the demos bind to; it matches the handset and loads the corresponding libraries.
public class OpenCVEngineService extends Service {
	private static final String TAG = "OpenCVEngine/Service";
	private IBinder mEngineInterface = null;
	private MarketConnector mMarket;
	private BinderConnector mNativeBinder;

	public void OnDestroy() {
		Log.i("OpenCVEngine/Service", "OpenCV Engine service destruction");
		this.mNativeBinder.Disconnect();
	}

	public IBinder onBind(Intent paramIntent) {
		Log.i("OpenCVEngine/Service", "Service onBind called for intent "
				+ paramIntent.toString());
		return this.mEngineInterface;
	}

	public void onCreate() {
		Log.i("OpenCVEngine/Service", "Service starting");
		super.onCreate();
		Log.i("OpenCVEngine/Service", "Engine binder component creating");
		this.mMarket = new MarketConnector(getBaseContext());
		this.mNativeBinder = new BinderConnector(this.mMarket);
		if (this.mNativeBinder.Init()) {
			this.mEngineInterface = this.mNativeBinder.Connect();
			Log.i("OpenCVEngine/Service", "Service started successfully");
		}
		else {
			 Log.e("OpenCVEngine/Service",
			 "Cannot initialize native part of OpenCV Manager!");
			 Log.e("OpenCVEngine/Service", "Using stub instead");
			return;
		}
		
		this.mEngineInterface = new OpenCVEngineInterface(this);
	}

	public boolean onUnbind(Intent paramIntent) {
		Log.i("OpenCVEngine/Service", "Service onUnbind called for intent "
				+ paramIntent.toString());
		return true;
	}
}
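Client applications never start this service by class name; OpenCVLoader.initAsync(), via AsyncServiceHelper, binds to it by the intent action org.opencv.engine.BIND and talks to it over the OpenCVEngineInterface AIDL. A minimal sketch of that binding (the AIDL method name getLibPathByVersion is taken from the 2.4.x sources and should be treated as illustrative):

import org.opencv.engine.OpenCVEngineInterface;

import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.os.IBinder;
import android.os.RemoteException;
import android.util.Log;

public class EngineClient {
    // Bind to OpenCV Manager and ask where the native libraries for this device live.
    public static boolean bind(Context context) {
        Intent intent = new Intent("org.opencv.engine.BIND");
        return context.bindService(intent, new ServiceConnection() {
            @Override
            public void onServiceConnected(ComponentName name, IBinder binder) {
                OpenCVEngineInterface engine = OpenCVEngineInterface.Stub.asInterface(binder);
                try {
                    // The Manager resolves the device-specific library path;
                    // AsyncServiceHelper then System.load()s every library listed for it.
                    String libPath = engine.getLibPathByVersion("2.4");
                    Log.i("EngineClient", "Native libraries live in " + libPath);
                } catch (RemoteException e) {
                    Log.e("EngineClient", "OpenCV Manager call failed", e);
                }
            }
            @Override
            public void onServiceDisconnected(ComponentName name) { }
        }, Context.BIND_AUTO_CREATE);
    }
}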
The project's AndroidManifest.xml registers OpenCVEngineService under that intent action (the original XML listing is not reproduced here).

OpenCV Tutorial 1 - Camera Preview

The demo is a single file that previews the camera, and it offers two approaches: driving the camera from the Java layer and driving it from the native layer.
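A minimal sketch of the Java-path preview Activity (the layout and view ids are hypothetical; the real demo also provides a menu for switching to the native camera view):

import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Mat;

import android.app.Activity;
import android.os.Bundle;

public class PreviewActivity extends Activity implements CvCameraViewListener2 {

    private CameraBridgeViewBase mCameraView;

    private final BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            if (status == LoaderCallbackInterface.SUCCESS)
                mCameraView.enableView();   // start the camera only after the libraries are loaded
            else
                super.onManagerConnected(status);
        }
    };

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_preview);                            // hypothetical layout
        mCameraView = (CameraBridgeViewBase) findViewById(R.id.camera_view);  // hypothetical id
        mCameraView.setCvCameraViewListener(this);
    }

    @Override
    protected void onResume() {
        super.onResume();
        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_10, this, mLoaderCallback);
    }

    @Override
    protected void onPause() {
        super.onPause();
        if (mCameraView != null) mCameraView.disableView();
    }

    public void onCameraViewStarted(int width, int height) { }

    public void onCameraViewStopped() { }

    public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
        return inputFrame.rgba();   // hand the RGBA frame straight back for display
    }
}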

(Screenshot omitted.)

The key views in the layout are the OpenCV camera views: org.opencv.android.JavaCameraView for the Java path and org.opencv.android.NativeCameraView for the native path.

OpenCV Tutorial 2 - Mixed Processing

The demo consists of one Java file and one C++ file and implements four mixed-processing display modes.

(Screenshots omitted.)

The native code is as follows:

JNIEXPORT void JNICALL Java_org_opencv_samples_tutorial2_Tutorial2Activity_FindFeatures(JNIEnv*, jobject, jlong addrGray, jlong addrRgba)
{
    Mat& mGr  = *(Mat*)addrGray;
    Mat& mRgb = *(Mat*)addrRgba;
    vector<KeyPoint> v;

    FastFeatureDetector detector(50);
    detector.detect(mGr, v);
    for( unsigned int i = 0; i < v.size(); i++ )
    {
        const KeyPoint& kp = v[i];
        circle(mRgb, Point(kp.pt.x, kp.pt.y), 10, Scalar(255,0,0,255));
    }
}
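On the Java side the demo declares this function as a native method and calls it from onCameraFrame() when the feature-detection mode is selected. A sketch, assuming the demo's JNI library is named mixed_sample as in its Android.mk (in the real demo the native method lives in Tutorial2Activity, matching the JNI symbol above):

import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.core.Mat;

public abstract class MixedProcessingSketch implements CvCameraViewListener2 {

    static {
        System.loadLibrary("mixed_sample"); // the demo's own JNI library (name assumed from its Android.mk)
    }

    public native void FindFeatures(long matAddrGray, long matAddrRgba);

    @Override
    public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
        Mat rgba = inputFrame.rgba();
        Mat gray = inputFrame.gray();
        // C++ detects FAST keypoints on the gray image and draws red circles onto the RGBA frame.
        FindFeatures(gray.getNativeObjAddr(), rgba.getNativeObjAddr());
        return rgba;
    }
}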

The layout again uses an org.opencv.android.JavaCameraView.


OpenCV Tutorial 3 - Camera Control

The demo consists of two Java files and demonstrates camera control: colour effects and preview resolution.

(Screenshot omitted.)

Key code — the layout uses the custom view Tutorial3View (a subclass of JavaCameraView), shown below:
public class Tutorial3View extends JavaCameraView implements PictureCallback {

    private static final String TAG = "Sample::Tutorial3View";
    private String mPictureFileName;

    public Tutorial3View(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    public List<String> getEffectList() {
        return mCamera.getParameters().getSupportedColorEffects();
    }

    public boolean isEffectSupported() {
        return (mCamera.getParameters().getColorEffect() != null);
    }

    public String getEffect() {
        return mCamera.getParameters().getColorEffect();
    }

    public void setEffect(String effect) {
        Camera.Parameters params = mCamera.getParameters();
        params.setColorEffect(effect);
        mCamera.setParameters(params);
    }

    public List<Size> getResolutionList() {
        return mCamera.getParameters().getSupportedPreviewSizes();
    }

    public void setResolution(Size resolution) {
        disconnectCamera();
        mMaxHeight = resolution.height;
        mMaxWidth = resolution.width;
        connectCamera(getWidth(), getHeight());
    }

    public Size getResolution() {
        return mCamera.getParameters().getPreviewSize();
    }

    public void takePicture(final String fileName) {
        Log.i(TAG, "Taking picture");
        this.mPictureFileName = fileName;
        // Postview and jpeg are sent in the same buffers if the queue is not empty when performing a capture.
        // Clear up buffers to avoid mCamera.takePicture to be stuck because of a memory issue
        mCamera.setPreviewCallback(null);

        // PictureCallback is implemented by the current class
        mCamera.takePicture(null, null, this);
    }

    @Override
    public void onPictureTaken(byte[] data, Camera camera) {
        Log.i(TAG, "Saving a bitmap to file");
        // The camera preview was automatically stopped. Start it again.
        mCamera.startPreview();
        mCamera.setPreviewCallback(this);

        // Write the image in a file (in jpeg format)
        try {
            FileOutputStream fos = new FileOutputStream(mPictureFileName);

            fos.write(data);
            fos.close();

        } catch (java.io.IOException e) {
            Log.e("PictureDemo", "Exception in photoCallback", e);
        }

    }
}
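A sketch of how an Activity might drive these methods; in the real demo the colour effects and resolutions are offered through dynamically built option menus, so the concrete choices below are purely illustrative:

import java.util.List;

import android.hardware.Camera;
import android.os.Environment;

public class CameraControlSketch {
    // Illustrative only: apply a colour effect, change the preview resolution, take a JPEG.
    public static void demo(Tutorial3View view) {
        if (view.isEffectSupported()) {
            List<String> effects = view.getEffectList();
            if (effects.contains("mono"))
                view.setEffect("mono");                  // monochrome preview
        }

        List<Camera.Size> sizes = view.getResolutionList();
        if (!sizes.isEmpty())
            view.setResolution(sizes.get(sizes.size() - 1)); // pick one of the supported preview sizes

        String file = Environment.getExternalStorageDirectory().getPath() + "/sample_picture.jpg";
        view.takePicture(file);                          // the result is written in onPictureTaken()
    }
}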


OpenCV Sample - 4 puzzle

The demo consists of two Java files and scrambles the camera output into a sliding-tile (15) puzzle.

(Screenshot omitted.)

The main code is as follows:

public class Puzzle15Processor { // turns each camera frame into a 15-puzzle

    private static final int GRID_SIZE = 4;
    private static final int GRID_AREA = GRID_SIZE * GRID_SIZE;
    private static final int GRID_EMPTY_INDEX = GRID_AREA - 1;
    private static final String TAG = "Puzzle15Processor";
    private static final Scalar GRID_EMPTY_COLOR = new Scalar(0x33, 0x33, 0x33, 0xFF);

    private int[]   mIndexes;
    private int[]   mTextWidths;
    private int[]   mTextHeights;

    private Mat mRgba15;
    private Mat[] mCells15;
    private boolean mShowTileNumbers = true;

    public Puzzle15Processor() {
        mTextWidths = new int[GRID_AREA];
        mTextHeights = new int[GRID_AREA];

        mIndexes = new int [GRID_AREA];

        for (int i = 0; i < GRID_AREA; i++)
            mIndexes[i] = i;
    }

    /* this method is intended to make processor prepared for a new game */
    public synchronized void prepareNewGame() {
        do {
            shuffle(mIndexes);
        } while (!isPuzzleSolvable());
    }

    /* This method is to make the processor know the size of the frames that
     * will be delivered via puzzleFrame.
     * If the frames will be different size - then the result is unpredictable
     */
    public synchronized void prepareGameSize(int width, int height) {
        mRgba15 = new Mat(height, width, CvType.CV_8UC4);
        mCells15 = new Mat[GRID_AREA];

        for (int i = 0; i < GRID_SIZE; i++) {
            for (int j = 0; j < GRID_SIZE; j++) {
                int k = i * GRID_SIZE + j;
                mCells15[k] = mRgba15.submat(i * height / GRID_SIZE, (i + 1) * height / GRID_SIZE, j * width / GRID_SIZE, (j + 1) * width / GRID_SIZE);
            }
        }

        for (int i = 0; i < GRID_AREA; i++) {
            Size s = Core.getTextSize(Integer.toString(i + 1), 3/* CV_FONT_HERSHEY_COMPLEX */, 1, 2, null);
            mTextHeights[i] = (int) s.height;
            mTextWidths[i] = (int) s.width;
        }
    }

    /* this method to be called from the outside. it processes the frame and shuffles
     * the tiles as specified by mIndexes array
     */
    public synchronized Mat puzzleFrame(Mat inputPicture) {
        Mat[] cells = new Mat[GRID_AREA];
        int rows = inputPicture.rows();
        int cols = inputPicture.cols();

        rows = rows - rows%4;
        cols = cols - cols%4;

        for (int i = 0; i < GRID_SIZE; i++) {
            for (int j = 0; j < GRID_SIZE; j++) {
                int k = i * GRID_SIZE + j;
                cells[k] = inputPicture.submat(i * inputPicture.rows() / GRID_SIZE, (i + 1) * inputPicture.rows() / GRID_SIZE, j * inputPicture.cols()/ GRID_SIZE, (j + 1) * inputPicture.cols() / GRID_SIZE);
            }
        }

        rows = rows - rows%4;
        cols = cols - cols%4;

        // copy shuffled tiles
        for (int i = 0; i < GRID_AREA; i++) {
            int idx = mIndexes[i];
            if (idx == GRID_EMPTY_INDEX)
                mCells15[i].setTo(GRID_EMPTY_COLOR);
            else {
                cells[idx].copyTo(mCells15[i]);
                if (mShowTileNumbers) {
                    Core.putText(mCells15[i], Integer.toString(1 + idx), new Point((cols / GRID_SIZE - mTextWidths[idx]) / 2,
                            (rows / GRID_SIZE + mTextHeights[idx]) / 2), 3/* CV_FONT_HERSHEY_COMPLEX */, 1, new Scalar(255, 0, 0, 255), 2);
                }
            }
        }

        for (int i = 0; i < GRID_AREA; i++)
            cells[i].release();

        drawGrid(cols, rows, mRgba15);

        return mRgba15;
    }

    public void toggleTileNumbers() {
        mShowTileNumbers = !mShowTileNumbers;
    }

    public void deliverTouchEvent(int x, int y) {
        int rows = mRgba15.rows();
        int cols = mRgba15.cols();

        int row = (int) Math.floor(y * GRID_SIZE / rows);
        int col = (int) Math.floor(x * GRID_SIZE / cols);

        if (row < 0 || row >= GRID_SIZE || col < 0 || col >= GRID_SIZE) {
            Log.e(TAG, "It is not expected to get touch event outside of picture");
            return ;
        }

        int idx = row * GRID_SIZE + col;
        int idxtoswap = -1;

        // left
        if (idxtoswap < 0 && col > 0)
            if (mIndexes[idx - 1] == GRID_EMPTY_INDEX)
                idxtoswap = idx - 1;
        // right
        if (idxtoswap < 0 && col < GRID_SIZE - 1)
            if (mIndexes[idx + 1] == GRID_EMPTY_INDEX)
                idxtoswap = idx + 1;
        // top
        if (idxtoswap < 0 && row > 0)
            if (mIndexes[idx - GRID_SIZE] == GRID_EMPTY_INDEX)
                idxtoswap = idx - GRID_SIZE;
        // bottom
        if (idxtoswap < 0 && row < GRID_SIZE - 1)
            if (mIndexes[idx + GRID_SIZE] == GRID_EMPTY_INDEX)
                idxtoswap = idx + GRID_SIZE;

        // swap
        if (idxtoswap >= 0) {
            synchronized (this) {
                int touched = mIndexes[idx];
                mIndexes[idx] = mIndexes[idxtoswap];
                mIndexes[idxtoswap] = touched;
            }
        }
    }

    private void drawGrid(int cols, int rows, Mat drawMat) {
        for (int i = 1; i < GRID_SIZE; i++) {
            Core.line(drawMat, new Point(0, i * rows / GRID_SIZE), new Point(cols, i * rows / GRID_SIZE), new Scalar(0, 255, 0, 255), 3);
            Core.line(drawMat, new Point(i * cols / GRID_SIZE, 0), new Point(i * cols / GRID_SIZE, rows), new Scalar(0, 255, 0, 255), 3);
        }
    }

    private static void shuffle(int[] array) {
        for (int i = array.length; i > 1; i--) {
            int temp = array[i - 1];
            int randIx = (int) (Math.random() * i);
            array[i - 1] = array[randIx];
            array[randIx] = temp;
        }
    }

    private boolean isPuzzleSolvable() {

        int sum = 0;
        for (int i = 0; i < GRID_AREA; i++) {
            if (mIndexes[i] == GRID_EMPTY_INDEX)
                sum += (i / GRID_SIZE) + 1;
            else {
                int smaller = 0;
                for (int j = i + 1; j < GRID_AREA; j++) {
                    if (mIndexes[j] < mIndexes[i])
                        smaller++;
                }
                sum += smaller;
            }
        }
        return sum % 2 == 0;
    }
}
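A sketch of how the sample's Activity wires Puzzle15Processor into the OpenCV camera callbacks (class and method names outside Puzzle15Processor are illustrative):

import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.core.Mat;

public abstract class PuzzleSketch implements CvCameraViewListener2 {

    private final Puzzle15Processor mPuzzle = new Puzzle15Processor();

    @Override
    public void onCameraViewStarted(int width, int height) {
        mPuzzle.prepareNewGame();                 // shuffle until a solvable layout is found
        mPuzzle.prepareGameSize(width, height);   // pre-cut the output Mat into 4x4 sub-mats
    }

    @Override
    public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
        return mPuzzle.puzzleFrame(inputFrame.rgba()); // copy tiles in shuffled order and draw the grid
    }

    // Called from the Activity's onTouch(); x/y are view coordinates mapped onto the frame.
    protected void onTileTouched(int x, int y) {
        mPuzzle.deliverTouchEvent(x, y);
    }
}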


OpenCV Sample - camera-calibration

The demo consists of four Java files and implements camera calibration.
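The calibration itself rests on the Calib3d Java bindings. The following generic sketch of chessboard detection and calibration illustrates that API; it is not the sample's own code, and the 9x6 pattern size is an assumption:

import java.util.ArrayList;
import java.util.List;

import org.opencv.calib3d.Calib3d;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Size;

public class CalibrationSketch {
    // One MatOfPoint2f of detected corners per captured frame.
    private final List<Mat> mImagePoints = new ArrayList<Mat>();

    // Try to find a 9x6 chessboard in a grayscale frame and keep the corners if found.
    public boolean addFrame(Mat grayFrame) {
        MatOfPoint2f corners = new MatOfPoint2f();
        boolean found = Calib3d.findChessboardCorners(grayFrame, new Size(9, 6), corners);
        if (found)
            mImagePoints.add(corners);
        return found;
    }

    // Run calibration once enough frames were captured; objectPoints holds the known 3D
    // board coordinates, one Mat per captured frame (its construction is omitted here).
    public double calibrate(List<Mat> objectPoints, Size imageSize, Mat cameraMatrix, Mat distCoeffs) {
        List<Mat> rvecs = new ArrayList<Mat>();
        List<Mat> tvecs = new ArrayList<Mat>();
        return Calib3d.calibrateCamera(objectPoints, mImagePoints, imageSize,
                cameraMatrix, distCoeffs, rvecs, tvecs);
    }
}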

(Screenshot omitted.)

The layout uses an org.opencv.android.JavaCameraView.


OpenCV Sample - color-blob-detection
The demo consists of two Java files and implements colour-blob detection.

(Screenshot omitted.)

The layout uses an org.opencv.android.JavaCameraView.
public class ColorBlobDetector { // detects blobs of a selected HSV colour
    // Lower and Upper bounds for range checking in HSV color space
    private Scalar mLowerBound = new Scalar(0);
    private Scalar mUpperBound = new Scalar(0);
    // Minimum contour area in percent for contours filtering
    private static double mMinContourArea = 0.1;
    // Color radius for range checking in HSV color space
    private Scalar mColorRadius = new Scalar(25,50,50,0);
    private Mat mSpectrum = new Mat();
    private List<MatOfPoint> mContours = new ArrayList<MatOfPoint>();

    // Cache
    Mat mPyrDownMat = new Mat();
    Mat mHsvMat = new Mat();
    Mat mMask = new Mat();
    Mat mDilatedMask = new Mat();
    Mat mHierarchy = new Mat();

    public void setColorRadius(Scalar radius) {
        mColorRadius = radius;
    }

    public void setHsvColor(Scalar hsvColor) {
        double minH = (hsvColor.val[0] >= mColorRadius.val[0]) ? hsvColor.val[0]-mColorRadius.val[0] : 0;
        double maxH = (hsvColor.val[0]+mColorRadius.val[0] <= 255) ? hsvColor.val[0]+mColorRadius.val[0] : 255;

        mLowerBound.val[0] = minH;
        mUpperBound.val[0] = maxH;

        mLowerBound.val[1] = hsvColor.val[1] - mColorRadius.val[1];
        mUpperBound.val[1] = hsvColor.val[1] + mColorRadius.val[1];

        mLowerBound.val[2] = hsvColor.val[2] - mColorRadius.val[2];
        mUpperBound.val[2] = hsvColor.val[2] + mColorRadius.val[2];

        mLowerBound.val[3] = 0;
        mUpperBound.val[3] = 255;

        Mat spectrumHsv = new Mat(1, (int)(maxH-minH), CvType.CV_8UC3);

        for (int j = 0; j < maxH-minH; j++) {
            byte[] tmp = {(byte)(minH+j), (byte)255, (byte)255};
            spectrumHsv.put(0, j, tmp);
        }

        Imgproc.cvtColor(spectrumHsv, mSpectrum, Imgproc.COLOR_HSV2RGB_FULL, 4);
    }

    public Mat getSpectrum() {
        return mSpectrum;
    }

    public void setMinContourArea(double area) {
        mMinContourArea = area;
    }

    public void process(Mat rgbaImage) {
        Imgproc.pyrDown(rgbaImage, mPyrDownMat);
        Imgproc.pyrDown(mPyrDownMat, mPyrDownMat);

        Imgproc.cvtColor(mPyrDownMat, mHsvMat, Imgproc.COLOR_RGB2HSV_FULL);

        Core.inRange(mHsvMat, mLowerBound, mUpperBound, mMask);
        Imgproc.dilate(mMask, mDilatedMask, new Mat());

        List<MatOfPoint> contours = new ArrayList<MatOfPoint>();

        Imgproc.findContours(mDilatedMask, contours, mHierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

        // Find max contour area
        double maxArea = 0;
        Iterator<MatOfPoint> each = contours.iterator();
        while (each.hasNext()) {
            MatOfPoint wrapper = each.next();
            double area = Imgproc.contourArea(wrapper);
            if (area > maxArea)
                maxArea = area;
        }

        // Filter contours by area and resize to fit the original image size
        mContours.clear();
        each = contours.iterator();
        while (each.hasNext()) {
            MatOfPoint contour = each.next();
            if (Imgproc.contourArea(contour) > mMinContourArea*maxArea) {
                Core.multiply(contour, new Scalar(4,4), contour);
                mContours.add(contour);
            }
        }
    }

    public List getContours() {
        return mContours;
    }
}
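A sketch of the way the sample's Activity typically drives the detector: average the HSV colour of a touched region, hand it to setHsvColor(), then process every frame and draw the resulting contours (the helper class and method names are illustrative):

import java.util.List;

import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

public abstract class ColorBlobSketch implements CvCameraViewListener2 {

    private final ColorBlobDetector mDetector = new ColorBlobDetector();
    private final Scalar mContourColor = new Scalar(255, 0, 0, 255);
    private boolean mColorSelected = false;

    // Called from onTouch(): average the touched region in HSV and track that colour.
    protected void selectColor(Mat rgbaFrame, Rect touchedRect) {
        Mat touchedRgba = rgbaFrame.submat(touchedRect);
        Mat touchedHsv = new Mat();
        Imgproc.cvtColor(touchedRgba, touchedHsv, Imgproc.COLOR_RGB2HSV_FULL);

        Scalar hsvColor = Core.sumElems(touchedHsv);
        int pointCount = touchedRect.width * touchedRect.height;
        for (int i = 0; i < hsvColor.val.length; i++)
            hsvColor.val[i] /= pointCount;

        mDetector.setHsvColor(hsvColor);
        mColorSelected = true;
    }

    @Override
    public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
        Mat rgba = inputFrame.rgba();
        if (mColorSelected) {
            mDetector.process(rgba);
            List<MatOfPoint> contours = mDetector.getContours();
            Imgproc.drawContours(rgba, contours, -1, mContourColor);
        }
        return rgba;
    }
}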


OpenCV Sample - face-detection
The demo consists of two Java files and two C++ files and implements face detection.

(Screenshot omitted.)

The layout uses an org.opencv.android.JavaCameraView.
public class DetectionBasedTracker
{
    public DetectionBasedTracker(String cascadeName, int minFaceSize) {
        mNativeObj = nativeCreateObject(cascadeName, minFaceSize);
    }

    public void start() {
        nativeStart(mNativeObj);
    }

    public void stop() {
        nativeStop(mNativeObj);
    }

    public void setMinFaceSize(int size) {
        nativeSetFaceSize(mNativeObj, size);
    }

    public void detect(Mat imageGray, MatOfRect faces) {
        nativeDetect(mNativeObj, imageGray.getNativeObjAddr(), faces.getNativeObjAddr());
    }

    public void release() {
        nativeDestroyObject(mNativeObj);
        mNativeObj = 0;
    }

    private long mNativeObj = 0;

    private static native long nativeCreateObject(String cascadeName, int minFaceSize);
    private static native void nativeDestroyObject(long thiz);
    private static native void nativeStart(long thiz);
    private static native void nativeStop(long thiz);
    private static native void nativeSetFaceSize(long thiz, int size);
    private static native void nativeDetect(long thiz, long inputImage, long faces);
}
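A sketch of how this wrapper is used: copy the LBP cascade out of the APK's raw resources, create and start the tracker, then detect on each gray frame and outline the faces. The resource id and file name are illustrative, and the wrapper's native library (loaded elsewhere with System.loadLibrary) is assumed to be the demo's detection_based_tracker:

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;

import android.content.Context;

public class FaceDetectionSketch {

    private DetectionBasedTracker mTracker;

    // Copy the cascade file from res/raw into app storage and start the native tracker.
    public void init(Context context, int rawCascadeResId) throws IOException {
        InputStream is = context.getResources().openRawResource(rawCascadeResId); // e.g. the LBP frontal-face cascade
        File cascadeFile = new File(context.getDir("cascade", Context.MODE_PRIVATE),
                "lbpcascade_frontalface.xml");
        FileOutputStream os = new FileOutputStream(cascadeFile);
        byte[] buffer = new byte[4096];
        int bytesRead;
        while ((bytesRead = is.read(buffer)) != -1)
            os.write(buffer, 0, bytesRead);
        is.close();
        os.close();

        mTracker = new DetectionBasedTracker(cascadeFile.getAbsolutePath(), 0);
        mTracker.start();
    }

    // Run detection on a gray frame and outline the faces on the RGBA frame.
    public Mat detectAndDraw(Mat gray, Mat rgba) {
        MatOfRect faces = new MatOfRect();
        mTracker.detect(gray, faces);
        for (Rect face : faces.toArray())
            Core.rectangle(rgba, face.tl(), face.br(), new Scalar(0, 255, 0, 255), 3);
        return rgba;
    }
}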

OpenCV Sample - image-manipulations
The demo consists of a single Java file and implements a set of image manipulations.

(Screenshot omitted.)

The layout uses an org.opencv.android.JavaCameraView.
The Activity implements the CvCameraViewListener2 interface and manipulates each frame inside onCameraFrame().
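A minimal onCameraFrame() in the same spirit (the demo itself offers several effects, e.g. histograms, Canny edges, sepia and pixelization) could look like this:

import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.core.Mat;
import org.opencv.imgproc.Imgproc;

public abstract class ManipulationSketch implements CvCameraViewListener2 {

    private final Mat mEdges = new Mat();

    @Override
    public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
        // Take the gray frame, run Canny edge detection, then convert back to RGBA for display.
        Imgproc.Canny(inputFrame.gray(), mEdges, 80, 100);
        Imgproc.cvtColor(mEdges, mEdges, Imgproc.COLOR_GRAY2RGBA, 4);
        return mEdges;
    }
}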

OpenCV Sample - native-activity

The demo consists of one Java file and one C++ file; the native code grabs camera frames and draws them to the window directly.

(Screenshot omitted.)

The native code is as follows:

struct Engine
{
    android_app* app;
    cv::Ptr<cv::VideoCapture> capture;
};

static cv::Size calc_optimal_camera_resolution(const char* supported, int width, int height)
{
    int frame_width = 0;
    int frame_height = 0;

    size_t prev_idx = 0;
    size_t idx = 0;
    float min_diff = FLT_MAX;

    do
    {
        int tmp_width;
        int tmp_height;

        prev_idx = idx;
        while ((supported[idx] != '\0') && (supported[idx] != ','))
            idx++;

        sscanf(&supported[prev_idx], "%dx%d", &tmp_width, &tmp_height);

        int w_diff = width - tmp_width;
        int h_diff = height - tmp_height;
        if ((h_diff >= 0) && (w_diff >= 0))
        {
            if ((h_diff <= min_diff) && (tmp_height <= 720))
            {
                frame_width = tmp_width;
                frame_height = tmp_height;
                min_diff = h_diff;
            }
        }

        idx++; // to skip comma symbol

    } while(supported[idx-1] != '\0');

    return cv::Size(frame_width, frame_height);
}

static void engine_draw_frame(Engine* engine, const cv::Mat& frame)
{
    if (engine->app->window == NULL)
        return; // No window.

    ANativeWindow_Buffer buffer;
    if (ANativeWindow_lock(engine->app->window, &buffer, NULL) < 0)
    {
        LOGW("Unable to lock window buffer");
        return;
    }

    int32_t* pixels = (int32_t*)buffer.bits;

    int left_indent = (buffer.width-frame.cols)/2;
    int top_indent = (buffer.height-frame.rows)/2;

    if (top_indent > 0)
    {
        memset(pixels, 0, top_indent*buffer.stride*sizeof(int32_t));
        pixels += top_indent*buffer.stride;
    }

    for (int yy = 0; yy < frame.rows; yy++)
    {
        if (left_indent > 0)
        {
            memset(pixels, 0, left_indent*sizeof(int32_t));
            memset(pixels+left_indent+frame.cols, 0, (buffer.stride-frame.cols-left_indent)*sizeof(int32_t));
        }
        int32_t* line = pixels + left_indent;
        size_t line_size = frame.cols*4*sizeof(unsigned char);
        memcpy(line, frame.ptr(yy), line_size);
        // go to next line
        pixels += buffer.stride;
    }
    ANativeWindow_unlockAndPost(engine->app->window);
}

static void engine_handle_cmd(android_app* app, int32_t cmd)
{
    Engine* engine = (Engine*)app->userData;
    switch (cmd)
    {
        case APP_CMD_INIT_WINDOW:
            if (app->window != NULL)
            {
                LOGI("APP_CMD_INIT_WINDOW");

                engine->capture = new cv::VideoCapture(0);

                union {double prop; const char* name;} u;
                u.prop = engine->capture->get(CV_CAP_PROP_SUPPORTED_PREVIEW_SIZES_STRING);

                int view_width = ANativeWindow_getWidth(app->window);
                int view_height = ANativeWindow_getHeight(app->window);

                cv::Size camera_resolution;
                if (u.name)
                    camera_resolution = calc_optimal_camera_resolution(u.name, 640, 480);
                else
                {
                    LOGE("Cannot get supported camera camera_resolutions");
                    camera_resolution = cv::Size(ANativeWindow_getWidth(app->window),
                                          ANativeWindow_getHeight(app->window));
                }

                if ((camera_resolution.width != 0) && (camera_resolution.height != 0))
                {
                    engine->capture->set(CV_CAP_PROP_FRAME_WIDTH, camera_resolution.width);
                    engine->capture->set(CV_CAP_PROP_FRAME_HEIGHT, camera_resolution.height);
                }

                float scale = std::min((float)view_width/camera_resolution.width,
                                       (float)view_height/camera_resolution.height);

                if (ANativeWindow_setBuffersGeometry(app->window, (int)(view_width/scale),
                    int(view_height/scale), WINDOW_FORMAT_RGBA_8888) < 0)
                {
                    LOGE("Cannot set pixel format!");
                    return;
                }

                LOGI("Camera initialized at resolution %dx%d", camera_resolution.width, camera_resolution.height);
            }
            break;
        case APP_CMD_TERM_WINDOW:
            LOGI("APP_CMD_TERM_WINDOW");

            engine->capture->release();
            break;
    }
}

void android_main(android_app* app)
{
    Engine engine;

    // Make sure glue isn't stripped.
    app_dummy();

    size_t engine_size = sizeof(engine); // for Eclipse CDT parser
    memset((void*)&engine, 0, engine_size);
    app->userData = &engine;
    app->onAppCmd = engine_handle_cmd;
    engine.app = app;

    float fps = 0;
    cv::Mat drawing_frame;
    std::queue<int64> time_queue;

    // loop waiting for stuff to do.
    while (1)
    {
        // Read all pending events.
        int ident;
        int events;
        android_poll_source* source;

        // Process system events
        while ((ident=ALooper_pollAll(0, NULL, &events, (void**)&source)) >= 0)
        {
            // Process this event.
            if (source != NULL)
            {
                source->process(app, source);
            }

            // Check if we are exiting.
            if (app->destroyRequested != 0)
            {
                LOGI("Engine thread destroy requested!");
                return;
            }
        }

        int64 then;
        int64 now = cv::getTickCount();
        time_queue.push(now);

        // Capture frame from camera and draw it
        if (!engine.capture.empty())
        {
            if (engine.capture->grab())
                engine.capture->retrieve(drawing_frame, CV_CAP_ANDROID_COLOR_FRAME_RGBA);

             char buffer[256];
             sprintf(buffer, "Display performance: %dx%d @ %.3f", drawing_frame.cols, drawing_frame.rows, fps);
             cv::putText(drawing_frame, std::string(buffer), cv::Point(8,64),
                         cv::FONT_HERSHEY_COMPLEX_SMALL, 1, cv::Scalar(0,255,0,255));
             engine_draw_frame(&engine, drawing_frame);
        }

        if (time_queue.size() >= 2)
            then = time_queue.front();
        else
            then = 0;

        if (time_queue.size() >= 25)
            time_queue.pop();

        fps = time_queue.size() * (float)cv::getTickFrequency() / (now-then);
    }
}

OpenCV Library - 2.4.10
This project is the OpenCV Java SDK, which makes OpenCV usable from Java code.

(Screenshot omitted.)

A summary of the packages:

Package: org.opencv.android (corresponds to libopencv_androidcamera.a)
  AsyncServiceHelper: helper that binds/unbinds the Manager service and loads the libraries.
  BaseLoaderCallback: default implementation of the loader callback.
  CameraBridgeViewBase: base class of the camera views, extends SurfaceView.
  FpsMeter: frame-rate statistics.
  InstallCallbackInterface: installation callback interface.
  JavaCameraView: Java-layer camera view.
  LoaderCallbackInterface: loader callback interface.
  NativeCameraView: native camera view.
  OpenCVLoader: loads the OpenCV libraries for a given version.
  StaticHelper: helper used by OpenCVLoader for static loading.
  Utils: miscellaneous helpers.

Package: org.opencv.calib3d (corresponds to libopencv_calib3d.a)
  3D calibration and reconstruction; includes the block-matching class StereoBM and the semi-global block-matching class StereoSGBM.

Package: org.opencv.contrib (corresponds to libopencv_contrib.a)
  Contrib, FaceRecognizer, StereoVar.

Package: org.opencv.core (corresponds to libopencv_core.a)
  The core library; defines the image containers: Algorithm, Core, CvException, CvType, Mat, MatOfByte, MatOfDMatch, MatOfFloat, MatOfDouble, MatOfFloat4, MatOfFloat6, MatOfInt, MatOfInt4, MatOfKeyPoint, MatOfPoint, MatOfPoint2f, MatOfPoint3, MatOfPoint3f, MatOfRect, Point, Point3, Range, Rect, RotatedRect, Scalar, Size, TermCriteria.

Package: org.opencv.engine
  OpenCVEngineInterface.aidl (the Manager's service interface).

Package: org.opencv.features2d (corresponds to libopencv_features2d.a)
  DescriptorExtractor, DescriptorMatcher, FeatureDetector, DMatch, Features2d, GenericDescriptorMatcher, KeyPoint.

Package: org.opencv.gpu (corresponds to libopencv_ocl.a)
  DeviceInfo, Gpu, TargetArchs.

Package: org.opencv.highgui (corresponds to libopencv_highgui.a)
  Highgui, VideoCapture.

Package: org.opencv.imgproc (corresponds to libopencv_imgproc.a)
  CLAHE, Imgproc, Moments, Subdiv2D.

Package: org.opencv.ml (corresponds to libopencv_ml.a)
  CvANN_MLP_TrainParams, CvANN_MLP, CvBoost, CvBoostParams, CvDTree, CvDTreeParams, CvERTrees, CvGBTrees, CvGBTreesParams, CvKNearest, CvNormalBayesClassifier, CvParamGrid, CvRTParams, CvRTrees, CvStatModel, CvSVM, CvSVMParams, EM, Ml.

Package: org.opencv.objdetect (corresponds to libopencv_objdetect.a)
  CascadeClassifier, HOGDescriptor, Objdetect.

Package: org.opencv.photo (corresponds to libopencv_photo.a)
  Photo.

Package: org.opencv.utils
  Converters.

Package: org.opencv.video (corresponds to libopencv_video.a and libopencv_videostab.a)
  BackgroundSubtractor, BackgroundSubtractorMOG, BackgroundSubtractorMOG2, KalmanFilter, Video.

Other modules:

libopencv_flann.a      // feature-matching (nearest-neighbour) algorithms
libopencv_legacy.a
libopencv_stitching.a  // image stitching
libopencv_superres.a
libopencv_ts.a
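As a quick illustration of using the Java SDK on its own — for example on a frame received over RakNet and decoded into a Bitmap — Utils and Imgproc combine like this; the class is a generic sketch, not part of the SDK:

import org.opencv.android.Utils;
import org.opencv.core.Mat;
import org.opencv.imgproc.Imgproc;

import android.graphics.Bitmap;

public class JavaSdkSketch {
    // Convert a Bitmap to a Mat, run Canny edge detection, and convert back for display.
    public static Bitmap edges(Bitmap input) {
        Mat rgba = new Mat();
        Utils.bitmapToMat(input, rgba);

        Mat gray = new Mat();
        Imgproc.cvtColor(rgba, gray, Imgproc.COLOR_RGBA2GRAY);
        Imgproc.Canny(gray, gray, 50, 150);

        Bitmap output = Bitmap.createBitmap(input.getWidth(), input.getHeight(), Bitmap.Config.ARGB_8888);
        Imgproc.cvtColor(gray, rgba, Imgproc.COLOR_GRAY2RGBA, 4);
        Utils.matToBitmap(rgba, output);
        return output;
    }
}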

OpenCV.mk

# NDK_USE_CYGPATH=1 must be defined before building
USER_LOCAL_PATH:=$(LOCAL_PATH)	# save the caller's path


USER_LOCAL_C_INCLUDES:=$(LOCAL_C_INCLUDES)	# save the caller's variables
USER_LOCAL_CFLAGS:=$(LOCAL_CFLAGS)
USER_LOCAL_STATIC_LIBRARIES:=$(LOCAL_STATIC_LIBRARIES)
USER_LOCAL_SHARED_LIBRARIES:=$(LOCAL_SHARED_LIBRARIES)
USER_LOCAL_LDLIBS:=$(LOCAL_LDLIBS)


LOCAL_PATH:=$(subst ?,,$(firstword ?$(subst \, ,$(subst /, ,$(call my-dir))))) # local path


OPENCV_TARGET_ARCH_ABI:=$(TARGET_ARCH_ABI)	# target ABI
OPENCV_THIS_DIR:=$(patsubst $(LOCAL_PATH)\\%,%,$(patsubst $(LOCAL_PATH)/%,%,$(call my-dir)))	# this directory, relative to LOCAL_PATH
OPENCV_MK_DIR:=$(dir $(lastword $(MAKEFILE_LIST)))	# directory containing this .mk file
OPENCV_LIBS_DIR:=$(OPENCV_THIS_DIR)/libs/opencv/$(OPENCV_TARGET_ARCH_ABI)	# OpenCV library directory for this ABI
OPENCV_3RDPARTY_LIBS_DIR:=$(OPENCV_THIS_DIR)/libs/3rdparty/$(OPENCV_TARGET_ARCH_ABI)	# third-party library directory for this ABI
OPENCV_BASEDIR:=		# base directory
OPENCV_LOCAL_C_INCLUDES:="$(LOCAL_PATH)/$(OPENCV_THIS_DIR)/include/opencv" "$(LOCAL_PATH)/$(OPENCV_THIS_DIR)/include" # include directories
OPENCV_MODULES:=contrib legacy stitching superres ocl objdetect ml ts videostab video photo calib3d features2d highgui imgproc flann androidcamera core # the OpenCV modules (library files)
OPENCV_LIB_TYPE:=STATIC	# static libraries


OPENCV_HAVE_GPU_MODULE:=off  # whether the GPU module is available
OPENCV_USE_GPU_MODULE:=on	# whether to enable it


ifeq ($(TARGET_ARCH_ABI),armeabi-v7a)  # armeabi-v7a only
    ifeq ($(OPENCV_HAVE_GPU_MODULE),on)	# GPU module available
        ifneq ($(CUDA_TOOLKIT_DIR),)
            OPENCV_USE_GPU_MODULE:=on
        endif
    endif
    OPENCV_DYNAMICUDA_MODULE:=
else
    OPENCV_DYNAMICUDA_MODULE:=
endif


CUDA_RUNTIME_LIBS:=


ifeq ($(OPENCV_LIB_TYPE),)	# default to shared libraries when no type was specified
    OPENCV_LIB_TYPE:=SHARED
endif


ifeq ($(OPENCV_LIB_TYPE),SHARED)
    OPENCV_LIBS:=java
    OPENCV_LIB_TYPE:=SHARED
else
    OPENCV_LIBS:=$(OPENCV_MODULES)
    OPENCV_LIB_TYPE:=STATIC
endif


ifeq ($(OPENCV_LIB_TYPE),SHARED) # pull in third-party components (needed only for static builds)
    OPENCV_3RDPARTY_COMPONENTS:=
    OPENCV_EXTRA_COMPONENTS:=
else
    ifeq ($(TARGET_ARCH_ABI),armeabi-v7a)
        OPENCV_3RDPARTY_COMPONENTS:=tbb libjpeg libpng libtiff libjasper IlmImf
        OPENCV_EXTRA_COMPONENTS:=c log m dl z
    endif
    ifeq ($(TARGET_ARCH_ABI),x86)
        OPENCV_3RDPARTY_COMPONENTS:=tbb libjpeg libpng libtiff libjasper IlmImf
        OPENCV_EXTRA_COMPONENTS:=c log m dl z
    endif
    ifeq ($(TARGET_ARCH_ABI),armeabi)
        OPENCV_3RDPARTY_COMPONENTS:= libjpeg libpng libtiff libjasper IlmImf
        OPENCV_EXTRA_COMPONENTS:=c log m dl z
    endif
    ifeq ($(TARGET_ARCH_ABI),mips)
        OPENCV_3RDPARTY_COMPONENTS:=tbb libjpeg libpng libtiff libjasper IlmImf
        OPENCV_EXTRA_COMPONENTS:=c log m dl z
    endif
endif


ifeq ($(OPENCV_CAMERA_MODULES),on) # pull in the native camera modules (shared libraries only)
    ifeq ($(TARGET_ARCH_ABI),armeabi)
        OPENCV_CAMERA_MODULES:= native_camera_r2.2.0 native_camera_r2.3.3 native_camera_r3.0.1 native_camera_r4.0.0 native_camera_r4.0.3 native_camera_r4.1.1 native_camera_r4.2.0 native_camera_r4.3.0 native_camera_r4.4.0
    endif
    ifeq ($(TARGET_ARCH_ABI),armeabi-v7a)
        OPENCV_CAMERA_MODULES:= native_camera_r2.2.0 native_camera_r2.3.3 native_camera_r3.0.1 native_camera_r4.0.0 native_camera_r4.0.3 native_camera_r4.1.1 native_camera_r4.2.0 native_camera_r4.3.0 native_camera_r4.4.0
    endif
    ifeq ($(TARGET_ARCH_ABI),x86)
        OPENCV_CAMERA_MODULES:= native_camera_r2.3.3 native_camera_r3.0.1 native_camera_r4.0.3 native_camera_r4.1.1 native_camera_r4.2.0 native_camera_r4.3.0 native_camera_r4.4.0
    endif
    ifeq ($(TARGET_ARCH_ABI),mips)
        OPENCV_CAMERA_MODULES:= native_camera_r4.0.3 native_camera_r4.1.1 native_camera_r4.2.0 native_camera_r4.3.0 native_camera_r4.4.0
    endif
else
    OPENCV_CAMERA_MODULES:=
endif


ifeq ($(OPENCV_LIB_TYPE),SHARED)
    OPENCV_LIB_SUFFIX:=so
else
    OPENCV_LIB_SUFFIX:=a
    OPENCV_INSTALL_MODULES:=on
endif


define add_opencv_module # add a prebuilt OpenCV module
    include $(CLEAR_VARS)
    LOCAL_MODULE:=opencv_$1
    LOCAL_SRC_FILES:=$(OPENCV_LIBS_DIR)/libopencv_$1.$(OPENCV_LIB_SUFFIX)
    include $(PREBUILT_$(OPENCV_LIB_TYPE)_LIBRARY)
endef


define add_cuda_module	# add a prebuilt CUDA runtime library
    include $(CLEAR_VARS)
    LOCAL_MODULE:=$1
    LOCAL_SRC_FILES:=$(CUDA_TOOLKIT_DIR)/targets/armv7-linux-androideabi/lib/lib$1.so
    include $(PREBUILT_SHARED_LIBRARY)
endef


define add_opencv_3rdparty_component # add a prebuilt third-party static library
    include $(CLEAR_VARS)
    LOCAL_MODULE:=$1
    LOCAL_SRC_FILES:=$(OPENCV_3RDPARTY_LIBS_DIR)/lib$1.a
    include $(PREBUILT_STATIC_LIBRARY)
endef


define add_opencv_camera_module # add a prebuilt native camera library
    include $(CLEAR_VARS)
    LOCAL_MODULE:=$1
    LOCAL_SRC_FILES:=$(OPENCV_LIBS_DIR)/lib$1.so
    include $(PREBUILT_SHARED_LIBRARY)
endef


ifeq ($(OPENCV_MK_$(OPENCV_TARGET_ARCH_ABI)_ALREADY_INCLUDED),)  # register modules only if not already included for this ABI
    ifeq ($(OPENCV_INSTALL_MODULES),on)
        $(foreach module,$(OPENCV_LIBS),$(eval $(call add_opencv_module,$(module))))
        ifneq ($(OPENCV_DYNAMICUDA_MODULE),)
            ifeq ($(OPENCV_LIB_TYPE),SHARED)
              $(eval $(call add_opencv_module,$(OPENCV_DYNAMICUDA_MODULE)))
            endif
        endif
    endif


    ifeq ($(OPENCV_USE_GPU_MODULE),on)
        ifeq ($(INSTALL_CUDA_LIBRARIES),on)
            $(foreach module,$(CUDA_RUNTIME_LIBS),$(eval $(call add_cuda_module,$(module))))
        endif
    endif


    $(foreach module,$(OPENCV_3RDPARTY_COMPONENTS),$(eval $(call add_opencv_3rdparty_component,$(module))))
    $(foreach module,$(OPENCV_CAMERA_MODULES),$(eval $(call add_opencv_camera_module,$(module))))


    ifneq ($(OPENCV_BASEDIR),)
        OPENCV_LOCAL_C_INCLUDES += $(foreach mod, $(OPENCV_MODULES), $(OPENCV_BASEDIR)/modules/$(mod)/include)
        ifeq ($(OPENCV_USE_GPU_MODULE),on)
            OPENCV_LOCAL_C_INCLUDES += $(OPENCV_BASEDIR)/modules/gpu/include
        endif
    endif


    #turn off module installation to prevent their redefinition
    OPENCV_MK_$(OPENCV_TARGET_ARCH_ABI)_ALREADY_INCLUDED:=on
endif


ifeq ($(OPENCV_MK_$(OPENCV_TARGET_ARCH_ABI)_GPU_ALREADY_INCLUDED),) # register the GPU module only if not already included
    ifeq ($(OPENCV_USE_GPU_MODULE),on)
        include $(CLEAR_VARS)
        LOCAL_MODULE:=opencv_gpu
        LOCAL_SRC_FILES:=$(OPENCV_LIBS_DIR)/libopencv_gpu.a
        include $(PREBUILT_STATIC_LIBRARY)
    endif
    OPENCV_MK_$(OPENCV_TARGET_ARCH_ABI)_GPU_ALREADY_INCLUDED:=on
endif


ifeq ($(OPENCV_LOCAL_CFLAGS),)	# default compiler flags
    OPENCV_LOCAL_CFLAGS := -fPIC -DANDROID -fsigned-char
endif


include $(CLEAR_VARS)


LOCAL_C_INCLUDES:=$(USER_LOCAL_C_INCLUDES)  # restore the caller's include paths
LOCAL_CFLAGS:=$(USER_LOCAL_CFLAGS)	# restore the caller's CFLAGS
LOCAL_STATIC_LIBRARIES:=$(USER_LOCAL_STATIC_LIBRARIES)  # restore the caller's static libraries
LOCAL_SHARED_LIBRARIES:=$(USER_LOCAL_SHARED_LIBRARIES)  # restore the caller's shared libraries
LOCAL_LDLIBS:=$(USER_LOCAL_LDLIBS)		# restore the caller's linker libraries


LOCAL_C_INCLUDES += $(OPENCV_LOCAL_C_INCLUDES)  # append the OpenCV include paths
LOCAL_CFLAGS     += $(OPENCV_LOCAL_CFLAGS)	# append the OpenCV CFLAGS


ifeq ($(OPENCV_USE_GPU_MODULE),on)	# add the CUDA include path when the GPU module is used
    LOCAL_C_INCLUDES += $(CUDA_TOOLKIT_DIR)/include
endif


ifeq ($(OPENCV_INSTALL_MODULES),on) # link the prebuilt modules registered above
    LOCAL_$(OPENCV_LIB_TYPE)_LIBRARIES += $(foreach mod, $(OPENCV_LIBS), opencv_$(mod))
    ifeq ($(OPENCV_LIB_TYPE),SHARED)
        ifneq ($(OPENCV_DYNAMICUDA_MODULE),)
            LOCAL_$(OPENCV_LIB_TYPE)_LIBRARIES += $(OPENCV_DYNAMICUDA_MODULE)
        endif
    endif
else
    LOCAL_LDLIBS += -L$(call host-path,$(LOCAL_PATH)/$(OPENCV_LIBS_DIR)) $(foreach lib, $(OPENCV_LIBS), -lopencv_$(lib)) # otherwise link straight from the SDK's library directory
endif


ifeq ($(OPENCV_LIB_TYPE),STATIC) # static build: also link the third-party components
    LOCAL_STATIC_LIBRARIES += $(OPENCV_3RDPARTY_COMPONENTS)
endif


LOCAL_LDLIBS += $(foreach lib,$(OPENCV_EXTRA_COMPONENTS), -l$(lib)) # link the extra system libraries (log, z, dl, ...)


ifeq ($(OPENCV_USE_GPU_MODULE),on)
    ifeq ($(INSTALL_CUDA_LIBRARIES),on)
        LOCAL_SHARED_LIBRARIES += $(foreach mod, $(CUDA_RUNTIME_LIBS), $(mod))
    else
        LOCAL_LDLIBS += -L$(CUDA_TOOLKIT_DIR)/targets/armv7-linux-androideabi/lib $(foreach lib, $(CUDA_RUNTIME_LIBS), -l$(lib))
    endif
    LOCAL_STATIC_LIBRARIES+=libopencv_gpu
endif


LOCAL_PATH:=$(USER_LOCAL_PATH) # restore the caller's LOCAL_PATH



Wrapping up

OpenCV on Android makes image and camera handling straightforward; the next step is to master using OpenCV directly in our own code.
