
Android 4.0 USB Camera Example (Part 3): UVC


In the previous two articles I described the zc301 implementation; see http://blog.csdn.net/hclydao/article/details/21235919 for details.

Here I add support for generic USB (UVC) cameras as well. The zc301 delivers frames that are already JPEG, so they can be displayed without any further processing, but a standard UVC camera delivers YUV 4:2:2 (YUYV) frames, which must be converted to RGB before they can be displayed. Only small changes to the code from the previous two articles are needed.
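For reference, the conversion itself is just fixed-point BT.601 YCbCr-to-RGB math. Below is a minimal Java sketch of it (the class and method names are mine, for illustration only); the HAL code that follows does the same thing in C, with lookup tables to avoid the per-pixel multiplications:

// Fixed-point BT.601 YCbCr -> RGB, scaled by 1024 (the same constants the C code uses).
// Class and method names are illustrative only.
public final class YuyvToArgb {

    // YUYV packs two pixels into four bytes: Y0 U Y1 V (both pixels share U and V).
    public static void convert(byte[] yuyv, int[] argb, int width, int height) {
        int frameSize = width * height * 2;
        int out = 0;
        for (int i = 0; i + 3 < frameSize; i += 4) {
            int y0 = yuyv[i] & 0xff, u = yuyv[i + 1] & 0xff;
            int y1 = yuyv[i + 2] & 0xff, v = yuyv[i + 3] & 0xff;
            argb[out++] = toArgb(y0, u, v);
            argb[out++] = toArgb(y1, u, v);
        }
    }

    // R = 1.164(Y-16) + 1.596(V-128), G = 1.164(Y-16) - 0.813(V-128) - 0.391(U-128),
    // B = 1.164(Y-16) + 2.018(U-128); approximated here with integers scaled by 1024.
    private static int toArgb(int y, int u, int v) {
        int y1192 = Math.max(0, 1192 * (y - 16));
        int r = clamp((y1192 + 1634 * (v - 128)) >> 10);
        int g = clamp((y1192 - 833 * (v - 128) - 400 * (u - 128)) >> 10);
        int b = clamp((y1192 + 2066 * (u - 128)) >> 10);
        return 0xff000000 | (r << 16) | (g << 8) | b;   // ARGB, as Bitmap.createBitmap(int[],...) expects
    }

    private static int clamp(int c) {
        return c < 0 ? 0 : (c > 255 ? 255 : c);
    }
}

Note that the C code packs the channels in the opposite order (R in the low byte of each int) because it fills RGBA_8888 bitmap memory directly; the sketch above uses the ARGB order expected by Bitmap.createBitmap(int[], ...).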

The HAL-layer code is as follows:

/*
 * Android USB Camera zc3xx Library
 *
 * Copyright (c) 2014  Store information technology guangzhou ltd
 * Copyright (c) 2014  hclydao 
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License.
 */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <fcntl.h>
#include <unistd.h>
#include <errno.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <linux/videodev2.h>
#include <jni.h>
#include <android/log.h>
#include <android/bitmap.h>

#define  LOG_TAG    "FimcGzsd"
#define LOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, LOG_TAG, __VA_ARGS__)
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG  , LOG_TAG, __VA_ARGS__)
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO   , LOG_TAG, __VA_ARGS__)
#define LOGW(...) __android_log_print(ANDROID_LOG_WARN   , LOG_TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR  , LOG_TAG, __VA_ARGS__)

struct fimc_buffer {
    unsigned char *start;
    size_t  length;
};

static int fd = -1;
struct fimc_buffer *buffers=NULL;
struct v4l2_buffer v4l2_buf;
static int bufnum = 1;
static int mwidth,mheight;
static int c_type = 0;

int *rgb = NULL;
int *ybuf = NULL;

int yuv_tbl_ready=0;
int y1192_tbl[256];
int v1634_tbl[256];
int v833_tbl[256];
int u400_tbl[256];
int u2066_tbl[256];
/*
 *open usb camera device
 */
JNIEXPORT jint JNICALL Java_com_dao_usbcam_Fimcgzsd_open(JNIEnv * env, jclass obj, const jbyteArray devname)
{
	/* devname comes from String.getBytes() on the Java side and is not NUL-terminated,
	 * so copy it into a terminated buffer before passing it to open() */
	char devpath[64] = {0};
	jsize len = (*env)->GetArrayLength(env, devname);
	jbyte *dev = (*env)->GetByteArrayElements(env, devname, 0);
	if (len > (jsize)(sizeof(devpath) - 1))
		len = sizeof(devpath) - 1;
	memcpy(devpath, dev, len);
	(*env)->ReleaseByteArrayElements(env, devname, dev, 0);

	fd = open(devpath, O_RDWR, 0);
	if (fd < 0)
	{
		LOGE("%s ++++ open error\n", devpath);
		return -1;
	}
	return fd;
}
/*
 * init device
 */
JNIEXPORT jint JNICALL Java_com_dao_usbcam_Fimcgzsd_init(JNIEnv * env, jclass obj, jint width, jint height,jint numbuf,jint ctype)
{
	int ret;
	int i;
	bufnum = numbuf;
	mwidth = width;
	mheight = height;
	c_type = ctype;
	struct v4l2_format fmt;	
	struct v4l2_capability cap;

    ret = ioctl(fd, VIDIOC_QUERYCAP, &cap);
    if (ret < 0) {
        LOGE("%d :VIDIOC_QUERYCAP failed\n",__LINE__);
        return -1;
    }
    if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
        LOGE("%d : no capture devices\n",__LINE__);
        return -1;
    }
				
	memset( &fmt, 0, sizeof(fmt));
	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
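	/* c_type == 1 means a standard UVC camera: request packed YUYV (YUV 4:2:2);
	 * otherwise request RGB565 */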
	if(c_type == 1)
		fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
	else
		fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB565;
	fmt.fmt.pix.width = width;
	fmt.fmt.pix.height = height;					
	if (ioctl(fd, VIDIOC_S_FMT, &fmt) < 0)
	{
		LOGE("++++%d : set format failed\n",__LINE__);
		return -1;
	}

    struct v4l2_requestbuffers req;
    memset(&req, 0, sizeof(req));
    req.count = numbuf;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;

    ret = ioctl(fd, VIDIOC_REQBUFS, &req);
    if (ret < 0) {
        LOGE("++++%d : VIDIOC_REQBUFS failed\n",__LINE__);
        return -1;
    }

    buffers = calloc(req.count, sizeof(*buffers));
    if (!buffers) {
        LOGE ("++++%d Out of memory\n",__LINE__);
		return -1;
    }

	for(i = 0; i< bufnum; ++i) {
		memset(&v4l2_buf, 0, sizeof(v4l2_buf));
		v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		v4l2_buf.memory = V4L2_MEMORY_MMAP;
		v4l2_buf.index = i;
		ret = ioctl(fd , VIDIOC_QUERYBUF, &v4l2_buf);
		if(ret < 0) {
		   LOGE("+++%d : VIDIOC_QUERYBUF failed\n",__LINE__);
		   return -1;
		}
		buffers[i].length = v4l2_buf.length;
		if ((buffers[i].start = mmap(0, v4l2_buf.length,
		                             PROT_READ | PROT_WRITE, MAP_SHARED,
		                             fd, v4l2_buf.m.offset)) == MAP_FAILED) {
		     LOGE("%d : mmap() failed",__LINE__);
		     return -1;
		}
	}
	rgb = (int *)malloc(sizeof(int) * (mwidth*mheight));
	ybuf = (int *)malloc(sizeof(int) * (mwidth*mheight));
	return 0;
}
/*
 *queue the buffers and start streaming
 */
JNIEXPORT jint JNICALL Java_com_dao_usbcam_Fimcgzsd_streamon(JNIEnv * env, jclass obj)
{
	int i;
	int ret;
	enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	for(i = 0; i< bufnum; ++i) {
		memset(&v4l2_buf, 0, sizeof(v4l2_buf));
		v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		v4l2_buf.memory = V4L2_MEMORY_MMAP;
		v4l2_buf.index = i;
		ret = ioctl(fd, VIDIOC_QBUF, &v4l2_buf);
		if (ret < 0) {
		    LOGE("%d : VIDIOC_QBUF failed\n",__LINE__);
		    return ret;
		}
	}
    ret = ioctl(fd, VIDIOC_STREAMON, &type);
    if (ret < 0) {
        LOGE("%d : VIDIOC_STREAMON failed\n",__LINE__);
        return ret;
    }
	return 0;
}

/*
 *convert one packed YUYV (YUV 4:2:2) frame into the global rgb[] buffer as RGBA_8888 pixels
 */
void yuyv422toABGRY(unsigned char *src)
{

	int width=0;
	int height=0;

	width = mwidth;
	height = mheight;

	int frameSize =width*height*2;

	int i;

	if((!rgb || !ybuf)){
		return;
	}
	int *lrgb = NULL;
	int *lybuf = NULL;
		
	lrgb = &rgb[0];
	lybuf = &ybuf[0];

	if(yuv_tbl_ready==0){
		for(i=0 ; i<256 ; i++){
			y1192_tbl[i] = 1192*(i-16);
			if(y1192_tbl[i]<0){
				y1192_tbl[i]=0;
			}

			v1634_tbl[i] = 1634*(i-128);
			v833_tbl[i] = 833*(i-128);
			u400_tbl[i] = 400*(i-128);
			u2066_tbl[i] = 2066*(i-128);
		}
		yuv_tbl_ready=1;
	}

	for(i=0 ; i<frameSize ; i+=4){
		unsigned char y1, y2, u, v;
		y1 = src[i];
		u = src[i+1];
		y2 = src[i+2];
		v = src[i+3];

		int y1192_1=y1192_tbl[y1];
		int r1 = (y1192_1 + v1634_tbl[v])>>10;
		int g1 = (y1192_1 - v833_tbl[v] - u400_tbl[u])>>10;
		int b1 = (y1192_1 + u2066_tbl[u])>>10;

		int y1192_2=y1192_tbl[y2];
		int r2 = (y1192_2 + v1634_tbl[v])>>10;
		int g2 = (y1192_2 - v833_tbl[v] - u400_tbl[u])>>10;
		int b2 = (y1192_2 + u2066_tbl[u])>>10;

		r1 = r1>255 ? 255 : r1<0 ? 0 : r1;
		g1 = g1>255 ? 255 : g1<0 ? 0 : g1;
		b1 = b1>255 ? 255 : b1<0 ? 0 : b1;
		r2 = r2>255 ? 255 : r2<0 ? 0 : r2;
		g2 = g2>255 ? 255 : g2<0 ? 0 : g2;
		b2 = b2>255 ? 255 : b2<0 ? 0 : b2;

		*lrgb++ = 0xff000000 | b1<<16 | g1<<8 | r1;
		*lrgb++ = 0xff000000 | b2<<16 | g2<<8 | r2;

		if(lybuf!=NULL){
			*lybuf++ = y1;
			*lybuf++ = y2;
		}
	}

}

void 
Java_com_dao_usbcam_Fimcgzsd_pixeltobmp( JNIEnv* env,jobject thiz,jobject bitmap){

	AndroidBitmapInfo  info;
	void*              pixels;
	int                ret;
	int i;
	int *colors;

	int width=0;
	int height=0;

	if ((ret = AndroidBitmap_getInfo(env, bitmap, &info)) < 0) {
		LOGE("AndroidBitmap_getInfo() failed ! error=%d", ret);
		return;
	}

	width = info.width;
	height = info.height;

	if(!rgb || !ybuf) return;

	if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
		LOGE("Bitmap format is not RGBA_8888 !");
		return;
	}

	if ((ret = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0) {
		LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
		return;
	}

	colors = (int*)pixels;
	int *lrgb = &rgb[0];

	/* copy the converted frame into the bitmap's pixel memory */
	for(i=0 ; i<width*height ; i++){
		*colors++ = *lrgb++;
	}

	AndroidBitmap_unlockPixels(env, bitmap);
}

/*
 *convert one YUYV frame into the int buffer supplied by the caller (used by yuvtorgb)
 */
void yuyv422torgb(unsigned char *src, int *dst)
{
	int width = mwidth;
	int height = mheight;
	int frameSize = width*height*2;
	int i;

	int *lrgb = dst;
	int *lybuf = ybuf;

	if(yuv_tbl_ready==0){
		for(i=0 ; i<256 ; i++){
			y1192_tbl[i] = 1192*(i-16);
			if(y1192_tbl[i]<0){
				y1192_tbl[i]=0;
			}
			v1634_tbl[i] = 1634*(i-128);
			v833_tbl[i] = 833*(i-128);
			u400_tbl[i] = 400*(i-128);
			u2066_tbl[i] = 2066*(i-128);
		}
		yuv_tbl_ready=1;
	}

	for(i=0 ; i<frameSize ; i+=4){
		unsigned char y1, y2, u, v;
		y1 = src[i];
		u = src[i+1];
		y2 = src[i+2];
		v = src[i+3];

		int y1192_1=y1192_tbl[y1];
		int r1 = (y1192_1 + v1634_tbl[v])>>10;
		int g1 = (y1192_1 - v833_tbl[v] - u400_tbl[u])>>10;
		int b1 = (y1192_1 + u2066_tbl[u])>>10;

		int y1192_2=y1192_tbl[y2];
		int r2 = (y1192_2 + v1634_tbl[v])>>10;
		int g2 = (y1192_2 - v833_tbl[v] - u400_tbl[u])>>10;
		int b2 = (y1192_2 + u2066_tbl[u])>>10;

		r1 = r1>255 ? 255 : r1<0 ? 0 : r1;
		g1 = g1>255 ? 255 : g1<0 ? 0 : g1;
		b1 = b1>255 ? 255 : b1<0 ? 0 : b1;
		r2 = r2>255 ? 255 : r2<0 ? 0 : r2;
		g2 = g2>255 ? 255 : g2<0 ? 0 : g2;
		b2 = b2>255 ? 255 : b2<0 ? 0 : b2;

		*lrgb++ = 0xff000000 | b1<<16 | g1<<8 | r1;
		*lrgb++ = 0xff000000 | b2<<16 | g2<<8 | r2;

		if(lybuf!=NULL){
			*lybuf++ = y1;
			*lybuf++ = y2;
		}
	}
}

JNIEXPORT jint JNICALL Java_com_dao_usbcam_Fimcgzsd_yuvtorgb(JNIEnv * env, jclass obj,const jbyteArray yuvdata, jintArray rgbdata)
{
	jbyte *ydata = (jbyte*)(*env)->GetByteArrayElements(env, yuvdata, 0);
	jint *rdata = (jint*)(*env)->GetIntArrayElements(env, rgbdata, 0);
	yuyv422torgb((unsigned char *)ydata, (int *)rdata);
	(*env)->ReleaseByteArrayElements(env, yuvdata, ydata, 0);
	(*env)->ReleaseIntArrayElements(env, rgbdata, rdata, 0);
	return 0;
}
/*
 *get one frame data
 */
JNIEXPORT jint JNICALL Java_com_dao_usbcam_Fimcgzsd_dqbuf(JNIEnv * env, jclass obj,const jbyteArray videodata)
{
    int ret;

	jbyte *data = (jbyte*)(*env)->GetByteArrayElements(env, videodata, 0);
    v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    v4l2_buf.memory = V4L2_MEMORY_MMAP;

    ret = ioctl(fd, VIDIOC_DQBUF, &v4l2_buf);
    if (ret < 0) {
        LOGE("%s : VIDIOC_DQBUF failed, dropped frame\n",__func__);
        return ret;
    }
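	/* standard UVC camera: convert the YUYV frame into the global rgb[] buffer
	 * so that pixeltobmp() can copy it into the Java Bitmap */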
	if(c_type == 1)
		yuyv422toABGRY((unsigned char *)buffers[v4l2_buf.index].start);
	memcpy(data,buffers[v4l2_buf.index].start,buffers[v4l2_buf.index].length);
	(*env)->ReleaseByteArrayElements(env, videodata, data, 0);
	return v4l2_buf.index;
}
/*
 *put a frame buffer back on the queue
 */
JNIEXPORT jint JNICALL Java_com_dao_usbcam_Fimcgzsd_qbuf(JNIEnv * env, jclass obj,jint index)
{
    int ret;

    v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    v4l2_buf.memory = V4L2_MEMORY_MMAP;
    v4l2_buf.index = index;

    ret = ioctl(fd, VIDIOC_QBUF, &v4l2_buf);
    if (ret < 0) {
        LOGE("%s : VIDIOC_QBUF failed\n",__func__);
        return ret;
    }

    return 0;
}
/*
 *streamoff
 */
JNIEXPORT jint JNICALL Java_com_dao_usbcam_Fimcgzsd_streamoff(JNIEnv * env, jclass obj,jint index)
{
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    int ret;

    ret = ioctl(fd, VIDIOC_STREAMOFF, &type);
    if (ret < 0) {
        LOGE("%s : VIDIOC_STREAMOFF failed\n",__func__);
        return ret;
    }

    return 0;
}
/*
 *release
 */
JNIEXPORT jint JNICALL Java_com_dao_usbcam_Fimcgzsd_release(JNIEnv * env, jclass obj)
{
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    int ret;
	int i;

    ret = ioctl(fd, VIDIOC_STREAMOFF, &type);
    if (ret < 0) {
        LOGE("%s : VIDIOC_STREAMOFF failed\n",__func__);
        return ret;
    }

    for (i = 0; i < bufnum; i++) {
       ret = munmap(buffers[i].start, buffers[i].length);
		if (ret < 0) {
		    LOGE("%s : munmap failed\n",__func__);
		    return ret;
    	}
	}
	free (buffers);
	if(rgb) free(rgb);
	if(ybuf) free(ybuf);
	close(fd);
	return 0;
}
Two functions are the main additions:

yuyv422toABGRY, which converts YUYV to RGB, and

Java_com_dao_usbcam_Fimcgzsd_pixeltobmp, which copies the converted RGB data into an Android Bitmap.

Both were taken from the simplewebcam example I found online.

Android.mk is modified as follows (the -ljnigraphics library is needed for the AndroidBitmap_* calls):

LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)
LOCAL_MODULE    := fimcgzsd
LOCAL_SRC_FILES := fimcgzsd.c
LOCAL_LDLIBS    := -llog -ljnigraphics
include $(BUILD_SHARED_LIBRARY)
An Application.mk is also added:

# The ARMv7 ABI is significantly faster due to the use of the hardware FPU
APP_ABI := armeabi armeabi-v7a
APP_PLATFORM := android-8
With the NDK environment variables already set, just cd into the project directory in a terminal and run ndk-build to generate the shared library.

Application layer

A ctype parameter is added to indicate whether the camera is a zc301 or a UVC device. Here is the full code.
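The code below calls into the Fimcgzsd JNI wrapper class from the earlier articles, which is not reproduced here. For completeness, here is a sketch of what its declarations presumably look like, inferred from the native function names above and from the calls in MainActivity (an assumption, not the original file):

package com.dao.usbcam;

import android.graphics.Bitmap;

public class Fimcgzsd {
	static {
		System.loadLibrary("fimcgzsd");   // libfimcgzsd.so produced by the ndk-build step above
	}
	public static native int open(byte[] devname);
	public static native int init(int width, int height, int numbuf, int ctype);
	public static native int streamon();
	public static native int dqbuf(byte[] videodata);
	public static native int qbuf(int index);
	public static native void pixeltobmp(Bitmap bitmap);
	public static native int yuvtorgb(byte[] yuvdata, int[] rgbdata);
	public static native int streamoff(int index);
	public static native int release();
}

And MainActivity itself: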

package com.dao.usbcam;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;

import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.text.format.Time;
import android.view.Menu;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.Window;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.Toast;

public class MainActivity extends Activity {
	private ImageView mImag;
	private int width = 320;
	private int height = 240;
	private String devname = "/dev/video3";
	private byte[] mdata;
	private Handler mHandler;
	private int numbuf = 0;
	private int index = 0;
	private int ret = 0;
	private int ctype = 1;//0 is zc301 1 is uvc camera
	public Button mcap;
	private Bitmap bitmap;
	private Bitmap bmp;
	private int[] rgb;
	@Override
	protected void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);
		requestWindowFeature(Window.FEATURE_NO_TITLE);
		getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
		  WindowManager.LayoutParams.FLAG_FULLSCREEN);
		setContentView(R.layout.activity_main);
		mImag = (ImageView)findViewById(R.id.mimg);
		mcap = (Button)findViewById(R.id.mcap);
		bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
		numbuf  = 4;
		mdata = new byte[width * height * numbuf];
		rgb = new int[width * height * numbuf];
		ret = Fimcgzsd.open(devname.getBytes());
		if(ret < 0)
			finish();
		ret = Fimcgzsd.init(width, height, numbuf,ctype);
		if(ret < 0)
			finish();
		ret = Fimcgzsd.streamon();
		if(ret < 0)
			finish();
		mHandler = new Handler();
		new StartThread().start();
		mcap.setOnClickListener(new CaptureListener());
	}

	final Runnable mUpdateUI = new Runnable() {

		@Override
		public void run() {
			// TODO Auto-generated method stub
			mImag.setImageBitmap(bitmap);
		    
		}
	};
	
	class StartThread extends Thread {

		@Override
		public void run() {
			// TODO Auto-generated method stub
			//super.run();
			while(true) {
				if(ctype == 1){
					index = Fimcgzsd.dqbuf(mdata);
					if((index < 0) || (mdata == null)) {
						onDestroy();
						break;
					}
					Fimcgzsd.pixeltobmp(bmp);
					bitmap = bmp;
					mHandler.post(mUpdateUI);
					Fimcgzsd.qbuf(index);
					//Fimcgzsd.yuvtorgb(mdata, rgb);
					//mHandler.post(mUpdateUI);
					//bitmap = Bitmap.createBitmap(rgb,width,height,Bitmap.Config.ARGB_8888);
					//Fimcgzsd.qbuf(index);
				} else {
					index = Fimcgzsd.dqbuf(mdata);
					if(index < 0) {
						onDestroy();
						break;
					}
					bitmap = BitmapFactory.decodeByteArray(mdata, 0, width * height);
					mHandler.post(mUpdateUI);
					Fimcgzsd.qbuf(index);
				}
			}
		}
	}
	
	public static void saveMyBitmap(Bitmap mBitmap) {
    	Time mtime = new Time();
    	mtime.setToNow();
    	File fdir = new File(Environment.getExternalStorageDirectory().getPath() + "/DCIM/" + "/gzsd/");
    	if(!fdir.exists()) {
    		fdir.mkdir();
    	}
        File f = new File(Environment.getExternalStorageDirectory().getPath() + "/DCIM/" + "/gzsd/" + mtime.year + mtime.month + mtime.monthDay + mtime.hour + mtime.minute +mtime.second+".png");
        try {
                f.createNewFile();
        } catch (IOException e) {
        	e.printStackTrace();
        }
        FileOutputStream fOut = null;
        try {
                fOut = new FileOutputStream(f);
                mBitmap.compress(Bitmap.CompressFormat.PNG, 100, fOut);
                fOut.flush();
                fOut.close();
        } catch (FileNotFoundException e) {
        	e.printStackTrace();
        } catch (IOException e) {
        	e.printStackTrace();
        }

    }
	
    class CaptureListener implements OnClickListener{

		@Override
		public void onClick(View v) {
			// TODO Auto-generated method stub
			//new CapThread().start();
			//Fimcgzsd.streamoff();
			saveMyBitmap(bitmap);
			//Fimcgzsd.streamon();
			Toast.makeText(MainActivity.this, "Capture Successfully", Toast.LENGTH_SHORT).show();
		}	
    }
    
	class CapThread extends Thread {

		@Override
		public void run() {
			// TODO Auto-generated method stub
			//super.run();
			saveMyBitmap(bitmap);
			Toast.makeText(MainActivity.this, "Capture Successfully", Toast.LENGTH_LONG).show();
		}
		
	}
	
	@Override
	protected void onStop() {
		// TODO Auto-generated method stub
		super.onStop();
		Fimcgzsd.release();
		finish();
	}

	@Override
	protected void onPause() {
		// TODO Auto-generated method stub
		super.onPause();
		Fimcgzsd.release();
		finish();
	}

	@Override
	protected void onDestroy() {
		// TODO Auto-generated method stub
		super.onDestroy();
		Fimcgzsd.release();
	}

	@Override
	public boolean onCreateOptionsMenu(Menu menu) {
		// Inflate the menu; this adds items to the action bar if it is present.
		getMenuInflater().inflate(R.menu.main, menu);
		return true;
	}

}
I already covered the underlying principles in the earlier articles, so I won't repeat them here. The result looks like this:

The picture looks a little blurry, but it actually works quite well for selfies; a bit of blur is flattering, haha.



