请教在Android手机上做视频监控的客户端
我们导师给我的任务是:Android手机上做视频监控的客户端。我们是用H.264压缩视频的,所以得自己做个播放器,这个播放器在塞班系统上已经由学长用C++实现了,当时用的视频解码器是ffmpeg,我现在要把它做在Android上,首先碰到的第一个困惑是,如果我用jffmpeg这个解码器,是不是可以直接把这个库添加到程序里就行了?第二个困惑是,我搜索了网上的资料,有的人是把ffmpeg整合到Android了,要经过乌班图（Ubuntu）编译什么的。请大家提点意见,对错无所谓。 --------------------编程问答-------------------- 自己顶一下 --------------------编程问答-------------------- ubuntu + ndk_R5 + ffmpeg+ 一些makefile知识。如果需要传输,研究下流媒体。 --------------------编程问答-------------------- 3Q,最近研究了下H.264压缩,和jni方面的知识 --------------------编程问答-------------------- 1、获得摄像头每幀数据2、将捕获的幀数据调用opencore的jni进行H.264编码
3、将经过编码的camera数据进行rtp包封装,上传 --------------------编程问答--------------------
--------------------编程问答--------------------
/**
 * Full-screen camera-preview Activity that feeds every preview frame to an
 * H.264 encoder (see {@code H264Encoder}). The surface lifecycle callbacks
 * own the Camera: opened in {@link #surfaceCreated}, configured and started
 * in {@link #surfaceChanged}, released in {@link #surfaceDestroyed}.
 */
public class AndroidVideo extends Activity implements Callback,
        Camera.PictureCallback {
    private SurfaceView mSurfaceView = null;
    private SurfaceHolder mSurfaceHolder = null;
    private Camera mCamera = null;
    // True while Camera.startPreview() is active; lets surfaceChanged restart cleanly.
    private boolean mPreviewRunning = false;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Translucent, title-less, full-screen window for the preview surface.
        getWindow().setFormat(PixelFormat.TRANSLUCENT);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN);
        setContentView(R.layout.main);
        mSurfaceView = (SurfaceView) this.findViewById(R.id.surface_camera);
        mSurfaceHolder = mSurfaceView.getHolder();
        mSurfaceHolder.addCallback(this);
        // Required on pre-Honeycomb devices for camera preview surfaces.
        mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    }

    @Override
    public void onPictureTaken(byte[] data, Camera camera) {
        // Still-picture capture is unused; only preview frames are encoded.
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width,
            int height) {
        if (mCamera == null) {
            // surfaceCreated failed to open the camera; nothing to configure.
            return;
        }
        if (mPreviewRunning) {
            mCamera.stopPreview();
        }
        Camera.Parameters p = mCamera.getParameters();
        // CIF resolution; must match the size handed to the native encoder.
        p.setPreviewSize(352, 288);
        mCamera.setPreviewCallback(new H264Encoder(352, 288));
        mCamera.setParameters(p);
        try {
            mCamera.setPreviewDisplay(holder);
        } catch (Exception ex) {
            // Log instead of silently swallowing the failure.
            Log.e("AndroidVideo", "setPreviewDisplay failed", ex);
        }
        mCamera.startPreview();
        mPreviewRunning = true;
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        mCamera = Camera.open();
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        if (mCamera != null) {
            // Clear the callback before stopping so no frame arrives mid-teardown.
            mCamera.setPreviewCallback(null);
            mCamera.stopPreview();
            mPreviewRunning = false;
            mCamera.release();
            mCamera = null;
        }
    }

    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        try {
            super.onConfigurationChanged(newConfig);
            if (this.getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE) {
                // Placeholder: no landscape-specific handling yet.
            } else if (this.getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT) {
                // Placeholder: no portrait-specific handling yet.
            }
        } catch (Exception ex) {
            // Log instead of silently swallowing the failure.
            Log.e("AndroidVideo", "onConfigurationChanged failed", ex);
        }
    }
}
/**
 * Camera preview callback that compresses each preview frame to H.264 via
 * the native "H264Android" library and appends the output to
 * /sdcard/camera.h264. Call {@link #close()} when done; finalize() only
 * acts as a last-resort safety net.
 */
class H264Encoder implements Camera.PreviewCallback {
    // Opaque native encoder handle from CompressBegin; 0 means "not open".
    long encoder = 0;
    RandomAccessFile raf = null;
    byte[] h264Buff = null;

    static {
        System.loadLibrary("H264Android");
    }

    private H264Encoder() {
    }

    public H264Encoder(int width, int height) {
        encoder = CompressBegin(width, height);
        // Generous output buffer; a compressed frame never exceeds this.
        h264Buff = new byte[width * height * 8];
        try {
            File file = new File("/sdcard/camera.h264");
            raf = new RandomAccessFile(file, "rw");
        } catch (Exception ex) {
            // raf stays null; onPreviewFrame guards against that.
            Log.v("System.out", ex.toString());
        }
    }

    /**
     * Releases the native encoder and closes the output file.
     * Idempotent: safe to call multiple times.
     */
    public synchronized void close() {
        if (encoder != 0) {
            CompressEnd(encoder);
            encoder = 0;
        }
        if (raf != null) {
            try {
                raf.close();
            } catch (Exception ex) {
                Log.v("System.out", ex.toString());
            }
            raf = null;
        }
    }

    @Override
    protected void finalize() {
        // finalize() is unreliable on Android; prefer explicit close().
        close();
        try {
            super.finalize();
        } catch (Throwable e) {
            e.printStackTrace();
        }
    }

    private native long CompressBegin(int width, int height);

    private native int CompressBuffer(long encoder, int type, byte[] in, int insize, byte[] out);

    private native int CompressEnd(long encoder);

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        // Skip frames if the native encoder failed to open.
        if (encoder == 0 || data == null) {
            return;
        }
        // type -1 lets the encoder pick the frame type (X264_TYPE_AUTO).
        int result = CompressBuffer(encoder, -1, data, data.length, h264Buff);
        try {
            // Guard raf: the output file may have failed to open in the ctor.
            if (result > 0 && raf != null) {
                raf.write(h264Buff, 0, result);
            }
        } catch (Exception ex) {
            Log.v("System.out", ex.toString());
        }
    }
}
/*
 * Creates and configures an x264 encoder for width x height I420 input.
 * Returns an opaque Encoder* handle as a jlong, or 0 on failure.
 *
 * NOTE(review): this symbol carries a "com_" package prefix while the other
 * two natives are Java_H264Encoder_* — only one naming can match the Java
 * class's package; confirm the package and make all three names agree.
 */
jlong Java_com_H264Encoder_CompressBegin(JNIEnv* env, jobject thiz,
		jint width, jint height) {
	Encoder * en = (Encoder *) malloc(sizeof(Encoder));
	if (en == 0) {
		return 0;
	}
	en->param = (x264_param_t *) malloc(sizeof(x264_param_t));
	/* BUG FIX: was cast to x264_param_t* although the allocation (and the
	 * field) is an x264_picture_t. */
	en->picture = (x264_picture_t *) malloc(sizeof(x264_picture_t));
	x264_param_default(en->param); /* set default param */
	x264_param_apply_profile(en->param, "baseline");
	en->param->i_log_level = X264_LOG_NONE;
	en->param->i_width = width;   /* set frame width */
	en->param->i_height = height; /* set frame height */
	en->param->rc.i_lookahead = 0; /* no lookahead: low-latency live encode */
	en->param->i_fps_num = 5;      /* 5 fps to match the monitoring stream */
	en->param->i_fps_den = 1;
	if ((en->handle = x264_encoder_open(en->param)) == 0) {
		/* BUG FIX: free the partially built Encoder instead of leaking it. */
		free(en->picture);
		free(en->param);
		free(en);
		return 0;
	}
	/* Allocate the reusable I420 input picture. */
	x264_picture_alloc(en->picture, X264_CSP_I420, en->param->i_width,
			en->param->i_height);
	return (jlong) en;
}
/*
 * Tears down an encoder created by CompressBegin: frees the input picture,
 * the parameter block, the x264 handle, and the Encoder wrapper itself.
 * Always returns 0; a 0 handle is ignored.
 */
jint Java_H264Encoder_CompressEnd(JNIEnv* env, jobject thiz, jlong handle)
{
	Encoder * en = (Encoder *) handle;
	/* BUG FIX: CompressBegin can return 0; guard before dereferencing. */
	if (en == 0) {
		return 0;
	}
	if (en->picture)
	{
		x264_picture_clean(en->picture);
		free(en->picture);
		en->picture = 0;
	}
	if (en->param)
	{
		free(en->param);
		en->param = 0;
	}
	if (en->handle)
	{
		x264_encoder_close(en->handle);
	}
	free(en);
	return 0;
}
/*
 * Encodes one camera preview frame to H.264.
 *
 * handle: Encoder* returned by CompressBegin.
 * type:   forced frame type (0=P, 1=IDR, 2=I, anything else=auto).
 * in:     raw frame bytes; assumed to be Android's default NV21 preview
 *         format (Y plane followed by interleaved VU, V first) — TODO
 *         confirm against the camera's configured preview format.
 * out:    receives the concatenated NAL units.
 * Returns the number of encoded bytes written to `out`, or -1 on error.
 */
jint Java_H264Encoder_CompressBuffer(JNIEnv* env, jobject thiz, jlong handle,
		jint type, jbyteArray in, jint insize, jbyteArray out)
{
	Encoder * en = (Encoder *) handle;
	x264_picture_t pic_out;
	int nNal = -1;
	int result = 0;
	int i = 0;
	jbyte * Buf = (jbyte*) (*env)->GetByteArrayElements(env, in, 0);
	jbyte * h264Buf = (jbyte*) (*env)->GetByteArrayElements(env, out, 0);
	unsigned char * pTmpOut = (unsigned char *) h264Buf;
	int nPicSize = en->param->i_width * en->param->i_height;
	/* BUG FIX: this comment had lost its opening delimiter (a stray "* /"
	 * broke the build). Input layout, presumably NV21:
	 *   YYYY
	 *   YYYY
	 *   VUVU   <- interleaved chroma, V first
	 * De-interleave into the encoder's I420 planes (plane[1]=U, plane[2]=V).
	 * The original locals named these planes backwards; renamed for clarity
	 * (behavior unchanged). */
	jbyte * uPlane = en->picture->img.plane[1];
	jbyte * vPlane = en->picture->img.plane[2];
	memcpy(en->picture->img.plane[0], Buf, nPicSize);
	for (i = 0; i < nPicSize / 4; i++)
	{
		*(vPlane + i) = *(Buf + nPicSize + i * 2);     /* V sample */
		*(uPlane + i) = *(Buf + nPicSize + i * 2 + 1); /* U sample */
	}
	switch (type)
	{
	case 0:
		en->picture->i_type = X264_TYPE_P;
		break;
	case 1:
		en->picture->i_type = X264_TYPE_IDR;
		break;
	case 2:
		en->picture->i_type = X264_TYPE_I;
		break;
	default:
		en->picture->i_type = X264_TYPE_AUTO;
		break;
	}
	if (x264_encoder_encode(en->handle, &(en->nal), &nNal, en->picture,
			&pic_out) < 0)
	{
		/* BUG FIX: release the pinned arrays on the error path too. */
		(*env)->ReleaseByteArrayElements(env, in, Buf, JNI_ABORT);
		(*env)->ReleaseByteArrayElements(env, out, h264Buf, JNI_ABORT);
		return -1;
	}
	/* Concatenate all NAL units into the caller's output buffer. */
	for (i = 0; i < nNal; i++) {
		memcpy(pTmpOut, en->nal[i].p_payload, en->nal[i].i_payload);
		pTmpOut += en->nal[i].i_payload;
		result += en->nal[i].i_payload;
	}
	/* BUG FIX: the arrays were never released. Mode 0 copies the encoded
	 * bytes back into the Java `out` array (GetByteArrayElements may have
	 * returned a copy, so without this the caller could see no output);
	 * JNI_ABORT discards the unmodified input without a copy-back. */
	(*env)->ReleaseByteArrayElements(env, in, Buf, JNI_ABORT);
	(*env)->ReleaseByteArrayElements(env, out, h264Buf, 0);
	return result;
}
希望对你有所帮助。 --------------------编程问答-------------------- 看了下你发的代码,是做h264编码的,看了下还是学到很多东西的,谢了,大哥。不过我们现在所要做的是接收服务器发过来的h264裸数据,在Android手机上接收RTP后进行解码播放。 --------------------编程问答--------------------
Android是支持h264的解码的,关于rtp服务:2.3以前的版本都是使用opencore不支持rtp服务,2.3以后才支持。 --------------------编程问答-------------------- 顶6楼 不过也可以用MediaRecorder来硬编码 可以参考LiveCast~ 去破解下看下代码 --------------------编程问答-------------------- --------------------编程问答-------------------- 呵呵,这个不难,你参考一下网络上的代码,把ffmpeg移植到Android上,然后编码发送,在客户端接收解包解码显示播放就可以了 --------------------编程问答-------------------- 嗯,找了个网上的代码 --------------------编程问答--------------------
opencore支持H264的,stagefright才不支持的 --------------------编程问答-------------------- 楼主 你解决了吗?能给我一份吗??非常感谢啊 zxd_java@163.com --------------------编程问答-------------------- 除 --------------------编程问答-------------------- 楼主 你好 看到你说你找到源代码了,能分享一下吗 我的毕设和你的差不多 谢了 --------------------编程问答-------------------- 对了 如果有的话发我邮箱里一份 非常感谢316182103@qq.com --------------------编程问答-------------------- 我有类似的课题 ,我是使用VLC来实现的,可以交流下,你有空的话发我邮箱一份,非常感谢。272234562@qq.com
补充:移动开发 , Android