栏目分类:
子分类:
返回
名师互学网用户登录
快速导航关闭
当前搜索
当前分类
子分类
实用工具
热门搜索
名师互学网 > IT > 软件开发 > 移动开发 > Android

Android使用MediaCodec将摄像头采集的视频编码为h264

Android 更新时间: 发布时间: IT归档 最新发布 模块sitemap 名妆网 法律咨询 聚返吧 英语巴士网 伯小乐 网商动力

Android使用MediaCodec将摄像头采集的视频编码为h264

本文实例为大家分享了Android使用MediaCodec将摄像头采集的视频编码为h264,供大家参考,具体内容如下

MainActivity.java

import android.app.Activity;
import android.graphics.ImageFormat;
import android.hardware.Camera;
import android.hardware.Camera.Parameters;
import android.hardware.Camera.PreviewCallback;
import android.os.Bundle;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

import java.io.IOException;
import java.util.concurrent.ArrayBlockingQueue;

public class MainActivity extends Activity implements SurfaceHolder.Callback, PreviewCallback {

  private SurfaceView surfaceview;

  private SurfaceHolder surfaceHolder;

  private Camera camera;

  private Parameters parameters;

  // Capture configuration handed to the encoder.
  int width = 1280;

  int height = 720;

  int framerate = 30;

  int biterate = 8500 * 1000;

  private static int yuvqueuesize = 10;

  // Queue of raw NV21 preview frames awaiting encoding. Static so the encoder
  // thread in AvcEncoder can drain it.
  // BUG FIX: was a raw ArrayBlockingQueue, so poll() returned Object and the
  // byte[] assignment in AvcEncoder could not compile.
  public static ArrayBlockingQueue<byte[]> YUVQueue =
      new ArrayBlockingQueue<byte[]>(yuvqueuesize);

  private AvcEncoder avcCodec;


  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);
    surfaceview = (SurfaceView) findViewById(R.id.surfaceview);
    surfaceHolder = surfaceview.getHolder();
    // Preview can only start once the surface exists; wait for surfaceCreated().
    surfaceHolder.addCallback(this);
  }


  @Override
  public void surfaceCreated(SurfaceHolder holder) {
    camera = getBackCamera();
    startcamera(camera);
    // Create the H.264 encoder and start draining YUVQueue on its own thread.
    avcCodec = new AvcEncoder(width, height, framerate, biterate);
    avcCodec.StartEncoderThread();
  }

  @Override
  public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {

  }

  @Override
  public void surfaceDestroyed(SurfaceHolder holder) {
    if (null != camera) {
      camera.setPreviewCallback(null);
      camera.stopPreview();
      camera.release();
      camera = null;
      avcCodec.StopThread();
    }
  }


  // BUG FIX: was spelled "onPreviewframe", which does not override
  // Camera.PreviewCallback.onPreviewFrame (a compile error under @Override),
  // so the camera would never have delivered a single frame.
  @Override
  public void onPreviewFrame(byte[] data, Camera camera) {
    // Queue the NV21 frame for the encoder thread.
    putYUVData(data, data.length);
  }

  public void putYUVData(byte[] buffer, int length) {
    // Drop the oldest frame rather than blocking the camera callback when the
    // encoder falls behind. (Uses yuvqueuesize instead of a magic 10 so the
    // check tracks the queue's actual capacity.)
    if (YUVQueue.size() >= yuvqueuesize) {
      YUVQueue.poll();
    }
    YUVQueue.add(buffer);
  }


  // Configures and starts the camera preview: NV21 frames at width x height,
  // delivered to onPreviewFrame, rendered on surfaceHolder.
  private void startcamera(Camera mCamera) {
    if (mCamera != null) {
      try {
        mCamera.setPreviewCallback(this);
        mCamera.setDisplayOrientation(90);
        // Start from the camera's default parameters. (The original fetched
        // them twice — once behind a null check and once unconditionally.)
        if (parameters == null) {
          parameters = mCamera.getParameters();
        }
        // Preview format must match what AvcEncoder expects to convert (NV21).
        parameters.setPreviewFormat(ImageFormat.NV21);
        parameters.setPreviewSize(width, height);
        mCamera.setParameters(parameters);
        // A fully initialized SurfaceHolder is required: without a surface the
        // camera will not start the preview.
        mCamera.setPreviewDisplay(surfaceHolder);
        // startPreview() must be called before capturing begins.
        mCamera.startPreview();

      } catch (IOException e) {
        e.printStackTrace();
      }
    }
  }

  // Opens camera 0 (back-facing). Returns null when the camera cannot be
  // opened (in use, no permission, ...).
  private Camera getBackCamera() {
    Camera c = null;
    try {
      c = Camera.open(0);
    } catch (Exception e) {
      e.printStackTrace();
    }
    return c;
  }


}

2.AvcEncoder.java

import static android.media.MediaCodec.BUFFER_FLAG_CODEC_CONFIG;
import static android.media.MediaCodec.BUFFER_FLAG_KEY_FRAME;

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.os.Environment;

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;


public class AvcEncoder
{
  private final static String TAG = "MeidaCodec";

  // Timeout in microseconds for dequeueOutputBuffer.
  private int TIMEOUT_USEC = 12000;

  private MediaCodec mediaCodec;
  int m_width;
  int m_height;
  int m_framerate;

  // Codec config data (SPS/PPS) emitted with BUFFER_FLAG_CODEC_CONFIG; it is
  // prepended to every key frame so the raw .h264 file can be decoded from
  // any IDR frame.
  public byte[] configbyte;


  // Creates and starts an H.264 encoder for width x height NV12 input.
  public AvcEncoder(int width, int height, int framerate, int bitrate) {

    m_width = width;
    m_height = height;
    m_framerate = framerate;
    MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", width, height);
    mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
        MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
    // BUG FIX: the caller-supplied bitrate and framerate were ignored in favor
    // of hard-coded values, and the key constants were misspelled
    // (KEY_frame_RATE / KEY_I_frame_INTERVAL do not exist in the SDK).
    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
    mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, framerate);
    mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
    try {
      mediaCodec = MediaCodec.createEncoderByType("video/avc");
    } catch (IOException e) {
      e.printStackTrace();
    }
    // Configure as an encoder, start it, then open the output file.
    mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mediaCodec.start();
    createfile();
  }

  private static String path = Environment.getExternalStorageDirectory().getAbsolutePath() + "/test1.h264";
  private BufferedOutputStream outputStream;

  // Creates (truncating if present) the raw .h264 output file.
  private void createfile(){
    File file = new File(path);
    if (file.exists()) {
      file.delete();
    }
    try {
      outputStream = new BufferedOutputStream(new FileOutputStream(file));
    } catch (Exception e) {
      e.printStackTrace();
    }
  }

  // Stops and releases the codec; safe to call once after StopThread().
  private void StopEncoder() {
    try {
      mediaCodec.stop();
      mediaCodec.release();
    } catch (Exception e) {
      e.printStackTrace();
    }
  }

  public boolean isRuning = false;

  // Signals the encoder thread to exit, releases the codec and closes the
  // output stream.
  public void StopThread(){
    isRuning = false;
    try {
      StopEncoder();
      outputStream.flush();
      outputStream.close();
    } catch (IOException e) {
      e.printStackTrace();
    }
  }

  int count = 0;

  // Spawns the thread that drains MainActivity.YUVQueue, converts each frame
  // to NV12, feeds it to MediaCodec and writes the H.264 output to file.
  public void StartEncoderThread(){
    Thread EncoderThread = new Thread(new Runnable() {

      @Override
      public void run() {
        isRuning = true;
        long pts = 0;
        long generateIndex = 0;

        while (isRuning) {
          // BUG FIX: the original kept the previous frame in a loop-scoped
          // variable, so once the queue drained it re-encoded the last frame
          // forever instead of sleeping. Poll exactly once per iteration.
          // (Cast guards against the queue being declared raw.)
          byte[] nv21 = (byte[]) MainActivity.YUVQueue.poll();
          if (nv21 == null) {
            try {
              Thread.sleep(500);
            } catch (InterruptedException e) {
              Thread.currentThread().interrupt();
              return;
            }
            continue;
          }
          // Convert the camera's NV21 frame to the NV12 layout expected by
          // COLOR_FormatYUV420SemiPlanar encoders.
          byte[] input = new byte[m_width * m_height * 3 / 2];
          NV21ToNV12(nv21, input, m_width, m_height);
          try {
            ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
            ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
            // -1 blocks until an input buffer is free.
            int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1);
            if (inputBufferIndex >= 0) {
              pts = computePresentationTime(generateIndex);
              ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
              inputBuffer.clear();
              inputBuffer.put(input);
              mediaCodec.queueInputBuffer(inputBufferIndex, 0, input.length, pts, 0);
              generateIndex += 1;
            }

            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
            int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
            while (outputBufferIndex >= 0) {
              ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
              byte[] outData = new byte[bufferInfo.size];
              outputBuffer.get(outData);
              // BUG FIX: flags is a bit mask, so test bits with & instead of
              // ==; the original also referenced a non-existent
              // BUFFER_FLAG_KEY_frame constant (case-mangled).
              if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                // Stash SPS/PPS for prepending to key frames.
                configbyte = outData;
              } else if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0) {
                // Write config + key frame so the stream is decodable from
                // every IDR.
                byte[] keyframe = new byte[bufferInfo.size + configbyte.length];
                System.arraycopy(configbyte, 0, keyframe, 0, configbyte.length);
                System.arraycopy(outData, 0, keyframe, configbyte.length, outData.length);
                outputStream.write(keyframe, 0, keyframe.length);
              } else {
                outputStream.write(outData, 0, outData.length);
              }

              mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
              outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
            }

          } catch (Throwable t) {
            t.printStackTrace();
          }
        }
      }
    });
    EncoderThread.start();

  }

  // Converts NV21 (Y plane, then interleaved V,U) to NV12 (Y plane, then
  // interleaved U,V) by swapping each chroma byte pair.
  // BUG FIX: the original's chroma loops were off by one — they read and wrote
  // the last byte of the Y plane and mis-paired U/V — and it copied the Y
  // plane twice (System.arraycopy followed by an identical manual loop).
  private void NV21ToNV12(byte[] nv21, byte[] nv12, int width, int height){
    if (nv21 == null || nv12 == null) return;
    int framesize = width * height;
    // Y plane is identical in both layouts.
    System.arraycopy(nv21, 0, nv12, 0, framesize);
    // Chroma plane: swap V,U pairs into U,V order.
    for (int j = framesize; j < framesize * 3 / 2; j += 2) {
      nv12[j] = nv21[j + 1];     // U
      nv12[j + 1] = nv21[j];     // V
    }
  }


  // Monotonic presentation timestamp in microseconds for a frame index.
  private long computePresentationTime(long frameIndex) {
    return 132 + frameIndex * 1000000 / m_framerate;
  }
}

3.activity_main.xml

(注:原文的 XML 在转载时被过滤掉了,以下为按代码中 R.id.surfaceview 重建的最小布局 — reconstructed, the scraper stripped the original markup)

<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent">

    <SurfaceView
        android:id="@+id/surfaceview"
        android:layout_width="match_parent"
        android:layout_height="match_parent" />

</RelativeLayout>


4.添加权限(在 AndroidManifest.xml 中;原文的 XML 被过滤掉了,代码使用了相机并写外部存储,因此至少需要:)

<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />


以上就是本文的全部内容,希望对大家的学习有所帮助,也希望大家多多支持考高分网。

转载请注明:文章转载自 www.mshxw.com
本文地址:https://www.mshxw.com/it/154741.html
我们一直用心在做
关于我们 文章归档 网站地图 联系我们

版权所有 (c)2021-2022 MSHXW.COM

ICP备案号:晋ICP备2021003244-6号