Streaming from Android camera2 to an RTMP Server with FFmpeg
Permissions
Last time we solved the stutter that showed up when previewing camera2 frames through an ImageReader. This time we keep moving: the next step is implementing the push to the RTMP server.
Since we are pushing over the network, the app obviously needs the network permission, so our AndroidManifest.xml is:
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.android.qs.video_push">
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.INTERNET"/>
<application
android:allowBackup="true"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/Theme.Video_push">
<activity android:name=".MainActivity" android:exported="true">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>
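One thing the manifest alone does not cover: on Android 6.0+, CAMERA is a "dangerous" permission and must also be granted at runtime (INTERNET is a normal permission and is granted at install time). A minimal sketch of the runtime request, assuming it lives in MainActivity; the REQ_CAMERA request code is this sketch's own constant:
private static final int REQ_CAMERA = 100; // hypothetical request code for this sketch
private void ensureCameraPermission() {
    if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
            != PackageManager.PERMISSION_GRANTED) {
        // Ask the user; the answer arrives in onRequestPermissionsResult()
        ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.CAMERA}, REQ_CAMERA);
    } else {
        initCamera2();
    }
}
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
    super.onRequestPermissionsResult(requestCode, permissions, grantResults);
    if (requestCode == REQ_CAMERA && grantResults.length > 0
            && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
        initCamera2();
    }
}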
Next, the streaming implementation itself.
At the point where the camera is opened, we first initialize the connection from our Android device to the RTMP server:
public void initCamera2(){
CameraManager mCameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
//Check the camera permission
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
Log.i("Camera permission not granted","");
return;
}
HandlerThread handlerThread = new HandlerThread("Camera2");
handlerThread.start();
childHandler = new Handler(handlerThread.getLooper());
mainHandler = new Handler(getMainLooper());
//Front camera, for selfies. (Confusingly, the ID string is built from CameraCharacteristics.LENS_FACING_BACK, whose numeric value 1 happens to be the front camera's ID on many devices; querying CameraManager#getCameraIdList() would be more robust.)
String mCameraID = "" + CameraCharacteristics.LENS_FACING_BACK;
Log.i("Camera id",mCameraID);
try{
mCameraManager.openCamera(mCameraID,stateCallback,mainHandler);
//Initialize the pusher with the RTMP endpoint
FFmpegHandler.getInstance().init("rtmp://192.168.43.170:82/app1/name1");
}
catch (CameraAccessException e) {
Log.i("摄像机获取","摄像机获取失败:"+e.getMessage());
e.printStackTrace();
}
}
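One caveat: init() performs blocking network I/O (avio_open does not return until the RTMP connection is up), so calling it on the UI thread can freeze the app when the server is slow or unreachable. A hedged sketch of deferring it to a background thread and checking the return value (negative on failure, as the native code below shows); reusing childHandler here is just this sketch's choice:
childHandler.post(new Runnable() {
    @Override
    public void run() {
        // The blocking RTMP handshake happens here, safely off the UI thread
        int rc = FFmpegHandler.getInstance().init("rtmp://192.168.43.170:82/app1/name1");
        if (rc < 0) {
            Log.e("FFmpegHandler", "init failed (" + rc + ") - check the URL and the INTERNET permission");
        }
    }
});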
Then, inside the onImageAvailable callback, we can start pushing frames:
private void takePreview() {
try {
// Create the CaptureRequest.Builder needed for the preview
final CaptureRequest.Builder previewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
// Add the SurfaceView's surface as a target of the CaptureRequest.Builder (preview target 1: shown on screen)
previewRequestBuilder.addTarget(mSurfaceHolder.getSurface());
//mImageReader = ImageReader.newInstance(1080, 1920, ImageFormat.JPEG,1); this doesn't crash, but warns `W/ImageReader_JNI: Unable to acquire a buffer item, very likely client tried to acquire more than maxImages buffers`
//mImageReader = ImageReader.newInstance(640, 480, ImageFormat.JPEG,1); this seems to behave the same
mImageReader = ImageReader.newInstance(640, 480,ImageFormat.YUV_420_888, 1);
//Add a second surface (hidden, never displayed) - preview target 2 - used for streaming
Surface imageSurface = mImageReader.getSurface();
//Preview frames are delivered to this hidden surface as well
previewRequestBuilder.addTarget(imageSurface);
mImageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() { //frames can be processed here, e.g. written to local storage
@Override
public void onImageAvailable(ImageReader reader) {
//Consume the image: if we don't, the buffers overflow and the preview stalls, which can then trigger CameraDevice.StateCallback#onError with error 4 (and occasionally 3),
//i.e. ERROR_CAMERA_DEVICE or ERROR_CAMERA_DISABLED
Image image = reader.acquireLatestImage();
//We can turn this frame into a byte array, much like the preview frames from Camera1's PreviewCallback
if (image == null) {
return;
}
final Image.Plane[] planes = image.getPlanes();
//Effective data width. In general, image width <= rowStride, which is also why byte[].length <= capacity,
//so we only take the width part
int width = image.getWidth();
int height = image.getHeight();
//Holds the final YUV data; it needs 1.5x the pixel count, since the Y:U:V byte counts are 4:1:1
byte[] yBytes = new byte[width * height];
//Current write position in the destination array
int dstIndex = 0;
//Temporary storage for the U and V data
byte[] uBytes = new byte[width * height / 4];
byte[] vBytes = new byte[width * height / 4];
int uIndex = 0;
int vIndex = 0;
int pixelsStride, rowStride;
for (int i = 0; i < planes.length; i++) {
pixelsStride = planes[i].getPixelStride();
rowStride = planes[i].getRowStride();
ByteBuffer buffer = planes[i].getBuffer();
//With pixelsStride==2, the Y buffer length is typically 640*480 and the U/V buffer lengths 640*480/2-1
//Source read index. The Y data is contiguous in the byte array; the U plane is the V plane shifted left by one byte, and in both only the even positions hold valid data
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
int srcIndex = 0;
if (i == 0) {
//Copy out the whole valid Y region directly; it could also be stored in a temporary array and copied in the next step
for (int j = 0; j < height; j++) {
System.arraycopy(bytes, srcIndex, yBytes, dstIndex, width);
srcIndex += rowStride;
dstIndex += width;
}
} else if (i == 1) {
//Pick the samples according to pixelsStride
for (int j = 0; j < height / 2; j++) {
for (int k = 0; k < width / 2; k++) {
uBytes[uIndex++] = bytes[srcIndex];
srcIndex += pixelsStride;
}
if (pixelsStride == 2) {
srcIndex += rowStride - width;
} else if (pixelsStride == 1) {
srcIndex += rowStride - width / 2;
}
}
} else if (i == 2) {
//Pick the samples according to pixelsStride
for (int j = 0; j < height / 2; j++) {
for (int k = 0; k < width / 2; k++) {
vBytes[vIndex++] = bytes[srcIndex];
srcIndex += pixelsStride;
}
if (pixelsStride == 2) {
srcIndex += rowStride - width;
} else if (pixelsStride == 1) {
srcIndex += rowStride - width / 2;
}
}
}
}
FFmpegHandler.getInstance().pushCameraData(yBytes, yBytes.length, uBytes, uBytes.length, vBytes, vBytes.length);
image.close();
}
}, mainHandler);
// Create the CameraCaptureSession, which manages preview and capture requests (this overload was deprecated in API 30 / Android 11; its replacement was added in API 28 / Android 9)
mCameraDevice.createCaptureSession(Arrays.asList(mSurfaceHolder.getSurface(), mImageReader.getSurface()), new CameraCaptureSession.StateCallback()
{
@Override
public void onConfigured(CameraCaptureSession cameraCaptureSession) {
if (null == mCameraDevice) {
return;
}
// The camera is ready; start showing the preview
mCameraCaptureSession = cameraCaptureSession;
try {
// Continuous autofocus
previewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
// Auto-exposure with automatic flash
previewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
// Show the preview
CaptureRequest previewRequest = previewRequestBuilder.build();
mCameraCaptureSession.setRepeatingRequest(previewRequest, null, childHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession cameraCaptureSession) {
Toast.makeText(MainActivity.this, "Session configuration failed", Toast.LENGTH_SHORT).show();
}
}, childHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
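As the comment above notes, this createCaptureSession(List&lt;Surface&gt;, ...) overload is deprecated as of API 30. For reference, a hedged sketch of the API 28+ SessionConfiguration replacement, assuming the same two surfaces and the same state callback (given the hypothetical name sessionStateCallback here) are reused:
List<OutputConfiguration> outputs = Arrays.asList(
        new OutputConfiguration(mSurfaceHolder.getSurface()),
        new OutputConfiguration(mImageReader.getSurface()));
SessionConfiguration config = new SessionConfiguration(
        SessionConfiguration.SESSION_REGULAR,
        outputs,
        command -> childHandler.post(command), // Executor backed by the existing handler thread
        sessionStateCallback);                 // the CameraCaptureSession.StateCallback from above
mCameraDevice.createCaptureSession(config);    // API 28+, throws CameraAccessException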
The implementation of the streaming class:
FFmpegHandler.java:
package com.android.qs.video_push;
/**
* Created by zhaozilong on 2022/3/30.
*/
public class FFmpegHandler {
private FFmpegHandler() {
}
private static class SingletonInstance {
private static final FFmpegHandler INSTANCE = new FFmpegHandler();
}
public static FFmpegHandler getInstance() {
return SingletonInstance.INSTANCE;
}
static {
System.loadLibrary("ffmpeg-handler");
}
//Initialize with the output URL
public native int init(String outUrl);
//Push one frame, passing the Y, U and V planes separately
public native int pushCameraData(byte[] buffer,int ylen,byte[] ubuffer,int ulen,byte[] vbuffer,int vlen);
//Finish and release
public native int close();
}
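For reference, the intended call order is: init() once before streaming, pushCameraData() once per frame, close() once at the end. A minimal usage sketch under those assumptions:
FFmpegHandler handler = FFmpegHandler.getInstance();
if (handler.init("rtmp://192.168.43.170:82/app1/name1") == 0) {
    // for each converted camera frame:
    // handler.pushCameraData(yBytes, yBytes.length, uBytes, uBytes.length, vBytes, vBytes.length);
}
// when streaming ends:
handler.close();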
The C-side implementation:
ffmpeg-handler.c:
#include <jni.h>
#include<android/log.h>
#define LOG_TAG "ffmpeg-c"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
#define LOGD(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavfilter/avfilter.h>
#include <libswscale/swscale.h>
#include "libavutil/time.h"
#include "libavutil/imgutils.h"
AVFormatContext *ofmt_ctx;
AVStream *video_st;
AVCodecContext *pCodecCtx;
AVCodec *pCodec;
AVPacket enc_pkt;
AVFrame *pFrameYUV;
int count = 0;
int yuv_width;
int yuv_height;
int y_length;
int uv_length;
int width = 640;
int height = 480;
int fps = 15;
/*
 * Class: com_android_qs_video_push_FFmpegHandler
 * Method: init - set up everything FFmpeg needs and get ready to push
 * Signature: (Ljava/lang/String;)I - takes the RTMP server address
 */
JNIEXPORT jint JNICALL Java_com_android_qs_video_1push_FFmpegHandler_init
(JNIEnv *jniEnv, jobject instance, jstring url) {
const char *out_url = (*jniEnv)->GetStringUTFChars(jniEnv, url, 0);
//Compute the YUV plane lengths
yuv_width = width;
yuv_height = height;
y_length = width * height;
uv_length = width * height / 4;
//output initialize
int ret = avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_url);
if (ret < 0) {
LOGE("avformat_alloc_output_context2 error");
}
LOGE("正在初始化53");
//output encoder initialize
pCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
if (!pCodec) {
LOGE("Can not find encoder!\n");
return -1;
}
pCodecCtx = avcodec_alloc_context3(pCodec);
//Codec ID; here the H.264 encoder (it could also be taken from video_st's codec ID)
pCodecCtx->codec_id = pCodec->id;
//Pixel format, i.e. the color space used to represent each pixel
pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
//Type of data this encoder handles
pCodecCtx->codec_type = AVMEDIA_TYPE_VIDEO;
//Target video frame size, in pixels
pCodecCtx->width = width;
pCodecCtx->height = height;
pCodecCtx->framerate = (AVRational) {15, 1};
//Time base, expressed as a fraction: the unit in which timestamps are counted
pCodecCtx->time_base = (AVRational) {1, 15};
//Target bitrate; obviously, the higher the bitrate, the larger the video
pCodecCtx->bit_rate = 400000;
pCodecCtx->gop_size = 50;
/* Some formats want stream headers to be separate. */
if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
pCodecCtx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
//H264 codec param
// pCodecCtx->me_range = 16;
//pCodecCtx->max_qdiff = 4;
pCodecCtx->qcompress = 0.6;
//Minimum and maximum quantizers
pCodecCtx->qmin = 10;
pCodecCtx->qmax = 51;
//Optional Param
//How many B-frames may appear between two non-B frames;
//0 disables B-frames; more B-frames give a smaller output
pCodecCtx->max_b_frames = 0;
AVDictionary *param = 0;
//H.264
if (pCodecCtx->codec_id == AV_CODEC_ID_H264) {
av_dict_set(&param, "preset", "superfast", 0); //x264 encoding speed preset
av_dict_set(&param, "tune", "zerolatency", 0);
}
if (avcodec_open2(pCodecCtx, pCodec, &param) < 0) {
LOGE("Failed to open encoder!\n");
return -1;
}
LOGE("正在初始化106");
//Add a new stream to output,should be called by the user before avformat_write_header() for muxing
video_st = avformat_new_stream(ofmt_ctx, pCodec);
if (video_st == NULL) {
return -1;
}
//Note: for FLV/RTMP the muxer overrides this to 1/1000 inside avformat_write_header()
video_st->time_base = (AVRational) {25, 1};
video_st->codecpar->codec_tag = 0;
avcodec_parameters_from_context(video_st->codecpar, pCodecCtx);
LOGE("正在初始化115");
int err = avio_open(&ofmt_ctx->pb, out_url, AVIO_FLAG_READ_WRITE);
if (err < 0) {
//If this is < 0, one likely cause is a missing <uses-permission android:name="android.permission.INTERNET"/> in the manifest
LOGE("Failed to open output err:%d",err);
LOGE("Failed to open output:%s", av_err2str(err));
return -1;
}
LOGE("正在初始化121");
//Write File Header
avformat_write_header(ofmt_ctx, NULL);
av_init_packet(&enc_pkt);
LOGE("正在初始化125 完成");
return 0;
}
JNIEXPORT jint JNICALL Java_com_android_qs_video_1push_FFmpegHandler_pushCameraData
(JNIEnv *jniEnv, jobject instance, jbyteArray yArray, jint yLen, jbyteArray uArray, jint uLen,
jbyteArray vArray, jint vLen) {
jbyte *yin = (*jniEnv)->GetByteArrayElements(jniEnv, yArray, NULL);
jbyte *uin = (*jniEnv)->GetByteArrayElements(jniEnv, uArray, NULL);
jbyte *vin = (*jniEnv)->GetByteArrayElements(jniEnv, vArray, NULL);
LOGE("進來了!\n");
int ret = 0;
pFrameYUV = av_frame_alloc();
LOGE("進來了138!\n");
// LOGE("進來了 oodecCtx->width: %d", pCodecCtx->width);
int picture_size = av_image_get_buffer_size(pCodecCtx->pix_fmt, pCodecCtx->width,pCodecCtx->height, 1);
LOGE("進來了139!\n");
uint8_t *buffers = (uint8_t *) av_malloc(picture_size);
LOGE("進來了142!\n");
//将buffers的地址赋给AVFrame中的图像数据,根据像素格式判断有几个数据指针
av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, buffers, pCodecCtx->pix_fmt,
pCodecCtx->width, pCodecCtx->height, 1);
memcpy(pFrameYUV->data[0], yin, (size_t) yLen); //Y
memcpy(pFrameYUV->data[1], uin, (size_t) uLen); //U
memcpy(pFrameYUV->data[2], vin, (size_t) vLen); //V
pFrameYUV->pts = count;
pFrameYUV->format = AV_PIX_FMT_YUV420P;
pFrameYUV->width = yuv_width;
pFrameYUV->height = yuv_height;
LOGE("進來了!2\n");
//例如对于H.264来说。1个AVPacket的data通常对应一个NAL
//初始化AVPacket
enc_pkt.data = NULL;
enc_pkt.size = 0;
// __android_log_print(ANDROID_LOG_WARN, "eric", "time before encode: %lld",
// (long long) ((av_gettime() - startTime) / 1000));
//Send the YUV frame to the encoder
ret = avcodec_send_frame(pCodecCtx, pFrameYUV);
if (ret != 0) {
LOGE("avcodec_send_frame error");
av_frame_free(&pFrameYUV);
av_free(buffers);
(*jniEnv)->ReleaseByteArrayElements(jniEnv, yArray, yin, 0);
(*jniEnv)->ReleaseByteArrayElements(jniEnv, uArray, uin, 0);
(*jniEnv)->ReleaseByteArrayElements(jniEnv, vArray, vin, 0);
return -1;
}
//Fetch the encoded packet; the frame can be freed as soon as it has been sent
ret = avcodec_receive_packet(pCodecCtx, &enc_pkt);
av_frame_free(&pFrameYUV);
if (ret != 0 || enc_pkt.size <= 0) {
//Note: AVERROR(EAGAIN) just means the encoder wants more input before emitting a packet; with "zerolatency" this path is rarely taken
LOGE("avcodec_receive_packet error %s", av_err2str(ret));
av_free(buffers);
(*jniEnv)->ReleaseByteArrayElements(jniEnv, yArray, yin, 0);
(*jniEnv)->ReleaseByteArrayElements(jniEnv, uArray, uin, 0);
(*jniEnv)->ReleaseByteArrayElements(jniEnv, vArray, vin, 0);
return -2;
}
enc_pkt.stream_index = video_st->index;
//Rescale the frame counter into the stream time base; for FLV (1/1000) this is count*1000/15, i.e. about 66 ms per frame
enc_pkt.pts = count * (video_st->time_base.den) / ((video_st->time_base.num) * fps);
enc_pkt.dts = enc_pkt.pts;
enc_pkt.duration = (video_st->time_base.den) / ((video_st->time_base.num) * fps);
enc_pkt.pos = -1;
ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
if (ret != 0) {
LOGE("av_interleaved_write_frame failed");
}
count++;
av_packet_unref(&enc_pkt);
av_free(buffers);
(*jniEnv)->ReleaseByteArrayElements(jniEnv, yArray, yin, 0);
(*jniEnv)->ReleaseByteArrayElements(jniEnv, uArray, uin, 0);
(*jniEnv)->ReleaseByteArrayElements(jniEnv, vArray, vin, 0);
return 0;
}
/**
 * Release resources
 */
JNIEXPORT jint JNICALL Java_com_android_qs_video_1push_FFmpegHandler_close
(JNIEnv *jniEnv, jobject instance) {
if (video_st)
avcodec_close(pCodecCtx);
if (ofmt_ctx) {
//Finalize the stream before closing: av_write_trailer pairs with the avformat_write_header call in init (the original omitted it)
av_write_trailer(ofmt_ctx);
avio_close(ofmt_ctx->pb);
avformat_free_context(ofmt_ctx);
ofmt_ctx = NULL;
}
return 0;
}
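On the Java side it makes sense to tie this release to the Activity lifecycle. A sketch under the assumption that streaming was started from initCamera2 in MainActivity; closing the ImageReader first ensures no pushCameraData call races with close():
@Override
protected void onDestroy() {
    super.onDestroy();
    if (mImageReader != null) {
        mImageReader.close();            // stop onImageAvailable callbacks first
    }
    FFmpegHandler.getInstance().close(); // then finalize the stream and free FFmpeg state
}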
CMakeLists.txt configuration:
#Minimum required cmake version
cmake_minimum_required(VERSION 3.4.1)
#add_library: builds the listed source files into a shared library so they can be called through their headers (the source path below is relative to this CMakeLists.txt)
#Why jniLibs2? Because with com.android.tools.build:gradle:4.2.2 (the classpath entry in the buildscript dependencies of the root build.gradle) the jniLibs directory seems to be claimed by the automatic build,
#so we can no longer occupy that jniLibs directory...
set(JNI_LIBS_DIR ${CMAKE_SOURCE_DIR}/../jniLibs2)
add_library( # Sets the name of the library.
ffmpeg-handler
# Sets the library as a shared library.
SHARED
# Provides a relative path to your source file(s).
ffmpeg-handler.c)
add_library(avutil
SHARED
IMPORTED )
set_target_properties(avutil
PROPERTIES IMPORTED_LOCATION
${JNI_LIBS_DIR}/${ANDROID_ABI}/libavutil-56.so )
add_library(avformat
SHARED
IMPORTED )
set_target_properties(avformat
PROPERTIES IMPORTED_LOCATION
${JNI_LIBS_DIR}/${ANDROID_ABI}/libavformat-58.so )
#How find_library works
#The syntax first:
#find_library (
# <VAR>
# name | NAMES name1 [name2 ...] [NAMES_PER_DIR]
# [HINTS path1 [path2 ... ENV var]]
# [PATHS path1 [path2 ... ENV var]]
# [PATH_SUFFIXES suffix1 [suffix2 ...]]
# [DOC "cache documentation string"]
# [NO_DEFAULT_PATH]
# [NO_CMAKE_ENVIRONMENT_PATH]
# [NO_CMAKE_PATH]
# [NO_SYSTEM_ENVIRONMENT_PATH]
# [NO_CMAKE_SYSTEM_PATH]
# [CMAKE_FIND_ROOT_PATH_BOTH |
# ONLY_CMAKE_FIND_ROOT_PATH |
# NO_CMAKE_FIND_ROOT_PATH]
# )
#It looks like a lot, but it boils down to three parts: the first is the variable that stores the library find_library locates (here, an .so library);
#the second is one or more candidate names of the library to look for;
#the third is any of the optional bracketed parameters listed below name (several can be combined)
#Example: FIND_LIBRARY(RUNTIME_LIB mylib /usr/lib /usr/local/lib NO_DEFAULT_PATH)
#This looks for a library named mylib in /usr/lib and /usr/local/lib and stores the result in RUNTIME_LIB;
#NO_DEFAULT_PATH restricts the search to just those paths - and if the library is not found, RUNTIME_LIB is set to RUNTIME_LIB-NOTFOUND
#The call below, then, locates the NDK log library and stores the result in the log-lib variable
find_library( # Sets the name of the path variable.
log-lib
# Specifies the name of the NDK library that
# you want CMake to locate.
log)
add_library(avcodec
SHARED
IMPORTED )
set_target_properties(avcodec
PROPERTIES IMPORTED_LOCATION
${JNI_LIBS_DIR}/${ANDROID_ABI}/libavcodec-58.so )
add_library(swresample
SHARED
IMPORTED )
set_target_properties(swresample
PROPERTIES IMPORTED_LOCATION
${JNI_LIBS_DIR}/${ANDROID_ABI}/libswresample-3.so )
#Include the FFmpeg headers
include_directories(${JNI_LIBS_DIR}/include)
#Link the library built above against the libraries it depends on; the NDK log library we located earlier is in the log-lib variable, and our library needs it
#The link order here also seems to matter: ffmpeg-handler uses avformat, and when a native-lib target used to come before ffmpeg-handler here, linking failed...
target_link_libraries( # Specifies the target library.
ffmpeg-handler
avutil
avformat
avcodec
swresample
# Links the target library to the log library
# included in the NDK.
${log-lib})
Live demo on a Pixel 4a interacting with VLC player: it works, but the video latency is currently quite large. I'm not sure yet whether that is down to nginx's RTMP module or to the Android push side; it needs further tuning.
Full demo (Android Studio) download: https://www.gaojiupan.cn/demo_enclosure/video_push.rar
Keywords: camera2, Android, rtmp, streaming, ffmpeg