您当前的位置: 首页 > 学无止境 > 心得笔记 网站首页心得笔记
6. 使用NDK通过GLSurfaceView双缓冲空间复制完成视频的播放~1
发布时间:2021-06-03 21:44:34编辑:雪饮阅读()
本篇文章呢我個人認爲能值1000人民幣。怎麽說呢,花了好久時間整理的。
這裏從c語言中處理之後的視頻數據回送到安卓中進行播放的時候還涉及到了安卓原生庫。 同樣的app/build.gradle中要處理庫so文件重複問題:
#include <string>
#include <iostream>
#include <sys/time.h>

#include <android/log.h>
#include <android/native_window.h>
#include <android/native_window_jni.h>

#define LOGW(...) __android_log_print(ANDROID_LOG_WARN,"testff",__VA_ARGS__)

extern "C"{
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavcodec/jni.h>
#include <libswscale/swscale.h>
#include <libswresample/swresample.h>
}

using namespace std;
// Convert an FFmpeg rational (num/den) to a double.
// Returns 0.0 when either field is zero, which avoids a division by zero
// for streams whose frame rate or time base is unset.
static double r2d(AVRational r)
{
    if (r.num == 0 || r.den == 0)
    {
        return 0.0;
    }
    return (double)r.num / (double)r.den;
}
// Current wall-clock time in milliseconds.
// The seconds value is folded into a ~100-hour window (tv_sec % 360000);
// callers only use this for *relative* intervals (decode-FPS timing), so
// the fold is harmless and keeps the value small.
long long GetNowMs()
{
    struct timeval tv;
    gettimeofday(&tv, NULL);
    // Widen to long long BEFORE multiplying: the original computed
    // `sec*1000` in int, which is fragile if the modulus window grows.
    long long sec = tv.tv_sec % 360000;
    return sec * 1000 + tv.tv_usec / 1000;
}
// Called by the JVM when System.loadLibrary("native-lib") runs.
// Hands the JavaVM pointer to FFmpeg (av_jni_set_java_vm) so the
// "h264_mediacodec" hardware decoder can reach Java's MediaCodec API.
extern "C"
JNIEXPORT
jint JNI_OnLoad(JavaVM *vm,void *res)
{
av_jni_set_java_vm(vm,0);
return JNI_VERSION_1_4;
}
// Smoke-test JNI method: returns "Hello from C++ " followed by the FFmpeg
// build configuration string, proving the FFmpeg libraries are linked.
extern "C"
JNIEXPORT jstring JNICALL
Java_com_example_ndk_1and_141_MainActivity_stringFromJNI(JNIEnv *env, jobject thiz) {
std::string hello = "Hello from C++ ";
hello+=avcodec_configuration();
return env->NewStringUTF(hello.c_str());
}
// Open `url_` with FFmpeg, decode video frames, convert them to RGBA and
// blit them into the Android Surface via ANativeWindow. Audio packets are
// decoded and resampled to S16 stereo but not yet played (PCM is only
// produced into a scratch buffer).
//
// Fixes vs. the original:
//  - new[] buffers released with delete[] (delete on new[] is UB)
//  - hardware decoder lookup now falls back to the software decoder
//  - the JNI string `path` is released on every early-return path
//  - removed unused local (`int p = pkt->pts;`) and the shadowed `re`
extern "C"
JNIEXPORT void JNICALL
Java_com_example_ndk_1and_141_XPlay_Open(JNIEnv *env, jobject instance, jstring url_, jobject surface) {
    const char *path = env->GetStringUTFChars(url_, 0);

    // Register demuxers/decoders and init networking.
    // (Required on FFmpeg < 4.0; harmless no-ops on newer versions.)
    av_register_all();
    avformat_network_init();
    avcodec_register_all();

    // ---- demuxer: open the input ----
    AVFormatContext *ic = NULL;
    int re = avformat_open_input(&ic, path, 0, 0);
    if (re != 0)
    {
        LOGW("avformat_open_input failed!:%s", av_err2str(re));
        env->ReleaseStringUTFChars(url_, path);  // fix: don't leak the JNI string
        return;
    }
    LOGW("avformat_open_input %s success!", path);

    // Probe stream info (some containers need this to fill codec parameters).
    re = avformat_find_stream_info(ic, 0);
    if (re != 0)
    {
        LOGW("avformat_find_stream_info failed!");
    }
    LOGW("duration = %lld nb_streams = %d", ic->duration, ic->nb_streams);

    // ---- locate the video and audio streams ----
    int fps = 0;
    int videoStream = 0;
    int audioStream = 1;
    for (int i = 0; i < ic->nb_streams; i++)
    {
        AVStream *as = ic->streams[i];
        if (as->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            LOGW("视频数据");
            videoStream = i;
            fps = r2d(as->avg_frame_rate);
            LOGW("fps = %d,width=%d height=%d codeid=%d pixformat=%d", fps,
                 as->codecpar->width,
                 as->codecpar->height,
                 as->codecpar->codec_id,
                 as->codecpar->format
            );
        }
        else if (as->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
        {
            LOGW("音频数据");
            audioStream = i;
            LOGW("sample_rate=%d channels=%d sample_format=%d",
                 as->codecpar->sample_rate,
                 as->codecpar->channels,
                 as->codecpar->format
            );
        }
    }
    // Prefer FFmpeg's own heuristic for picking the audio stream index.
    audioStream = av_find_best_stream(ic, AVMEDIA_TYPE_AUDIO, -1, -1, NULL, 0);
    LOGW("av_find_best_stream audioStream = %d", audioStream);

    // ---- video decoder ----
    // Software decoder resolved from the stream's codec id.
    AVCodec *codec = avcodec_find_decoder(ic->streams[videoStream]->codecpar->codec_id);
    // Try the MediaCodec hardware decoder; fall back to the software decoder
    // when unavailable (fix: the original overwrote `codec` unconditionally,
    // which broke playback whenever h264_mediacodec could not be found).
    AVCodec *hwcodec = avcodec_find_decoder_by_name("h264_mediacodec");
    if (hwcodec)
    {
        codec = hwcodec;
    }
    if (!codec)
    {
        LOGW("avcodec_find failed!");
        env->ReleaseStringUTFChars(url_, path);
        return;
    }
    AVCodecContext *vc = avcodec_alloc_context3(codec);
    avcodec_parameters_to_context(vc, ic->streams[videoStream]->codecpar);
    // NOTE(review): FFmpeg decoders are not all thread-safe to open
    // concurrently; set thread_count to 1 if a global open-lock is not used.
    vc->thread_count = 8;
    // NOTE(review): kept from the original for behaviour compatibility, but
    // sample_fmt only has meaning for *audio* codec contexts — this is set
    // on the video context and is almost certainly ignored.
    vc->sample_fmt = AV_SAMPLE_FMT_FLTP;
    re = avcodec_open2(vc, 0, 0);
    LOGW("vc timebase = %d/ %d", vc->time_base.num, vc->time_base.den);
    if (re != 0)
    {
        LOGW("avcodec_open2 video failed!:code:%d", re);
        env->ReleaseStringUTFChars(url_, path);
        return;
    }

    // ---- audio decoder (software only) ----
    AVCodec *acodec = avcodec_find_decoder(ic->streams[audioStream]->codecpar->codec_id);
    if (!acodec)
    {
        LOGW("avcodec_find failed!");
        env->ReleaseStringUTFChars(url_, path);
        return;
    }
    AVCodecContext *ac = avcodec_alloc_context3(acodec);
    avcodec_parameters_to_context(ac, ic->streams[audioStream]->codecpar);
    ac->thread_count = 8;
    re = avcodec_open2(ac, 0, 0);
    if (re != 0)
    {
        LOGW("avcodec_open2 audio failed!");
        env->ReleaseStringUTFChars(url_, path);
        return;
    }

    // ---- decode working buffers ----
    AVPacket *pkt = av_packet_alloc();
    AVFrame *frame = av_frame_alloc();
    long long start = GetNowMs();
    int frameCount = 0;
    SwsContext *vctx = NULL;                // pixel-format converter (lazily created)
    int outWidth = 1280;
    int outHeight = 720;
    char *rgb = new char[1920 * 1080 * 4];  // RGBA scratch, sized for up to 1080p
    char *pcm = new char[48000 * 4 * 2];    // PCM scratch (~1s of 48kHz stereo S16 — TODO confirm sizing)

    // ---- audio resampler: decoder format -> interleaved stereo S16 ----
    SwrContext *actx = swr_alloc();
    actx = swr_alloc_set_opts(actx,
                              av_get_default_channel_layout(2),
                              AV_SAMPLE_FMT_S16, ac->sample_rate,
                              av_get_default_channel_layout(ac->channels),
                              ac->sample_fmt, ac->sample_rate,
                              0, 0);
    re = swr_init(actx);
    if (re != 0)
    {
        LOGW("swr_init failed!");
    }
    else
    {
        LOGW("swr_init success!");
    }

    // ---- display window ----
    ANativeWindow *nwin = ANativeWindow_fromSurface(env, surface);
    if (!nwin)
    {
        LOGW("ANativeWindow_fromSurface create failed");
        env->ReleaseStringUTFChars(url_, path);
        return;
    }
    // Configure the window's buffers: width, height, pixel format. RGBA8888
    // matches the sws_scale output below.
    ANativeWindow_setBuffersGeometry(nwin, outWidth, outHeight, WINDOW_FORMAT_RGBA_8888);
    ANativeWindow_Buffer wbuf;  // receives the locked back-buffer each frame

    // ---- demux / decode / render loop (spins at EOF; never exits) ----
    for (;;)
    {
        // Report the decode FPS once every 3 seconds.
        if (GetNowMs() - start >= 3000)
        {
            LOGW("now decode fps is %d", frameCount / 3);
            start = GetNowMs();
            frameCount = 0;
        }
        re = av_read_frame(ic, pkt);
        if (re != 0)
        {
            LOGW("读取到结尾处!");
            // NOTE(review): the seek-back-to-20s rewind was left disabled in
            // the original; without it this loop busy-spins at end of file.
            // int pos = 20 * r2d(ic->streams[videoStream]->time_base);
            // av_seek_frame(ic,videoStream,pos,AVSEEK_FLAG_BACKWARD|AVSEEK_FLAG_FRAME );
            continue;
        }
        // Route the packet to the matching decoder context.
        AVCodecContext *cc = vc;
        if (pkt->stream_index == audioStream)
            cc = ac;
        re = avcodec_send_packet(cc, pkt);
        // send_packet refs/copies the data, so the packet can be released now.
        av_packet_unref(pkt);
        if (re != 0)
        {
            LOGW("avcodec_send_packet failed!");
            continue;
        }
        // Drain every frame the decoder has ready for this packet.
        for (;;)
        {
            re = avcodec_receive_frame(cc, frame);
            if (re != 0)
            {
                break;
            }
            if (cc == vc)  // ---- video frame ----
            {
                frameCount++;
                // (Re)build the converter only when the source geometry or
                // pixel format changes — sws_getCachedContext caches it.
                vctx = sws_getCachedContext(vctx,
                                            frame->width,
                                            frame->height,
                                            (AVPixelFormat)frame->format,
                                            outWidth,
                                            outHeight,
                                            AV_PIX_FMT_RGBA,
                                            SWS_FAST_BILINEAR,
                                            0, 0, 0
                );
                if (!vctx)
                {
                    LOGW("sws_getCachedContext failed!");
                }
                else
                {
                    uint8_t *data[AV_NUM_DATA_POINTERS] = {0};
                    data[0] = (uint8_t *)rgb;
                    int lines[AV_NUM_DATA_POINTERS] = {0};
                    lines[0] = outWidth * 4;  // 4 bytes per RGBA pixel
                    int h = sws_scale(vctx,
                                      (const uint8_t **)frame->data,
                                      frame->linesize, 0,
                                      frame->height,
                                      data, lines);
                    LOGW("sws_scale = %d", h);
                    if (h > 0)
                    {
                        LOGW("H>0:%d", h);
                        // Lock the window's next writable buffer, copy the
                        // converted RGBA frame into it, then unlock-and-post
                        // to present it on screen.
                        ANativeWindow_lock(nwin, &wbuf, 0);
                        uint8_t *dst = (uint8_t *)wbuf.bits;
                        // NOTE(review): assumes wbuf.stride == outWidth after
                        // setBuffersGeometry — confirm on real devices, else
                        // copy row by row using wbuf.stride.
                        memcpy(dst, rgb, outWidth * outHeight * 4);
                        ANativeWindow_unlockAndPost(nwin);
                    }
                }
            }
            else  // ---- audio frame ----
            {
                uint8_t *out[2] = {0};
                out[0] = (uint8_t *)pcm;
                // Resample to interleaved stereo S16. The PCM is only logged
                // here; audio playback is not implemented yet.
                int len = swr_convert(actx, out,
                                      frame->nb_samples,
                                      (const uint8_t **)frame->data,
                                      frame->nb_samples);
                LOGW("swr_convert = %d", len);
            }
        }
    }
    // Unreachable while the loop above never exits, but kept correct for
    // when a stop condition is added.
    delete [] rgb;  // fix: new[] must pair with delete[]
    delete [] pcm;  // fix: new[] must pair with delete[]
    avformat_close_input(&ic);
    env->ReleaseStringUTFChars(url_, path);
}
# documentation: https://d.android.com/studio/projects/add-native-code.html
# Sets the minimum version of CMake required to build the native library.
cmake_minimum_required(VERSION 3.10.2)
# Declares and names the project.
project("ndk_and_41")
# Creates and names a library, sets it as either STATIC
# or SHARED, and provides the relative paths to its source code.
# You can define multiple libraries, and CMake builds them for you.
# Gradle automatically packages shared libraries with your APK.
# Add the FFmpeg header search path (relative to this file).
include_directories(../../../include)
# FF points at the prebuilt FFmpeg .so directory for the current ABI.
set(FF ${CMAKE_CURRENT_SOURCE_DIR}/../../../libs/${ANDROID_ABI})
# Declare each prebuilt FFmpeg shared library as an IMPORTED target so the
# linker can resolve it without building it.
add_library(avcodec SHARED IMPORTED)
set_target_properties(avcodec PROPERTIES IMPORTED_LOCATION ${FF}/libavcodec.so)
add_library(avformat SHARED IMPORTED)
set_target_properties(avformat PROPERTIES IMPORTED_LOCATION ${FF}/libavformat.so)
add_library(avutil SHARED IMPORTED)
set_target_properties(avutil PROPERTIES IMPORTED_LOCATION ${FF}/libavutil.so)
add_library(swscale SHARED IMPORTED)
set_target_properties(swscale PROPERTIES IMPORTED_LOCATION ${FF}/libswscale.so)
add_library(swresample SHARED IMPORTED)
set_target_properties(swresample PROPERTIES IMPORTED_LOCATION ${FF}/libswresample.so)
add_library( # Sets the name of the library.
native-lib
# Sets the library as a shared library.
SHARED
# Provides a relative path to your source file(s).
native-lib.cpp )
# Searches for a specified prebuilt library and stores the path as a
# variable. Because CMake includes system libraries in the search path by
# default, you only need to specify the name of the public NDK library
# you want to add. CMake verifies that the library exists before
# completing its build.
find_library( # Sets the name of the path variable.
log-lib
# Specifies the name of the NDK library that
# you want CMake to locate.
log )
# Specifies libraries CMake should link to your target library. You
# can link multiple libraries, such as libraries you define in this
# build script, prebuilt third-party libraries, or system libraries.
target_link_libraries( # Specifies the target library.
native-lib
avcodec avformat avutil swscale swresample
# Link the imported FFmpeg libraries declared above.
# Links the target library to the log library
# included in the NDK.
android
${log-lib} )
plugins {
id 'com.android.application'
}
android {
compileSdkVersion 30
buildToolsVersion "30.0.3"
defaultConfig {
applicationId "com.example.ndk_and_41"
minSdkVersion 16
targetSdkVersion 30
versionCode 1
versionName "1.0"
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
externalNativeBuild {
cmake {
cppFlags "-std=c++11"
}
// Build the native library only for the ABIs we ship FFmpeg .so files for.
ndk{
abiFilters "armeabi-v7a"
abiFilters "arm64-v8a"
}
}
// Package the prebuilt FFmpeg .so files from the project's libs/ directory.
sourceSets{
main{
jniLibs.srcDirs=['libs']
}
}
}
// The FFmpeg .so files arrive twice (from jniLibs and from the CMake
// imported targets); pickFirst resolves the duplicate-file packaging error.
packagingOptions {
pickFirst 'lib/armeabi-v7a/libavcodec.so'
pickFirst 'lib/armeabi-v7a/libavutil.so'
pickFirst 'lib/armeabi-v7a/libavformat.so'
pickFirst 'lib/armeabi-v7a/libswscale.so'
pickFirst 'lib/armeabi-v7a/libswresample.so'
pickFirst 'lib/arm64-v8a/libavcodec.so'
pickFirst 'lib/arm64-v8a/libavutil.so'
pickFirst 'lib/arm64-v8a/libavformat.so'
pickFirst 'lib/arm64-v8a/libswscale.so'
pickFirst 'lib/arm64-v8a/libswresample.so'
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
}
// Hook the CMake build that compiles native-lib.cpp into the APK build.
externalNativeBuild {
cmake {
path "src/main/cpp/CMakeLists.txt"
version "3.10.2"
}
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
}
dependencies {
implementation 'androidx.appcompat:appcompat:1.2.0'
implementation 'com.google.android.material:material:1.3.0'
implementation 'androidx.constraintlayout:constraintlayout:2.0.4'
testImplementation 'junit:junit:4.+'
androidTestImplementation 'androidx.test.ext:junit:1.1.2'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.3.0'
}然後MainActivity.java中也整理下吧,因爲我們只播放視頻,之前的沒有用的就先注釋了:
package com.example.ndk_and_41;
import androidx.appcompat.app.AppCompatActivity;
import android.os.Bundle;
import android.widget.TextView;
// Entry activity. It only inflates the layout; video playback itself is
// driven by the XPlay view declared in activity_main.xml.
public class MainActivity extends AppCompatActivity {
// Used to load the 'native-lib' library on application startup.
static {
System.loadLibrary("native-lib");
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
// Example of a call to a native method
//TextView tv = findViewById(R.id.sample_text);
//tv.setText(stringFromJNI());
}
/**
* A native method that is implemented by the 'native-lib' native library,
* which is packaged with this application.
*/
//public native String stringFromJNI();
}對於要在安卓上面播放視頻需要用到surfaceView實現,而這裏我們用其更高性能的子集GLSurfaceView實現。
在MainActivity.java同目錄建立如XPlay.java:
package com.example.ndk_and_41;
import android.content.Context;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;
import android.view.SurfaceHolder;
// GLSurfaceView subclass that hands its Surface to the native FFmpeg
// decoder (XPlay.Open in native-lib.cpp) on a background thread.
// NOTE(review): overriding the SurfaceHolder.Callback methods without
// calling super bypasses GLSurfaceView's own GL render thread — this works
// here only because all drawing is done natively via ANativeWindow.
public class XPlay extends GLSurfaceView implements Runnable,SurfaceHolder.Callback {
public XPlay(Context context, AttributeSet attrs) {
super( context,attrs );
}
@Override
public void run() {
// Decode-thread entry: blocks inside native Open() for the whole playback.
// NOTE(review): the video path is hard-coded; the file must exist and the
// app needs storage-read permission.
Open("/sdcard/1080.mp4",getHolder().getSurface());
}
@Override
public void surfaceCreated(SurfaceHolder var1){
// Start decoding once the Surface exists; never call Open on the UI thread.
new Thread( this ).start();
}
@Override
public void surfaceChanged(SurfaceHolder var1, int var2, int var3, int var4){}
@Override
public void surfaceDestroyed(SurfaceHolder var1){}
public native void Open(String url,Object surface);
}不是還得有播放界面嗎?所以layout/activity_main.xml:
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".MainActivity">
<!--
<TextView
android:id="@+id/sample_text"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="Hello World!"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
app:layout_constraintTop_toTopOf="parent" />
-->
<!--
<com.example.ndk_and_41.XPlay
android:layout_width="match_parent"
android:layout_height="match_parent" />
-->
<!-- Full-screen custom video view; the tag is the fully qualified class
     name of the GLSurfaceView subclass that drives native playback. -->
<com.example.ndk_and_41.XPlay
android:layout_width="match_parent"
android:layout_height="match_parent" />
</androidx.constraintlayout.widget.ConstraintLayout>這個標簽就很另類了,是完整的包名直到這個播放類的類目哈。
那麽最後這裏還要額外提醒下,就是這裏沒有處理多綫程的問題,就是說在多綫程時候調用avcodec_open()或avcodec_open2()时,以及avformat_find_stream_info()也是同样的,需要添加全局綫程安全鎖。所以這裏測試時候要是穩妥起見就把vc->thread_count設置為1,同樣的,更穩妥就是ac->thread_count也設置為1。
不過這裏面貌似夏曹俊老師到這一步也還沒有實現聲音的播放,至少我看視頻中他的雷電模擬器裏面也是沒有聲音只有視頻。
那麽我們這裏這個代碼部署並運行在雷電模擬器上面的效果如:
关键字词:GLSurfaceView
相关文章
-
无相关信息