NDK开发汇总
文章目录
一 集成faac：1 下载编译faac生成静态库和.h头文件；2 项目添加libfaac.a和.h头文件，CmakeList:
二 获取音频：AudioChannel、LivePusher
三 音频解码与推送：native-lib、AudioChannel
四 Demo
一 集成faac
1 下载编译faac生成静态库和.h头文件
2 项目添加libfaac.a和.h头文件,CmakeList:
cmake_minimum_required(VERSION 3.4.1)

# Build librtmp from source as a sub-project.
add_subdirectory(src/main/cpp/librtmp)

add_library(native-lib
            SHARED
            src/main/cpp/native-lib.cpp
            src/main/cpp/VideoChannel.cpp
            src/main/cpp/AudioChannel.cpp)

# Headers for faac / x264 / rtmp.
include_directories(src/main/cpp/include)

# Point the linker at the prebuilt static libraries for the current ABI.
set(CMAKE_CXX_FLAGS
    "${CMAKE_CXX_FLAGS} -L${CMAKE_SOURCE_DIR}/src/main/cpp/libs/${ANDROID_ABI}")

target_link_libraries(native-lib
                      rtmp
                      x264
                      faac
                      log)
二 获取音频
AudioChannel
/**
 * Captures PCM from the microphone on a single worker thread and hands each
 * chunk to the native faac encoder via {@link LivePusher#native_pushAudio}.
 */
public class AudioChannel {
    // Bytes of PCM handed to the encoder per read (inputSamples samples * 2 bytes).
    private int inputSamples;
    private ExecutorService executor;
    private AudioRecord audioRecord;
    private LivePusher mLivePusher;
    private int channels = 1;   // 1 = mono capture
    // Fix: volatile — written from the caller's thread (start/stopLive) and
    // read from the recording thread; without it the loop may never see the stop.
    private volatile boolean isLiving;

    public AudioChannel(LivePusher livePusher) {
        mLivePusher = livePusher;
        executor = Executors.newSingleThreadExecutor();
        int channelConfig = (channels == 2)
                ? AudioFormat.CHANNEL_IN_STEREO
                : AudioFormat.CHANNEL_IN_MONO;
        // Tell the native encoder the capture format, then ask it how many
        // samples it wants per encode call (*2: 16-bit PCM samples -> bytes).
        mLivePusher.native_setAudioEncInfo(44100, channels);
        inputSamples = mLivePusher.getInputSamples() * 2;
        int minBufferSize = AudioRecord.getMinBufferSize(44100, channelConfig,
                AudioFormat.ENCODING_PCM_16BIT) * 2;
        audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, 44100,
                channelConfig, AudioFormat.ENCODING_PCM_16BIT,
                Math.max(minBufferSize, inputSamples));
    }

    public void startLive() {
        isLiving = true;
        executor.submit(new AudioTask());
    }

    public void stopLive() {
        isLiving = false;
    }

    public void release() {
        // Fix: also shut down the executor so its worker thread does not leak.
        executor.shutdown();
        audioRecord.release();
    }

    /** Recording loop. Renamed from the original typo "AudioTeask" (class-private, safe). */
    class AudioTask implements Runnable {
        @Override
        public void run() {
            audioRecord.startRecording();
            byte[] bytes = new byte[inputSamples];
            while (isLiving) {
                int len = audioRecord.read(bytes, 0, bytes.length);
                // read() returns a negative error code on failure — skip those.
                if (len > 0) {
                    mLivePusher.native_pushAudio(bytes);
                }
            }
            audioRecord.stop();
        }
    }
}
LivePusher
/**
 * Facade over the native RTMP pusher: owns one video and one audio channel
 * and forwards lifecycle calls (start/stop/release) to both plus the JNI layer.
 */
public class LivePusher {

    private AudioChannel audioChannel;
    private VideoChannel videoChannel;

    public LivePusher(Activity activity, int width, int height, int bitrate,
                      int fps, int cameraId) {
        // Native side must be initialised before either channel registers itself.
        native_init();
        videoChannel = new VideoChannel(this, activity, width, height, bitrate, fps, cameraId);
        audioChannel = new AudioChannel(this);
    }

    public void setPreviewDisplay(SurfaceHolder surfaceHolder) {
        videoChannel.setPreviewDisplay(surfaceHolder);
    }

    public void switchCamera() {
        videoChannel.switchCamera();
    }

    public void startLive(String path) {
        // Connect first, then let both channels begin feeding packets.
        native_start(path);
        videoChannel.startLive();
        audioChannel.startLive();
    }

    public void stopLive() {
        videoChannel.stopLive();
        audioChannel.stopLive();
        native_stop();
    }

    public void release() {
        videoChannel.release();
        audioChannel.release();
        native_release();
    }

    public native void native_init();

    public native void native_start(String path);

    public native void native_setVideoEncInfo(int width, int height, int fps, int bitrate);

    public native void native_setAudioEncInfo(int sampleRateInHz, int channelConfig);

    public native void native_pushVideo(byte[] data);

    public native void native_pushAudio(byte[] data);

    public native int getInputSamples();

    public native void native_stop();

    public native void native_release();
}
三 音频解码与推送
native-lib
#include <jni.h>
#include <string>
#include "safe_queue.h"
#include "librtmp/rtmp.h"
#include "VideoChannel.h"
#include "AudioChannel.h"
#include "macro.h"
// --- shared state of the push session -------------------------------------
SafeQueue<RTMPPacket *> packets;   // encoded packets waiting to be sent
VideoChannel *videoChannel = 0;
AudioChannel *audioChannel = 0;
int isStart = 0;                   // 1 while the push thread exists
pthread_t pid;                     // push thread handle
int readyPushing = 0;              // 1 once connected and streaming
uint32_t start_time;               // RTMP_GetTime() at stream start
// Free an RTMPPacket (body + struct) and null the caller's pointer.
void releasePackets(RTMPPacket *&packet) {
    if (!packet) {
        return;
    }
    RTMPPacket_Free(packet);
    delete packet;
    packet = 0;
}
// Encoder callback: stamp the packet relative to stream start and queue it.
void callback(RTMPPacket *packet) {
    if (!packet) {
        return;
    }
    packet->m_nTimeStamp = RTMP_GetTime() - start_time;
    packets.push(packet);
}
extern "C" JNIEXPORT
void JNICALL
Java_com_cn_ray_rtmpdump_LivePusher_native_1init(JNIEnv *env, jobject instance) {
    // Create both channels and route their encoded packets into the send queue.
    videoChannel = new VideoChannel;
    videoChannel->setVideoCallback(callback);
    audioChannel = new AudioChannel;
    audioChannel->setAudioCallback(callback);
    // The queue frees any packet it has to drop.
    packets.setReleaseCallback(releasePackets);
}
extern "C" JNIEXPORT
void JNICALL
Java_com_cn_ray_rtmpdump_LivePusher_native_1setVideoEncInfo(JNIEnv *env, jobject instance,
                                                            jint width, jint height,
                                                            jint fps, jint bitrate) {
    // Forward the capture geometry to the x264 encoder; no-op before init.
    if (!videoChannel) {
        return;
    }
    videoChannel->setVideoEncInfo(width, height, fps, bitrate);
}
// Push thread entry point.  `args` is a heap copy of the RTMP url allocated
// with new char[] in native_start; this thread owns and releases it.
// Connects to the server, then drains `packets` until stopped or a send fails.
void *start(void *args) {
    char *url = static_cast<char *>(args);
    RTMP *rtmp = 0;
    do {
        rtmp = RTMP_Alloc();
        if (!rtmp) {
            LOGE("alloc rtmp失败");
            break;
        }
        RTMP_Init(rtmp);
        int ret = RTMP_SetupURL(rtmp, url);
        if (!ret) {
            LOGE("设置地址失败:%s", url);
            break;
        }
        rtmp->Link.timeout = 5;   // connect timeout, seconds
        RTMP_EnableWrite(rtmp);   // we publish rather than play
        ret = RTMP_Connect(rtmp, 0);
        if (!ret) {
            LOGE("连接服务器:%s", url);
            break;
        }
        ret = RTMP_ConnectStream(rtmp, 0);
        if (!ret) {
            LOGE("连接流:%s", url);
            break;
        }
        start_time = RTMP_GetTime();   // all packet timestamps are relative to this
        readyPushing = 1;
        packets.setWork(1);
        // The AAC sequence header must be the first audio packet on the stream.
        callback(audioChannel->getAudioTag());
        RTMPPacket *packet = 0;
        while (readyPushing) {
            packets.pop(packet);   // blocks; setWork(0) wakes it up
            if (!readyPushing) {
                break;
            }
            if (!packet) {
                continue;
            }
            packet->m_nInfoField2 = rtmp->m_stream_id;
            ret = RTMP_SendPacket(rtmp, packet, 1);   // 1 = use librtmp's queue
            releasePackets(packet);
            if (!ret) {
                LOGE("发送失败");
                break;
            }
        }
        releasePackets(packet);
    } while (0);
    isStart = 0;
    readyPushing = 0;
    packets.setWork(0);
    packets.clear();
    if (rtmp) {
        RTMP_Close(rtmp);
        RTMP_Free(rtmp);
    }
    // BUG FIX: url was allocated with new char[], so it must be freed with
    // delete[] — the original `delete (url)` is undefined behavior.
    delete[] url;
    return 0;
}
extern "C"
JNIEXPORT
void JNICALL
Java_com_cn_ray_rtmpdump_LivePusher_native_1start(JNIEnv *env, jobject instance,
                                                  jstring path_) {
    // Ignore repeated starts while the push thread is alive.
    if (isStart) {
        return;
    }
    isStart = 1;
    // Copy the url onto the heap: the JNI string is released below, and the
    // push thread frees the copy when it exits.
    const char *path = env->GetStringUTFChars(path_, 0);
    char *url = new char[strlen(path) + 1];
    strcpy(url, path);
    pthread_create(&pid, 0, start, url);
    env->ReleaseStringUTFChars(path_, path);
}
extern "C"
JNIEXPORT
void JNICALL
Java_com_cn_ray_rtmpdump_LivePusher_native_1pushVideo(JNIEnv *env, jobject instance,
                                                      jbyteArray data_) {
    // Drop frames until the RTMP connection is established.
    if (!videoChannel || !readyPushing) {
        return;
    }
    jbyte *data = env->GetByteArrayElements(data_, NULL);
    videoChannel->encodeData(data);
    env->ReleaseByteArrayElements(data_, data, 0);
}
extern "C"
JNIEXPORT
void JNICALL
Java_com_cn_ray_rtmpdump_LivePusher_native_1stop(JNIEnv *env, jobject instance) {
    readyPushing = 0;
    packets.setWork(0);     // wakes the blocking pop() in the push thread
    pthread_join(pid, 0);   // wait until the push thread has fully exited
}
extern "C"
JNIEXPORT
void JNICALL
Java_com_cn_ray_rtmpdump_LivePusher_native_1release(JNIEnv *env, jobject instance) {
    // DELETE (macro.h) presumably deletes and nulls the pointer — verify there.
    DELETE(videoChannel);
    DELETE(audioChannel);
}
extern "C"
JNIEXPORT
void JNICALL
Java_com_cn_ray_rtmpdump_LivePusher_native_1setAudioEncInfo(JNIEnv *env, jobject instance,
                                                            jint sampleRateInHz,
                                                            jint channelConfig) {
    // Forward the capture format to the faac encoder; no-op before init.
    if (!audioChannel) {
        return;
    }
    audioChannel->setAudioEncInfo(sampleRateInHz, channelConfig);
}
extern "C"
JNIEXPORT jint JNICALL
Java_com_cn_ray_rtmpdump_LivePusher_getInputSamples(JNIEnv *env, jobject instance) {
    // BUG FIX: the original called getInputSamples() but discarded the result
    // and always returned -1, so the Java side sized its PCM buffer wrong.
    if (audioChannel) {
        return audioChannel->getInputSamples();
    }
    return -1;   // not initialised yet
}
extern "C"
JNIEXPORT
void JNICALL
Java_com_cn_ray_rtmpdump_LivePusher_native_1pushAudio(JNIEnv *env, jobject instance,
                                                      jbyteArray data_) {
    // Drop audio until the RTMP connection is established.
    if (!audioChannel || !readyPushing) {
        return;
    }
    jbyte *data = env->GetByteArrayElements(data_, NULL);
    audioChannel->encodeData(data);
    env->ReleaseByteArrayElements(data_, data, 0);
}
AudioChannel
#include <cstdlib>
#include <cstring>

#include "AudioChannel.h"
#include "macro.h"
// Nothing to initialise here: the encoder handle and output buffer are
// created lazily in setAudioEncInfo() once the capture format is known.
AudioChannel
::AudioChannel() {
}
// Release the faac encoder and the AAC output buffer.
AudioChannel::~AudioChannel() {
    // FIX: buffer is allocated with new[] in setAudioEncInfo, so it must be
    // released with delete[]; the DELETE() macro (macro.h) presumably does a
    // scalar delete, which is undefined behavior for array allocations.
    if (buffer) {
        delete[] buffer;
        buffer = 0;
    }
    if (audioCodec) {
        faacEncClose(audioCodec);
        audioCodec = 0;
    }
}
// Register the function that receives each encoded RTMP audio packet.
void AudioChannel::setAudioCallback(AudioCallback audioCallback) {
    this->audioCallback = audioCallback;
}
void AudioChannel
::setAudioEncInfo(int samplesInHZ
, int channels
) {
mChannels
= channels
;
audioCodec
= faacEncOpen(samplesInHZ
, channels
, &inputSamples
, &maxOutputBytes
);
faacEncConfigurationPtr config
= faacEncGetCurrentConfiguration(audioCodec
);
config
->mpegVersion
= MPEG4
;
config
->aacObjectType
= LOW
;
config
->inputFormat
= FAAC_INPUT_16BIT
;
config
->outputFormat
= 0;
faacEncSetConfiguration(audioCodec
, config
);
buffer
= new u_char
[maxOutputBytes
];
}
// Number of 16-bit samples faac expects per encode call (set by faacEncOpen).
int AudioChannel::getInputSamples() {
    return inputSamples;
}
// Build the RTMP "AAC sequence header" packet (AudioSpecificConfig) that
// must precede all raw AAC frames on the stream.  Caller owns the packet.
RTMPPacket *AudioChannel::getAudioTag() {
    u_char *buf;
    u_long len;
    // faac allocates the config blob into buf; the caller must free it.
    faacEncGetDecoderSpecificInfo(audioCodec, &buf, &len);
    int bodySize = 2 + len;
    RTMPPacket *packet = new RTMPPacket;
    RTMPPacket_Alloc(packet, bodySize);
    // FLV AudioTagHeader: 0xAF = AAC/44kHz/16-bit/stereo, 0xAE for mono.
    packet->m_body[0] = 0xAF;
    if (mChannels == 1) {
        packet->m_body[0] = 0xAE;
    }
    packet->m_body[1] = 0x00;   // 0x00 = sequence header (0x01 = raw frame)
    memcpy(&packet->m_body[2], buf, len);
    // FIX: free the faac-allocated blob — the original leaked it every call.
    free(buf);
    packet->m_hasAbsTimestamp = 0;
    packet->m_nBodySize = bodySize;
    packet->m_packetType = RTMP_PACKET_TYPE_AUDIO;
    packet->m_nChannel = 0x11;
    packet->m_headerType = RTMP_PACKET_SIZE_LARGE;
    return packet;
}
// Encode one chunk of PCM (inputSamples 16-bit samples) and, when faac emits
// a complete AAC frame, wrap it in an RTMP audio packet for the callback.
void AudioChannel::encodeData(int8_t *data) {
    int bytelen = faacEncEncode(audioCodec, reinterpret_cast<int32_t *>(data),
                                inputSamples, buffer, maxOutputBytes);
    if (bytelen <= 0) {
        return;   // encoder is still buffering samples; no frame yet
    }
    int bodySize = 2 + bytelen;
    RTMPPacket *packet = new RTMPPacket;
    RTMPPacket_Alloc(packet, bodySize);
    // FLV AudioTagHeader: 0xAF = AAC/44kHz/16-bit/stereo, 0xAE for mono.
    packet->m_body[0] = (mChannels == 1) ? 0xAE : 0xAF;
    packet->m_body[1] = 0x01;   // 0x01 = raw AAC frame (0x00 = sequence header)
    memcpy(&packet->m_body[2], buffer, bytelen);
    packet->m_hasAbsTimestamp = 0;
    packet->m_nBodySize = bodySize;
    packet->m_packetType = RTMP_PACKET_TYPE_AUDIO;
    packet->m_nChannel = 0x11;
    packet->m_headerType = RTMP_PACKET_SIZE_LARGE;
    audioCallback(packet);
}
四 Demo
RTMPDump