Android Audio代码分析7 - stream type
在看AudioTrack代码的时候,我们看到,要创建一个AudioTrack对象,需要指定一个StreamType。
今天我们只把stream type相关的代码抽取出来,详细看看stream type相关的东东。
java层的代码就不看了。
从函数android_media_AudioTrack_native_setup开始说起。
static int
android_media_AudioTrack_native_setup(JNIEnv *env, jobject thiz, jobject weak_this,
jint streamType, jint sampleRateInHertz, jint channels,
jint audioFormat, jint buffSizeInBytes, jint memoryMode, jintArray jSession)
{
...
// 获取Frame Count和Sampling Rate的依据都是stream type。
// 其实现方法是通过stream type取得output,然后取得output的描述
// 如果取得成功,则取描述中的frame count,否则通过audio flinger取得output对应的frame count。
if (AudioSystem::getOutputFrameCount(&afFrameCount, streamType) != NO_ERROR) {
LOGE("Error creating AudioTrack: Could not get AudioSystem frame count.");
return AUDIOTRACK_ERROR_SETUP_AUDIOSYSTEM;
}
if (AudioSystem::getOutputSamplingRate(&afSampleRate, streamType) != NO_ERROR) {
LOGE("Error creating AudioTrack: Could not get AudioSystem sampling rate.");
return AUDIOTRACK_ERROR_SETUP_AUDIOSYSTEM;
}
...
// 下面的工作是将java 侧的stream type转换为native 侧的stream type。
// 以后使用的都是转换后的stream type。
// check the stream type
AudioSystem::stream_type atStreamType;
if (streamType == javaAudioTrackFields.STREAM_VOICE_CALL) {
atStreamType = AudioSystem::VOICE_CALL;
} else if (streamType == javaAudioTrackFields.STREAM_SYSTEM) {
atStreamType = AudioSystem::SYSTEM;
} else if (streamType == javaAudioTrackFields.STREAM_RING) {
atStreamType = AudioSystem::RING;
} else if (streamType == javaAudioTrackFields.STREAM_MUSIC) {
atStreamType = AudioSystem::MUSIC;
} else if (streamType == javaAudioTrackFields.STREAM_ALARM) {
atStreamType = AudioSystem::ALARM;
} else if (streamType == javaAudioTrackFields.STREAM_NOTIFICATION) {
atStreamType = AudioSystem::NOTIFICATION;
} else if (streamType == javaAudioTrackFields.STREAM_BLUETOOTH_SCO) {
atStreamType = AudioSystem::BLUETOOTH_SCO;
} else if (streamType == javaAudioTrackFields.STREAM_DTMF) {
atStreamType = AudioSystem::DTMF;
} else {
LOGE("Error creating AudioTrack: unknown stream type.");
return AUDIOTRACK_ERROR_SETUP_INVALIDSTREAMTYPE;
}
...
// 将stream type保存在AudioTrackJniStorage对象中
lpJniStorage->mStreamType = atStreamType;
...
// 调用AudioTrack对象的set函数
// initialize the native AudioTrack object
if (memoryMode == javaAudioTrackFields.MODE_STREAM) {
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
status_t AudioTrack::set(
int streamType,
uint32_t sampleRate,
int format,
int channels,
int frameCount,
uint32_t flags,
callback_t cbf,
void* user,
int notificationFrames,
const sp<IMemory>& sharedBuffer,
bool threadCanCallJava,
int sessionId)
{
...
// 前面已经说过
int afSampleRate;
if (AudioSystem::getOutputSamplingRate(&afSampleRate, streamType) != NO_ERROR) {
return NO_INIT;
}
uint32_t afLatency;
if (AudioSystem::getOutputLatency(&afLatency, streamType) != NO_ERROR) {
return NO_INIT;
}
// stream type如果是DEFAULT,将其设置为MUSIC
// handle default values first.
if (streamType == AudioSystem::DEFAULT) {
streamType = AudioSystem::MUSIC;
}
...
// 获取output
audio_io_handle_t output = AudioSystem::getOutput((AudioSystem::stream_type)streamType,
sampleRate, format, channels, (AudioSystem::output_flags)flags);
if (output == 0) {
LOGE("Could not get audio output for stream type %d", streamType);
return BAD_VALUE;
}
mVolume[LEFT] = 1.0f;
mVolume[RIGHT] = 1.0f;
mSendLevel = 0;
mFrameCount = frameCount;
mNotificationFramesReq = notificationFrames;
mSessionId = sessionId;
mAuxEffectId = 0;
// 创建IAudioTrack对象
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
status_t AudioTrack::createTrack(
int streamType,
uint32_t sampleRate,
int format,
int channelCount,
int frameCount,
uint32_t flags,
const sp<IMemory>& sharedBuffer,
audio_io_handle_t output,
...
补充:移动开发,Android