android.media.AudioTrack#STATE_INITIALIZED Source Code Examples

Listed below are real-world usage examples of android.media.AudioTrack#STATE_INITIALIZED, collected from open-source projects on GitHub.
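Before the project examples, here is a minimal self-contained sketch of the pattern they all share: construct an AudioTrack, then check getState() against AudioTrack.STATE_INITIALIZED before playing, stopping, or releasing it. This sketch is not taken from any of the projects below; the sample rate, channel mask, and encoding are illustrative assumptions, and the legacy stream-type constructor is used only to keep the example short.

import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;

public final class AudioTrackStateDemo {

    /** Creates a streaming AudioTrack, or returns null if native initialization failed. */
    public static AudioTrack createTrackOrNull() {
        // Illustrative parameters; real code derives these from the audio being played.
        final int sampleRateInHz = 44100;
        final int channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
        final int encoding = AudioFormat.ENCODING_PCM_16BIT;

        final int minBufferSizeInBytes =
                AudioTrack.getMinBufferSize(sampleRateInHz, channelConfig, encoding);
        if (minBufferSizeInBytes <= 0) {
            return null; // getMinBufferSize() reported an error for this configuration
        }

        final AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRateInHz,
                channelConfig, encoding, minBufferSizeInBytes, AudioTrack.MODE_STREAM);

        // getState() reports whether the native resources were acquired. Anything other
        // than STATE_INITIALIZED means the track is unusable and should be released.
        if (track.getState() != AudioTrack.STATE_INITIALIZED) {
            track.release();
            return null;
        }
        return track;
    }
}

The examples below follow the same shape: either they abort initialization when the state check fails (releasing the half-constructed track), or they guard play(), pause(), stop(), and release() calls with the same check.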

Example 1  Project: android_9.0.0_r45  File: BlockingAudioTrack.java
private AudioTrack createStreamingAudioTrack() {
    final int channelConfig = getChannelConfig(mChannelCount);

    int minBufferSizeInBytes
            = AudioTrack.getMinBufferSize(mSampleRateInHz, channelConfig, mAudioFormat);
    int bufferSizeInBytes = Math.max(MIN_AUDIO_BUFFER_SIZE, minBufferSizeInBytes);

    AudioFormat audioFormat = (new AudioFormat.Builder())
            .setChannelMask(channelConfig)
            .setEncoding(mAudioFormat)
            .setSampleRate(mSampleRateInHz).build();
    AudioTrack audioTrack = new AudioTrack(mAudioParams.mAudioAttributes,
            audioFormat, bufferSizeInBytes, AudioTrack.MODE_STREAM,
            mAudioParams.mSessionId);

    if (audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
        Log.w(TAG, "Unable to create audio track.");
        audioTrack.release();
        return null;
    }

    mAudioBufferSize = bufferSizeInBytes;

    setupVolume(audioTrack, mAudioParams.mVolume, mAudioParams.mPan);
    return audioTrack;
}
 
Example 2  Project: android-fskmodem  File: MainActivity.java
@Override
protected void onDestroy() {
	
	mDecoder.stop();
	mEncoder.stop();
	
	if (mRecorder != null && mRecorder.getState() == AudioRecord.STATE_INITIALIZED)
	{
		mRecorder.stop();
		mRecorder.release();
	}
	
	if (mAudioTrack != null && mAudioTrack.getState() == AudioTrack.STATE_INITIALIZED)
	{
		mAudioTrack.stop();
		mAudioTrack.release();
	}
	
	super.onDestroy();
}
 
Example 3  Project: Saiy-PS  File: SaiyTextToSpeech.java
/**
 * Helper method to double check the returned {@link SaiyAudioTrack} object hasn't been released
 * elsewhere.
 *
 * @return the {@link SaiyAudioTrack} object, or null if the creation process failed.
 */
private SaiyAudioTrack getAudioTrack() {
    if (audioTrack == null || audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
        audioTrack = SaiyAudioTrack.getSaiyAudioTrack();
        audioTrack.setListener(listener);
        return audioTrack;
    } else {
        return audioTrack;
    }
}
 
Example 4  Project: Saiy-PS  File: SaiyTextToSpeech.java
@Override
public boolean isSpeaking() {

    if (audioTrack != null && audioTrack.getState() == AudioTrack.STATE_INITIALIZED) {
        if (DEBUG) {
            MyLog.i(CLS_NAME, "isSpeaking: audioTrack STATE_INITIALIZED");
        }

        if (audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING
                || audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PAUSED) {
            if (DEBUG) {
                MyLog.i(CLS_NAME, "isSpeaking: audioTrack PLAYSTATE_PLAYING/PLAYSTATE_PAUSED");
            }
            return true;
        } else {
            if (DEBUG) {
                MyLog.i(CLS_NAME, "isSpeaking: audioTrack not playing");
            }
        }
    }

    final boolean speakingSuper = super.isSpeaking();

    if (DEBUG) {
        MyLog.i(CLS_NAME, "isSpeaking: speakingSuper " + speakingSuper);
    }

    return speakingSuper;
}
 
Example 5  Project: HPlayer  File: MediaPlayer.java
private void audioTrackRelease() {
  if (mAudioTrack != null) {
    if (mAudioTrack.getState() == AudioTrack.STATE_INITIALIZED)
      mAudioTrack.stop();
    mAudioTrack.release();
  }
  mAudioTrack = null;
}
 
Example 6  Project: NetEasyNews  File: MediaPlayer.java
private void audioTrackRelease() {
  if (mAudioTrack != null) {
    if (mAudioTrack.getState() == AudioTrack.STATE_INITIALIZED)
      mAudioTrack.stop();
    mAudioTrack.release();
  }
  mAudioTrack = null;
}
 
Example 7  Project: Vitamio  File: MediaPlayer.java
private void audioTrackRelease() {
  if (mAudioTrack != null) {
    if (mAudioTrack.getState() == AudioTrack.STATE_INITIALIZED)
      mAudioTrack.stop();
    mAudioTrack.release();
  }
  mAudioTrack = null;
}
 
Example 8  Project: MediaPlayer-Extended  File: AudioPlayback.java
/**
 * Initializes or reinitializes the audio track with the supplied format for playback
 * while keeping the playstate. Keeps the current configuration and skips reinitialization
 * if the new format is the same as the current format.
 */
public void init(MediaFormat format) {
    Log.d(TAG, "init");

    boolean playing = false;

    if(isInitialized()) {
        if(!checkIfReinitializationRequired(format)) {
            // Set new format that equals the old one (in case we compare references somewhere)
            mAudioFormat = format;
            return;
        }

        playing = isPlaying();
        pause();
        stopAndRelease(false);
    } else {
        // deferred creation of the audio thread until its first use
        mAudioThread = new AudioThread();
        mAudioThread.setPaused(true);
        mAudioThread.start();
    }

    mAudioFormat = format;

    int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
    int bytesPerSample = 2;
    mFrameSize = bytesPerSample * channelCount;
    mSampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);

    int channelConfig = AudioFormat.CHANNEL_OUT_DEFAULT;
    switch(channelCount) {
        case 1:
            channelConfig = AudioFormat.CHANNEL_OUT_MONO;
            break;
        case 2:
            channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
            break;
        case 4:
            channelConfig = AudioFormat.CHANNEL_OUT_QUAD;
            break;
        case 6:
            channelConfig = AudioFormat.CHANNEL_OUT_5POINT1;
            break;
        case 8:
            channelConfig = AudioFormat.CHANNEL_OUT_7POINT1;
    }

    mPlaybackBufferSize = mFrameChunkSize * channelCount;

    mAudioTrack = new AudioTrack(
            mAudioStreamType,
            mSampleRate,
            channelConfig,
            AudioFormat.ENCODING_PCM_16BIT,
            mPlaybackBufferSize, // at least twice the size to enable double buffering (according to docs)
            AudioTrack.MODE_STREAM, mAudioSessionId);

    if(mAudioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
        stopAndRelease();
        throw new IllegalStateException("audio track init failed");
    }

    mAudioSessionId = mAudioTrack.getAudioSessionId();
    mAudioStreamType = mAudioTrack.getStreamType();
    setStereoVolume(mVolumeLeft, mVolumeRight);
    mPresentationTimeOffsetUs = PTS_NOT_SET;

    if(playing) {
        play();
    }
}
 
Example 9  Project: webrtc_android  File: WebRtcAudioTrack.java
@CalledByNative
private boolean initPlayout(int sampleRate, int channels) {
  threadChecker.checkIsOnValidThread();
  Logging.d(TAG, "initPlayout(sampleRate=" + sampleRate + ", channels=" + channels + ")");
  final int bytesPerFrame = channels * (BITS_PER_SAMPLE / 8);
  byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * (sampleRate / BUFFERS_PER_SECOND));
  Logging.d(TAG, "byteBuffer.capacity: " + byteBuffer.capacity());
  emptyBytes = new byte[byteBuffer.capacity()];
  // Rather than passing the ByteBuffer with every callback (requiring
  // the potentially expensive GetDirectBufferAddress) we simply have the
  // native class cache the address of the memory once.
  nativeCacheDirectBufferAddress(nativeAudioTrack, byteBuffer);

  // Get the minimum buffer size required for the successful creation of an
  // AudioTrack object in MODE_STREAM mode.
  // Note that this size doesn't guarantee a smooth playback under load.
  // TODO(henrika): should we extend the buffer size to avoid glitches?
  final int channelConfig = channelCountToConfiguration(channels);
  final int minBufferSizeInBytes =
      AudioTrack.getMinBufferSize(sampleRate, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
  Logging.d(TAG, "AudioTrack.getMinBufferSize: " + minBufferSizeInBytes);
  // For the streaming mode, data must be written to the audio sink in
  // chunks of size (given by byteBuffer.capacity()) less than or equal
  // to the total buffer size |minBufferSizeInBytes|. But, we have seen
  // reports of "getMinBufferSize(): error querying hardware". Hence, it
  // can happen that |minBufferSizeInBytes| contains an invalid value.
  if (minBufferSizeInBytes < byteBuffer.capacity()) {
    reportWebRtcAudioTrackInitError("AudioTrack.getMinBufferSize returns an invalid value.");
    return false;
  }

  // Ensure that the previous audio session was stopped correctly before trying
  // to create a new AudioTrack.
  if (audioTrack != null) {
    reportWebRtcAudioTrackInitError("Conflict with existing AudioTrack.");
    return false;
  }
  try {
    // Create an AudioTrack object and initialize its associated audio buffer.
    // The size of this buffer determines how long an AudioTrack can play
    // before running out of data.
    if (Build.VERSION.SDK_INT >= 21) {
      // If we are on API level 21 or higher, it is possible to use a special AudioTrack
      // constructor that uses AudioAttributes and AudioFormat as input. It allows us to
      // supersede the notion of stream types for defining the behavior of audio playback,
      // and to allow certain platforms or routing policies to use this information for more
      // refined volume or routing decisions.
      audioTrack =
          createAudioTrackOnLollipopOrHigher(sampleRate, channelConfig, minBufferSizeInBytes);
    } else {
      // Use default constructor for API levels below 21.
      audioTrack =
          createAudioTrackOnLowerThanLollipop(sampleRate, channelConfig, minBufferSizeInBytes);
    }
  } catch (IllegalArgumentException e) {
    reportWebRtcAudioTrackInitError(e.getMessage());
    releaseAudioResources();
    return false;
  }

  // It can happen that an AudioTrack is created but it was not successfully
  // initialized upon creation. Seems to be the case e.g. when the maximum
  // number of globally available audio tracks is exceeded.
  if (audioTrack == null || audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
    reportWebRtcAudioTrackInitError("Initialization of audio track failed.");
    releaseAudioResources();
    return false;
  }
  logMainParameters();
  logMainParametersExtended();
  return true;
}
 
Example 10  Project: BambooPlayer  File: MediaPlayer.java
private void audioTrackPause() {
  if (mAudioTrack != null && mAudioTrack.getState() == AudioTrack.STATE_INITIALIZED)
    mAudioTrack.pause();
}
 
Example 11  Project: Vitamio  File: MediaPlayer.java
private void audioTrackPause() {
  if (mAudioTrack != null && mAudioTrack.getState() == AudioTrack.STATE_INITIALIZED)
    mAudioTrack.pause();
}
 
Example 12  Project: MediaPlayer-Extended  File: AudioPlayback.java
public boolean isInitialized() {
    return mAudioTrack != null && mAudioTrack.getState() == AudioTrack.STATE_INITIALIZED;
}
 
Example 13  Project: HPlayer  File: MediaPlayer.java
private void audioTrackStart() {
  if (mAudioTrack != null && mAudioTrack.getState() == AudioTrack.STATE_INITIALIZED && mAudioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING)
    mAudioTrack.play();
}
 
Example 14  Project: HPlayer  File: MediaPlayer.java
private void audioTrackPause() {
  if (mAudioTrack != null && mAudioTrack.getState() == AudioTrack.STATE_INITIALIZED)
    mAudioTrack.pause();
}
 
Example 15  Project: video-player  File: MediaPlayer.java
private void audioTrackStart() {
  if (mAudioTrack != null && mAudioTrack.getState() == AudioTrack.STATE_INITIALIZED && mAudioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING)
    mAudioTrack.play();
}
 
Example 16  Project: video-player  File: MediaPlayer.java
private void audioTrackPause() {
  if (mAudioTrack != null && mAudioTrack.getState() == AudioTrack.STATE_INITIALIZED)
    mAudioTrack.pause();
}
 
Example 17  Project: NetEasyNews  File: MediaPlayer.java
private void audioTrackStart() {
  if (mAudioTrack != null && mAudioTrack.getState() == AudioTrack.STATE_INITIALIZED && mAudioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING)
    mAudioTrack.play();
}
 
Example 18  Project: BambooPlayer  File: MediaPlayer.java
private void audioTrackStart() {
  if (mAudioTrack != null && mAudioTrack.getState() == AudioTrack.STATE_INITIALIZED && mAudioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING)
    mAudioTrack.play();
}
 
Example 19  Project: react-native-android-vitamio  File: MediaPlayer.java
private void audioTrackPause() {
  if (mAudioTrack != null && mAudioTrack.getState() == AudioTrack.STATE_INITIALIZED)
    mAudioTrack.pause();
}