android.media.AudioRecord#ERROR Source Code Examples

The following are example usages of android.media.AudioRecord#ERROR. You can follow the links to view the source on GitHub, or leave comments on the right.
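Before the project examples, here is a minimal sketch of the pattern they all share: AudioRecord.getMinBufferSize() and AudioRecord.read() signal failure through the negative constants AudioRecord.ERROR, ERROR_BAD_VALUE and ERROR_INVALID_OPERATION, so callers check the return value against these constants before using it. The class name, sample rate and buffer handling below are illustrative assumptions, not code taken from the projects that follow.

import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;

public class AudioRecordErrorDemo {

    // Illustrative parameters; real projects choose values to suit their use case.
    private static final int SAMPLE_RATE = 44100;

    // Requires the RECORD_AUDIO permission to be granted at runtime.
    public static AudioRecord createRecorder() {
        int minBufferSize = AudioRecord.getMinBufferSize(
                SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);

        // getMinBufferSize() returns ERROR_BAD_VALUE for unsupported parameters
        // and ERROR when the hardware could not be queried.
        if (minBufferSize == AudioRecord.ERROR_BAD_VALUE || minBufferSize == AudioRecord.ERROR) {
            return null;
        }

        return new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLE_RATE,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, minBufferSize);
    }

    public static int readChunk(AudioRecord recorder, byte[] buffer) {
        int read = recorder.read(buffer, 0, buffer.length);

        // read() reuses the same negative constants to report failures.
        if (read == AudioRecord.ERROR_INVALID_OPERATION
                || read == AudioRecord.ERROR_BAD_VALUE
                || read == AudioRecord.ERROR) {
            return 0;
        }
        return read;
    }
}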

Example 1 Project: echo   File: SaidItService.java
@Override
public int consume(final byte[] array, final int offset, final int count) throws IOException {

    final int bytes = Math.min(readLimit, count);
    //Log.d(TAG, "READING " + bytes + " B");
    final int read = audioRecord.read(array, offset, bytes);
    if (read == AudioRecord.ERROR_BAD_VALUE) {
        Log.e(TAG, "AUDIO RECORD ERROR - BAD VALUE");
        return 0;
    }
    if (read == AudioRecord.ERROR_INVALID_OPERATION) {
        Log.e(TAG, "AUDIO RECORD ERROR - INVALID OPERATION");
        return 0;
    }
    if (read == AudioRecord.ERROR) {
        Log.e(TAG, "AUDIO RECORD ERROR - UNKNOWN ERROR");
        return 0;
    }
    if (wavFileWriter != null && read > 0) {
        wavFileWriter.write(array, offset, read);
    }
    audioHandler.post(audioReader);
    return read;
}
 
Example 2 Project: Alexa-Voice-Service   File: RecorderConstants.java
private int getBufferSize(){
    // getMinBufferSize() returns the minimum buffer size required for audio recording
    int minBufferSizeInBytes = AudioRecord.getMinBufferSize(mSampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
    if (minBufferSizeInBytes == AudioRecord.ERROR_BAD_VALUE) {
        throw new IllegalArgumentException("SpeechRecord.getMinBufferSize: parameters not supported by hardware");
    } else if (minBufferSizeInBytes == AudioRecord.ERROR) {
        // Log.e("SpeechRecord.getMinBufferSize: unable to query hardware for output properties");
        // Fall back to roughly 120 ms of audio; note that writing (120 / 1000) would be
        // evaluated as integer division and yield 0
        minBufferSizeInBytes = mSampleRate * 120 / 1000 * RESOLUTION_IN_BYTES * CHANNELS;
    }
    // the reason for the buffer size multiplier of 4 is not documented
    int bufferSize = BUFFER_SIZE_MUTLIPLIER * minBufferSizeInBytes;
    //Log.i("SpeechRecord buffer size: " + bufferSize + ", min size = " + minBufferSizeInBytes);
    return bufferSize;
}
 
Example 3 Project: guitar-tuner   File: AudioProcessor.java
public void init() {
    int bufSize = 16384;
    int availableSampleRates = SAMPLE_RATES.length;
    int i = 0;
    // Try each candidate sample rate until an AudioRecord instance initializes successfully.
    do {
        int sampleRate = SAMPLE_RATES[i];
        int minBufSize = AudioRecord.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
        if (minBufSize != AudioRecord.ERROR_BAD_VALUE && minBufSize != AudioRecord.ERROR) {
            mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, Math.max(bufSize, minBufSize * 4));
        }
        i++;
    }
    while (i < availableSampleRates && (mAudioRecord == null || mAudioRecord.getState() != AudioRecord.STATE_INITIALIZED));
}
 
public static int getMaxSupportedSampleRate() {
    /*
     * Valid audio sample rates
     *
     * @see <a
     * href="http://en.wikipedia.org/wiki/Sampling_%28signal_processing%29"
     * >Wikipedia</a>
     */
    final int[] validSampleRates = new int[]{
            47250, 44100, 44056, 37800, 32000, 22050, 16000, 11025, 4800, 8000,};
    /*
     * AudioFormat.CHANNEL_CONFIGURATION_DEFAULT is deprecated, so the mono
     * channel mask and the default PCM 16-bit encoding are used instead.
     */
    for (int i = 0; i < validSampleRates.length; i++) {
        int result = AudioRecord.getMinBufferSize(validSampleRates[i],
                android.media.AudioFormat.CHANNEL_IN_MONO,
                android.media.AudioFormat.ENCODING_PCM_16BIT);
        if (result != AudioRecord.ERROR
                && result != AudioRecord.ERROR_BAD_VALUE && result > 0) {
            // the rates are tried from highest to lowest, so the first supported rate is the maximum
            return validSampleRates[i];
        }
    }
    // If none of the sample rates is supported, return -1 and let the calling method handle it
    return -1;
}
 
public static ArrayList<Integer> getAllSupportedSampleRates() {
    /*
     * Candidate sample rates to test for support.
     *
     * @see <a
     * href="http://en.wikipedia.org/wiki/Sampling_%28signal_processing%29"
     * >Wikipedia</a>
     */
    final int[] validSampleRates = new int[]{ 5644800, 2822400, 352800, 192000, 176400, 96000,
            88200, 50400, 50000, 4800, 47250, 44100, 44056, 37800, 32000, 22050, 16000, 11025, 8000, };
    /*
     * AudioFormat.CHANNEL_CONFIGURATION_DEFAULT is deprecated, so the mono
     * channel mask and the default PCM 16-bit encoding are used instead.
     */

    ArrayList<Integer> supportedSampleRates = new ArrayList<Integer>();
    for (int i = 0; i < validSampleRates.length; i++) {
        int result = AudioRecord.getMinBufferSize(validSampleRates[i],
                android.media.AudioFormat.CHANNEL_IN_MONO,
                android.media.AudioFormat.ENCODING_PCM_16BIT);
        if (result != AudioRecord.ERROR
                && result != AudioRecord.ERROR_BAD_VALUE && result > 0) {
            // add this supported sample rate to the list
            supportedSampleRates.add(validSampleRates[i]);
        }
    }
    // If none of the sample rates is supported, an empty list is returned for the calling method to handle
    return supportedSampleRates;
}
 
Example 6 Project: connectivity-samples   File: AudioRecorder.java
@Override
protected boolean validSize(int size) {
  return size != AudioRecord.ERROR && size != AudioRecord.ERROR_BAD_VALUE;
}
 
Example 8 Project: Saiy-PS   File: SaiyRecorder.java
/**
 * Calculate the buffer size.
 *
 * @return the calculated buffer size
 */
private int calculateBufferSize() {

    // Number of samples per timer interval (TIMER_INTERVAL is in milliseconds)
    framePeriod = sampleRateInHz * TIMER_INTERVAL / 1000;
    // Two frame periods, bSamples bits per sample, nChannels channels, converted to bytes
    int bufferSize = framePeriod * 2 * bSamples * nChannels / 8;

    if (DEBUG) {
        MyLog.i(CLS_NAME, "bufferSize: " + bufferSize);
    }

    final int minBuff = AudioRecord.getMinBufferSize(sampleRateInHz, channelConfig, audioFormat);

    switch (minBuff) {

        case AudioRecord.ERROR:
        case AudioRecord.ERROR_BAD_VALUE:
            if (DEBUG) {
                MyLog.w(CLS_NAME, "AudioRecord.ERROR/ERROR_BAD_VALUE");
            }
            break;
        default:

            if (DEBUG) {
                MyLog.i(CLS_NAME, "minBuff: " + minBuff);
            }

            if (bufferSize < minBuff) {
                bufferSize = minBuff;

                // Unused for now
                framePeriod = bufferSize / (2 * bSamples * nChannels / 8);
            }

            break;
    }


    if (DEBUG) {
        MyLog.i(CLS_NAME, "bufferSize returning: " + bufferSize);
    }

    return bufferSize;
}
 
Example 9 Project: DeviceConnect-Android   File: MicOpusRecorder.java
/**
 * Records audio and passes it to MediaCodec.
 */
private void recordAudio() throws NativeInterfaceException {
    int samplingRate = mSamplingRate.getValue();
    int channels = mChannels == 1 ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO;
    int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
    int bufferSize = AudioRecord.getMinBufferSize(samplingRate, channels, audioFormat) * 4;
    int oneFrameDataCount = mSamplingRate.getValue() / mFrameSize.getFps();

    mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.DEFAULT,
            samplingRate,
            channels,
            audioFormat,
            bufferSize);

    if (mAudioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
        if (mAudioRecordCallback != null) {
            mAudioRecordCallback.onEncoderError();
        }
        return;
    }

    if (mUseAEC && AcousticEchoCanceler.isAvailable()) {
        // Acoustic echo canceler
        mEchoCanceler = AcousticEchoCanceler.create(mAudioRecord.getAudioSessionId());
        if (mEchoCanceler != null) {
            int ret = mEchoCanceler.setEnabled(true);
            if (ret != AudioEffect.SUCCESS) {
                if (DEBUG) {
                    Log.w(TAG, "AcousticEchoCanceler is not supported.");
                }
            }
        }
    }

    OpusEncoder opusEncoder = null;

    try {
        opusEncoder = new OpusEncoder(mSamplingRate, mChannels, mFrameSize, mBitRate, mApplication);

        mAudioRecord.startRecording();

        short[] emptyBuffer = new short[oneFrameDataCount];
        short[] pcmBuffer = new short[oneFrameDataCount];
        byte[] opusFrameBuffer = opusEncoder.bufferAllocate();
        while (!mStopFlag) {
            int readSize = mAudioRecord.read(pcmBuffer, 0, oneFrameDataCount);
            if (readSize > 0) {
                int opusFrameBufferLength;
                if (isMute()) {
                    opusFrameBufferLength = opusEncoder.encode(emptyBuffer, readSize, opusFrameBuffer);
                } else {
                    opusFrameBufferLength = opusEncoder.encode(pcmBuffer, readSize, opusFrameBuffer);
                }

                if (opusFrameBufferLength > 0 && mAudioRecordCallback != null) {
                    mAudioRecordCallback.onPeriodicNotification(opusFrameBuffer, opusFrameBufferLength);
                }
            } else if (readSize == AudioRecord.ERROR_INVALID_OPERATION) {
                if (DEBUG) {
                    Log.e(TAG, "Invalid operation error.");
                }
                break;
            } else if (readSize == AudioRecord.ERROR_BAD_VALUE) {
                if (DEBUG) {
                    Log.e(TAG, "Bad value error.");
                }
                break;
            } else if (readSize == AudioRecord.ERROR) {
                if (DEBUG) {
                    Log.e(TAG, "Unknown error.");
                }
                break;
            }
        }
    } finally {
        if (mEchoCanceler != null) {
            mEchoCanceler.release();
            mEchoCanceler = null;
        }

        if (opusEncoder != null) {
            opusEncoder.release();
        }
    }
}