下面列出了 javax.sound.sampled.AudioInputStream API 类的实例代码及用法示例；您也可以点击链接到 GitHub 查看完整源代码。
/**
 * Writes the given audio stream into the RIFF container as an IEEE-float
 * WAVE body: a "fmt " chunk describing the format, then a "data" chunk
 * holding the raw sample bytes.
 *
 * @param stream the audio data to serialize
 * @param writer the open RIFF container to append chunks to
 * @throws IOException if reading the stream or writing a chunk fails
 */
public void write(AudioInputStream stream, RIFFWriter writer)
        throws IOException {
    AudioFormat format = stream.getFormat();

    // "fmt " chunk: format tag 3 = WAVE_FORMAT_IEEE_FLOAT.
    RIFFWriter fmtChunk = writer.writeChunk("fmt ");
    fmtChunk.writeUnsignedShort(3); // WAVE_FORMAT_IEEE_FLOAT
    fmtChunk.writeUnsignedShort(format.getChannels());
    fmtChunk.writeUnsignedInt((int) format.getSampleRate());
    // Average bytes per second = frame rate * frame size.
    fmtChunk.writeUnsignedInt(((int) format.getFrameRate()) * format.getFrameSize());
    fmtChunk.writeUnsignedShort(format.getFrameSize()); // block align
    fmtChunk.writeUnsignedShort(format.getSampleSizeInBits());
    fmtChunk.close();

    // "data" chunk: copy the stream through a small transfer buffer.
    RIFFWriter dataChunk = writer.writeChunk("data");
    byte[] transfer = new byte[1024];
    for (int n = stream.read(transfer, 0, transfer.length); n != -1;
            n = stream.read(transfer, 0, transfer.length)) {
        dataChunk.write(transfer, 0, n);
    }
    dataChunk.close();
}
@Override
public AudioInputStream getAudioInputStream(final InputStream stream)
        throws UnsupportedAudioFileException, IOException {
    // 200 bytes is the largest read-ahead historically needed to parse a
    // header, so marking here guarantees reset() succeeds on failure.
    stream.mark(200);
    try {
        final StandardFileFormat format = getAudioFileFormatImpl(stream);
        // Parsing succeeded and left the stream positioned at the first
        // audio byte, so it can be wrapped directly.
        return new AudioInputStream(stream, format.getFormat(),
                                    format.getLongFrameLength());
    } catch (final UnsupportedAudioFileException | EOFException ignored) {
        // Either the content is unsupported or the header was shorter than
        // expected; rewind so another provider can try the same stream.
        stream.reset();
        throw new UnsupportedAudioFileException();
    }
}
/**
 * Loads a WAVE file, buffering its data and preparing a playback line.
 * @param url URL of the WAVE file
 */
public static void load(URL url) throws UnsupportedAudioFileException, IOException, LineUnavailableException {
// open an audio stream from the URL
AudioInputStream ais = AudioSystem.getAudioInputStream(url);
// get the WAVE file's format
AudioFormat format = ais.getFormat();
// describe the source line required for this format
DataLine.Info info = new DataLine.Info(SourceDataLine.class, format, AudioSystem.NOT_SPECIFIED);
// read the whole WAVE data into memory
DataClip clip = new DataClip(ais);
// register the WAVE data and its playback line at the current slot
// NOTE(review): no bounds check on counter — assumes callers limit the number of loads
clips[counter] = clip;
lines[counter] = (SourceDataLine)AudioSystem.getLine(info);
// open the line with the file's format
lines[counter].open(format);
counter++;
}
/**
 * Loads a WAVE file, buffering its data and preparing a playback line in
 * the next free slot of the clip/line tables.
 * @param url URL of the WAVE file
 */
public static void load(URL url) throws UnsupportedAudioFileException, IOException, LineUnavailableException {
// Open the audio stream and inspect its format.
AudioInputStream stream = AudioSystem.getAudioInputStream(url);
AudioFormat fmt = stream.getFormat();
// Describe the kind of output line this format needs.
DataLine.Info lineInfo = new DataLine.Info(SourceDataLine.class, fmt, AudioSystem.NOT_SPECIFIED);
// Buffer the whole WAVE payload, then register data and line together.
DataClip loaded = new DataClip(stream);
clips[counter] = loaded;
lines[counter] = (SourceDataLine) AudioSystem.getLine(lineInfo);
// Open the line so it is ready for playback.
lines[counter].open(fmt);
counter++;
}
/**
 * Returns the file types this writer can produce for the given stream, or
 * an empty array when the stream's encoding is not writable.
 *
 * @param stream the stream whose encoding is checked
 * @return a fresh copy of the supported types, or an empty array
 */
public AudioFileFormat.Type[] getAudioFileTypes(AudioInputStream stream) {
    AudioFormat.Encoding enc = stream.getFormat().getEncoding();
    // Only these encodings can be serialized by this writer.
    if (AudioFormat.Encoding.ALAW.equals(enc)
            || AudioFormat.Encoding.ULAW.equals(enc)
            || AudioFormat.Encoding.PCM_SIGNED.equals(enc)
            || AudioFormat.Encoding.PCM_UNSIGNED.equals(enc)) {
        // Return a defensive copy so callers cannot mutate the table.
        AudioFileFormat.Type[] supported = new AudioFileFormat.Type[types.length];
        System.arraycopy(types, 0, supported, 0, types.length);
        return supported;
    }
    return new AudioFileFormat.Type[0];
}
@Override
public Type[] getAudioFileTypes(AudioInputStream stream) {
    // Encodings this writer can serialize (PCM_FLOAT included).
    AudioFormat.Encoding enc = stream.getFormat().getEncoding();
    boolean writable = AudioFormat.Encoding.ALAW.equals(enc)
            || AudioFormat.Encoding.ULAW.equals(enc)
            || AudioFormat.Encoding.PCM_SIGNED.equals(enc)
            || AudioFormat.Encoding.PCM_UNSIGNED.equals(enc)
            || AudioFormat.Encoding.PCM_FLOAT.equals(enc);
    if (!writable) {
        return new Type[0];
    }
    // Hand back a defensive copy so callers cannot mutate our table.
    Type[] supported = new Type[types.length];
    System.arraycopy(types, 0, supported, 0, types.length);
    return supported;
}
@Override
public AudioInputStream getAudioInputStream(Encoding targetEncoding,
        AudioInputStream sourceStream) {
    AudioFormat sourceFormat = sourceStream.getFormat();
    if (!isConversionSupported(targetEncoding, sourceFormat)) {
        throw new IllegalArgumentException(
                "Unsupported conversion: " + sourceFormat.toString()
                        + " to " + targetEncoding.toString());
    }
    // Already in the requested encoding: nothing to convert.
    if (sourceFormat.getEncoding().equals(targetEncoding)) {
        return sourceStream;
    }
    float rate = sourceFormat.getSampleRate();
    int channels = sourceFormat.getChannels();
    // Float samples are always stored as 32-bit values; other targets keep
    // the source sample size.
    int bits = targetEncoding.equals(Encoding.PCM_FLOAT)
            ? 32 : sourceFormat.getSampleSizeInBits();
    AudioFormat target = new AudioFormat(targetEncoding, rate, bits, channels,
            channels * bits / 8, rate, sourceFormat.isBigEndian());
    return getAudioInputStream(target, sourceStream);
}
// Pump loop: copies audio from the pusher's stream to its output line
// until the stream ends, the pusher is deactivated, or an I/O error occurs.
public void run() {
// Snapshot the enclosing pusher's state into locals for the loop.
byte[] buffer = SoftAudioPusher.this.buffer;
AudioInputStream ais = SoftAudioPusher.this.ais;
SourceDataLine sourceDataLine = SoftAudioPusher.this.sourceDataLine;
try {
while (active) {
// Read from audio source
int count = ais.read(buffer);
if(count < 0) break; // end of stream
// Write byte buffer to source output
sourceDataLine.write(buffer, 0, count);
}
} catch (IOException e) {
// A read failure permanently stops the pusher; the error is not
// reported further (kept silent by design here).
active = false;
//e.printStackTrace();
}
}
/**
 * Builds a soundbank containing a single instrument whose oscillator plays
 * the given audio file, detuned by -4800 cents.
 *
 * @param file an audio file in a format readable by {@link AudioSystem}
 * @return the soundbank, or {@code null} when the file is not a supported
 *         audio file or cannot be read
 * @throws InvalidMidiDataException declared for interface compatibility
 * @throws IOException declared for interface compatibility
 */
public Soundbank getSoundbank(File file)
        throws InvalidMidiDataException, IOException {
    try {
        // Probe only: getAudioInputStream throws if the format is
        // unsupported; the stream itself is not needed afterwards.
        AudioInputStream ais = AudioSystem.getAudioInputStream(file);
        ais.close();
        ModelByteBufferWavetable osc = new ModelByteBufferWavetable(
                new ModelByteBuffer(file, 0, file.length()), -4800);
        ModelPerformer performer = new ModelPerformer();
        performer.getOscillators().add(osc);
        SimpleSoundbank sbk = new SimpleSoundbank();
        SimpleInstrument ins = new SimpleInstrument();
        ins.add(performer);
        sbk.addInstrument(ins);
        return sbk;
    } catch (UnsupportedAudioFileException | IOException ignored) {
        // Both failure modes mean "not a loadable soundbank"; the original
        // two identical catch blocks are merged into one multi-catch.
        return null;
    }
}
/**
 * Obtains an audio stream from the File provided. The File must
 * point to valid audio file data.
 * @param file the File for which the <code>AudioInputStream</code> should be
 * constructed
 * @return an <code>AudioInputStream</code> object based on the audio file data pointed
 * to by the File
 * @throws UnsupportedAudioFileException if the File does not point to valid audio
 * file data recognized by the system
 * @throws IOException if an I/O exception occurs
 */
public AudioInputStream getAudioInputStream(File file)
        throws UnsupportedAudioFileException, IOException {
    FileInputStream in = new FileInputStream(file); // throws IOException
    AudioFileFormat parsed = null;
    // part of fix for 4325421: close the stream only when header parsing
    // fails; on success it feeds the returned AudioInputStream.
    try {
        parsed = getCOMM(in, false);
    } finally {
        if (parsed == null) {
            in.close();
        }
    }
    return new AudioInputStream(in, parsed.getFormat(), parsed.getFrameLength());
}
@Override
public void run() {
    log("ConversionThread[" + num + "] started.");
    try {
        // Wrap the raw PCM bytes as a stream of unspecified frame length.
        AudioInputStream pcmStream = new AudioInputStream(
                new ByteArrayInputStream(pcmBuffer), pcmFormat,
                AudioSystem.NOT_SPECIFIED);
        // Let the installed codec chain perform the PCM -> A-law conversion.
        AudioInputStream alawStream =
                AudioSystem.getAudioInputStream(alawFormat, pcmStream);
        ByteArrayOutputStream converted = new ByteArrayOutputStream();
        byte[] chunk = new byte[4096];
        for (int n = alawStream.read(chunk); n != -1; n = alawStream.read(chunk)) {
            converted.write(chunk, 0, n);
        }
        alawStream.close();
        resultArray = converted.toByteArray();
    } catch (Exception ex) {
        log("ConversionThread[" + num + "] exception:");
        log(ex);
    }
    log("ConversionThread[" + num + "] completed.");
}
/**
 * Creates a float-sample view over a raw byte buffer.
 *
 * @param format the audio format describing the bytes
 * @param buffer the raw audio bytes
 * @param offset start of the audio data within {@code buffer}
 * @param len number of audio bytes
 * @return an AudioFloatInputStream decoding the given bytes
 */
public static AudioFloatInputStream getInputStream(AudioFormat format,
        byte[] buffer, int offset, int len) {
    // Fast path: a direct byte->float converter exists for this format.
    AudioFloatConverter converter = AudioFloatConverter.getConverter(format);
    if (converter != null) {
        return new BytaArrayAudioFloatInputStream(converter, buffer, offset, len);
    }
    // Fallback: wrap the bytes as an AudioInputStream and decode generically.
    int frameSize = format.getFrameSize();
    long frames = (frameSize == AudioSystem.NOT_SPECIFIED)
            ? AudioSystem.NOT_SPECIFIED : len / frameSize;
    return getInputStream(new AudioInputStream(
            new ByteArrayInputStream(buffer, offset, len), format, frames));
}
// Validates that the requested output is an IEEE-float WAVE file; throws
// IllegalArgumentException for any other container type or encoding.
private void checkFormat(AudioFileFormat.Type type, AudioInputStream stream) {
    if (!Type.WAVE.equals(type)) {
        throw new IllegalArgumentException("File type " + type
                + " not supported.");
    }
    if (!stream.getFormat().getEncoding().equals(Encoding.PCM_FLOAT)) {
        throw new IllegalArgumentException("File format "
                + stream.getFormat() + " not supported.");
    }
}
/**
 * Obtains an audio stream from the File provided. The File must
 * point to valid audio file data.
 * @param file the File for which the <code>AudioInputStream</code> should be
 * constructed
 * @return an <code>AudioInputStream</code> object based on the audio file data pointed
 * to by the File
 * @throws UnsupportedAudioFileException if the File does not point to valid audio
 * file data recognized by the system
 * @throws IOException if an I/O exception occurs
 */
public AudioInputStream getAudioInputStream(File file) throws UnsupportedAudioFileException, IOException {
    FileInputStream stream = new FileInputStream(file); // throws IOException
    AudioFileFormat header = null;
    try {
        // On success getFMT leaves the stream positioned at the audio data.
        header = getFMT(stream, false);
    } finally {
        // part of fix for 4325421: only close when header parsing failed
        if (header == null) {
            stream.close();
        }
    }
    return new AudioInputStream(stream, header.getFormat(), header.getFrameLength());
}
/**
 * Replaces the audio stream to be played.
 *
 * @param audio the new audio input stream
 * @throws IllegalStateException if playback is currently in progress
 */
public void setAudio(AudioInputStream audio) {
    // Swapping the stream mid-playback is not allowed.
    if (status != Status.PLAYING) {
        this.ais = audio;
        return;
    }
    throw new IllegalStateException("Cannot set audio while playing");
}
/**
 * Regression check: writing an AudioInputStream of unspecified length to an
 * AU file must succeed.
 *
 * @param argv unused
 * @throws Exception if the write fails
 */
public static void main(String argv[]) throws Exception {
    // 1000 zero bytes of CD-quality big-endian stereo PCM, with the frame
    // length deliberately reported as unknown.
    AudioFormat format = new AudioFormat(44100, 16, 2, true, true);
    AudioInputStream ais = new AudioInputStream(
            new ByteArrayInputStream(new byte[1000]), format,
            AudioSystem.NOT_SPECIFIED);
    AudioSystem.write(ais, AudioFileFormat.Type.AU, new ByteArrayOutputStream());
    System.out.println("Test passed.");
}
// Wraps an AudioInputStream and additionally keeps a WeakReference to it,
// plus cached per-frame layout values derived from its format.
public WeakAudioStream(AudioInputStream stream) {
this.stream = stream;
// Weak link allows the stream to be reclaimed when no one else holds it.
weak_stream_link = new WeakReference<AudioInputStream>(stream);
converter = AudioFloatConverter.getConverter(stream.getFormat());
// bytes per sample = frame size / channel count
samplesize = stream.getFormat().getFrameSize() / stream.getFormat().getChannels();
framesize = stream.getFormat().getFrameSize();
}
/**
 * Plays the given audio file by decoding it to 16-bit signed little-endian
 * PCM via the installed SPI chain and feeding the result to rawplay.
 * All failures are printed, never thrown.
 *
 * @param filename path of the audio file to play
 */
public static void testPlay(String filename)
{
    try
    {
        File source = new File(filename);
        // Open the (possibly compressed) source stream.
        AudioInputStream encoded = AudioSystem.getAudioInputStream(source);
        if (encoded != null)
        {
            AudioFormat srcFormat = encoded.getFormat();
            // Target: 16-bit signed LE PCM, same rate and channel count.
            AudioFormat pcmFormat = new AudioFormat(
                    AudioFormat.Encoding.PCM_SIGNED,
                    srcFormat.getSampleRate(),
                    16,
                    srcFormat.getChannels(),
                    srcFormat.getChannels() * 2,
                    srcFormat.getSampleRate(),
                    false);
            // The underlying SPI (e.g. VorbisSPI) performs the decoding.
            AudioInputStream decoded =
                    AudioSystem.getAudioInputStream(pcmFormat, encoded);
            rawplay(pcmFormat, decoded);
            encoded.close();
        }
    }
    catch (Exception e)
    {
        e.printStackTrace();
    }
}
/**
 * Writes the stream to a WAVE file, converting big-endian input to the
 * little-endian byte order RIFF requires.
 *
 * @param stream the audio data to write (must pass checkFormat)
 * @param fileType the requested container type
 * @param out the destination file
 * @return the number of bytes written
 * @throws IOException if writing fails
 */
public int write(AudioInputStream stream, Type fileType, File out)
        throws IOException {
    checkFormat(fileType, stream);
    // RIFF payloads are little-endian; swap byte order if needed.
    AudioInputStream source =
            stream.getFormat().isBigEndian() ? toLittleEndian(stream) : stream;
    RIFFWriter writer = new RIFFWriter(out, "WAVE");
    write(source, writer);
    int bytesWritten = (int) writer.getFilePointer();
    writer.close();
    return bytesWritten;
}
/**
 * Test method for
 * {@link JVoiceXmlDocumentServer#getAudioInputStream(String, URI)}.
 *
 * @since 0.7.2
 * @exception Exception
 *                Test failed.
 * @exception JVoiceXMLEvent
 *                Test failed.
 */
@Test
public void testGetAudioInputStream() throws Exception, JVoiceXMLEvent {
    final URL file = this.getClass().getResource("/test.wav");
    // A mocked session only needs to hand out a random session id.
    final Session session = Mockito.mock(Session.class);
    final String id = UUID.randomUUID().toString();
    Mockito.when(session.getSessionId()).thenReturn(id);
    final AudioInputStream in =
            server.getAudioInputStream(session.getSessionId(), file.toURI());
    Assert.assertNotNull(in);
}
/**
 * Builds a float-sample stream over {@code len} bytes of audio data
 * starting at {@code offset} in {@code buffer}.
 *
 * @param format format describing the raw bytes
 * @param buffer source byte array
 * @param offset first audio byte
 * @param len number of audio bytes
 * @return an AudioFloatInputStream over the given bytes
 */
public static AudioFloatInputStream getInputStream(AudioFormat format,
        byte[] buffer, int offset, int len) {
    AudioFloatConverter directConverter = AudioFloatConverter.getConverter(format);
    // Prefer the direct byte->float converter when one is registered.
    if (directConverter != null) {
        return new BytaArrayAudioFloatInputStream(directConverter, buffer,
                offset, len);
    }
    // No converter: go through the generic AudioInputStream path.
    InputStream raw = new ByteArrayInputStream(buffer, offset, len);
    long frameCount = format.getFrameSize() == AudioSystem.NOT_SPECIFIED
            ? AudioSystem.NOT_SPECIFIED
            : len / format.getFrameSize();
    return getInputStream(new AudioInputStream(raw, format, frameCount));
}
/**
 * Builds a soundbank with one instrument whose oscillator plays the given
 * stream's audio, detuned by -4800 cents. The whole stream is buffered in
 * memory first.
 *
 * @param ais the audio stream to wrap (fully consumed)
 * @return the soundbank, or {@code null} if reading or wrapping fails
 * @throws InvalidMidiDataException declared for interface compatibility
 * @throws IOException declared for interface compatibility
 */
public Soundbank getSoundbank(AudioInputStream ais)
        throws InvalidMidiDataException, IOException {
    try {
        byte[] data;
        if (ais.getFrameLength() == -1) {
            // Unknown length: drain the stream using a frame-aligned chunk
            // (largest multiple of the frame size that fits in 1024 bytes).
            ByteArrayOutputStream collected = new ByteArrayOutputStream();
            byte[] chunk = new byte[1024
                    - (1024 % ais.getFormat().getFrameSize())];
            int n;
            while ((n = ais.read(chunk)) != -1) {
                collected.write(chunk, 0, n);
            }
            ais.close();
            data = collected.toByteArray();
        } else {
            // Known length: size the buffer exactly and fill it completely.
            data = new byte[(int) (ais.getFrameLength()
                    * ais.getFormat().getFrameSize())];
            new DataInputStream(ais).readFully(data);
        }
        ModelByteBufferWavetable osc = new ModelByteBufferWavetable(
                new ModelByteBuffer(data), ais.getFormat(), -4800);
        ModelPerformer performer = new ModelPerformer();
        performer.getOscillators().add(osc);
        SimpleSoundbank sbk = new SimpleSoundbank();
        SimpleInstrument ins = new SimpleInstrument();
        ins.add(performer);
        sbk.addInstrument(ins);
        return sbk;
    } catch (Exception e) {
        // Any failure means "not a loadable soundbank".
        return null;
    }
}
/**
 * Loads the entire audio stream into memory, converting it to PCM first.
 * On success the data is available via loadedAudio/loadedAudioByteLength
 * (fields populated by readStream) and the format in loadedAudioFormat.
 *
 * @param as the stream to load; replaced by its PCM-converted view
 * @return true when the data was loaded, false when no PCM conversion exists
 * @throws IOException if reading the stream fails
 * @throws UnsupportedAudioFileException declared for callers; not thrown here directly
 */
private boolean loadAudioData(AudioInputStream as) throws IOException, UnsupportedAudioFileException {
if (DEBUG || Printer.debug)Printer.debug("JavaSoundAudioClip->openAsClip()");
// first possibly convert this stream to PCM
as = Toolkit.getPCMConvertedAudioInputStream(as);
if (as == null) {
// no PCM conversion path exists for this stream's format
return false;
}
loadedAudioFormat = as.getFormat();
long frameLen = as.getFrameLength();
int frameSize = loadedAudioFormat.getFrameSize();
long byteLen = AudioSystem.NOT_SPECIFIED;
// byte length is only computable when both frame length and frame size
// are known and positive
if (frameLen != AudioSystem.NOT_SPECIFIED
&& frameLen > 0
&& frameSize != AudioSystem.NOT_SPECIFIED
&& frameSize > 0) {
byteLen = frameLen * frameSize;
}
if (byteLen != AudioSystem.NOT_SPECIFIED) {
// if the stream length is known, it can be efficiently loaded into memory
readStream(as, byteLen);
} else {
// otherwise we use a ByteArrayOutputStream to load it into memory
readStream(as);
}
// if everything went fine, we have now the audio data in
// loadedAudio, and the byte length in loadedAudioByteLength
return true;
}
/**
 * Reads the entire WAVE stream into an in-memory byte array.
 *
 * @param audioStream the stream to consume; must report a frame length
 * @throws IOException if the stream ends early or cannot be read
 */
public DataClip(AudioInputStream audioStream) throws IOException {
index = 0;
format = audioStream.getFormat();
// compute the size of the WAVE data in bytes (frames * bytes per frame)
int length = (int)(audioStream.getFrameLength() * format.getFrameSize());
// allocate a byte array of that size
data = new byte[length];
// fill data with the WAVE samples; readFully throws EOFException if
// the stream delivers fewer bytes than the header promised
DataInputStream is = new DataInputStream(audioStream);
is.readFully(data);
}
/**
 * Writes the recorded bytes as "<fileName>.wav" into the audio folder and
 * registers the new chunk via addChunk.
 *
 * @param fileName base name of the WAVE file (".wav" is appended)
 * @throws IOException if writing the file fails
 */
private void save(String fileName) throws IOException {
byte[] audioData = recordBytes.toByteArray();
final File wavFile = new File(getAudioFolder(), fileName + ".wav");
ByteArrayInputStream bais = new ByteArrayInputStream(audioData);
// frame count = total bytes / bytes per frame; try-with-resources closes the stream
try (AudioInputStream audioInputStream = new AudioInputStream(bais, format, audioData.length / format.getFrameSize())) {
AudioSystem.write(audioInputStream, AudioFileFormat.Type.WAVE, wavFile);
}
recordBytes.close();
if (debug) {
LOG.log(Level.INFO, String.format("File %s.wav added to %s", fileName, getAudioFolder()));
}
// notify the consumer that a new chunk file exists
addChunk.apply(fileName + ".wav");
}
/**
 * Obtains an audio stream from the URL provided. The URL must
 * point to valid audio file data.
 * @param url the URL for which the <code>AudioInputStream</code> should be
 * constructed
 * @return an <code>AudioInputStream</code> object based on the audio file data pointed
 * to by the URL
 * @throws UnsupportedAudioFileException if the URL does not point to valid audio
 * file data recognized by the system
 * @throws IOException if an I/O exception occurs
 */
public AudioInputStream getAudioInputStream(URL url) throws UnsupportedAudioFileException, IOException {
    InputStream in = url.openStream(); // throws IOException
    AudioFileFormat header = null;
    try {
        // On success getCOMM leaves the stream at the audio data.
        header = getCOMM(in, false);
    } finally {
        // Close only on failure; on success the stream feeds the result.
        if (header == null) {
            in.close();
        }
    }
    return new AudioInputStream(in, header.getFormat(), header.getFrameLength());
}
/**
 * Returns a float-sample stream reading the given slice of raw audio bytes.
 *
 * @param format format of the raw bytes
 * @param buffer the byte array holding audio data
 * @param offset index of the first audio byte
 * @param len number of audio bytes to expose
 * @return an AudioFloatInputStream over the slice
 */
public static AudioFloatInputStream getInputStream(AudioFormat format,
        byte[] buffer, int offset, int len) {
    AudioFloatConverter c = AudioFloatConverter.getConverter(format);
    // A registered converter lets us decode the bytes directly.
    if (c != null)
        return new BytaArrayAudioFloatInputStream(c, buffer, offset, len);
    // Otherwise fall back to wrapping the slice as an AudioInputStream.
    long frameTotal;
    if (format.getFrameSize() == AudioSystem.NOT_SPECIFIED) {
        frameTotal = AudioSystem.NOT_SPECIFIED;
    } else {
        frameTotal = len / format.getFrameSize();
    }
    InputStream slice = new ByteArrayInputStream(buffer, offset, len);
    return getInputStream(new AudioInputStream(slice, format, frameTotal));
}
@Override
public AudioInputStream getAudioInputStream(AudioFormat targetFormat, AudioInputStream sourceStream){
    AudioFormat sourceFormat = sourceStream.getFormat();
    // Reject unsupported pairs before building the conversion chain.
    if (!isConversionSupported(targetFormat, sourceFormat)) {
        throw new IllegalArgumentException("Unsupported conversion: "
                + sourceFormat.toString() + " to "
                + targetFormat.toString());
    }
    return getConvertedStream(targetFormat, sourceStream);
}
// Rejects anything that is not an IEEE-float WAVE request: first the
// container type, then the stream's encoding.
private void checkFormat(AudioFileFormat.Type type, AudioInputStream stream) {
    if (!Type.WAVE.equals(type)) {
        throw new IllegalArgumentException("File type " + type
                + " not supported.");
    }
    AudioFormat fmt = stream.getFormat();
    if (!fmt.getEncoding().equals(Encoding.PCM_FLOAT)) {
        throw new IllegalArgumentException("File format "
                + fmt + " not supported.");
    }
}
/**
 * Writes the stream as a WAVE file. When the total length was not known in
 * advance, the file is reopened afterwards and the RIFF and data chunk
 * length fields are patched in place at their fixed byte offsets.
 *
 * @param stream the audio data to write
 * @param fileType requested container type (validated by getAudioFileFormat)
 * @param out destination file
 * @return the number of bytes written
 * @throws IOException if writing or patching the file fails
 */
public int write(AudioInputStream stream, AudioFileFormat.Type fileType, File out) throws IOException {
// throws IllegalArgumentException if not supported
WaveFileFormat waveFileFormat = (WaveFileFormat)getAudioFileFormat(fileType, stream);
// first write the file without worrying about length fields
FileOutputStream fos = new FileOutputStream( out ); // throws IOException
BufferedOutputStream bos = new BufferedOutputStream( fos, bisBufferSize );
int bytesWritten = writeWaveFile(stream, waveFileFormat, bos );
bos.close();
// now, if length fields were not specified, calculate them,
// open as a random access file, write the appropriate fields,
// close again....
if( waveFileFormat.getByteLength()== AudioSystem.NOT_SPECIFIED ) {
// data chunk size = total bytes minus the header; RIFF size excludes
// the 8-byte "RIFF"+length prefix itself
int dataLength=bytesWritten-waveFileFormat.getHeaderSize();
int riffLength=dataLength + waveFileFormat.getHeaderSize() - 8;
RandomAccessFile raf=new RandomAccessFile(out, "rw");
// skip RIFF magic
raf.skipBytes(4);
// big2little converts to the little-endian byte order RIFF requires
raf.writeInt(big2little( riffLength ));
// skip WAVE magic, fmt_ magic, fmt_ length, fmt_ chunk, data magic
raf.skipBytes(4+4+4+WaveFileFormat.getFmtChunkSize(waveFileFormat.getWaveType())+4);
raf.writeInt(big2little( dataLength ));
// that's all
raf.close();
}
return bytesWritten;
}