Listed below are example usages of javax.sound.sampled.AudioSystem#isLineSupported(), collected from open-source projects; you can also follow the link to GitHub to view the original source code.
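AudioSystem.isLineSupported(Line.Info) reports whether the audio system can provide a line that matches the given description, so it is usually called before AudioSystem.getLine() to avoid requesting a line for an unsupported format. Before the project examples, here is a minimal, self-contained sketch of that pattern; the 44.1 kHz stereo PCM format and the class name LineSupportCheck are illustrative assumptions only.

import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.SourceDataLine;

public class LineSupportCheck {
    public static void main(String[] args) throws LineUnavailableException {
        // CD-quality PCM: 44.1 kHz, 16 bit, stereo, signed, little-endian (illustrative choice).
        AudioFormat format = new AudioFormat(44100f, 16, 2, true, false);
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
        if (!AudioSystem.isLineSupported(info)) {
            System.out.println("No playback line available for " + format);
            return;
        }
        SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);
        line.open(format);
        line.start();
        // ... write PCM frames with line.write(buffer, 0, length), then drain and close.
        line.drain();
        line.close();
    }
}

The project examples below apply the same check-then-get pattern to SourceDataLine, TargetDataLine and Clip.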
private boolean createSourceDataLine() {
    if (DEBUG || Printer.debug)Printer.debug("JavaSoundAudioClip.createSourceDataLine()");
    try {
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, loadedAudioFormat);
        if (!(AudioSystem.isLineSupported(info)) ) {
            if (DEBUG || Printer.err)Printer.err("Line not supported: "+loadedAudioFormat);
            // fail silently
            return false;
        }
        SourceDataLine source = (SourceDataLine) AudioSystem.getLine(info);
        datapusher = new DataPusher(source, loadedAudioFormat, loadedAudio, loadedAudioByteLength);
    } catch (Exception e) {
        if (DEBUG || Printer.err)e.printStackTrace();
        // fail silently
        return false;
    }
    if (datapusher==null) {
        // fail silently
        return false;
    }
    if (DEBUG || Printer.debug)Printer.debug("Created SourceDataLine.");
    return true;
}
protected void openLineIn() {
    final AudioFormat format = new AudioFormat(samplingRate, N_SYNTHESISER_BITS, 1, true, true);
    final DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
    // checks if system supports the data line
    if (!AudioSystem.isLineSupported(info)) {
        LOGGER.atError().addArgument(info).addArgument(format).log("Line not supported '{}' format was '{}'");
        throw new IllegalArgumentException("Line not supported");
    }
    try {
        line = (TargetDataLine) AudioSystem.getLine(info);
        line.open(format);
        if (LOGGER.isInfoEnabled()) {
            LOGGER.atInfo().log("opened audio line-in, format = " + format);
        }
    } catch (final LineUnavailableException e) {
        LOGGER.atError().setCause(e).addArgument(format).log("could not open audio line-in for format '{}'");
    }
}
@Override
public void start() {
    DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
    if (!AudioSystem.isLineSupported(info)) {
        // Handle the error.
        logger.severe("JavaSoundOutputStream - not supported." + format);
    } else {
        try {
            line = (SourceDataLine) getDataLine(info);
            int bufferSize = calculateBufferSize(suggestedOutputLatency);
            line.open(format, bufferSize);
            logger.fine("Output buffer size = " + bufferSize + " bytes.");
            line.start();
        } catch (Exception e) {
            e.printStackTrace();
            line = null;
        }
    }
}
private boolean createSourceDataLine() {
    try {
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, loadedAudioFormat);
        if (!(AudioSystem.isLineSupported(info)) ) {
            if (Printer.err) Printer.err("Line not supported: "+loadedAudioFormat);
            // fail silently
            return false;
        }
        SourceDataLine source = (SourceDataLine) AudioSystem.getLine(info);
        datapusher = new DataPusher(source, loadedAudioFormat, loadedAudio, loadedAudioByteLength);
    } catch (Exception e) {
        if (Printer.err) e.printStackTrace();
        // fail silently
        return false;
    }
    if (datapusher==null) {
        // fail silently
        return false;
    }
    return true;
}
private void openOutput() throws IOException {
    AudioFormat audioFormat = new AudioFormat((float) vorbisInfo.rate, 16, vorbisInfo.channels, true, false);
    DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat, AudioSystem.NOT_SPECIFIED);
    if (!AudioSystem.isLineSupported(info)) {
        throw new IOException("line format " + info + " not supported");
    }
    try {
        out = (SourceDataLine) AudioSystem.getLine(info);
        out.open(audioFormat);
    } catch (LineUnavailableException e) {
        throw new IOException("audio unavailable: " + e.toString());
    }
    out.start();
    updateVolume(volume);
}
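The updateVolume(volume) helper called at the end of openOutput() is not shown in this snippet. A common way to implement such a helper, sketched here purely as an assumption (it requires javax.sound.sampled.FloatControl and presumes the opened line exposes a MASTER_GAIN control), is to map a linear volume in [0, 1] to a gain in decibels:

// Hypothetical implementation of updateVolume(): not part of the original example.
private void updateVolume(double volume) {
    if (out != null && out.isControlSupported(FloatControl.Type.MASTER_GAIN)) {
        FloatControl gain = (FloatControl) out.getControl(FloatControl.Type.MASTER_GAIN);
        // 20 * log10(volume) converts a linear amplitude factor to decibels.
        float dB = (float) (20.0 * Math.log10(Math.max(volume, 0.0001)));
        gain.setValue(Math.max(gain.getMinimum(), Math.min(dB, gain.getMaximum())));
    }
}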
private boolean createClip() {
    if (DEBUG || Printer.debug)Printer.debug("JavaSoundAudioClip.createClip()");
    try {
        DataLine.Info info = new DataLine.Info(Clip.class, loadedAudioFormat);
        if (!(AudioSystem.isLineSupported(info)) ) {
            if (DEBUG || Printer.err)Printer.err("Clip not supported: "+loadedAudioFormat);
            // fail silently
            return false;
        }
        Object line = AudioSystem.getLine(info);
        if (!(line instanceof AutoClosingClip)) {
            if (DEBUG || Printer.err)Printer.err("Clip is not auto closing!"+clip);
            // fail -> will try with SourceDataLine
            return false;
        }
        clip = (AutoClosingClip) line;
        clip.setAutoClosing(true);
        if (DEBUG || Printer.debug) clip.addLineListener(this);
    } catch (Exception e) {
        if (DEBUG || Printer.err)e.printStackTrace();
        // fail silently
        return false;
    }
    if (clip==null) {
        // fail silently
        return false;
    }
    if (DEBUG || Printer.debug)Printer.debug("Loaded clip.");
    return true;
}
/**
 * Plays audio from the given audio input stream.
 *
 * @param stream
 *            the AudioInputStream to play.
 * @param startTime
 *            the time to skip to when playing starts. A value of zero means
 *            this plays from the beginning, 1 means it skips one second,
 *            etc.
 * @param listener
 *            an optional Listener to update.
 * @param cancellable
 *            an optional Cancellable to consult.
 * @param blocking
 *            whether this call is blocking or not.
 * @throws LineUnavailableException
 *             if a line is unavailable.
 * @throws UnsupportedOperationException
 *             if this static method doesn't support playing the stream
 *             argument
 **/
public static SourceDataLine playAudioStream(AudioInputStream stream,
        StartTime startTime, Listener listener, Cancellable cancellable,
        boolean blocking) throws UnsupportedOperationException,
        LineUnavailableException {
    AudioFormat audioFormat = stream.getFormat();
    DataLine.Info info = new DataLine.Info(SourceDataLine.class,
            audioFormat);
    if (!AudioSystem.isLineSupported(info)) {
        throw new UnsupportedOperationException(
                "AudioPlayback.playAudioStream: info=" + info);
    }
    final SourceDataLine dataLine = (SourceDataLine) AudioSystem
            .getLine(info);
    dataLine.open(audioFormat);
    dataLine.start();
    PlayAudioThread thread = new PlayAudioThread(stream, startTime,
            dataLine, listener, cancellable);
    if (blocking) {
        thread.run();
    } else {
        thread.start();
    }
    return dataLine;
}
private byte[] record() throws LineUnavailableException {
    AudioFormat format = AudioUtil.getAudioFormat(audioConf);
    DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
    // Checks if system supports the data line
    if (!AudioSystem.isLineSupported(info)) {
        LOGGER.error("Line not supported");
        System.exit(0);
    }
    microphone = (TargetDataLine) AudioSystem.getLine(info);
    microphone.open(format);
    microphone.start();
    LOGGER.info("Listening, tap enter to stop ...");
    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    int numBytesRead;
    byte[] data = new byte[microphone.getBufferSize() / 5];
    // Begin audio capture.
    microphone.start();
    // Here, stopped is a global boolean set by another thread.
    while (!stopped) {
        // Read the next chunk of data from the TargetDataLine.
        numBytesRead = microphone.read(data, 0, data.length);
        // Save this chunk of data.
        byteArrayOutputStream.write(data, 0, numBytesRead);
    }
    return byteArrayOutputStream.toByteArray();
}
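The record() method above returns the captured raw PCM bytes. If they need to be persisted, a typical follow-up is to wrap them in an AudioInputStream and let AudioSystem.write() produce a WAV file. The helper below is only an illustrative sketch (saveAsWav, its parameters and the target file are assumptions; it needs the usual javax.sound.sampled and java.io imports):

// Hypothetical helper: writes PCM bytes captured with the given format to a WAV file.
private static void saveAsWav(byte[] audioBytes, AudioFormat format, File target) throws IOException {
    long frameCount = audioBytes.length / format.getFrameSize();
    try (AudioInputStream stream = new AudioInputStream(
            new ByteArrayInputStream(audioBytes), format, frameCount)) {
        AudioSystem.write(stream, AudioFileFormat.Type.WAVE, target);
    }
}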
private boolean createClip() {
    try {
        DataLine.Info info = new DataLine.Info(Clip.class, loadedAudioFormat);
        if (!(AudioSystem.isLineSupported(info)) ) {
            if (Printer.err) Printer.err("Clip not supported: "+loadedAudioFormat);
            // fail silently
            return false;
        }
        Object line = AudioSystem.getLine(info);
        if (!(line instanceof AutoClosingClip)) {
            if (Printer.err) Printer.err("Clip is not auto closing!"+clip);
            // fail -> will try with SourceDataLine
            return false;
        }
        clip = (AutoClosingClip) line;
        clip.setAutoClosing(true);
    } catch (Exception e) {
        if (Printer.err) e.printStackTrace();
        // fail silently
        return false;
    }
    if (clip==null) {
        // fail silently
        return false;
    }
    return true;
}
/**
 * Loads and returns a Clip from an audio input stream.
 * @param ref the resource name
 * @param audioIn the audio input stream
 * @param isMP3 true if MP3, false if WAV
 * @return the loaded and opened clip
 */
private static MultiClip loadClip(String ref, AudioInputStream audioIn, boolean isMP3)
        throws IOException, LineUnavailableException {
    AudioFormat format = audioIn.getFormat();
    if (isMP3) {
        AudioFormat decodedFormat = new AudioFormat(
                AudioFormat.Encoding.PCM_SIGNED, format.getSampleRate(), 16,
                format.getChannels(), format.getChannels() * 2, format.getSampleRate(), false);
        AudioInputStream decodedAudioIn = AudioSystem.getAudioInputStream(decodedFormat, audioIn);
        format = decodedFormat;
        audioIn = decodedAudioIn;
    }
    DataLine.Info info = new DataLine.Info(Clip.class, format);
    if (AudioSystem.isLineSupported(info))
        return new MultiClip(ref, audioIn);

    // try to find closest matching line
    Clip clip = AudioSystem.getClip();
    AudioFormat[] formats = ((DataLine.Info) clip.getLineInfo()).getFormats();
    int bestIndex = -1;
    float bestScore = 0;
    float sampleRate = format.getSampleRate();
    if (sampleRate < 0)
        sampleRate = clip.getFormat().getSampleRate();
    float oldSampleRate = sampleRate;
    while (true) {
        for (int i = 0; i < formats.length; i++) {
            AudioFormat curFormat = formats[i];
            AudioFormat newFormat = new AudioFormat(
                    sampleRate, curFormat.getSampleSizeInBits(),
                    curFormat.getChannels(), true, curFormat.isBigEndian());
            formats[i] = newFormat;
            DataLine.Info newLine = new DataLine.Info(Clip.class, newFormat);
            if (AudioSystem.isLineSupported(newLine) &&
                AudioSystem.isConversionSupported(newFormat, format)) {
                float score = 1
                        + (newFormat.getSampleRate() == sampleRate ? 5 : 0)
                        + (newFormat.getSampleSizeInBits() == format.getSampleSizeInBits() ? 5 : 0)
                        + (newFormat.getChannels() == format.getChannels() ? 5 : 0)
                        + (newFormat.isBigEndian() == format.isBigEndian() ? 1 : 0)
                        + newFormat.getSampleRate() / 11025
                        + newFormat.getChannels()
                        + newFormat.getSampleSizeInBits() / 8;
                if (score > bestScore) {
                    bestIndex = i;
                    bestScore = score;
                }
            }
        }
        if (bestIndex < 0) {
            if (oldSampleRate < 44100) {
                if (sampleRate > 44100)
                    break;
                sampleRate *= 2;
            } else {
                if (sampleRate < 44100)
                    break;
                sampleRate /= 2;
            }
        } else
            break;
    }
    if (bestIndex >= 0)
        return new MultiClip(ref, AudioSystem.getAudioInputStream(formats[bestIndex], audioIn));

    // still couldn't find anything, try the default clip format
    return new MultiClip(ref, AudioSystem.getAudioInputStream(clip.getFormat(), audioIn));
}
/** Performs microphone streaming speech recognition with a duration of 1 minute. */
public static void streamingMicRecognize() throws Exception {
    ResponseObserver<StreamingRecognizeResponse> responseObserver = null;
    try (SpeechClient client = SpeechClient.create()) {
        responseObserver =
            new ResponseObserver<StreamingRecognizeResponse>() {
                ArrayList<StreamingRecognizeResponse> responses = new ArrayList<>();

                public void onStart(StreamController controller) {}

                public void onResponse(StreamingRecognizeResponse response) {
                    responses.add(response);
                }

                public void onComplete() {
                    for (StreamingRecognizeResponse response : responses) {
                        StreamingRecognitionResult result = response.getResultsList().get(0);
                        SpeechRecognitionAlternative alternative = result.getAlternativesList().get(0);
                        System.out.printf("Transcript : %s\n", alternative.getTranscript());
                    }
                }

                public void onError(Throwable t) {
                    System.out.println(t);
                }
            };

        ClientStream<StreamingRecognizeRequest> clientStream =
            client.streamingRecognizeCallable().splitCall(responseObserver);

        RecognitionConfig recognitionConfig =
            RecognitionConfig.newBuilder()
                .setEncoding(RecognitionConfig.AudioEncoding.LINEAR16)
                .setLanguageCode("en-US")
                .setSampleRateHertz(16000)
                .build();
        StreamingRecognitionConfig streamingRecognitionConfig =
            StreamingRecognitionConfig.newBuilder().setConfig(recognitionConfig).build();

        StreamingRecognizeRequest request =
            StreamingRecognizeRequest.newBuilder()
                .setStreamingConfig(streamingRecognitionConfig)
                .build(); // The first request in a streaming call has to be a config

        clientStream.send(request);

        // SampleRate:16000Hz, SampleSizeInBits: 16, Number of channels: 1, Signed: true,
        // bigEndian: false
        AudioFormat audioFormat = new AudioFormat(16000, 16, 1, true, false);
        DataLine.Info targetInfo =
            new Info(
                TargetDataLine.class,
                audioFormat); // Set the system information to read from the microphone audio stream

        if (!AudioSystem.isLineSupported(targetInfo)) {
            System.out.println("Microphone not supported");
            System.exit(0);
        }
        // Target data line captures the audio stream the microphone produces.
        TargetDataLine targetDataLine = (TargetDataLine) AudioSystem.getLine(targetInfo);
        targetDataLine.open(audioFormat);
        targetDataLine.start();
        System.out.println("Start speaking");
        long startTime = System.currentTimeMillis();
        // Audio Input Stream
        AudioInputStream audio = new AudioInputStream(targetDataLine);
        while (true) {
            long estimatedTime = System.currentTimeMillis() - startTime;
            byte[] data = new byte[6400];
            audio.read(data);
            if (estimatedTime > 60000) { // 60 seconds
                System.out.println("Stop speaking.");
                targetDataLine.stop();
                targetDataLine.close();
                break;
            }
            request =
                StreamingRecognizeRequest.newBuilder()
                    .setAudioContent(ByteString.copyFrom(data))
                    .build();
            clientStream.send(request);
        }
    } catch (Exception e) {
        System.out.println(e);
    }
    responseObserver.onComplete();
}
/**
 * Test harness
 * @param args not used
 */
public static void main(String[] args)
{
    ToneGenerator toneGenerator = new ToneGenerator();

    AudioFormat audioFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
            8000.0f, 16, 1, 2, 8000.0f, false);

    DataLine.Info datalineinfo = new DataLine.Info(SourceDataLine.class, audioFormat);

    if(AudioSystem.isLineSupported(datalineinfo))
    {
        try
        {
            SourceDataLine sourceDataLine = AudioSystem.getSourceDataLine(audioFormat);
            sourceDataLine.open(audioFormat);

            for(Tone tone: Tone.DTMF_TONES)
            // for(Tone tone: Tone.KNOX_TONES)
            // for(Tone tone: Tone.CALL_PROGRESS_TONES)
            // for(Tone tone: Tone.DISCRETE_TONES)
            // for(Tone tone: Tone.values())
            {
                for(int x = 0; x < 128; x++) //Amplitude levels 0 - 127
                {
                    System.out.print("\rTONE [" + tone.name() + "]: " + tone + " " + tone.getFrequency1() +
                            (tone.hasFrequency2() ? " PLUS " + tone.getFrequency2() : "") + " AMPLITUDE:" + x);

                    ToneParameters toneParameters = new ToneParameters(tone, x);
                    float[] samples = toneGenerator.generate(toneParameters);

                    ByteBuffer converted = ByteBuffer.allocate(samples.length * 2);
                    converted.order(ByteOrder.LITTLE_ENDIAN);

                    for(float sample : samples)
                    {
                        converted.putShort((short)(sample * Short.MAX_VALUE));
                    }

                    byte[] bytes = converted.array();
                    sourceDataLine.write(bytes, 0, bytes.length);

                    if(x == 0)
                    {
                        sourceDataLine.start();
                    }
                }

                System.out.println("\rTONE [" + tone.name() + "]: " + tone + " " + tone.getFrequency1() +
                        (tone.hasFrequency2() ? " PLUS " + tone.getFrequency2() : ""));
            }
        }
        catch(Exception e)
        {
            e.printStackTrace();
        }
    }
    else
    {
        System.out.println("Audio Format Not Supported by Host Audio System: " + audioFormat);
    }
}
/**
 * Obtain sound playback line if available
 *
 * @throws javax.sound.sampled.LineUnavailableException If there is no line
 *                                                      available
 */
private void initAudio() throws LineUnavailableException {
    af = getAudioFormat();
    DataLine.Info dli = new DataLine.Info(SourceDataLine.class, af);
    lineAvailable = AudioSystem.isLineSupported(dli);
}
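initAudio() only records whether a matching playback line exists in the lineAvailable flag. A hypothetical caller, sketched under the assumption that af and lineAvailable are the fields used above (startPlayback itself is not taken from the original project), could use the flag like this:

// Hypothetical caller: only opens a SourceDataLine when initAudio() found a supported line.
private void startPlayback() throws LineUnavailableException {
    if (!lineAvailable) {
        throw new LineUnavailableException("no SourceDataLine supports format " + af);
    }
    SourceDataLine line = AudioSystem.getSourceDataLine(af);
    line.open(af);
    line.start();
    // ... feed PCM frames with line.write(...), then drain() and close().
}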