下面列出了 javax.sound.sampled.Clip#close() 的实例代码；也可以点击链接到 GitHub 查看源代码，或在右侧发表评论。
/**
 * Stress test for Clip.stop(): repeatedly creates a looping clip, stops it,
 * and verifies the clip reports itself neither running nor active afterwards.
 * Runs for at most 15 seconds or until another thread records a failure in
 * {@code failed}.
 *
 * @throws Exception if a clip cannot be created or a check fails
 */
private static void test() throws Exception {
    // Will run the test no more than 15 seconds
    long endtime = System.nanoTime() + TimeUnit.SECONDS.toNanos(15);
    while (failed == null && endtime - System.nanoTime() > 0) {
        Clip clip = createClip();
        try {
            clip.loop(Clip.LOOP_CONTINUOUSLY);
            clip.stop();
            // A stopped clip must not report itself as running...
            // (original tested the identical condition twice in nested ifs;
            // once is sufficient)
            if (clip.isRunning()) {
                throw new RuntimeException("Clip is running");
            }
            // ...nor as active.
            if (clip.isActive()) {
                throw new RuntimeException("Clip is active");
            }
        } finally {
            // Release the line even when a check above throws;
            // the original leaked the clip on failure.
            clip.close();
        }
    }
}
/**
 * Stress test for Clip.stop(): repeatedly creates a looping clip, stops it,
 * and verifies the clip reports itself neither running nor active afterwards.
 * Runs for at most 15 seconds or until another thread records a failure in
 * {@code failed}.
 *
 * @throws Exception if a clip cannot be created or a check fails
 */
private static void test() throws Exception {
    // Will run the test no more than 15 seconds
    long endtime = System.nanoTime() + TimeUnit.SECONDS.toNanos(15);
    while (failed == null && endtime - System.nanoTime() > 0) {
        Clip clip = createClip();
        try {
            clip.loop(Clip.LOOP_CONTINUOUSLY);
            clip.stop();
            // A stopped clip must not report itself as running...
            // (original tested the identical condition twice in nested ifs;
            // once is sufficient)
            if (clip.isRunning()) {
                throw new RuntimeException("Clip is running");
            }
            // ...nor as active.
            if (clip.isActive()) {
                throw new RuntimeException("Clip is active");
            }
        } finally {
            // Release the line even when a check above throws;
            // the original leaked the clip on failure.
            clip.close();
        }
    }
}
/**
 * Stress test for Clip.stop(): repeatedly creates a looping clip, stops it,
 * and verifies the clip reports itself neither running nor active afterwards.
 * Runs for at most 15 seconds or until another thread records a failure in
 * {@code failed}.
 *
 * @throws Exception if a clip cannot be created or a check fails
 */
private static void test() throws Exception {
    // Will run the test no more than 15 seconds
    long endtime = System.nanoTime() + TimeUnit.SECONDS.toNanos(15);
    while (failed == null && endtime - System.nanoTime() > 0) {
        Clip clip = createClip();
        try {
            clip.loop(Clip.LOOP_CONTINUOUSLY);
            clip.stop();
            // A stopped clip must not report itself as running...
            // (original tested the identical condition twice in nested ifs;
            // once is sufficient)
            if (clip.isRunning()) {
                throw new RuntimeException("Clip is running");
            }
            // ...nor as active.
            if (clip.isActive()) {
                throw new RuntimeException("Clip is active");
            }
        } finally {
            // Release the line even when a check above throws;
            // the original leaked the clip on failure.
            clip.close();
        }
    }
}
/**
 * Releases every resource held by this MultiClip: stops, flushes and closes
 * all clips, resets the clip pool, drops the cached audio data, and closes
 * the underlying input stream (reporting, but not propagating, any failure).
 */
public void destroy() {
    if (!clips.isEmpty()) {
        for (Clip clip : clips) {
            clip.stop();
            clip.flush();
            clip.close();
        }
        // Every clip beyond the first was counted as an extra clip.
        extraClips -= clips.size() - 1;
        clips = new LinkedList<Clip>();
    }
    audioData = null;
    if (audioIn != null) {
        try {
            audioIn.close();
        } catch (IOException e) {
            softErr(e, "Failed to close MultiClip %s", name);
        }
    }
}
/**
 * Stress test for Clip.stop(): repeatedly creates a looping clip, stops it,
 * and verifies the clip reports itself neither running nor active afterwards.
 * Runs for at most 15 seconds or until another thread records a failure in
 * {@code failed}.
 *
 * @throws Exception if a clip cannot be created or a check fails
 */
private static void test() throws Exception {
    // Will run the test no more than 15 seconds
    long endtime = System.nanoTime() + TimeUnit.SECONDS.toNanos(15);
    while (failed == null && endtime - System.nanoTime() > 0) {
        Clip clip = createClip();
        try {
            clip.loop(Clip.LOOP_CONTINUOUSLY);
            clip.stop();
            // A stopped clip must not report itself as running...
            // (original tested the identical condition twice in nested ifs;
            // once is sufficient)
            if (clip.isRunning()) {
                throw new RuntimeException("Clip is running");
            }
            // ...nor as active.
            if (clip.isActive()) {
                throw new RuntimeException("Clip is active");
            }
        } finally {
            // Release the line even when a check above throws;
            // the original leaked the clip on failure.
            clip.close();
        }
    }
}
/**
 * Tears down this MultiClip: every clip in the pool is stopped, flushed and
 * closed, the pool is replaced with an empty one, the cached audio data is
 * released, and the backing AudioInputStream is closed. A failure to close
 * the stream is reported via the error handler rather than propagated.
 */
public void destroy() {
    if (!clips.isEmpty()) {
        for (Clip clip : clips) {
            clip.stop();
            clip.flush();
            clip.close();
        }
        // All clips beyond the first were counted as extras; give them back.
        extraClips -= clips.size() - 1;
        clips = new LinkedList<Clip>();
    }
    audioData = null;
    if (audioIn != null) {
        try {
            audioIn.close();
        } catch (IOException e) {
            ErrorHandler.error(String.format("Could not close AudioInputStream for MultiClip %s.", name), e, true);
        }
    }
}
/**
 * Loads the cowbell sample, plays it for one second, then stops it.
 * The clip is closed in a finally block so the audio line is released
 * even if {@link Thread#sleep(long)} is interrupted (the original
 * leaked the clip on InterruptedException).
 *
 * @throws AssetException       if the sample cannot be loaded
 * @throws InterruptedException if the playback wait is interrupted
 */
private void doTest() throws AssetException, InterruptedException
{
    Clip clip = Assets.load("cowbell.wav");
    assertNotNull( clip );
    try {
        clip.start();
        Thread.sleep(1000);
        clip.stop();
    } finally {
        clip.close();
    }
}
/**
 * Verifies that once a clip has started (START event observed), the
 * isRunning()/isActive() polling loops below terminate when playback of the
 * supplied data completes — a hang in either loop is the failure mode.
 *
 * @param format format used to open the clip
 * @param data   raw audio payload to play
 * @throws Exception if no matching Clip line exists or open fails
 */
private static void test(final AudioFormat format, final byte[] data)
        throws Exception {
    // Ask the system for a Clip line that can handle this format.
    final Line.Info info = new DataLine.Info(Clip.class, format);
    final Clip clip = (Clip) AudioSystem.getLine(info);
    go = new CountDownLatch(1);
    // Release the latch as soon as playback actually starts.
    clip.addLineListener(event -> {
        if (event.getType().equals(LineEvent.Type.START)) {
            go.countDown();
        }
    });
    clip.open(format, data, 0, data.length);
    clip.start();
    go.await();
    // Busy-wait on purpose: the test asserts these predicates eventually
    // turn false; do not "fix" these into sleeps or waits.
    while (clip.isRunning()) {
        // This loop should not hang
    }
    while (clip.isActive()) {
        // This loop should not hang
    }
    clip.close();
}
/**
 * Interactive check: loops a sine-wave clip on the given mixer and asks the
 * operator to listen for clicks. The clip is held in the static field
 * {@code source}; on every exit path it is stopped/closed and nulled. The
 * IllegalArgumentException branch closes and returns early because the
 * format itself is unsupported, i.e. a test-setup error.
 * (Removed the unused local {@code int res}.)
 *
 * @param mixer the mixer to obtain the Clip line from
 */
public static void play(Mixer mixer) {
    try {
        println("Getting clip from mixer...");
        source = (Clip) mixer.getLine(info);
        println("Opening clip...");
        source.open(audioFormat, audioData, 0, audioData.length);
        println("Starting clip...");
        source.loop(Clip.LOOP_CONTINUOUSLY);
        println("Now open your ears:");
        println("- if you hear a sine wave playing,");
        println(" listen carefully if you can hear clicks.");
        println(" If no, the bug is fixed.");
        println("- if you don't hear anything, it's possible");
        println(" that this mixer is not connected to an ");
        println(" amplifier, or that its volume is set to 0");
        // Block until the operator presses a key.
        key();
    } catch (IllegalArgumentException iae) {
        println("IllegalArgumentException: "+iae.getMessage());
        println("Sound device cannot handle this audio format.");
        println("ERROR: Test environment not correctly set up.");
        if (source!=null) {
            source.close();
            source = null;
        }
        return;
    } catch (LineUnavailableException lue) {
        println("LineUnavailableException: "+lue.getMessage());
        println("This is normal for some mixers.");
    } catch (Exception e) {
        println("Unexpected Exception: "+e.toString());
    }
    // Normal (and LineUnavailable/other-exception) cleanup path.
    if (source != null) {
        println("Stopping...");
        source.stop();
        println("Closing...");
        source.close();
        println("Closed.");
        source = null;
    }
}
/**
 * Runs one full open → play → drain → close cycle on the given mixer.
 * Any throwable is reported and treated as "this mixer can't do it"
 * rather than a test failure.
 *
 * @param mixer  the mixer to obtain a Clip from (null is tolerated)
 * @param format audio format for the clip
 * @return true if the whole cycle completed without an exception
 */
private static boolean doMixerClip(Mixer mixer, AudioFormat format) {
    if (mixer == null) {
        return false;
    }
    try {
        System.out.println("Trying mixer "+mixer+":");
        DataLine.Info lineInfo = new DataLine.Info(
                Clip.class,
                format,
                (int) samplerate);
        Clip c = (Clip) mixer.getLine(lineInfo);
        System.out.println(" - got clip: "+c);
        System.out.println(" - open with format "+format);
        c.open(format, buffer, 0, buffer.length);
        System.out.println(" - playing...");
        c.start();
        System.out.println(" - waiting while it's active...");
        while (c.isActive()) {
            Thread.sleep(100);
        }
        System.out.println(" - waiting 100millis");
        Thread.sleep(100);
        System.out.println(" - drain1");
        c.drain();
        System.out.println(" - drain2");
        c.drain();
        System.out.println(" - stop");
        c.stop();
        System.out.println(" - close");
        c.close();
        System.out.println(" - closed");
        return true;
    } catch (Throwable t) {
        System.out.println(" - Caught exception. Not failed.");
        System.out.println(" - "+t.toString());
        return false;
    }
}
/**
 * Verifies that Clip.drain() blocks until (nearly) the whole clip has
 * played: starts a ~10s clip, drains, and compares the elapsed wall-clock
 * time against the clip length minus TOLERANCE_MS. Exceptions from the
 * mixer are reported and ignored (not every mixer supports the format).
 *
 * Fix: the original queried clip.getMicrosecondLength() AFTER close();
 * the length is now captured before the line is closed.
 *
 * @param mixer the mixer to test
 * @throws Exception if drain returned before the clip finished playing
 */
private static void doMixerClip(Mixer mixer) throws Exception {
    boolean waitedEnough=false;
    try {
        DataLine.Info info = new DataLine.Info(Clip.class, format);
        Clip clip = (Clip) mixer.getLine(info);
        clip.open(format, soundData, 0, soundData.length);
        // Capture the length in milliseconds while the line is still open.
        long clipLengthMs = clip.getMicrosecondLength() / 1000;
        // sanity
        if (clipLengthMs < 9900) {
            throw new Exception("clip's microsecond length should be at least 9900000, but it is "+clip.getMicrosecondLength());
        }
        long start = System.currentTimeMillis();
        System.out.println(" ---------- start --------");
        clip.start();
        // give time to actually start it. ALSA implementation needs that...
        Thread.sleep(300);
        System.out.println("drain ... ");
        clip.drain();
        long elapsedTime = System.currentTimeMillis() - start;
        System.out.println("close ... ");
        clip.close();
        System.out.println("... done");
        System.out.println("Playback duration: "+elapsedTime+" milliseconds.");
        waitedEnough = elapsedTime >= (clipLengthMs - TOLERANCE_MS);
    } catch (Throwable t) {
        System.out.println(" - Caught exception. Not failed.");
        System.out.println(" - "+t.toString());
        return;
    }
    if (!waitedEnough) {
        throw new Exception("Drain did not wait long enough to play entire clip.");
    }
    successfulTests++;
}
/**
 * This method allows to actually play the sound provided from the
 * {@link #audioInputStream}
 *
 * Fixes: the stream is now closed even when {@code getClip()} or
 * {@code open()} throws (it used to leak on those paths), and the
 * interrupt status is restored when the wait is interrupted.
 *
 * @throws LineUnavailableException
 *             if the {@link Clip} object can't be created
 * @throws IOException
 *             if the audio file can't be find
 */
protected void play() throws LineUnavailableException, IOException {
    try {
        final Clip clip = AudioSystem.getClip();
        clip.addLineListener(listener);
        clip.open(audioInputStream);
        try {
            clip.start();
            listener.waitUntilDone();
        } catch (final InterruptedException e) {
            // Re-assert the interrupt so callers can observe it.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        } finally {
            clip.close();
        }
    } finally {
        audioInputStream.close();
    }
}
/**
 * Play a wav file. Must be mono, from 8kHz to 48kHz, and 8-bit or 16-bit.
 *
 * The clip is managed by try-with-resources (Line is AutoCloseable), so it
 * is released even when open()/start() throws — the original leaked it.
 *
 * @param file the 8-bit or 16-bit PWM (WAV) sample file
 */
public void playSample(final File file) {
    try (AudioInputStream audioIn = AudioSystem.getAudioInputStream(file.toURI().toURL());
            Clip clip = AudioSystem.getClip()) {
        clip.open(audioIn);
        clip.start();
        // Block until the whole sample has been played.
        Delay.usDelay(clip.getMicrosecondLength());
    } catch (IOException | LineUnavailableException | UnsupportedAudioFileException e) {
        LOGGER.error(e.getLocalizedMessage(), e);
        throw new RuntimeException(e);
    }
}
/**
 * Shuts down the sound system: spin-waits until no channel clip is still
 * active, then closes every channel clip and every cached clip.
 */
@Override
public void ShutdownSound() {
    // Wait till all pending sounds are finished.
    boolean done = false;
    int i;
    // FIXME (below).
    //fprintf( stderr, "I_ShutdownSound: NOT finishing pending sounds\n");
    //fflush( stderr );
    while ( !done)
    {
        // Scan for the first channel that is non-null AND active; if the
        // scan runs off the end (i == numChannels), nothing is playing.
        // NOTE(review): this is a busy-wait with no sleep — intentional in
        // the original, but it will spin a CPU core until sounds finish.
        for( i=0 ; i<numChannels && ((channels[i]==null)||(!channels[i].isActive())) ; i++);
        // FIXME. No proper channel output.
        if (i==numChannels) done=true;
    }
    // Release every channel's audio line.
    for( i=0 ; i<numChannels; i++){
        if (channels[i]!=null)
            channels[i].close();
    }
    // Free up resources taken up by cached clips.
    Collection<Clip> clips=this.cachedSounds.values();
    for (Clip c:clips){
        c.close();
    }
    // Done.
    return;
}
/**
 * Builds a silent PCM clip whose length grows/shrinks on each call (bounded
 * between 1000 bytes and 8 seconds of 44.1kHz 16-bit stereo), starts it, and
 * registers a listener so that close() is invoked from the event dispatcher
 * thread when the STOP event arrives — which is the scenario under test.
 * (Removed the unused local {@code AudioFormat fmt}.)
 *
 * @return the expected playback duration in milliseconds
 * @throws Exception if the Clip line cannot be obtained
 */
public static long start() throws Exception {
    // Alternate growing/shrinking the test buffer between invocations.
    if (addLen) {
        staticLen += (int) (staticLen / 5) + 1000;
    } else {
        staticLen -= (int) (staticLen / 5) + 1000;
    }
    // Clamp to at most 8 seconds of 44.1kHz stereo 16-bit frames...
    if (staticLen > 8 * 44100 * 4) {
        staticLen = 8 * 44100 * 4;
        addLen = !addLen;
    }
    // ...and at least 1000 bytes; flip direction at either bound.
    if (staticLen < 1000) {
        staticLen = 1000;
        addLen = !addLen;
    }
    int len = staticLen;
    // Round down to a whole number of 4-byte frames.
    len -= (len % 4);
    byte[] fakedata = new byte[len];
    InputStream is = new ByteArrayInputStream(fakedata);
    AudioFormat format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
            44100, 16, 2, 4, 44100, false);
    AudioInputStream ais = new AudioInputStream(is, format, fakedata.length
            / format.getFrameSize());
    out("  preparing to play back " + len + " bytes == " + bytes2Ms(len,
            format)
            + "ms audio...");
    DataLine.Info info = new DataLine.Info(Clip.class, ais.getFormat());
    clip = (Clip) AudioSystem.getLine(info);
    // The point of the test: close the clip from inside the event
    // dispatcher thread when playback stops.
    clip.addLineListener(new LineListener() {
        public void update(LineEvent e) {
            if (e.getType() == LineEvent.Type.STOP) {
                out("  calling close() from event dispatcher thread");
                ((Clip) e.getSource()).close();
            } else if (e.getType() == LineEvent.Type.CLOSE) {
            }
        }
    });
    out("  opening...");
    try {
        clip.open(ais);
    } catch (Throwable t) {
        t.printStackTrace();
        clip.close();
        clip = null;
    }
    ais.close();
    if (clip != null) {
        out("  starting...");
        clip.start();
    }
    return bytes2Ms(fakedata.length, format);
}