Java Code Examples for javax.sound.sampled.LineUnavailableException#printStackTrace()

The following examples show how to use javax.sound.sampled.LineUnavailableException#printStackTrace(). They are extracted from open source projects; the source file, project, and license are noted above each example.
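
Before the project examples, here is a minimal, self-contained sketch of the pattern they all share: request a line from AudioSystem and print the stack trace when the checked LineUnavailableException is thrown. The class name and audio format below are illustrative and not taken from any of the projects listed.

import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.SourceDataLine;

public class LineUnavailableDemo {

	public static void main(String[] args) {
		// Signed 16-bit little-endian stereo PCM at 44.1 kHz.
		AudioFormat format = new AudioFormat(44100.0f, 16, 2, true, false);
		try {
			SourceDataLine line = AudioSystem.getSourceDataLine(format);
			line.open(format);
			line.start();
			// ... write audio data to the line here ...
			line.drain();
			line.close();
		} catch (LineUnavailableException e) {
			// Thrown when the line is already in use or the format is not supported.
			e.printStackTrace();
		}
	}
}
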
Example 1
Source File: VoicePlay.java    From oim-fx with MIT License    6 votes
public VoicePlay() {

		try {

			format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, 44100.0f, 16, 1, 2, 44100.0f, false);
			DataLine.Info listenInfo = new DataLine.Info(TargetDataLine.class, format);
			boolean l = AudioSystem.isLineSupported(listenInfo);
			if (l) {
				listenLine = (TargetDataLine) AudioSystem.getLine(listenInfo);
			}

			DataLine.Info playInfo = new DataLine.Info(SourceDataLine.class, format);
			boolean p = AudioSystem.isLineSupported(playInfo);
			if (p) {
				playLine = (SourceDataLine) AudioSystem.getLine(playInfo);
			}
			lineSupported = l && p;
		} catch (LineUnavailableException e) {
			e.printStackTrace();
		}
	}
 
Example 2
Source File: SinkAudio.java    From FoxTelem with GNU General Public License v3.0    6 votes
/**
 * FIXME:
 * Specify the buffer size in the open(AudioFormat, int) method. A delay of 10ms-100ms is acceptable for realtime
 * audio. Very low latencies will not work on all computer systems, and 100ms or more will probably be annoying for
 * your users. A good tradeoff is around 50ms. For your audio format (8-bit, mono at 44100Hz), a good buffer size is
 * 2200 bytes, which is almost 50ms.
 */
void initializeOutput() {
	
	DataLine.Info dataLineInfo = new DataLine.Info(SourceDataLine.class, audioFormat);
	//line = (TargetDataLine) AudioSystem.getLine(info);
	//Mixer m = AudioSystem.getMixer(null);
	try {
		//sourceDataLine = (SourceDataLine)m.getLine(dataLineInfo);
		sourceDataLine = (SourceDataLine)AudioSystem.getLine(dataLineInfo);
		sourceDataLine.open(audioFormat);
		sourceDataLine.start();
	} catch (LineUnavailableException e) {
		// TODO Auto-generated catch block
		e.printStackTrace(Log.getWriter());
	}

}
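
As a follow-up to the FIXME above, a hedged sketch of the buffer-size calculation it asks for, assuming the 8-bit mono 44100 Hz format mentioned in the comment; the 50ms target and the variable names are illustrative:

	// Assumed format from the FIXME above: 8-bit, signed, mono, 44100 Hz PCM.
	AudioFormat audioFormat = new AudioFormat(44100.0f, 8, 1, true, false);
	int latencyMs = 50; // the tradeoff suggested in the comment
	// bytes = frame size * frame rate * latency; roughly 2205 bytes here (~50ms)
	int bufferSize = Math.round(audioFormat.getFrameSize() * audioFormat.getFrameRate() * latencyMs / 1000f);

	DataLine.Info dataLineInfo = new DataLine.Info(SourceDataLine.class, audioFormat);
	try {
		SourceDataLine sourceDataLine = (SourceDataLine) AudioSystem.getLine(dataLineInfo);
		sourceDataLine.open(audioFormat, bufferSize); // explicit buffer size instead of the default
		sourceDataLine.start();
	} catch (LineUnavailableException e) {
		e.printStackTrace();
	}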
 
Example 3
Source File: MultiClip.java    From opsu-dance with GNU General Public License v3.0    6 votes
/**
 * Mutes the Clip (because destroying it won't stop it).
 */
public void mute() {
	try {
		Clip c = getClip();
		if (c == null) {
			return;
		}
		float val = (float) (Math.log(Float.MIN_VALUE) / Math.log(10.0) * 20.0);
		if (val < -80.0f) {
			val = -80.0f;
		}
		((FloatControl) c.getControl(FloatControl.Type.MASTER_GAIN)).setValue(val);
	} catch (IllegalArgumentException ignored) {
	} catch (LineUnavailableException e) {
		e.printStackTrace();
	}
}
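
For completeness, a minimal sketch of muting and later restoring a Clip by saving its MASTER_GAIN value; the field and method names are illustrative, and the -80 dB floor mirrors the clamp used above:

private float savedGain;

public void mute(Clip c) {
	FloatControl gain = (FloatControl) c.getControl(FloatControl.Type.MASTER_GAIN);
	savedGain = gain.getValue();
	gain.setValue(Math.max(gain.getMinimum(), -80.0f)); // effectively silent
}

public void unmute(Clip c) {
	FloatControl gain = (FloatControl) c.getControl(FloatControl.Type.MASTER_GAIN);
	gain.setValue(savedGain);
}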
 
Example 4
Source File: VoicePlay.java    From oim-fx with MIT License    5 votes
public boolean startListen() {
	if (null != listenLine && lineSupported) {
		try {
			listenLine.open(format, listenLine.getBufferSize());
			listenLine.start();
			length = (int) (format.getFrameSize() * format.getFrameRate() / 2.0f);
			listenStart = true;
		} catch (LineUnavailableException e) {
			e.printStackTrace();
			listenStart = false;
		}
	}
	return listenStart;
}
 
Example 5
Source File: Demo_MultichannelAudio_NativeMultipleSoundcard.java    From haxademic with MIT License    5 votes
protected Clip clipFromLine() {
	try {
		// did this even work??
		int lineIndex = UI.valueInt(LINE_INDEX);
		lineIndex = P.constrain(lineIndex, 0, linesOut.size() - 1);
		Line.Info lineInfo = linesOut.get(lineIndex);
		Line line = AudioSystem.getLine(lineInfo);
		Clip clip = (Clip)line;

		return clip;
	} catch (LineUnavailableException e) {
		e.printStackTrace();
		return null;
	}
}
 
Example 6
Source File: Demo_MultichannelAudio_NativeMultipleSoundcard.java    From haxademic with MIT License    5 votes
protected Clip clipFromMixer() {
	try {
		int mixerIndex = UI.valueInt(MIXER_INDEX);
		mixerIndex = P.constrain(mixerIndex, 0, mixers.size() - 1);
		Clip clip;
		clip = AudioSystem.getClip(mixers.get(mixerIndex).getMixerInfo());
		return clip;
	} catch (LineUnavailableException e) {
		e.printStackTrace();
		return null;
	}
}
 
Example 7
Source File: Demo_MultichannelAudio_NativeMultipleSoundcard.java    From haxademic with MIT License    5 votes
protected Clip clipFromDefault() {
	try {
		Line.Info lineInfo = new Line.Info(Clip.class);
		Line line = AudioSystem.getLine(lineInfo);
		Clip clip = (Clip)line;
		return clip;
	} catch (LineUnavailableException e) {
		e.printStackTrace();
		return null;
	}
}
 
Example 8
Source File: TGTuner.java    From tuxguitar with GNU Lesser General Public License v2.1    5 votes
public void openDataLine() {
	if (this.dataLine != null) {
		synchronized (this) {
			this.computeFFTParams();
			try {
				this.dataLine.open();
				this.dataLine.start();
				//this.dataLine.open(this.settings.getAudioFormat(), this.settings.getBufferSize());
			} catch (LineUnavailableException e) {
				System.out.println("------- TGTuner: openDataLine() exception -------");
				e.printStackTrace();
			}
		}
	}
}
 
Example 9
Source File: UnexpectedIAE.java    From openjdk-jdk9 with GNU General Public License v2.0    5 votes
public static void main(String argv[]) throws Exception {
    boolean success = true;

    Mixer.Info [] infos = AudioSystem.getMixerInfo();

    for (int i=0; i<infos.length; i++) {
        Mixer mixer = AudioSystem.getMixer(infos[i]);
        System.out.println("Mixer is: " + mixer);
        Line.Info [] target_line_infos = mixer.getTargetLineInfo();
        for (int j = 0; j < target_line_infos.length; j++) {
            try {
                System.out.println("Trying to get:" + target_line_infos[j]);
                mixer.getLine(target_line_infos[j]);
            } catch (IllegalArgumentException iae) {
                System.out.println("Unexpected IllegalArgumentException raised:");
                iae.printStackTrace();
                success = false;
            } catch (LineUnavailableException lue) {
                System.out.println("Unexpected LineUnavailableException raised:");
                lue.printStackTrace();
                success = false;
            }
        }
    }
    if (success) {
        System.out.println("Test passed");
    } else {
        throw new Exception("Test FAILED");
    }
}
 
Example 10
Source File: NFFTStrategy.java    From Panako with GNU Affero General Public License v3.0    5 votes
@Override
public void monitor(String query,final  int maxNumberOfResults,Set<Integer> avoid,
		final QueryResultHandler handler) {
	
	int samplerate = Config.getInt(Key.NFFT_SAMPLE_RATE);
	int size = Config.getInt(Key.MONITOR_STEP_SIZE) * samplerate;
	int overlap = Config.getInt(Key.MONITOR_OVERLAP) * samplerate;
	AudioDispatcher d ;
	if (query.equals(Panako.DEFAULT_MICROPHONE)){
		try {
			d = AudioDispatcherFactory.fromDefaultMicrophone(samplerate,size, overlap);
		} catch (LineUnavailableException e) {
			LOG.warning("Could not connect to default microphone!" + e.getMessage());
			e.printStackTrace();
			d = null;
		}
	}else{
		d = AudioDispatcherFactory.fromPipe(query, samplerate, size, overlap);
	}
	d.addAudioProcessor(new AudioProcessor() {
		@Override
		public boolean process(AudioEvent audioEvent) {
			double timeStamp = audioEvent.getTimeStamp() - Config.getInt(Key.MONITOR_OVERLAP);
			processMonitorQuery(audioEvent.getFloatBuffer().clone(), maxNumberOfResults, handler,timeStamp,avoid);
			return true;
		}
		
		@Override
		public void processingFinished() {
		}
	});
	d.run();
}
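
Note that when the default microphone is unavailable, d is set to null and the d.addAudioProcessor(...) call that follows would throw a NullPointerException; the same pattern appears in the next two examples. A hedged rewrite of just that branch, returning early instead (a sketch, not Panako's actual code):

	AudioDispatcher d;
	if (query.equals(Panako.DEFAULT_MICROPHONE)) {
		try {
			d = AudioDispatcherFactory.fromDefaultMicrophone(samplerate, size, overlap);
		} catch (LineUnavailableException e) {
			LOG.warning("Could not connect to default microphone! " + e.getMessage());
			e.printStackTrace();
			return; // give up instead of continuing with a null dispatcher
		}
	} else {
		d = AudioDispatcherFactory.fromPipe(query, samplerate, size, overlap);
	}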
 
Example 11
Source File: NFFTStrategy.java    From Panako with GNU Affero General Public License v3.0    5 votes
public void monitor(String query,final SerializedFingerprintsHandler handler){
	
	int samplerate = Config.getInt(Key.NFFT_SAMPLE_RATE);
	int size = Config.getInt(Key.MONITOR_STEP_SIZE) * samplerate;
	int overlap = Config.getInt(Key.MONITOR_OVERLAP) * samplerate;
	AudioDispatcher d ;
	if (query.equals(Panako.DEFAULT_MICROPHONE)){
		try {
			d = AudioDispatcherFactory.fromDefaultMicrophone(samplerate,size, overlap);
		} catch (LineUnavailableException e) {
			LOG.warning("Could not connect to default microphone!" + e.getMessage());
			e.printStackTrace();
			d = null;
		}
	}else{
		d = AudioDispatcherFactory.fromPipe(query, samplerate, size, overlap);
	}
	d.addAudioProcessor(new AudioProcessor() {
		@Override
		public boolean process(AudioEvent audioEvent) {
			double timeStamp = audioEvent.getTimeStamp() - Config.getInt(Key.MONITOR_OVERLAP);
			processMonitorQueryToSerializeFingerprints(audioEvent.getFloatBuffer().clone(), handler,timeStamp);
			return true;
		}
		
		@Override
		public void processingFinished() {
		}
	});
	d.run();
}
 
Example 12
Source File: RafsRepStrategy.java    From Panako with GNU Affero General Public License v3.0    5 votes
@Override
public void monitor(String query, int maxNumberOfResults, Set<Integer> avoid, QueryResultHandler handler) {
	int samplerate = Config.getInt(Key.RAFS_SAMPLE_RATE);
	int size = Config.getInt(Key.MONITOR_STEP_SIZE) * samplerate;
	int overlap = Config.getInt(Key.MONITOR_OVERLAP) * samplerate;
	AudioDispatcher d ;
	if (query.equals(Panako.DEFAULT_MICROPHONE)){
		try {
			d = AudioDispatcherFactory.fromDefaultMicrophone(samplerate,size, overlap);
		} catch (LineUnavailableException e) {
			LOG.warning("Could not connect to default microphone!" + e.getMessage());
			e.printStackTrace();
			d = null;
		}
	}else{
		d = AudioDispatcherFactory.fromPipe(query, samplerate, size, overlap);
	}
	d.setZeroPadFirstBuffer(true);
	d.addAudioProcessor(new AudioProcessor() {
		@Override
		public boolean process(AudioEvent audioEvent) {
			double timeStamp = audioEvent.getTimeStamp() - Config.getInt(Key.MONITOR_OVERLAP);
			processMonitorQuery(audioEvent.getFloatBuffer().clone(), handler,timeStamp,avoid);
			return true;
		}
		
		@Override
		public void processingFinished() {
		}
	});
	d.run();
}
 
Example 13
Source File: Play.java    From Panako with GNU Affero General Public License v3.0    5 votes
@Override
public void run(String... args) {
	String inputResource = AudioResourceUtils.sanitizeResource(args[0]);
	AudioDispatcher d;
	try {
		d = AudioDispatcherFactory.fromPipe(inputResource, TARGET_SAMPLE_RATE, 2028, 0);
		d.addAudioProcessor(new AudioPlayer(JVMAudioInputStream.toAudioFormat(d.getFormat())));
		d.run();
	}  catch (LineUnavailableException e) {
		e.printStackTrace();
		System.err.print(e.getLocalizedMessage());
	}
}
 
Example 14
Source File: VoicePlay.java    From oim-fx with MIT License    5 votes
public boolean startPlay() {
	if (null != playLine && lineSupported) {
		try {

			playLine.open(format);
			playLine.start();
			playStart = true;
		} catch (LineUnavailableException e) {
			e.printStackTrace();
			playStart = false;
		}
	}
	return playStart;
}
 
Example 15
Source File: DavidSFXModule.java    From mochadoom with GNU General Public License v3.0    4 votes
/** This one will only create datalines for common clip/audioline samples
 *  directly.
 * 
 * @param c
 * @param sfxid
 */
private final void createDataLineForChannel(int c, int sfxid) {

	// None? Make a new one.

	if (channels[c].auline == null) {
		try {
			DoomSound tmp = cachedSounds.get(sfxid);
			// Sorry, Charlie. Gotta make a new one.
			DataLine.Info info = new DataLine.Info(SourceDataLine.class, DoomSound.DEFAULT_SAMPLES_FORMAT);
			channels[c].auline = (SourceDataLine) AudioSystem.getLine(info);
			channels[c].auline.open(tmp.format);
		} catch (LineUnavailableException e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
		}

		boolean errors = false;

		// Add individual volume control.
		if (channels[c].auline.isControlSupported(Type.MASTER_GAIN)) {
			channels[c].vc = (FloatControl) channels[c].auline.getControl(Type.MASTER_GAIN);
		} else {
			System.err.print("MASTER_GAIN, ");
			errors = true;
			if (channels[c].auline.isControlSupported(Type.VOLUME)) {
				channels[c].vc = (FloatControl) channels[c].auline.getControl(Type.VOLUME);
			} else {
				System.err.print("VOLUME, ");
			}
		}

		// Add individual pitch control.
		if (channels[c].auline.isControlSupported(Type.SAMPLE_RATE)) {
			channels[c].pc = (FloatControl) channels[c].auline.getControl(Type.SAMPLE_RATE);
		} else {
			errors = true;
			System.err.print("SAMPLE_RATE, ");
		}

		// Add individual pan control.
		if (channels[c].auline.isControlSupported(Type.BALANCE)) {
			channels[c].bc = (FloatControl) channels[c].auline.getControl(FloatControl.Type.BALANCE);
		} else {
			System.err.print("BALANCE, ");
			errors = true;
			if (channels[c].auline.isControlSupported(Type.PAN)) {
				channels[c].bc = (FloatControl) channels[c].auline.getControl(FloatControl.Type.PAN);
			} else {
				System.err.print("PANNING ");
			}
		}

		if (errors) System.err.printf("for channel %d NOT supported!\n", c);

		channels[c].auline.start();
	}
}
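
If AudioSystem.getLine or open fails above, channels[c].auline stays null and the control setup that follows throws a NullPointerException. A hedged sketch of the same guard with an early return (not mochadoom's actual code):

	if (channels[c].auline == null) {
		try {
			DoomSound tmp = cachedSounds.get(sfxid);
			DataLine.Info info = new DataLine.Info(SourceDataLine.class, DoomSound.DEFAULT_SAMPLES_FORMAT);
			channels[c].auline = (SourceDataLine) AudioSystem.getLine(info);
			channels[c].auline.open(tmp.format);
		} catch (LineUnavailableException e) {
			e.printStackTrace();
			return; // no usable line, so skip the control setup entirely
		}
		// ... volume/pitch/pan control setup and auline.start() as above ...
	}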
 
Example 16
Source File: Demo_MultichannelAudio_NativeMultipleSoundcard.java    From haxademic with MIT License    4 votes
protected Clip clipFromCurMixerLine() {
		try {
			Line.Info[] sourceInfos = curMixer().getSourceLineInfo();
			for (int s = 0; s < sourceInfos.length; s++) {
				P.out("-- sourceInfos", sourceInfos[s]);
			}
			Line.Info lineInfo = sourceInfos[1];
			P.out("*** info: " + lineInfo);
			Line line = AudioSystem.getLine(lineInfo);
//			AudioSystem.get
			DataLine.Info dataLineInfo = ((DataLine.Info) line.getLineInfo());
//			SourceDataLine srcDataLine = AudioSystem.getSourceDataLine(format, curMixer().getMixerInfo());
//			P.out("*** srcDataLine info: " + srcDataLine.getLineInfo());
			Line lineSpecific = null;
			P.out("dataLineInfo.getFormats().length", dataLineInfo.getFormats().length);
			AudioFormat format = dataLineInfo.getFormats()[0];
//			if (line instanceof SourceDataLine) {
//				Arrays.asList(((DataLine.Info) line.getLineInfo()).getFormats()).forEach(format -> {
//					P.out("#######");
					P.out("Channels: " + format.getChannels());
					P.out("Size in Bits: " + format.getSampleSizeInBits());
					P.out("Frame Rate: " + format.getFrameRate());
					P.out("Frame Size: " + format.getFrameSize());
					P.out("Encoding: " + format.getEncoding());
					P.out("Sample Rate: " + format.getSampleRate());

//				});
//			}
			DataLine.Info info = new DataLine.Info(Clip.class, format);
//			SourceDataLine srcDataLine = AudioSystem.getSourceDataLine(format, curMixer().getMixerInfo());
			lineSpecific = curMixer().getLine(info);
//			Line lineSpecific = AudioSystem.getLine(srcDataLine.getLineInfo());
//			Line lineSpecific = curMixer().getLine(srcDataLine.getLineInfo());
			Port.Info infoooo = (Port.Info)lineSpecific.getLineInfo();
			P.out("infoooo", infoooo);

			Clip clip = (Clip)lineSpecific;
			return clip;
		} catch (LineUnavailableException e) {
			e.printStackTrace();
			return null;
		}
	}
 
Example 17
Source File: JavaInfo.java    From haxademic with MIT License    4 votes
public static void printAudioInfo() {
		P.out("----------------- printAudioInfo -------------------");
		Mixer.Info[] mixerInfo = AudioSystem.getMixerInfo();
		for(int i = 0; i < mixerInfo.length; i++) {
			P.out("########## mixerInfo["+i+"]", mixerInfo[i].getName());

//			Mixer mixer = AudioSystem.getMixer(null); // default mixer
			Mixer mixer = AudioSystem.getMixer(mixerInfo[i]); // mixer at index i
			try {
				mixer.open();
			} catch (LineUnavailableException e) {
				e.printStackTrace();
			}
	
			P.out("Supported SourceDataLines of default mixer (%s):\n\n", mixer.getMixerInfo().getName());
			for(Line.Info info : mixer.getSourceLineInfo()) {
			    if(SourceDataLine.class.isAssignableFrom(info.getLineClass())) {
			        SourceDataLine.Info info2 = (SourceDataLine.Info) info;
			        P.out(info2);
			        System.out.printf("  max buffer size: \t%d\n", info2.getMaxBufferSize());
			        System.out.printf("  min buffer size: \t%d\n", info2.getMinBufferSize());
			        AudioFormat[] formats = info2.getFormats();
			        P.out("  Supported Audio formats: ");
			        for(AudioFormat format : formats) {
			        	P.out("    "+format);
			          System.out.printf("      encoding:           %s\n", format.getEncoding());
			          System.out.printf("      channels:           %d\n", format.getChannels());
			          System.out.printf(format.getFrameRate()==-1?"":"      frame rate [1/s]:   %s\n", format.getFrameRate());
			          System.out.printf("      frame size [bytes]: %d\n", format.getFrameSize());
			          System.out.printf(format.getSampleRate()==-1?"":"      sample rate [1/s]:  %s\n", format.getSampleRate());
			          System.out.printf("      sample size [bit]:  %d\n", format.getSampleSizeInBits());
			          System.out.printf("      big endian:         %b\n", format.isBigEndian());
			          
			          Map<String,Object> prop = format.properties();
			          if(!prop.isEmpty()) {
			        	  P.out("      Properties: ");
			              for(Map.Entry<String, Object> entry : prop.entrySet()) {
			                  System.out.printf("      %s: \t%s\n", entry.getKey(), entry.getValue());
			              }
			          }
			        }
			        P.out();
			    } else {
			    	P.out(info.toString());
			    }
			    P.out();
			}
			mixer.close();
		}
	}