Java Code Examples for javax.sound.sampled.AudioSystem#write()

The following examples show how to use javax.sound.sampled.AudioSystem#write(). Each example is drawn from an open-source project; the source file and project are noted above each snippet.
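Before the project-specific examples, the basic pattern is worth spelling out: wrap raw PCM bytes in an AudioInputStream and hand it to AudioSystem.write() together with a target file type and destination. The following minimal sketch writes one second of a generated sine tone to a WAV file; the tone generation and the file name tone.wav are illustrative choices, not taken from any of the projects below.

import javax.sound.sampled.AudioFileFormat;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;

public class WriteWavExample {
    public static void main(String[] args) throws IOException {
        float sampleRate = 44100f;
        // one second of a 440 Hz sine tone: 16-bit signed PCM, mono, little-endian
        byte[] pcm = new byte[(int) sampleRate * 2];
        for (int i = 0; i < (int) sampleRate; i++) {
            short sample = (short) (0.5 * Short.MAX_VALUE * Math.sin(2 * Math.PI * 440 * i / sampleRate));
            pcm[2 * i] = (byte) (sample & 0xFF);            // low byte first (little-endian)
            pcm[2 * i + 1] = (byte) ((sample >> 8) & 0xFF);
        }

        AudioFormat format = new AudioFormat(sampleRate, 16, 1, true, false);
        // the third constructor argument is the stream length in sample frames, not bytes
        AudioInputStream stream = new AudioInputStream(
                new ByteArrayInputStream(pcm), format, pcm.length / format.getFrameSize());
        AudioSystem.write(stream, AudioFileFormat.Type.WAVE, new File("tone.wav"));
    }
}

Note that the length argument of AudioInputStream counts sample frames (frame size = sample bytes × channels), not bytes; that distinction comes up repeatedly in the examples below.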
Example 1
Source File: AutoCloseTimeCheck.java    From dragonwell8_jdk with GNU General Public License v2.0 (identical copies of this test also appear in TencentKona-8, openjdk-jdk8u, and jdk8u_jdk)
public static void main(final String[] args) throws Exception {
    // Prepare the audio file
    File file = new File("audio.wav");
    try {
        AudioFormat format =
                new AudioFormat(PCM_SIGNED, 44100, 8, 1, 1, 44100, false);
        AudioSystem.write(getStream(format), Type.WAVE, file);
    } catch (final Exception ignored) {
        return; // the test is not applicable
    }
    try {
        testSmallDelay(file);
        testBigDelay(file);
    } finally {
        Files.delete(file.toPath());
    }
}
 
Example 2
Source File: AiffSampleRate.java    From openjdk-jdk9 with GNU General Public License v2.0
private static boolean testSampleRate(float sampleRate) {
    boolean result = true;

    try {
        // create a one-byte AudioInputStream with the given sample rate
        ByteArrayInputStream data = new ByteArrayInputStream(new byte[1]);
        AudioFormat format = new AudioFormat(sampleRate, 8, 1, true, true);
        AudioInputStream stream = new AudioInputStream(data, format, 1);

        // write to AIFF file
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        AudioSystem.write(stream, AudioFileFormat.Type.AIFF, outputStream);
        byte[] fileData = outputStream.toByteArray();
        InputStream inputStream = new ByteArrayInputStream(fileData);
        AudioFileFormat aff = AudioSystem.getAudioFileFormat(inputStream);
        if (! equals(sampleRate, aff.getFormat().getFrameRate())) {
            out("error for sample rate " + sampleRate);
            result = false;
        }
    } catch (Exception e) {
        out(e);
        out("Test NOT FAILED");
    }
    return result;
}
 
Example 3
Source File: DesktopAudioRecordingService.java    From attach with GNU General Public License v3.0
private void save(String fileName) throws IOException {
    byte[] audioData = recordBytes.toByteArray();
    final File wavFile = new File(getAudioFolder(), fileName + ".wav");
    ByteArrayInputStream bais = new ByteArrayInputStream(audioData);
    try (AudioInputStream audioInputStream = new AudioInputStream(bais, format, audioData.length / format.getFrameSize())) {
        AudioSystem.write(audioInputStream, AudioFileFormat.Type.WAVE, wavFile);
    }
    recordBytes.close();
    if (debug) {
        LOG.log(Level.INFO, String.format("File %s.wav added to %s", fileName, getAudioFolder()));
    }
    addChunk.apply(fileName + ".wav");
}
 
Example 4
Source File: WriteAuUnspecifiedLength.java    From openjdk-jdk9 with GNU General Public License v2.0
public static void main(String argv[]) throws Exception {
    AudioFormat format = new AudioFormat(44100, 16, 2, true, true);
    InputStream is = new ByteArrayInputStream(new byte[1000]);
    AudioInputStream ais = new AudioInputStream(is, format, AudioSystem.NOT_SPECIFIED);
    AudioSystem.write(ais, AudioFileFormat.Type.AU, new ByteArrayOutputStream());
    System.out.println("Test passed.");
}
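A note on this test: the stream length is AudioSystem.NOT_SPECIFIED and the destination is a plain OutputStream, which cannot be rewound to patch the header once the real data size is known. The AU header may declare its data length as unknown, so this write is expected to succeed; for file types whose header requires an exact length up front, AudioSystem.write() is documented to fail with an IOException in this situation.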
 
Example 5
Source File: PdOffline.java    From gdx-pd with Apache License 2.0
/**
 * Bake a patch to a wav file
 * @param patch the patch to bake
 * @param wav the wav file to write
 * @param channels how many channels (1 for mono, 2 for stereo, can be more than 2 channels)
 * @param sampleRate sample rate used by Pd
 * @param time baking duration in seconds
 * @throws IOException
 */
public static void bake(File patch, File wav, int channels, int sampleRate, float time) throws IOException {
	
	// Disable Pd: does nothing if Pd is already initialized.
	PdConfiguration.disabled = true;

	// Pause audio. Does nothing in headless mode, but is required so that
	// Pd's static code is executed (the library is loaded).
	Pd.audio.pause();
	
	int handle = PdBase.openPatch(patch);
	PdBase.openAudio(0, channels, sampleRate);
	PdBase.computeAudio(true);
	
	int frames = (int)(time * sampleRate);
	int samples = frames * channels;
	short [] data = new short[samples];
	int ticks = frames / PdBase.blockSize();
	PdBase.process(ticks, new short[]{}, data);
	
	PdBase.closePatch(handle);
	
	// save: pack the 16-bit samples as little-endian bytes and wrap them in an AudioInputStream
	byte [] buf = new byte[data.length * 2];
	for(int i=0 ; i<data.length ; i++){
		buf[i*2+0] = (byte)(data[i] & 0xFF);
		buf[i*2+1] = (byte)((data[i] >> 8) & 0xFF);
	}
	
	ByteArrayInputStream stream = new ByteArrayInputStream(buf);
	AudioFormat format = new AudioFormat(sampleRate, 16, channels, true, false);
	AudioInputStream audioStream = new AudioInputStream(stream, format, frames); // length is in sample frames
	AudioSystem.write(audioStream, Type.WAVE, wav);
	
	// resume audio
	Pd.audio.resume();
}
 
Example 6
Source File: MidiToAudioWriter.java    From tuxguitar with GNU Lesser General Public License v2.1
public static void write(OutputStream out, List<MidiEvent> events, MidiToAudioSettings settings) throws Throwable {
	MidiToAudioSynth.instance().openSynth();
	MidiToAudioSynth.instance().loadSoundbank(getPatchs(events), settings.getSoundbankPath());
	
	int usqTempo = 60000000 / 120;
	long previousTick = 0;
	long timePosition = 0;
	MidiToAudioWriter.sort(events);
	Receiver receiver = MidiToAudioSynth.instance().getReceiver();
	AudioInputStream stream = MidiToAudioSynth.instance().getStream();
	
	Iterator<MidiEvent> it = events.iterator();
	while(it.hasNext()){
		MidiEvent event = (MidiEvent)it.next();
		MidiMessage msg = event.getMessage();
		
		timePosition += ( (event.getTick() - previousTick) * usqTempo) / TGDuration.QUARTER_TIME;
		
		if (msg instanceof MetaMessage) {
			// meta event 0x51 (set tempo): the three data bytes hold microseconds per quarter note
			if (((MetaMessage) msg).getType() == 0x51) {
				byte[] data = ((MetaMessage) msg).getData();
				usqTempo = ((data[0] & 0xff) << 16) | ((data[1] & 0xff) << 8) | (data[2] & 0xff);
			}
		} else {
			receiver.send(msg, timePosition);
		}
		previousTick = event.getTick();
	}
	
	long duration = (long) (stream.getFormat().getFrameRate() * ( (timePosition / 1000000.0) ));
	
	AudioInputStream srcStream = new AudioInputStream(stream, stream.getFormat(), duration );
	AudioInputStream dstStream = AudioSystem.getAudioInputStream(settings.getFormat(), srcStream );
	AudioSystem.write(new AudioInputStream(dstStream, dstStream.getFormat(), duration ), settings.getType(), out);
	
	dstStream.close();
	srcStream.close();
	
	MidiToAudioSynth.instance().closeSynth();
}
 
Example 7
Source File: JVoiceXmlDocumentServer.java    From JVoiceXML with GNU Lesser General Public License v2.1
/**
 * {@inheritDoc}
 */
@Override
public URI storeAudio(final AudioInputStream in) throws BadFetchError {
    try {
        final File directory = getRecordingsDirectory();
        final File file = File.createTempFile("rec-", ".wav", directory);
        AudioSystem.write(in, AudioFileFormat.Type.WAVE, file);
        LOGGER.info("recorded to file '" + file.toURI() + "'");
        return file.toURI();
    } catch (IOException ex) {
        throw new BadFetchError(ex.getMessage(), ex);
    }
}
 
Example 8
Source File: StdAudio.java    From algs4 with GNU General Public License v3.0
/**
 * Saves the double array as an audio file (using .wav or .au format).
 *
 * @param  filename the name of the audio file
 * @param  samples the array of samples
 * @throws IllegalArgumentException if unable to save {@code filename}
 * @throws IllegalArgumentException if {@code samples} is {@code null}
 * @throws IllegalArgumentException if {@code filename} is {@code null}
 * @throws IllegalArgumentException if {@code filename} extension is not {@code .wav}
 *         or {@code .au}
 */
public static void save(String filename, double[] samples) {
    if (filename == null) {
        throw new IllegalArgumentException("filename is null");
    }
    if (samples == null) {
        throw new IllegalArgumentException("samples[] is null");
    }

    // assumes 16-bit samples with sample rate = 44,100 Hz
    // use 16-bit audio, mono, signed PCM, little Endian
    AudioFormat format = new AudioFormat(SAMPLE_RATE, 16, MONO, SIGNED, LITTLE_ENDIAN);
    byte[] data = new byte[2 * samples.length];
    for (int i = 0; i < samples.length; i++) {
        int temp = (short) (samples[i] * MAX_16_BIT);
        if (samples[i] == 1.0) temp = Short.MAX_VALUE;   // special case since 32768 not a short
        data[2*i + 0] = (byte) temp;
        data[2*i + 1] = (byte) (temp >> 8);   // little endian
    }

    // now save the file
    try {
        ByteArrayInputStream bais = new ByteArrayInputStream(data);
        AudioInputStream ais = new AudioInputStream(bais, format, samples.length);
        if (filename.endsWith(".wav") || filename.endsWith(".WAV")) {
            AudioSystem.write(ais, AudioFileFormat.Type.WAVE, new File(filename));
        }
        else if (filename.endsWith(".au") || filename.endsWith(".AU")) {
            AudioSystem.write(ais, AudioFileFormat.Type.AU, new File(filename));
        }
        else {
            throw new IllegalArgumentException("file type for saving must be .wav or .au");
        }
    }
    catch (IOException ioe) {
        throw new IllegalArgumentException("unable to save file '" + filename + "'", ioe);
    }
}
 
Example 9
Source File: AMBESynthesizer.java    From jmbe with GNU General Public License v3.0
/**
 * Debug method for generating and testing AMBE recordings
 * @param frames of AMBE encoded audio
 * @param outputFile for generated audio
 * @throws IOException for IO errors
 */
public static void makeAMBEWaves(List<byte[]> frames, File outputFile) throws IOException
{
    IAudioCodec audioCodec = new AMBEAudioCodec();

    AudioFormat audioFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
        8000.0f, 16, 1, 2, 8000.0f, false);
    ByteBuffer byteBuffer = ByteBuffer.allocate(frames.size() * 320);

    int frameCounter = 0;

    for(byte[] frame : frames)
    {
        frameCounter++;

        float[] samples = audioCodec.getAudio(frame);

        ByteBuffer converted = ByteBuffer.allocate(samples.length * 2);
        converted = converted.order(ByteOrder.LITTLE_ENDIAN);

        for(float sample : samples)
        {
            converted.putShort((short)(sample * Short.MAX_VALUE));
        }

        byte[] bytes = converted.array();
        byteBuffer.put(bytes);
    }

    // the AudioInputStream length argument is in sample frames, so divide the byte count by the frame size
    AudioInputStream ais = new AudioInputStream(new ByteArrayInputStream(byteBuffer.array()), audioFormat,
        byteBuffer.array().length / audioFormat.getFrameSize());

    if(!outputFile.exists())
    {
        outputFile.createNewFile();
    }

    AudioSystem.write(ais, AudioFileFormat.Type.WAVE, outputFile);
}
 
Example 10
Source File: AMBESynthesizer.java    From jmbe with GNU General Public License v3.0
/**
 * Debug method for generating and testing IMBE recordings
 * @param frames of IMBE encoded audio
 * @param outputFile for generated audio
 * @throws IOException for IO errors
 */
public static void makeIMBEWaves(List<byte[]> frames, File outputFile) throws IOException
{
    IAudioCodec audioCodec = new IMBEAudioCodec();

    AudioFormat audioFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
        8000.0f, 16, 1, 2, 8000.0f, false);
    ByteBuffer byteBuffer = ByteBuffer.allocate(frames.size() * 320);

    int frameCounter = 0;

    for(byte[] frame : frames)
    {
        float[] samples = audioCodec.getAudio(frame);

        ByteBuffer converted = ByteBuffer.allocate(samples.length * 2);
        converted = converted.order(ByteOrder.LITTLE_ENDIAN);

        for(float sample : samples)
        {
            converted.putShort((short)(sample * Short.MAX_VALUE));
        }

        byte[] bytes = converted.array();
        byteBuffer.put(bytes);
    }

    // the AudioInputStream length argument is in sample frames, so divide the byte count by the frame size
    AudioInputStream ais = new AudioInputStream(new ByteArrayInputStream(byteBuffer.array()), audioFormat,
        byteBuffer.array().length / audioFormat.getFrameSize());

    if(!outputFile.exists())
    {
        outputFile.createNewFile();
    }

    AudioSystem.write(ais, AudioFileFormat.Type.WAVE, outputFile);
}
 
Example 11
Source File: SimpleAudioRecorder.java    From jmg with GNU General Public License v2.0
public void run()
{
	try
	{
		AudioSystem.write(
			m_audioInputStream,
			m_targetType,
			m_outputFile);
	}
	catch (IOException e)
	{
		e.printStackTrace();
	}
}
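The fields m_audioInputStream, m_targetType, and m_outputFile are initialized elsewhere in SimpleAudioRecorder. For context, a capture stream of this kind is commonly obtained from a TargetDataLine along the following lines; this is a generic sketch, not code from the jmg project:

import javax.sound.sampled.*;

private static AudioInputStream openCaptureStream(AudioFormat format) throws LineUnavailableException {
	// ask the default mixer for a capture line that supports the requested format
	DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
	TargetDataLine line = (TargetDataLine) AudioSystem.getLine(info);
	line.open(format);
	line.start();
	// AudioSystem.write() keeps reading from the returned stream until the line is stopped and closed
	return new AudioInputStream(line);
}

Typically another thread stops and closes the line, which is what lets the blocking AudioSystem.write() call in run() return.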
 
Example 12
Source File: TGSynthSongWriter.java    From tuxguitar with GNU Lesser General Public License v2.1
public void write(TGSongWriterHandle handle) throws TGFileFormatException {
	try{
		TGSynthAudioSettings settings = handle.getContext().getAttribute(TGSynthAudioSettings.class.getName());
		if( settings == null ) {
			settings = new TGSynthAudioSettings();
		}
		
		OutputStream out = handle.getOutputStream();
		TGSong tgSong = handle.getSong();
		TGSongManager tgSongManager = new TGSongManager();
		
		MidiSequenceParser midiSequenceParser = new MidiSequenceParser(tgSong, tgSongManager, MidiSequenceParser.DEFAULT_EXPORT_FLAGS | MidiSequenceParser.ADD_BANK_SELECT);
		TGSynthSequenceHandler midiSequenceHandler = new TGSynthSequenceHandler(tgSong.countTracks());
		midiSequenceParser.parse(midiSequenceHandler);
		if(!midiSequenceHandler.getEvents().isEmpty()) {
			
			TGSynthModel synthModel = new TGSynthModel(this.context);
			TGAudioBufferProcessor audioProcessor = new TGAudioBufferProcessor(synthModel);
			ByteArrayOutputStream audioBuffer = new ByteArrayOutputStream();
			TGSynthSequencer sequence = new TGSynthSequencer(synthModel, midiSequenceHandler.getEvents());
			
			this.loadSynthPrograms(synthModel, tgSong);
			
			sequence.start();
			while(!sequence.isEnded()) {
				sequence.dispatchEvents();
				
				audioProcessor.process();
				audioBuffer.write(audioProcessor.getBuffer().getBuffer(), 0, audioProcessor.getBuffer().getLength());
				
				sequence.forward();
			}
			
			long duration = (long) (TGAudioLine.AUDIO_FORMAT.getFrameRate() * ((sequence.getLength() / 1000.00)));
			
			ByteArrayInputStream byteBuffer = new ByteArrayInputStream(audioBuffer.toByteArray());
			AudioInputStream sourceStream = new AudioInputStream(byteBuffer, TGAudioLine.AUDIO_FORMAT, duration);
			AudioInputStream targetStream = AudioSystem.getAudioInputStream(settings.getFormat(), sourceStream);
			AudioSystem.write(targetStream, settings.getType(), out);
		}
	} catch(Throwable throwable) {
		throw new TGFileFormatException(throwable);
	}
}
 
Example 13
Source File: Sample.java    From mpcmaid with GNU Lesser General Public License v2.1
public void save(File file) throws Exception {
	final Type fileType = AudioFileFormat.Type.WAVE;
	final AudioInputStream stream = new AudioInputStream(new ByteArrayInputStream(bytes), format, frameLength);
	AudioSystem.write(stream, fileType, file);
}