be.tarsos.dsp.AudioEvent Java Examples

The following examples show how to use be.tarsos.dsp.AudioEvent. They are drawn from open-source projects; the originating project, source file, and license are listed in the header above each example.
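All of the snippets below implement the same TarsosDSP callback contract: an AudioProcessor is registered on an AudioDispatcher, which repeatedly calls process(AudioEvent) with one buffer of audio at a time. The following minimal sketch illustrates that pattern end to end. It is not taken from any of the projects below: the JVM factory class (be.tarsos.dsp.io.jvm.AudioDispatcherFactory), the microphone source, the 44.1 kHz sample rate, and the 1024-sample buffer size are illustrative assumptions.

import javax.sound.sampled.LineUnavailableException;

import be.tarsos.dsp.AudioDispatcher;
import be.tarsos.dsp.AudioEvent;
import be.tarsos.dsp.AudioProcessor;
import be.tarsos.dsp.io.jvm.AudioDispatcherFactory;

public class AudioEventExample {

	public static void main(String[] args) throws LineUnavailableException {
		// Illustrative values: 44.1 kHz sample rate, 1024-sample buffers, no overlap.
		AudioDispatcher dispatcher = AudioDispatcherFactory.fromDefaultMicrophone(44100, 1024, 0);

		dispatcher.addAudioProcessor(new AudioProcessor() {
			@Override
			public boolean process(AudioEvent audioEvent) {
				// Each AudioEvent wraps one buffer of samples plus timing metadata.
				float[] samples = audioEvent.getFloatBuffer();
				System.out.printf("t=%.3f s, %d samples%n",
						audioEvent.getTimeStamp(), samples.length);
				// true: let any subsequent processors in the chain handle this buffer too.
				return true;
			}

			@Override
			public void processingFinished() {
				// Called once when the dispatcher stops.
			}
		});

		// Blocks until the stream ends; use new Thread(dispatcher).start() to run it in the background.
		dispatcher.run();
	}
}

The examples that follow use this same structure and differ mainly in where the audio comes from and in what each process(AudioEvent) implementation does with the buffer.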
Example #1
Source File: QIFFTAudioFileInfo.java    From Panako with GNU Affero General Public License v3.0
@Override
public boolean process(AudioEvent audioEvent) {
	float[] currentMagnitudes = eventPointProcessor.getMagnitudes().clone();
	
	//for visualization purposes:
	//store the new max value or, decay the running max
	float currentMaxValue = max(currentMagnitudes);
	if(currentMaxValue > runningMaxMagnitude){
		runningMaxMagnitude = currentMaxValue;
	}else{
		runningMaxMagnitude = 0.9999f * runningMaxMagnitude;
	}
	normalize(currentMagnitudes);
	
	magnitudes.put((float)audioEvent.getTimeStamp(),currentMagnitudes);
	
	return true;
}
 
Example #2
Source File: FadeOut.java    From cythara with GNU General Public License v3.0
@Override
public boolean process(AudioEvent audioEvent)
{
	// Don't do anything before the beginning of Fade Out
	if(isFadeOut)
	{
		if(firstTime==-1)
			firstTime=audioEvent.getTimeStamp();

		// Decrease the gain according to time since the beginning of the Fade Out
		time=audioEvent.getTimeStamp()-firstTime;
		gp.setGain(1-time/duration);
		gp.process(audioEvent);
	}
	return true;
}
 
Example #3
Source File: FadeIn.java    From cythara with GNU General Public License v3.0
@Override
public boolean process(AudioEvent audioEvent)
{
	// Don't do anything after the end of the Fade In
	if(fadingIn)
	{
		if(firstTime==-1)
			firstTime=audioEvent.getTimeStamp();
		
		
		// Increase the gain according to time since the beginning of the Fade In
		time=audioEvent.getTimeStamp()-firstTime;
		gp.setGain(time/duration);
		gp.process(audioEvent);
		if(time > duration){
			fadingIn = false;
		}
	}
	return true;
}
 
Example #4
Source File: NCteQStrategy.java    From Panako with GNU Affero General Public License v3.0
@Override
public void monitor(String query, final int maxNumberOfReqults,Set<Integer> avoid,
		final QueryResultHandler handler) {
	
	int samplerate = Config.getInt(Key.NCTEQ_SAMPLE_RATE);
	int size = Config.getInt(Key.MONITOR_STEP_SIZE) * samplerate;
	int overlap = Config.getInt(Key.MONITOR_OVERLAP) * samplerate;
	final ConstantQ constanQ = createConstantQ();
	
	AudioDispatcher d = AudioDispatcherFactory.fromPipe(query, samplerate, size, overlap);
	d.addAudioProcessor(new AudioProcessor() {
		@Override
		public boolean process(AudioEvent audioEvent) {
			double timeStamp = audioEvent.getTimeStamp() - Config.getInt(Key.MONITOR_OVERLAP);
			processMonitorQuery(audioEvent.getFloatBuffer().clone(), maxNumberOfReqults, handler,timeStamp,constanQ);
			return true;
		}
		
		@Override
		public void processingFinished() {
		}
	});
	d.run();

}
 
Example #5
Source File: DelayEffect.java    From cythara with GNU General Public License v3.0
@Override
public boolean process(AudioEvent audioEvent) {
	float[] audioFloatBuffer = audioEvent.getFloatBuffer();
	int overlap = audioEvent.getOverlap();
		
	for(int i = overlap ; i < audioFloatBuffer.length ; i++){
		if(position >= echoBuffer.length){
			position = 0;
		}
		
		//the output is the input plus the decayed echo
		audioFloatBuffer[i] = audioFloatBuffer[i] + echoBuffer[position] * decay;
		//store the new output sample in the echo buffer
		echoBuffer[position] = audioFloatBuffer[i];
		
		position++;
	}
	
	applyNewEchoLength();
	
	return true;
}
 
Example #6
Source File: Daubechies4WaveletCoder.java    From cythara with GNU General Public License v3.0
@Override
public boolean process(AudioEvent audioEvent) {

	float[] audioBuffer = audioEvent.getFloatBuffer();
	float[] sortBuffer = new float[audioBuffer.length];

	transform.forwardTrans(audioBuffer);

	for (int i = 0; i < sortBuffer.length; i++) {
		sortBuffer[i] = Math.abs(audioBuffer[i]);
	}
	Arrays.sort(sortBuffer);

	double threshold = sortBuffer[compression];

	for (int i = 0; i < audioBuffer.length; i++) {
		if (Math.abs(audioBuffer[i]) <= threshold) {
			audioBuffer[i] = 0;
		}
	}
	return true;
}
 
Example #7
Source File: HaarWaveletCoder.java    From cythara with GNU General Public License v3.0
@Override
public boolean process(AudioEvent audioEvent) {

	float[] audioBuffer = audioEvent.getFloatBuffer();
	float[] sortBuffer = new float[audioBuffer.length];
	transform.transform(audioEvent.getFloatBuffer());

	for (int i = 0; i < sortBuffer.length; i++) {
		sortBuffer[i] = Math.abs(audioBuffer[i]);
	}
	Arrays.sort(sortBuffer);

	double threshold = sortBuffer[compression];

	for (int i = 0; i < audioBuffer.length; i++) {
		if (Math.abs(audioBuffer[i]) <= threshold) {
			audioBuffer[i] = 0;
		}
	}

	return true;
}
 
Example #8
Source File: IIRFilter.java    From cythara with GNU General Public License v3.0
@Override
public boolean process(AudioEvent audioEvent) {
	float[] audioFloatBuffer = audioEvent.getFloatBuffer();
	
	for (int i = audioEvent.getOverlap(); i < audioFloatBuffer.length; i++) {
		//shift the in array
		System.arraycopy(in, 0, in, 1, in.length - 1);
		in[0] = audioFloatBuffer[i];

		//calculate y based on a and b coefficients
		//and in and out.
		float y = 0;
		for(int j = 0 ; j < a.length ; j++){
			y += a[j] * in[j];
		}			
		for(int j = 0 ; j < b.length ; j++){
			y += b[j] * out[j];
		}
		//shift the out array
		System.arraycopy(out, 0, out, 1, out.length - 1);
		out[0] = y;
		
		audioFloatBuffer[i] = y;
	} 
	return true;
}
 
Example #9
Source File: MainActivity.java    From MagicLight-Controller with Apache License 2.0
private void startDispatch() {
    dispatcher = AudioDispatcherFactory.fromDefaultMicrophone(22050, 1024, 0);
    uiThread = new Handler();
    PitchDetectionHandler pdh = (PitchDetectionResult result, AudioEvent audioEven) -> uiThread.post(() -> {
        final float pitchInHz = result.getPitch();
        int pitch =  pitchInHz > 0 ? (int) pitchInHz : 1;

        if(pitch > 1 && mConnected) {
            if((pitch - lastPitch) >= sensitive * 10) {
                Random random = new Random();
                byte[] rgb = getLedBytes(random.nextInt(600000000) + 50000);
                controlLed(rgb);
            }

            if(minPitch > pitch)
                minPitch = pitch;
        }

        lastPitch = pitch;
    });

    processor = new PitchProcessor(PitchProcessor.PitchEstimationAlgorithm.FFT_YIN, 22050, 1024, pdh);
    dispatcher.addAudioProcessor(processor);
    listeningThread = new Thread(dispatcher);
    listeningThread.start();
}
 
Example #10
Source File: NFFTAudioFileInfo.java    From Panako with GNU Affero General Public License v3.0
@Override
public boolean process(AudioEvent audioEvent) {
	float[] currentMagnitudes = eventPointProcessor.getMagnitudes().clone();
	
	//for visualization purposes:
	//store the new max value or, decay the running max
	float currentMaxValue = max(currentMagnitudes);
	if(currentMaxValue > runningMaxMagnitude){
		runningMaxMagnitude = currentMaxValue;
	}else{
		runningMaxMagnitude = 0.9999f * runningMaxMagnitude;
	}
	normalize(currentMagnitudes);
	
	magnitudes.put((float)audioEvent.getTimeStamp(),currentMagnitudes);
	
	return true;
}
 
Example #11
Source File: AudioRecorder.java    From voice-pitch-analyzer with GNU Affero General Public License v3.0
@Override
public boolean process(AudioEvent audioEvent) {
    Log.d("recording audio", String.valueOf(audioEvent.getTimeStamp()));
    /*int res = track.write(audioEvent.getByteBuffer(),0, audioEvent.getBufferSize());
    if(res == AudioTrack.ERROR_INVALID_OPERATION) {
        Log.d("audioRecorder", "cant write stream");
    } else if (res == AudioTrack.ERROR_BAD_VALUE) {
        Log.d("audioRecorder", "invalid data");
    } else if (res == AudioTrack.SUCCESS) {
        Log.d("audioRecorder", "written to stream");
    }*/
    if (this.firstRun) {
        android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_AUDIO);
        // clear the flag so the thread priority is only raised once
        this.firstRun = Boolean.FALSE;
    }


    try {
        this.file.write(audioEvent.getByteBuffer(), 0, audioEvent.getBufferSize());
    } catch (IOException e) {
        e.printStackTrace();
    }
    return true;
}
 
Example #12
Source File: CrossCorrelation.java    From Panako with GNU Affero General Public License v3.0
@Override
public boolean process(AudioEvent audioEvent) {
	float[] fftData = audioEvent.getFloatBuffer().clone();
	
	Arrays.fill(zeroPaddedData, 0);
	System.arraycopy(fftData, 0, zeroPaddedData, fftData.length/2, fftData.length);
	
	fft.forwardTransform(zeroPaddedData);

	fft.multiply(zeroPaddedData, zeroPaddedInvesedQuery);
	fft.backwardsTransform(zeroPaddedData);
	float maxVal = -100000;
	int maxIndex =  0;
	for(int i = 0 ; i<zeroPaddedData.length ; i++){
		if(zeroPaddedData[i]> maxVal){
			maxVal = zeroPaddedData[i];
			maxIndex=i;
		}
	}
	
	float time = (float) (audioEvent.getTimeStamp() - audioEvent.getBufferSize()/audioEvent.getSampleRate() + maxIndex/2 /audioEvent.getSampleRate() + 0.005);
	handler.handleCrossCorrelation((float)audioEvent.getTimeStamp(), time, maxVal);
	return true;
}
 
Example #13
Source File: SoundTouchRateTransposer.java    From cythara with GNU General Public License v3.0
@Override
public boolean process(AudioEvent audioEvent) {
	int i, used;
	float[] src = audioEvent.getFloatBuffer();
	float[] dest = new float[(int) Math.round(audioEvent.getBufferSize() / rate)];
    used = 0;
    i = 0;

    // Process the last sample saved from the previous call first...
    while (slopeCount <= 1.0f) {
        dest[i] = (float)((1.0f - slopeCount) * prevSample + slopeCount * src[0]);
        i++;
        slopeCount += rate;
    }
    slopeCount -= 1.0f;
    end:
       while(true){
           while (slopeCount > 1.0f) {
               slopeCount -= 1.0f;
               used++;
               if (used >= src.length - 1) 
               	break end;
           }
           if(i < dest.length){
           	dest[i] = (float)((1.0f - slopeCount) * src[used] + slopeCount * src[used + 1]);
           }
           i++;
           slopeCount += rate;
       }
    
    //Store the last sample for the next round
    prevSample = src[src.length - 1];
    dispatcher.setStepSizeAndOverlap(dest.length, 0);
    audioEvent.setFloatBuffer(dest);
	return true;
}
 
Example #14
Source File: RateTransposer.java    From cythara with GNU General Public License v3.0
@Override
public boolean process(AudioEvent audioEvent) {
	float[] src = audioEvent.getFloatBuffer();
	//Creating this float array on every call could be avoided if src.length were known beforehand.
	//A possible optimization is to instantiate it once outside the loop and reuse a reference to
	//the array here, in the process method.
	float[] out = new float[(int) (src.length * factor)];
	r.process(factor, src, 0, src.length, false, out, 0, out.length);
	//The size of the output buffer changes (according to factor). 
	audioEvent.setFloatBuffer(out);
	//Update overlap offset to match new buffer size
	audioEvent.setOverlap((int) (audioEvent.getOverlap() * factor));
	return true;
}
 
Example #15
Source File: AndroidAudioPlayer.java    From cythara with GNU General Public License v3.0
/**
 * {@inheritDoc}
 */
@Override
public boolean process(AudioEvent audioEvent) {
    int overlapInSamples = audioEvent.getOverlap();
    int stepSizeInSamples = audioEvent.getBufferSize() - overlapInSamples;
    byte[] byteBuffer = audioEvent.getByteBuffer();

    //int ret = audioTrack.write(audioEvent.getFloatBuffer(),overlapInSamples,stepSizeInSamples,AudioTrack.WRITE_BLOCKING);
    int ret = audioTrack.write(byteBuffer,overlapInSamples*2,stepSizeInSamples*2);
    if (ret < 0) {
        Log.e(TAG, "AudioTrack.write returned error code " + ret);
    }
    return true;
}
 
Example #16
Source File: PercussionOnsetDetector.java    From cythara with GNU General Public License v3.0
@Override
public boolean process(AudioEvent audioEvent) {
	float[] audioFloatBuffer = audioEvent.getFloatBuffer();
	this.processedSamples += audioFloatBuffer.length;
	this.processedSamples -= audioEvent.getOverlap();

	fft.forwardTransform(audioFloatBuffer);
	fft.modulus(audioFloatBuffer, currentMagnitudes);
	int binsOverThreshold = 0;
	for (int i = 0; i < currentMagnitudes.length; i++) {
		if (priorMagnitudes[i] > 0.f) {
			double diff = 10 * Math.log10(currentMagnitudes[i]
					/ priorMagnitudes[i]);
			if (diff >= threshold) {
				binsOverThreshold++;
			}
		}
		priorMagnitudes[i] = currentMagnitudes[i];
	}

	if (dfMinus2 < dfMinus1
			&& dfMinus1 >= binsOverThreshold
			&& dfMinus1 > ((100 - sensitivity) * audioFloatBuffer.length) / 200) {
		float timeStamp = processedSamples / sampleRate;
		handler.handleOnset(timeStamp,-1);
	}

	dfMinus2 = dfMinus1;
	dfMinus1 = binsOverThreshold;

	return true;
}
 
Example #17
Source File: GeneralizedGoertzel.java    From cythara with GNU General Public License v3.0
@Override
public boolean process(AudioEvent audioEvent) {
	
	float[] x = audioEvent.getFloatBuffer();
	WindowFunction f  = new HammingWindow();
	f.apply(x);
	for (int j = 0; j < frequenciesToDetect.length; j++) {
		double pik_term = 2 * Math.PI * indvec[j]/(float) audioEvent.getBufferSize(); 
		double cos_pik_term2 = Math.cos(pik_term) * 2;
		Complex cc = new Complex(0,-1*pik_term).exp();
		double s0=0;
		double s1=0;
		double s2=0;
		
		for(int i = 0 ; i < audioEvent.getBufferSize() ; i++ ){
			s0 = x[i]+cos_pik_term2*s1-s2;
			s2=s1;
			s1=s0;
		}
		s0 = cos_pik_term2 * s1 - s2;
		calculatedComplex[j] = cc.times(new Complex(-s1,0)).plus(new Complex(s0,0));
		calculatedPowers[j] = calculatedComplex[j].mod();
	}
	
	handler.handleDetectedFrequencies(audioEvent.getTimeStamp(),frequenciesToDetect.clone(), calculatedPowers.clone(),
			frequenciesToDetect.clone(), calculatedPowers.clone());
	
	return true;
}
 
Example #18
Source File: QIFFTAudioFileInfo.java    From Panako with GNU Affero General Public License v3.0
public void extractInfoFromAudio(final Component componentToRepaint){
	int samplerate = Config.getInt(Key.NFFT_SAMPLE_RATE);
	int size = Config.getInt(Key.NFFT_SIZE);
	int overlap = size - Config.getInt(Key.NFFT_STEP_SIZE);
	StopWatch w = new StopWatch();
	w.start();
	
	d = AudioDispatcherFactory.fromPipe(audioFile.getAbsolutePath(), samplerate, size, overlap);
	eventPointProcessor = new QIFFTEventPointProcessor(size,overlap,samplerate,4);
	d.addAudioProcessor(eventPointProcessor);
	d.addAudioProcessor(this);
	d.addAudioProcessor(new AudioProcessor() {
		@Override
		public void processingFinished() {
			SwingUtilities.invokeLater(new Runnable() {
				@Override
				public void run() {
					componentToRepaint.repaint();
				}
			});
			if(referenceFileInfo!=null)
				referenceFileInfo.setMatchingFingerprints(matchingPrints);
		}			
		@Override
		public boolean process(AudioEvent audioEvent) {
			return true;
		}
	});
	new Thread(d).start();
}
 
Example #19
Source File: Goertzel.java    From cythara with GNU General Public License v3.0
@Override
public boolean process(AudioEvent audioEvent) {
	float[] audioFloatBuffer = audioEvent.getFloatBuffer();
	double skn0, skn1, skn2;
	int numberOfDetectedFrequencies = 0;
	for (int j = 0; j < frequenciesToDetect.length; j++) {
		skn0 = skn1 = skn2 = 0;
		for (int i = 0; i < audioFloatBuffer.length; i++) {
			skn2 = skn1;
			skn1 = skn0;
			skn0 = precalculatedCosines[j] * skn1 - skn2
					+ audioFloatBuffer[i];
		}
		double wnk = precalculatedWnk[j];
		calculatedPowers[j] = 20 * Math.log10(Math.abs(skn0 - wnk * skn1));
		if (calculatedPowers[j] > POWER_THRESHOLD) {
			numberOfDetectedFrequencies++;
		}
	}

	if (numberOfDetectedFrequencies > 0) {
		double[] frequencies = new double[numberOfDetectedFrequencies];
		double[] powers = new double[numberOfDetectedFrequencies];
		int index = 0;
		for (int j = 0; j < frequenciesToDetect.length; j++) {
			if (calculatedPowers[j] > POWER_THRESHOLD) {
				frequencies[index] = frequenciesToDetect[j];
				powers[index] = calculatedPowers[j];
				index++;
			}
		}
		handler.handleDetectedFrequencies(audioEvent.getTimeStamp(),frequencies, powers,
				frequenciesToDetect.clone(), calculatedPowers.clone());
	}

	return true;
}
 
Example #20
Source File: WriterProcessor.java    From cythara with GNU General Public License v3.0
@Override
public boolean process(AudioEvent audioEvent) {
    try {
        audioLen+=audioEvent.getByteBuffer().length;
        //write audio to the output
        output.write(audioEvent.getByteBuffer());
    } catch (IOException e) {
        e.printStackTrace();
    }
    return true;
}
 
Example #21
Source File: AmplitudeLFO.java    From cythara with GNU General Public License v3.0
@Override
public boolean process(AudioEvent audioEvent) {
	float[] buffer = audioEvent.getFloatBuffer();
	double sampleRate = audioEvent.getSampleRate();
	double twoPiF = 2 * Math.PI * frequency;
	double time = 0;
	for(int i = 0 ; i < buffer.length ; i++){
		time = i / sampleRate;
		float gain =  (float) (scaleParameter * Math.sin(twoPiF * time + phase));
		buffer[i] = gain * buffer[i];
	}
	phase = twoPiF * buffer.length / sampleRate + phase;
	return true;
}
 
Example #22
Source File: NoiseGenerator.java    From cythara with GNU General Public License v3.0
@Override
public boolean process(AudioEvent audioEvent) {
	float[] buffer = audioEvent.getFloatBuffer();
	for(int i = 0 ; i < buffer.length ; i++){
		buffer[i] += (float) (Math.random() * gain);
	}
	return true;
}
 
Example #23
Source File: NFFTAudioFileInfo.java    From Panako with GNU Affero General Public License v3.0
public void extractInfoFromAudio(final Component componentToRepaint){
	int samplerate = Config.getInt(Key.NFFT_SAMPLE_RATE);
	int size = Config.getInt(Key.NFFT_SIZE);
	int overlap = size - Config.getInt(Key.NFFT_STEP_SIZE);
	StopWatch w = new StopWatch();
	w.start();
	
	d = AudioDispatcherFactory.fromPipe(audioFile.getAbsolutePath(), samplerate, size, overlap);
	eventPointProcessor = new NFFTEventPointProcessor(size,overlap,samplerate);
	d.addAudioProcessor(eventPointProcessor);
	d.addAudioProcessor(this);
	d.addAudioProcessor(new AudioProcessor() {
		@Override
		public void processingFinished() {
			SwingUtilities.invokeLater(new Runnable() {
				@Override
				public void run() {
					componentToRepaint.repaint();
				}
			});
			if(referenceAudioFileInfo!=null)
				referenceAudioFileInfo.setMatchingFingerprints(matchingPrints);
		}			
		@Override
		public boolean process(AudioEvent audioEvent) {
			return true;
		}
	});
	new Thread(d).start();
}
 
Example #24
Source File: MFCC.java    From cythara with GNU General Public License v3.0
@Override
public boolean process(AudioEvent audioEvent) {
	audioFloatBuffer = audioEvent.getFloatBuffer().clone();

       // Magnitude Spectrum
       float bin[] = magnitudeSpectrum(audioFloatBuffer);
       // get Mel Filterbank
       float fbank[] = melFilter(bin, centerFrequencies);
       // Non-linear transformation
       float f[] = nonLinearTransformation(fbank);
       // Cepstral coefficients
       mfcc = cepCoefficients(f);
       
	return true;
}
 
Example #25
Source File: RafsRepStrategy.java    From Panako with GNU Affero General Public License v3.0
@Override
public void monitor(String query, int maxNumberOfReqults, Set<Integer> avoid, QueryResultHandler handler) {
	int samplerate = Config.getInt(Key.RAFS_SAMPLE_RATE);
	int size = Config.getInt(Key.MONITOR_STEP_SIZE) * samplerate;
	int overlap = Config.getInt(Key.MONITOR_OVERLAP) * samplerate;
	AudioDispatcher d ;
	if (query.equals(Panako.DEFAULT_MICROPHONE)){
		try {
			d = AudioDispatcherFactory.fromDefaultMicrophone(samplerate,size, overlap);
		} catch (LineUnavailableException e) {
			LOG.warning("Could not connect to default microphone!" + e.getMessage());
			e.printStackTrace();
			d = null;
		}
	}else{
		d = AudioDispatcherFactory.fromPipe(query, samplerate, size, overlap);
	}
	d.setZeroPadFirstBuffer(true);
	d.addAudioProcessor(new AudioProcessor() {
		@Override
		public boolean process(AudioEvent audioEvent) {
			double timeStamp = audioEvent.getTimeStamp() - Config.getInt(Key.MONITOR_OVERLAP);
			processMonitorQuery(audioEvent.getFloatBuffer().clone(), handler,timeStamp,avoid);
			return true;
		}
		
		@Override
		public void processingFinished() {
		}
	});
	d.run();
}
 
Example #26
Source File: TestUtilities.java    From Panako with GNU Affero General Public License v3.0
public static float[] getAudioBuffer(File file,double start,double stop){

	double sampleRate = 44100;
	int sampleStart = (int) Math.round(sampleRate * start);
	int sampleStop = (int) Math.round(sampleRate * stop);
	int diff = sampleStop - sampleStart;
	final float[] audioBuffer = new float[diff];
	
	AudioDispatcher d;
	
	d = AudioDispatcherFactory.fromPipe(file.getAbsolutePath(), 44100,diff, 0);
	d.skip(start);
	d.addAudioProcessor(new AudioProcessor() {
		boolean filled = false;
		@Override
		public void processingFinished() {
		}

		@Override
		public boolean process(AudioEvent audioEvent) {
			if(!filled){
				for (int i = 0; i < audioEvent.getFloatBuffer().length; i++) {
					audioBuffer[i] = audioEvent.getFloatBuffer()[i];
				}
				filled = true;
			}
			return false;
		}
	});
	d.run();
	
	
	
	return audioBuffer;
}
 
Example #27
Source File: SyncSinkTests.java    From Panako with GNU Affero General Public License v3.0
@Test
public void testPipeDecoder(){
	File reference = TestUtilities.getResource("dataset/61198.wav");
	File referenceFile = TestUtilities.getResource("dataset/61198.wav");
	final float[] referenceBuffer = TestUtilities.getAudioBuffer(reference,1.0,1.5);
	
	AudioDispatcher d = AudioDispatcherFactory.fromPipe(referenceFile.getAbsolutePath(), 44100, 22050, 0,1.0,0.5);
	d.addAudioProcessor(new AudioProcessor() {
		boolean ran = false;
		@Override
		public void processingFinished() {
		}
		
		@Override
		public boolean process(AudioEvent audioEvent) {
			if(!ran){
				float[] otherBuffer = audioEvent.getFloatBuffer();
				assertEquals("Buffers should be equal in length", referenceBuffer.length, otherBuffer.length); 
				for(int i = 0 ; i < otherBuffer.length; i++){
					assertEquals("Buffers should have the same content", referenceBuffer[i], otherBuffer[i],0.0000001);
				}
			}
			ran = true;
			return true;
		}
	});
	d.run();		
}
 
Example #28
Source File: NFFTStrategy.java    From Panako with GNU Affero General Public License v3.0
public void monitor(String query,final SerializedFingerprintsHandler handler){
	
	int samplerate = Config.getInt(Key.NFFT_SAMPLE_RATE);
	int size = Config.getInt(Key.MONITOR_STEP_SIZE) * samplerate;
	int overlap = Config.getInt(Key.MONITOR_OVERLAP) * samplerate;
	AudioDispatcher d ;
	if (query.equals(Panako.DEFAULT_MICROPHONE)){
		try {
			d = AudioDispatcherFactory.fromDefaultMicrophone(samplerate,size, overlap);
		} catch (LineUnavailableException e) {
			LOG.warning("Could not connect to default microphone!" + e.getMessage());
			e.printStackTrace();
			d = null;
		}
	}else{
		d = AudioDispatcherFactory.fromPipe(query, samplerate, size, overlap);
	}
	d.addAudioProcessor(new AudioProcessor() {
		@Override
		public boolean process(AudioEvent audioEvent) {
			double timeStamp = audioEvent.getTimeStamp() - Config.getInt(Key.MONITOR_OVERLAP);
			processMonitorQueryToSerializeFingerprints(audioEvent.getFloatBuffer().clone(), handler,timeStamp);
			return true;
		}
		
		@Override
		public void processingFinished() {
		}
	});
	d.run();
}
 
Example #29
Source File: NFFTStrategy.java    From Panako with GNU Affero General Public License v3.0
@Override
public void monitor(String query,final  int maxNumberOfResults,Set<Integer> avoid,
		final QueryResultHandler handler) {
	
	int samplerate = Config.getInt(Key.NFFT_SAMPLE_RATE);
	int size = Config.getInt(Key.MONITOR_STEP_SIZE) * samplerate;
	int overlap = Config.getInt(Key.MONITOR_OVERLAP) * samplerate;
	AudioDispatcher d ;
	if (query.equals(Panako.DEFAULT_MICROPHONE)){
		try {
			d = AudioDispatcherFactory.fromDefaultMicrophone(samplerate,size, overlap);
		} catch (LineUnavailableException e) {
			LOG.warning("Could not connect to default microphone!" + e.getMessage());
			e.printStackTrace();
			d = null;
		}
	}else{
		d = AudioDispatcherFactory.fromPipe(query, samplerate, size, overlap);
	}
	d.addAudioProcessor(new AudioProcessor() {
		@Override
		public boolean process(AudioEvent audioEvent) {
			double timeStamp = audioEvent.getTimeStamp() - Config.getInt(Key.MONITOR_OVERLAP);
			processMonitorQuery(audioEvent.getFloatBuffer().clone(), maxNumberOfResults, handler,timeStamp,avoid);
			return true;
		}
		
		@Override
		public void processingFinished() {
		}
	});
	d.run();
}
 
Example #30
Source File: SineGenerator.java    From cythara with GNU General Public License v3.0
@Override
public boolean process(AudioEvent audioEvent) {
	float[] buffer = audioEvent.getFloatBuffer();
	double sampleRate = audioEvent.getSampleRate();
	double twoPiF = 2 * Math.PI * frequency;
	double time = 0;
	for(int i = 0 ; i < buffer.length ; i++){
		time = i / sampleRate;
		buffer[i] += (float) (gain * Math.sin(twoPiF * time + phase));
	}
	phase = twoPiF * buffer.length / sampleRate + phase;
	return true;
}