package zero1hd.polyjet.audio;

import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.utils.FloatArray;

import edu.emory.mathcs.jtransforms.fft.FloatFFT_1D;

import zero1hd.polyjet.util.MiniEvents;
import zero1hd.polyjet.util.MiniSender;

/**
 * Two-pass beat analysis of a song. The first pass ({@link #startAnalyticalThread(AudioData)})
 * runs an FFT over each read window and records spectral flux for a bass band and a main ("UM")
 * band. The second pass ({@link #runThresholdCleaning(float)}) builds a moving-average threshold,
 * prunes the flux against it and picks peaks. Progress and completion are reported through the
 * {@link MiniSender} events used below.
 */
public class AudioAnalyzer {
    private boolean containsData;
    private boolean finalized;
    FloatFFT_1D fft;
    public AudioData audioData;
    float[] audioPCM;
    float[] spectrum;
    float[] lastSpectrum;
    Runnable analysisAlgorithm;
    Runnable thresholdCalculator;

    // bass band
    int bassBinBegin;
    int bassBinEnd;
    private FloatArray bassSpectralFlux = new FloatArray();
    private FloatArray bassThreshold = new FloatArray();
    private FloatArray bassPrunned = new FloatArray();
    private FloatArray bassPeaks = new FloatArray();
    private float bassMaxValue;
    private float bassAvg;

    // main ("UM") band
    int UMBinBegin;
    int UMBinEnd;
    private FloatArray umSpectralFlux = new FloatArray();
    private FloatArray umThreshold = new FloatArray();
    private FloatArray umPrunned = new FloatArray();
    private FloatArray umPeaks = new FloatArray();
    private float UMMaxValue;
    private float umAvg;

    private FloatArray overlappedPeaks = new FloatArray();

    float bassThresholdMultiplier;
    float umThresholdMultiplier;

    int UMThresholdCalcRange;
    int bassThresholdCalcRange;

    public volatile MiniSender sender;

    private float avgBPS;
    int PUID;

    boolean work;
    private volatile int progress;
    private float secondsPerWindow;

    public AudioAnalyzer() {
        sender = new MiniSender();

        analysisAlgorithm = new Runnable() {
            @Override
            public void run() {
                progress = 0;
                int tasksDone = 0;
                int totalTasks = audioData.getSampleCount() / audioData.getReadWindowSize();

                bassThresholdMultiplier = 1.5f;
                umThresholdMultiplier = 2f;

                bassBinBegin = 1;
                bassBinEnd = 15;

                UMBinBegin = 300;
                UMBinEnd = 450;

                UMThresholdCalcRange = thresholdRangeCalc(0.5f);
                bassThresholdCalcRange = thresholdRangeCalc(0.7f);

                Gdx.app.debug("Read freq", String.valueOf(audioData.getFormat().getSampleRate()));
                Gdx.app.debug("Using following bin ranges",
                        "\nBass freq begin: " + bassBinBegin + "\nBass freq end: " + bassBinEnd
                        + "\nMain freq begin: " + UMBinBegin + "\nMain freq end: " + UMBinEnd);
                Gdx.app.debug("Threshold Calc Range UM", String.valueOf(UMThresholdCalcRange));
                Gdx.app.debug("Threshold Calc Range Bass", String.valueOf(bassThresholdCalcRange));

                fft = new FloatFFT_1D(audioData.getReadWindowSize());

                int seedDigit = 0;
                while (audioData.readSamples(audioPCM) > 0 && work) {
                    // In-place FFT of the current window.
                    fft.realForward(audioPCM);

                    // Build the PUID (pseudo unique ID) one digit at a time, sampling the
                    // average spectrum magnitude at evenly spaced points in the track.
                    if (tasksDone == (seedDigit * totalTasks / 9)) {
                        float avg = 0;
                        for (int frame = 0; frame < spectrum.length; frame++) {
                            avg += spectrum[frame];
                        }
                        avg /= spectrum.length;
                        if (avg < 0) {
                            avg *= -1f;
                        }
                        PUID += (int) Math.pow(10, 9 - seedDigit) * ((int) (avg * 1000f) - (int) (avg * 100f) * 10);
                        seedDigit++;
                    }

                    // Keep the previous window's spectrum, then copy in the new (packed) FFT output.
                    System.arraycopy(spectrum, 0, lastSpectrum, 0, spectrum.length);
                    System.arraycopy(audioPCM, 0, spectrum, 0, spectrum.length);

                    // Spectral flux: sum of the per-bin increases since the previous window
                    // (only positive changes count).
                    float fluxVal = 0;
                    for (int i = bassBinBegin; i < bassBinEnd && work; i++) {
                        fluxVal += (spectrum[i] - lastSpectrum[i]) > 0 ? (spectrum[i] - lastSpectrum[i]) : 0;
                    }
                    bassSpectralFlux.add(fluxVal);

                    fluxVal = 0;
                    for (int i = UMBinBegin; i < UMBinEnd && work; i++) {
                        fluxVal += (spectrum[i] - lastSpectrum[i]) > 0 ? (spectrum[i] - lastSpectrum[i]) : 0;
                    }
                    umSpectralFlux.add(fluxVal);

                    tasksDone++;
                    progress = (int) (100f * tasksDone / totalTasks);
                    sender.send(MiniEvents.ANALYZER_ITERATED);
                }

                if (work) {
                    Gdx.app.debug("Audio Analyzer", "Done getting spectral flux.");
                    shrinkData();
                    containsData = true;
                    Gdx.app.debug("Audio Analyzer", "USING SEED: " + PUID);
                    sender.send(MiniEvents.SPECTRAL_FLUX_DONE);
                }
            }
        };

        thresholdCalculator = new Runnable() {
            @Override
            public void run() {
                // Threshold calculation: moving average of the flux around each window,
                // scaled by the band's multiplier.
                for (int i = 0; i < umSpectralFlux.size && work; i++) {
                    int UMStart = Math.max(0, i - UMThresholdCalcRange / 2);
                    int UMEnd = Math.min(umSpectralFlux.size - 1, i + UMThresholdCalcRange / 2);

                    int bassStart = Math.max(0, i - bassThresholdCalcRange / 2);
                    int bassEnd = Math.min(umSpectralFlux.size - 1, i + bassThresholdCalcRange / 2);

                    float average = 0;
                    for (int j = bassStart; j <= bassEnd; j++) {
                        average += bassSpectralFlux.get(j);
                    }
                    average /= (bassEnd - bassStart + 1); // inclusive range, so end - start + 1 samples
                    bassThreshold.add(average * bassThresholdMultiplier);

                    average = 0;
                    for (int j = UMStart; j <= UMEnd; j++) {
                        average += umSpectralFlux.get(j);
                    }
                    average /= (UMEnd - UMStart + 1);
                    umThreshold.add(average * umThresholdMultiplier);
                }
                Gdx.app.debug("Audio Analyzer", "Threshold calculated.");

                // Pruning: keep only the flux that rises above its threshold.
                float prunnedCurrentVal;
                for (int i = 0; i < umSpectralFlux.size && work; i++) {
                    prunnedCurrentVal = bassSpectralFlux.get(i) - bassThreshold.get(i);
                    if (prunnedCurrentVal >= 0) {
                        bassPrunned.add(prunnedCurrentVal);
                    } else {
                        bassPrunned.add(0);
                    }

                    prunnedCurrentVal = umSpectralFlux.get(i) - umThreshold.get(i);
                    if (prunnedCurrentVal >= 0) {
                        umPrunned.add(prunnedCurrentVal);
                    } else {
                        umPrunned.add(0);
                    }
                }
                Gdx.app.debug("Audio Analyzer", "Data pruned.");

                secondsPerWindow = audioData.getReadWindowSize() / audioData.getFormat().getSampleRate();

                // Peak detection: a pruned value counts as a peak when it is larger than the
                // value that follows it.
                int lastID = 0;
                float bassBeats = 0;
                float umBeats = 0;
                avgBPS = -1f; // -1 means "no bass beat seen yet"; window counting starts at the first beat
                for (int i = 0; i < umPrunned.size - 1 && work; i++) {
                    bassPeaks.add(bassPrunned.get(i) > bassPrunned.get(i + 1) ? bassPrunned.get(i) : 0);
                    if (bassPeaks.get(i) > bassMaxValue) {
                        bassMaxValue = bassPeaks.get(i);
                    }

                    umPeaks.add(umPrunned.get(i) > umPrunned.get(i + 1) ? umPrunned.get(i) : 0);
                    if (umPeaks.get(i) > UMMaxValue) {
                        UMMaxValue = umPeaks.get(i);
                    }

                    // Overlapping beats: where both bands peak, store the average of the two values.
                    if (bassPeaks.get(i) != 0 && umPeaks.get(i) != 0) {
                        overlappedPeaks.add((bassPeaks.get(i) + umPeaks.get(i)) / 2f);
                    } else {
                        overlappedPeaks.add(0);
                    }

                    // Count windows between bass beats and the beats themselves.
                    if (avgBPS != -1) {
                        if (bassPeaks.get(i) == 0) {
                            avgBPS++;
                        } else {
                            bassBeats++;
                            lastID = i;
                        }
                    } else if (bassPeaks.get(i) != 0) {
                        avgBPS = 0;
                    }

                    if (bassPeaks.get(i) != 0) {
                        bassAvg += bassPeaks.get(i);
                    }
                    if (umPeaks.get(i) != 0) {
                        umAvg += umPeaks.get(i);
                        umBeats++;
                    }
                }

                // Drop the windows after the last detected bass beat, then convert the counted
                // windows to seconds and average them over the detected bass beats.
                avgBPS -= umPrunned.size - lastID;
                avgBPS *= secondsPerWindow;
                avgBPS /= bassBeats;

                bassAvg /= bassBeats;
                umAvg /= umBeats;

                if (work) {
                    Gdx.app.debug("Audio Analyzer", "Overlapped beats checked.");
                    finalized = true;
                    sender.send(MiniEvents.MUSIC_DATA_CLEANED);
                }
            }
        };
    }

    public void shrinkData() {
        bassSpectralFlux.shrink();
        bassThreshold.shrink();
        bassPrunned.shrink();
        bassPeaks.shrink();

        umSpectralFlux.shrink();
        umThreshold.shrink();
        umPrunned.shrink();
        umPeaks.shrink();

        overlappedPeaks.shrink();
    }

    /** Starts the first pass: reads the song window by window and records spectral flux. */
    public void startAnalyticalThread(AudioData audiofile) {
        audioPCM = new float[audiofile.getReadWindowSize()];
        spectrum = new float[(audiofile.getReadWindowSize() / 2) + 1];
        lastSpectrum = new float[(audiofile.getReadWindowSize() / 2) + 1];
        this.audioData = audiofile;
        work = true;
        Thread analyticalThread = new Thread(analysisAlgorithm);
        analyticalThread.start();
    }

    /** Starts the second pass with the threshold multipliers lowered by the given modifier. */
    public void runThresholdCleaning(float rangeModifier) {
        this.bassThresholdMultiplier -= rangeModifier;
        this.umThresholdMultiplier -= rangeModifier;
        work = true;
        Thread thresholdClean = new Thread(thresholdCalculator);
        thresholdClean.start();
    }

    /** Starts the second pass with the threshold multipliers unchanged. */
    public void runThresholdCleaning() {
        work = true; // match the parameterized overload so a prior stop() does not leave the pass disabled
        Thread thresholdClean = new Thread(thresholdCalculator);
        thresholdClean.start();
    }

    public FloatArray getBassPeaks() {
        return bassPeaks;
    }

    public FloatArray getUMPeaks() {
        return umPeaks;
    }

    /** Number of read windows that cover the given duration in seconds. */
    private int thresholdRangeCalc(float durationOfRange) {
        float timePerWindow = (float) audioData.getReadWindowSize() / audioData.getFormat().getSampleRate();
        return (int) (durationOfRange / timePerWindow);
    }

    public float getBassMaxValue() {
        return bassMaxValue;
    }

    public float getUMMaxValue() {
        return UMMaxValue;
    }

    /** Current read window index, or 0 once it runs past the peak data. */
    public int getReadIndex() {
        if (audioData.getReadIndex() < umPeaks.size) {
            return audioData.getReadIndex();
        } else {
            return 0;
        }
    }

    public boolean containsData() {
        return containsData;
    }

    public synchronized int getProgress() {
        return progress;
    }

    public boolean isFinalized() {
        return finalized;
    }

    /** Asks any running analysis thread to stop at the next window. */
    public void stop() {
        work = false;
    }

    public int getPUID() {
        return PUID;
    }

    public FloatArray getOverlappedPeaks() {
        return overlappedPeaks;
    }

    public AudioData getAudioData() {
        return audioData;
    }

    public float getAvgBPS() {
        return avgBPS;
    }

    public float getsecondsPerWindow() {
        return secondsPerWindow;
    }
}
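
/*
 * Usage sketch, assuming the caller already has an AudioData instance for the song and
 * listens on `sender` for the MiniEvents fired above (through whatever listener mechanism
 * MiniSender provides); only the two-pass order and method names come from this class's API.
 *
 *   AudioAnalyzer analyzer = new AudioAnalyzer();
 *   analyzer.startAnalyticalThread(audioData);   // pass 1: per-window spectral flux
 *   // ...wait for MiniEvents.SPECTRAL_FLUX_DONE...
 *   analyzer.runThresholdCleaning(0f);           // pass 2: threshold, prune, peak-pick
 *   // ...wait for MiniEvents.MUSIC_DATA_CLEANED...
 *   FloatArray bassPeaks = analyzer.getBassPeaks();
 *   FloatArray mainPeaks = analyzer.getUMPeaks();
 */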