wav decoding optimization works
parent a47cd1993b
commit 107b0e2553
@@ -71,7 +71,7 @@ public class AudioAnalyzer {
public void run() {
progress = 0;
int tasksDone = 0;
int totalTasks = audioData.getSampleCount()/audioData.getReadWindowSize();
long totalTasks = audioData.getFrameCount()/audioData.getReadWindowSize();

bassThresholdMultiplier = 1.5f;
mThresholdMultiplier = 1.4f;
@@ -93,6 +93,7 @@ public class AudioAnalyzer {
Gdx.app.debug("Read freq", String.valueOf(audioData.getSampleRate()));
Gdx.app.debug("Using following bin ranges", "\nBass freq begin: " + bassBinBegin + "\nBass freq end: " + bassBinEnd + "\nMain freq begin: " + umBinBegin + "\nMain freq end: " + umBinEnd);

Gdx.app.debug("Total tasks", String.valueOf(totalTasks));

Gdx.app.debug("Threshold Calc Range UM", String.valueOf(umThresholdCalcRange));
Gdx.app.debug("Threshold Calc Range M", String.valueOf(umThresholdCalcRange));
@@ -100,6 +101,7 @@ public class AudioAnalyzer {

fft = new FloatFFT_1D(audioData.getReadWindowSize());
int seedDigit = 0;

while (audioData.readSamples(audioPCM) > 0 && work) {

fft.realForward(audioPCM);
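A side note on how the loop above consumes each window: below is a minimal sketch, not project code, of turning one read window into magnitude bins, assuming JTransforms' half-complex packing for realForward on an even-length window. Class and method names are illustrative; depending on the JTransforms version the import is org.jtransforms.fft.FloatFFT_1D or edu.emory.mathcs.jtransforms.fft.FloatFFT_1D.

import org.jtransforms.fft.FloatFFT_1D;

// Sketch only: one analysis window of mono samples -> magnitude spectrum.
public final class WindowSpectrumSketch {
	static float[] magnitudes(float[] window, FloatFFT_1D fft) {
		int n = window.length;                         // assumed even
		fft.realForward(window);                       // in place; half-complex packed result
		float[] mag = new float[n / 2];
		mag[0] = Math.abs(window[0]);                  // DC bin (window[1] holds Re[n/2])
		for (int k = 1; k < n / 2; k++) {
			float re = window[2 * k];
			float im = window[2 * k + 1];
			mag[k] = (float) Math.sqrt(re * re + im * im);
		}
		return mag;
	}
}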
@@ -44,7 +44,7 @@ public interface AudioData extends Disposable {
* returns sample count
* @return
*/
public int getSampleCount();
public long getFrameCount();

/**
* returns duration of song in seconds
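Taken together with the implementations changed below, the contract this commit moves the interface toward looks roughly like the following sketch. The method names come from the diff; the Javadoc wording, ordering, and the getReadWindowSize() signature (only seen being called from AudioAnalyzer) are illustrative.

import com.badlogic.gdx.utils.Disposable;

// Sketch of the revised AudioData contract as implied by this commit; comments are illustrative.
public interface AudioData extends Disposable {
	/** number of PCM frames in the track (one frame = one sample per channel), now a long */
	public long getFrameCount();

	/** fills the array with mono-folded samples; returns how many frames were actually read */
	public int readSamples(float[] samples);

	/** samples consumed per analysis pass, as used by AudioAnalyzer (assumed signature) */
	public int getReadWindowSize();

	public float getSampleRate();
}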
@@ -10,10 +10,12 @@ import org.jaudiotagger.tag.TagException;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.audio.Music;
import com.badlogic.gdx.files.FileHandle;
import com.badlogic.gdx.utils.GdxRuntimeException;

import javazoom.jl.decoder.Bitstream;
import javazoom.jl.decoder.BitstreamException;
import javazoom.jl.decoder.DecoderException;
import javazoom.jl.decoder.Header;
import javazoom.jl.decoder.MP3Decoder;
import javazoom.jl.decoder.OutputBuffer;

@@ -24,27 +26,39 @@ public class Mp3AudioData implements AudioData {
private int readIndex;

private int sampleRate;
private int sampleCount;
private long sampleCount;
private float durationInSeconds;

Bitstream bitStream;
Bitstream bitstream;
MP3Decoder decoder;
OutputBuffer sampleBuffer;
Mp3.Music test;
public Mp3AudioData(FileHandle audioFile) {
try {
MP3File mp3File = new MP3File(audioFile.file());

sampleCount = (int) mp3File.getMP3AudioHeader().getNumberOfFrames();
sampleCount = mp3File.getMP3AudioHeader().getNumberOfFrames();
durationInSeconds = mp3File.getMP3AudioHeader().getNumberOfFrames()/readWindowSize;
sampleRate = mp3File.getMP3AudioHeader().getSampleRateAsNumber();

bitStream = new Bitstream(audioFile.read());
decoder = new MP3Decoder();
} catch (IOException | TagException | ReadOnlyFileException | InvalidAudioFrameException e1) {
e1.printStackTrace();
} catch (IOException | TagException | ReadOnlyFileException | InvalidAudioFrameException e) {
e.printStackTrace();
}
reset();


bitstream = new Bitstream(audioFile.read());
decoder = new MP3Decoder();

try {
Header header = bitstream.readFrame();
if (header == null) throw new GdxRuntimeException("Empty MP3");
int channels = header.mode() == Header.SINGLE_CHANNEL ? 1 : 2;
sampleBuffer = new OutputBuffer(channels, false);
decoder.setOutputBuffer(sampleBuffer);
} catch (BitstreamException e) {
throw new GdxRuntimeException("error while preloading mp3", e);
}

playbackMusic = Gdx.audio.newMusic(audioFile);

reset();
}

@Override
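For orientation, here is a hedged usage sketch of the class after this change. The file name is a placeholder and getReadWindowSize() is assumed to be part of the AudioData contract (it is only seen being called from AudioAnalyzer); nothing here is taken from the project verbatim.

import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.files.FileHandle;

// Illustrative only; "song.mp3" is a placeholder, not a file from the project.
public class Mp3AudioDataUsageSketch {
	static void analyze() {
		FileHandle file = Gdx.files.internal("song.mp3");
		AudioData audio = new Mp3AudioData(file);   // the new constructor preloads one frame to size the OutputBuffer
		float[] window = new float[audio.getReadWindowSize()];
		long totalWindows = audio.getFrameCount() / audio.getReadWindowSize();   // matches totalTasks in AudioAnalyzer.run()
		while (audio.readSamples(window) > 0) {
			// hand each window to the analyzer / FFT here
		}
		audio.dispose();                            // dispose() now also closes the Bitstream
	}
}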
@@ -73,7 +87,7 @@ public class Mp3AudioData implements AudioData {
}

@Override
public int getSampleCount() {
public long getFrameCount() {
return sampleCount;
}

@@ -81,6 +95,11 @@ public class Mp3AudioData implements AudioData {
public void dispose() {
reset();
playbackMusic.dispose();
try {
bitstream.close();
} catch (BitstreamException e) {
e.printStackTrace();
}
}

@Override
@@ -90,31 +109,44 @@ public class Mp3AudioData implements AudioData {

@Override
public int readSamples(float[] samples) {
int samplesRead = 0;
try {
OutputBuffer samplesBuffer = decoder.decodeFrame(bitStream.readFrame(), bitStream);
byte[] frameBuffer = samplesBuffer.getBuffer();
for (int sampleID = 0; sampleID < samples.length; sampleID++) {
int totalRead = 0;

for (int sid = 0; sid < samples.length; sid++) {
try {
Header header = bitstream.readFrame();

samples[sampleID] += (short) ((frameBuffer[1] << 8) + (frameBuffer[0] & 0xff));
if (samplesBuffer.isStereo()) {
samples[sampleID] = (short) ((frameBuffer[3] << 8) + (frameBuffer[2] & 0xff));

samples[sampleID] /= 2;
if (header == null) break;

try {
decoder.decodeFrame(header, bitstream);
} catch (ArrayIndexOutOfBoundsException | DecoderException e) {
System.out.println(e);
}
bitstream.closeFrame();
sampleBuffer.reset();
byte[] buffer = sampleBuffer.getBuffer();

samples[sampleID] /= Short.MAX_VALUE+1;
samplesRead ++;
samples[sid] += (buffer[1] << 8) + (buffer[0] & 0x00ff);
if (sampleBuffer.isStereo()) {
samples[sid] += (buffer[3] << 8) + (buffer[2] & 0x00ff);
samples[sid] /= 2;
}
totalRead++;

samples[sid] /= Short.MAX_VALUE + 1;
} catch (BitstreamException e1) {
e1.printStackTrace();
}
} catch (DecoderException | BitstreamException e) {
e.printStackTrace();
}
return samplesRead;

return totalRead;
}

@Override
public float getSampleRate() {
return sampleRate;
}



}
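The readSamples above (and the WAV one later in this commit) folds interleaved 16-bit little-endian PCM down to a mono float the same way: assemble each short from its two bytes, average left and right, then divide by Short.MAX_VALUE + 1. Below is a self-contained sketch of that conversion; the class and method names are made up, not code from the project.

// Sketch only: one interleaved 16-bit little-endian PCM frame -> one mono float in roughly [-1, 1).
public final class PcmFrameSketch {
	static float toMonoFloat(byte[] frame, boolean stereo) {
		// the high byte keeps its sign when shifted; the low byte is masked so it does not sign-extend
		int left = (frame[1] << 8) | (frame[0] & 0xff);
		int mixed = left;
		if (stereo) {
			int right = (frame[3] << 8) | (frame[2] & 0xff);
			mixed = (left + right) / 2;               // average the two channels into one value
		}
		return mixed / (float) (Short.MAX_VALUE + 1); // divide by 32768 to normalize full scale
	}
}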
@@ -69,8 +69,8 @@ public class WavAudioData implements AudioData {
}

@Override
public int getSampleCount() {
return decoder.getDataSize()/(2*decoder.getChannels());
public long getFrameCount() {
return decoder.getFrameCount();
}

@Override
@@ -6,6 +6,7 @@ import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.UnsupportedAudioFileException;

import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.files.FileHandle;

public class WavDecoder {
@@ -13,16 +14,20 @@ public class WavDecoder {

private int channels;
private double sampleRate;
private int dataSize;
private int byteRate;
private String fileName;
private byte[] buffer;
private AudioInputStream audioInputStream;

public WavDecoder(FileHandle file) throws IOException {
this.file = file;
try {
audioInputStream = AudioSystem.getAudioInputStream(file.read());
audioInputStream = AudioSystem.getAudioInputStream(file.file());
Gdx.app.debug("WAVDecoder", String.valueOf(audioInputStream.getFormat().getFrameSize()));
buffer = new byte[audioInputStream.getFormat().getFrameSize()];

channels = audioInputStream.getFormat().getChannels();
sampleRate = audioInputStream.getFormat().getSampleRate();
fileName = file.name();
} catch (UnsupportedAudioFileException e) {
e.printStackTrace();
}
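Switching to AudioSystem.getAudioInputStream(file.file()) means the WAV header is parsed once up front, so frame size, channel count, frame rate and frame length all come straight from javax.sound.sampled rather than being tracked by hand. A hedged sketch of the standard-library calls the new code leans on; the class and method names here are illustrative only.

import java.io.File;
import java.io.IOException;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.UnsupportedAudioFileException;

// Illustrative only: the quantities WavDecoder now reads from the parsed WAV header.
public final class WavHeaderFactsSketch {
	public static void print(File wav) throws IOException, UnsupportedAudioFileException {
		AudioInputStream in = AudioSystem.getAudioInputStream(wav);
		AudioFormat fmt = in.getFormat();
		int frameSize = fmt.getFrameSize();               // bytes per frame = channels * bytes per sample
		long frameCount = in.getFrameLength();            // what getFrameCount() now returns
		float seconds = frameCount / fmt.getFrameRate();  // what getDurationInSeconds() now computes
		System.out.println(frameSize + " B/frame, " + frameCount + " frames, " + seconds + " s");
		in.close();
	}
}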
@@ -32,20 +37,16 @@ public class WavDecoder {
return channels;
}

public int getByteRate() {
return byteRate;
}

public int getDataSize() {
return dataSize;
}

public double getSampleRate() {
return sampleRate;
}

public long getDurationInSeconds() {
return (long) (dataSize/byteRate);
public float getDurationInSeconds() {
return audioInputStream.getFrameLength()/audioInputStream.getFormat().getFrameRate();
}

public long getFrameCount() {
return audioInputStream.getFrameLength();
}

public String getFileName() {
@@ -57,22 +58,23 @@ public class WavDecoder {
}

public int readSamples(float[] samples) throws IOException {
int samplesRead = 0;
int framesRead = 0;

for (int sampleID = 0; sampleID < samples.length; sampleID++) {
audioInputStream.read(buffer);

samples[sampleID] += (short) ((buffer[1] << 8) + (buffer[0] & 0xff));
if (audioInputStream.getFormat().getChannels() > 1) {
samples[sampleID] = (short) ((buffer[3] << 8) + (buffer[2] & 0xff));
samples[sampleID] /= 2;
if (audioInputStream.read(buffer) > 0) {
samples[sampleID] += (buffer[1] << 8) + (buffer[0] & 0x00ff);
if (audioInputStream.getFormat().getChannels() > 1) {
samples[sampleID] += (buffer[3] << 8) + (buffer[2] & 0x00ff);
samples[sampleID] /= 2;
}
framesRead ++;
}


samples[sampleID] /= Short.MAX_VALUE+1;
samplesRead ++;
}

return samplesRead;
return framesRead;
}

public void cleanAndClose() {
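One behavioural note on the rewritten readSamples above: frames are only counted when audioInputStream.read(buffer) actually returns data, so a caller can simply loop until the method reports zero. Below is a hedged usage sketch; the file name, window size, and wrapper class name are placeholders rather than values from the project.

import java.io.IOException;
import com.badlogic.gdx.Gdx;

// Illustrative caller only; "track.wav" and the 1024-sample window are made up.
public final class WavDecoderUsageSketch {
	static void drain() throws IOException {
		WavDecoder decoder = new WavDecoder(Gdx.files.internal("track.wav"));
		float[] window = new float[1024];
		long framesSeen = 0;
		int read;
		while ((read = decoder.readSamples(window)) > 0) {
			framesSeen += read;                      // the last window may come back partially filled
		}
		// for a well-formed file framesSeen should match decoder.getFrameCount()
		decoder.cleanAndClose();
	}
}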
@@ -14,7 +14,7 @@ public class DesktopLauncher {

config.resizable = false;
config.useHDPI = true;
System.setProperty("org.lwjgl.opengl.Window.undecorated", "true");
// System.setProperty("org.lwjgl.opengl.Window.undecorated", "true");


new LwjglApplication(new RhythmBullet(), config);