With monday.com’s project management tool, you can see what everyone on your team is working on at a single glance. Its intuitive dashboards are customizable, so you can create systems that work for you.
/**
 * Converts the most recently decoded 16-bit little-endian stereo PCM bytes
 * from the circular {@code audioDataBuffer} into normalized float samples in
 * the {@code left} and {@code right} arrays ({@code sampleSize} samples each,
 * scaled to roughly [-1, 1] by dividing by 32767).
 */
private void handleAudioBytes() {
    left = new float[sampleSize];
    right = new float[sampleSize];
    // Read cursor into the circular buffer: total decoded bytes minus the
    // bytes accounted for by completed wraps of the buffer.
    int c = (int) ((long) (framesRead * frameSize) - (long) (audioDataBuffer.length * offset));
    if (c > 0) {
        for (int a = 0; a < sampleSize; a++, c += 4) {
            // BUGFIX: wrap before a full 4-byte frame would run past the end.
            // The previous check (c >= length) still allowed out-of-bounds
            // reads at c+1..c+3 when c landed within the last 3 bytes.
            if (c > (audioDataBuffer.length - 4)) {
                c = (c - audioDataBuffer.length);
                offset++;
            }
            // channelMode == CHANNEL_MODE_STEREO && sampleType ==
            // SAMPLE_TYPE_SIXTEEN_BIT
            // Little-endian: bytes [c, c+1] = left sample, [c+2, c+3] = right.
            left[a] = ((audioDataBuffer[c + 1] << 8) + audioDataBuffer[c]) / 32767.0f;
            right[a] = ((audioDataBuffer[c + 3] << 8) + audioDataBuffer[c + 2]) / 32767.0f;
        }
    }
}
/**
 * Decodes an audio file to 16-bit signed little-endian PCM, runs a 512-point
 * FFT over the sample stream at a fixed analysis frame rate, collapses the
 * spectrum into a small number of bands with a decay ("spectrum analyzer")
 * effect, and streams the banded data to a {@link FlashFFTDataConsumer}.
 */
public class FlashFFT {
    // Per-channel sample windows filled by handleAudioBytes().
    private float[] left;
    private float[] right;
    // Number of samples handed to the FFT per analysis pass.
    private int sampleSize = 2048;
    // Circular buffer of decoded PCM bytes (sampleRate * 2 bytes long).
    private byte[] audioDataBuffer = null;
    private KJFFT fft = new KJFFT(512);
    // Previous spectrum values, used to apply the falloff/decay effect.
    private float[] old_FFT = new float[512];
    private int desiredBands = 12;
    // Total PCM frames decoded so far.
    private int framesRead = 0;
    // Current write position within audioDataBuffer.
    private int position = 0;
    private AudioFormat baseFormat;
    // Number of times the read cursor has wrapped around audioDataBuffer.
    private int offset = 0;
    // Bytes per decoded PCM frame (channels * 2 for 16-bit samples).
    private int frameSize;
    private FlashFFTDataConsumer consumer;

    public FlashFFT() {
    }

    /**
     * Decodes {@code inputFile}, computes banded FFT frames at roughly
     * {@code desiredFPS} frames per second of audio, and delivers each frame
     * to {@code consumer}. Calls {@code consumer.processFinished()} when the
     * stream is exhausted. Progress marks are printed to stdout.
     *
     * @param inputFile    audio file readable by {@link AudioSystem}
     * @param consumer     receives each banded FFT frame and the finish signal
     * @param desiredFPS   target analysis frames per second of audio
     * @param desiredBands number of output bands per frame
     * @throws FFTException wrapping any {@link IOException} or
     *                      {@link UnsupportedAudioFileException}
     */
    public void generateFFT(File inputFile, FlashFFTDataConsumer consumer, int desiredFPS,
            int desiredBands) {
        this.consumer = consumer;
        AudioInputStream din = null;
        AudioInputStream in = null;
        try {
            long time = new Date().getTime();
            this.desiredBands = desiredBands;
            AudioFileFormat baseFileFormat = AudioSystem.getAudioFileFormat(inputFile);
            baseFormat = baseFileFormat.getFormat();
            in = AudioSystem.getAudioInputStream(inputFile);
            // Decode to 16-bit signed little-endian PCM, preserving the
            // source sample rate and channel count.
            AudioFormat decodedFormat = new AudioFormat(
                    AudioFormat.Encoding.PCM_SIGNED,
                    baseFormat.getSampleRate(), 16, baseFormat.getChannels(),
                    baseFormat.getChannels() * 2, baseFormat.getSampleRate(),
                    false);
            din = AudioSystem.getAudioInputStream(decodedFormat, in);
            frameSize = decodedFormat.getFrameSize();
            byte[] data = new byte[4096];
            System.out.println("Input sample rate: "
                    + baseFormat.getSampleRate() + "Hz");
            audioDataBuffer = new byte[(int) baseFormat.getSampleRate() << 1];
            int nBytesRead = 0;
            float passCount = 0;
            int sampleCount = 0;
            System.out.println("Processing input at " + desiredFPS
                    + "fps... (one mark for 100 frames)");
            while (nBytesRead != -1) {
                nBytesRead = din.read(data, 0, data.length);
                if (nBytesRead != -1) {
                    framesRead += (nBytesRead / decodedFormat.getFrameSize());
                    storeAudioData(data, 0, nBytesRead);
                    handleAudioBytes();
                    // Approximate milliseconds of audio consumed per read
                    // (assumes full 4096-byte reads — TODO confirm intent).
                    passCount += 1000 / (baseFormat.getSampleRate() / 1000);
                    if (passCount >= (1000 / desiredFPS) * sampleCount) {
                        computeFFT(mergeStereo(left, right));
                        sampleCount++;
                        if (sampleCount % 100 == 0) {
                            System.out.print("#");
                            if (sampleCount % 5000 == 0)
                                System.out.print("\n");
                        }
                    }
                }
            }
            consumer.processFinished();
            long duration = new Date().getTime() - time;
            System.out.println("\nFinished!");
            int audioSeconds = sampleCount / desiredFPS;
            long wallSeconds = duration / 1000;
            // BUGFIX: the original divided by (duration / 1000), which throws
            // ArithmeticException when processing finishes in under a second.
            String speed = wallSeconds > 0
                    ? (audioSeconds / (int) wallSeconds) + "x real time"
                    : "under 1s wall time";
            System.out.println("Processed " + audioSeconds
                    + "s of samples in " + wallSeconds + "s (" + speed + ")");
        } catch (IOException e) {
            throw new FFTException(e);
        } catch (UnsupportedAudioFileException e) {
            throw new FFTException(e);
        } finally {
            // Removed the unused FileOutputStream local the original declared
            // but never opened.
            try {
                if (din != null)
                    din.close();
                if (in != null)
                    in.close();
            } catch (IOException e1) {
                throw new FFTException(e1);
            }
        }
    }

    /**
     * Appends {@code pLength} bytes from {@code pAudioData} into the circular
     * {@code audioDataBuffer}, wrapping to the start on overflow and updating
     * {@code position}.
     */
    private void storeAudioData(byte[] pAudioData, int pOffset, int pLength) {
        int wOverrun = 0;
        if (position + pLength > audioDataBuffer.length - 1) {
            wOverrun = (position + pLength) - audioDataBuffer.length;
            pLength = audioDataBuffer.length - position;
        }
        System.arraycopy(pAudioData, pOffset, audioDataBuffer, position,
                pLength);
        if (wOverrun > 0) {
            // Copy the remainder to the start of the buffer and wrap.
            System.arraycopy(pAudioData, pOffset + pLength, audioDataBuffer, 0,
                    wOverrun);
            position = wOverrun;
        } else {
            position += pLength;
        }
    }

    /**
     * Converts the most recent 16-bit little-endian stereo PCM bytes from the
     * circular buffer into {@code sampleSize} normalized floats per channel
     * in {@code left} and {@code right}.
     */
    private void handleAudioBytes() {
        left = new float[sampleSize];
        right = new float[sampleSize];
        // Read cursor: total decoded bytes minus bytes consumed by completed
        // wraps of the circular buffer.
        int c = (int) ((long) (framesRead * frameSize) - (long) (audioDataBuffer.length * offset));
        if (c > 0) {
            for (int a = 0; a < sampleSize; a++, c += 4) {
                // Wrap before a full 4-byte frame would run past the end.
                if (c > (audioDataBuffer.length - 4)) {
                    c = (c - audioDataBuffer.length);
                    offset++;
                }
                // channelMode == CHANNEL_MODE_STEREO && sampleType ==
                // SAMPLE_TYPE_SIXTEEN_BIT
                left[a] = ((audioDataBuffer[c + 1] << 8) + audioDataBuffer[c]) / 32767.0f;
                right[a] = ((audioDataBuffer[c + 3] << 8) + audioDataBuffer[c + 2]) / 32767.0f;
            }
        }
    }

    /**
     * Averages the two channels sample-by-sample into {@code pLeft} (mutated
     * in place) and returns it.
     */
    private float[] mergeStereo(float[] pLeft, float[] pRight) {
        for (int a = 0; a < pLeft.length; a++) {
            pLeft[a] = (pLeft[a] + pRight[a]) / 2.0f;
        }
        return pLeft;
    }

    /**
     * Runs the FFT over {@code pSample}, collapses the spectrum into
     * {@code desiredBands} bands (averaged, log-weighted, clamped to 1.0),
     * applies a per-band decay against {@code old_FFT}, and hands the result
     * to the consumer.
     */
    private void computeFFT(float[] pSample) {
        // Spectrum bins per output band (512-point FFT yields 256 usable bins).
        int saMultiplier = (512 / 2) / desiredBands;
        float[] wFFT = fft.calculate(pSample);
        // Decay subtracted per pass when a band's level falls.
        float wSadfrr = (0.03f);
        float result[] = new float[desiredBands];
        for (int a = 0, bd = 0; bd < desiredBands; a += saMultiplier, bd++) {
            float wFs = 0;
            // -- Average out nearest bands.
            for (int b = 0; b < saMultiplier; b++) {
                wFs += wFFT[a + b];
            }
            // -- Log filter: boost higher bands, clamp to 1.0.
            wFs = (wFs * (float) Math.log(bd + 2));
            if (wFs > 1.0f) {
                wFs = 1.0f;
            }
            // -- Compute SA decay. NOTE(review): old_FFT is indexed by the
            // bin index `a` (stride saMultiplier), not the band index `bd`;
            // self-consistent, but verify this was intentional.
            if (wFs >= (old_FFT[a] /* - wSadfrr */)) {
                old_FFT[a] = wFs;
            } else {
                old_FFT[a] -= wSadfrr;
                if (old_FFT[a] < 0) {
                    old_FFT[a] = 0;
                }
                wFs = old_FFT[a];
            }
            result[bd] = wFs;
        }
        consumer.handleFFT(result);
    }
}
/**
 * Converts the most recent 16-bit little-endian PCM bytes from the circular
 * {@code audioDataBuffer} into {@code sampleSize} normalized floats per
 * channel. Stereo input fills {@code left} and {@code right} independently;
 * mono input is duplicated into both.
 *
 * @param numChannels 1 for mono or 2 for stereo input
 */
private void handleAudioBytes(int numChannels) {
    left = new float[sampleSize];
    right = new float[sampleSize];
    // BUGFIX: the cursor must advance by the actual frame size. The original
    // always stepped 4 bytes, which skipped every other sample for mono
    // (2-byte) frames.
    int bytesPerFrame = numChannels * 2;
    // Read cursor: total decoded bytes minus bytes consumed by completed
    // wraps of the circular buffer.
    int c = (int) ((long) (framesRead * frameSize) - (long) (audioDataBuffer.length * offset));
    if (c > 0) {
        for (int a = 0; a < sampleSize; a++, c += bytesPerFrame) {
            // BUGFIX: wrap before a full frame would run past the end; the
            // original (c >= length) still allowed reads at c+1..c+3.
            if (c > audioDataBuffer.length - bytesPerFrame) {
                c = (c - audioDataBuffer.length);
                offset++;
            }
            if (numChannels == 2) {
                // channelMode == CHANNEL_MODE_STEREO && sampleType ==
                // SAMPLE_TYPE_SIXTEEN_BIT
                left[a] = ((audioDataBuffer[c + 1] << 8) + audioDataBuffer[c]) / 32767.0f;
                right[a] = ((audioDataBuffer[c + 3] << 8) + audioDataBuffer[c + 2]) / 32767.0f;
            } else if (numChannels == 1) {
                // channelMode == CHANNEL_MODE_MONO && sampleType ==
                // SAMPLE_TYPE_SIXTEEN_BIT
                left[a] = ((audioDataBuffer[c + 1] << 8) + audioDataBuffer[c]) / 32767.0f;
                right[a] = left[a];
            }
        }
    }
}
handleAudioBytes(baseFormat.getChannels());
If you are experiencing a similar issue, please ask a related question
Join the community of 500,000 technology professionals and ask your questions.