// NOTE(review): this span is the interior of a larger method. `w` (view width
// in pixels), `localFirstEventTime`, and the fields `firstEventTime`,
// `timeRange`, and `fCTD` are declared in the enclosing scope — confirm
// against the full file.
//don't need hirez data for waveform display...
// 8 kHz / 16-bit / 1 channel: low-resolution audio is plenty for drawing a
// waveform overview, and mono means one sample per frame (relied on below).
AudioFormat format = new AudioFormat(8000, 16, 1, MEAPUtil.signed,
MEAPUtil.bigEndian);
AudioReader reader = null;
try
{
//reader = new AudioReader(fCTD.srcFile, format);
reader = AudioReaderFactory.getAudioReader(fCTD.srcFile, format);
}
catch (IOException e1)
{
// Source file could not be read — abort the waveform render.
e1.printStackTrace();
return;
}
catch (UnsupportedAudioFileException e1)
{
// Source file is not a decodable audio format — abort the waveform render.
e1.printStackTrace();
return;
}
//double startTime = fC.startTime;
// Display geometry: total file length in seconds, and how many audio frames
// each horizontal pixel of the w-pixel-wide view must cover.
int frameSize = format.getFrameSize();
double frameRate = (double)format.getFrameRate();
long fileFrameLength = reader.getFrameLength();
double fileTimeLength = ((double)fileFrameLength / frameRate);// / frameSize;
// ceil() so a final partial bucket of frames still maps to its own pixel.
int framesPerPixel = (int) Math.ceil((double)fileFrameLength/(double)w);
double timePerPixel = ((double)framesPerPixel / frameRate);// / frameSize;
// Publish the time window being drawn: from the first event to end of file.
firstEventTime = localFirstEventTime;
timeRange = fileTimeLength - localFirstEventTime;
//System.out.println("waveform seyz: timeRange: " + timeRange + " fileTL: " + fileTimeLength +
// " firstET: " + firstEventTime);
//System.out.println("waveform seyz: frameSize: " + frameSize + " frameRate: " + frameRate +
// " fileFrameLength: " + fileFrameLength + " fileTimeLength: " + fileTimeLength +
// " framesPerPixel: " + framesPerPixel + " timePerPixel: " + timePerPixel + " w: " + w);
try
{
// One sample per frame because we converted the file to mono.
// Advance the reader so drawing starts at localFirstEventTime, not at 0.
long n = reader.skipSamples((long)(localFirstEventTime * frameRate));
}
catch (Exception e)
{
// Best-effort: if the skip fails we log it and draw from wherever the
// reader currently is, rather than aborting the whole render.
e.printStackTrace();
}
// Read one pixel-column's worth of samples per iteration, left to right.
int x = 0;
while (x < w)
{
double[] samples = new double[framesPerPixel];
//double[] samples = new double[100];
try
{
// NOTE(review): the count of samples actually read (n) is discarded
// here; the rest of the loop body is outside this view — confirm that
// short reads near end-of-file are handled downstream.
int n = reader.readSamples(samples);
}
catch (Exception e)
{
e.printStackTrace();
}