package edu.purdue.wind.rtsj;
import javax.realtime.PriorityParameters;
import javax.realtime.PeriodicParameters;
import javax.realtime.SchedulingParameters;
import javax.realtime.ReleaseParameters;
import javax.realtime.RealtimeThread;
import java.io.IOException;
import edu.purdue.wind.Wave;
import edu.purdue.wind.Log;
/**
* This class implements an audio codec simulator task. It reads a WAV
* file at construction time, then simulates recording it (by copying
* data out of it and sleeping for the appropriate period of time) on
* each release of its periodic thread.
*
* There is considerable room for improvement in simulation accuracy,
* but implementing the actual audio I/O would probably be a better use
* of effort.
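*
* For illustration only, a consumer in this package might drive the
* codec roughly as follows (a sketch, not part of this class; the
* {@code process} step is a hypothetical placeholder):
* <pre>{@code
* AudioCodecRTSJ.start();                        // assumes the Watchdog singleton is already up
* AudioCodecRTSJ codec = AudioCodecRTSJ.instance();
* double[][] samples = codec.retrieveSamples();  // blocks until "recorded" data is available
* double probe = codec.probingFrequency();       // only valid while the samples are held
* process(samples, codec.sampleRate(), probe);   // hypothetical consumer-side processing
* codec.releaseSamples();                        // allow a later release to be retrieved
* }</pre>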
*/
public class AudioCodecRTSJ {
private static AudioCodecRTSJ codec;
private Wave waveFile;
private int duration;
private double[][] data;
private Object lock;
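// Handshake state, guarded by 'lock'. dataFree is true while no consumer
// holds the buffers (i.e. between releaseSamples() and the next
// retrieveSamples()); hasData is true once a simulated recording has
// completed and has not yet been released.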
private boolean dataFree;
private boolean hasData;
private RealtimeThread thread;
private final int watchdogService;
/**
* Create the singleton codec and start its periodic thread.
*
* @throws NullPointerException if the {@link Watchdog} singleton has not
* been initialized.
*/
static void start() {
codec = new AudioCodecRTSJ();
PeriodicParameters periodic = new PeriodicParameters(Configuration.AUDIOCODEC_PERIOD);
PriorityParameters prio = new PriorityParameters(Configuration.CODEC_PRIORITY);
RealtimeThread t = new RealtimeThread((SchedulingParameters)prio,
(ReleaseParameters)periodic,
null, null, null,
new Runnable() {
public void run() {
codec.run();
}
});
codec.setThread(t);
t.start();
}
/**
* Return the singleton codec.
*
* @return the instantiated singleton codec, or {@code null}.
*/
public static AudioCodecRTSJ instance() {
return codec;
}
/**
* Construct the audio codec, parsing the WAV file that it will
* return on each release. Note that this file does not change for
* the lifetime of this codec simulator.
*/
private AudioCodecRTSJ() {
// FIXME: Handle miss/overrun
try {
waveFile = Wave.readFile(Configuration.CODEC_FILE);
} catch (IOException e) {
throw new RuntimeException(e);
}
// Approximate duration in ms; use long math so the result is not
// truncated to whole seconds before scaling
duration = (int) ((waveFile.samples() * 1000L) / waveFile.sampleRate());
data = new double[waveFile.channels()][waveFile.samples()];
lock = new Object();
dataFree = true;
// FIXME: Real reset handler
Watchdog.WatchdogResetHandler wrh = new Watchdog.WatchdogResetHandler() {
public void reset() {
Log.instance().log(Log.LogLevel.ERROR, "AudioCodecRTSJ", "Resetting");
}
};
// FIXME: Reset handler
watchdogService = Watchdog.instance().register(Configuration.AUDIOCODEC_WATCHDOG_PERIODS, wrh);
}
private void setThread(RealtimeThread t) {
this.thread = t;
}
RealtimeThread getThread() {
return thread;
}
private void run() {
// FIXME: Handle shutdown?
Watchdog.instance().activate(watchdogService);
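// On every release: check in with the watchdog, simulate one recording
// period, then wait for the next period.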
while (true) {
Watchdog.instance().checkin(watchdogService);
acquire();
thread.waitForNextPeriod();
}
// Currently unreachable
// Watchdog.instance().deactivate(watchdogService);
}
/**
* Return the sampling rate of the (notional) sampling process
* attached to this codec.
*
* @return sample rate of the codec
*/
public int sampleRate() {
return waveFile.sampleRate();
}
/**
* Check to ensure that the codec buffers have been released by the
* receiving entity, then process the "incoming" data into those
* buffers. This release will block for the duration of the
* incoming data, to simulate actual recording.
* <p>
* If the data from the previous release has been retrieved but not yet
* released, this release is skipped. If it has not been retrieved at
* all, it is simply "overwritten". Any attempt to retrieve the samples
* during the release of this event (via {@link #retrieveSamples()})
* will block until this release has completed.
*/
private void acquire() {
synchronized (lock) {
if (!dataFree) {
// FIXME: Handle the fact that this can't run
return;
}
}
// Ordinarily we would trigger the generation of a probing
// frequency here, as well as recording of the returning
// spectrum. Instead, we're going to copy some buffers and then
// sleep.
// A better approximation would do this in smaller chunks with a
// periodic release handler.
for (short c = 0; c < waveFile.channels(); c++) {
int[] samples = waveFile.sampleData(c);
for (int i = 0; i < samples.length; i++) {
data[c][i] = (double)samples[i];
}
}
try {
Thread.sleep(duration);
} catch (InterruptedException e) {
// There is no meaningful response to interruption here, so ignore it.
}
// Publish the buffers so they can be retrieved via retrieveSamples()
synchronized (lock) {
hasData = true;
lock.notify();
}
}
/**
* Block until data is available, then retrieve it. This prevents
* the codec from "receiving" additional data until
* {@link #releaseSamples()} is invoked.
*
* @return the sample data, indexed by channel and then by sample offset
*/
public double[][] retrieveSamples() {
synchronized (lock) {
while (!hasData) {
try {
lock.wait();
} catch (InterruptedException e) {
// Interrupted or woken spuriously; loop and re-check the condition.
}
}
dataFree = false;
return data;
}
}
/**
* Return the probing frequency used for the currently recorded
* sample data. This method MUST be called between invocations
* of {@link #retrieveSamples()} and {@link #releaseSamples()}.
* Failure to meet this requirement will result in an exception.
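*
* For example (a sketch; {@code codec} is assumed to be the value
* returned by {@link #instance()}):
* <pre>{@code
* double[][] samples = codec.retrieveSamples();
* double probe = codec.probingFrequency(); // OK: samples are held
* codec.releaseSamples();
* // codec.probingFrequency();             // would now throw IllegalStateException
* }</pre>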
*
* @return probing frequency for currently available sample data
*
* @throws IllegalStateException if called at an inappropriate time
*/
public double probingFrequency() {
synchronized (lock) {
if (dataFree) {
throw new IllegalStateException("cannot retrieve probing frequency without retrieving samples");
}
}
return Configuration.CODEC_PROBING_FREQ;
}
/**
* Release the samples from the previous release back to the codec,
* and block any attempt to retrieve samples until a subsequent
* release. Any attempt to access the previously-retrieved samples
* after this call is invalid.
*/
public void releaseSamples() {
synchronized (lock) {
dataFree = true;
hasData = false;
}
}
}