// Wire up the visual component for the video track (player mode only).
final int videoTrackIndex = filterGraph.getVideoTrackIndex();
if (mode == PLAYER && videoTrackIndex >= 0) // if it has a video track
{
final RendererNode rendererNode = filterGraph.getRendererNode(videoTrackIndex);
if (rendererNode != null) {
final VideoRenderer videoRenderer = (VideoRenderer) rendererNode.getRenderer();
final VideoFormat videoRendererInputFormat = (VideoFormat) rendererNode.getInputFormat();
// TODO: we need to start the demux
// Expose the renderer's AWT component and size both the component and the
// renderer's bounds to the track's native video dimensions.
visualComponent = videoRenderer.getComponent();
visualComponent.setSize(videoRendererInputFormat.getSize());
//logger.fine("Video size: " + videoRendererInputFormat.getSize());
videoRenderer.setBounds(new Rectangle(videoRendererInputFormat.getSize()));
}
}
// TODO:
// Sun's AudioRenderer implements Prefetchable, Drainable, Clock.
// This causes their handler to call some extra methods during initialization;
// here we have a somewhat hard-coded attempt to recreate that.
// For one, if it is Prefetchable, then we keep passing it buffers while isPrefetching is true,
// then call syncStart (which is a Clock method).
// TODO: determine which of these is to be called in our prefetch, realize, start, etc.
final int audioTrackIndex = filterGraph.getAudioTrackIndex();
if (mode == PLAYER && audioTrackIndex >= 0) // if it has an audio track
{
final RendererNode rendererNode = filterGraph.getRendererNode(audioTrackIndex);
if (rendererNode != null) {
final Renderer renderer = rendererNode.getRenderer();
// If the audio renderer can act as a Clock, get/set its time base so it
// installs its own master clock (see the notes below on A/V sync).
if (renderer instanceof Clock)
{
final Clock rendererAsClock = (Clock) renderer;
try
{
TimeBase timeBase = rendererAsClock.getTimeBase();
// With JMF, this ends up as a com.sun.media.renderer.audio.AudioRenderer$AudioTimeBase@49bdc9d8
// TODO: what do we do in between getting and setting?
// probably what we need to do is somehow use this clock as our clock.
// TODO: this is starting to make sense to me. An audio renderer differs from a video renderer in that
// the audio renderer has to determine time, therefore it is the master clock. The video has to be synched with
// the audio, not the other way around.
rendererAsClock.setTimeBase(timeBase); // this seems unnecessary, but it does cause the audio renderer to set its master clock.
} catch (IncompatibleTimeBaseException e)
{
// Abort: log, notify listeners via a controller error event, and
// report failure to the caller.
logger.log(Level.WARNING, "" + e, e);
postControllerErrorEvent("" + e);
return false;
}
}
}
}
// Open every node in the filter graph except the demux; the demux (root)
// is excluded because it was opened earlier, in setDataSource.
try
{
// root was already opened in setDataSource.
// TODO: JMF calls open on the parser during realize
filterGraph.openExcludeDemux();
}
catch (ResourceUnavailableException e)
{
// A node could not acquire its resources: log, post the error event,
// and report failure to the caller.
logger.log(Level.WARNING, "" + e, e);
postControllerErrorEvent("" + e);
return false;
}
// now that nodes are open, add controls:
// mgodehardt: query processor output datasource for controls
// kenlars99: JMF does not include DataSource controls in its own list of controls.
// (Kept disabled for JMF compatibility — see the note above.)
/*{
final Object[] controls = getSource().getControls();
if (controls != null)
{
for (Object o : controls)
addControl((Control) o);
}
}*/
// add controls from demux
{
final Object[] controls = filterGraph.getRoot().getDemux().getControls();
if (controls != null)
{
for (Object o : controls)
{
addControl((Control) o);
}
}
}
// add all controls from the codec chain ( inclusive mux )
addCodecChainControls(filterGraph.getRoot());
// add all controls from renderers
// (renderers only exist in player mode; processors stop at the mux)
if ( mode == PLAYER )
{
for (int trackIndex = 0; trackIndex < filterGraph.getNumTracks(); ++trackIndex)
{
final RendererNode rendererNode = filterGraph.getRendererNode(trackIndex);
if (rendererNode != null) {
final Renderer renderer = rendererNode.getRenderer();
// add any controls from renderer
final Object[] controls = renderer.getControls();
if (controls != null)
{
for (Object o : controls)