Package javax.media

Examples of javax.media.Buffer


          td.decodeYUVout(yuv);

          final BufferedImage bi = YUVConverter.toBufferedImage(yuv, ti);

          final Buffer b = ImageToBuffer.createBuffer(bi, format.getFrameRate());

          buffer.setData(b.getData());
          buffer.setLength(b.getLength());
          buffer.setOffset(b.getOffset());
          buffer.setEOM(false);
          buffer.setDiscard(false);
          buffer.setTimeStamp((long) secondsToNanos(videobuf_time));

          //System.out.println("Generated video buffer");
View Full Code Here


          THEORA.theora_decode_YUVout(td, yuv);

          final BufferedImage bi = YUVConverter.toBufferedImage(yuv, ti);

          final Buffer b = ImageToBuffer.createBuffer(bi, format.getFrameRate());

          buffer.setData(b.getData());
          buffer.setLength(b.getLength());
          buffer.setOffset(b.getOffset());
          buffer.setEOM(false);
          buffer.setDiscard(false);
          buffer.setTimeStamp((long) secondsToNanos(videobuf_time));

          //System.out.println("Generated video buffer");
View Full Code Here

    {

     
      final Format rendererInputFormat;
      final javax.media.format.AudioFormat format;
      final Buffer initialBuffer;
     
      if (codec != null)
      {

        format = (javax.media.format.AudioFormat) track.getFormat();
        logger.fine("codec input format=" + format);
       
        codec.setInputFormat(format);
        final Format[] supportedOutputFormats = codec.getSupportedOutputFormats(format);
        if (supportedOutputFormats.length == 0)
        {
          logger.warning("No supported output formats for the codec");
          return;
        }
        final Format codecOutputFormat = supportedOutputFormats[0];
        codec.setOutputFormat(codecOutputFormat);
        rendererInputFormat = codecOutputFormat;
       
        logger.fine("codecOutputFormat=" + codecOutputFormat);
 
        try
        {
          codec.open();
        } catch (ResourceUnavailableException e2)
        {
          logger.log(Level.WARNING, "" + e2, e2)// TODO
          return;
        }
       
        initialBuffer = new Buffer();
      }
      else
      {
        format = (javax.media.format.AudioFormat) track.getFormat();
        rendererInputFormat = format;
        initialBuffer = new Buffer();
      }
     

      renderer.setInputFormat(rendererInputFormat);
     
      try
      {
        renderer.open();
      } catch (ResourceUnavailableException e1)
      {
        logger.log(Level.WARNING, "" + e1, e1);
        return;     // TODO
      }
      renderer.start();
     
      // TODO: we are feeding the renderer with data from the raw input stream, rather than the AudioInputStream.
      // in the case of formats like MP3, will this work?
     
      final Buffer buffer = initialBuffer;//is2.getBuffer();  // this allows us to pick up any unread data from TrackInputStream reading
      Buffer buffer2 = new Buffer();//is.getBuffer();  // this allows us to pick up any unread data from TrackInputStream reading
      while (!buffer.isEOM())
      {
        track.readFrame(buffer);
        logger.fine("read buffer from track in loop: " + buffer.getLength() + " " + bufferToString((byte[]) buffer.getData()));
        if (buffer.getFormat() == null)
          buffer.setFormat(format);
       
        if (buffer.isDiscard())
          continue;
       
        if (codec != null)
        {
          int codecResult = codec.process(buffer, buffer2);
          if (codecResult == Codec.OUTPUT_BUFFER_NOT_FILLED)
          {
            logger.fine("Codec.OUTPUT_BUFFER_NOT_FILLED");
            continue;
            // TODO:
          }
          else if (codecResult == Codec.BUFFER_PROCESSED_FAILED)
          {
            logger.warning("Codec.BUFFER_PROCESSED_FAILED");
            return;
          }
          if (buffer2.getFormat() == null)
            buffer2.setFormat(rendererInputFormat);

         
          logger.fine("got buffer from codec: " + buffer2.getLength());
        }
        else
          buffer2 = buffer;
       
        final int result = renderer.process(buffer2);
        if (result == Renderer.BUFFER_PROCESSED_FAILED)
        {  logger.warning("Renderer.BUFFER_PROCESSED_FAILED");
          return;
        }
        // TODO: handle errors, incomplete processing
       
      }
     
      if (codec != null)
      {
        logger.fine("Codec still contains data, continuing processing");
        while (!buffer2.isEOM())
        {
          // we must have data we still have to get out of the codec
          buffer.setLength(0);
         
          int codecResult = codec.process(buffer, buffer2);
          if (codecResult == Codec.OUTPUT_BUFFER_NOT_FILLED)
          {
            logger.fine("Codec.OUTPUT_BUFFER_NOT_FILLED");
            continue;
            // TODO:
          }
          else if (codecResult == Codec.BUFFER_PROCESSED_FAILED)
          {
            logger.warning("Codec.BUFFER_PROCESSED_FAILED");
            return;
          }
          if (buffer2.getFormat() == null)
            buffer2.setFormat(rendererInputFormat);

         
          logger.fine("got buffer from codec: " + buffer2.getLength());
         
          final int result = renderer.process(buffer2);
          if (result == Renderer.BUFFER_PROCESSED_FAILED)
          {  logger.warning("Renderer.BUFFER_PROCESSED_FAILED");
            return;
View Full Code Here

  // Most recently captured frame; null until the first frame arrives.
  // Guarded by currentBufferMutex — read/written only inside synchronized blocks.
  private Buffer currentBuffer;
  // Monitor object protecting all access to currentBuffer.
  private Object currentBufferMutex = new Object();

  public Buffer grabFrame()
  {
    Buffer aBuffer = null;
   
    synchronized ( currentBufferMutex )
    {
      if ( null != currentBuffer )
      {
        aBuffer = (Buffer)currentBuffer.clone();
        aBuffer.setFormat((Format)currentBuffer.getFormat().clone());
      }
    }
   
    return aBuffer;
  }
View Full Code Here

          if (TRACE) logger.fine("jitterbuflen=" + jitterbuflen);
        }
       
        synchronized ( currentBufferMutex )
        {
          currentBuffer = new Buffer();
         
          // live data has no duration, timestamp is a high resolution timer
          currentBuffer.setFlags(Buffer.FLAG_LIVE_DATA | Buffer.FLAG_SYSTEM_TIME);
          currentBuffer.setOffset(0);
          currentBuffer.setData(image.getBytes());
View Full Code Here

      }
     
      try
      {
        // will block until data is available
        Buffer aBuffer = (Buffer)jitterBuffer.get();
       
        // live data has no duration, timestamp is a high resolution timer
        buffer.setFlags(aBuffer.getFlags());
        buffer.setOffset(0);
        buffer.setData(aBuffer.getData());
        buffer.setLength(aBuffer.getLength());
        buffer.setFormat(aBuffer.getFormat());
        buffer.setSequenceNumber(aBuffer.getSequenceNumber());
        buffer.setTimeStamp(aBuffer.getTimeStamp());
      }
      catch ( InterruptedException ex )
      {
      }
    }
View Full Code Here

      System.err.println("Who the hell stream is this?");
  }
    }

    void doneAllStreams() {
  Buffer buffer = new Buffer();
  updateTime();
  buffer.setEOM(true);
  for (int i = 0; i < outputStreams.length; i++) {
      buffer.setFormat(outputStreams[i].getFormat());
      outputStreams[i].pushData(buffer);
  }
  connected = false;
    }
View Full Code Here

    throw new IncompatibleSourceException();

      }

      this.source = source;
      readBuffer = new Buffer();
  }
View Full Code Here

        }
      }

      public void read(Buffer buffer) throws IOException
      {
        Buffer nextBuffer = null;
        try
        {
          nextBuffer = (Buffer) bufferQueue.get();
        } catch (InterruptedException e)
        {
          throw new InterruptedIOException("" + e);
        }
        if (nextBuffer.isEOM())
          eos = true;
       
        buffer.copy(nextBuffer);
       
       
View Full Code Here

            assembler.put((Buffer)input.clone());
            ///dump(input, "Input");

            if ( assembler.complete() )
            {
                Buffer bComplete = baselineCodec == null ? output : new Buffer();
                final int offsetAfterHeaders = assembler.copyToBuffer(bComplete);
               
                frameAssemblers.remove(timestamp);
                frameAssemblers.removeOlderThan(timestamp);    // we have a complete frame, so any earlier fragments are not needed, as they are for older (incomplete) frames.
               
                if (TRACE)
                {
                    System.out.println("COMPLETE: ts=" + timestamp + " bComplete.getLength()=" + bComplete.getLength());
                }
               
                if ( lastRTPtimestamp == -1 )
                {
                    lastRTPtimestamp = input.getTimeStamp();
View Full Code Here

TOP

Related Classes of javax.media.Buffer

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact coftware#gmail.com.