Package javax.media

Examples of javax.media.Buffer
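
javax.media.Buffer is the container JMF uses to pass a chunk of media data, together with its format, timing and flag metadata, between sources, codecs, multiplexers and renderers. As a quick orientation before the project snippets below, here is a minimal, self-contained sketch of how a buffer is typically populated (the payload and format values are made up for illustration):

    import java.awt.Dimension;
    import javax.media.Buffer;
    import javax.media.format.VideoFormat;

    byte[] frameData = new byte[1024];                       // hypothetical encoded frame
    Buffer b = new Buffer();
    b.setFormat(new VideoFormat(VideoFormat.JPEG, new Dimension(320, 240),
            frameData.length, byte[].class, 25.0f));
    b.setData(frameData);
    b.setOffset(0);                                          // payload starts at index 0
    b.setLength(frameData.length);                           // number of valid bytes
    b.setTimeStamp(System.nanoTime());                       // Buffer time stamps are in nanoseconds
    b.setFlags(Buffer.FLAG_KEY_FRAME);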


    if (o instanceof IOException)
    {
      fullFormat = s.getFormat();
      return;
    }
    final Buffer input = (Buffer) o;
   
   
    final Format f = s.getFormat();   // TODO
    if (f.getEncoding().equals(VideoFormat.JPEG_RTP) && f.getClass() == VideoFormat.class)
    {  final VideoFormat vf = (VideoFormat) f;
      final JpegRTPHeader jpegRtpHeader = input.getLength() >= JpegRTPHeader.HEADER_SIZE ? JpegRTPHeader.parse((byte[]) input.getData(), input.getOffset()) : null;
      if (jpegRtpHeader == null)
      {  logger.warning("Expected buffer to be large enough for JPEG RTP Header");  // TODO: we could read buffers until we get one big enough.
        fullFormat = f;
        return;   // without a parsed header there are no dimensions to build the full format from
      }
      fullFormat = new VideoFormat(vf.getEncoding(), new Dimension(jpegRtpHeader.getWidthInPixels(), jpegRtpHeader.getHeightInPixels()), vf.getMaxDataLength(), vf.getDataType(), vf.getFrameRate());
View Full Code Here


    try
      {
        final Object o = q.get();
        if (o instanceof IOException)
          throw (IOException) o;
        final Buffer b = (Buffer) o;
        buffer.copy(b);
        return;
      }
      catch (InterruptedException e)
      {  throw new InterruptedIOException("" + e);
View Full Code Here

    public void transferData(PushBufferStream stream)
    {
      if (fullFormat == null)
      {
        final Buffer b = new Buffer();

        try
        {
          s.read(b);
        }
View Full Code Here

            Buffer recvBuffer = new Buffer();
            try {
                // Make sure that buffer is big enough.  kenlars99 6/3/07.
                if (buffer.length < MAX_PUSHBUFFER_DATA_SIZE + RTPHeader.SIZE) {
                    buffer = new byte[MAX_PUSHBUFFER_DATA_SIZE + RTPHeader.SIZE];
                }
               
                // According to the API, if the caller sets the
        // data in the buffer, the stream should not allocate it.
        // See http://java.sun.com/products/java-media/jmf/2.1.1/apidocs/javax/media/protocol/PushBufferStream.html
        // The original implementation here assumed this.  But in JMF there are some badly behaving PushBufferStreams,
                // namely those in RTPSyncBufferMux, which give us a new buffer.  So we'll check for that below, and
                // copy it back to buffer.  kenlars99 7/13/07.
                recvBuffer.setData(buffer);
                recvBuffer.setOffset(RTPHeader.SIZE);
                recvBuffer.setLength(buffer.length - RTPHeader.SIZE);
                stream.read(recvBuffer);
                if (recvBuffer.getLength() > 0)
                {
                   
                    // copy the data back into buffer, if the stream put it in a different byte array. kenlars99 7/13/07.
                    if (recvBuffer.getData() != buffer)
                      System.arraycopy(recvBuffer.getData(), recvBuffer.getOffset(), buffer, RTPHeader.SIZE, recvBuffer.getLength());

                    /* We set the marker flag if we're at the end of a video frame */
                    boolean marker = (recvBuffer.getFlags() & Buffer.FLAG_RTP_MARKER) != 0;
                    //printFlags(recvBuffer);
                    //System.out.println("MARKER : "+recvBuffer.getFlags()+" / "+ recvBuffer.FLAG_RTP_MARKER +" => "+marker);
                    //System.out.println("TIME "+recvBuffer.getTimeStamp());

                    long rtpTimestamp = 0;

                    // mgodehardt: the timestamp value in the RTP packet depends on the payload type; for ULAW
                    // it is easy: 160 bytes in a packet means add 160 to the timestamp, see below

                    // TODO: add other formats, at the moment only ULAW works
                    if ( AudioFormat.ULAW_RTP.equals(stream.getFormat().getEncoding()) || AudioFormat.GSM_RTP.equals(stream.getFormat().getEncoding()) )
                    {
                        rtpTimestamp = lastTimestamp;
                    }
                    else if ( VideoFormat.JPEG_RTP.equals(stream.getFormat().getEncoding()) )
                    {
                        rtpTimestamp = lastTimestamp;

                        if ( 0 == lastBufferTimestamp )
                        {
                            lastBufferTimestamp = System.nanoTime() / 1000000L;
                        }
                    }

                    // RFC 3550 Page 13, The timestamp reflects the sampling instant of the first octet in the buffer
                    writeHeaderToBuffer(marker, rtpTimestamp);

                    // copy the data back into buffer, if the stream put it in a different byte array. kenlars99 7/13/07.
                    if (recvBuffer.getData() != buffer)
                    {
                        System.arraycopy(recvBuffer.getData(), recvBuffer.getOffset(), buffer, RTPHeader.SIZE, recvBuffer.getLength());
                    }
                   
                    ///long encodeTime = (System.nanoTime() - recvBuffer.getTimeStamp()) / 1000000L;
                    ///System.out.println("### " + (recvBuffer.getLength() + RTPHeader.SIZE) + " " + marker + " rtpTimestamp=" + rtpTimestamp + " encodeTime=" + encodeTime);
                    rtpDataStream.write(buffer, 0, recvBuffer.getLength() + RTPHeader.SIZE);
                   
                    // TODO: add other formats
                    if ( AudioFormat.ULAW_RTP.equals(stream.getFormat().getEncoding()) )
                    {
                        // RFC 3550 Page 13, fixed rate audio should increment timestamp by one for each sampling period
                        // we use 8000Hz for ULAW/PCMU
                        lastTimestamp += recvBuffer.getLength();
                    }
                    else if ( AudioFormat.GSM_RTP.equals(stream.getFormat().getEncoding()) )
                    {
                        // RFC 3550 Page 13, fixed rate audio should increment timestamp by one for each sampling period
                        // we use 8000Hz for GSM 6.10
                        lastTimestamp += (recvBuffer.getLength() * 8000) / 1650;
                    }
                    else if ( VideoFormat.JPEG_RTP.equals(stream.getFormat().getEncoding()) )
                    {
                        if ( (recvBuffer.getFlags() & Buffer.FLAG_RTP_MARKER) > 0 )
                        {
                            long currentTime = (System.nanoTime() / 1000000L);
                       
                            // we use 90000Hz for JPEG ( see RFC 3551 )
                            long diffTime = currentTime - lastBufferTimestamp;
                            lastTimestamp += (diffTime * 90);
                           
                            lastBufferTimestamp = currentTime;
                        }
                    }

                    // must be done here, lastTimestamp and lastSendTime must have a relationship
                    lastSendTime = System.currentTimeMillis() - RTCPSenderInfo.MSB_1_BASE_TIME;
                    if ( initialSendTime == -1 )
                    {
                        initialSendTime = lastSendTime;
                    }
                }
            } catch (IOException e) {
              logger.log(Level.WARNING, "" + e, e);
            }
           
            stats.addPDUTransmitted();
            RTPHeader header=null;
            try
            {
                header = new RTPHeader(buffer, 0, RTPHeader.SIZE);
                //System.out.println("PADDING for "+header.getSsrc()+" : "+header.getPadding()+" ["+(int)buffer[buffer.length-1]+"]");
                int len = recvBuffer.getLength() - (header.getPadding() > 0 ? (int)buffer[buffer.length-1]:0);
                //System.out.println("ADDING LEN "+len);
                stats.addBytesTransmitted(len);
            } catch (IOException ex)
            {
                //ex.printStackTrace();
              // TODO: why don't we log this?
            }
           
            rtpMgr.RTPPacketSent(lastSendTime, recvBuffer.getLength()+RTPHeader.SIZE);
        }
    }
View Full Code Here
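
The timestamp bookkeeping in the snippet above follows RFC 3550/3551: ULAW/PCMU uses an 8000 Hz clock with one timestamp unit per 8-bit sample (so the timestamp advances by the payload length in bytes), while JPEG uses a 90000 Hz clock, so the timestamp advances by 90 units per elapsed millisecond between frames. A hypothetical helper restating just that arithmetic (the class and method names are illustrative, not part of the project above):

    /** Illustrative RTP timestamp clock, mirroring the increments used above. */
    class RtpTimestampClock
    {
        private long timestamp;         // current RTP timestamp
        private long lastFrameMillis;   // wall clock of the previous JPEG frame, 0 = none yet

        /** ULAW/PCMU: 8000 Hz clock, one 8-bit sample per payload byte. */
        void advanceUlaw(int payloadLengthInBytes)
        {
            timestamp += payloadLengthInBytes;
        }

        /** JPEG: 90000 Hz clock, advanced by the wall-clock time between frames. */
        void advanceJpeg(long nowMillis)
        {
            if (lastFrameMillis != 0)
                timestamp += (nowMillis - lastFrameMillis) * 90;
            lastFrameMillis = nowMillis;
        }

        long current()
        {
            return timestamp;
        }
    }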

     * Removes the first buffer from the queue
     * @return the buffer removed
     */
    public synchronized Buffer remove() {
        if (firstElement != -1) {
            Buffer value = queue[firstElement];
            maxEmpty++;
            emptyPos[maxEmpty] = firstElement;
            firstElement = nextPos[firstElement];
            if (firstElement != -1) {
                prevPos[firstElement] = -1;
            }
            if (statistics != null) {
                statistics.addPDUProcessed();
            }
            long seq = value.getSequenceNumber();
            if (lastSequenceSent == -1) {
                lastSequenceSent = seq;
            } else if ((seq < lastSequenceSent) && (seq != 0) &&
                    (lastSequenceSent != RTPHeader.MAX_SEQUENCE)) {
                if (statistics != null) {
View Full Code Here
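
The class above manages its own index arrays (queue, nextPos, prevPos, emptyPos) so that removing a buffer never allocates. A much simpler sketch of the same remove-and-track-last-sequence idea, built on java.util.ArrayDeque and only the javax.media.Buffer API, might look like this (illustration only; it does not reproduce the statistics or sequence wrap-around handling above):

    import java.util.ArrayDeque;
    import javax.media.Buffer;

    class SimpleBufferQueue
    {
        private final ArrayDeque<Buffer> queue = new ArrayDeque<Buffer>();
        private long lastSequenceRemoved = -1;

        public synchronized void add(Buffer b)
        {
            queue.addLast(b);
        }

        /** Removes and returns the oldest buffer, or null if the queue is empty. */
        public synchronized Buffer remove()
        {
            Buffer b = queue.pollFirst();
            if (b != null)
                lastSequenceRemoved = b.getSequenceNumber();
            return b;
        }
    }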

    RateConverter c = new RateConverter();
    c.setInputFormat(inputFormat);
    c.setOutputFormat(outputFormat);

    c.open();
    Buffer b = new Buffer();
    b.setFormat(inputFormat);
    b.setData(inputBufferData);
    b.setOffset(0);
    b.setLength(inputBufferData.length);
    Buffer outputBuffer = new Buffer();
   
    assertEquals(c.process(b, outputBuffer), Codec.BUFFER_PROCESSED_OK);
   
    assertEquals(outputBuffer.getOffset(), 0);
    byte[] outputBufferData = (byte[]) outputBuffer.getData();
   
    if (outputBuffer.getLength() != targetOutputBufferData.length)
    {  System.out.print("output:");
      dump(outputBufferData, outputBuffer.getLength());
      System.out.print("target:");
      dump(targetOutputBufferData, targetOutputBufferData.length);
    }
    assertEquals(outputBuffer.getLength(), targetOutputBufferData.length);
    assertEquals(outputBuffer.getFlags(), 0);
    assertEquals(outputBuffer.getFormat(), outputFormat);
   
   
    boolean eq = true;
    for (int i = 0; i < outputBuffer.getLength(); ++i)
    {  if (outputBufferData[i] != targetOutputBufferData[i])
      {  eq = false;
        break;
      }
    }
   
    if (!eq)
    {
      Buffer bClone = (Buffer) outputBuffer.clone();
      bClone.setData(targetOutputBufferData);
      System.err.println("Target: " + LoggingStringUtils.bufferToStr(bClone));
      System.err.println("Actual: " + LoggingStringUtils.bufferToStr(outputBuffer));
    }
   
    for (int i = 0; i < outputBuffer.getLength(); ++i)
View Full Code Here
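
The test above only checks for Codec.BUFFER_PROCESSED_OK, but process() can also report other javax.media.PlugIn codes (failure, input not fully consumed, output not filled). A hedged sketch of a more defensive call, reusing the codec and buffers set up as in the test:

    import javax.media.PlugIn;

    int result = c.process(b, outputBuffer);
    if ((result & PlugIn.BUFFER_PROCESSED_FAILED) != 0)
    {
        // the codec could not handle this buffer; drop or report it
    }
    else
    {
        if ((result & PlugIn.INPUT_BUFFER_NOT_CONSUMED) != 0)
        {
            // b still holds unconsumed data and must be offered to process() again
        }
        if ((result & PlugIn.OUTPUT_BUFFER_NOT_FILLED) != 0)
        {
            // outputBuffer was not filled this round; do not read it yet
        }
        // otherwise (BUFFER_PROCESSED_OK) outputBuffer is ready to use
    }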

       
        int sequence = header.getSequence();
        long timestamp = header.getTimestamp();
        calculateJitter(timestamp);
       
        Buffer buffer = new Buffer();
        buffer.setHeader(header);
        buffer.setData(data);
        buffer.setOffset(offset);
        buffer.setLength(length);
        buffer.setTimeStamp(timestamp);
        buffer.setSequenceNumber(sequence);

        buffer.setDuration(Math.round((double)length * durationMultiplier * 1000000.0d));

        int flags = Buffer.FLAG_RTP_TIME | Buffer.FLAG_RELATIVE_TIME;
        if (header.getMarker() == 1)
        {
            flags |= Buffer.FLAG_RTP_MARKER;
        }
        buffer.setFlags(flags);

        if ( null != handler )
        {
            if ( !itsRTPBuffer.add(buffer) )
            {
View Full Code Here
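
On the receiving side of this path, a consumer can test the flags set above the same way the earlier transferData() snippet does for the marker bit; a tiny illustration (buffer here is assumed to be the one just built):

    boolean endOfVideoFrame   = (buffer.getFlags() & Buffer.FLAG_RTP_MARKER) != 0;
    boolean timestampIsRtp    = (buffer.getFlags() & Buffer.FLAG_RTP_TIME) != 0;
    boolean timestampRelative = (buffer.getFlags() & Buffer.FLAG_RELATIVE_TIME) != 0;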

    @Override
    public void read(Buffer buffer) throws IOException
    {
        try
        {
            Buffer data = itsRTPBuffer.remove();
           
            if ((data != null) && (buffer != null))
            {
                buffer.setData(data.getData());
                buffer.setOffset(data.getOffset());
                buffer.setLength(data.getLength());
                buffer.setTimeStamp(data.getTimeStamp());
                buffer.setSequenceNumber(data.getSequenceNumber());
                buffer.setFlags(data.getFlags());
                buffer.setFormat(format);
   
                // mgodehardt: will measure throughput in bits per second for the BitRateControl
                // don't know if this is the right place to measure the throughput
                long currentTimestamp = System.nanoTime();
                if ( -1 == lastTimestamp )
                {
                    lastTimestamp = currentTimestamp;
                }
               
                bytesProcessed += data.getLength();
               
                if ( (currentTimestamp - lastTimestamp) > 1000000000L )
                {
                    bitsPerSecond = bytesProcessed << 3;
                    bytesProcessed = 0;
View Full Code Here
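
The bits-per-second measurement above accumulates bytes and, once a second of wall-clock time has passed, converts the count to bits and resets it. The same technique as a standalone sketch (the class name is illustrative, not from the project):

    /** Illustrative one-second throughput meter. */
    class ThroughputMeter
    {
        private long windowStartNanos = -1;
        private long bytesInWindow;
        private long bitsPerSecond;

        void onData(int lengthInBytes)
        {
            long now = System.nanoTime();
            if (windowStartNanos == -1)
                windowStartNanos = now;
            bytesInWindow += lengthInBytes;
            if (now - windowStartNanos > 1000000000L)   // one second elapsed
            {
                bitsPerSecond = bytesInWindow << 3;     // bytes -> bits
                bytesInWindow = 0;
                windowStartNanos = now;
            }
        }

        long getBitsPerSecond()
        {
            return bitsPerSecond;
        }
    }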

          // underlying buffer, and queue it to ourself and all other cloned streams of the same
          // underlying stream (same stream index):
         
          if (bufferQueue.isEmpty())
          {
            final Buffer originalBuffer = new Buffer();  // TODO: find a way to reuse buffers/avoid allocating new memory each time
            stream.read(originalBuffer);
            try
            {
              for (ClonedDataSource clone : clones)
              {
                final ClonedDataSource.ClonedPullBufferStream clonedStream = (ClonedDataSource.ClonedPullBufferStream) clone.getStreams()[streamIndex];
                clonedStream.getBufferQueue().put((Buffer) originalBuffer.clone());
              }
            }
            catch (InterruptedException e)
            {
              logger.log(Level.WARNING, "" + e, e);
              throw new InterruptedIOException();
            }
          }
        }
       
        Buffer nextBuffer = null;
        try
        {
          nextBuffer = (Buffer) bufferQueue.get();
        } catch (InterruptedException e)
        {
          throw new InterruptedIOException("" + e);
        }
        if (nextBuffer.isEOM())
          eos = true;
       
        buffer.copy(nextBuffer);
       
       
View Full Code Here

{
 
  public void testVideoCodec3()
  {
    MyVideoCodec c = new MyVideoCodec();
    Buffer b = new Buffer();
    VideoFormat f = new VideoFormat("xyz", new Dimension(1, 2), 1000, byte[].class, 2.f);
    c.doUpdateOutput(b, f, 3333, 44);
    assertTrue(b.getFormat() == f);
    assertTrue(b.getLength() == 3333);
    assertTrue(b.getOffset() == 44);
    assertFalse(b.isDiscard());
    assertFalse(b.isEOM());
   
    c.doUpdateOutput(b, null, 3335, 45);
    assertTrue(b.getFormat() == null);
    assertTrue(b.getLength() == 3335);
    assertTrue(b.getOffset() == 45);

   
   
  }
View Full Code Here
