Package: com.xuggle.xuggler

Examples of com.xuggle.xuggler.IVideoPicture


  public void testGetDataLineSize()
  {
    final int WIDTH = 420;
    final int HEIGHT= 360;
    final IPixelFormat.Type TYPE = IPixelFormat.Type.YUV420P;
    IVideoPicture pic = IVideoPicture.make(TYPE, WIDTH, HEIGHT);
    pic.setComplete(true, TYPE, WIDTH, HEIGHT, 0);
    assertEquals(WIDTH, pic.getDataLineSize(0));
    assertEquals(WIDTH/2, pic.getDataLineSize(1));
    assertEquals(WIDTH/2, pic.getDataLineSize(2));
    assertEquals(0, pic.getDataLineSize(3));
    assertEquals(0, pic.getDataLineSize(4));
    assertEquals(0, pic.getDataLineSize(5));
    assertEquals(0, pic.getDataLineSize(6));
    assertEquals(0, pic.getDataLineSize(7));
    assertEquals(-1, pic.getDataLineSize(8));
    assertEquals(-1, pic.getDataLineSize(-1));
  }
View Full Code Here


  // Variant of the line-size test above, but the picture wraps a
  // caller-supplied IBuffer instead of an internally allocated one.
  // (The method signature lies outside this excerpt.)
  {
    final int WIDTH = 420;
    final int HEIGHT= 360;
    final IPixelFormat.Type TYPE = IPixelFormat.Type.YUV420P;
    // YUV420P stores 12 bits per pixel, hence WIDTH*HEIGHT*1.5 bytes total.
    final IBuffer buf = IBuffer.make(null, (int) (WIDTH*HEIGHT*1.5));
    IVideoPicture pic = IVideoPicture.make(buf, TYPE, WIDTH, HEIGHT);
    pic.setComplete(true, TYPE, WIDTH, HEIGHT, 0);
    // Luma plane is full width; the two chroma planes are half width.
    assertEquals(WIDTH, pic.getDataLineSize(0));
    assertEquals(WIDTH/2, pic.getDataLineSize(1));
    assertEquals(WIDTH/2, pic.getDataLineSize(2));
    // Valid-but-unused plane indices report zero.
    assertEquals(0, pic.getDataLineSize(3));
    assertEquals(0, pic.getDataLineSize(4));
    assertEquals(0, pic.getDataLineSize(5));
    assertEquals(0, pic.getDataLineSize(6));
    assertEquals(0, pic.getDataLineSize(7));
    // Out-of-range plane indices report -1.
    assertEquals(-1, pic.getDataLineSize(8));
    assertEquals(-1, pic.getDataLineSize(-1));
  }
View Full Code Here

      if (packet.getStreamIndex() == videoStreamId)
      {
        /*
         * We allocate a new picture to get the data out of Xuggler
         */
        IVideoPicture picture = IVideoPicture.make(videoCoder.getPixelType(),
            videoCoder.getWidth(), videoCoder.getHeight());

        int offset = 0;
        while(offset < packet.getSize())
        {
          /*
           * Now, we decode the video, checking for any errors.
           *
           */
          int bytesDecoded = videoCoder.decodeVideo(picture, packet, offset);
          if (bytesDecoded < 0)
            throw new RuntimeException("got error decoding video in: "
                + filename);
          offset += bytesDecoded;

          /*
           * Some decoders will consume data in a packet, but will not be able to construct
           * a full video picture yet.  Therefore you should always check if you
           * got a complete picture from the decoder
           */
          if (picture.isComplete())
          {
            IVideoPicture newPic = picture;
            /*
             * If the resampler is not null, that means we didn't get the
             * video in BGR24 format and
             * need to convert it into BGR24 format.
             */
            if (resampler != null)
            {
              // we must resample
              newPic = IVideoPicture.make(resampler.getOutputPixelFormat(),
                  picture.getWidth(), picture.getHeight());
              if (resampler.resample(newPic, picture) < 0)
                throw new RuntimeException("could not resample video from: "
                    + filename);
            }
            if (newPic.getPixelType() != IPixelFormat.Type.BGR24)
              throw new RuntimeException("could not decode video" +
                  " as BGR 24 bit data in: " + filename);

            /**
             * We could just display the images as quickly as we decode them,
View Full Code Here

      System.out.println(e.getMessage());
      e.printStackTrace(System.out);
    }

    long timeStamp = (now - firstTimeStamp)*1000; // convert to microseconds
    IVideoPicture outFrame = converter.toPicture(worksWithXugglerBufferedImage,
        timeStamp);

    outFrame.setQuality(0);
    int retval = outStreamCoder.encodeVideo(packet, outFrame, 0);
    if (retval < 0)
      throw new RuntimeException("could not encode video");
    if (packet.isComplete())
    {
View Full Code Here

    // create the video picture and get its underlying buffer

    final AtomicReference<JNIReference> ref =
      new AtomicReference<JNIReference>(null);
    IVideoPicture resamplePicture = null;
    try
    {
      IVideoPicture picture = IVideoPicture.make(getRequiredPictureType(), image.getWidth(),
          image.getHeight());
      ByteBuffer pictureByteBuffer = picture.getByteBuffer(ref);

      if (imageInts != null)
      {
        pictureByteBuffer.order(ByteOrder.BIG_ENDIAN);
        IntBuffer pictureIntBuffer = pictureByteBuffer.asIntBuffer();
        pictureIntBuffer.put(imageInts);
      }
      else
      {
        pictureByteBuffer.put(imageBytes);
      }
      pictureByteBuffer = null;
      picture.setComplete(true, getRequiredPictureType(), image.getWidth(),
          image.getHeight(), timestamp);

      // resample as needed
      if (willResample())
      {
View Full Code Here

    // test that the picture is valid

    validatePicture(picture);

    // resample as needed
    IVideoPicture resamplePicture = null;
    AtomicReference<JNIReference> ref =
      new AtomicReference<JNIReference>(null);
    try
    {
    if (willResample())
    {
      resamplePicture = resample(picture, mToImageResampler);
      picture = resamplePicture;
    }

    // get picture parameters
   
    final int w = picture.getWidth();
    final int h = picture.getHeight();
   
    // make a copy of the raw bytes into a DataBufferByte which the
    // writable raster can operate on

    final ByteBuffer byteBuf = picture.getByteBuffer(ref);
    final byte[] bytes = new byte[picture.getSize()];
    byteBuf.get(bytes, 0, bytes.length);
  
    // create the data buffer from the bytes
   
    final DataBufferByte db = new DataBufferByte(bytes, bytes.length);
   
    // create a sample model which matches the byte layout of the
    // image data and a raster which contains the data, which now can be
    // properly interpreted
   
    final SampleModel sm = new PixelInterleavedSampleModel(
      db.getDataType(), w, h, 3, 3 * w, mBandOffsets);
    final WritableRaster wr = Raster.createWritableRaster(sm, db, null);
   
    // create a color model
   
    final ColorModel colorModel = new ComponentColorModel(
      mColorSpace, false, false, ColorModel.OPAQUE, db.getDataType());
   
    // return a new image created from the color model and raster
   
    return new BufferedImage(colorModel, wr, false, null);
    }
    finally
    {
      if (resamplePicture!=null)
        resamplePicture.delete();
      if (ref.get()!=null)
        ref.get().delete();
    }
  }
View Full Code Here

  protected static IVideoPicture resample(IVideoPicture picture1,
    IVideoResampler resampler)
  {
    // create new picture object

    IVideoPicture picture2 = IVideoPicture.make(
      resampler.getOutputPixelFormat(),
      resampler.getOutputWidth(),
      resampler.getOutputHeight());

    // resample

    if (resampler.resample(picture2, picture1) < 0)
      throw new RuntimeException(
        "could not resample from " + resampler.getInputPixelFormat() +
        " to " + resampler.getOutputPixelFormat() +
        " for picture of type " + picture1.getPixelType());

    // test that it worked

    if (picture2.getPixelType() != resampler.getOutputPixelFormat()
      || !picture2.isComplete())
    {
      throw new RuntimeException(
        "did not resample from " + resampler.getInputPixelFormat() +
        " to " + resampler.getOutputPixelFormat() +
        " for picture of type " + picture1.getPixelType());
View Full Code Here

    }

    // create the video picture and get its underlying buffer

    final AtomicReference<JNIReference> ref = new AtomicReference<JNIReference>(null);
    IVideoPicture resamplePicture = null;
    try
    {
      IVideoPicture picture = IVideoPicture.make(getRequiredPictureType(), image.getWidth(),
          image.getHeight());

      ByteBuffer pictureByteBuffer = picture.getByteBuffer(ref);

      if (imageInts != null)
      {
        pictureByteBuffer.order(ByteOrder.BIG_ENDIAN);
        IntBuffer pictureIntBuffer = pictureByteBuffer.asIntBuffer();
        pictureIntBuffer.put(imageInts);
      }
      else
      {
        pictureByteBuffer.put(imageBytes);
      }
      pictureByteBuffer = null;
      picture.setComplete(true, getRequiredPictureType(), image.getWidth(),
          image.getHeight(), timestamp);

      // resample as needed
      if (willResample()) {
        resamplePicture = picture;
View Full Code Here

    validatePicture(picture);

    // resample as needed

    IVideoPicture resamplePic = null;
    final AtomicReference<JNIReference> ref =
      new AtomicReference<JNIReference>(null);
   
    ByteBuffer byteBuf = null;
    IntBuffer intBuf = null;
    int[] ints = null;
    DataBufferInt db = null;
    SampleModel sm = null;
    WritableRaster wr = null;
   
   
    try
    {
      if (willResample())
      {
        resamplePic = resample(picture, mToImageResampler);
        picture = resamplePic;
      }
      // get picture parameters

      final int w = picture.getWidth();
      final int h = picture.getHeight();

      // make a copy of the raw bytes in the picture and convert those to
      // integers

      byteBuf = picture.getByteBuffer(ref);

      // now, for this class of problems, we don't want the code
      // to switch byte order, so we'll pretend it's in native java order

      byteBuf.order(ByteOrder.BIG_ENDIAN);
      intBuf = byteBuf.asIntBuffer();
      ints = new int[picture.getSize() / 4];
      intBuf.get(ints, 0, ints.length);

      // create the data buffer from the ints

      db = new DataBufferInt(ints, ints.length);

      // create a sample model which matches the byte layout of the
      // image data and a raster which contains the data, which now can be
      // properly interpreted

      sm = new SinglePixelPackedSampleModel(db.getDataType(),
          w, h, mBitMasks);
      wr = Raster.createWritableRaster(sm, db, null);

      // return a new image created from the color model and raster

      return new BufferedImage(mColorModel, wr, false, null);
    }
    finally
    {
      if (resamplePic != null)
        resamplePic.delete();
      if (ref.get() != null)
        ref.get().delete();
     
//      clean this stuff up
        if (byteBuf != null) {
View Full Code Here

        mAudoStreamIndex));
    }

    public void onVideoPicture(IVideoPictureEvent event)
    {
      IVideoPicture picture = event.getMediaData();
      long originalTimeStamp = picture.getTimeStamp();

      // set the new time stamp to the original plus the offset established
      // for this media file

      long newTimeStamp = originalTimeStamp + mOffset;

      // keep track of the predicted time of the next video picture; if the end
      // of the media file is encountered, then the offset will be adjusted
      // to this time.
      //
      // You'll note in the audio samples listener above we used
      // a method called getNextPts().  Video pictures don't have
      // a similar method because frame-rates can be variable, so
      // we don't know.  The minimum thing we do know though (since
      // all media containers require media to have monotonically
      // increasing time stamps), is that the next video timestamp
      // should be at least one tick ahead.  So, we fake it.
     
      mNextVideo = originalTimeStamp + 1;

      // set the new timestamp on video samples

      picture.setTimeStamp(newTimeStamp);

      // create a new video picture event with the one true video stream
      // index

      super.onVideoPicture(new VideoPictureEvent(this, picture,
View Full Code Here

TOP

Related Classes of com.xuggle.xuggler.IVideoPicture

Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.