Package com.xuggle.xuggler

Examples of com.xuggle.xuggler.IStreamCoder


  public void testGetCodecTagArray()
  {
    IContainer container = IContainer.make();
    assertTrue("should be able to open",
        container.open("fixtures/subtitled_video.mkv", IContainer.Type.READ, null) >= 0);
    IStreamCoder coder = container.getStream(0).getStreamCoder();
  
    char[] tag = coder.getCodecTagArray();
    assertNotNull("should exist", tag);
    assertEquals("should always be 4", 4, tag.length);
    for(int i = 0; i < tag.length; i++)
      assertEquals("should be 0 by default", (char)0, tag[i]);
    coder.setCodecTag(0xDEADBEEF);
    assertEquals("should be set now", 0xDEADBEEF, coder.getCodecTag());
    tag = coder.getCodecTagArray();
    assertNotNull("should exist", tag);
    assertEquals("should always be 4", 4, tag.length);
    assertEquals("test value", 0xDE, tag[3]);
    assertEquals("test value", 0xAD, tag[2]);
    assertEquals("test value", 0xBE, tag[1]);
View Full Code Here
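
The asserts above depend on the byte order of the returned array: getCodecTagArray() puts the least significant byte of the 32-bit tag in index 0, so 0xDEADBEEF shows up as tag[3] == 0xDE. A minimal sketch of that packing, using a hypothetical helper that is not part of the Xuggler API:

  // hypothetical helper: pack the 4-element tag array back into the 32-bit
  // value, matching the ordering the asserts above check (tag[0] holds the
  // least significant byte)
  private static int packCodecTag(char[] tag)
  {
    return (tag[0] & 0xFF)
        | ((tag[1] & 0xFF) << 8)
        | ((tag[2] & 0xFF) << 16)
        | ((tag[3] & 0xFF) << 24);
  }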


  public void testSetCodecTagArray()
  {
    IContainer container = IContainer.make();
    assertTrue("should be able to open",
        container.open("fixtures/subtitled_video.mkv", IContainer.Type.READ, null) >= 0);
    IStreamCoder coder = container.getStream(0).getStreamCoder();
  
    char[] tag = new char[4];
    tag[3] = 0xDE;
    tag[2] = 0xAD;
    tag[1] = 0xBE;
    tag[0] = 0xEF;
    coder.setCodecTag(tag);
    assertEquals("should be set now", 0xDEADBEEF, coder.getCodecTag());
  }
View Full Code Here

  }

  @Test
  public void testGetDefaultAudioFrameSize()
  {
    IStreamCoder coder = getStreamCoder(sampleFile, 0);
    assertNotNull(coder);
   
    assertEquals(coder.getDefaultAudioFrameSize(), 576);
    coder.setDefaultAudioFrameSize(3);
    assertEquals(coder.getDefaultAudioFrameSize(), 3);
    // sample file has Nellymoser audio, which has a non-default frame size
    assertTrue(coder.getAudioFrameSize() != coder.getDefaultAudioFrameSize());
   
    coder = IStreamCoder.make(IStreamCoder.Direction.ENCODING);
    coder.setCodec(ICodec.ID.CODEC_ID_PCM_S16LE);
    coder.setSampleRate(22050);
    coder.setChannels(1);
    assertTrue(coder.open() >= 0);
    assertEquals(coder.getAudioFrameSize(), coder.getDefaultAudioFrameSize());
    coder.setDefaultAudioFrameSize(3);
    assertEquals(coder.getAudioFrameSize(), coder.getDefaultAudioFrameSize());
   
  }
View Full Code Here
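
The frame size is the number of samples the codec consumes per encoded packet, which makes it a natural size for the sample buffer handed to the encoder. A minimal sketch, assuming an already opened audio IStreamCoder named coder:

    // sketch: size an IAudioSamples buffer from the codec's frame size;
    // 576 is only the default, and codecs like Nellymoser report their own value
    int frameSize = coder.getAudioFrameSize();
    IAudioSamples samples = IAudioSamples.make(frameSize, coder.getChannels());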

   
  }
 
  private IStreamCoder getStreamCoder(String url, int index)
  {
    IStreamCoder retval = null;
    int errorVal = 0;
    mContainer = IContainer.make();
    assertTrue(mContainer != null);
   
    errorVal = mContainer.open(url, IContainer.Type.READ, null);
View Full Code Here
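
The helper is truncated above; a plausible continuation (a sketch assuming the fields already shown, not necessarily the author's exact code) verifies that the open succeeded and then pulls the coder for the requested stream:

    // sketch: check the open result, then fetch the requested stream's coder
    assertTrue("could not open " + url, errorVal >= 0);
    assertTrue(index < mContainer.getNumStreams());
    retval = mContainer.getStream(index).getStreamCoder();
    assertNotNull(retval);
    return retval;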

  }
 
  @Test
  public void testGetPropertyNames()
  {
    IStreamCoder coder = IStreamCoder.make(Direction.ENCODING);
    Collection<String> properties = coder.getPropertyNames();
    assertTrue(properties.size() > 0);
    for(String name : properties)
    {
      String value = coder.getPropertyAsString(name);
      log.debug("{}={}", name, value);
    }
  }
View Full Code Here
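
The names returned here are the underlying FFmpeg options, so they can also be set by name before the coder is opened. A minimal sketch; the option name "b" (bit rate) is an assumption about the underlying FFmpeg build:

    // sketch: set an FFmpeg option by name on an encoding coder before open()
    IStreamCoder coder = IStreamCoder.make(Direction.ENCODING);
    if (coder.setProperty("b", "250000") < 0)
      log.warn("could not set bit rate property");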

  }
 
  @Test
  public void testGetAutomaticallyStampOutputStream()
  {
    IStreamCoder coder = IStreamCoder.make(Direction.ENCODING);
    assertTrue(coder.getAutomaticallyStampPacketsForStream());
  }
View Full Code Here
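
The flag defaults to true; callers that manage packet time stamps themselves can turn it off with the matching setter. A one-line sketch:

    // sketch: disable automatic stamping when PTS/DTS are managed manually
    coder.setAutomaticallyStampPacketsForStream(false);
    assertFalse(coder.getAutomaticallyStampPacketsForStream());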

    // get the coder, and stream index

    IContainer container = event.getSource().getContainer();
    IStream stream = container.getStream(event.getStreamIndex());
    IStreamCoder coder = stream.getStreamCoder();
    int streamIndex = event.getStreamIndex();

    // if video stream and showing video, configure video stream

    if (coder.getCodecType() == ICodec.Type.CODEC_TYPE_VIDEO &&
      getMode().showVideo())
    {
      // create a converter for this video stream

      IConverter converter = mConverters.get(streamIndex);
      if (null == converter)
      {
        converter = ConverterFactory.createConverter(
            ConverterFactory.XUGGLER_BGR_24, coder.getPixelType(), coder
                .getWidth(), coder.getHeight());
        mConverters.put(streamIndex, converter);
      }

      // get a frame for this stream

      MediaFrame frame = mFrames.get(streamIndex);
      if (null == frame)
      {
        frame = new MediaFrame(mDefaultCloseOperation, stream, this);
        mFrames.put(streamIndex, frame);
        mFrameIndex.put(frame, mNextFrameIndex++);
      }

      // if real time establish video queue

      if (getMode().isRealTime())
        getVideoQueue(streamIndex, frame);
    }

    // if audio stream and playing audio, configure audio stream

    else if (coder.getCodecType() == ICodec.Type.CODEC_TYPE_AUDIO &&
      getMode().playAudio() &&
      getMode().isRealTime())

    {
      // if real time establish audio queue
View Full Code Here
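
Once the BGR24 converter for a stream exists, each complete decoded picture can be turned into a BufferedImage that the MediaFrame can paint. A minimal sketch, assuming a complete IVideoPicture named picture decoded from the same stream:

      // sketch: convert a complete decoded picture into a BufferedImage
      IConverter converter = mConverters.get(streamIndex);
      if (picture.isComplete())
      {
        BufferedImage image = converter.toImage(picture);
        // hand the image to the Swing component that renders this stream
      }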

    /**
     * Keep some convenience pointers for the specific stream we're working
     * on for each packet.
     */
    IStreamCoder ic = null;
    IStreamCoder oc = null;
    IAudioResampler as = null;
    IVideoResampler vs = null;
    IVideoPicture inFrame = null;
    IVideoPicture reFrame = null;

    /**
     * Now, we've already opened the files in #setupStreams(CommandLine). We
     * just keep reading packets from the input container until
     * IContainer#readNextPacket returns a value < 0.
     */
    while (mIContainer.readNextPacket(iPacket) == 0)
    {
      /**
       * Find out which stream this packet belongs to.
       */
      int i = iPacket.getStreamIndex();
      int offset = 0;

      /**
       * Find out if this stream has a starting timestamp
       */
      IStream stream = mIContainer.getStream(i);
      long tsOffset = 0;
      if (stream.getStartTime() != Global.NO_PTS && stream.getStartTime() > 0
          && stream.getTimeBase() != null)
      {
        IRational defTimeBase = IRational.make(1,
            (int) Global.DEFAULT_PTS_PER_SECOND);
        tsOffset = defTimeBase.rescale(stream.getStartTime(), stream
            .getTimeBase());
      }
      /**
       * And look up the appropriate objects that are working on that stream.
       */
      ic = mICoders[i];
      oc = mOCoders[i];
      as = mASamplers[i];
      vs = mVSamplers[i];
      inFrame = mIVideoPictures[i];
      reFrame = mOVideoPictures[i];
      inSamples = mISamples[i];
      reSamples = mOSamples[i];

      if (oc == null)
        // we didn't set up this coder; ignore the packet
        continue;

      /**
       * Find out if the stream is audio or video.
       */
      ICodec.Type cType = ic.getCodecType();

      if (cType == ICodec.Type.CODEC_TYPE_AUDIO && mHasAudio)
      {
        /**
         * Decoding audio works by taking the data in the packet, and eating
         * chunks from it to create decoded raw data.
         *
         * However, a packet may contain more (or less) data than is needed to
         * produce one set of samples, so you need to iterate through the bytes
         * to get all of that data.
         *
         * The following loop is the standard way of doing that.
         */
        while (offset < iPacket.getSize())
        {
          retval = ic.decodeAudio(inSamples, iPacket, offset);
          if (retval <= 0)
            throw new RuntimeException("could not decode audio.  stream: " + i);

          if (inSamples.getTimeStamp() != Global.NO_PTS)
            inSamples.setTimeStamp(inSamples.getTimeStamp() - tsOffset);

          log.trace("packet:{}; samples:{}; offset:{}", new Object[]
          {
              iPacket, inSamples, tsOffset
          });

          /**
           * If there was no error, decodeAudio returns the number of bytes it
           * consumed. We use that so the next time around the loop we get new
           * data.
           */
          offset += retval;
          int numSamplesConsumed = 0;
          /**
           * If the audio resampler (as) is not null, then we know resampling
           * was needed, so we do that resample now.
           */
          if (as != null && inSamples.getNumSamples() > 0)
          {
            retval = as.resample(reSamples, inSamples, inSamples
                .getNumSamples());

            outSamples = reSamples;
          }
          else
          {
            outSamples = inSamples;
          }

          /**
           * Call a hook that allows derived classes to alter the audio frame.
           */

          outSamples = alterAudioFrame(outSamples);

          /**
           * Now that we've resampled, it's time to encode the audio.
           *
           * This workflow is similar to decoding; you may have more, fewer, or
           * just enough audio samples available to encode a packet, so you
           * must iterate through them.
           *
           * Unfortunately (don't ask why) there is a slight difference between
           * encodeAudio and decodeAudio; encodeAudio returns the number of
           * samples consumed, NOT the number of bytes. This can be confusing,
           * and we encourage you to read the IAudioSamples documentation to
           * find out what the difference is.
           *
           * But in any case, the following loop encodes the samples we have
           * into packets.
           */
          while (numSamplesConsumed < outSamples.getNumSamples())
          {
            retval = oc.encodeAudio(oPacket, outSamples, numSamplesConsumed);
            if (retval <= 0)
              throw new RuntimeException("Could not encode any audio: "
                  + retval);
            /**
             * Increment the number of samples consumed, so that the next time
             * through this loop we encode new audio
             */
            numSamplesConsumed += retval;
            log.trace("out packet:{}; samples:{}; offset:{}", new Object[]{
                oPacket, outSamples, tsOffset
            });

            writePacket(oPacket);
          }
        }

      }
      else if (cType == ICodec.Type.CODEC_TYPE_VIDEO && mHasVideo)
      {
        /**
         * This encoding workflow is pretty much the same as the one for the
         * audio above.
         *
         * The only major delta is that encodeVideo() will always consume one
         * frame (whereas encodeAudio() might only consume some samples in an
         * IAudioSamples buffer); it might not be able to output a packet yet,
         * but you can assume that it consumes the entire frame.
         */
        IVideoPicture outFrame = null;
        while (offset < iPacket.getSize())
        {
          retval = ic.decodeVideo(inFrame, iPacket, offset);
          if (retval <= 0)
            throw new RuntimeException("could not decode any video.  stream: "
                + i);

          log.trace("decoded vid ts: {}; pkts ts: {}", inFrame.getTimeStamp(),
              iPacket.getTimeStamp());
          if (inFrame.getTimeStamp() != Global.NO_PTS)
            inFrame.setTimeStamp(inFrame.getTimeStamp() - tsOffset);

          offset += retval;
          if (inFrame.isComplete())
          {
            if (vs != null)
            {
              retval = vs.resample(reFrame, inFrame);
              if (retval < 0)
                throw new RuntimeException("could not resample video");
              outFrame = reFrame;
            }
            else
            {
              outFrame = inFrame;
            }

            /**
             * Call a hook that allows derived classes to alter the video frame.
             */

            outFrame = alterVideoFrame(outFrame);

            outFrame.setQuality(0);
            retval = oc.encodeVideo(oPacket, outFrame, 0);
            if (retval < 0)
              throw new RuntimeException("could not encode video");
            writePacket(oPacket);
          }
        }
View Full Code Here
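
Not shown in the truncated snippet: once readNextPacket() returns a value below zero, the encoders are normally flushed so frames buffered inside them are not lost (real code keeps flushing while complete packets come out). A rough sketch of that step, reusing the fields above rather than the author's exact code:

    // sketch: after the read loop, flush data buffered inside each encoder;
    // passing null input asks the coder to emit whatever it still holds
    for (int s = 0; s < mOCoders.length; s++)
    {
      IStreamCoder flushCoder = mOCoders[s];
      if (flushCoder == null)
        continue;
      if (flushCoder.getCodecType() == ICodec.Type.CODEC_TYPE_VIDEO)
        flushCoder.encodeVideo(oPacket, null, 0);
      else
        flushCoder.encodeAudio(oPacket, null, 0);
      if (oPacket.isComplete())
        writePacket(oPacket);
    }
    mOContainer.writeTrailer();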

   * @return the line
   */

  private SourceDataLine getJavaSoundLine(IStream stream)
  {
    IStreamCoder audioCoder = stream.getStreamCoder();
    int streamIndex = stream.getIndex();
    SourceDataLine line = mAudioLines.get(streamIndex);
    if (line == null)
    {
      try
      {
        // establish the audio format, NOTE: xuggler defaults to signed 16 bit
        // samples

        AudioFormat audioFormat = new AudioFormat(audioCoder.getSampleRate(),
          (int) IAudioSamples
          .findSampleBitDepth(audioCoder.getSampleFormat()), audioCoder
          .getChannels(), true, false);
       
        // create the audio line out
       
        DataLine.Info info = new DataLine.Info(SourceDataLine.class,
View Full Code Here
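
The snippet cuts off while building the DataLine.Info; the usual continuation (standard javax.sound.sampled calls, not necessarily the author's exact code) asks the audio system for a matching line, opens and starts it, and caches it per stream:

        // sketch: obtain, open and start a line matching the decoded format,
        // then cache it so later packets on this stream reuse it;
        // AudioSystem.getLine may throw LineUnavailableException
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat);
        line = (SourceDataLine) AudioSystem.getLine(info);
        line.open(audioFormat);
        line.start();
        mAudioLines.put(streamIndex, line);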

       *
       * You can create IStreamCoders yourself using
       * IStreamCoder#make(IStreamCoder.Direction), but then you have to set all
       * parameters yourself.
       */
      IStreamCoder ic = is.getStreamCoder();

      /**
       * Find out what Codec Xuggler guessed the input stream was encoded with.
       */
      ICodec.Type cType = ic.getCodecType();

      mIStreams[i] = is;
      mICoders[i] = ic;
      mOStreams[i] = null;
      mOCoders[i] = null;
      mASamplers[i] = null;
      mVSamplers[i] = null;
      mIVideoPictures[i] = null;
      mOVideoPictures[i] = null;
      mISamples[i] = null;
      mOSamples[i] = null;

      if (cType == ICodec.Type.CODEC_TYPE_AUDIO && mHasAudio
          && (astream == -1 || astream == i))
      {
        /**
         * So it looks like this stream is an audio stream. Now we add an audio
         * stream to the output container that we will use to encode our
         * resampled audio.
         */
        IStream os = mOContainer.addNewStream(i);

        /**
         * And we ask the IStream for an appropriately configured IStreamCoder
         * for output.
         *
         * Unfortunately you still need to specify a lot of things for
         * outputting (because we can't really guess what you want to encode
         * as).
         */
        IStreamCoder oc = os.getStreamCoder();
        String apreset = cmdLine.getOptionValue("apreset");
        if (apreset != null)
          Configuration.configure(apreset, oc);

        mOStreams[i] = os;
        mOCoders[i] = oc;

        /**
         * First, did the user specify an audio codec?
         */
        if (acodec != null)
        {
          ICodec codec = null;
          /**
           * Looks like they did specify one; let's look it up by name.
           */
          codec = ICodec.findEncodingCodecByName(acodec);
          if (codec == null || codec.getType() != cType)
            throw new RuntimeException("could not find encoder: " + acodec);
          /**
           * Now, tell the output stream coder that it's to use that codec.
           */
          oc.setCodec(codec);
        }
        else
        {
          /**
           * Looks like the user didn't specify an output coder for audio.
           *
           * So we ask Xuggler to guess an appropriate output codec based on the
           * URL, container format, and that it's audio.
           */
          ICodec codec = ICodec.guessEncodingCodec(oFmt, null, outputURL, null,
              cType);
          if (codec == null)
            throw new RuntimeException("could not guess " + cType
                + " encoder for: " + outputURL);
          /**
           * Now let's use that.
           */
          oc.setCodec(codec);
        }

        /**
         * In general an IStreamCoder encoding audio needs to know: 1) an ICodec
         * to use. 2) The sample rate and number of channels of the audio. Most
         * everything else can be defaulted.
         */

        /**
         * If the user didn't specify a sample rate to encode as, then just use
         * the same sample rate as the input.
         */
        if (sampleRate == 0)
          sampleRate = ic.getSampleRate();
        oc.setSampleRate(sampleRate);
        /**
         * If the user didn't specify a bit rate to encode as, then just use the
         * same bit rate as the input.
         */
        if (abitrate == 0)
          abitrate = ic.getBitRate();
        if (abitrate == 0)
          // some containers don't give a bit-rate
          abitrate = 64000;
        oc.setBitRate(abitrate);
       
        /**
         * If the user didn't specify the number of channels to encode audio as,
         * just assume we're keeping the same number of channels.
         */
        if (channels == 0)
          channels = ic.getChannels();
        oc.setChannels(channels);

        /**
         * And set the quality (which defaults to 0, or highest, if the user
         * doesn't tell us one).
         */
        oc.setGlobalQuality(aquality);

        /**
         * Now check if our output channels or sample rate differ from our input
         * channels or sample rate.
         *
         * If they do, we're going to need to resample the input audio to be in
         * the right format to output.
         */
        if (oc.getChannels() != ic.getChannels()
            || oc.getSampleRate() != ic.getSampleRate())
        {
          /**
           * Create an audio resampler to do that job.
           */
          mASamplers[i] = IAudioResampler.make(oc.getChannels(), ic
              .getChannels(), oc.getSampleRate(), ic.getSampleRate());
          if (mASamplers[i] == null)
          {
            throw new RuntimeException(
                "could not open audio resampler for stream: " + i);
          }
        }
        else
        {
          mASamplers[i] = null;
        }
        /**
         * Finally, create some buffers for the input and output audio
         * themselves.
         *
         * We'll use these repeatedly during the #run(CommandLine) method.
         */
        mISamples[i] = IAudioSamples.make(1024, ic.getChannels());
        mOSamples[i] = IAudioSamples.make(1024, oc.getChannels());
      }
      else if (cType == ICodec.Type.CODEC_TYPE_VIDEO && mHasVideo
          && (vstream == -1 || vstream == i))
      {
        /**
         * If you're reading these comments, this does the same thing as the
         * above branch, only for video. I'm going to assume you read those
         * comments and will only document something substantially different
         * here.
         */
        IStream os = mOContainer.addNewStream(i);
        IStreamCoder oc = os.getStreamCoder();
        String vpreset = cmdLine.getOptionValue("vpreset");
        if (vpreset != null)
          Configuration.configure(vpreset, oc);

        mOStreams[i] = os;
        mOCoders[i] = oc;

        if (vcodec != null)
        {
          ICodec codec = null;
          codec = ICodec.findEncodingCodecByName(vcodec);
          if (codec == null || codec.getType() != cType)
            throw new RuntimeException("could not find encoder: " + vcodec);
          oc.setCodec(codec);
          oc.setGlobalQuality(0);
        }
        else
        {
          ICodec codec = ICodec.guessEncodingCodec(oFmt, null, outputURL, null,
              cType);
          if (codec == null)
            throw new RuntimeException("could not guess " + cType
                + " encoder for: " + outputURL);

          oc.setCodec(codec);
        }

        /**
         * In general an IStreamCoder encoding video needs to know: 1) an ICodec
         * to use. 2) The Width and Height of the Video 3) The pixel format
         * (e.g. IPixelFormat.Type#YUV420P) of the video data. Most everything
         * else can be defaulted.
         */
        if (vbitrate == 0)
          vbitrate = ic.getBitRate();
        if (vbitrate == 0)
          vbitrate = 250000;
        oc.setBitRate(vbitrate);
        if (vbitratetolerance > 0)
          oc.setBitRateTolerance(vbitratetolerance);

        int oWidth = ic.getWidth();
        int oHeight = ic.getHeight();

        if (oHeight <= 0 || oWidth <= 0)
          throw new RuntimeException("could not find width or height in url: "
              + inputURL);

        /**
         * For this program we don't allow the user to specify the pixel format
         * type; we force the output to be the same as the input.
         */
        oc.setPixelType(ic.getPixelType());

        if (vscaleFactor != 1.0)
        {
          /**
           * In this case, it looks like the output video requires rescaling, so
           * we create an IVideoResampler to do that dirty work.
           */
          oWidth = (int) (oWidth * vscaleFactor);
          oHeight = (int) (oHeight * vscaleFactor);

          mVSamplers[i] = IVideoResampler
              .make(oWidth, oHeight, oc.getPixelType(), ic.getWidth(), ic
                  .getHeight(), ic.getPixelType());
          if (mVSamplers[i] == null)
          {
            throw new RuntimeException(
                "This version of Xuggler does not support video resampling "
                    + i);
          }
        }
        else
        {
          mVSamplers[i] = null;
        }
        oc.setHeight(oHeight);
        oc.setWidth(oWidth);

        if (vquality >= 0)
        {
          oc.setFlag(IStreamCoder.Flags.FLAG_QSCALE, true);
          oc.setGlobalQuality(vquality);
        }

        /**
         * TimeBases are important, especially for Video. In general Audio
         * encoders will assume that any new audio happens IMMEDIATELY after any
         * prior audio finishes playing. But for video, we need to make sure
         * it's being output at the right rate.
         *
         * In this case we make sure we set the same time base as the input, and
         * then we don't change the time stamps of any IVideoPictures.
         *
         * But take my word that time stamps are tricky, and this only scratches
         * the surface. The good news is, it's easier in Xuggler than some
         * other systems.
         */
        IRational num = null;
        num = ic.getFrameRate();
        oc.setFrameRate(num);
        oc.setTimeBase(IRational.make(num.getDenominator(), num
                .getNumerator()));
        num = null;

        /**
         * And allocate buffers for us to store decoded and resampled video
         * pictures.
         */
        mIVideoPictures[i] = IVideoPicture.make(ic.getPixelType(), ic
            .getWidth(), ic.getHeight());
        mOVideoPictures[i] = IVideoPicture.make(oc.getPixelType(), oc
            .getWidth(), oc.getHeight());
      }
      else
      {
        log.warn("Ignoring input stream {} of type {}", i, cType);
      }
View Full Code Here
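
After the per-stream setup above, the coders still have to be opened and the output header written before the read/encode loop can run. A rough sketch of that follow-up step, reusing the fields above rather than the author's exact code:

    // sketch: open every coder pair that was configured, then write the header
    for (int s = 0; s < mOCoders.length; s++)
    {
      if (mOCoders[s] == null)
        continue;
      if (mICoders[s].open() < 0)
        throw new RuntimeException("could not open input coder for stream: " + s);
      if (mOCoders[s].open() < 0)
        throw new RuntimeException("could not open output coder for stream: " + s);
    }
    if (mOContainer.writeHeader() < 0)
      throw new RuntimeException("could not write header for: " + outputURL);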
