Package org.red5.server.net.rtmp.event

Examples of org.red5.server.net.rtmp.event.VideoData
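The excerpts below show VideoData being constructed from an IoBuffer, timestamped, inspected for its frame type, queued, recorded, and released. A minimal sketch of that basic life cycle, using the same calls that appear in the excerpts (the IoBuffer import assumes Apache MINA 2.x; the payload bytes and the 40 ms timestamp are placeholders):

import org.apache.mina.core.buffer.IoBuffer;
import org.red5.server.net.rtmp.event.VideoData;

public class VideoDataSketch {

    public static void main(String[] args) {
        // wrap an encoded video frame (placeholder bytes) in an IoBuffer
        IoBuffer payload = IoBuffer.allocate(100);
        payload.put(new byte[100]);
        payload.flip();

        // construct the event and stamp it, as the excerpts below do
        VideoData video = new VideoData(payload);
        video.setTimestamp(40);

        // the frame type drives the keyframe / drop decisions seen further down
        if (video.getFrameType() == VideoData.FrameType.KEYFRAME) {
            System.out.println("key frame at " + video.getTimestamp());
        }

        // release the event once it has been consumed
        video.release();
    }
}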


          if (log.isTraceEnabled()) {
            log.trace(String.format("Image was captured in %s ms", System.currentTimeMillis() - start));
          }
          start = System.currentTimeMillis();
          try {
            VideoData data = se.encode(image);
            if (log.isTraceEnabled()) {
              log.trace(String.format("Image was encoded in %s ms", System.currentTimeMillis() - start));
            }
            frames.offer(data);
            se.createUnalteredFrame(screen);
          } catch (IOException e) {
            log.error("Error while encoding: ", e);
          }
        }
      }, 0, timeBetweenFrames * NANO_MULTIPLIER, TimeUnit.NANOSECONDS);
      // second task: drain the frame queue and push video to the stream at a fixed cadence
      sendScheduler.scheduleWithFixedDelay(new Runnable() {
        public void run() {
          VideoData f = frames.poll();
          // fall back to the last unaltered frame when no new frame has been queued
          f = f == null ? se.getUnalteredFrame() : f;
          if (f != null) {
            try {
              timestamp += timeBetweenFrames;
              pushVideo(f, (int)timestamp);


  /** {@inheritDoc} */
  @Override
  protected void setUp() throws Exception {
    super.setUp();
    VideoData message = new VideoData(IoBuffer.allocate(100));
    playBuffer = new PlayBuffer(1000);
    rtmpMessage = RTMPMessage.build(message);
  }

          if (videoCodec != null) {
            //check for decoder configuration to send
            IoBuffer config = videoCodec.getDecoderConfiguration();
            if (config != null) {
              log.debug("Decoder configuration is available for {}", videoCodec.getName());
              VideoData videoConf = new VideoData(config.asReadOnlyBuffer());
              try {
                log.debug("Setting decoder configuration for recording");
                listener.getFileConsumer().setVideoDecoderConfiguration(videoConf);
              } finally {
                videoConf.release();
              }
            }
          } else {
            log.debug("Could not initialize stream output, videoCodec is null.");
          }

          switch (tag.getDataType()) {
            case Constants.TYPE_AUDIO_DATA:
              msg = new AudioData(tag.getBody());
              break;
            case Constants.TYPE_VIDEO_DATA:
              msg = new VideoData(tag.getBody());
              break;
            case Constants.TYPE_INVOKE:
              msg = new Invoke(tag.getBody());
              break;
            case Constants.TYPE_NOTIFY:


        }
        //drop it
        drop = true;
      } else {
        if (isVideo) {
          VideoData video = (VideoData) message;
          if (video.getFrameType() == FrameType.KEYFRAME) {
            //if it's a key frame, the inter-frame and disposable checks can be skipped
            log.trace("Resuming stream with key frame; message: {}", message);
            mapping.setKeyFrameNeeded(false);
          } else if (tardiness >= baseTolerance && tardiness < midTolerance) {
            //drop disposable frames
            if (video.getFrameType() == FrameType.DISPOSABLE_INTERFRAME) {
              log.trace("Dropping disposible frame; message: {}", message);
              drop = true;
            }
          } else if (tardiness >= midTolerance && tardiness <= highestTolerance) {
            //drop inter-frames and disposable frames
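The excerpt above drops late frames based on VideoData.getFrameType() and three lateness tolerances. A condensed sketch of that decision, assuming the same FrameType import as the excerpt (the helper name mayDrop is hypothetical, the tolerance names come from the excerpt, and FrameType.INTERFRAME is inferred from its "drop inter-frames" comment):

    // Sketch only: mirrors the tolerance bands shown above (baseTolerance < midTolerance < highestTolerance)
    static boolean mayDrop(VideoData video, long tardiness, long baseTolerance, long midTolerance, long highestTolerance) {
        FrameType type = video.getFrameType();
        if (type == FrameType.KEYFRAME) {
            // key frames are always kept; the excerpt also clears its key-frame-needed flag here
            return false;
        }
        if (tardiness >= baseTolerance && tardiness < midTolerance) {
            // first band: only disposable inter-frames are dropped
            return type == FrameType.DISPOSABLE_INTERFRAME;
        }
        if (tardiness >= midTolerance && tardiness <= highestTolerance) {
            // second band: inter-frames and disposable frames are dropped
            return type == FrameType.INTERFRAME || type == FrameType.DISPOSABLE_INTERFRAME;
        }
        // behaviour outside these bands is not shown in the excerpt
        return false;
    }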

        event = new AudioData(dataReference);
        event.setTimestamp(messageIn.getBody().getTimestamp());
        break;
      case Constants.TYPE_VIDEO_DATA:
        dataReference = ((VideoData) messageIn.getBody()).getData();
        event = new VideoData(dataReference);
        event.setTimestamp(messageIn.getBody().getTimestamp());
        break;
      default:
        dataReference = ((Notify) messageIn.getBody()).getData();
        event = new Notify(dataReference);

        lastTimestamp = timestamp;
      }
      // ensure that our first video frame written is a key frame
      if (msg instanceof VideoData) {
        if (!gotVideoKeyFrame) {
          VideoData video = (VideoData) msg;
          if (video.getFrameType() == FrameType.KEYFRAME) {
            log.debug("Got our first keyframe");
            gotVideoKeyFrame = true;
          } else {
            // skip this frame and bail out
            log.debug("Skipping video data since keyframe has not been written yet");

          break;
        case Constants.TYPE_VIDEO_DATA:
          log.trace("Video data");
          buf = ((VideoData) msg).getData();
          if (buf != null) {
            // duplicate the payload as read-only so the original buffer's position is untouched
            VideoData videoData = new VideoData(buf.asReadOnlyBuffer());
            videoData.setHeader(header);
            videoData.setTimestamp(header.getTimer());
            log.trace("Source type: {}", ((VideoData) msg).getSourceType());
            videoData.setSourceType(((VideoData) msg).getSourceType());
            video.write(videoData);
          } else {
            log.warn("Video data was not found");
          }
          break;

              event = new AudioData(buffer);
              event.setTimestamp(cachedEvent.getTimestamp());
              message = RTMPMessage.build(event);
              break;
            case Constants.TYPE_VIDEO_DATA:
              event = new VideoData(buffer);
              event.setTimestamp(cachedEvent.getTimestamp());
              message = RTMPMessage.build(event);
              break;
            default:
              event = new Notify(buffer);
