Package io.druid.timeline

Examples of io.druid.timeline.DataSegment

DataSegment is Druid's metadata descriptor for an immutable, versioned chunk of a dataSource covering a specific time interval. The excerpts below, taken from the Druid codebase, show segments being looked up on servers, opened as storage adapters, moved between servers by the coordinator, constructed during batch indexing, pushed to S3 deep storage, and round-tripped through JSON.

    DruidServer server = serverInventoryView.getInventoryValue(serverName);
    if (server == null) {
      return Response.status(Response.Status.NOT_FOUND).build();
    }

    DataSegment segment = server.getSegment(segmentId);
    if (segment == null) {
      return Response.status(Response.Status.NOT_FOUND).build();
    }

    return Response.status(Response.Status.OK).entity(segment).build();
View Full Code Here
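
The resource above returns the matching DataSegment directly as the JAX-RS response entity. For context, here is a minimal sketch of the read-only accessors a caller typically uses on the returned segment; the class and method names (SegmentDescriptions, describe) are illustrative, not part of Druid:

    import io.druid.timeline.DataSegment;

    public class SegmentDescriptions
    {
      // Summarize the metadata fields that identify a segment.
      public static String describe(DataSegment segment)
      {
        return String.format(
            "%s %s v%s, %,d bytes",
            segment.getDataSource(),   // dataSource the segment belongs to
            segment.getInterval(),     // time interval the segment covers
            segment.getVersion(),      // version string; newer versions shadow older ones
            segment.getSize()          // size in bytes recorded in the segment metadata
        );
      }
    }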


          new Function<TimelineObjectHolder<String, DataSegment>, StorageAdapter>()
          {
            @Override
            public StorageAdapter apply(TimelineObjectHolder<String, DataSegment> input)
            {
              final DataSegment segment = input.getObject().getChunk(0).getObject();
              final File file = Preconditions.checkNotNull(
                  segmentFileMap.get(segment),
                  "File for segment %s", segment.getIdentifier()
              );

              try {
                return new QueryableIndexStorageAdapter(IndexIO.loadIndex(file));
              }
              catch (IOException e) {
                // assumed handling, since the excerpt is cut off here: rethrow the
                // checked IOException unchecked
                throw Throwables.propagate(e);
              }
View Full Code Here
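
The Function above pulls a DataSegment out of a TimelineObjectHolder's partition chunk and opens it as a QueryableIndexStorageAdapter. For context, a sketch of how such holders are usually produced from a set of segments with VersionedIntervalTimeline; the segments and queryInterval variables are assumed to be in scope:

    // Build a timeline keyed by version string, then look up the visible chunks.
    VersionedIntervalTimeline<String, DataSegment> timeline =
        new VersionedIntervalTimeline<String, DataSegment>(Ordering.<String>natural());

    for (DataSegment segment : segments) {
      timeline.add(
          segment.getInterval(),
          segment.getVersion(),
          segment.getShardSpec().createChunk(segment)   // wrap the segment in its partition chunk
      );
    }

    // lookup() resolves overshadowed versions and returns only the visible segments.
    List<TimelineObjectHolder<String, DataSegment>> holders = timeline.lookup(queryInterval);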

    try {
      if (fromServer.getMetadata().equals(toServer.getMetadata())) {
        throw new IAE("Cannot move [%s] to and from the same server [%s]", segmentName, fromServer.getName());
      }

      final DataSegment segment = fromServer.getSegment(segmentName);
      if (segment == null) {
        throw new IAE("Unable to find segment [%s] on server [%s]", segmentName, fromServer.getName());
      }

      final LoadQueuePeon loadPeon = loadManagementPeons.get(toServer.getName());
      if (loadPeon == null) {
        throw new IAE("LoadQueuePeon hasn't been created yet for path [%s]", toServer.getName());
      }

      final LoadQueuePeon dropPeon = loadManagementPeons.get(fromServer.getName());
      if (dropPeon == null) {
        throw new IAE("LoadQueuePeon hasn't been created yet for path [%s]", fromServer.getName());
      }

      final ServerHolder toHolder = new ServerHolder(toServer, loadPeon);
      if (toHolder.getAvailableSize() < segment.getSize()) {
        throw new IAE(
            "Not enough capacity on server [%s] for segment [%s]. Required: %,d, available: %,d.",
            toServer.getName(),
            segment,
            segment.getSize(),
            toHolder.getAvailableSize()
        );
      }

      final String toLoadQueueSegPath = ZKPaths.makePath(
View Full Code Here
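
The excerpt is cut off while assembling the ZooKeeper path for the target server's load queue. A hedged sketch of how such a path is commonly built with Curator's ZKPaths; the base path and the use of the segment identifier as the child node are assumptions, not taken from the excerpt:

    // Illustrative base path; the real coordinator derives it from its ZK configuration.
    String loadQueueBasePath = "/druid/loadQueue";

    String toLoadQueueSegPath = ZKPaths.makePath(
        ZKPaths.makePath(loadQueueBasePath, toServer.getName()),  // per-server load queue node
        segment.getIdentifier()                                   // child node named after the segment id
    );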


    throttler = new ReplicationThrottler(2, 1);
    for (String tier : Arrays.asList("hot", DruidServer.DEFAULT_TIER)) {
      throttler.updateReplicationState(tier);
      throttler.updateTerminationState(tier);
    }
    segment = new DataSegment(
        "foo",
        new Interval("0/3000"),
        new DateTime().toString(),
        Maps.<String, Object>newHashMap(),
        Lists.<String>newArrayList(),
View Full Code Here
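
The test above sets up a throttled replication scenario and then constructs a segment with the full DataSegment constructor; the excerpt is cut before the remaining arguments. A minimal sketch of creating a comparable test segment with DataSegment.Builder instead, relying on the builder's defaults for loadSpec, dimensions, metrics, and shard spec (field values are illustrative):

    DataSegment testSegment = DataSegment.builder()
                                         .dataSource("foo")
                                         .interval(new Interval("2012-01-01/2012-01-02"))
                                         .version(new DateTime().toString())
                                         .size(0)
                                         .build();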

        );
      } else {
        throw new ISE("Unknown file system[%s]", outputFS.getClass());
      }

      DataSegment segment = new DataSegment(
          config.getDataSource(),
          interval,
          config.getSchema().getTuningConfig().getVersion(),
          loadSpec,
          dimensionNames,
View Full Code Here
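
The Hadoop indexer above records a loadSpec describing where the pushed segment file lives. A hedged sketch of what that map typically looks like for an HDFS-backed segment; the path is illustrative:

    Map<String, Object> loadSpec = ImmutableMap.<String, Object>of(
        "type", "hdfs",                                              // handled by the HDFS load spec
        "path", "hdfs://namenode:8020/druid/segments/foo/index.zip"  // illustrative location of the zipped segment
    );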

              }

              log.info("Pushing %s.", toPush);
              s3Client.putObject(outputBucket, toPush);

              final DataSegment outSegment = inSegment.withSize(indexSize)
                                                      .withLoadSpec(
                                                          ImmutableMap.<String, Object>of(
                                                              "type",
                                                              "s3_zip",
                                                              "bucket",
View Full Code Here
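
The S3 pusher above derives the final DataSegment from the input segment with withSize() and withLoadSpec(); the excerpt is cut inside the loadSpec map. A hedged sketch of the complete s3_zip loadSpec shape, with illustrative bucket and key values:

    final DataSegment outSegment = inSegment
        .withSize(indexSize)
        .withLoadSpec(
            ImmutableMap.<String, Object>of(
                "type", "s3_zip",                     // S3 zip load spec
                "bucket", "example-druid-bucket",     // illustrative bucket name
                "key", "druid/segments/foo/index.zip" // illustrative object key
            )
        );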

    VersionConverterTask task = VersionConverterTask.create(dataSource, interval);

    Task task2 = jsonMapper.readValue(jsonMapper.writerWithDefaultPrettyPrinter().writeValueAsString(task), Task.class);
    Assert.assertEquals(task, task2);

    DataSegment segment = new DataSegment(
        dataSource,
        interval,
        new DateTime().toString(),
        ImmutableMap.<String, Object>of(),
        ImmutableList.<String>of(),
View Full Code Here
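
The test above round-trips a VersionConverterTask through JSON and then builds a DataSegment for the same dataSource and interval. A DataSegment itself can be round-tripped the same way; a minimal sketch, assuming jsonMapper is a Druid-configured Jackson ObjectMapper and segment is in scope:

    String json = jsonMapper.writeValueAsString(segment);
    DataSegment roundTripped = jsonMapper.readValue(json, DataSegment.class);

    Assert.assertEquals(segment, roundTripped);
    Assert.assertEquals(segment.getIdentifier(), roundTripped.getIdentifier());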

    testTask = new TestMergeTask(
        "task1",
        "dummyDs",
        Lists.<DataSegment>newArrayList(
            new DataSegment(
                "dummyDs",
                new Interval("2012-01-01/2012-01-02"),
                new DateTime().toString(),
                null,
                null,
View Full Code Here
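
The merge task above is handed a list of DataSegments for a single dataSource. A hedged sketch of a sanity check such a task commonly performs before merging, assuming the input list is available as mergedSegments:

    for (DataSegment s : mergedSegments) {
      // Every input segment must belong to the dataSource being merged.
      Preconditions.checkArgument(
          "dummyDs".equals(s.getDataSource()),
          "segment[%s] is not part of dataSource[dummyDs]",
          s.getIdentifier()
      );
    }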
