Examples of TimelineEntities

Examples of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities
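This excerpt is the setup half of a primary-filter deletion test: it wraps a single entity carrying a "user" primary filter in a TimelineEntities collection, puts it into the store, and asserts that the put reported no errors.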

  public void testDeleteEntitiesPrimaryFilters()
      throws IOException, InterruptedException {
    // build a primary filter mapping "user" to a single value
    Map<String, Set<Object>> primaryFilter =
        Collections.singletonMap("user", Collections.singleton(
            (Object) "otheruser"));
    // wrap one entity in a TimelineEntities collection and store it
    // (entityId1b, entityType1, ev2, and store are fields of the
    // enclosing test class)
    TimelineEntities atsEntities = new TimelineEntities();
    atsEntities.setEntities(Collections.singletonList(createEntity(entityId1b,
        entityType1, 789L, Collections.singletonList(ev2), null, primaryFilter,
        null)));
    TimelinePutResponse response = store.put(atsEntities);
    assertEquals(0, response.getErrors().size());
    // ... remainder of the test elided ...
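Because the helper createEntity and those fields belong to the enclosing test class, the snippet is not runnable on its own. A minimal self-contained sketch of the same put-and-verify pattern, assuming an in-memory store implementation and placeholder entity values:

  // Sketch only: the store choice, ids, and filter values are assumptions.
  TimelineStore store = new MemoryTimelineStore();
  store.init(new YarnConfiguration());
  store.start();

  TimelineEntity entity = new TimelineEntity();
  entity.setEntityId("entity-1");                  // placeholder id
  entity.setEntityType("TEST_TYPE");               // placeholder type
  entity.setStartTime(System.currentTimeMillis());
  entity.addPrimaryFilter("user", "otheruser");

  TimelineEntities wrapper = new TimelineEntities();
  wrapper.setEntities(Collections.singletonList(entity));

  TimelinePutResponse response = store.put(wrapper);
  assertEquals(0, response.getErrors().size());    // expect a clean put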

Examples of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities
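This excerpt, from a LevelDB-backed timeline store, scans entities of one type in reverse start-time order, building the seek key from fromId or the window end and honoring the fromTs, limit, and secondary-filter parameters.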

      byte[] first = null;
      if (fromId != null) {
        Long fromIdStartTime = getStartTimeLong(fromId, entityType);
        if (fromIdStartTime == null) {
          // no start time for provided id, so return empty entities
          return new TimelineEntities();
        }
        if (fromIdStartTime <= endtime) {
          // if provided id's start time falls before the end of the window,
          // use it to construct the seek key
          first = kb.add(writeReverseOrderedLong(fromIdStartTime))
              .add(fromId).getBytesForLookup();
        }
      }
      // if seek key wasn't constructed using fromId, construct it using end ts
      if (first == null) {
        first = kb.add(writeReverseOrderedLong(endtime)).getBytesForLookup();
      }
      byte[] last = null;
      if (starttime != null) {
        // if start time is not null, set a last key that will not be
        // iterated past
        last = KeyBuilder.newInstance().add(base).add(entityType)
            .add(writeReverseOrderedLong(starttime)).getBytesForLookup();
      }
      if (limit == null) {
        // if limit is not specified, use the default
        limit = DEFAULT_LIMIT;
      }

      TimelineEntities entities = new TimelineEntities();
      iterator = db.iterator();
      iterator.seek(first);
      // iterate until one of the following conditions is met: limit is
      // reached, there are no more keys, the key prefix no longer matches,
      // or a start time has been specified and reached/exceeded
      while (entities.getEntities().size() < limit && iterator.hasNext()) {
        byte[] key = iterator.peekNext().getKey();
        if (!prefixMatches(prefix, prefix.length, key) || (last != null &&
            WritableComparator.compareBytes(key, 0, key.length, last, 0,
                last.length) > 0)) {
          break;
        }
        // read the start time and entity id from the current key
        KeyParser kp = new KeyParser(key, prefix.length);
        Long startTime = kp.getNextLong();
        String entityId = kp.getNextString();

        if (fromTs != null) {
          // skip entities that were inserted into the store after the
          // provided fromTs, advancing past all of the skipped entity's keys
          long insertTime = readReverseOrderedLong(iterator.peekNext()
              .getValue(), 0);
          if (insertTime > fromTs) {
            byte[] firstKey = key;
            while (iterator.hasNext() && prefixMatches(firstKey,
                kp.getOffset(), key)) {
              iterator.next();
              if (!iterator.hasNext()) {
                // guard against peeking past the end of the database
                break;
              }
              key = iterator.peekNext().getKey();
            }
            continue;
          }
        }

        // parse the entity that owns this key, iterating over all keys for
        // the entity
        TimelineEntity entity = getEntity(entityId, entityType, startTime,
            fields, iterator, key, kp.getOffset());
        // determine if the retrieved entity matches the provided secondary
        // filters, and if so add it to the list of entities to return
        boolean filterPassed = true;
        if (secondaryFilters != null) {
          for (NameValuePair filter : secondaryFilters) {
            Object v = entity.getOtherInfo().get(filter.getName());
            if (v == null) {
              Set<Object> vs = entity.getPrimaryFilters()
                  .get(filter.getName());
              if (vs != null && !vs.contains(filter.getValue())) {
                filterPassed = false;
                break;
              }
            } else if (!v.equals(filter.getValue())) {
              filterPassed = false;
              break;
            }
          }
        }
        if (filterPassed) {
          entities.addEntity(entity);
        }
      }
      return entities;
    } finally {
      IOUtils.cleanup(LOG, iterator);
    }
  }
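This scan implements the store side of getEntities; the parameter order is visible in the web-service excerpt below. A hedged sketch of a call that exercises the same parameters (the store instance and all values are placeholders):

  // Fetch up to 10 entities of a placeholder type, newest first, that
  // started no later than now and carry the "user"="otheruser" primary
  // filter. Passing null for fields retrieves all fields in these stores.
  TimelineEntities result = store.getEntities(
      "TEST_TYPE",                              // entityType (placeholder)
      10L,                                      // limit
      null,                                     // windowStart: no lower bound
      System.currentTimeMillis(),               // windowEnd
      null,                                     // fromId: start at newest
      null,                                     // fromTs: no insert-time cap
      new NameValuePair("user", "otheruser"),   // primaryFilter
      null,                                     // secondaryFilters
      null);                                    // fields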

Examples of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities
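This excerpt, from an in-memory timeline store, applies the same query semantics over a map of entities: fromId paging via a sorted tail set, window and insert-time checks, and primary/secondary filter matching, followed by field masking and sorting.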

    // if fromId is given, page from that entity onward (tailSet is
    // inclusive) over a sorted view; otherwise iterate over all entities
    Iterator<TimelineEntity> entityIterator = null;
    if (fromId != null) {
      TimelineEntity firstEntity = entities.get(new EntityIdentifier(fromId,
          entityType));
      if (firstEntity == null) {
        return new TimelineEntities();
      } else {
        entityIterator = new TreeSet<TimelineEntity>(entities.values())
            .tailSet(firstEntity, true).iterator();
      }
    }
    if (entityIterator == null) {
      entityIterator = new PriorityQueue<TimelineEntity>(entities.values())
          .iterator();
    }

    // select entities inside the window that pass every filter, stopping
    // once the limit is reached
    List<TimelineEntity> entitiesSelected = new ArrayList<TimelineEntity>();
    while (entityIterator.hasNext()) {
      TimelineEntity entity = entityIterator.next();
      if (entitiesSelected.size() >= limit) {
        break;
      }
      if (!entity.getEntityType().equals(entityType)) {
        continue;
      }
      if (entity.getStartTime() <= windowStart) {
        continue;
      }
      if (entity.getStartTime() > windowEnd) {
        continue;
      }
      if (fromTs != null && entityInsertTimes.get(new EntityIdentifier(
          entity.getEntityId(), entity.getEntityType())) > fromTs) {
        continue;
      }
      if (primaryFilter != null &&
          !matchPrimaryFilter(entity.getPrimaryFilters(), primaryFilter)) {
        continue;
      }
      if (secondaryFilters != null) { // AND logic
        boolean flag = true;
        for (NameValuePair secondaryFilter : secondaryFilters) {
          if (secondaryFilter != null && !matchPrimaryFilter(
              entity.getPrimaryFilters(), secondaryFilter) &&
              !matchFilter(entity.getOtherInfo(), secondaryFilter)) {
            flag = false;
            break;
          }
        }
        if (!flag) {
          continue;
        }
      }
      entitiesSelected.add(entity);
    }
    // mask each selected entity down to the requested fields and return
    // the result in sorted order
    List<TimelineEntity> entitiesToReturn = new ArrayList<TimelineEntity>();
    for (TimelineEntity entitySelected : entitiesSelected) {
      entitiesToReturn.add(maskFields(entitySelected, fields));
    }
    Collections.sort(entitiesToReturn);
    TimelineEntities entitiesWrapper = new TimelineEntities();
    entitiesWrapper.setEntities(entitiesToReturn);
    return entitiesWrapper;
  }
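Because tailSet(firstEntity, true) is inclusive, a page requested with fromId begins with the fromId entity itself. A hedged paging sketch over this method (the store, type, and page size are placeholders):

  // Walk all entities of one type a page at a time, skipping the
  // repeated boundary entity on continuation pages.
  String fromId = null;
  List<TimelineEntity> page;
  do {
    page = store.getEntities("TEST_TYPE", 10L, null, null,
        fromId, null, null, null, null).getEntities();
    for (int i = (fromId == null) ? 0 : 1; i < page.size(); i++) {
      System.out.println(page.get(i).getEntityId());
    }
    if (!page.isEmpty()) {
      fromId = page.get(page.size() - 1).getEntityId();
    }
  } while (page.size() == 10);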

Examples of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities
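This excerpt, from the timeline web service, translates REST query parameters into a store getEntities call and maps parse failures and I/O errors to the appropriate HTTP responses.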

      @QueryParam("fromId") String fromId,
      @QueryParam("fromTs") String fromTs,
      @QueryParam("limit") String limit,
      @QueryParam("fields") String fields) {
    init(res);
    TimelineEntities entities = null;
    try {
      entities = store.getEntities(
          parseStr(entityType),
          parseLongStr(limit),
          parseLongStr(windowStart),
          parseLongStr(windowEnd),
          parseStr(fromId),
          parseLongStr(fromTs),
          parsePairStr(primaryFilter, ":"),
          parsePairsStr(secondaryFilter, ",", ":"),
          parseFieldsStr(fields, ","));
    } catch (NumberFormatException e) {
      throw new BadRequestException(
          "windowStart, windowEnd or limit is not a numeric value.");
    } catch (IllegalArgumentException e) {
      throw new BadRequestException("requested invalid field.");
    } catch (IOException e) {
      LOG.error("Error getting entities", e);
      throw new WebApplicationException(e,
          Response.Status.INTERNAL_SERVER_ERROR);
    }
    if (entities == null) {
      return new TimelineEntities();
    }
    return entities;
  }
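This handler serves the Timeline Server's REST interface. A hedged sketch of an equivalent HTTP request from plain Java; the host, port, and path are assumptions based on the default Timeline Server webapp address, and the parameter values are placeholders (primaryFilter uses the key:value form that parsePairStr above expects):

  // Query entities of a placeholder type over HTTP and print the
  // JSON-serialized TimelineEntities response.
  URL url = new URL("http://localhost:8188/ws/v1/timeline/TEST_TYPE"
      + "?limit=10&primaryFilter=user:otheruser");
  HttpURLConnection conn = (HttpURLConnection) url.openConnection();
  conn.setRequestProperty("Accept", "application/json");
  try (BufferedReader in = new BufferedReader(new InputStreamReader(
      conn.getInputStream(), StandardCharsets.UTF_8))) {
    String line;
    while ((line = in.readLine()) != null) {
      System.out.println(line);
    }
  }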

Examples of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities
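This excerpt, from a distributed shell integration test, reads the application attempt and container entities back out of the mini cluster's timeline store and checks their counts, types, and events.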

    Assert.assertTrue(verified);
    t.join();
    LOG.info("Client run completed. Result=" + result);
    Assert.assertTrue(result.get());
   
    TimelineEntities entitiesAttempts = yarnCluster
        .getApplicationHistoryServer()
        .getTimelineStore()
        .getEntities(ApplicationMaster.DSEntity.DS_APP_ATTEMPT.toString(),
            null, null, null, null, null, null, null, null);
    Assert.assertNotNull(entitiesAttempts);
    Assert.assertEquals(1, entitiesAttempts.getEntities().size());
    Assert.assertEquals(2, entitiesAttempts.getEntities().get(0).getEvents()
        .size());
    Assert.assertEquals(ApplicationMaster.DSEntity.DS_APP_ATTEMPT.toString(),
        entitiesAttempts.getEntities().get(0).getEntityType());
    TimelineEntities entities = yarnCluster
        .getApplicationHistoryServer()
        .getTimelineStore()
        .getEntities(ApplicationMaster.DSEntity.DS_CONTAINER.toString(), null,
            null, null, null, null, null, null, null);
    Assert.assertNotNull(entities);
    Assert.assertEquals(2, entities.getEntities().size());
    Assert.assertEquals(ApplicationMaster.DSEntity.DS_CONTAINER.toString(),
        entities.getEntities().get(0).getEntityType());
  }
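The trailing nulls map, in order, to the limit, windowStart, windowEnd, fromId, fromTs, primaryFilter, secondaryFilters, and fields parameters of getEntities, so these calls return every stored entity of the given type, newest first, up to the store's default limit.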

Examples of org.apache.hadoop.yarn.api.records.timeline.TimelineEntities
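This excerpt, from the timeline records unit test, builds a two-entity TimelineEntities collection, dumps it to JSON, and asserts on the structure of each entity.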

  private static final Log LOG =
      LogFactory.getLog(TestTimelineRecords.class);

  @Test
  public void testEntities() throws Exception {
    TimelineEntities entities = new TimelineEntities();
    for (int j = 0; j < 2; ++j) {
      TimelineEntity entity = new TimelineEntity();
      entity.setEntityId("entity id " + j);
      entity.setEntityType("entity type " + j);
      entity.setStartTime(System.currentTimeMillis());
      for (int i = 0; i < 2; ++i) {
        TimelineEvent event = new TimelineEvent();
        event.setTimestamp(System.currentTimeMillis());
        event.setEventType("event type " + i);
        event.addEventInfo("key1", "val1");
        event.addEventInfo("key2", "val2");
        entity.addEvent(event);
      }
      entity.addRelatedEntity("test ref type 1", "test ref id 1");
      entity.addRelatedEntity("test ref type 2", "test ref id 2");
      entity.addPrimaryFilter("pkey1", "pval1");
      entity.addPrimaryFilter("pkey2", "pval2");
      entity.addOtherInfo("okey1", "oval1");
      entity.addOtherInfo("okey2", "oval2");
      entities.addEntity(entity);
    }
    LOG.info("Entities in JSON:");
    LOG.info(TimelineUtils.dumpTimelineRecordtoJSON(entities, true));

    Assert.assertEquals(2, entities.getEntities().size());
    TimelineEntity entity1 = entities.getEntities().get(0);
    Assert.assertEquals("entity id 0", entity1.getEntityId());
    Assert.assertEquals("entity type 0", entity1.getEntityType());
    Assert.assertEquals(2, entity1.getRelatedEntities().size());
    Assert.assertEquals(2, entity1.getEvents().size());
    Assert.assertEquals(2, entity1.getPrimaryFilters().size());
    Assert.assertEquals(2, entity1.getOtherInfo().size());
    TimelineEntity entity2 = entities.getEntities().get(1);
    Assert.assertEquals("entity id 1", entity2.getEntityId());
    Assert.assertEquals("entity type 1", entity2.getEntityType());
    Assert.assertEquals(2, entity2.getRelatedEntities().size());
    Assert.assertEquals(2, entity2.getEvents().size());
    Assert.assertEquals(2, entity2.getPrimaryFilters().size());
    Assert.assertEquals(2, entity2.getOtherInfo().size());
  }
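dumpTimelineRecordtoJSON serializes these JAXB-annotated records with Jackson. A hedged round-trip sketch; registering a JaxbAnnotationIntrospector is an assumption about how to make a standalone ObjectMapper honor the JAXB names:

  // Parse the dumped JSON back into a TimelineEntities object.
  ObjectMapper mapper = new ObjectMapper();
  mapper.setAnnotationIntrospector(new JaxbAnnotationIntrospector());
  String json = TimelineUtils.dumpTimelineRecordtoJSON(entities, true);
  TimelineEntities parsed = mapper.readValue(json, TimelineEntities.class);
  Assert.assertEquals(2, parsed.getEntities().size());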