Package cascading.tuple

Examples of cascading.tuple.TupleEntryIterator
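A TupleEntryIterator is returned whenever a Flow or Tap is opened for reading: Flow#openSource(), Flow#openSink(), Flow#openTapForRead() and Tap#openForRead() all hand one back. Each call to next() yields a TupleEntry, and the iterator should be closed once reading is done. A minimal sketch of that pattern, assuming a hypothetical `tap` and `flowProcess` already exist (both names are placeholders, not taken from the examples below):

    TupleEntryIterator iterator = tap.openForRead( flowProcess );

    try
      {
      while( iterator.hasNext() )
        {
        TupleEntry entry = iterator.next();
        Tuple tuple = entry.getTuple(); // use getTupleCopy() if the Tuple is kept past the iteration
        // inspect individual values, e.g. entry.getObject( 0 )
        }
      }
    finally
      {
      iterator.close(); // release the underlying reader
      }

The examples below show the same pattern against real sources and sinks.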


    flow.complete();

    validateLength( flow, 5, null );

    // open the flow's sink for reading and spot-check the second value of the first tuple
    TupleEntryIterator iterator = flow.openSink();

    assertEquals( "not equal: tuple.get(1)", "1 a", iterator.next().getObject( 1 ) );

    iterator.close();

    // confirm the tuple iterator can handle nulls from the source
    validateLength( flow.openSource(), 5 );
    }
View Full Code Here


    // a trailing "*" glob on the input path should resolve to the existing source files
    Hfs sourceExists = new Hfs( new TextLine( new Fields( "offset", "line" ) ), InputData.inputPath + "*" );

    assertTrue( sourceExists.resourceExists( getPlatform().getFlowProcess() ) );

    TupleEntryIterator iterator = sourceExists.openForRead( getPlatform().getFlowProcess() );
    assertTrue( iterator.hasNext() );
    iterator.close();

    // attempt to read a path that does not exist
    try
      {
      Hfs sourceNotExists = new Hfs( new TextLine( new Fields( "offset", "line" ) ), InputData.inputPath + "/blah/" );
      iterator = sourceNotExists.openForRead( getPlatform().getFlowProcess() );
View Full Code Here

    // a "{*}" glob pattern should also resolve to the existing source files
    Hfs sourceExists = new Hfs( new TextLine( new Fields( "offset", "line" ) ), InputData.inputPath + "{*}" );

    assertTrue( sourceExists.resourceExists( getPlatform().getFlowProcess() ) );

    TupleEntryIterator iterator = sourceExists.openForRead( getPlatform().getFlowProcess() );
    assertTrue( iterator.hasNext() );
    iterator.close();

    try
      {
      Hfs sourceNotExists = new Hfs( new TextLine( new Fields( "offset", "line" ) ), InputData.inputPath + "/blah/" );
      iterator = sourceNotExists.openForRead( getPlatform().getFlowProcess() );
View Full Code Here

    flow.complete();

    validateLength( flow, results.length, 5 );

    // validate that the parsed source tuples and the written sink tuples both match the expected tuples
    TupleEntryIterator iterator = flow.openSource();

    int count = 0;
    while( iterator.hasNext() )
      {
      Tuple tuple = iterator.next().getTuple();
      assertEquals( tuples[ count++ ], tuple );
      }

    // the sink should contain the same expected tuples
    iterator = flow.openSink();

    count = 0;
    while( iterator.hasNext() )
      {
      Tuple tuple = iterator.next().getTuple();
      assertEquals( tuples[ count++ ], tuple );
      }
    }
View Full Code Here

    Flow flow = getPlatform().getFlowConnector().connect( input, output, pipe );

    flow.complete();

    Fields fields = new Fields( "first", "second", "third", "fourth", "fifth" );
    // re-open the delimited output through a fresh tap and validate 13 rows of 5 fields
    TupleEntryIterator iterator = flow.openTapForRead( getPlatform().getDelimitedFile( fields, true, true, ",", "\"", null, output1.getIdentifier(), SinkMode.REPLACE ) );

    validateLength( iterator, 13, 5 );

    assertHeaders( output1, flow );
    assertHeaders( output2, flow );
View Full Code Here

    assertHeaders( output2, flow );
    }

  private void assertHeaders( Tap output, Flow flow ) throws IOException
    {
    TupleEntryIterator iterator = flow.openTapForRead( getPlatform().getTextFile( new Fields( "line" ), output.getIdentifier() ) );

    // the first line written to the tap should be the header row
    assertEquals( iterator.next().getObject( 0 ), "first,second,third,fourth,fifth" );

    iterator.close();
    }
View Full Code Here

    // append ProtobufSerialization to Hadoop's io.serializations so Protobuf messages stored in tuples can be serialized
    Map<Object, Object> properties = new HashMap<Object, Object>(){{
      put("io.serializations", new JobConf().get("io.serializations") + "," + ProtobufSerialization.class.getName());
    }};
    new HadoopFlowConnector(properties).connect(t, sink, groupByPipe).complete();

    TupleEntryIterator tei = sink.openForRead(new HadoopFlowProcess(new JobConf()));
    Set<Tuple> tuples = new HashSet<Tuple>();
    while (tei.hasNext()) {
      tuples.add(tei.next().getTupleCopy());
    }

    assertEquals(expectedTuples, tuples);
  }
View Full Code Here

      put("io.serializations",
          new JobConf().get("io.serializations") + "," + ProtobufSerialization.class.getName());
    }};
    new HadoopFlowConnector(properties).connect(t, sink, groupByPipe).complete();

    TupleEntryIterator tei = sink.openForRead(new HadoopFlowProcess(new JobConf()));
    Set<Tuple> tuples = new HashSet<Tuple>();
    while (tei.hasNext()) {
      tuples.add(tei.next().getTupleCopy());
    }

    assertEquals(expectedTuples, tuples);
  }
View Full Code Here

    }
    tec.close();

    // read results back out
    Tap outputTap = new Hfs(new ProtobufScheme("value", Example.Person.class), "/tmp/input");
    TupleEntryIterator iter = outputTap.openForRead(new HadoopFlowProcess(), null);
    List<Tuple> tuples = new ArrayList<Tuple>();
    while (iter.hasNext()) {
      tuples.add(iter.next().getTupleCopy());
    }

    assertEquals(new HashSet<Tuple>(expected), new HashSet<Tuple>(tuples));
  }
View Full Code Here

    // expand each serialized Person protobuf in the "value" field into individual id/name/email/position fields
    Pipe p = new Each(inPipe, new Fields("value"), new ExpandProto(Example.Person.class), new Fields("id", "name", "email", "position"));

    Hfs sink = new Hfs(new TextLine(), "/tmp/output");
    new HadoopFlowConnector().connect(inTap, sink, p).complete();

    TupleEntryIterator iter = sink.openForRead(new HadoopFlowProcess());
    List<Tuple> results = new ArrayList<Tuple>();
    while (iter.hasNext()) {
      results.add(iter.next().getTupleCopy());
    }
    assertEquals(2, results.size());

    assertEquals(new Tuple(0, 1, "bryan", "bryan@mail.com", Example.Person.Position.CEO.getNumber()).toString(), results.get(0).toString());
    assertEquals(new Tuple(25, 2, "lucas", null, null).toString(), results.get(1).toString());
View Full Code Here
