Package org.springframework.data.hadoop.store.output

Examples of org.springframework.data.hadoop.store.output.TextFileWriter
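TextFileWriter writes text lines to a Hadoop file system, driven by a Configuration, a base Path and an optional compression codec. As a quick orientation before the test snippets below, a minimal usage sketch (the path and data here are illustrative assumptions, not taken from the tests):

  Configuration configuration = new Configuration();
  TextFileWriter writer = new TextFileWriter(configuration, new Path("/tmp/store"), null);
  writer.afterPropertiesSet(); // InitializingBean hook, used throughout the tests below
  writer.write("hello");
  writer.close();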



  @Test
  public void testWriteReadManyLinesWithNamingAndRollover() throws IOException {

    // writer with rolling naming and size-based rollover: roll to a new file
    // after roughly 40 bytes, and close an idle file after 10 seconds
    TextFileWriter writer = new TextFileWriter(getConfiguration(), testDefaultPath, null);
    writer.setFileNamingStrategy(new RollingFileNamingStrategy());
    writer.setRolloverStrategy(new SizeRolloverStrategy(40));
    writer.setIdleTimeout(10000);

    TestUtils.writeData(writer, DATA09ARRAY);

    // RollingFileNamingStrategy produces plain numeric file names, starting from "0"
    TextFileReader reader1 = new TextFileReader(getConfiguration(), new Path(testDefaultPath, "0"), null);
    List<String> splitData1 = TestUtils.readData(reader1);
    // ...
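With a 40-byte rollover the data set spills across several files, named "0", "1", "2", and so on by the rolling strategy. The remainder of the test is truncated above; presumably it reads each rolled file back the same way, along the lines of this sketch:

    TextFileReader reader2 = new TextFileReader(getConfiguration(), new Path(testDefaultPath, "1"), null);
    List<String> splitData2 = TestUtils.readData(reader2);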



  @Test
  public void testWriteReadManyLinesWithNamingAndRolloverWithGzip() throws IOException {

    TextFileWriter writer = new TextFileWriter(getConfiguration(), testDefaultPath, Codecs.GZIP.getCodecInfo());
    writer.setFileNamingStrategy(new RollingFileNamingStrategy());
    writer.setRolloverStrategy(new SizeRolloverStrategy(40));

    // the codec buffers internally, so a fair amount of data must be written
    // before anything actually reaches the file/stream; repeating the same
    // data also compresses extremely well, which delays the size rollover
    for (int i = 0; i < 45000; i++) {
      // ...
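The loop body is cut off above. A plausible completion, assuming the test simply keeps pushing the same array through the writer until the compressed output finally exceeds the rollover size (the boolean overload of writeData, which skips closing the writer, appears in later snippets):

    for (int i = 0; i < 45000; i++) {
      TestUtils.writeData(writer, DATA09ARRAY, false);
    }
    writer.close();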

  @Test
  public void testContinueStrategies() throws IOException, InterruptedException {
    String[] dataArray = new String[] { DATA10 };

    // first writer: StaticFileNamingStrategy("data") chained with
    // RollingFileNamingStrategy yields file names like data-0
    TextFileWriter writer = new TextFileWriter(getConfiguration(), testDefaultPath, null);

    ChainedFileNamingStrategy fileNamingStrategy = new ChainedFileNamingStrategy();
    fileNamingStrategy.register(new StaticFileNamingStrategy("data"));
    fileNamingStrategy.register(new RollingFileNamingStrategy());
    writer.setFileNamingStrategy(fileNamingStrategy);
    writer.setInWritingSuffix(".tmp");
    writer.afterPropertiesSet();
    TestUtils.writeData(writer, dataArray);

    Thread.sleep(1000);

    // simulate a restart: a fresh writer must detect the existing data-0
    // and continue the sequence instead of overwriting it
    writer = new TextFileWriter(getConfiguration(), testDefaultPath, null);
    fileNamingStrategy = new ChainedFileNamingStrategy();
    fileNamingStrategy.register(new StaticFileNamingStrategy("data"));
    fileNamingStrategy.register(new RollingFileNamingStrategy());
    writer.setFileNamingStrategy(fileNamingStrategy);
    writer.setInWritingSuffix(".tmp");
    writer.afterPropertiesSet();
    TestUtils.writeData(writer, dataArray);

    Thread.sleep(1000);

    // and a second restart, continuing the sequence once more
    writer = new TextFileWriter(getConfiguration(), testDefaultPath, null);
    fileNamingStrategy = new ChainedFileNamingStrategy();
    fileNamingStrategy.register(new StaticFileNamingStrategy("data"));
    fileNamingStrategy.register(new RollingFileNamingStrategy());
    writer.setFileNamingStrategy(fileNamingStrategy);
    writer.setInWritingSuffix(".tmp");
    writer.afterPropertiesSet();
    TestUtils.writeData(writer, dataArray);

    TextFileReader reader1 = new TextFileReader(getConfiguration(), new Path(testDefaultPath, "data-0"), null);
    List<String> splitData1 = TestUtils.readData(reader1);
    // ...
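Because each naming strategy re-initializes itself against the files already present in testDefaultPath, the three writer generations produce data-0, data-1 and data-2 rather than clobbering one another. The truncated assertions presumably read all three back; a hedged sketch of that continuation:

    TextFileReader reader2 = new TextFileReader(getConfiguration(), new Path(testDefaultPath, "data-1"), null);
    List<String> splitData2 = TestUtils.readData(reader2);
    TextFileReader reader3 = new TextFileReader(getConfiguration(), new Path(testDefaultPath, "data-2"), null);
    List<String> splitData3 = TestUtils.readData(reader3);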

  @Test
  public void testContinueStrategiesWithCodec() throws IOException, InterruptedException {
    String[] dataArray = new String[] { DATA10 };

    // same chain as before plus CodecFileNamingStrategy, which appends the
    // codec suffix and yields names like data-0.gzip
    TextFileWriter writer = new TextFileWriter(getConfiguration(), testDefaultPath, Codecs.GZIP.getCodecInfo());

    ChainedFileNamingStrategy fileNamingStrategy = new ChainedFileNamingStrategy();
    fileNamingStrategy.register(new StaticFileNamingStrategy("data"));
    fileNamingStrategy.register(new RollingFileNamingStrategy());
    fileNamingStrategy.register(new CodecFileNamingStrategy());
    writer.setFileNamingStrategy(fileNamingStrategy);
    writer.setInWritingSuffix(".tmp");
    writer.afterPropertiesSet();
    TestUtils.writeData(writer, dataArray);

    Thread.sleep(1000);

    // simulate a restart; naming should continue with data-1.gzip
    writer = new TextFileWriter(getConfiguration(), testDefaultPath, Codecs.GZIP.getCodecInfo());
    fileNamingStrategy = new ChainedFileNamingStrategy();
    fileNamingStrategy.register(new StaticFileNamingStrategy("data"));
    fileNamingStrategy.register(new RollingFileNamingStrategy());
    fileNamingStrategy.register(new CodecFileNamingStrategy());
    writer.setFileNamingStrategy(fileNamingStrategy);
    writer.setInWritingSuffix(".tmp");
    writer.afterPropertiesSet();
    TestUtils.writeData(writer, dataArray);

    Thread.sleep(1000);

    // and a second restart, continuing with data-2.gzip
    writer = new TextFileWriter(getConfiguration(), testDefaultPath, Codecs.GZIP.getCodecInfo());
    fileNamingStrategy = new ChainedFileNamingStrategy();
    fileNamingStrategy.register(new StaticFileNamingStrategy("data"));
    fileNamingStrategy.register(new RollingFileNamingStrategy());
    fileNamingStrategy.register(new CodecFileNamingStrategy());
    writer.setFileNamingStrategy(fileNamingStrategy);
    writer.setInWritingSuffix(".tmp");
    writer.afterPropertiesSet();
    TestUtils.writeData(writer, dataArray);

    TextFileReader reader1 = new TextFileReader(getConfiguration(), new Path(testDefaultPath, "data-0.gzip"), Codecs.GZIP.getCodecInfo());
    List<String> splitData1 = TestUtils.readData(reader1);
    // ...
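The compressed variant mirrors the plain test: each generation lands in its own gzip file and is read back with the matching codec. A hedged sketch of the follow-up reads:

    TextFileReader reader2 = new TextFileReader(getConfiguration(), new Path(testDefaultPath, "data-1.gzip"), Codecs.GZIP.getCodecInfo());
    List<String> splitData2 = TestUtils.readData(reader2);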

    Configuration failConfiguration = new Configuration();
    failConfiguration.set("fs.defaultFS", "hdfs://localhost:12345");
    String[] dataArray = new String[] { DATA10 };

    // use a configuration that cannot work for hdfs
    TextFileWriter writer = new TextFileWriter(failConfiguration, testDefaultPath, null);
    writer.afterPropertiesSet();
    writer.start();

    // swap in a working configuration after the writer lifecycle has started;
    // the underlying stream is opened lazily, so the write below still succeeds
    TestUtils.setField("configuration", writer, getConfiguration());
    TestUtils.writeData(writer, dataArray);
    // ...
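TestUtils.setField is a test-local reflection helper used to reach the writer's private configuration field. A minimal sketch of what such a helper might look like, using spring-core's ReflectionUtils (the name and signature are assumptions):

  // java.lang.reflect.Field, org.springframework.util.ReflectionUtils
  static void setField(String name, Object target, Object value) {
    Field field = ReflectionUtils.findField(target.getClass(), name);
    ReflectionUtils.makeAccessible(field);
    ReflectionUtils.setField(field, target, value);
  }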

    Configuration failConfiguration = new Configuration();
    failConfiguration.set("fs.defaultFS", "hdfs://localhost:12345");
    String[] dataArray = new String[] { DATA10 };

    // use a configuration that cannot work for hdfs
    TextFileWriter writer = new TextFileWriter(failConfiguration, testDefaultPath, null);
    writer.afterPropertiesSet();
    writer.start();

    // the write should fail because no working configuration is swapped in this time
    Exception e = null;
    try {
      TestUtils.writeData(writer, dataArray, false);
      // ...
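The try block is truncated above; a hedged sketch of how it presumably concludes, capturing the failure and asserting that it happened:

    } catch (Exception ex) {
      e = ex;
    }
    assertNotNull(e);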

  private ApplicationContext context;

  @Test
  public void testWriteReadManyLines() throws IOException, InterruptedException {

    TextFileWriter writer = context.getBean("writer", TextFileWriter.class);
    assertNotNull(writer);

    TestUtils.writeData(writer, new String[] { DATA10 }, false);
    Thread.sleep(3000);
    TestUtils.writeData(writer, new String[] { DATA11 }, false);
    // ...
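Here the writer is resolved from an application context instead of being built inline. The 3-second pause between the two writes suggests the bean is configured with an idle timeout shorter than that, so the second write should land in a new file. A hedged Java-config sketch of such a bean (the path and the timeout value are assumptions):

  @Configuration
  static class WriterConfig {

    @Bean
    public TextFileWriter writer(org.apache.hadoop.conf.Configuration hadoopConfiguration) {
      TextFileWriter writer = new TextFileWriter(hadoopConfiguration, new Path("/tmp/store"), null);
      writer.setIdleTimeout(2000); // close the current file when writes pause
      return writer;
    }
  }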

        writer.setMaxOpenAttempts(fileOpenAttempts);
      }
      storeWriter = writer;
    }
    else {
      TextFileWriter writer = new TextFileWriter(configuration, basePath, codec);
      writer.setIdleTimeout(idleTimeout);
      writer.setInWritingPrefix(inUsePrefix);
      writer.setInWritingSuffix(inUseSuffix);
      writer.setOverwrite(overwrite);
      writer.setFileNamingStrategy(fileNamingStrategy);
      writer.setRolloverStrategy(rolloverStrategy);
      if (beanFactory != null) {
        writer.setBeanFactory(beanFactory);
      }
      if (fileOpenAttempts > 0) {
        writer.setMaxOpenAttempts(fileOpenAttempts);
      }
      storeWriter = writer;
    }
    // the writer was created programmatically, so honor the InitializingBean
    // contract that the container would otherwise handle
    if (storeWriter instanceof InitializingBean) {
      ((InitializingBean) storeWriter).afterPropertiesSet();
      // ...

    EventDrivenConsumer adapter = context.getBean("adapter", EventDrivenConsumer.class);
    HdfsStoreMessageHandler handler = (HdfsStoreMessageHandler) new DirectFieldAccessor(adapter).getPropertyValue("handler");
    DirectFieldAccessor handlerAccessor = new DirectFieldAccessor(handler);
    assertEquals(false, handlerAccessor.getPropertyValue("autoStartup"));

    TextFileWriter storeWriter = (TextFileWriter) handlerAccessor.getPropertyValue("storeWriter");
    assertNotNull(storeWriter);

    Configuration configuration = (Configuration) new DirectFieldAccessor(storeWriter).getPropertyValue("configuration");
    assertEquals(context.getBean("hadoopConfiguration"), configuration);
    // ...
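DirectFieldAccessor, from spring-beans, reads fields directly when no getter exists, which is how these assertions inspect the handler and writer internals. The same trick works for any other writer setting; for instance (the field name here is an assumption):

    Object idleTimeout = new DirectFieldAccessor(storeWriter).getPropertyValue("idleTimeout");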
