Package org.springframework.data.hadoop.store

Examples of org.springframework.data.hadoop.store.StoreException
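
Every snippet below constructs StoreException either with a message alone or with a message plus a root cause, and none of the enclosing methods declares it in a throws clause, so it is an unchecked (runtime) exception. A minimal, self-contained sketch of the basic pattern (the class and messages here are illustrative, not taken from the project):

import org.springframework.data.hadoop.store.StoreException;

public class StoreExceptionDemo {

  public static void main(String[] args) {
    try {
      // Message-only constructor, as used by the lookup guards below.
      throw new StoreException("Unable to locate dataset for target class " + String.class.getName());
    } catch (StoreException e) {
      // Unchecked, so handling is optional; the (message, cause) form also appears below.
      System.err.println("Store operation failed: " + e.getMessage());
    }
  }
}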


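Fail-fast validation in a setter: every DatasetDefinition must carry a target class, since the dataset name is derived from it: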
  public void setDatasetDefinitions(Collection<DatasetDefinition> datasetDefinitions) {
    for (DatasetDefinition def : datasetDefinitions) {
      if (def.getTargetClass() != null) {
        this.datasetDefinitions.put(getDatasetName(def.getTargetClass()), def);
      } else {
        throw new StoreException("Target class is required for dataset definitions, invalid definition: " + def);
      }
    }
  }


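A lookup-or-throw guard in a public read method: if no DatasetDescriptor resolves for the requested type, the call fails with a StoreException; otherwise it dispatches on the storage format: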
  @Override
  public <T> Collection<T> read(Class<T> targetClass) {
    DatasetDescriptor descriptor = getDatasetDescriptor(targetClass);
    if (descriptor == null) {
      throw new StoreException("Unable to locate dataset for target class " + targetClass.getName());
    }
    if (Formats.PARQUET.equals(descriptor.getFormat())) {
      return readGenericRecords(targetClass, null);
    } else {
      return readPojo(targetClass, null);
    }
  }

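The overload taking a ViewCallback applies the same guard and threads the callback through: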
  @Override
  public <T> Collection<T> read(Class<T> targetClass, ViewCallback viewCallback) {
    DatasetDescriptor descriptor = getDatasetDescriptor(targetClass);
    if (descriptor == null) {
      throw new StoreException("Unable to locate dataset for target class " + targetClass.getName());
    }
    if (Formats.PARQUET.equals(descriptor.getFormat())) {
      return readGenericRecords(targetClass, viewCallback);
    } else {
      return readPojo(targetClass, viewCallback);
    }
  }

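The private read helpers repeat the guard; when a ViewCallback is supplied, the reader is created in the else branch, which is truncated in this excerpt: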
  private <T> void readWithCallback(Class<T> targetClass, RecordCallback<T> callback, ViewCallback viewCallback) {
    Dataset<T> dataset = DatasetUtils.getDataset(dsFactory, targetClass);
    if (dataset == null) {
      throw new StoreException("Unable to locate dataset for target class " + targetClass.getName());
    }
    DatasetReader<T> reader = null;
    if (viewCallback == null) {
      reader = dataset.newReader();
    } else {
      // ... remainder of the snippet elided in the source

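readPojo opens with the identical lookup-or-throw pattern: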
  private <T> Collection<T> readPojo(Class<T> targetClass, ViewCallback viewCallback) {
    Dataset<T> dataset = DatasetUtils.getDataset(dsFactory, targetClass);
    if (dataset == null) {
      throw new StoreException("Unable to locate dataset for target class " + targetClass.getName());
    }
    DatasetReader<T> reader = null;
    if (viewCallback == null) {
      reader = dataset.newReader();
    } else {
      // ... remainder of the snippet elided in the source

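A mid-method excerpt from the read loop: checked reflection exceptions are rethrown as StoreException with the original cause attached, and the reader is closed in a finally block: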
            }
          }
          results.add(data);
        }
      } catch (InstantiationException e) {
        throw new StoreException("Unable to read records for class: " + targetClass.getName(), e);
      } catch (IllegalAccessException e) {
        throw new StoreException("Unable to read records for class: " + targetClass.getName(), e);
      } finally {
        reader.close();
      }
    }
    return results;
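
On Java 7 and later, the two identical catch blocks can collapse into a multi-catch. A minimal, self-contained sketch of the same wrap-and-rethrow pattern (the class and method are illustrative, not from the project):

import org.springframework.data.hadoop.store.StoreException;

public final class ReflectiveRecordFactory {

  // Same wrap-and-rethrow pattern as the excerpt above, using a Java 7+ multi-catch.
  public static <T> T newRecord(Class<T> targetClass) {
    try {
      return targetClass.newInstance();
    } catch (InstantiationException | IllegalAccessException e) {
      throw new StoreException("Unable to read records for class: " + targetClass.getName(), e);
    }
  }
}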

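A write-path excerpt wraps IOException the same way; the flush stays inside the try, while a failure to close in the finally block is deliberately swallowed: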
      for (T rec : records) {
        writer.write(rec);
      }
      writer.flush();
    } catch (IOException e) {
      throw new StoreException("Error writing " + pojoClass.getName(), e);
    } finally {
      try {
        writer.close();
      } catch (IOException ignore) {}
    }

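When a writer cannot secure an output path, a do/while loop retries up to maxOpenAttempts and then gives up with a StoreException naming the last path tried: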
      }

    } while (++openAttempt < maxOpenAttempts);

    if (wout == null) {
      throw new StoreException("We've reached maxOpenAttempts=" + maxOpenAttempts
          + " to find suitable output path. Last path tried was path=[" + p + "]");
    }

    log.info("Creating output for path " + p);
    holder.setPath(p);
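
The shape of that loop, reduced to its essentials (an illustrative skeleton; tryOpen() stands in for whatever single open attempt the real writer performs):

import java.io.OutputStream;

import org.springframework.data.hadoop.store.StoreException;

abstract class RetryingOpenSupport {

  // Illustrative placeholder: one open attempt, returning null when the candidate path is unusable.
  protected abstract OutputStream tryOpen(int attempt);

  protected OutputStream openWithRetries(int maxOpenAttempts) {
    OutputStream out = null;
    int openAttempt = 0;
    do {
      out = tryOpen(openAttempt);
    } while (out == null && ++openAttempt < maxOpenAttempts);
    if (out == null) {
      throw new StoreException("We've reached maxOpenAttempts=" + maxOpenAttempts
          + " to find suitable output path");
    }
    return out;
  }
}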

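Format validation at write time: this Avro-only writer rejects any other format with a StoreException naming both the offending format and the writer class: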
        if (Formats.AVRO.getName().equals(datasetDefinition.getFormat().getName())) {
          Dataset<T> dataset =
              DatasetUtils.getOrCreateDataset(datasetRepositoryFactory, datasetDefinition, entityClass, entityClass);
          writer = dataset.newWriter();
        } else {
          throw new StoreException("Invalid format " + datasetDefinition.getFormat() +
              " specified, you must use 'avro' with " + this.getClass().getSimpleName() + ".");
        }
      }
    }
    writer.write(entity);

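On the read side, a nonexistent input path is surfaced as a StoreException before any stream is opened: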
    Path inputPath = getPath();
    StreamsHolder<InputStream> holder = new StreamsHolder<InputStream>();
    final FileSystem fs = inputPath.getFileSystem(getConfiguration());
    Path p = inputPath.isAbsolute() ? inputPath : new Path(getPath(), inputPath);
    if (!fs.exists(p)) {
      throw new StoreException("Path " + p + " does not exist");
    }
    if (!isCompressed()) {
      if (getSplit() == null) {
        // no codec, no split
        InputStream input = fs.open(p);
        // ... remainder of the snippet elided in the source
