Package: org.springframework.data.hadoop.store

Examples of org.springframework.data.hadoop.store.StoreException


  public void setFormat(String format) {
    Assert.notNull(format, "The format can't be null");
    try {
      this.format = Formats.fromString(format);
    } catch (IllegalArgumentException e) {
      throw new StoreException("Invalid format '" + format + "' specified", e);
    }
  }
View Full Code Here


          Dataset<GenericRecord> dataset =
              DatasetUtils.getOrCreateDataset(datasetRepositoryFactory, datasetDefinition, entityClass, GenericRecord.class);
          writer = dataset.newWriter();
          schema = dataset.getDescriptor().getSchema();
        } else {
          throw new StoreException("Invalid format " + datasetDefinition.getFormat() +
              " specified, you must use 'parquet' with " + this.getClass().getSimpleName() + ".");
        }
      }
    }
    GenericRecordBuilder builder = new GenericRecordBuilder(schema);
    BeanWrapper beanWrapper = PropertyAccessorFactory.forBeanPropertyAccess(entity);
    for (Schema.Field f : schema.getFields()) {
      if (beanWrapper.isReadableProperty(f.name())) {
        Schema fieldSchema = f.schema();
        if (f.schema().getType().equals(Schema.Type.UNION)) {
          for (Schema s : f.schema().getTypes()) {
            if (!s.getName().equals("null")) {
              fieldSchema = s;
            }
          }
        }
        if (fieldSchema.getType().equals(Schema.Type.RECORD)) {
          throw new StoreException("Nested record currently not supported for field: " + f.name() +
              " of type: " + beanWrapper.getPropertyDescriptor(f.name()).getPropertyType().getName());
        } else {
          if (fieldSchema.getType().equals(Schema.Type.BYTES)) {
            ByteBuffer buffer = null;
            Object value = beanWrapper.getPropertyValue(f.name());
            if (value == null || value instanceof byte[]) {
              if(value != null) {
                byte[] bytes = (byte[]) value;
                buffer = ByteBuffer.wrap(bytes);
              }
              builder.set(f.name(), buffer);
            } else {
              throw new StoreException("Don't know how to handle " + value.getClass() + " for " + fieldSchema);
            }
          } else {
            builder.set(f.name(), beanWrapper.getPropertyValue(f.name()));
            }
        }
      }
    }
    try {
      writer.write(builder.build());
    } catch (ClassCastException cce) {
      throw new StoreException("Failed to write record with schema: " +
          schema, cce);
    }
  }
View Full Code Here

  public OutputContext getOutputContext() {
    if (!internalInitDone) {
      try {
        initOutputContext();
      } catch (Exception e) {
        throw new StoreException("Store output context not yet initialized", e);
      }
    }
    return outputContext;
  }
View Full Code Here

      p = getPath();
    }

    // check for file without inuse prefix/suffix
    if (isFileWriteable(p)) {
      throw new StoreException("Path [" + p + "] exists and overwritten not allowed");
    }

    String name = (StringUtils.hasText(prefix) ? prefix : "") + p.getName()
        + (StringUtils.hasText(suffix) ? suffix : "");

    p = new Path(p.getParent(), name);
    // check for file with inuse prefix/suffix
    if (isFileWriteable(p)) {
      throw new StoreException("Path [" + p + "] exists and overwritten not allowed");
    }
    return p;
  }
View Full Code Here

      boolean succeed;
      try {
        fs.delete(toPath, false);
        succeed = fs.rename(path, toPath);
      } catch (Exception e) {
        throw new StoreException("Failed renaming from " + path + " to " + toPath, e);
      }
      if (!succeed) {
        throw new StoreException("Failed renaming from " + path + " to " + toPath + " because hdfs returned false");
      }
    }
    catch (IOException e) {
      log.error("Error renaming file", e);
      throw new StoreException("Error renaming file", e);
    }
  }
View Full Code Here

        System.out.println("Processing " + fileDir + " ...");
    File f = new File(fileDir);
        try {
            processFile(f);
        } catch (IOException e) {
            throw new StoreException("Error writing FileInfo", e);
        } finally {
            close();
        }
        countFileInfoEntries();
        System.out.println("Done!");
View Full Code Here

  private void close() {
        try {
            writer.close();
        } catch (IOException e) {
            throw new StoreException("Error closing FileInfo", e);
        }
    }
View Full Code Here

        Expression e = parser.parseExpression(expr);
        psb = e.getValue(ctx, PartitionStrategy.Builder.class);
      }
      catch (SpelParseException spe) {
        if (!expr.trim().endsWith(")")) {
          throw new StoreException("Invalid partitioning expression '" + expr
              + "' -  did you forget the closing parenthesis?", spe);
        }
        else {
          throw new StoreException("Invalid partitioning expression '" + expr + "'!", spe);
        }
      }
      catch (SpelEvaluationException see) {
        throw new StoreException("Invalid partitioning expression '" + expr + "' - failed evaluation!", see);
      }
      catch (NullPointerException npe) {
        throw new StoreException("Invalid partitioning expression '" + expr + "' - was evaluated to null!", npe);
      }
    }
    return psb.build();
  }
View Full Code Here

TOP

Related Classes of org.springframework.data.hadoop.store.StoreException

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and owned by Oracle Inc. Contact coftware#gmail.com.