Package org.kitesdk.data

Examples of org.kitesdk.data.DatasetIOException
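
The snippets below, collected from the Kite SDK codebase, show the recurring pattern behind org.kitesdk.data.DatasetIOException: a checked java.io.IOException raised by a file-system, metadata, or codec operation is wrapped in this unchecked, dataset-aware exception together with a message naming the path or dataset involved.

As a minimal sketch of that pattern (the helper class and method below are illustrative, not part of the Kite API):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.kitesdk.data.DatasetIOException;

public class DatasetPaths {

  // Hypothetical helper: make sure a dataset directory exists, rethrowing any
  // IOException as an unchecked DatasetIOException that carries the path in
  // its message and wraps the original IOException.
  public static FileSystem ensureDirectory(Path path, Configuration conf) {
    try {
      FileSystem fs = path.getFileSystem(conf);
      if (!fs.exists(path)) {
        fs.mkdirs(path);
      }
      return fs;
    } catch (IOException e) {
      throw new DatasetIOException("Cannot access data location: " + path, e);
    }
  }
}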


      case DOUBLE:
        return (T) Double.valueOf(value);
      case STRING:
        try {
          return (T) URLDecoder.decode(value, "UTF-8");
        } catch (UnsupportedEncodingException e) {
          throw new DatasetIOException("Failed to decode value: " + value, e);
        }
      default:
        // otherwise, the value is base64-encoded Avro binary
        byte[] binary = Base64.decodeBase64(value);
        Decoder decoder = DecoderFactory.get()
            .binaryDecoder(new ByteArrayInputStream(binary), null);
        DatumReader<T> reader = ReflectData.get().createDatumReader(schema);
        try {
          return reader.read(null, decoder);
        } catch (IOException e) {
          throw new DatasetIOException("Cannot decode Avro value", e);
        }
    }
  }
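
The fragment above decodes a string back into a schema-typed value: primitives are parsed directly, strings are URL-decoded, and any other type is read as base64-encoded Avro binary. For orientation, a sketch of the matching encode direction under the same conventions (this is an assumption about the format, not Kite's actual encoder):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;

import org.apache.avro.Schema;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.reflect.ReflectData;
import org.apache.commons.codec.binary.Base64;
import org.kitesdk.data.DatasetIOException;

public class ValueEncoding {

  // Mirror of the decode logic shown above: doubles become plain strings,
  // strings are URL-encoded, everything else is Avro binary encoded and
  // then wrapped in base64.
  public static <T> String encode(T value, Schema schema) {
    switch (schema.getType()) {
      case DOUBLE:
        return value.toString();
      case STRING:
        try {
          return URLEncoder.encode(value.toString(), "UTF-8");
        } catch (UnsupportedEncodingException e) {
          throw new DatasetIOException("Failed to encode value: " + value, e);
        }
      default:
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        Encoder encoder = EncoderFactory.get().binaryEncoder(out, null);
        DatumWriter<T> writer = ReflectData.get().createDatumWriter(schema);
        try {
          writer.write(value, encoder);
          encoder.flush();
        } catch (IOException e) {
          throw new DatasetIOException("Cannot encode Avro value", e);
        }
        return Base64.encodeBase64String(out.toByteArray());
    }
  }
}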


      // build a FileSystemDatasetRepository rooted at the matched repository URI
      Configuration conf = DefaultConfiguration.get();
      FileSystem fs;
      try {
        fs = FileSystem.get(fileSystemURI(match), conf);
      } catch (IOException ex) {
        throw new DatasetIOException("Could not get a FileSystem", ex);
      }
      return new FileSystemDatasetRepository.Builder()
          .configuration(new Configuration(conf)) // make a modifiable copy
          .rootDirectory(fs.makeQualified(root))
          .build();

    // make sure the descriptor's data directory exists, creating it if needed
    FileSystem fs = null;

    try {
      fs = dataPath.getFileSystem(conf);
    } catch (IOException e) {
      throw new DatasetIOException(
          "Cannot get FileSystem for descriptor: " + descriptor, e);
    }

    try {
      if (!fs.exists(dataPath)) {
        fs.mkdirs(dataPath);
      }
    } catch (IOException e) {
      throw new DatasetIOException("Cannot access data location", e);
    }
  }

          }
        }
      }
      return deleted;
    } catch (IOException ex) {
      throw new DatasetIOException("Could not cleanly delete path:" + path, ex);
    }
  }

    // resolve the FileSystem for the repository root and fully qualify the root path
    this.conf = conf;
    try {
      this.rootFileSystem = rootDirectory.getFileSystem(conf);
      this.rootDirectory = rootFileSystem.makeQualified(rootDirectory);
    } catch (IOException ex) {
      throw new DatasetIOException("Cannot get FileSystem for root path", ex);
    }
  }

    // load the stored descriptor properties and feed them into the DatasetDescriptor builder
    try {
      inputStream = rootFileSystem.open(descriptorPath);
      properties.load(inputStream);
      threw = false;
    } catch (IOException e) {
      throw new DatasetIOException(
          "Unable to load descriptor file:" + descriptorPath +
          " for dataset:" + name, e);
    } finally {
      try {
        Closeables.close(inputStream, threw);
      } catch (IOException e) {
        throw new DatasetIOException("Cannot close", e);
      }
    }

    if (properties.containsKey(FORMAT_FIELD_NAME)) {
      builder.format(Accessor.getDefault().newFormat(
          properties.getProperty(FORMAT_FIELD_NAME)));
    }
    if (properties.containsKey(COMPRESSION_TYPE_FIELD_NAME)) {
      builder.compressionType(properties.getProperty(COMPRESSION_TYPE_FIELD_NAME));
    }
    if (properties.containsKey(PARTITION_EXPRESSION_FIELD_NAME)) {
      builder.partitionStrategy(Accessor.getDefault().fromExpression(properties
          .getProperty(PARTITION_EXPRESSION_FIELD_NAME)));
    }
    Path schemaPath = new Path(metadataPath, SCHEMA_FILE_NAME);
    try {
      builder.schemaUri(rootFileSystem.makeQualified(schemaPath).toUri());
    } catch (IOException e) {
      throw new DatasetIOException(
          "Unable to load schema file:" + schemaPath + " for dataset:" + name, e);
    }

    final Path location;
    if (properties.containsKey(LOCATION_FIELD_NAME)) {

            "Descriptor directory already exists: " + metadataLocation);
      }
      // create the directory so that update can do the rest of the work
      rootFileSystem.mkdirs(metadataLocation);
    } catch (IOException e) {
      throw new DatasetIOException(
          "Unable to create metadata directory: " + metadataLocation +
          " for dataset: " + name, e);
    }

    writeDescriptor(rootFileSystem, metadataLocation, name, descriptor);

        }
      } else {
        return false;
      }
    } catch (IOException e) {
      throw new DatasetIOException(
          "Unable to find or delete metadata directory:" + metadataDirectory +
          " for dataset:" + name, e);
    }
  }

      }
    } catch (FileNotFoundException ex) {
      // the repo hasn't created any files yet
      return namespaces;
    } catch (IOException ex) {
      throw new DatasetIOException("Could not list namespaces", ex);
    }
    return namespaces;
  }

      }
    } catch (FileNotFoundException e) {
      // if the root directory doesn't exist, then no namespace directories do
      return datasets;
    } catch (IOException ex) {
      throw new DatasetIOException("Could not list datasets", ex);
    }

    try {
      FileStatus[] entries = rootFileSystem.listStatus(
          new Path(rootDirectory, namespace),
          PathFilters.notHidden());
      for (FileStatus entry : entries) {
        if (entry.isDir() && isDataset(entry.getPath())) {
          // may want to add a check: !RESERVED_NAMES.contains(name)
          datasets.add(entry.getPath().getName());
        }
      }

    } catch (FileNotFoundException ex) {
      // the repo hasn't created any files yet
      return datasets;
    } catch (IOException ex) {
      throw new DatasetIOException("Could not list datasets", ex);
    }
    return datasets;
  }
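
Because DatasetIOException is unchecked (it extends DatasetException, a RuntimeException), none of the methods above declare it; callers that want to react to I/O failures catch it explicitly. A minimal caller-side sketch (the dataset URI and the use of GenericRecord are hypothetical):

import org.apache.avro.generic.GenericRecord;

import org.kitesdk.data.Dataset;
import org.kitesdk.data.DatasetIOException;
import org.kitesdk.data.DatasetReader;
import org.kitesdk.data.Datasets;

public class ReadWithRecovery {

  public static void main(String[] args) {
    try {
      // Hypothetical dataset URI; any of the repository or metadata operations
      // shown above can surface a DatasetIOException from this call chain.
      Dataset<GenericRecord> events =
          Datasets.load("dataset:hdfs:/data/events", GenericRecord.class);
      try (DatasetReader<GenericRecord> reader = events.newReader()) {
        for (GenericRecord record : reader) {
          System.out.println(record);
        }
      }
    } catch (DatasetIOException e) {
      // log the message; the wrapped IOException is the underlying cause
      System.err.println("I/O failure while reading the dataset: " + e.getMessage());
    }
  }
}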
