Package org.kitesdk.data.spi

Examples of org.kitesdk.data.spi.DatasetRepository
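
The fragments below come from Kite SDK tests and CLI commands that exercise the DatasetRepository API: resolving a repository from a repo: URI, creating and deleting datasets by namespace and name, and loading the same datasets through dataset: URIs. As orientation, here is a minimal, hedged sketch of that lifecycle against a local file-based repository; the repository URI, namespace, dataset name, and Avro schema are illustrative and not taken from the fragments.

    import org.apache.avro.generic.GenericRecord;
    import org.kitesdk.data.Dataset;
    import org.kitesdk.data.DatasetDescriptor;
    import org.kitesdk.data.spi.DatasetRepositories;
    import org.kitesdk.data.spi.DatasetRepository;

    public class RepositoryLifecycleSketch {
      public static void main(String[] args) {
        // Resolve a repository from its repo: URI (here, a local filesystem repository).
        DatasetRepository repo = DatasetRepositories.repositoryFor("repo:file:/tmp/data");

        // A descriptor binds a schema (and optionally format/partitioning) to a dataset.
        DatasetDescriptor descriptor = new DatasetDescriptor.Builder()
            .schemaLiteral("{\"type\":\"record\",\"name\":\"User\",\"fields\":["
                + "{\"name\":\"id\",\"type\":\"long\"},"
                + "{\"name\":\"name\",\"type\":\"string\"}]}")
            .build();

        // Datasets are addressed by (namespace, name) within the repository.
        repo.delete("ns", "users");              // ensure it does not already exist
        repo.create("ns", "users", descriptor);  // create
        Dataset<GenericRecord> users = repo.load("ns", "users");
        System.out.println(users.getDescriptor().getLocation());
        repo.delete("ns", "users");              // clean up
      }
    }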


    command.datasets = Lists.newArrayList(source, dest);

    int rc = command.run();
    Assert.assertEquals("Should return success", 0, rc);

    DatasetRepository repo = DatasetRepositories.repositoryFor("repo:" + repoUri);
    FileSystemDataset<GenericData.Record> ds = (FileSystemDataset<GenericData.Record>)
        repo.<GenericData.Record>load("default", dest);
    int size = DatasetTestUtilities.datasetSize(ds);
    Assert.assertEquals("Should contain copied records", 6, size);

    Assert.assertEquals("Should produce 3 files",


    int rc = command.run();
    Assert.assertEquals("Should return success", 0, rc);

    DatasetRepository repo = DatasetRepositories.repositoryFor("repo:" + repoUri);
    FileSystemDataset<GenericData.Record> ds = (FileSystemDataset<GenericData.Record>)
        repo.<GenericData.Record>load("default", "dest_partitioned");
    int size = DatasetTestUtilities.datasetSize(ds);
    Assert.assertEquals("Should contain copied records", 6, size);

    Assert.assertEquals("Should produce 2 partitions",
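
The two fragments above check row counts with DatasetTestUtilities.datasetSize(ds). A rough equivalent, shown only as a hedged sketch (assuming imports for org.kitesdk.data.DatasetReader and org.apache.avro.generic.GenericData, and the ds variable from the fragments), iterates the dataset with a reader:

    DatasetReader<GenericData.Record> reader = ds.newReader();
    int count = 0;
    try {
      // Count records by iterating the reader; datasetSize(...) is assumed to do the same.
      while (reader.hasNext()) {
        reader.next();
        count += 1;
      }
    } finally {
      reader.close();
    }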


  @Test
  public void testExternal() {
    DatasetRepository repo = DatasetRepositories.repositoryFor("repo:hive:/tmp/data");
    repo.delete("ns", "test");
    repo.create("ns", "test", descriptor);

    Dataset<GenericRecord> ds = Datasets.load("dataset:hive:/tmp/data/ns/test");

    Assert.assertNotNull("Should load dataset", ds);
    Assert.assertTrue(ds instanceof FileSystemDataset);
    Assert.assertEquals("Locations should match",
        URI.create("hdfs://" + hdfsAuth + "/tmp/data/ns/test"),
        ds.getDescriptor().getLocation());
    Assert.assertEquals("Descriptors should match",
        repo.load("ns", "test").getDescriptor(), ds.getDescriptor());

    repo.delete("ns", "test");
  }

  @Test
  public void testExternalRoot() {
    DatasetRepository repo = DatasetRepositories.repositoryFor("repo:hive:/");
    repo.delete("ns", "test");
    repo.create("ns", "test", descriptor);

    Dataset<GenericRecord> ds = Datasets.load("dataset:hive:/ns/test");

    Assert.assertNotNull("Should load dataset", ds);
    Assert.assertTrue(ds instanceof FileSystemDataset);
    Assert.assertEquals("Locations should match",
        URI.create("hdfs://" + hdfsAuth + "/ns/test"),
        ds.getDescriptor().getLocation());
    Assert.assertEquals("Descriptors should match",
        repo.load("ns", "test").getDescriptor(), ds.getDescriptor());

    repo.delete("ns", "test");
  }

  @Test
  public void testExternalRelative() {
    DatasetRepository repo = DatasetRepositories.repositoryFor("repo:hive:data");
    repo.delete("ns", "test");
    repo.create("ns", "test", descriptor);

    Dataset<GenericRecord> ds = Datasets.load("dataset:hive:data/ns/test");

    Assert.assertNotNull("Should load dataset", ds);
    Assert.assertTrue(ds instanceof FileSystemDataset);
    Path cwd = getDFS().makeQualified(new Path("."));
    Assert.assertEquals("Locations should match",
        new Path(cwd, "data/ns/test").toUri(), ds.getDescriptor().getLocation());
    Assert.assertEquals("Descriptors should match",
        repo.load("ns", "test").getDescriptor(), ds.getDescriptor());

    repo.delete("ns", "test");
  }

  @Test
  public void testManaged() {
    DatasetRepository repo = DatasetRepositories.repositoryFor("repo:hive");
    repo.delete("ns", "test");
    repo.create("ns", "test", descriptor);

    Dataset<GenericRecord> ds = Datasets.load("dataset:hive?dataset=test&namespace=ns");

    Assert.assertNotNull("Should load dataset", ds);
    Assert.assertTrue(ds instanceof FileSystemDataset);
    Assert.assertEquals("Descriptors should match",
        repo.load("ns", "test").getDescriptor(), ds.getDescriptor());

    repo.delete("ns", "test");
  }
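
Taken together, the four Hive tests above pair a repository URI with the dataset URI that resolves to the same dataset: external repositories are rooted at a storage path, while the managed repository addresses datasets with query parameters. A condensed restatement, using values copied from the fragments and assuming the same Hive setup as the tests:

    // External repository rooted at a path: the dataset URI embeds the same path.
    DatasetRepository external = DatasetRepositories.repositoryFor("repo:hive:/tmp/data");
    Dataset<GenericRecord> byPath = Datasets.load("dataset:hive:/tmp/data/ns/test");

    // Managed (metastore-owned) repository: the dataset URI uses query parameters.
    DatasetRepository managed = DatasetRepositories.repositoryFor("repo:hive");
    Dataset<GenericRecord> byName = Datasets.load("dataset:hive?dataset=test&namespace=ns");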

  @Test
  public void testManagedDatasetCreation() {
    DatasetRepository repo = DatasetRepositories.repositoryFor("repo:hive");
    repo.delete("ns", "records"); // ensure it does not already exist
    repo.create("ns", "records", descriptor);
    repo.delete("ns", "records"); // clean up
  }

  @Test
  public void testExternalDatasetCreation() {
    DatasetRepository repo = DatasetRepositories.repositoryFor("repo:hive:target/");
    repo.delete("ns", "records"); // ensure it does not already exist
    repo.create("ns", "records", descriptor);
    repo.delete("ns", "records"); // clean up
  }
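
Both creation tests reuse a descriptor that is defined outside these fragments. A hedged example of what such a descriptor might look like; the schema and the hash partition strategy on an id field are illustrative assumptions, not taken from the tests:

    DatasetDescriptor descriptor = new DatasetDescriptor.Builder()
        .schemaLiteral("{\"type\":\"record\",\"name\":\"Rec\",\"fields\":["
            + "{\"name\":\"id\",\"type\":\"long\"}]}")
        // Illustrative only: bucket records by id into two partitions.
        .partitionStrategy(new PartitionStrategy.Builder()
            .hash("id", 2)
            .build())
        .build();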

  @Override
  public int run() throws IOException {
    DatasetRepository repo = getDatasetRepository();

    if (datasets == null || datasets.isEmpty()) {
      throw new IllegalArgumentException("No dataset names were specified.");
    }

    for (String datasetUriOrName : datasets) {
      // Each entry may be either a full dataset URI or a bare dataset name.
      if (isDataUri(datasetUriOrName)) {
        Datasets.delete(datasetUriOrName);
      } else {
        repo.delete(namespace, datasetUriOrName);
      }
      console.debug("Deleted dataset {}", datasetUriOrName);
    }

    return 0;
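
The loop above accepts either full dataset URIs or bare dataset names. A hedged illustration of the two paths it takes, reusing the Hive URI and the namespace/name pair from the earlier fragments:

    Datasets.delete("dataset:hive:/tmp/data/ns/test");  // URI form, handled by Datasets.delete
    repo.delete("ns", "test");                          // bare name, handled by the repository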

    } else {
      LOG.warn(
          "kite.datasetName is deprecated, instead use kite.uri=<dataset-uri>");
      Preconditions.checkArgument(datasetName != null,
          "kite.datasetName is required if kite.uri is not used");
      DatasetRepository repo = getDatasetRepository();
      repo.delete(datasetNamespace, datasetName);
    }
  }
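
The warning above recommends replacing the deprecated kite.datasetName property with a single kite.uri property. A hedged sketch of that migration, assuming these are ordinary Hadoop Configuration keys (import org.apache.hadoop.conf.Configuration assumed; the URI value is illustrative):

    Configuration conf = new Configuration();
    // Deprecated: dataset identified by name (plus a separately configured namespace).
    conf.set("kite.datasetName", "records");
    // Preferred: a single dataset URI carries namespace, name, and storage location.
    conf.set("kite.uri", "dataset:hive:/tmp/data/ns/records");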
