Package com.hp.hpl.jena.query

Examples of com.hp.hpl.jena.query.Dataset
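The snippets below are collected from several projects. As a point of reference, here is a minimal, self-contained sketch of basic Dataset usage written against the same legacy com.hp.hpl.jena (Jena 2.x) API; the class name, graph name and property URI are illustrative only and do not come from the snippets.

    import com.hp.hpl.jena.query.Dataset ;
    import com.hp.hpl.jena.query.DatasetFactory ;
    import com.hp.hpl.jena.rdf.model.Model ;
    import com.hp.hpl.jena.rdf.model.ModelFactory ;

    public class DatasetBasics
    {
        public static void main(String... args)
        {
            // An in-memory dataset: one default graph plus any number of named graphs.
            Dataset ds = DatasetFactory.createMem() ;

            Model m = ModelFactory.createDefaultModel() ;
            m.createResource("http://example/x")
             .addProperty(m.createProperty("http://example/p"), "value") ;

            // Attach the model under a graph name, then look it up again.
            ds.addNamedModel("http://example/g", m) ;
            System.out.println(ds.containsNamedModel("http://example/g")) ;   // true
            System.out.println(ds.getNamedModel("http://example/g").size()) ; // 1
        }
    }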


    {
        String graphName = "http://example/" ;
        Triple triple = SSE.parseTriple("(<x> <y> <z>)") ;
        Node gn = com.hp.hpl.jena.graph.NodeFactory.createURI(graphName) ;

        Dataset ds = graphLocation.getDataset() ;
        // ?? See TupleLib.
        ds.asDatasetGraph().deleteAny(gn, null, null, null) ;
       
        Graph g2 = ds.asDatasetGraph().getGraph(gn) ;
       
//        if ( true )
//        {
//            PrintStream ps = System.err ;
//            ps.println("Dataset names: ") ;
//            Iter.print(ps, ds.listNames()) ;
//        }
       
        // Graphs only exist if they have a triple in them
        assertFalse(ds.containsNamedModel(graphName)) ;
       
        List<String> names = Iter.toList(ds.listNames()) ;
        assertEquals(0, names.size()) ;
        assertEquals(0, ds.asDatasetGraph().size()) ;
    }
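
In the deleteAny call above, the four arguments form a quad pattern and the wildcard positions match anything, so the call removes every quad stored in the named graph gn. The test passes null for the wildcard positions; Node.ANY expresses the same thing explicitly. A small sketch (same ds and gn as above; DatasetGraph and Node imports assumed):

        // Sketch only: remove every quad whose graph component is gn.
        DatasetGraph dsg = ds.asDatasetGraph() ;
        dsg.deleteAny(gn, Node.ANY, Node.ANY, Node.ANY) ;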


   
    @Test public void dataset5()
    {
        String graphName = "http://example/" ;
        Triple triple = SSE.parseTriple("(<x> <y> <z>)") ;
        Dataset ds = graphLocation.getDataset() ;
        Graph g2 = ds.asDatasetGraph().getGraph(com.hp.hpl.jena.graph.NodeFactory.createURI(graphName)) ;
        // Graphs only exist if they have a triple in them
        g2.add(triple) ;
       
        assertTrue(ds.containsNamedModel(graphName)) ;
        List<String> x = Iter.toList(ds.listNames()) ;
        List<String> y = Arrays.asList(graphName) ;
        assertEquals(x,y) ;
       
        assertEquals(1, ds.asDatasetGraph().size()) ;
    }
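
The same effect as dataset5 can be had purely through the Dataset/Model API, without dropping down to DatasetGraph. A hedged sketch (same graph name as the tests; the statement URIs are illustrative):

        Model m = ds.getNamedModel("http://example/") ;
        m.add(m.createResource("http://example/x"),
              m.createProperty("http://example/y"),
              m.createResource("http://example/z")) ;

        // The named graph is now non-empty, so it is listed and counted.
        System.out.println(ds.containsNamedModel("http://example/")) ;   // true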

   
    private void createTest(String filename, Resource type)
    {
        Object thing = AssemblerUtils.build(filename, type) ;
        assertTrue(thing instanceof Dataset) ;
        Dataset ds = (Dataset)thing ;
        assertTrue(ds.asDatasetGraph() instanceof DatasetGraphTransaction) ;
        assertTrue(ds.supportsTransactions()) ;
        ds.close();
       
    }
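
Because the assembled dataset reports supportsTransactions(), it can be updated inside an explicit transaction. A sketch of the write path, assuming a TDB-backed dataset obtained via TDBFactory (the location string and statement values are placeholders):

    public static void writeInTransaction(String location)
    {
        Dataset ds = TDBFactory.createDataset(location) ;
        ds.begin(ReadWrite.WRITE) ;
        try {
            Model m = ds.getDefaultModel() ;
            m.add(m.createResource("http://example/s"),
                  m.createProperty("http://example/p"),
                  "o") ;
            ds.commit() ;
        } finally {
            ds.end() ;   // releases the transaction; harmless after commit
        }
    }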

        assertTrue(dsgTxn2.contains(q)) ;
        assertTrue(dsgTxn2.contains(q1)) ;
        dsgTxn2.end() ;

        // Check API methods work.
        Dataset ds = TDBFactory.createDataset(loc) ;
        ds.begin(ReadWrite.READ) ;
        Model m = (q.isDefaultGraph() ? ds.getDefaultModel() : ds.getNamedModel("g")) ;
        assertEquals( nonTxnData ? 2 : 1 , m.size()) ;
        ds.end() ;
    }

    /** Create a memory Dataset and read in some data.
     * @see #read(Dataset,String)
     */
    public static Dataset loadDataset(String uri)
    {
        Dataset ds = createDataset() ;
        read(ds, uri) ;
        return ds ;
    }

    /** Create a memory Dataset and read in some data.
     * @see #read(Dataset,String,Lang)
     */
    public static Dataset loadDataset(String uri, Lang lang)
    {
        Dataset ds = createDataset() ;
        read(ds, uri, lang) ;
        return ds ;
    }
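
These helpers match the loadDataset convenience methods on RIOT's RDFDataMgr (the enclosing class is not shown in the snippet). A usage sketch with placeholder file names:

        Dataset ds1 = RDFDataMgr.loadDataset("data.trig") ;             // syntax guessed from the file extension
        Dataset ds2 = RDFDataMgr.loadDataset("dump.dat", Lang.NQUADS) ; // syntax stated explicitly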

  private void test(String query, String[] expected, int expectedCount) {
    // Parse the query
    Query q = QueryFactory.create(query);

    // Build the dataset view from the query's dataset description; ARQ doesn't do this for us automatically
    Dataset ds = DynamicDatasets.dynamicDataset(q.getDatasetDescription(), this.ds, false);

    // Then execute the query
    QueryExecution exec = QueryExecutionFactory.create(q, ds);

    ResultSet results = exec.execSelect();
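DynamicDatasets.dynamicDataset builds a view of the underlying dataset restricted to the graphs named in the query's dataset description (FROM / FROM NAMED). A hedged sketch of the same pattern outside the test harness; baseDataset stands in for the underlying dataset (this.ds above) and the graph URIs are illustrative:

    // Illustrative only: a query whose dataset description names specific graphs.
    String queryString =
        "SELECT * " +
        "FROM <http://example/g1> " +
        "FROM NAMED <http://example/g2> " +
        "WHERE { { ?s ?p ?o } UNION { GRAPH ?g { ?s ?p ?o } } }" ;
    Query q = QueryFactory.create(queryString) ;
    Dataset view = DynamicDatasets.dynamicDataset(q.getDatasetDescription(), baseDataset, false) ;
    QueryExecution exec = QueryExecutionFactory.create(q, view) ;
    try {
        ResultSetFormatter.out(exec.execSelect()) ;
    } finally {
        exec.close() ;
    }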

    @Test public void writer03() { test("writer-rt-23.trig") ; }
   
    private void test(String filename)
    {
        String displayname = filename.substring(0, filename.lastIndexOf('.')) ;
        Dataset ds = readDataset(filename) ;
        Lang lang = format.getLang() ;

        WriterDatasetRIOT rs = RDFWriterRegistry.getWriterDatasetFactory(format).create(format) ;
        assertEquals(lang, rs.getLang()) ;

        ByteArrayOutputStream out = new ByteArrayOutputStream() ;
        RDFDataMgr.write(out, ds, format) ;
       
        if ( lang == Lang.RDFNULL )
            return ;
       
        ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray()) ;
        String s = StrUtils.fromUTF8bytes(out.toByteArray()) ;
        Dataset ds2 = DatasetFactory.createMem() ;
        try {
            RDFDataMgr.read(ds2, in, lang) ;
        } catch (RiotException ex)
        {
            System.out.println(displayname+" : "+format) ;

{
    static String DIR = "testing/RIOT/Writer" ;
   
    static Dataset readDataset(String filename) {
        String fn = DIR + "/" + filename ;
        Dataset ds = RDFDataMgr.loadDataset(fn) ;
        return ds ;
    }
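
The writer test above round-trips a dataset through a chosen RDFFormat; the same idea in miniature, as a hedged sketch (the format choice is illustrative):

    static void roundTrip(Dataset original)
    {
        ByteArrayOutputStream out = new ByteArrayOutputStream() ;
        RDFDataMgr.write(out, original, RDFFormat.TRIG_PRETTY) ;

        Dataset copy = DatasetFactory.createMem() ;
        RDFDataMgr.read(copy, new ByteArrayInputStream(out.toByteArray()), Lang.TRIG) ;
    }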

      int i = 0;

      for (i = 4; i < data.length; i++) {

        Dataset ds = DatasetStore.create(store);
        Model model = ds.getDefaultModel();

        model.notifyEvent(GraphEvents.startRead);
        try {
          // data layout: [elementName, BEGIN_PROPERTIES, propName, BEGIN_VALUES,
          //               val1, val2, END_PROPERTY_VALUES, BEGIN_PROPERTIES,
          //               propName, BEGIN_VALUES, val1, val2, END_PROPERTY_VALUES]
          // Add the connection tuple Input and Output properties.
          UUIDFactory factory = new UUID_V4_Gen();
          Resource subject = model.createResource(
              AdmireRegistryConstants.PlatformOntologyURI + factory.generate().asUUID());

          i++;
          RDFNode objectST;
          Statement stmPropValue;
          String property = "";
          subject.addProperty(
              model.getProperty(AdmireRegistryConstants.dcterms + "title"), elementName);
          if (peType.equals("Element")) {
            objectST = model.createResource(
                AdmireRegistryConstants.PlatformOntologyURI + "Element");
            stmPropValue = model.createStatement(subject,
                model.getProperty(AdmireRegistryConstants.RDFType), objectST);
          }
          while (!data[i].equals("END_PROPERTY_LIST")) {
            while (!data[i].equals("BEGIN_PROPERTY")) {
              i++;
            }
            i++;
            property = AdmireRegistryConstants.PlatformOntologyURI + data[i];
            while (!data[i].equals("BEGIN_PROPERTY_VALUES")) {
              i++;
            }
            i++;

            while (!data[i].equals("END_PROPERTY_VALUES")) {
              objectST = model.createResource(
                  AdmireRegistryConstants.PlatformOntologyURI + data[i]);
              stmPropValue = model.createStatement(subject,
                  model.createProperty(property), objectST);
              model.add(stmPropValue);
              if (peType.equals("Function")) {
                model.createResource(AdmireRegistryConstants.PlatformOntologyURI + data[i])
                    .addProperty(
                        model.getProperty(AdmireRegistryConstants.PlatformOntologyURI + "ofType"),
                        model.createResource(
                            AdmireRegistryConstants.PlatformOntologyURI + data[i + 1]));
                i++;
                model.add(model.createLiteralStatement((Resource) objectST,
                    model.createProperty(AdmireRegistryConstants.PlatformOntologyURI + "hasIndex"),
                    data[i + 1]));
                i++;
              }
              i++;
            }
            i++;
          }
          // RDFNode objectST = model.createResource(
          //     AdmireRegistryConstants.PlatformOntologyURI + data[i]);
          i++;
          property = AdmireRegistryConstants.PlatformOntologyURI + "hasRepositoryLocation";
          stmPropValue = model.createStatement(subject, model.createProperty(property), data[i]);
          model.add(stmPropValue);
          property = AdmireRegistryConstants.PlatformOntologyURI + "hasStatus";
          stmPropValue = model.createStatement(subject, model.createProperty(property), "active");
          model.add(stmPropValue);
          subject.addProperty(model.getProperty(AdmireRegistryConstants.RDFType),
              model.createResource(AdmireRegistryConstants.PlatformOntologyURI + peType));
          i++;
        } finally {
          model.notifyEvent(GraphEvents.finishRead);
          model.close();
          store.close();
        }
        break;
      }
    } else if (data[0].equals("DELETE_ELEMENT")) {
      String elementName = data[1];
      Dataset ds = DatasetStore.create(store);
      Model model = ds.getDefaultModel();

      model.notifyEvent(GraphEvents.startRead);
      try {
        Resource subject = model
            .getResource(AdmireRegistryConstants.PlatformOntologyURI
