Package org.semanticweb.yars.nx.parser

Examples of org.semanticweb.yars.nx.parser.NxParser


    // Open the input file, transparently decompressing it if it is gzipped.
    if (file.endsWith(".gz")) {
      is = new GZIPInputStream(new BufferedInputStream(new FileInputStream(new File(file))));
    } else {
      is = new BufferedInputStream(new FileInputStream(new File(file)));
    }
    // Create the parser and an iterator over the parsed statements.
    np = new NxParser(is);
    it = np.iterator();
  }
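The snippet above is a fragment; the enclosing method and the declarations of file, is, np, and it are not shown. Below is a minimal, self-contained sketch of the same pattern, assuming only the NxParser(InputStream) constructor and the hasNext()/next() methods that appear throughout these examples; the class name and input file name are hypothetical.

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.zip.GZIPInputStream;

import org.semanticweb.yars.nx.Node;
import org.semanticweb.yars.nx.parser.NxParser;

public class NxParserFileExample {

    public static void main(String[] args) throws IOException {
        String file = "data.nq.gz"; // hypothetical input file
        // Wrap gzipped files in a GZIPInputStream, as in the snippet above.
        InputStream is = file.endsWith(".gz")
                ? new GZIPInputStream(new BufferedInputStream(new FileInputStream(file)))
                : new BufferedInputStream(new FileInputStream(file));
        NxParser np = new NxParser(is);
        while (np.hasNext()) {
            Node[] ns = np.next(); // subject, predicate, object[, context]
            System.out.println(ns[0] + " " + ns[1] + " " + ns[2]);
        }
        is.close();
    }
}

Wrapping the FileInputStream in a BufferedInputStream before the GZIPInputStream, as the original snippet does, avoids many small reads against the file.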


 
  // Uses NxParser to parse N-Quads data from the given stream.
  public static int parseRDFData(final InputStream in, final TripleConsumer tc,
      final String encoding) throws UnsupportedEncodingException {
    // The encoding parameter is ignored, as NxParser does not support it.
    final NxParser nxp = new NxParser(in);
    int number = 0;
    while (nxp.hasNext()) {
      final Node[] ns = nxp.next();
      number++;
      // Report progress every million statements.
      if (number % 1000000 == 0) {
        System.err.println("#triples:" + number);
      }
      try {
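The method body is truncated above; the remaining lines presumably pass each statement to the TripleConsumer, whose interface is not shown here. As a reduced sketch, the counting-and-progress part can stand on its own, with the per-statement handling omitted (class and method names below are hypothetical):

import java.io.IOException;
import java.io.InputStream;

import org.semanticweb.yars.nx.parser.NxParser;

public class StatementCounter {

    // Counts the statements in an N-Triples/N-Quads stream, printing progress
    // every million statements; the per-statement handling is omitted here.
    public static int countStatements(final InputStream in) throws IOException {
        final NxParser nxp = new NxParser(in);
        int number = 0;
        while (nxp.hasNext()) {
            nxp.next(); // advance past the current statement
            number++;
            if (number % 1000000 == 0) {
                System.err.println("#statements: " + number);
            }
        }
        return number;
    }
}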

        size = 0;
        int numeric = 0;
        try
        {
            // Parse the gold-standard file with NxParser.
            FileInputStream is = new FileInputStream(fnGold);
            NxParser nxp = new NxParser(is); // https://code.google.com/p/nxparser/
            while (nxp.hasNext())
            {
                org.semanticweb.yars.nx.Node[] ns = nxp.next();
                // A triple has exactly three nodes: subject, predicate, object.
                if (ns.length == 3)
                {
                    String subj = ns[0].toString();
                    String prop = ns[1].toString();
                    String obj = ns[2].toString();

                        // --> Sieve output for fusion function i has been generated

                        // Collect the fusion function values:
                        FileInputStream is = new FileInputStream(path + output);
                        NxParser nxp = new NxParser(is); // https://code.google.com/p/nxparser/
                        while (nxp.hasNext())
                        {
                            org.semanticweb.yars.nx.Node[] ns = nxp.next();
                            // A quad has four nodes; the fourth is the context/graph.
                            if (ns.length == 4)
                            {
                                String subj = ns[0].toString();
                                String obj = ns[2].toString();
                                String pr = WikiLangPrefix(ns[3].toString());
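Both snippets above branch on ns.length to distinguish triples (three nodes) from quads (four nodes, the fourth being the context). A minimal, self-contained sketch of that pattern, assuming the same NxParser(InputStream) API; the class name and file name are hypothetical:

import java.io.FileInputStream;
import java.io.IOException;

import org.semanticweb.yars.nx.Node;
import org.semanticweb.yars.nx.parser.NxParser;

public class TripleOrQuadExample {

    public static void main(String[] args) throws IOException {
        // Hypothetical mixed N-Triples/N-Quads file.
        FileInputStream is = new FileInputStream("statements.nq");
        NxParser nxp = new NxParser(is);
        while (nxp.hasNext()) {
            Node[] ns = nxp.next();
            String subj = ns[0].toString();
            String prop = ns[1].toString();
            String obj = ns[2].toString();
            if (ns.length == 3) {
                System.out.println("triple: " + subj + " " + prop + " " + obj);
            } else if (ns.length == 4) {
                // The fourth node is the context (named graph) of the quad.
                String ctx = ns[3].toString();
                System.out.println("quad in " + ctx + ": " + subj + " " + prop + " " + obj);
            }
        }
        is.close();
    }
}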

    public static Set<String> getConfusableSurfaceForms(String targetType, File instancesFile, LuceneCandidateSearcher surrogateSearcher) throws IOException, ParseException {
        System.err.println("Getting all surface forms for " + targetType + "s...");
        Set<String> surfaceForms = new HashSet<String>();
        if (!targetType.startsWith(SpotlightConfiguration.DEFAULT_ONTOLOGY_PREFIX))
            targetType = SpotlightConfiguration.DEFAULT_ONTOLOGY_PREFIX + targetType;
        NxParser parser = new NxParser(new FileInputStream(instancesFile));
        while (parser.hasNext()) {
            Node[] triple = parser.next();
            // Keep only instances whose type (the triple's object) matches the target type.
            if (triple[2].toString().equals(targetType)) {
                String targetUri = triple[0].toString().replace(SpotlightConfiguration.DEFAULT_NAMESPACE, "");
                try {
                    Set<SurfaceForm> surfaceFormsForURI = surrogateSearcher.getSurfaceForms(new DBpediaResource(targetUri));
                    for (SurfaceForm sf : surfaceFormsForURI) {

     * Index the surrogates mapping from a triples file.
     */
    public void addFromNTfile(File surfaceFormsDataSet) throws IOException, IndexException {
        LOG.info("Indexing candidate map from " + surfaceFormsDataSet.getName() + " to " + mLucene.directory() + "...");

        NxParser nxParser = new NxParser(new FileInputStream(surfaceFormsDataSet), false);
        while (nxParser.hasNext()) {
            Node[] nodes = nxParser.next();
            // The subject holds the resource URI, the object holds the surface form string.
            String resourceString = nodes[0].toString().replace(SpotlightConfiguration.DEFAULT_NAMESPACE, "");
            String surfaceFormString = nodes[2].toString();
            List<SurfaceForm> surfaceForms = AddSurfaceFormsToIndex.fromTitlesToAlternativesJ(new SurfaceForm(surfaceFormString));
            add(surfaceForms, new DBpediaResource(resourceString));
        }
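The two DBpedia Spotlight snippets above share one shape: stream the statements with NxParser and pull out the subject and object strings. A stripped-down sketch of that shape without the Spotlight-specific classes, collecting the subjects of all triples whose object equals a given type URI (class, method, and variable names are hypothetical):

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;

import org.semanticweb.yars.nx.Node;
import org.semanticweb.yars.nx.parser.NxParser;

public class InstancesByTypeExample {

    // Returns the subjects of all triples whose object equals the given type URI.
    public static Set<String> subjectsOfType(File instancesFile, String typeUri) throws IOException {
        Set<String> subjects = new HashSet<String>();
        NxParser parser = new NxParser(new FileInputStream(instancesFile));
        while (parser.hasNext()) {
            Node[] triple = parser.next();
            if (triple.length >= 3 && triple[2].toString().equals(typeUri)) {
                subjects.add(triple[0].toString());
            }
        }
        return subjects;
    }
}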


