Package org.apache.lucene.store

Examples of org.apache.lucene.store.OutputStream
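org.apache.lucene.store.OutputStream is the abstract class Lucene 1.x uses to write files into a Directory; it offers primitives such as writeByte, writeBytes, writeInt, writeVInt, writeLong and writeString, plus getFilePointer and seek for random access (later Lucene releases renamed it IndexOutput). Before the excerpts, a minimal sketch of the basic write/read round trip, assuming the same Lucene 1.x store API the excerpts use; RAMDirectory and the file name "demo.dat" are illustrative choices, not taken from any excerpt.

    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.InputStream;
    import org.apache.lucene.store.OutputStream;
    import org.apache.lucene.store.RAMDirectory;

    public class OutputStreamRoundTrip {
        public static void main(String[] args) throws java.io.IOException {
            Directory dir = new RAMDirectory();

            // Write a couple of primitives to a new file in the Directory.
            OutputStream out = dir.createFile("demo.dat");
            try {
                out.writeVInt(42);         // variable-length int
                out.writeString("hello");  // length-prefixed string
            } finally {
                out.close();
            }

            // Read them back with the matching InputStream calls.
            InputStream in = dir.openFile("demo.dat");
            try {
                int value = in.readVInt();
                String text = in.readString();
                System.out.println(value + " " + text);
            } finally {
                in.close();
            }
        }
    }

The same pattern (createFile, write*, close; then openFile, read*, close) recurs throughout the excerpts below.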


        // Copy every file from the source Directory into dest.
        // (The source Directory `dir` and the byte[] `buffer` used below
        // are defined by the enclosing code, not shown in this excerpt.)
        Directory dest = getDirectory();
        String[] files = dir.list();
        for (int i = 0; i < files.length; i++) {
            InputStream in = dir.openFile(files[i]);
            try {
                OutputStream out = dest.createFile(files[i]);
                try {
                    long remaining = in.length();
                    while (remaining > 0) {
                        int num = (int) Math.min(remaining, buffer.length);
                        in.readBytes(buffer, 0, num);
                        out.writeBytes(buffer, num);
                        remaining -= num;
                    }
                } finally {
                    out.close();
                }
            } finally {
                in.close();
            }
        }


    quickSort(postings, left + 1, hi);
  }

  private final void writePostings(Posting[] postings, String segment)
    throws IOException {
    OutputStream freq = null, prox = null;
    TermInfosWriter tis = null;

    try {
      freq = directory.createFile(segment + ".frq");
      prox = directory.createFile(segment + ".prx");
      tis = new TermInfosWriter(directory, segment, fieldInfos);
      TermInfo ti = new TermInfo();

      for (int i = 0; i < postings.length; i++) {
        Posting posting = postings[i];

        // add an entry to the dictionary with pointers to prox and freq files
        ti.set(1, freq.getFilePointer(), prox.getFilePointer());
        tis.add(posting.term, ti);

        // add an entry to the freq file
        int f = posting.freq;
        if (f == 1)          // optimize freq=1
          freq.writeVInt(1);        // set low bit of doc num.
        else {
          freq.writeVInt(0);        // the document number
          freq.writeVInt(f);        // frequency in doc
        }

        int lastPosition = 0;        // write positions
        int[] positions = posting.positions;
        for (int j = 0; j < f; j++) {      // use delta-encoding
          int position = positions[j];
          prox.writeVInt(position - lastPosition);
          lastPosition = position;
        }
      }
    } finally {
      if (freq != null) freq.close();
      if (prox != null) prox.close();
      if (tis != null) tis.close();
    }
  }
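The loop above writes each term's positions into the .prx file as deltas (writeVInt of position - lastPosition). The reader side recovers absolute positions by accumulating readVInt values; a minimal sketch with a hypothetical helper, not part of the excerpt:

    // Hypothetical helper: reads `freq` delta-encoded positions, as written by
    // the loop above, and returns them as absolute positions. Assumes `prox`
    // is already positioned at this term's entry in the .prx file.
    private static int[] readPositions(org.apache.lucene.store.InputStream prox, int freq)
        throws java.io.IOException {
      int[] positions = new int[freq];
      int position = 0;
      for (int j = 0; j < freq; j++) {
        position += prox.readVInt();    // undo the delta-encoding
        positions[j] = position;
      }
      return positions;
    }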

      Field field = (Field) fields.nextElement();
      if (field.isIndexed()) {
        int n = fieldInfos.fieldNumber(field.name());
        float norm =
          fieldBoosts[n] * similarity.lengthNorm(field.name(),fieldLengths[n]);
        OutputStream norms = directory.createFile(segment + ".f" + n);
        try {
          norms.writeByte(similarity.encodeNorm(norm));
        } finally {
          norms.close();
        }
      }
    }
  }

  /** Writes this vector to the file <code>name</code> in Directory
    <code>d</code>, in a format that can be read by the constructor {@link
    #BitVector(Directory, String)}.  */
  public final void write(Directory d, String name) throws IOException {
    OutputStream output = d.createFile(name);
    try {
      output.writeInt(size());        // write size
      output.writeInt(count());        // write count
      output.writeBytes(bits, bits.length);    // write bits
    } finally {
      output.close();
    }
  }
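The javadoc above notes that the file can be read back by the BitVector(Directory, String) constructor. A sketch of that read path, assuming it simply mirrors the three writes (the actual constructor may allocate the bits array differently):

    InputStream input = d.openFile(name);
    try {
      int size = input.readInt();              // read size
      int count = input.readInt();             // read count
      byte[] bits = new byte[(size >> 3) + 1]; // assumed allocation: one bit per element
      input.readBytes(bits, 0, bits.length);   // read bits
    } finally {
      input.close();
    }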

  final int size() {
    return byNumber.size();
  }

  final void write(Directory d, String name) throws IOException {
    OutputStream output = d.createFile(name);
    try {
      write(output);
    } finally {
      output.close();
    }
  }

    /** Creates a file of the specified size with random data. */
    private void createRandomFile(Directory dir, String name, int size)
    throws IOException
    {
        OutputStream os = dir.createFile(name);
        for (int i=0; i<size; i++) {
            byte b = (byte) (Math.random() * 256);
            os.writeByte(b);
        }
        os.close();
    }

    // (excerpt begins mid-signature: a helper that writes `size`
    // consecutive byte values, starting at `start`, into a new file)
                                    String name,
                                    byte start,
                                    int size)
    throws IOException
    {
        OutputStream os = dir.createFile(name);
        for (int i=0; i < size; i++) {
            os.writeByte(start);
            start++;
        }
        os.close();
    }

    private void demo_FSInputStreamBug(FSDirectory fsdir, String file)
    throws IOException
    {
        // Set up the test file - we need more than 1024 bytes
        OutputStream os = fsdir.createFile(file);
        for(int i=0; i<2000; i++) {
            os.writeByte((byte) i);
        }
        os.close();

        InputStream in = fsdir.openFile(file);

        // This read primes the buffer in InputStream
        byte b = in.readByte();

                "No entries to merge have been defined");

        merged = true;

        // open the compound stream
        OutputStream os = null;
        try {
            os = directory.createFile(fileName);

            // Write the number of entries
            os.writeVInt(entries.size());

            // Write the directory with all offsets at 0.
            // Remember the positions of directory entries so that we can
            // adjust the offsets later
            Iterator it = entries.iterator();
            while(it.hasNext()) {
                FileEntry fe = (FileEntry) it.next();
                fe.directoryOffset = os.getFilePointer();
                os.writeLong(0);    // for now
                os.writeString(fe.file);
            }

            // Open the files and copy their data into the stream.
            // Remember the locations of each file's data section.
            byte buffer[] = new byte[1024];
            it = entries.iterator();
            while(it.hasNext()) {
                FileEntry fe = (FileEntry) it.next();
                fe.dataOffset = os.getFilePointer();
                copyFile(fe, os, buffer);
            }

            // Write the data offsets into the directory of the compound stream
            it = entries.iterator();
            while(it.hasNext()) {
                FileEntry fe = (FileEntry) it.next();
                os.seek(fe.directoryOffset);
                os.writeLong(fe.dataOffset);
            }

            // Close the output stream. Set the os to null before trying to
            // close so that if an exception occurs during the close, the
            // finally clause below will not attempt to close the stream
            // the second time.
            OutputStream tmp = os;
            os = null;
            tmp.close();

        } finally {
            if (os != null) try { os.close(); } catch (IOException e) { }
        }
    }
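The copyFile(fe, os, buffer) call above is not shown in this excerpt; presumably it streams one entry's file from the directory into the compound stream, much like the copy loop in the first example. A sketch under that assumption (treating FileEntry.file as the source file name):

    // Sketch only: assumes the helper simply streams the named file's bytes
    // into `os`, reusing the shared `buffer`.
    private void copyFile(FileEntry fe, OutputStream os, byte[] buffer)
        throws IOException
    {
        InputStream is = directory.openFile(fe.file);
        try {
            long remaining = is.length();
            while (remaining > 0) {
                int len = (int) Math.min(remaining, buffer.length);
                is.readBytes(buffer, 0, len);
                os.writeBytes(buffer, len);
                remaining -= len;
            }
        } finally {
            is.close();
        }
    }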

  }
  private final void mergeNorms() throws IOException {
    for (int i = 0; i < fieldInfos.size(); i++) {
      FieldInfo fi = fieldInfos.fieldInfo(i);
      if (fi.isIndexed) {
        OutputStream output = directory.createFile(segment + ".f" + i);
        try {
          for (int j = 0; j < readers.size(); j++) {
            IndexReader reader = (IndexReader) readers.elementAt(j);
            byte[] input = reader.norms(fi.name);
            int maxDoc = reader.maxDoc();
            for (int k = 0; k < maxDoc; k++) {
              // deleted documents are skipped, so their norms are squeezed
              // out of the merged segment's norms file
              byte norm = input != null ? input[k] : (byte) 0;
              if (!reader.isDeleted(k)) {
                output.writeByte(norm);
              }
            }
          }
        } finally {
          output.close();
        }
      }
    }
  }
