Package org.apache.lucene.store

Examples of org.apache.lucene.store.IndexOutput
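
A quick primer before the collected snippets: the sketch below shows the basic
IndexOutput/IndexInput round trip. It assumes the Lucene 4.x API, where
createOutput and openInput take an IOContext (several of the older snippets on
this page use the earlier single-argument forms); RAMDirectory, the class name,
and the file name "demo.bin" are illustrative choices, not taken from any
example below.

    import java.io.IOException;
    import org.apache.lucene.store.*;

    class IndexOutputRoundTrip {
      static void demo() throws IOException {
        Directory dir = new RAMDirectory();

        // write a few values to a new file in the directory
        IndexOutput out = dir.createOutput("demo.bin", IOContext.DEFAULT);
        out.writeVInt(42);          // variable-length int
        out.writeString("payload"); // length-prefixed string
        out.close();

        // read the values back in the order they were written
        IndexInput in = dir.openInput("demo.bin", IOContext.DEFAULT);
        int value = in.readVInt();  // 42
        String s = in.readString(); // "payload"
        in.close();

        dir.close();
      }
    }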


        // "gen" is the current segments generation, obtained earlier via
        // SegmentInfos.getCurrentSegmentGeneration(dir) (not shown in this excerpt)
        String fileNameIn = SegmentInfos.getCurrentSegmentFileName(dir);
        String fileNameOut = IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS,
                                                                   "",
                                                                   1+gen);
        IndexInput in = dir.openInput(fileNameIn);
        IndexOutput out = dir.createOutput(fileNameOut);
        long length = in.length();
        // copy all but the last byte, producing a truncated segments file
        // that simulates index corruption
        for(int i=0;i<length-1;i++) {
          out.writeByte(in.readByte());
        }
        in.close();
        out.close();
        dir.deleteFile(fileNameIn);

        IndexReader reader = null;
        try {
          reader = IndexReader.open(dir);
          // ... (rest of excerpt omitted)


    long gen = SegmentInfos.getCurrentSegmentGeneration(dir);
    assertTrue("segment generation should be > 1 but got " + gen, gen > 1);

    final String segmentsFileName = SegmentInfos.getCurrentSegmentFileName(dir);
    IndexInput in = dir.openInput(segmentsFileName);
    IndexOutput out = dir.createOutput(IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS, "", 1+gen));
    // copy all but the last byte, then write a corrupted final byte to
    // simulate a damaged segments file
    out.copyBytes(in, in.length()-1);
    byte b = in.readByte();
    out.writeByte((byte) (1+b));
    out.close();
    in.close();

    IndexReader reader = null;
    try {
      reader = IndexReader.open(dir);
    // ... (rest of excerpt omitted)

    File indexDir = new File(System.getProperty("tempDir"), "otherfiles");
    Directory dir = FSDirectory.open(indexDir);
    try {
      // Create my own random file:

      IndexOutput out = dir.createOutput("myrandomfile");
      out.writeByte((byte) 42);
      out.close();

      // opening an IndexWriter with create=true must leave files it does not
      // recognize as index files untouched:
      new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED).close();

      assertTrue(dir.fileExists("myrandomfile"));
      // ... (rest of excerpt omitted)


  private void mergeNorms() throws IOException {
    byte[] normBuffer = null;
    IndexOutput output = null;
    try {
      int numFieldInfos = fieldInfos.size();
      for (int i = 0; i < numFieldInfos; i++) {
        FieldInfo fi = fieldInfos.fieldInfo(i);
        if (fi.isIndexed && !fi.omitNorms) {
          if (output == null) {
            output = directory.createOutput(segment + "." + IndexFileNames.NORMS_EXTENSION);
            output.writeBytes(NORMS_HEADER,NORMS_HEADER.length);
          }
          for (Iterator iter = readers.iterator(); iter.hasNext();) {
            IndexReader reader = (IndexReader) iter.next();
            int maxDoc = reader.maxDoc();
            if (normBuffer == null || normBuffer.length < maxDoc) {
              // the buffer is too small for the current segment
              normBuffer = new byte[maxDoc];
            }
            reader.norms(fi.name, normBuffer, 0);
            if (!reader.hasDeletions()) {
              //optimized case for segments without deleted docs
              output.writeBytes(normBuffer, maxDoc);
            } else {
              // this segment has deleted docs, so we have to
              // check for every doc if it is deleted or not
              for (int k = 0; k < maxDoc; k++) {
                if (!reader.isDeleted(k)) {
                  output.writeByte(normBuffer[k]);
                }
              }
            }
            checkAbort.work(maxDoc);
          }
        }
      }
    } finally {
      if (output != null) {
        output.close();
      }
    }
  }

  @Override
  public FieldsConsumer fieldsConsumer(SegmentWriteState state) throws IOException {

    final String fileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, EXTENSION);
    final IndexOutput out = state.directory.createOutput(fileName, state.context);
   
    return new FieldsConsumer() {
      @Override
      public TermsConsumer addField(FieldInfo field) {
        //System.out.println("\naddField field=" + field.name);
        return new TermsWriter(out, field, doPackFST, acceptableOverheadRatio);
      }

      @Override
      public void close() throws IOException {
        // EOF marker:
        try {
          out.writeVInt(0);
        } finally {
          out.close();
        }
      }
    };
  }

              infoStream.message(INFO_STREAM_COMPONENT, "doUpdate(): detected client was closed; abort file copy");
            }
            return;
          }
          InputStream in = null;
          IndexOutput out = null;
          try {
            in = replicator.obtainFile(session.id, source, file.fileName);
            out = dir.createOutput(file.fileName, IOContext.DEFAULT);
            copyBytes(out, in);
            cpFiles.add(file.fileName);
            // ... (rest of excerpt omitted)

    termSuffixes = new GrowableByteArrayDataOutput(ArrayUtil.oversize(chunkSize, 1));
    payloadBytes = new GrowableByteArrayDataOutput(ArrayUtil.oversize(1, 1));
    lastTerm = new BytesRef(ArrayUtil.oversize(30, 1));

    boolean success = false;
    IndexOutput indexStream = directory.createOutput(IndexFileNames.segmentFileName(segment, segmentSuffix, VECTORS_INDEX_EXTENSION), context);
    try {
      vectorsStream = directory.createOutput(IndexFileNames.segmentFileName(segment, segmentSuffix, VECTORS_EXTENSION), context);

      final String codecNameIdx = formatName + CODEC_SFX_IDX;
      final String codecNameDat = formatName + CODEC_SFX_DAT;
      CodecUtil.writeHeader(indexStream, codecNameIdx, VERSION_CURRENT);
      CodecUtil.writeHeader(vectorsStream, codecNameDat, VERSION_CURRENT);
      assert CodecUtil.headerLength(codecNameDat) == vectorsStream.getFilePointer();
      assert CodecUtil.headerLength(codecNameIdx) == indexStream.getFilePointer();

      indexWriter = new CompressingStoredFieldsIndexWriter(indexStream);
      // indexWriter now owns indexStream; null our reference so the
      // error-handling path does not close it a second time
      indexStream = null;

      vectorsStream.writeVInt(PackedInts.VERSION_CURRENT);
      // ... (rest of excerpt omitted)
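
For context on the writeHeader/headerLength calls above, here is a hedged
sketch of the codec-header round trip using Lucene 4.x CodecUtil; "MyCodec",
"example.dat", and the version bounds are made up for illustration:

      // dir is any org.apache.lucene.store.Directory
      IndexOutput out = dir.createOutput("example.dat", IOContext.DEFAULT);
      CodecUtil.writeHeader(out, "MyCodec", 1); // magic + codec name + version
      assert out.getFilePointer() == CodecUtil.headerLength("MyCodec");
      // ... write the file's payload ...
      out.close();

      IndexInput in = dir.openInput("example.dat", IOContext.DEFAULT);
      // verifies the magic and codec name, checks the version is in [1, 1],
      // and returns the version actually found
      int version = CodecUtil.checkHeader(in, "MyCodec", 1, 1);
      in.close();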

  @Override
  public void write(Directory directory, String segmentName, String segmentSuffix, FieldInfos infos, IOContext context) throws IOException {
    final String fileName = IndexFileNames.segmentFileName(segmentName, segmentSuffix, Lucene46FieldInfosFormat.EXTENSION);
    IndexOutput output = directory.createOutput(fileName, context);
    boolean success = false;
    try {
      CodecUtil.writeHeader(output, Lucene46FieldInfosFormat.CODEC_NAME, Lucene46FieldInfosFormat.FORMAT_CURRENT);
      output.writeVInt(infos.size());
      for (FieldInfo fi : infos) {
        IndexOptions indexOptions = fi.getIndexOptions();
        byte bits = 0x0;
        if (fi.hasVectors()) bits |= Lucene46FieldInfosFormat.STORE_TERMVECTOR;
        if (fi.omitsNorms()) bits |= Lucene46FieldInfosFormat.OMIT_NORMS;
        if (fi.hasPayloads()) bits |= Lucene46FieldInfosFormat.STORE_PAYLOADS;
        if (fi.isIndexed()) {
          bits |= Lucene46FieldInfosFormat.IS_INDEXED;
          assert indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 || !fi.hasPayloads();
          if (indexOptions == IndexOptions.DOCS_ONLY) {
            bits |= Lucene46FieldInfosFormat.OMIT_TERM_FREQ_AND_POSITIONS;
          } else if (indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) {
            bits |= Lucene46FieldInfosFormat.STORE_OFFSETS_IN_POSTINGS;
          } else if (indexOptions == IndexOptions.DOCS_AND_FREQS) {
            bits |= Lucene46FieldInfosFormat.OMIT_POSITIONS;
          }
        }
        output.writeString(fi.name);
        output.writeVInt(fi.number);
        output.writeByte(bits);

        // pack the DV types in one byte
        final byte dv = docValuesByte(fi.getDocValuesType());
        final byte nrm = docValuesByte(fi.getNormType());
        assert (dv & (~0xF)) == 0 && (nrm & (~0x0F)) == 0;
        byte val = (byte) (0xff & ((nrm << 4) | dv));
        output.writeByte(val);
        output.writeLong(fi.getDocValuesGen());
        output.writeStringStringMap(fi.attributes());
      }
      success = true;
    } finally {
      if (success) {
        output.close();
      } else {
        IOUtils.closeWhileHandlingException(output);
      }
    }
  }
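
The doc-values/norms byte written above packs two four-bit type codes as
(nrm << 4) | dv. A reader unpacks it by masking the two nibbles; this decode
is reconstructed from the packing logic shown, not copied from Lucene's
reader class:

      byte val = in.readByte();
      int dv  = val & 0x0F;         // low nibble: doc-values type
      int nrm = (val >>> 4) & 0x0F; // high nibble: norms type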

    this.numStoredFields = new int[16];
    this.endOffsets = new int[16];
    this.numBufferedDocs = 0;

    boolean success = false;
    IndexOutput indexStream = directory.createOutput(IndexFileNames.segmentFileName(segment, segmentSuffix, FIELDS_INDEX_EXTENSION), context);
    try {
      fieldsStream = directory.createOutput(IndexFileNames.segmentFileName(segment, segmentSuffix, FIELDS_EXTENSION), context);

      final String codecNameIdx = formatName + CODEC_SFX_IDX;
      final String codecNameDat = formatName + CODEC_SFX_DAT;
      CodecUtil.writeHeader(indexStream, codecNameIdx, VERSION_CURRENT);
      CodecUtil.writeHeader(fieldsStream, codecNameDat, VERSION_CURRENT);
      assert CodecUtil.headerLength(codecNameDat) == fieldsStream.getFilePointer();
      assert CodecUtil.headerLength(codecNameIdx) == indexStream.getFilePointer();

      indexWriter = new CompressingStoredFieldsIndexWriter(indexStream);
      indexStream = null;

      fieldsStream.writeVInt(chunkSize);
      // ... (rest of excerpt omitted)

        final int bufferSize = random().nextBoolean()
            ? _TestUtil.nextInt(random(), 0, 48)
            : _TestUtil.nextInt(random(), 0, 4096);
        final Directory d = newDirectory();
       
        IndexOutput out = d.createOutput("out.bin", newIOContext(random()));
        final float acceptableOverhead;
        if (iter == 0) {
          // have the first iteration go through exact nbits
          acceptableOverhead = 0.0f;
        } else {
          acceptableOverhead = random().nextFloat();
        }
        PackedInts.Writer w = PackedInts.getWriter(out, valueCount, nbits, acceptableOverhead);
        final long startFp = out.getFilePointer();

        final int actualValueCount = random().nextBoolean() ? valueCount : _TestUtil.nextInt(random(), 0, valueCount);
        final long[] values = new long[valueCount];
        for(int i=0;i<actualValueCount;i++) {
          if (nbits == 64) {
            values[i] = random().nextLong();
          } else {
            values[i] = _TestUtil.nextLong(random(), 0, maxValue);
          }
          w.add(values[i]);
        }
        w.finish();
        final long fp = out.getFilePointer();
        out.close();

        // ensure that finish() added the (valueCount-actualValueCount) missing values
        final long bytes = w.getFormat().byteCount(PackedInts.VERSION_CURRENT, valueCount, w.bitsPerValue);
        assertEquals(bytes, fp - startFp);
        // ... (rest of excerpt omitted)
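
To read back the stream written by PackedInts.getWriter, a hedged sketch using
the matching reader entry point from the same packed-ints API (d, random(),
and newIOContext come from the test above):

        IndexInput in = d.openInput("out.bin", newIOContext(random()));
        // reads the header the writer emitted and returns a random-access view
        PackedInts.Reader reader = PackedInts.getReader(in);
        for (int i = 0; i < reader.size(); i++) {
          long v = reader.get(i);   // value i, as written (or 0 if padded)
        }
        in.close();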
