Package org.apache.lucene.store

Examples of org.apache.lucene.store.ChecksumIndexInput
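
A ChecksumIndexInput is an IndexInput that accumulates a checksum over every byte read, so a reader can verify file integrity at the end with CodecUtil.checkFooter (or SimpleTextUtil.checkFooter for the plain-text codec). Before the individual examples, here is a minimal sketch of the write/read round trip behind that pattern. It is an illustration only, not code from Lucene: the codec name "DemoFile", the version constants and the file name are assumptions.

import java.io.IOException;

import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.IOUtils;

// Hypothetical round trip: the codec name, version constants and payload are made up.
final class ChecksumRoundTrip {
  static final String CODEC_NAME = "DemoFile"; // assumed codec header name
  static final int VERSION_START = 0;          // assumed oldest readable version
  static final int VERSION_CURRENT = 0;        // assumed current version

  /** Writes a single long between a codec header and a checksum footer. */
  static void write(Directory dir, String fileName, long value) throws IOException {
    IndexOutput out = dir.createOutput(fileName, IOContext.DEFAULT);
    boolean success = false;
    try {
      CodecUtil.writeHeader(out, CODEC_NAME, VERSION_CURRENT);
      out.writeLong(value);
      CodecUtil.writeFooter(out); // records the checksum accumulated while writing
      success = true;
    } finally {
      if (success) {
        out.close();
      } else {
        IOUtils.closeWhileHandlingException(out);
      }
    }
  }

  /** Reads the long back, verifying the header and the checksum footer. */
  static long read(Directory dir, String fileName) throws IOException {
    ChecksumIndexInput in = dir.openChecksumInput(fileName, IOContext.READONCE);
    boolean success = false;
    try {
      CodecUtil.checkHeader(in, CODEC_NAME, VERSION_START, VERSION_CURRENT);
      long value = in.readLong(); // every byte read updates the running checksum
      CodecUtil.checkFooter(in);  // throws CorruptIndexException on a mismatch
      success = true;
      return value;
    } finally {
      if (success) {
        in.close();
      } else {
        IOUtils.closeWhileHandlingException(in);
      }
    }
  }
}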


          // contents.  Then we take the larger of the two
          // gens.  This way, if either approach is hitting
          // a stale cache (NFS) we have a better chance of
          // getting the right generation.
          long genB = -1;
          ChecksumIndexInput genInput = null;
          try {
            genInput = directory.openChecksumInput(IndexFileNames.SEGMENTS_GEN, IOContext.READONCE);
          } catch (IOException e) {
            if (infoStream != null) {
              message("segments.gen open: IOException " + e);
            }
          }
 
          if (genInput != null) {
            try {
              int version = genInput.readInt();
              if (version == FORMAT_SEGMENTS_GEN_47 || version == FORMAT_SEGMENTS_GEN_CHECKSUM) {
                long gen0 = genInput.readLong();
                long gen1 = genInput.readLong();
                if (infoStream != null) {
                  message("fallback check: " + gen0 + "; " + gen1);
                }
                if (version == FORMAT_SEGMENTS_GEN_CHECKSUM) {
                  CodecUtil.checkFooter(genInput);
                } else {
                  CodecUtil.checkEOF(genInput);
                }
                if (gen0 == gen1) {
                  // The file is consistent.
                  genB = gen0;
                }
              } else {
                throw new IndexFormatTooNewException(genInput, version, FORMAT_SEGMENTS_GEN_START, FORMAT_SEGMENTS_GEN_CURRENT);
              }
            } catch (IOException err2) {
              // rethrow any format exception
              if (err2 instanceof CorruptIndexException) throw err2;
            } finally {
              genInput.close();
            }
          }

          if (infoStream != null) {
            message(IndexFileNames.SEGMENTS_GEN + " check: genB=" + genB);
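
The fallback check above reads the legacy segments.gen file, which stores the generation twice; depending on the format version it either verifies a checksum footer or only checks that the file ends where expected. The same version gate reappears in the BitVector constructor near the end of this page. A condensed sketch of that gate, with a hypothetical constant standing in for FORMAT_SEGMENTS_GEN_CHECKSUM / VERSION_CHECKSUM:

import java.io.IOException;

import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.store.ChecksumIndexInput;

// Hypothetical helper; firstVersionWithChecksum stands in for format constants
// such as FORMAT_SEGMENTS_GEN_CHECKSUM or VERSION_CHECKSUM.
final class TailCheck {
  static void verifyTail(ChecksumIndexInput in, int version, int firstVersionWithChecksum)
      throws IOException {
    if (version >= firstVersionWithChecksum) {
      CodecUtil.checkFooter(in); // compare the footer CRC with the checksum accumulated so far
    } else {
      CodecUtil.checkEOF(in);    // older files have no footer; just reject trailing bytes
    }
  }
}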


        }
      } else {
        final CompressingStoredFieldsIndexReader index = matchingVectorsReader.getIndex();
        final IndexInput vectorsStreamOrig = matchingVectorsReader.getVectorsStream();
        vectorsStreamOrig.seek(0);
        final ChecksumIndexInput vectorsStream = new BufferedChecksumIndexInput(vectorsStreamOrig.clone());
       
        for (int i = nextLiveDoc(0, liveDocs, maxDoc); i < maxDoc; ) {
          // We make sure to move the checksum input in any case, otherwise the final
          // integrity check might need to read the whole file a second time
          final long startPointer = index.getStartPointer(i);
          if (startPointer > vectorsStream.getFilePointer()) {
            vectorsStream.seek(startPointer);
          }
          if (pendingDocs.isEmpty()
              && (i == 0 || index.getStartPointer(i - 1) < startPointer)) { // start of a chunk
            final int docBase = vectorsStream.readVInt();
            final int chunkDocs = vectorsStream.readVInt();
            assert docBase + chunkDocs <= matchingSegmentReader.maxDoc();
            if (docBase + chunkDocs < matchingSegmentReader.maxDoc()
                && nextDeletedDoc(docBase, liveDocs, docBase + chunkDocs) == docBase + chunkDocs) {
              final long chunkEnd = index.getStartPointer(docBase + chunkDocs);
              final long chunkLength = chunkEnd - vectorsStream.getFilePointer();
              indexWriter.writeIndex(chunkDocs, this.vectorsStream.getFilePointer());
              this.vectorsStream.writeVInt(docCount);
              this.vectorsStream.writeVInt(chunkDocs);
              this.vectorsStream.copyBytes(vectorsStream, chunkLength);
              docCount += chunkDocs;
              this.numDocs += chunkDocs;
              mergeState.checkAbort.work(300 * chunkDocs);
              i = nextLiveDoc(docBase + chunkDocs, liveDocs, maxDoc);
            } else {
              for (; i < docBase + chunkDocs; i = nextLiveDoc(i + 1, liveDocs, maxDoc)) {
                final Fields vectors = reader.getTermVectors(i);
                addAllDocVectors(vectors, mergeState);
                ++docCount;
                mergeState.checkAbort.work(300);
              }
            }
          } else {
            final Fields vectors = reader.getTermVectors(i);
            addAllDocVectors(vectors, mergeState);
            ++docCount;
            mergeState.checkAbort.work(300);
            i = nextLiveDoc(i + 1, liveDocs, maxDoc);
          }
        }
       
        vectorsStream.seek(vectorsStream.length() - CodecUtil.footerLength());
        CodecUtil.checkFooter(vectorsStream);
      }
    }
    finish(mergeState.fieldInfos, docCount);
    return docCount;
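
The merge path above wraps a clone of the raw vectors stream in a BufferedChecksumIndexInput so the checksum accumulates while chunks are copied; at the end it only needs to seek to the footer and verify it instead of re-reading the whole file. Used purely for verification, the same clone-and-wrap combination looks roughly like the sketch below (recent Lucene releases package essentially this sequence as CodecUtil.checksumEntireFile, where available).

import java.io.IOException;

import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.store.BufferedChecksumIndexInput;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.IndexInput;

// Hypothetical helper: verify an already-open file end to end without
// disturbing the caller's read position.
final class WholeFileCheck {
  static void verify(IndexInput raw) throws IOException {
    IndexInput clone = raw.clone();   // a clone has its own file pointer
    clone.seek(0);
    ChecksumIndexInput in = new BufferedChecksumIndexInput(clone);
    // ChecksumIndexInput.seek only moves forward and reads the skipped bytes,
    // so everything before the footer is still folded into the checksum.
    in.seek(in.length() - CodecUtil.footerLength());
    CodecUtil.checkFooter(in);        // throws CorruptIndexException on a mismatch
  }
}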

  @Override
  public SegmentInfo read(Directory directory, String segmentName, IOContext context) throws IOException {
    BytesRef scratch = new BytesRef();
    String segFileName = IndexFileNames.segmentFileName(segmentName, "", SimpleTextSegmentInfoFormat.SI_EXTENSION);
    ChecksumIndexInput input = directory.openChecksumInput(segFileName, context);
    boolean success = false;
    try {
      SimpleTextUtil.readLine(input, scratch);
      assert StringHelper.startsWith(scratch, SI_VERSION);
      final String version = readString(SI_VERSION.length, scratch);
   
      SimpleTextUtil.readLine(input, scratch);
      assert StringHelper.startsWith(scratch, SI_DOCCOUNT);
      final int docCount = Integer.parseInt(readString(SI_DOCCOUNT.length, scratch));
   
      SimpleTextUtil.readLine(input, scratch);
      assert StringHelper.startsWith(scratch, SI_USECOMPOUND);
      final boolean isCompoundFile = Boolean.parseBoolean(readString(SI_USECOMPOUND.length, scratch));
   
      SimpleTextUtil.readLine(input, scratch);
      assert StringHelper.startsWith(scratch, SI_NUM_DIAG);
      int numDiag = Integer.parseInt(readString(SI_NUM_DIAG.length, scratch));
      Map<String,String> diagnostics = new HashMap<>();

      for (int i = 0; i < numDiag; i++) {
        SimpleTextUtil.readLine(input, scratch);
        assert StringHelper.startsWith(scratch, SI_DIAG_KEY);
        String key = readString(SI_DIAG_KEY.length, scratch);
     
        SimpleTextUtil.readLine(input, scratch);
        assert StringHelper.startsWith(scratch, SI_DIAG_VALUE);
        String value = readString(SI_DIAG_VALUE.length, scratch);
        diagnostics.put(key, value);
      }
     
      SimpleTextUtil.readLine(input, scratch);
      assert StringHelper.startsWith(scratch, SI_NUM_FILES);
      int numFiles = Integer.parseInt(readString(SI_NUM_FILES.length, scratch));
      Set<String> files = new HashSet<>();

      for (int i = 0; i < numFiles; i++) {
        SimpleTextUtil.readLine(input, scratch);
        assert StringHelper.startsWith(scratch, SI_FILE);
        String fileName = readString(SI_FILE.length, scratch);
        files.add(fileName);
      }
     
      SimpleTextUtil.checkFooter(input);

      SegmentInfo info = new SegmentInfo(directory, version, segmentName, docCount,
                                         isCompoundFile, null, diagnostics);
      info.setFiles(files);
      success = true;
      return info;
    } finally {
      if (!success) {
        IOUtils.closeWhileHandlingException(input);
      } else {
        input.close();
      }
    }
  }
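
This reader and the field-infos reader further down share the same close idiom: on success call close() so close-time errors still surface, on failure call IOUtils.closeWhileHandlingException so the original exception is not masked by a secondary one. A generic sketch of that idiom; the Parser interface is invented for illustration:

import java.io.IOException;

import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.util.IOUtils;

// Sketch of the success-flag close idiom used by the readers on this page.
final class CloseIdiom {
  interface Parser<T> { T parse(ChecksumIndexInput in) throws IOException; }

  static <T> T readAndClose(Directory dir, String fileName, IOContext ctx, Parser<T> parser)
      throws IOException {
    ChecksumIndexInput input = dir.openChecksumInput(fileName, ctx);
    boolean success = false;
    try {
      T result = parser.parse(input);
      success = true;
      return result;
    } finally {
      if (success) {
        input.close();                              // propagate close-time errors
      } else {
        IOUtils.closeWhileHandlingException(input); // don't mask the original exception
      }
    }
  }
}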

  @Override
  public void checkIntegrity() throws IOException {
    BytesRef scratch = new BytesRef();
    IndexInput clone = data.clone();
    clone.seek(0);
    ChecksumIndexInput input = new BufferedChecksumIndexInput(clone);
    while(true) {
      SimpleTextUtil.readLine(input, scratch);
      if (scratch.equals(END)) {
        SimpleTextUtil.checkFooter(input);
        break;
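
checkIntegrity() re-reads the entire plain-text file through a fresh BufferedChecksumIndexInput over a clone, stopping at the END marker so SimpleTextUtil.checkFooter can compare the recorded checksum. Stripped of the format-specific parsing that the excerpt omits, the pattern is roughly the following sketch (written as if it lived in the SimpleText codec package, since SimpleTextUtil may not be visible outside it):

package org.apache.lucene.codecs.simpletext;

import java.io.IOException;

import org.apache.lucene.store.BufferedChecksumIndexInput;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.BytesRef;

// Hypothetical distilled form of the checkIntegrity() above; endMarker would be
// the reader's END constant.
final class SimpleTextIntegrityCheck {
  static void rescan(IndexInput data, BytesRef endMarker) throws IOException {
    IndexInput clone = data.clone(); // a clone has its own file pointer
    clone.seek(0);
    ChecksumIndexInput input = new BufferedChecksumIndexInput(clone);
    BytesRef scratch = new BytesRef();
    while (true) {
      SimpleTextUtil.readLine(input, scratch); // checksum accumulates line by line
      if (scratch.equals(endMarker)) {
        SimpleTextUtil.checkFooter(input);     // verifies the trailing checksum line
        return;
      }
    }
  }
}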

public class SimpleTextFieldInfosReader extends FieldInfosReader {

  @Override
  public FieldInfos read(Directory directory, String segmentName, String segmentSuffix, IOContext iocontext) throws IOException {
    final String fileName = IndexFileNames.segmentFileName(segmentName, segmentSuffix, FIELD_INFOS_EXTENSION);
    ChecksumIndexInput input = directory.openChecksumInput(fileName, iocontext);
    BytesRef scratch = new BytesRef();
   
    boolean success = false;
    try {
     
      SimpleTextUtil.readLine(input, scratch);
      assert StringHelper.startsWith(scratch, NUMFIELDS);
      final int size = Integer.parseInt(readString(NUMFIELDS.length, scratch));
      FieldInfo infos[] = new FieldInfo[size];

      for (int i = 0; i < size; i++) {
        SimpleTextUtil.readLine(input, scratch);
        assert StringHelper.startsWith(scratch, NAME);
        String name = readString(NAME.length, scratch);
       
        SimpleTextUtil.readLine(input, scratch);
        assert StringHelper.startsWith(scratch, NUMBER);
        int fieldNumber = Integer.parseInt(readString(NUMBER.length, scratch));

        SimpleTextUtil.readLine(input, scratch);
        assert StringHelper.startsWith(scratch, ISINDEXED);
        boolean isIndexed = Boolean.parseBoolean(readString(ISINDEXED.length, scratch));
       
        final IndexOptions indexOptions;
        if (isIndexed) {
          SimpleTextUtil.readLine(input, scratch);
          assert StringHelper.startsWith(scratch, INDEXOPTIONS);
          indexOptions = IndexOptions.valueOf(readString(INDEXOPTIONS.length, scratch));         
        } else {
          indexOptions = null;
        }
       
        SimpleTextUtil.readLine(input, scratch);
        assert StringHelper.startsWith(scratch, STORETV);
        boolean storeTermVector = Boolean.parseBoolean(readString(STORETV.length, scratch));
       
        SimpleTextUtil.readLine(input, scratch);
        assert StringHelper.startsWith(scratch, PAYLOADS);
        boolean storePayloads = Boolean.parseBoolean(readString(PAYLOADS.length, scratch));
       
        SimpleTextUtil.readLine(input, scratch);
        assert StringHelper.startsWith(scratch, NORMS);
        boolean omitNorms = !Boolean.parseBoolean(readString(NORMS.length, scratch));
       
        SimpleTextUtil.readLine(input, scratch);
        assert StringHelper.startsWith(scratch, NORMS_TYPE);
        String nrmType = readString(NORMS_TYPE.length, scratch);
        final DocValuesType normsType = docValuesType(nrmType);
       
        SimpleTextUtil.readLine(input, scratch);
        assert StringHelper.startsWith(scratch, DOCVALUES);
        String dvType = readString(DOCVALUES.length, scratch);
        final DocValuesType docValuesType = docValuesType(dvType);
       
        SimpleTextUtil.readLine(input, scratch);
        assert StringHelper.startsWith(scratch, DOCVALUES_GEN);
        final long dvGen = Long.parseLong(readString(DOCVALUES_GEN.length, scratch));
       
        SimpleTextUtil.readLine(input, scratch);
        assert StringHelper.startsWith(scratch, NUM_ATTS);
        int numAtts = Integer.parseInt(readString(NUM_ATTS.length, scratch));
        Map<String,String> atts = new HashMap<>();

        for (int j = 0; j < numAtts; j++) {
          SimpleTextUtil.readLine(input, scratch);
          assert StringHelper.startsWith(scratch, ATT_KEY);
          String key = readString(ATT_KEY.length, scratch);
       
          SimpleTextUtil.readLine(input, scratch);
          assert StringHelper.startsWith(scratch, ATT_VALUE);
          String value = readString(ATT_VALUE.length, scratch);
          atts.put(key, value);
        }

        infos[i] = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector,
          omitNorms, storePayloads, indexOptions, docValuesType, normsType, Collections.unmodifiableMap(atts));
        infos[i].setDocValuesGen(dvGen);
      }

      SimpleTextUtil.checkFooter(input);
     
      FieldInfos fieldInfos = new FieldInfos(infos);
      success = true;
      return fieldInfos;
    } finally {
      if (success) {
        input.close();
      } else {
        IOUtils.closeWhileHandlingException(input);
      }
    }
  }
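
The readString(offset, scratch) calls above are a private helper of each SimpleText reader. Judging from the inline decoding in the readFields example further down (new String(scratch.bytes, scratch.offset + FIELD.length, ...)), it presumably amounts to nothing more than:

import java.nio.charset.StandardCharsets;

import org.apache.lucene.util.BytesRef;

// Presumed shape of the private readString helper: decode everything on the
// current line after a known prefix as UTF-8.
final class ReadString {
  static String readString(int offset, BytesRef scratch) {
    return new String(scratch.bytes, scratch.offset + offset,
        scratch.length - offset, StandardCharsets.UTF_8);
  }
}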

    assert info.hasDeletions();
    BytesRef scratch = new BytesRef();
    CharsRef scratchUTF16 = new CharsRef();
   
    String fileName = IndexFileNames.fileNameFromGeneration(info.info.name, LIVEDOCS_EXTENSION, info.getDelGen());
    ChecksumIndexInput in = null;
    boolean success = false;
    try {
      in = dir.openChecksumInput(fileName, context);
     
      SimpleTextUtil.readLine(in, scratch);

 
  // we don't actually write a .fdx-like index, instead we read the
  // stored fields file in entirety up-front and save the offsets
  // so we can seek to the documents later.
  private void readIndex(int size) throws IOException {
    ChecksumIndexInput input = new BufferedChecksumIndexInput(in);
    offsets = new long[size];
    int upto = 0;
    while (!scratch.equals(END)) {
      SimpleTextUtil.readLine(input, scratch);
      if (StringHelper.startsWith(scratch, DOC)) {
        offsets[upto] = input.getFilePointer();
        upto++;
      }
    }
    SimpleTextUtil.checkFooter(input);
    assert upto == offsets.length;

 
  // we don't actually write a .tvx-like index, instead we read the
  // vectors file in entirety up-front and save the offsets
  // so we can seek to the data later.
  private void readIndex(int maxDoc) throws IOException {
    ChecksumIndexInput input = new BufferedChecksumIndexInput(in);
    offsets = new long[maxDoc];
    int upto = 0;
    while (!scratch.equals(END)) {
      SimpleTextUtil.readLine(input, scratch);
      if (StringHelper.startsWith(scratch, DOC)) {
        offsets[upto] = input.getFilePointer();
        upto++;
      }
    }
    SimpleTextUtil.checkFooter(input);
    assert upto == offsets.length;
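
Both readIndex variants (stored fields above, term vectors here) scan the file once up front, record input.getFilePointer() right after each DOC line, and verify the footer in the same pass. The excerpts do not show how the offsets are consumed; presumably the readers simply seek the main input to offsets[docID] before parsing that document's lines, roughly:

import java.io.IOException;

import org.apache.lucene.store.IndexInput;

// Assumed consumer of the offsets[] built above (not shown in the excerpts).
final class OffsetSeek {
  static void seekToDoc(IndexInput in, long[] offsets, int docID) throws IOException {
    in.seek(offsets[docID]);
    // ... parse this document's lines from here ...
  }
}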

      }
    }
  }
 
  private TreeMap<String,Long> readFields(IndexInput in) throws IOException {
    ChecksumIndexInput input = new BufferedChecksumIndexInput(in);
    BytesRef scratch = new BytesRef(10);
    TreeMap<String,Long> fields = new TreeMap<>();
   
    while (true) {
      SimpleTextUtil.readLine(input, scratch);
      if (scratch.equals(END)) {
        SimpleTextUtil.checkFooter(input);
        return fields;
      } else if (StringHelper.startsWith(scratch, FIELD)) {
        String fieldName = new String(scratch.bytes, scratch.offset + FIELD.length, scratch.length - FIELD.length, StandardCharsets.UTF_8);
        fields.put(fieldName, input.getFilePointer());
      }
    }
  }

  /** Constructs a bit vector from the file <code>name</code> in Directory
    <code>d</code>, as written by the {@link #write} method.
    */
  public BitVector(Directory d, String name, IOContext context) throws IOException {
    ChecksumIndexInput input = d.openChecksumInput(name, context);

    try {
      final int firstInt = input.readInt();

      if (firstInt == -2) {
        // New format, with full header & version:
        version = CodecUtil.checkHeader(input, CODEC, VERSION_START, VERSION_CURRENT);
        size = input.readInt();
      } else {
        version = VERSION_PRE;
        size = firstInt;
      }
      if (size == -1) {
        if (version >= VERSION_DGAPS_CLEARED) {
          readClearedDgaps(input);
        } else {
          readSetDgaps(input);
        }
      } else {
        readBits(input);
      }

      if (version < VERSION_DGAPS_CLEARED) {
        invertAll();
      }

      if (version >= VERSION_CHECKSUM) {
        CodecUtil.checkFooter(input);
      } else {
        CodecUtil.checkEOF(input);
      }
      assert verifyCount();
    } finally {
      input.close();
    }
  }
