Package java.util.zip

Examples of java.util.zip.Checksum
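The fragments below are collected from real projects and are not self-contained. As orientation, here is a minimal, complete sketch of the Checksum API (CRC32 and Adler32 are the two standard implementations in java.util.zip):

import java.nio.charset.StandardCharsets;
import java.util.zip.Adler32;
import java.util.zip.CRC32;
import java.util.zip.Checksum;

public class ChecksumDemo {
    public static void main(String[] args) {
        byte[] data = "hello, checksum".getBytes(StandardCharsets.UTF_8);

        // CRC32 and Adler32 both implement the Checksum interface.
        Checksum crc32 = new CRC32();
        crc32.update(data, 0, data.length);
        System.out.printf("CRC-32:   %08x%n", crc32.getValue());

        Checksum adler32 = new Adler32();
        adler32.update(data, 0, data.length);
        System.out.printf("Adler-32: %08x%n", adler32.getValue());

        // reset() clears the running value so the instance can be reused.
        crc32.reset();
    }
}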


    private static int errors;

    public long crc( String str )
    {
        final Checksum checksum = new CRC32();

        // Note: String.getBytes() uses the platform default charset; pass an
        // explicit charset if the checksum must be reproducible across platforms.
        final byte[] bytes = str.getBytes();
        checksum.update( bytes, 0, bytes.length );
        return checksum.getValue();
    }


        logger.info( "total used=" + Ram.inMb( mem.used() ) );
    }

    private static long crc32( byte[] payload )
    {
        final Checksum checksum = new CRC32();
        checksum.update( payload, 0, payload.length );
        return checksum.getValue();
    }

                {
                    // last commit log entry didn't get completely written; that's OK.
                    break;
                }
                bufIn.reset(bytes, bytes.length);
                Checksum checksum = new CRC32();
                checksum.update(bytes, 0, bytes.length);
                if (claimedCRC32 != checksum.getValue())
                {
                    // this part of the log must not have been fsynced. The rest is
                    // probably bad too, but there is no harm in trying the remaining entries.
                    continue;
                }

            try
            {
                currentPosition = logWriter_.getFilePointer();
                CommitLogContext cLogCtx = new CommitLogContext(logFile_, currentPosition);
                maybeUpdateHeader(rowMutation);
                Checksum checksum = new CRC32();
                if (serializedRow instanceof DataOutputBuffer)
                {
                    DataOutputBuffer buffer = (DataOutputBuffer) serializedRow;
                    logWriter_.writeLong(buffer.getLength());
                    logWriter_.write(buffer.getData(), 0, buffer.getLength());
                    checksum.update(buffer.getData(), 0, buffer.getLength());
                }
                else
                {
                    assert serializedRow instanceof byte[];
                    byte[] bytes = (byte[]) serializedRow;
                    logWriter_.writeLong(bytes.length);
                    logWriter_.write(bytes);
                    checksum.update(bytes, 0, bytes.length);
                }
                logWriter_.writeLong(checksum.getValue());
                maybeRollLog();
                return cLogCtx;
            }
            catch (IOException e)
            {
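The two commit-log fragments above share one record layout: the payload length, the payload, then the payload's CRC-32; on replay the CRC is recomputed and compared with the stored value. A minimal self-contained sketch of that layout (writeRecord/readRecord are illustrative names, not the original API):

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.zip.CRC32;
import java.util.zip.Checksum;

public class LogRecordSketch {
    static void writeRecord(DataOutputStream out, byte[] payload) throws IOException {
        Checksum checksum = new CRC32();
        checksum.update(payload, 0, payload.length);
        out.writeLong(payload.length);       // record length
        out.write(payload);                  // record body
        out.writeLong(checksum.getValue());  // trailing CRC
    }

    static byte[] readRecord(DataInputStream in) throws IOException {
        byte[] payload = new byte[(int) in.readLong()];
        in.readFully(payload);
        long claimedCRC32 = in.readLong();
        Checksum checksum = new CRC32();
        checksum.update(payload, 0, payload.length);
        if (claimedCRC32 != checksum.getValue()) {
            return null; // record was never completely flushed to disk
        }
        return payload;
    }
}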

      });

      results.add(futureChecksum);
        }

    Checksum checksum = new CRC32();
    int currentIndex = -1;
    for (Future<FileParseResult> future : results) {
      currentIndex++;
      int fileChecksum = 0;
      try {
        FileParseResult fileResult = future.get();
        if (COMPUTE_TRANSITION_STATS) {
          totalTransitionsPerFile[currentPass][currentIndex] = sum(fileResult.parserTotalTransitions);
          computedTransitionsPerFile[currentPass][currentIndex] = sum(fileResult.parserComputedTransitions);

          if (DETAILED_DFA_STATE_STATS) {
            decisionInvocationsPerFile[currentPass][currentIndex] = fileResult.decisionInvocations;
            fullContextFallbackPerFile[currentPass][currentIndex] = fileResult.fullContextFallback;
            nonSllPerFile[currentPass][currentIndex] = fileResult.nonSll;
            totalTransitionsPerDecisionPerFile[currentPass][currentIndex] = fileResult.parserTotalTransitions;
            computedTransitionsPerDecisionPerFile[currentPass][currentIndex] = fileResult.parserComputedTransitions;
            fullContextTransitionsPerDecisionPerFile[currentPass][currentIndex] = fileResult.parserFullContextTransitions;
          }
        }

        if (COMPUTE_TIMING_STATS) {
          timePerFile[currentPass][currentIndex] = fileResult.endTime - fileResult.startTime;
          tokensPerFile[currentPass][currentIndex] = fileResult.tokenCount;
        }

        fileChecksum = fileResult.checksum;
      } catch (ExecutionException ex) {
        Logger.getLogger(TestPerformance.class.getName()).log(Level.SEVERE, null, ex);
      }

      if (COMPUTE_CHECKSUM) {
        updateChecksum(checksum, fileChecksum);
      }
    }

    executorService.shutdown();
    executorService.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);

    System.out.format("%d. Total parse time for %d files (%d KB, %d tokens%s): %.0fms%n",
        currentPass + 1,
        inputCount,
        inputSize / 1024,
        tokenCount.get(currentPass),
        COMPUTE_CHECKSUM ? String.format(", checksum 0x%8X", checksum.getValue()) : "",
        (double)(System.nanoTime() - startTime) / 1000000.0);

    if (sharedLexers.length > 0) {
      int index = FILE_GRANULARITY ? 0 : ((NumberedThread)Thread.currentThread()).getThreadNumber();
      Lexer lexer = sharedLexers[index];
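The updateChecksum(checksum, fileChecksum) call above folds each per-file checksum into the aggregate. Checksum.update(int) consumes only the low byte of its argument, so a helper along these lines (a sketch; the harness's exact implementation may differ) feeds the int one byte at a time:

    private static void updateChecksum(Checksum checksum, int value) {
        // least-significant byte first
        checksum.update(value & 0xFF);
        checksum.update((value >>> 8) & 0xFF);
        checksum.update((value >>> 16) & 0xFF);
        checksum.update((value >>> 24) & 0xFF);
    }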

      parserCtor.newInstance(new CommonTokenStream(tokenSource));

      return new ParserFactory() {
        @Override
        public FileParseResult parseFile(CharStream input, int currentPass, int thread) {
          final Checksum checksum = new CRC32();

          final long startTime = System.nanoTime();
          assert thread >= 0 && thread < NUMBER_OF_THREADS;

          try {
            ParseTreeListener listener = sharedListeners[thread];
            if (listener == null) {
              listener = listenerClass.newInstance();
              sharedListeners[thread] = listener;
            }

            Lexer lexer = sharedLexers[thread];
            if (REUSE_LEXER && lexer != null) {
              lexer.setInputStream(input);
            } else {
              Lexer previousLexer = lexer;
                            lexer = lexerCtor.newInstance(input);
              DFA[] decisionToDFA = (FILE_GRANULARITY || previousLexer == null ? lexer : previousLexer).getInterpreter().decisionToDFA;
              if (!REUSE_LEXER_DFA || (!FILE_GRANULARITY && previousLexer == null)) {
                decisionToDFA = new DFA[decisionToDFA.length];
              }

              if (COMPUTE_TRANSITION_STATS) {
                lexer.setInterpreter(new StatisticsLexerATNSimulator(lexer, lexer.getATN(), decisionToDFA, lexer.getInterpreter().getSharedContextCache()));
              } else if (!REUSE_LEXER_DFA) {
                lexer.setInterpreter(new LexerATNSimulator(lexer, lexer.getATN(), decisionToDFA, lexer.getInterpreter().getSharedContextCache()));
              }

              sharedLexers[thread] = lexer;
            }

            lexer.removeErrorListeners();
            lexer.addErrorListener(DescriptiveErrorListener.INSTANCE);

            if (lexer.getInterpreter().decisionToDFA[0] == null) {
              ATN atn = lexer.getATN();
              for (int i = 0; i < lexer.getInterpreter().decisionToDFA.length; i++) {
                lexer.getInterpreter().decisionToDFA[i] = new DFA(atn.getDecisionState(i), i);
              }
            }

            CommonTokenStream tokens = new CommonTokenStream(lexer);
            tokens.fill();
            tokenCount.addAndGet(currentPass, tokens.size());

            if (COMPUTE_CHECKSUM) {
              for (Token token : tokens.getTokens()) {
                updateChecksum(checksum, token);
              }
            }

            if (!RUN_PARSER) {
              return new FileParseResult(input.getSourceName(), (int)checksum.getValue(), null, tokens.size(), startTime, lexer, null);
            }

            final long parseStartTime = System.nanoTime();
            Parser parser = sharedParsers[thread];
            if (REUSE_PARSER && parser != null) {
              parser.setInputStream(tokens);
            } else {
              Parser previousParser = parser;

              if (USE_PARSER_INTERPRETER) {
                Parser referenceParser = parserCtor.newInstance(tokens);
                parser = new ParserInterpreter(referenceParser.getGrammarFileName(), referenceParser.getVocabulary(), Arrays.asList(referenceParser.getRuleNames()), referenceParser.getATN(), tokens);
              }
              else {
                parser = parserCtor.newInstance(tokens);
              }

              DFA[] decisionToDFA = (FILE_GRANULARITY || previousParser == null ? parser : previousParser).getInterpreter().decisionToDFA;
              if (!REUSE_PARSER_DFA || (!FILE_GRANULARITY && previousParser == null)) {
                decisionToDFA = new DFA[decisionToDFA.length];
              }

              if (COMPUTE_TRANSITION_STATS) {
                parser.setInterpreter(new StatisticsParserATNSimulator(parser, parser.getATN(), decisionToDFA, parser.getInterpreter().getSharedContextCache()));
              } else if (!REUSE_PARSER_DFA) {
                parser.setInterpreter(new ParserATNSimulator(parser, parser.getATN(), decisionToDFA, parser.getInterpreter().getSharedContextCache()));
              }

              sharedParsers[thread] = parser;
            }

            parser.removeParseListeners();
            parser.removeErrorListeners();
            if (!TWO_STAGE_PARSING) {
              parser.addErrorListener(DescriptiveErrorListener.INSTANCE);
              parser.addErrorListener(new SummarizingDiagnosticErrorListener());
            }

            if (parser.getInterpreter().decisionToDFA[0] == null) {
              ATN atn = parser.getATN();
              for (int i = 0; i < parser.getInterpreter().decisionToDFA.length; i++) {
                parser.getInterpreter().decisionToDFA[i] = new DFA(atn.getDecisionState(i), i);
              }
            }

            parser.getInterpreter().setPredictionMode(TWO_STAGE_PARSING ? PredictionMode.SLL : PREDICTION_MODE);
            parser.setBuildParseTree(BUILD_PARSE_TREES);
            if (!BUILD_PARSE_TREES && BLANK_LISTENER) {
              parser.addParseListener(listener);
            }
            if (BAIL_ON_ERROR || TWO_STAGE_PARSING) {
              parser.setErrorHandler(new BailErrorStrategy());
            }

            Method parseMethod = parserClass.getMethod(entryPoint);
            Object parseResult;

            try {
              if (COMPUTE_CHECKSUM && !BUILD_PARSE_TREES) {
                parser.addParseListener(new ChecksumParseTreeListener(checksum));
              }

              if (USE_PARSER_INTERPRETER) {
                ParserInterpreter parserInterpreter = (ParserInterpreter)parser;
                parseResult = parserInterpreter.parse(Collections.lastIndexOfSubList(Arrays.asList(parser.getRuleNames()), Collections.singletonList(entryPoint)));
              }
              else {
                parseResult = parseMethod.invoke(parser);
              }
            } catch (InvocationTargetException ex) {
              if (!TWO_STAGE_PARSING) {
                throw ex;
              }

              String sourceName = tokens.getSourceName();
              sourceName = sourceName != null && !sourceName.isEmpty() ? sourceName+": " : "";
              if (REPORT_SECOND_STAGE_RETRY) {
                System.err.println(sourceName+"Forced to retry with full context.");
              }

              if (!(ex.getCause() instanceof ParseCancellationException)) {
                throw ex;
              }

              tokens.reset();
              if (REUSE_PARSER && parser != null) {
                parser.setInputStream(tokens);
              } else {
                Parser previousParser = parser;

                if (USE_PARSER_INTERPRETER) {
                  Parser referenceParser = parserCtor.newInstance(tokens);
                  parser = new ParserInterpreter(referenceParser.getGrammarFileName(), referenceParser.getVocabulary(), Arrays.asList(referenceParser.getRuleNames()), referenceParser.getATN(), tokens);
                }
                else {
                  parser = parserCtor.newInstance(tokens);
                }

                DFA[] decisionToDFA = previousParser.getInterpreter().decisionToDFA;
                if (COMPUTE_TRANSITION_STATS) {
                  parser.setInterpreter(new StatisticsParserATNSimulator(parser, parser.getATN(), decisionToDFA, parser.getInterpreter().getSharedContextCache()));
                } else if (!REUSE_PARSER_DFA) {
                  parser.setInterpreter(new ParserATNSimulator(parser, parser.getATN(), decisionToDFA, parser.getInterpreter().getSharedContextCache()));
                }

                sharedParsers[thread] = parser;
              }

              parser.removeParseListeners();
              parser.removeErrorListeners();
              parser.addErrorListener(DescriptiveErrorListener.INSTANCE);
              parser.addErrorListener(new SummarizingDiagnosticErrorListener());
              parser.getInterpreter().setPredictionMode(PredictionMode.LL);
              parser.setBuildParseTree(BUILD_PARSE_TREES);
              if (COMPUTE_CHECKSUM && !BUILD_PARSE_TREES) {
                parser.addParseListener(new ChecksumParseTreeListener(checksum));
              }
              if (!BUILD_PARSE_TREES && BLANK_LISTENER) {
                parser.addParseListener(listener);
              }
              if (BAIL_ON_ERROR) {
                parser.setErrorHandler(new BailErrorStrategy());
              }

              parseResult = parseMethod.invoke(parser);
            }

            assertThat(parseResult, instanceOf(ParseTree.class));
            if (COMPUTE_CHECKSUM && BUILD_PARSE_TREES) {
              ParseTreeWalker.DEFAULT.walk(new ChecksumParseTreeListener(checksum), (ParseTree)parseResult);
            }
            if (BUILD_PARSE_TREES && BLANK_LISTENER) {
              ParseTreeWalker.DEFAULT.walk(listener, (ParseTree)parseResult);
            }

            return new FileParseResult(input.getSourceName(), (int)checksum.getValue(), (ParseTree)parseResult, tokens.size(), TIME_PARSE_ONLY ? parseStartTime : startTime, lexer, parser);
          } catch (Exception e) {
            if (!REPORT_SYNTAX_ERRORS && e instanceof ParseCancellationException) {
              return new FileParseResult("unknown", (int)checksum.getValue(), null, 0, startTime, null, null);
            }

            e.printStackTrace(System.out);
            throw new IllegalStateException(e);
          }
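During parsing, the same checksum is fed by a ChecksumParseTreeListener. A hypothetical sketch of such a listener (the real class in the test harness may fold different token fields):

import java.nio.charset.StandardCharsets;
import java.util.zip.Checksum;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.tree.ErrorNode;
import org.antlr.v4.runtime.tree.ParseTreeListener;
import org.antlr.v4.runtime.tree.TerminalNode;

class ChecksumParseTreeListener implements ParseTreeListener {
    private final Checksum checksum;

    ChecksumParseTreeListener(Checksum checksum) {
        this.checksum = checksum;
    }

    @Override
    public void visitTerminal(TerminalNode node) {
        // Fold each token's type and text into the running checksum.
        Token token = node.getSymbol();
        updateChecksum(checksum, token.getType());
        for (byte b : token.getText().getBytes(StandardCharsets.UTF_8)) {
            checksum.update(b);
        }
    }

    @Override public void visitErrorNode(ErrorNode node) { }
    @Override public void enterEveryRule(ParserRuleContext ctx) { }
    @Override public void exitEveryRule(ParserRuleContext ctx) { }

    // the byte-folding helper sketched earlier, inlined for completeness
    private static void updateChecksum(Checksum checksum, int value) {
        checksum.update(value & 0xFF);
        checksum.update((value >>> 8) & 0xFF);
        checksum.update((value >>> 16) & 0xFF);
        checksum.update((value >>> 24) & 0xFF);
    }
}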

            byte[] buf = Util.marshallTxnEntry(hdr, txn);
            if (buf == null || buf.length == 0) {
                throw new IOException("Faulty serialization for header " +
                        "and txn");
            }
            Checksum crc = makeChecksumAlgorithm();
            crc.update(buf, 0, buf.length);
            oa.writeLong(crc.getValue(), "txnEntryCRC");
            Util.writeTxnBytes(oa, buf);
           
            return true;
        }
        return false;

                // Since we preallocate the log file, a null or empty record
                // means EOF (or a corrupted record)
                if (bytes == null || bytes.length == 0)
                    throw new EOFException("Failed to read");
                // validate CRC
                Checksum crc = makeChecksumAlgorithm();
                crc.update(bytes, 0, bytes.length);
                if (crcValue != crc.getValue())
                    throw new IOException(CRC_ERROR);
                InputArchive iab = BinaryInputArchive
                                    .getArchive(new ByteArrayInputStream(bytes));
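Both transaction-log fragments above compute the CRC by hand over a marshalled byte array. When the data already moves through streams, java.util.zip also provides CheckedInputStream and CheckedOutputStream, which update a Checksum transparently as bytes pass through:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.zip.CRC32;
import java.util.zip.CheckedInputStream;
import java.util.zip.CheckedOutputStream;

public class CheckedStreamDemo {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream sink = new ByteArrayOutputStream();
        CheckedOutputStream out = new CheckedOutputStream(sink, new CRC32());
        out.write("some payload".getBytes("UTF-8"));
        long writeCrc = out.getChecksum().getValue();

        CheckedInputStream in = new CheckedInputStream(
                new ByteArrayInputStream(sink.toByteArray()), new CRC32());
        while (in.read() != -1) {
            // reading drives the checksum forward
        }
        long readCrc = in.getChecksum().getValue();

        System.out.println(writeCrc == readCrc); // prints: true
    }
}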

                // Using Adler-32 instead of CRC-32 because it's much faster, and
                // its weakness on short messages of a few hundred bytes is not a
                // factor here, since our write batches are going to be much larger.
                Checksum checksum = new Adler32();
                for (PageWrite w : batch) {
                    try {
                        checksum.update(w.diskBound, 0, pageSize);
                    } catch (Throwable t) {
                        throw IOExceptionSupport.create(
                                "Cannot create recovery file. Reason: " + t, t);
                    }
                }

                // Can we shrink the recovery buffer?
                if (recoveryPageCount > recoveryFileMaxPageCount) {
                    int t = Math.max(recoveryFileMinPageCount, batch.size());
                    recoveryFile.setLength(recoveryFileSizeForPages(t));
                }

                // Record the page writes in the recovery buffer.
                recoveryFile.seek(0);
                // Store the next tx id...
                recoveryFile.writeLong(nextTxid.get());
                // Store the checksum for the write batch so that on recovery we
                // know whether we have a consistent write batch on disk.
                recoveryFile.writeLong(checksum.getValue());
                // Write the # of pages that will follow
                recoveryFile.writeInt(batch.size());

                // Write the pages.
                recoveryFile.seek(RECOVERY_FILE_HEADER_SIZE);
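The comment in this fragment chooses Adler-32 over CRC-32 for speed on large write batches. A rough way to test that trade-off on a given JVM (not a rigorous benchmark; warm-up and JIT effects matter):

import java.util.Random;
import java.util.zip.Adler32;
import java.util.zip.CRC32;
import java.util.zip.Checksum;

public class ChecksumTiming {
    public static void main(String[] args) {
        byte[] buf = new byte[4 * 1024 * 1024];
        new Random(42).nextBytes(buf);

        for (Checksum c : new Checksum[] { new CRC32(), new Adler32() }) {
            long start = System.nanoTime();
            for (int i = 0; i < 100; i++) {
                c.reset();
                c.update(buf, 0, buf.length);
            }
            System.out.printf("%s: %.1f ms%n",
                    c.getClass().getSimpleName(),
                    (System.nanoTime() - start) / 1e6);
        }
    }
}

Note that recent JVMs intrinsify CRC32 with hardware instructions, so CRC-32 may match or beat Adler-32 on modern hardware; the original assumption is worth re-measuring.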

        long nextTxId = recoveryFile.readLong();
        long expectedChecksum = recoveryFile.readLong();
        int pageCounter = recoveryFile.readInt();
       
        recoveryFile.seek(RECOVERY_FILE_HEADER_SIZE);
        Checksum checksum = new Adler32();
        LinkedHashMap<Long, byte[]> batch = new LinkedHashMap<Long, byte[]>();
        try {
            for (int i = 0; i < pageCounter; i++) {
                long offset = recoveryFile.readLong();
                byte []data = new byte[pageSize];
                if( recoveryFile.read(data, 0, pageSize) != pageSize ) {
                    // Invalid recovery record: could not fully read the data,
                    // probably due to a partial write to the recovery buffer.
                    return nextTxId;
                }
                checksum.update(data, 0, pageSize);
                batch.put(offset, data);
            }
        } catch (Exception e) {
            // If an error occurred, it was because the redo buffer was not fully
            // written out correctly, so don't redo it; the pages should still be consistent.
            LOG.debug("Redo buffer was not fully intact: ", e);
            return nextTxId;
        }
       
        recoveryPageCount = pageCounter;
       
        // If the checksum is not valid then the recovery buffer was partially written to disk.
        if( checksum.getValue() != expectedChecksum ) {
            return nextTxId;
        }
       
        // Re-apply all the writes in the recovery buffer.
        for (Map.Entry<Long, byte[]> e : batch.entrySet()) {
