Examples of HgDataFile


Examples of org.tmatesoft.hg.repo.HgDataFile

     * @throws HgRuntimeException
     */
    public List<QueueElement> buildFileRenamesQueue(int csetStart, int csetEnd) throws HgPathNotFoundException, HgRuntimeException {
      LinkedList<QueueElement> rv = new LinkedList<QueueElement>();
      Nodeid startRev = null;
      HgDataFile fileNode = repo.getFileNode(file);
      if (!fileNode.exists()) {
        throw new HgPathNotFoundException(String.format("File %s not found in the repository", file), file);
      }
      if (followAncestry) {
        // TODO subject to dedicated method either in HgRepository (getWorkingCopyParentRevisionIndex)
        // or in the HgDataFile (getWorkingCopyOriginRevision)
        Nodeid wdParentChangeset = repo.getWorkingCopyParents().first();
        if (!wdParentChangeset.isNull()) {
          int wdParentRevIndex = repo.getChangelog().getRevisionIndex(wdParentChangeset);
          startRev = repo.getManifest().getFileRevision(wdParentRevIndex, fileNode.getPath());
        }
        // else fall-through, assume null (eventually, lastRevision()) is ok here
      }
      QueueElement p = new QueueElement(fileNode, startRev);
      if (!followRenames) {
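
The excerpt above opens a file node and guards against a missing path before building the rename queue. For orientation, here is a minimal sketch of those HgDataFile calls in isolation (assuming an already-opened HgRepository named repo and a placeholder path "src/Main.java"; both are illustrative, not taken from the snippet):

void printLastChangeset(HgRepository repo) throws HgRuntimeException {
  HgDataFile fileNode = repo.getFileNode("src/Main.java"); // placeholder path
  if (!fileNode.exists()) {
    return; // the file was never committed to this repository
  }
  // map the newest file revision to the changeset that introduced it
  Nodeid lastFileRev = fileNode.getRevision(fileNode.getLastRevision());
  Nodeid cset = fileNode.getChangesetRevision(lastFileRev);
  System.out.println("last revision of the file comes from changeset " + cset);
}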

Examples of org.tmatesoft.hg.repo.HgDataFile

     * @return list of history elements, from oldest to newest. In case {@link #followAncestry} is <code>true</code>, the list
     * is modifiable (to further augment with last/first elements of renamed file histories)
     */
    List<HistoryNode> go(QueueElement qe) throws HgRuntimeException {
      resultHistory = null;
      HgDataFile fileNode = qe.file();
      // TODO int fileLastRevIndexToVisit = qe.fileTo
      int fileLastRevIndexToVisit = followAncestry ? fileNode.getRevisionIndex(qe.lastFileRev()) : fileNode.getLastRevision();
      completeHistory = new HistoryNode[fileLastRevIndexToVisit+1];
      commitRevisions = new int[completeHistory.length];
      fileNode.indexWalk(qe.fileFrom(), fileLastRevIndexToVisit, this);
      if (!followAncestry) {
        resultHistory = new ArrayList<HistoryNode>(fileLastRevIndexToVisit - qe.fileFrom() + 1);
        // items in completeHistory with index < qe.fileFrom are empty
        for (int i = qe.fileFrom(); i <= fileLastRevIndexToVisit; i++) {
          resultHistory.add(completeHistory[i]);
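
The tree builder above drives HgDataFile.indexWalk with itself as the inspector. A stripped-down sketch of the same walk (assuming an existing HgDataFile fileNode; TIP is the same HgRepository constant the test code near the bottom of this page passes to indexWalk):

void dumpRevisionLinks(HgDataFile fileNode) throws HgRuntimeException {
  fileNode.indexWalk(0, HgRepository.TIP, new HgDataFile.RevisionInspector() {
    public void next(int fileRevisionIndex, Nodeid revision, int linkedRevisionIndex) throws HgRuntimeException {
      // linkedRevisionIndex is the changelog index of the changeset that introduced this file revision
      System.out.printf("file rev %d (%s) -> changeset #%d%n", fileRevisionIndex, revision, linkedRevisionIndex);
    }
  });
}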

Examples of org.tmatesoft.hg.repo.HgDataFile

    return fileHistory;
  }
 
  // NO_REVISION is not allowed as any argument
  public void diff(int fileRevIndex1, int clogRevIndex1, int fileRevIndex2, int clogRevIndex2) throws HgCallbackTargetException, HgRuntimeException {
    HgDataFile targetFile = linesCache.getFile(clogRevIndex2);
    LineSequence c1 = linesCache.lines(clogRevIndex1, fileRevIndex1);
    LineSequence c2 = linesCache.lines(clogRevIndex2, fileRevIndex2);
    DiffHelper<LineSequence> pg = new DiffHelper<LineSequence>();
    pg.init(c1, c2);
    BlameBlockInspector bbi = new BlameBlockInspector(targetFile, fileRevIndex2, insp, clogRevIndex1, clogRevIndex2);

Examples of org.tmatesoft.hg.repo.HgDataFile

    pg.findMatchingBlocks(bbi);
    bbi.checkErrors();
  }

  public void annotateChange(int fileRevIndex, int csetRevIndex, int[] fileParentRevs, int[] fileParentClogRevs) throws HgCallbackTargetException, HgRuntimeException {
    HgDataFile targetFile = linesCache.getFile(csetRevIndex);
    final LineSequence fileRevLines = linesCache.lines(csetRevIndex, fileRevIndex);
    if (fileParentClogRevs[0] != NO_REVISION && fileParentClogRevs[1] != NO_REVISION) {
      int p1ClogIndex = fileParentClogRevs[0];
      int p2ClogIndex = fileParentClogRevs[1];
      LineSequence p1Lines = linesCache.lines(p1ClogIndex, fileParentRevs[0]);

Examples of org.tmatesoft.hg.repo.HgDataFile

  }

  private LineSequence getBaseRevisionLines(int clogRevIndex, int[] fileParentClogRevs) {
    assert fileParentClogRevs[0] >= 0;
    assert fileParentClogRevs[1] >= 0;
    HgDataFile targetFile = linesCache.getFile(clogRevIndex);
    final HgRepository repo = targetFile.getRepo();
    if (clogMap == null) {
      // FIXME replace HgParentChildMap with revlog.indexWalk(AncestorIterator))
      clogMap = new HgParentChildMap<HgChangelog>(repo.getChangelog());
      clogMap.init();
    }
    final HgRevisionMap<HgChangelog> m = clogMap.getRevisionMap();
    Nodeid ancestor = clogMap.ancestor(m.revision(fileParentClogRevs[0]), m.revision(fileParentClogRevs[1]));
    final int ancestorRevIndex = m.revisionIndex(ancestor);
    Nodeid fr = repo.getManifest().getFileRevision(ancestorRevIndex, targetFile.getPath());
    if (fr == null) {
      return LineSequence.newlines(new byte[0]);
    }
    return linesCache.lines(ancestorRevIndex, targetFile.getRevisionIndex(fr));
  }
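
Outside the blame machinery, the manifest lookup used above is also the general way to ask which revision of a file a given changeset carries. A minimal sketch (assuming an HgRepository repo, an HgDataFile fileNode from that repository, and some changelog index csetRevIndex):

void locateFileRevision(HgRepository repo, HgDataFile fileNode, int csetRevIndex) throws HgRuntimeException {
  Nodeid fileRev = repo.getManifest().getFileRevision(csetRevIndex, fileNode.getPath());
  if (fileRev == null) {
    System.out.println("file is not present in changeset #" + csetRevIndex);
    return;
  }
  int fileRevIndex = fileNode.getRevisionIndex(fileRev);
  System.out.println("changeset #" + csetRevIndex + " carries file revision #" + fileRevIndex);
}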

Examples of org.tmatesoft.hg.repo.HgDataFile

    }
   
    public boolean next(Nodeid nid, Path fname, Flags flags) {
      WorkingDirFileWriter workingDirWriter = null;
      try {
        HgDataFile df = hgRepo.getRepo().getFileNode(fname);
        int fileRevIndex = df.getRevisionIndex(nid);
        // check out files based on manifest
        workingDirWriter = new WorkingDirFileWriter(hgRepo);
        workingDirWriter.processFile(df, fileRevIndex, flags);
        lastWrittenFileSize = workingDirWriter.bytesWritten();
        lastFileMode = workingDirWriter.fmode();

Examples of org.tmatesoft.hg.repo.HgDataFile

    public LineSequence lines(int clogRevIndex, int fileRevIndex) throws HgRuntimeException {
      Pair<Integer, LineSequence> cached = checkCache(clogRevIndex);
      if (cached != null) {
        return cached.second();
      }
      HgDataFile df = getFile(clogRevIndex);
      try {
        ByteArrayChannel c;
        df.content(fileRevIndex, c = new ByteArrayChannel());
        LineSequence rv = LineSequence.newlines(c.toArray());
        lruCache.addFirst(new Pair<Integer, LineSequence>(clogRevIndex, rv));
        if (lruCache.size() > limit) {
          lruCache.removeLast();
        }
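
The cache above ultimately reads revision content through HgDataFile.content into a ByteArrayChannel. A minimal sketch of that read on its own (the broad throws clause is a placeholder for HgRuntimeException plus whatever checked exception content() declares, which the truncated try block above does not show):

byte[] readFileRevision(HgDataFile df, int fileRevIndex) throws Exception {
  ByteArrayChannel sink = new ByteArrayChannel();
  df.content(fileRevIndex, sink); // streams the revision into the in-memory sink
  return sink.toArray();
}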

Examples of org.tmatesoft.hg.repo.HgDataFile

        FileRenameQueueBuilder frqBuilder = new FileRenameQueueBuilder();
        List<QueueElement> fileRenames = frqBuilder.buildFileRenamesQueue(firstCset, lastCset);
        progressHelper.start(fileRenames.size());
        for (int nameIndex = 0, fileRenamesSize = fileRenames.size(); nameIndex < fileRenamesSize; nameIndex++) {
          QueueElement curRename = fileRenames.get(nameIndex);
          HgDataFile fileNode = curRename.file();
          if (followAncestry) {
            TreeBuildInspector treeBuilder = new TreeBuildInspector(followAncestry);
            @SuppressWarnings("unused")
            List<HistoryNode> fileAncestry = treeBuilder.go(curRename);
            int[] commitRevisions = narrowChangesetRange(treeBuilder.getCommitRevisions(), firstCset, lastCset);
            if (iterateDirection == HgIterateDirection.OldToNew) {
              repo.getChangelog().range(filterInsp, commitRevisions);
              csetTransform.checkFailure();
            } else {
              assert iterateDirection == HgIterateDirection.NewToOld;
              // visit one by one in the opposite direction
              for (int i = commitRevisions.length-1; i >= 0; i--) {
                int csetWithFileChange = commitRevisions[i];
                repo.getChangelog().range(csetWithFileChange, csetWithFileChange, filterInsp);
              }
            }
          } else {
            // report complete file history (XXX may narrow range with [startRev, endRev], but need to go from file rev to link rev)
            int fileStartRev = curRename.fileFrom();
            int fileEndRev = curRename.file().getLastRevision(); //curRename.fileTo();
            if (iterateDirection == HgIterateDirection.OldToNew) {
              fileNode.history(fileStartRev, fileEndRev, filterInsp);
              csetTransform.checkFailure();
            } else {
              assert iterateDirection == HgIterateDirection.NewToOld;
              BatchRangeHelper brh = new BatchRangeHelper(fileStartRev, fileEndRev, BATCH_SIZE, true);
              BatchChangesetInspector batchInspector = new BatchChangesetInspector(Math.min(fileEndRev-fileStartRev+1, BATCH_SIZE));
              filterInsp.delegateTo(batchInspector);
              while (brh.hasNext()) {
                brh.next();
                fileNode.history(brh.start(), brh.end(), filterInsp);
                for (BatchChangesetInspector.BatchRecord br : batchInspector.iterate(true /*iterateDirection == IterateDirection.FromNewToOld*/)) {
                  csetTransform.next(br.csetIndex, br.csetRevision, br.cset);
                  csetTransform.checkFailure();
                }
                batchInspector.reset();
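
When neither batching nor iteration direction matters, the complete history of a file reduces to a single call. A minimal sketch, assuming an existing HgDataFile fileNode and an inspector implementation named inspector (its type is taken to be HgChangelog.Inspector here; the snippet only shows the call shape, not the declared type of filterInsp):

void reportFullHistory(HgDataFile fileNode, HgChangelog.Inspector inspector) throws HgRuntimeException {
  // visits the changesets that touch the file, from the oldest file revision to the newest
  fileNode.history(0, fileNode.getLastRevision(), inspector);
}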

Examples of org.tmatesoft.hg.repo.HgDataFile

    final HgChangelog clog = repository.getChangelog();
    // warm-up
    HgRevisionMap<HgChangelog> clogMap = new HgRevisionMap<HgChangelog>(clog).init();

    for (String fname : fileNames) {
      HgDataFile fileNode = repository.getFileNode(fname);
      // warm-up
      HgRevisionMap<HgDataFile> fileMap = new HgRevisionMap<HgDataFile>(fileNode).init();
      //
      final int latestRevision = fileNode.getLastRevision();
      //
      final long start_0 = System.nanoTime();
      final Map<Nodeid, Nodeid> changesetToNodeid_0 = new HashMap<Nodeid, Nodeid>();
      for (int fileRevisionIndex = 0; fileRevisionIndex <= latestRevision; fileRevisionIndex++) {
        Nodeid fileRevision = fileNode.getRevision(fileRevisionIndex);
        Nodeid changesetRevision = fileNode.getChangesetRevision(fileRevision);
        changesetToNodeid_0.put(changesetRevision, fileRevision);
      }
      final long end_0 = System.nanoTime();
      //
      final long start_1 = System.nanoTime();
      fileMap = new HgRevisionMap<HgDataFile>(fileNode).init();
      final long start_1a = System.nanoTime();
      final Map<Nodeid, Nodeid> changesetToNodeid_1 = new HashMap<Nodeid, Nodeid>();
      for (int revision = 0; revision <= latestRevision; revision++) {
        final Nodeid nodeId = fileMap.revision(revision);
        int localCset = fileNode.getChangesetRevisionIndex(revision);
        final Nodeid changesetId = clog.getRevision(localCset);
//        final Nodeid changesetId = fileNode.getChangesetRevision(nodeId);
        changesetToNodeid_1.put(changesetId, nodeId);
      }
      final long end_1 = System.nanoTime();
      //
      final long start_2 = System.nanoTime();
      clogMap = new HgRevisionMap<HgChangelog>(clog).init();
      fileMap = new HgRevisionMap<HgDataFile>(fileNode).init();
      final Map<Nodeid, Nodeid> changesetToNodeid_2 = new HashMap<Nodeid, Nodeid>();
      final long start_2a = System.nanoTime();
      for (int revision = 0; revision <= latestRevision; revision++) {
        Nodeid nidFile = fileMap.revision(revision);
        int localCset = fileNode.getChangesetRevisionIndex(revision);
        Nodeid nidCset = clogMap.revision(localCset);
        changesetToNodeid_2.put(nidCset, nidFile);
      }
      final long end_2 = System.nanoTime();
      Assert.assertEquals(changesetToNodeid_1, changesetToNodeid_2);
      //
      final long start_3 = System.nanoTime();
      final Map<Nodeid, Nodeid> changesetToNodeid_3 = new HashMap<Nodeid, Nodeid>();
      fileNode.indexWalk(0, TIP, new HgDataFile.RevisionInspector() {
 
        public void next(int fileRevisionIndex, Nodeid revision, int linkedRevisionIndex) throws HgRuntimeException {
          changesetToNodeid_3.put(clog.getRevision(linkedRevisionIndex), revision);
        }
      });
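
Distilled from the timing code above, the map-based way of building the changeset-to-file-revision mapping amounts to the following sketch (same fileNode and clog as in the test):

Map<Nodeid, Nodeid> mapChangesetsToFileRevs(HgDataFile fileNode, HgChangelog clog) throws HgRuntimeException {
  HgRevisionMap<HgDataFile> fileMap = new HgRevisionMap<HgDataFile>(fileNode).init();
  HgRevisionMap<HgChangelog> clogMap = new HgRevisionMap<HgChangelog>(clog).init();
  Map<Nodeid, Nodeid> rv = new HashMap<Nodeid, Nodeid>();
  for (int i = 0, last = fileNode.getLastRevision(); i <= last; i++) {
    // both lookups are plain index accesses once the maps are initialized
    rv.put(clogMap.revision(fileNode.getChangesetRevisionIndex(i)), fileMap.revision(i));
  }
  return rv;
}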

Examples of org.tmatesoft.hg.repo.HgDataFile

     
    }, tagLocalRevs);
    System.out.printf("Cache built: %d ms\n", System.currentTimeMillis() - start);
    //
    // look up specific file. This part is fast.
    HgDataFile fileNode = repository.getFileNode(targetPath);
    final Nodeid[] allTagsOfTheFile = file2rev2tag.get(targetPath);
    // TODO if fileNode.isCopy, repeat for each getCopySourceName()
    for (int fileRevIndex = 0; fileRevIndex < fileNode.getRevisionCount(); fileRevIndex++) {
      Nodeid fileRev = fileNode.getRevision(fileRevIndex);
      int changesetRevIndex = fileNode.getChangesetRevisionIndex(fileRevIndex);
      List<String> associatedTags = new LinkedList<String>();
      for (int i = 0; i < allTagsOfTheFile.length; i++) {
        if (fileRev.equals(allTagsOfTheFile[i])) {
          associatedTags.add(allTags[i].name());
        }
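
The TODO in the snippet points at rename handling; a minimal sketch of that check (assuming an existing HgDataFile fileNode; isCopy() and getCopySourceName() are the calls the TODO itself names):

void printRenameOrigin(HgDataFile fileNode) throws HgRuntimeException {
  if (fileNode.isCopy()) {
    // the earliest revision of this file was recorded as a copy/rename of another path
    System.out.println(fileNode.getPath() + " originates from " + fileNode.getCopySourceName());
  }
}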