Package: org.tmatesoft.hg.repo

Usage examples of org.tmatesoft.hg.repo.HgDataFile$MetadataInspector


  }
 
  @Test
  public void testDiffTwoRevisions() throws Exception {
    HgRepository repo = Configuration.get().find("test-annotate");
    HgDataFile df = repo.getFileNode("file1");
    LineGrepOutputParser gp = new LineGrepOutputParser("^@@.+");
    ExecHelper eh = new ExecHelper(gp, repo.getWorkingDir());
    int[] toTest = { 3, 4, 5 }; // p1 ancestry line, p2 ancestry line, not in ancestry line
    final HgDiffCommand diffCmd = new HgDiffCommand(repo).file(df);
    for (int cs : toTest) {
      ByteArrayOutputStream bos = new ByteArrayOutputStream();
      diffCmd.range(cs, 8).executeDiff(new DiffOutInspector(new PrintStream(bos)));
      eh.run("hg", "diff", "-r", String.valueOf(cs), "-r", "8", "-U", "0", df.getPath().toString());
      //
      String[] apiResult = splitLines(bos.toString());
      String[] expected = splitLines(gp.result());
      Assert.assertArrayEquals("diff -r " + cs + "-r 8", expected, apiResult);
      gp.reset();
View Full Code Here


   * Make sure boundary values are ok (down to BlameHelper#prepare and FileHistory)
   */
  @Test
  public void testAnnotateFirstFileRev() throws Exception {
    HgRepository repo = Configuration.get().find("test-annotate");
    HgDataFile df = repo.getFileNode("file1");
    LineGrepOutputParser gp = new LineGrepOutputParser("^@@.+");
    ExecHelper eh = new ExecHelper(gp, repo.getWorkingDir());
    eh.run("hg", "diff", "-c", "0", "-U", "0", df.getPath().toString());
    //
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    HgDiffCommand diffCmd = new HgDiffCommand(repo).file(df);
    diffCmd.changeset(0).executeParentsAnnotate(new DiffOutInspector(new PrintStream(bos)));
    //
View Full Code Here

  }
 
  @Test
  public void testAnnotateMergeMapViaBase() throws Exception {
    HgRepository repo = Configuration.get().find("test-annotate3");
    HgDataFile df1 = repo.getFileNode("file1");
    HgDataFile df4 = repo.getFileNode("file4");
    HgDataFile df5 = repo.getFileNode("file5");
    assertTrue("[sanity]", df1.exists() && df4.exists());
    // hg annotate handles merge in its own way, here we check
    // how map(diff(p1->base->p2)) merge strategy works
    final String file1AnnotateResult = "3:1:1\n3:2:2x\n3:3:3y\n2:4:z\n0:1:1\n1:2:2x\n4:3:3y\n";
    final String file4AnnotateResult = "3:1:1\n1:2:2x\n4:3:3y\n2:4:z\n0:1:1\n3:6:2x\n3:7:3y\n";
View Full Code Here

  @Test
  public void testStripMetadata() throws Exception {
    repo = Configuration.get().find("log-1");
    ByteArrayChannel ch = new ByteArrayChannel();
    HgDataFile dir_b = repo.getFileNode("dir/b");
    Assert.assertTrue(dir_b.isCopy());
    Assert.assertEquals("b", dir_b.getCopySourceName().toString());
    Assert.assertEquals("e44751cdc2d14f1eb0146aa64f0895608ad15917", dir_b.getCopySourceRevision().toString());
    dir_b.content(0, ch);
    // assert rawContent has 1 10 ... 1 10
    assertArrayEquals("a \r\n".getBytes(), ch.toArray());
    //
    // try once again to make sure metadata records/extracts correct offsets
    dir_b.content(0, ch = new ByteArrayChannel());
    assertArrayEquals("a \r\n".getBytes(), ch.toArray());
  }
View Full Code Here

    assertEquals(0, exec.getExitValue());
    //
    // modify working copy
    write(f1, c3);
    //
    HgDataFile df = repo.getFileNode(f1.getName());
    // 1. Shall take content of the file from the dir
    df.workingCopy(ch = new ByteArrayChannel());
    assertArrayEquals(c3.getBytes(), ch.toArray());
    // 2. Shall supply working copy even if no local file is there
    f1.delete();
    assertFalse(f1.exists());
    df = repo.getFileNode(f1.getName());
    df.workingCopy(ch = new ByteArrayChannel());
    assertArrayEquals(c2.getBytes(), ch.toArray());
    //
    // 3. Shall extract revision of the file that corresponds actual parents (from dirstate) not the TIP as it was 
    exec.run("hg", "update", "-r", "0");
    assertEquals(0, exec.getExitValue());
    f1.delete();
    assertFalse(f1.exists());
    // there's no file and workingCopy shall do some extra work to find out actual revision to check out
    df = repo.getFileNode(f1.getName());
    df.workingCopy(ch = new ByteArrayChannel());
    assertArrayEquals(c1.getBytes(), ch.toArray());
  }
View Full Code Here

    Object[] historyD = new Object[] {"d", 1, 1, 5, 5, "b", 2, 3, 3, 4, "a", 0, 1, 0, 2};
   
    FileRenameHistory frh = new FileRenameHistory(0, 5);
    for (Object[] history : new Object[][] {historyA, historyB, historyC, historyD}) {
      String fname = history[0].toString();
      HgDataFile df = repo.getFileNode(fname);
      Assert.assertFalse(frh.isOutOfRange(df, df.getLastRevision()));
      frh.build(df, df.getLastRevision());
      int recordIndex = 0;
      errorCollector.assertEquals(history.length / 5, frh.chunks());
      for (Chunk c : frh.iterate(HgIterateDirection.NewToOld)) {
        compareChunk(fname, c, history, recordIndex++);
      }
      errorCollector.assertEquals("Shall compare full history", history.length, recordIndex * 5);
    }
    //
    HgDataFile df = repo.getFileNode("d");
    Assert.assertFalse(frh.isOutOfRange(df, 0));
    frh.build(df, 0);
    errorCollector.assertEquals(1, frh.chunks());
    Chunk c = frh.iterate(NewToOld).iterator().next();
    compareChunk("abandoned d(0)", c, new Object[] { "d", 0, 0, 4, 4 }, 0);
    //
    df = repo.getFileNode("a");
    Assert.assertFalse(frh.isOutOfRange(df, 0));
    frh.build(df, 0);
    errorCollector.assertEquals(1, frh.chunks());
    c = frh.iterate(NewToOld).iterator().next();
    compareChunk("a(0) and boundary checks", c, new Object[] { "a", 0, 0, 0, 0 }, 0);
    //
    repo = Configuration.get().find("test-annotate"); // need a long file history
    df = repo.getFileNode("file1");
    Assert.assertTrue("[sanity]", repo.getChangelog().getLastRevision() >=9);
    Assert.assertTrue("[sanity]", df.exists() && df.getLastRevision() >= 9);
    frh = new FileRenameHistory(0, 9);
    frh.build(df, 9);
    errorCollector.assertEquals(1, frh.chunks());
    c = frh.iterate(NewToOld).iterator().next();
    compareChunk("regular file, no renames", c, new Object[] { "file1", 0, 9, 0, 9 }, 0);
View Full Code Here

        return new Outcome(Kind.Failure, "nothing to add");
      }
      final Internals implRepo = Internals.getInstance(repo);
      CommitFacility cf = new CommitFacility(implRepo, parentRevs[0], parentRevs[1]);
      for (Path m : status.getModified()) {
        HgDataFile df = repo.getFileNode(m);
        cf.add(df, new WorkingCopyContent(df));
      }
      for (Path a : status.getAdded()) {
        HgDataFile df = repo.getFileNode(a); // TODO need smth explicit, like repo.createNewFileNode(Path) here
        // XXX might be an interesting exercise not to demand a content supplier, but instead return a "DataRequester"
        // object, that would indicate interest in data, and this code would "push" it to requester, so that any exception
        // is handled here, right away, and won't need to travel supplier and CommitFacility. (although try/catch inside
        // supplier.read (with empty throws declaration)
        cf.add(df, new FileContentSupplier(repo, a));
      }
      for (Path r : status.getRemoved()) {
        HgDataFile df = repo.getFileNode(r);
        cf.forget(df);
      }
      cf.branch(detectBranch());
      cf.user(detectUser());
      Transaction.Factory trFactory = implRepo.getTransactionFactory();
View Full Code Here

    checkResult = null;
    renamed = false;
    if (cset == null || file == null || file.isDirectory()) {
      throw new IllegalArgumentException();
    }
    HgDataFile dataFile = repo.getFileNode(file);
    if (!dataFile.exists()) {
      checkResult = new Outcome(Outcome.Kind.Success, String.format("File named %s is not known in the repository", file));
      return checkResult;
    }
    Nodeid toExtract = null;
    String phaseMsg = "Extract manifest revision failed";
    try {
      if (cachedManifest == null) {
        int csetRev = repo.getChangelog().getRevisionIndex(cset);
        cachedManifest = new ManifestRevision(null, null); // XXX how about context and cached manifest revisions
        repo.getManifest().walk(csetRev, csetRev, cachedManifest);
        // cachedManifest shall be meaningful - changelog.getRevisionIndex() above ensures we've got version that exists.
      }
      toExtract = cachedManifest.nodeid(file);
      phaseMsg = "Follow copy/rename failed";
      if (toExtract == null && followRenames) {
        int csetIndex = repo.getChangelog().getRevisionIndex(cset);
        int ccFileRevIndex = dataFile.getLastRevision(); // copy candidate
        int csetFileEnds = dataFile.getChangesetRevisionIndex(ccFileRevIndex);
        if (csetIndex > csetFileEnds) {
          return new Outcome(Outcome.Kind.Success, String.format("%s: last known changeset for the file %s is %d. Follow renames is possible towards older changesets only", phaseMsg, file, csetFileEnds));
        }
        // @see FileRenameHistory, with similar code, which doesn't trace alternative paths
        // traceback stack keeps record of all files with isCopy(fileRev) == true we've tried to follow, so that we can try earlier file
        // revisions in case followed fileRev didn't succeed
        ArrayDeque<Pair<HgDataFile, Integer>> traceback = new ArrayDeque<Pair<HgDataFile, Integer>>();
        do {
          int ccCsetIndex = dataFile.getChangesetRevisionIndex(ccFileRevIndex);
          if (ccCsetIndex <= csetIndex) {
            // present dataFile is our (distant) origin
            toExtract = dataFile.getRevision(ccFileRevIndex);
            renamed = true;
            break;
          }
          if (!dataFile.isCopy(ccFileRevIndex)) {
            // nothing left to return to when traceback.isEmpty()
            while (ccFileRevIndex == 0 && !traceback.isEmpty()) {
              Pair<HgDataFile, Integer> lastTurnPoint = traceback.pop();
              dataFile = lastTurnPoint.first();
              ccFileRevIndex = lastTurnPoint.second(); // generally ccFileRevIndex != 0 here, but doesn't hurt to check, hence while
              // fall through to shift down from the file revision we've already looked at
            }
            ccFileRevIndex--;
            continue;
          }
          if (ccFileRevIndex > 0) {
            // there's no reason to memorize turn point if it's the very first revision
            // of the file and we won't be able to try any other earlier revision
            traceback.push(new Pair<HgDataFile, Integer>(dataFile, ccFileRevIndex));
          }
          HgFileRevision origin = dataFile.getCopySource(ccFileRevIndex);
          dataFile = repo.getFileNode(origin.getPath());
          ccFileRevIndex = dataFile.getRevisionIndex(origin.getRevision());
        } while (ccFileRevIndex >= 0);
        // didn't get to csetIndex, no ancestor in file rename history found.
      }
    } catch (HgRuntimeException ex) {
      checkResult = new Outcome(Outcome.Kind.Failure, phaseMsg, ex);
      return checkResult;
    }
    if (toExtract != null) {
      Flags extractRevFlags = cachedManifest.flags(dataFile.getPath());
      fileRevision = new HgFileRevision(repo, toExtract, extractRevFlags, dataFile.getPath());
      checkResult = new Outcome(Outcome.Kind.Success, String.format("File %s, revision %s found at changeset %s", dataFile.getPath(), toExtract.shortNotation(), cset.shortNotation()));
      return checkResult;
    }
    checkResult = new Outcome(Outcome.Kind.Success, String.format("File %s nor its origins were known at revision %s", file, cset.shortNotation()));
    return checkResult;
  }
View Full Code Here

      public void next(int localRevision, Nodeid revision, int linkedRevision) {
        Assert.assertEquals(localRevision, linkedRevision);
      }
    });
    final HgDataFile fileNode = repository.getFileNode("file1");
    fileNode.indexWalk(0, TIP, new HgDataFile.RevisionInspector() {
      int i = 0;

      public void next(int localRevision, Nodeid revision, int linkedRevision) throws HgRuntimeException {
        assertEquals(i++, localRevision);
        assertEquals(fileNode.getChangesetRevisionIndex(localRevision), linkedRevision);
        assertEquals(fileNode.getRevision(localRevision), revision);
      }
    });
    class ParentInspectorCheck implements HgDataFile.ParentInspector {
      private int i, c;
      private Nodeid[] all;
      private final int start;
     
      public ParentInspectorCheck(int start, int total) {
        this.start = start;
        i = start; // revision index being iterated
        c = 0; // index/counter of visited revisions
        all = new Nodeid[total];
      }

      public void next(int localRevision, Nodeid revision, int parent1, int parent2, Nodeid nidParent1, Nodeid nidParent2) {
        assertEquals(i++, localRevision);
        all[c++] = revision;
        assertNotNull(revision);
        assertFalse(localRevision == 0 && (parent1 != -1 || parent2 != -1));
        assertFalse(localRevision > 0 && parent1 == -1 && parent2 == -1);
        if (parent1 != -1) {
          Assert.assertNotNull(nidParent1);
          if (parent1 >= start) {
            // deliberately ==, not asserEquals to ensure same instance
            Assert.assertTrue(nidParent1 == all[parent1-start])
          }
        }
        if (parent2 != -1) {
          Assert.assertNotNull(nidParent2);
          if (parent2 >= start) {
            Assert.assertTrue(nidParent2 == all[parent2-start]);
          }
        }
      }
    };
    fileNode.indexWalk(0, TIP, new ParentInspectorCheck(0, fileNode.getRevisionCount()));
    assert fileNode.getRevisionCount() > 2 : "prereq"; // need at least few revisions
    // there used to be a defect in #walk impl, assumption all parents come prior to a revision
    fileNode.indexWalk(1, 3, new ParentInspectorCheck(1, 3));
  }
View Full Code Here

        for (String f : cset.files()) {
          if (seenFiles.contains(f)) {
            continue;
          }
          seenFiles.add(f);
          HgDataFile df = repo.getRepo().getFileNode(f);
          files.add(df);
        }
      }
    }, clogRevs);
    manifestRevs.sort(true);
    //
    final File bundleFile = File.createTempFile("hg4j-", ".bundle");
    if (clogRevs.length == 0) {
      // nothing to write
      return bundleFile;
    }
    final FileOutputStream osBundle = new FileOutputStream(bundleFile);
    final OutputStreamSerializer outRaw = new OutputStreamSerializer(osBundle);
    outRaw.write("HG10UN".getBytes(), 0, 6);
    //
    RevlogStream clogStream = repo.getImplAccess().getChangelogStream();
    new ChunkGenerator(outRaw, clogMap).iterate(clogStream, clogRevs);
    outRaw.writeInt(0); // null chunk for changelog group
    //
    RevlogStream manifestStream = repo.getImplAccess().getManifestStream();
    new ChunkGenerator(outRaw, clogMap).iterate(manifestStream, manifestRevs.toArray(true));
    outRaw.writeInt(0); // null chunk for manifest group
    //
    EncodingHelper fnEncoder = repo.buildFileNameEncodingHelper();
    for (HgDataFile df : sortedByName(files)) {
      RevlogStream s = repo.getImplAccess().getStream(df);
      final IntVector fileRevs = new IntVector();
      s.iterate(0, TIP, false, new RevlogStream.Inspector() {
       
        public void next(int revisionIndex, int actualLen, int baseRevision, int linkRevision, int parent1Revision, int parent2Revision, byte[] nodeid, DataAccess data) throws HgRuntimeException {
          if (Arrays.binarySearch(clogRevs, linkRevision) >= 0) {
            fileRevs.add(revisionIndex);
          }
        }
      });
      fileRevs.sort(true);
      if (!fileRevs.isEmpty()) {
        // although BundleFormat page says "filename length, filename" for a file,
        // in fact there's a sort of 'filename chunk', i.e. filename length field includes
        // not only length of filename, but also length of the field itseld, i.e. filename.length+sizeof(int)
        byte[] fnameBytes = fnEncoder.toBundle(df.getPath());
        outRaw.writeInt(fnameBytes.length + 4);
        outRaw.writeByte(fnameBytes);
        new ChunkGenerator(outRaw, clogMap).iterate(s, fileRevs.toArray(true));
        outRaw.writeInt(0); // null chunk for file group
      }
View Full Code Here

TOP

Related Classes of org.tmatesoft.hg.repo.HgDataFile$MetadataInspector

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle, Inc. Contact coftware#gmail.com.