Package org.tmatesoft.hg.repo

Examples of org.tmatesoft.hg.repo.HgDataFile$MetadataInspector


    }
    int rev = cmdLineOpts.getSingleInt(TIP, "-r", "--rev");
    OutputStreamChannel out = new OutputStreamChannel(System.out);
    for (String fname : cmdLineOpts.getList("")) {
      System.out.println(fname);
      HgDataFile fn = hgRepo.getFileNode(fname);
      if (fn.exists()) {
        fn.contentWithFilters(rev, out);
        System.out.println();
      } else {
        System.out.printf("%s not found!\n", fname);
      }
    }
View Full Code Here
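
The snippet above is from a command-line cat example: each file name is resolved to an HgDataFile and the requested revision is streamed to stdout. A minimal standalone sketch of the same pattern (imports and exception handling omitted, as in the snippets on this page; the repository path and file name are hypothetical):

    HgRepository hgRepo = new HgLookup().detect(new File("/path/to/repo")); // hypothetical location
    HgDataFile fn = hgRepo.getFileNode("build.xml"); // hypothetical file name
    if (fn.exists()) {
      ByteArrayChannel sink = new ByteArrayChannel();
      fn.contentWithFilters(HgRepository.TIP, sink); // latest revision, repository filters applied
      System.out.println(new String(sink.toArray()));
    }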


        cmd.range(r[0], r[1]).execute(dump);
      }
      dump.done();
    } else {
      for (String fname : files) {
        HgDataFile f1 = hgRepo.getFileNode(fname);
        System.out.println("History of the file: " + f1.getPath());
        if (limit == -1) {
          cmd.file(f1.getPath(), true).execute(dump);
        } else {
          int[] r = new int[] { 0, f1.getRevisionCount() };
          if (fixRange(r, reverseOrder, limit) == 0) {
            System.out.println("No changes");
            continue;
          }
          cmd.range(r[0], r[1]).file(f1.getPath(), true).execute(dump);
        }
        dump.done();
      }
    }
//    cmd = null;
View Full Code Here
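
The snippet above streams file history to a handler (dump) via HgLogCommand. The same single-file history can also be collected into a list; a sketch (imports omitted), assuming the no-argument execute() shown in the commit test further down also accepts a file() constraint:

    HgDataFile f1 = hgRepo.getFileNode("file1"); // hypothetical file name
    List<HgChangeset> fileHistory = new HgLogCommand(hgRepo).file(f1.getPath(), true).execute();
    for (HgChangeset cs : fileHistory) {
      System.out.println(cs.getNodeid() + " " + cs.getComment());
    }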

    final long end = System.nanoTime();
    System.out.printf("buildFileLog: %,d ms\n", (end-start)/1000);
  }

  private void buildFileLogOld() throws Exception {
    final HgDataFile fn = hgRepo.getFileNode("file1");
    final int[] fileChangesetRevisions = new int[fn.getRevisionCount()];
    fn.history(new HgChangelog.Inspector() {
      private int fileLocalRevisions = 0;
      private int[] parentRevisions = new int[2];
     
      public void next(int revisionNumber, Nodeid nodeid, RawChangeset cset) {
        try {
          fileChangesetRevisions[fileLocalRevisions] = revisionNumber;
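          // parents() fills parentRevisions with the file-local indexes of both parents; -1 marks a missing parent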
          fn.parents(fileLocalRevisions, parentRevisions, null, null);
          boolean join = parentRevisions[0] != -1 && parentRevisions[1] != -1;
          if (join) {
            System.out.print("join[");
          }
          if (parentRevisions[0] != -1) {
View Full Code Here

    }
  }

  private void testReadWorkingCopy() throws Exception {
    for (String fname : cmdLineOpts.getList("")) {
      HgDataFile fn = hgRepo.getFileNode(fname);
      ByteArrayChannel sink = new ByteArrayChannel();
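      // stream the working-directory copy of the file into an in-memory sink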
      fn.workingCopy(sink);
      System.out.printf("%s: read %d bytes of working copy", fname, sink.toArray().length);
    }
  }
View Full Code Here

   */
  private void testEffectiveFileLog() throws Exception {
    for (String fname : cmdLineOpts.getList("")) {
      System.out.println(fname);
      final long start = System.currentTimeMillis();
      HgDataFile fn = hgRepo.getFileNode(fname);
      if (fn.exists()) {
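        // history() visits only the changesets that touch this file, reporting changelog revision index and nodeid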
        fn.history(new HgChangelog.Inspector() {
          public void next(int revisionNumber, Nodeid nodeid, RawChangeset cset) {
            System.out.printf("%d:%s\n", revisionNumber, nodeid);
          }
        });
      }
View Full Code Here

//    }
//    System.out.println("10:" + (System.currentTimeMillis() - start));
  }
 
  private void inflaterLengthException() throws Exception {
    HgDataFile f1 = hgRepo.getFileNode("src/com/tmate/hgkit/console/Bundle.java");
    HgDataFile f2 = hgRepo.getFileNode("test-repos.jar");
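    // isCopy() reports whether the file record carries copy/rename metadata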
    System.out.println(f1.isCopy());
    System.out.println(f2.isCopy());
    ByteArrayChannel bac = new ByteArrayChannel();
    f1.content(1, bac); // 0: 1151, 1: 1139
    System.out.println(bac.toArray().length);
    f2.content(0, bac = new ByteArrayChannel()); // 0: 14269
    System.out.println(bac.toArray().length);
  }
View Full Code Here

 
  private void catCompleteHistory() throws Exception {
    DigestHelper dh = new DigestHelper();
    for (String fname : cmdLineOpts.getList("")) {
      System.out.println(fname);
      HgDataFile fn = hgRepo.getFileNode(fname);
      if (fn.exists()) {
        int total = fn.getRevisionCount();
        System.out.printf("Total revisions: %d\n", total);
        for (int i = 0; i < total; i++) {
          ByteArrayChannel sink = new ByteArrayChannel();
          fn.content(i, sink);
          System.out.println("==========>");
          byte[] content = sink.toArray();
          System.out.println(new String(content));
          int[] parentRevisions = new int[2];
          byte[] parent1 = new byte[20];
          byte[] parent2 = new byte[20];
          fn.parents(i, parentRevisions, parent1, parent2);
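          // recompute what the revision hash should be: sha1 over the parent nodeids plus the content, as Mercurial does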
          System.out.println(dh.sha1(parent1, parent2, content).asHexString());
        }
      } else {
        System.out.println(">>>Not found!");
      }
View Full Code Here

    }
  }


  private void testStatusInternals() throws HgException, HgRuntimeException {
    HgDataFile n = hgRepo.getFileNode(Path.create("design.txt"));
    for (String s : new String[] {"011dfd44417c72bd9e54cf89b82828f661b700ed", "e5529faa06d53e06a816e56d218115b42782f1ba", "c18e7111f1fc89a80a00f6a39d51288289a382fc"}) {
      // expected: 359, 2123, 3079
      byte[] b = s.getBytes();
      final Nodeid nid = Nodeid.fromAscii(b, 0, b.length);
      System.out.println(s + " : " + n.getLength(nid));
    }
  }
View Full Code Here
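
getLength(Nodeid) above resolves a revision by its hash; the same hash can be mapped to a file-local index and back with getRevisionIndex and getRevision (both appear in the commit code further down). A sketch reusing the first hash from the snippet:

    byte[] b = "011dfd44417c72bd9e54cf89b82828f661b700ed".getBytes();
    Nodeid nid = Nodeid.fromAscii(b, 0, b.length);
    int idx = n.getRevisionIndex(nid);                     // file-local revision index for this hash
    System.out.println(idx + " : " + n.getRevision(idx));  // and back to the Nodeid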

    RepoUtils.createFile(new File(repoLoc, "file1"), "hello\n");
    new ExecHelper(new OutputParser.Stub(), repoLoc).run("hg", "commit", "--addremove", "-m", "FIRST");
    //
    HgRepository hgRepo = new HgLookup().detect(repoLoc);
    CommitFacility cf = new CommitFacility(Internals.getInstance(hgRepo), 0);
    HgDataFile df = hgRepo.getFileNode("file1");
    cf.add(df, new ByteArrayDataSource("hello\nworld".getBytes()));
    Transaction tr = newTransaction(hgRepo);
    Nodeid secondRev = cf.commit("SECOND", tr);
    tr.commit();
    //
    List<HgChangeset> commits = new HgLogCommand(hgRepo).execute();
    errorCollector.assertEquals(2, commits.size());
    HgChangeset c1 = commits.get(0);
    HgChangeset c2 = commits.get(1);
    errorCollector.assertEquals("FIRST", c1.getComment());
    errorCollector.assertEquals("SECOND", c2.getComment());
    errorCollector.assertEquals(df.getPath(), c2.getAffectedFiles().get(0));
    errorCollector.assertEquals(c1.getNodeid(), c2.getFirstParentRevision());
    errorCollector.assertEquals(Nodeid.NULL, c2.getSecondParentRevision());
    errorCollector.assertEquals(secondRev, c2.getNodeid());
  }
View Full Code Here
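
To double-check what was committed, the file can be read back; a sketch that re-opens the repository to avoid any stale caches (whether the already-open hgRepo would also see the new revision is not shown here):

    HgDataFile df2 = new HgLookup().detect(repoLoc).getFileNode("file1");
    ByteArrayChannel sink = new ByteArrayChannel();
    df2.content(df2.getRevisionCount() - 1, sink);   // latest file revision
    System.out.println(new String(sink.toArray()));  // expected: hello\nworld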

//    Pair<Integer, Integer> manifestParents = getManifestParents();
    Pair<Integer, Integer> manifestParents = new Pair<Integer, Integer>(c1Manifest.revisionIndex(), c2Manifest.revisionIndex());
    TreeMap<Path, Nodeid> newManifestRevision = new TreeMap<Path, Nodeid>();
    HashMap<Path, Pair<Integer, Integer>> fileParents = new HashMap<Path, Pair<Integer,Integer>>();
    for (Path f : c1Manifest.files()) {
      HgDataFile df = repo.getRepo().getFileNode(f);
      Nodeid fileKnownRev1 = c1Manifest.nodeid(f), fileKnownRev2;
      final int fileRevIndex1 = df.getRevisionIndex(fileKnownRev1);
      final int fileRevIndex2;
      if ((fileKnownRev2 = c2Manifest.nodeid(f)) != null) {
        // merged files
        fileRevIndex2 = df.getRevisionIndex(fileKnownRev2);
      } else {
        fileRevIndex2 = NO_REVISION;
      }
       
      fileParents.put(f, new Pair<Integer, Integer>(fileRevIndex1, fileRevIndex2));
      newManifestRevision.put(f, fileKnownRev1);
    }
    //
    // Forget removed
    for (Path p : removals) {
      newManifestRevision.remove(p);
    }
    //
    saveCommitMessage(message);
    //
    // Register new/changed
    FNCacheFile.Mediator fncache = new FNCacheFile.Mediator(repo, transaction);
    ArrayList<Path> touchInDirstate = new ArrayList<Path>();
    for (Pair<HgDataFile, DataSource> e : files.values()) {
      HgDataFile df = e.first();
      DataSource bds = e.second();
      Pair<Integer, Integer> fp = fileParents.get(df.getPath());
      if (fp == null) {
        // NEW FILE, either just added or resurrected from p2
        Nodeid fileRevInP2;
        if ((fileRevInP2 = c2Manifest.nodeid(df.getPath())) != null) {
          fp = new Pair<Integer, Integer>(df.getRevisionIndex(fileRevInP2), NO_REVISION);
        } else {
          // brand new
          fp = new Pair<Integer, Integer>(NO_REVISION, NO_REVISION);
        }
      }
      // TODO if fp.first() != NO_REVISION and fp.second() != NO_REVISION check if one
      // revision is ancestor of another and use the latest as p1, then
      Nodeid fileRev = null;
      final boolean isNewFile = !df.exists();
      if (fp.first() != NO_REVISION && fp.second() == NO_REVISION && !isNewFile) {
        // compare file contents to see if anything has changed, and reuse old revision, if unchanged.
        // XXX ineffective, need better access to revision content
        ByteArraySerializer bas = new ByteArraySerializer();
        bds.serialize(bas);
        final byte[] newContent = bas.toByteArray();
        // unless there's a way to reset DataSource, replace it with the content just read
        bds = new DataSerializer.ByteArrayDataSource(newContent);
        if (new ComparatorChannel(newContent).same(df, fp.first())) {
          fileRev = df.getRevision(fp.first());
        }
      }
      if (fileRev == null) {
        RevlogStream contentStream = repo.getImplAccess().getStream(df);
        RevlogStreamWriter fileWriter = new RevlogStreamWriter(repo, contentStream, transaction);
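        // addRevision returns a pair; its second element is the Nodeid of the freshly written file revision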
        fileRev = fileWriter.addRevision(bds, clogRevisionIndex, fp.first(), fp.second()).second();
        if (isNewFile) {
          // registerNew shall go after fileWriter.addRevision as it needs to know if data is inlined or not
          fncache.registerNew(df.getPath(), contentStream);
        }
      }
      newManifestRevision.put(df.getPath(), fileRev);
      touchInDirstate.add(df.getPath());
    }
    //
    final EncodingHelper encHelper = repo.buildFileNameEncodingHelper();
    //
    // Manifest
View Full Code Here
