Package org.tmatesoft.hg.repo

Examples of org.tmatesoft.hg.repo.HgRepositoryLock
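
All of the snippets below follow the same idiom: obtain the lock object from the repository, call acquire() before touching the working directory (or store), and call release() in a finally block so the lock is freed even when the guarded code throws. A minimal, self-contained sketch of that idiom follows; the repository path and class name are placeholders, and the import package for HgLookup is an assumption (the snippets use only the simple name).

  import java.io.File;

  import org.tmatesoft.hg.core.HgLookup; // package assumed for this sketch
  import org.tmatesoft.hg.repo.HgRepository;
  import org.tmatesoft.hg.repo.HgRepositoryLock;

  public class WorkingDirLockSketch {
    public static void main(String[] args) throws Exception {
      // locate the repository; the path is a placeholder
      HgRepository hgRepo = new HgLookup().detect(new File("/path/to/repo"));
      final HgRepositoryLock wdLock = hgRepo.getWorkingDirLock();
      wdLock.acquire(); // fails with HgRepositoryLockException if the lock cannot be obtained
      try {
        // ... modify the dirstate or working copy while the lock is held ...
      } finally {
        wdLock.release(); // always release, even when the guarded code throws
      }
    }
  }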


  public void execute(Mediator mediator) throws HgCallbackTargetException, HgRepositoryLockException, HgIOException, HgLibraryFailureException, CancelledException {
    if (firstCset == BAD_REVISION || secondCset == BAD_REVISION || ancestorCset == BAD_REVISION) {
      throw new IllegalArgumentException("Merge heads and their ancestors are not initialized");
    }
    final HgRepositoryLock wdLock = repo.getWorkingDirLock();
    wdLock.acquire();
    try {
      Pool<Nodeid> cacheRevs = new Pool<Nodeid>();
      Pool<Path> cacheFiles = new Pool<Path>();

      Internals implRepo = Internals.getInstance(repo);
      final DirstateBuilder dirstateBuilder = new DirstateBuilder(implRepo);
      dirstateBuilder.fillFrom(new DirstateReader(implRepo, new Path.SimpleSource(repo.getSessionContext().getPathFactory(), cacheFiles)));
      final HgChangelog clog = repo.getChangelog();
      final Nodeid headCset1 = clog.getRevision(firstCset);
      dirstateBuilder.parents(headCset1, clog.getRevision(secondCset));
      //
      MergeStateBuilder mergeStateBuilder = new MergeStateBuilder(implRepo);
      mergeStateBuilder.prepare(headCset1);

      ManifestRevision m1, m2, ma;
      m1 = new ManifestRevision(cacheRevs, cacheFiles).init(repo, firstCset);
      m2 = new ManifestRevision(cacheRevs, cacheFiles).init(repo, secondCset);
      ma = new ManifestRevision(cacheRevs, cacheFiles).init(repo, ancestorCset);
      Transaction transaction = implRepo.getTransactionFactory().create(repo);
      ResolverImpl resolver = new ResolverImpl(implRepo, dirstateBuilder, mergeStateBuilder);
      try {
        for (Path f : m1.files()) {
          Nodeid fileRevBase, fileRevA, fileRevB;
          if (m2.contains(f)) {
            fileRevA = m1.nodeid(f);
            fileRevB = m2.nodeid(f);
            fileRevBase = ma.contains(f) ? ma.nodeid(f) : null;
            if (fileRevA.equals(fileRevB)) {
              HgFileRevision fr = new HgFileRevision(repo, fileRevA, m1.flags(f), f);
              resolver.presentState(f, fr, fr, null);
              mediator.same(fr, resolver);
            } else if (fileRevBase == fileRevA) { // identity comparison is deliberate: revisions are interned via the shared cacheRevs pool
              assert fileRevBase != null;
              HgFileRevision frBase = new HgFileRevision(repo, fileRevBase, ma.flags(f), f);
              HgFileRevision frSecond = new HgFileRevision(repo, fileRevB, m2.flags(f), f);
              resolver.presentState(f, frBase, frSecond, frBase);
              mediator.fastForwardB(frBase, frSecond, resolver);
            } else if (fileRevBase == fileRevB) {
              assert fileRevBase != null;
              HgFileRevision frBase = new HgFileRevision(repo, fileRevBase, ma.flags(f), f);
              HgFileRevision frFirst = new HgFileRevision(repo, fileRevA, m1.flags(f), f);
              resolver.presentState(f, frFirst, frBase, frBase);
              mediator.fastForwardA(frBase, frFirst, resolver);
            } else {
              HgFileRevision frBase = fileRevBase == null ? null : new HgFileRevision(repo, fileRevBase, ma.flags(f), f);
              HgFileRevision frFirst = new HgFileRevision(repo, fileRevA, m1.flags(f), f);
              HgFileRevision frSecond = new HgFileRevision(repo, fileRevB, m2.flags(f), f);
              resolver.presentState(f, frFirst, frSecond, frBase);
              mediator.resolve(frBase, frFirst, frSecond, resolver);
            }
          } else {
            // m2 doesn't contain the file, either new in m1, or deleted in m2
            HgFileRevision frFirst = new HgFileRevision(repo, m1.nodeid(f), m1.flags(f), f);
            if (ma.contains(f)) {
              // deleted in m2
              HgFileRevision frBase = new HgFileRevision(repo, ma.nodeid(f), ma.flags(f), f);
              resolver.presentState(f, frFirst, null, frBase);
              mediator.onlyA(frBase, frFirst, resolver);
            } else {
              // new in m1
              resolver.presentState(f, frFirst, null, null);
              mediator.newInA(frFirst, resolver);
            }
          }
          resolver.apply();
        } // for m1 files
        for (Path f : m2.files()) {
          if (m1.contains(f)) {
            continue;
          }
          HgFileRevision frSecond = new HgFileRevision(repo, m2.nodeid(f), m2.flags(f), f);
          // file present in m2 only: either new in m2 or deleted in m1
          if (ma.contains(f)) {
            // deleted in m1
            HgFileRevision frBase = new HgFileRevision(repo, ma.nodeid(f), ma.flags(f), f);
            resolver.presentState(f, null, frSecond, frBase);
            mediator.onlyB(frBase, frSecond, resolver);
          } else {
            // new in m2
            resolver.presentState(f, null, frSecond, null);
            mediator.newInB(frSecond, resolver);
          }
          resolver.apply();
        }
        resolver.serializeChanged(transaction);
        transaction.commit();
      } catch (HgRuntimeException ex) {
        transaction.rollback();
        mergeStateBuilder.abandon();
        throw ex;
      } catch (HgIOException ex) {
        transaction.rollback();
        mergeStateBuilder.abandon();
        throw ex;
      }
    } catch (HgRuntimeException ex) {
      throw new HgLibraryFailureException(ex);
    } finally {
      wdLock.release();
    }
  }


    File hgrc = new File(repoLoc, ".hg/hgrc");
    RepoUtils.createFile(hgrc, "[ui]\ntimeout=0\n"); // zero lock timeout: the external hg aborts immediately instead of waiting
    final OutputParser.Stub p = new OutputParser.Stub();
    ExecHelper eh = new ExecHelper(p, repoLoc);
    HgRepository hgRepo = new HgLookup().detect(repoLoc);
    final HgRepositoryLock wdLock = hgRepo.getWorkingDirLock();
    try {
      wdLock.acquire();
      eh.run("hg", "tag", "tag-aaa");
      Assert.assertTrue("hg must fail while the working dir lock is held", 0 != eh.getExitValue()); // hg returns 0 on success
      Assert.assertTrue(p.result().toString().contains("abort"));
    } finally {
      wdLock.release();
    }
  }

  private static void readWithHg4J(final HgLookup hgLookup, final File repoLoc) {
    try {
      System.out.print("(");
      final long start = System.nanoTime();
      HgRepository hgRepo = hgLookup.detect(repoLoc);
      final HgRepositoryLock wcLock = hgRepo.getWorkingDirLock();
      final HgRepositoryLock storeLock = hgRepo.getStoreLock();
      wcLock.acquire();
      System.out.print(".");
      storeLock.acquire();
      System.out.print(".");
      try {
        new HgStatusCommand(hgRepo).execute(new TestStatus.StatusCollector());
        System.out.printf("%d ms)\n", (System.nanoTime() - start) / 1000000);
      } finally {
        storeLock.release();
        wcLock.release();
      }
    } catch (RuntimeException ex) {
      throw ex;
    } catch (Exception ex) {
      // wrap checked exceptions so callers don't have to declare them
      throw new RuntimeException(ex);
    }
  }

   * @throws HgException subclass thereof to indicate specific issue with the command arguments or repository state
   * @throws HgRepositoryLockException if failed to lock the repo for modifications
   * @throws CancelledException if execution of the command was cancelled
   */
  public void execute() throws HgException, HgRepositoryLockException, CancelledException {
    final HgRepositoryLock wdLock = repo.getWorkingDirLock();
    wdLock.acquire();
    try {
      final ProgressSupport progress = getProgressSupport(null);
      final CancelSupport cancellation = getCancelSupport(null, true);
      cancellation.checkCancelled();
      progress.start(2 + toAdd.size() + toRemove.size());
      Internals implRepo = Internals.getInstance(repo);
      final DirstateBuilder dirstateBuilder = new DirstateBuilder(implRepo);
      dirstateBuilder.fillFrom(new DirstateReader(implRepo, new Path.SimpleSource()));
      progress.worked(1);
      cancellation.checkCancelled();
      for (Path p : toAdd) {
        dirstateBuilder.recordAdded(p, Flags.RegularFile, -1);
        progress.worked(1);
        cancellation.checkCancelled();
      }
      for (Path p : toRemove) {
        dirstateBuilder.recordRemoved(p);
        progress.worked(1);
        cancellation.checkCancelled();
      }
      Transaction.Factory trFactory = implRepo.getTransactionFactory();
      Transaction tr = trFactory.create(repo);
      try {
        dirstateBuilder.serialize(tr);
        tr.commit();
      } catch (RuntimeException ex) {
        tr.rollback();
        throw ex;
      } catch (HgException ex) {
        tr.rollback();
        throw ex;
      }
      progress.worked(1);
      progress.done();
    } catch (HgRuntimeException ex) {
      throw new HgLibraryFailureException(ex);
    } finally {
      wdLock.release();
    }
  }

   * @throws HgIOException to indicate troubles updating files in working copy
   * @throws HgException subclass thereof to indicate specific issue with the command arguments or repository state
   * @throws CancelledException if execution of the command was cancelled
   */
  public void execute() throws HgException, CancelledException {
    final HgRepositoryLock wdLock = repo.getWorkingDirLock();
    wdLock.acquire();
    try {
      final ProgressSupport progress = getProgressSupport(null);
      final CancelSupport cancellation = getCancelSupport(null, true);
      cancellation.checkCancelled();
      progress.start(files.size() + 2);
      final int csetRevision;
      if (changesetToCheckout.get() == HgRepository.WORKING_COPY) {
        csetRevision = repo.getChangelog().getRevisionIndex(repo.getWorkingCopyParents().first());
      } else {
        csetRevision = changesetToCheckout.get();
      }
      Internals implRepo = Internals.getInstance(repo);
      final DirstateBuilder dirstateBuilder = new DirstateBuilder(implRepo);
      dirstateBuilder.fillFrom(new DirstateReader(implRepo, new Path.SimpleSource()));
      progress.worked(1);
      cancellation.checkCancelled();
     
      final HgCheckoutCommand.CheckoutWorker worker = new HgCheckoutCommand.CheckoutWorker(implRepo);
     
      HgManifest.Inspector insp = new HgManifest.Inspector() {
       
        public boolean next(Nodeid nid, Path fname, Flags flags) {
          if (worker.next(nid, fname, flags)) {
            dirstateBuilder.recordUncertain(fname);
            return true;
          }
          return false;
        }
       
        public boolean end(int manifestRevision) {
          return false;
        }
       
        public boolean begin(int manifestRevision, Nodeid nid, int changelogRevision) {
          return true;
        }
      };

      for (Path file : files) {
        File f = new File(repo.getWorkingDir(), file.toString());
        if (f.isFile()) {
          if (keepOriginal) {
            File copy = new File(f.getParentFile(), f.getName() + ".orig");
            if (copy.exists()) {
              copy.delete();
            }
            f.renameTo(copy);
          } else {
            f.delete();
          }
        }
        repo.getManifest().walkFileRevisions(file, insp, csetRevision);
        worker.checkFailed();
        progress.worked(1);
        cancellation.checkCancelled();
      }
      Transaction.Factory trFactory = implRepo.getTransactionFactory();
      Transaction tr = trFactory.create(repo);
      try {
        // TODO same code in HgAddRemoveCommand and similar in HgCommitCommand
        dirstateBuilder.serialize(tr);
        tr.commit();
      } catch (RuntimeException ex) {
        tr.rollback();
        throw ex;
      } catch (HgException ex) {
        tr.rollback();
        throw ex;
      }
      progress.worked(1);
      progress.done();
    } catch (HgRuntimeException ex) {
      throw new HgLibraryFailureException(ex);
    } finally {
      wdLock.release();
    }
  }


