Package org.apache.hadoop.hbase

Examples of org.apache.hadoop.hbase.HLog


        " does not exist.";
        LOG.error(message);
        throw new FileNotFoundException(message);
      }

      this.log = new HLog(this.fs,
          new Path(this.fs.getHomeDirectory(),
              HConstants.HREGION_LOGDIR_NAME + "_" + System.currentTimeMillis()
          ),
          this.conf, null
      );
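The snippet above creates a scratch HLog in a timestamped subdirectory of the caller's home directory, so repeated runs never reuse a stale log. Below is a minimal sketch of the same pattern in isolation, assuming `fs` (a FileSystem) and `conf` (the HBase configuration object) are already initialized; the try/finally cleanup is an assumption borrowed from the other examples on this page, not part of the truncated snippet above.

Path logDir = new Path(fs.getHomeDirectory(),
    HConstants.HREGION_LOGDIR_NAME + "_" + System.currentTimeMillis());
// The fourth constructor argument is passed as null in every example on this page.
HLog log = new HLog(fs, logDir, conf, null);
try {
  // ... open or rebuild regions against this log ...
} finally {
  log.closeAndDelete();  // discard the scratch log when finished
}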


      }
    }
  }
 
  private void scanRootRegion(FileSystem fs, Path rootdir) throws IOException {
    HLog log = new HLog(fs, new Path(rootdir, HConstants.HREGION_LOGDIR_NAME),
        conf, null);

    try {
      // Open root region so we can scan it

      HRegion rootRegion = new HRegion(
          new Path(rootdir, HConstants.ROOT_TABLE_NAME.toString()), log, fs, conf,
          HRegionInfo.rootRegionInfo, null, null);

      try {
        HScannerInterface rootScanner = rootRegion.getScanner(
            HConstants.COL_REGIONINFO_ARRAY, HConstants.EMPTY_START_ROW,
            HConstants.LATEST_TIMESTAMP, null);

        try {
          HStoreKey key = new HStoreKey();
          SortedMap<Text, byte[]> results = new TreeMap<Text, byte[]>();
          while (rootScanner.next(key, results)) {
            HRegionInfo info = Writables.getHRegionInfoOrNull(
                results.get(HConstants.COL_REGIONINFO));
            if (info == null) {
              LOG.warn("region info is null for row " + key.getRow() +
                  " in table " + HConstants.ROOT_TABLE_NAME);
              continue;
            }

            // First move the meta region to where it should be and rename
            // subdirectories as necessary

            migrateRegionDir(fs, rootdir, HConstants.META_TABLE_NAME,
                new Path(rootdir, OLD_PREFIX + info.getEncodedName()));

            // Now scan and process the meta table

            scanMetaRegion(fs, rootdir, log, info);
          }

        } finally {
          rootScanner.close();
        }

      } finally {
        rootRegion.close();
      }

    } finally {
      log.closeAndDelete();
    }
  }
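scanRootRegion() pairs a scratch HLog with the ROOT region, scans the region-info column, and then tears everything down in reverse order: scanner, region, log. The read loop on its own looks like the sketch below, assuming `region` is an already-opened HRegion backed by such a log; everything else is named as in the snippet above.

HScannerInterface scanner = region.getScanner(
    HConstants.COL_REGIONINFO_ARRAY, HConstants.EMPTY_START_ROW,
    HConstants.LATEST_TIMESTAMP, null);
try {
  HStoreKey key = new HStoreKey();
  SortedMap<Text, byte[]> results = new TreeMap<Text, byte[]>();
  while (scanner.next(key, results)) {
    HRegionInfo info =
        Writables.getHRegionInfoOrNull(results.get(HConstants.COL_REGIONINFO));
    if (info == null) {
      continue;  // skip rows without a deserializable HRegionInfo
    }
    // process each HRegionInfo found in the scanned region
  }
} finally {
  scanner.close();
}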

      regions[i].getLog().closeAndDelete();
    }

    // Create a log that we can reuse when we need to open regions
   
    HLog log = new HLog(this.fs,
        new Path("/tmp", HConstants.HREGION_LOGDIR_NAME + "_" +
            System.currentTimeMillis()
        ),
        this.conf, null
    );
    try {
      /*
       * Merge Region 0 and Region 1
       */
      LOG.info("merging regions 0 and 1");
      Merge merger = new Merge(this.conf);
      ToolRunner.run(merger,
          new String[] {
          this.desc.getName().toString(),
          this.sourceRegions[0].getRegionName().toString(),
          this.sourceRegions[1].getRegionName().toString()
      }
      );
      HRegionInfo mergedInfo = merger.getMergedHRegionInfo();
   
      // Now verify that we can read all the rows from regions 0, 1
      // in the new merged region.
      HRegion merged =
        HRegion.openHRegion(mergedInfo, this.rootdir, log, this.conf);
     
      for (int i = 0; i < 2 ; i++) {
        for (int j = 0; j < rows[i].length; j++) {
          byte[] bytes = merged.get(rows[i][j], COLUMN_NAME);
          assertNotNull(rows[i][j].toString(), bytes);
          Text value = new Text(bytes);
          assertTrue(value.equals(rows[i][j]));
        }
      }
      merged.close();
      LOG.info("verified merge of regions 0 and 1");
      /*
       * Merge the result of merging regions 0 and 1 with region 2
       */
      LOG.info("merging regions 0+1 and 2");
      merger = new Merge(this.conf);
      ToolRunner.run(merger,
          new String[] {
            this.desc.getName().toString(),
            mergedInfo.getRegionName().toString(),
            this.sourceRegions[2].getRegionName().toString()
          }
      );
      mergedInfo = merger.getMergedHRegionInfo();

      // Now verify that we can read all the rows from regions 0, 1 and 2
      // in the new merged region.
     
      merged = HRegion.openHRegion(mergedInfo, this.rootdir, log, this.conf);

      for (int i = 0; i < 3 ; i++) {
        for (int j = 0; j < rows[i].length; j++) {
          byte[] bytes = merged.get(rows[i][j], COLUMN_NAME);
          assertNotNull(bytes);
          Text value = new Text(bytes);
          assertTrue(value.equals(rows[i][j]));
        }
      }
      merged.close();
      LOG.info("verified merge of regions 0+1 and 2");
      /*
       * Merge the result of merging regions 0, 1 and 2 with region 3
       */
      LOG.info("merging regions 0+1+2 and 3");
      merger = new Merge(this.conf);
      ToolRunner.run(merger,
          new String[] {
            this.desc.getName().toString(),
            mergedInfo.getRegionName().toString(),
            this.sourceRegions[3].getRegionName().toString()
          }
      );
      mergedInfo = merger.getMergedHRegionInfo();
     
      // Now verify that we can read all the rows from regions 0, 1, 2 and 3
      // in the new merged region.
     
      merged = HRegion.openHRegion(mergedInfo, this.rootdir, log, this.conf);
     
      for (int i = 0; i < 4 ; i++) {
        for (int j = 0; j < rows[i].length; j++) {
          byte[] bytes = merged.get(rows[i][j], COLUMN_NAME);
          assertNotNull(bytes);
          Text value = new Text(bytes);
          assertTrue(value.equals(rows[i][j]));
        }
      }
      merged.close();
      LOG.info("verified merge of regions 0+1+2 and 3");
      /*
       * Merge the result of merging regions 0, 1, 2 and 3 with region 4
       */
      LOG.info("merging regions 0+1+2+3 and 4");
      merger = new Merge(this.conf);
      ToolRunner.run(merger,
          new String[] {
            this.desc.getName().toString(),
            mergedInfo.getRegionName().toString(),
            this.sourceRegions[4].getRegionName().toString()
          }
      );
      mergedInfo = merger.getMergedHRegionInfo();
     
      // Now verify that we can read all the rows from the new merged region.

      merged = HRegion.openHRegion(mergedInfo, this.rootdir, log, this.conf);
     
      for (int i = 0; i < rows.length ; i++) {
        for (int j = 0; j < rows[i].length; j++) {
          byte[] bytes = merged.get(rows[i][j], COLUMN_NAME);
          assertNotNull(bytes);
          Text value = new Text(bytes);
          assertTrue(value.equals(rows[i][j]));
        }
      }
      merged.close();
      LOG.info("verified merge of regions 0+1+2+3 and 4");
     
    } finally {
      log.closeAndDelete();
    }
  }
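The test above repeats the same merge-then-verify block four times against one shared HLog. Extracted purely for illustration as a hypothetical helper (the helper does not exist in the test; `rows`, `COLUMN_NAME`, `rootdir`, and `conf` are the test's own fields), the verification step looks like this:

private void verifyMergedRegion(HRegionInfo mergedInfo, HLog log, int regionCount)
    throws IOException {
  // Open the merged region against the shared log and confirm every row
  // from the original source regions is still readable.
  HRegion merged = HRegion.openHRegion(mergedInfo, this.rootdir, log, this.conf);
  try {
    for (int i = 0; i < regionCount; i++) {
      for (int j = 0; j < rows[i].length; j++) {
        byte[] bytes = merged.get(rows[i][j], COLUMN_NAME);
        assertNotNull(rows[i][j].toString(), bytes);
        assertTrue(new Text(bytes).equals(rows[i][j]));
      }
    }
  } finally {
    merged.close();
  }
}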

        " does not exist.";
        LOG.error(message);
        throw new FileNotFoundException(message);
      }

      this.log = new HLog(this.fs,
          new Path(this.fs.getHomeDirectory(),
              HConstants.HREGION_LOGDIR_NAME + "_" + System.currentTimeMillis()
          ),
          this.conf, null
      );
View Full Code Here

    if (info2 == null) {
      throw new IOException("Cound not find " + region2 + " in " +
          meta2.getRegionName());
    }
    HRegion merged = null;
    HLog log = utils.getLog();
    HRegion region1 =
      HRegion.openHRegion(info1, this.rootdir, log, this.conf);
    try {
      HRegion region2 =
        HRegion.openHRegion(info2, this.rootdir, log, this.conf);
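Merge obtains a single HLog from its utilities object and opens both source regions against it, so one log serves the whole merge. A sketch of that sharing pattern is below, assuming the same fields as the snippet above; the cleanup in the finally blocks is an assumption, since the original snippet is cut off before the regions are closed.

HLog log = utils.getLog();
HRegion region1 = HRegion.openHRegion(info1, this.rootdir, log, this.conf);
try {
  HRegion region2 = HRegion.openHRegion(info2, this.rootdir, log, this.conf);
  try {
    // ... merge the contents of region1 and region2 here ...
  } finally {
    region2.close();
  }
} finally {
  region1.close();
}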

        " does not exist.";
        LOG.error(message);
        throw new FileNotFoundException(message);
      }

      this.log = new HLog(this.fs,
          new Path(this.fs.getHomeDirectory(),
              HConstants.HREGION_LOGDIR_NAME + "_" + System.currentTimeMillis()
          ),
          this.conf, null
      );
View Full Code Here
