Examples of org.apache.hadoop.fs.FileContext$Util

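FileContext is the newer Hadoop filesystem client API (the older one is FileSystem); its nested Util class (FileContext$Util, reached via FileContext#util()) bundles convenience helpers such as exists() and copy(). The fragments below are drawn from Hadoop MapReduce and YARN source code. As a warm-up, here is a minimal, self-contained sketch (not taken from those fragments; the file name is illustrative) that writes and re-reads a file on the local filesystem through FileContext and checks it with util().exists(). Later sketches reuse these imports and omit the class scaffolding.

    import java.io.IOException;
    import java.util.EnumSet;

    import org.apache.hadoop.fs.CreateFlag;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileContext;
    import org.apache.hadoop.fs.Options;
    import org.apache.hadoop.fs.Path;

    public class FileContextRoundTrip {
      public static void main(String[] args) throws IOException {
        // Bind a FileContext to the local filesystem.
        FileContext fc = FileContext.getLocalFSFileContext();
        // Illustrative path; makeQualified adds the scheme and working directory.
        Path p = fc.makeQualified(new Path("/tmp/fc-demo.txt"));

        // Write, creating missing parent directories.
        FSDataOutputStream out = fc.create(p,
            EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE),
            Options.CreateOpts.createParent());
        out.writeUTF("hello from FileContext");
        out.close();

        // FileContext.Util helpers are reached through util().
        System.out.println("exists? " + fc.util().exists(p));

        // Read the value back.
        FSDataInputStream in = fc.open(p);
        System.out.println(in.readUTF());
        in.close();

        fc.delete(p, false);
      }
    }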

    String jobhistoryDir = JobHistoryUtils.getConfiguredHistoryStagingDirPrefix(getConfig());
    FSDataInputStream in = null;
    Path historyFile = null;
    Path histDirPath = FileContext.getFileContext(getConfig()).makeQualified(
        new Path(jobhistoryDir));
    FileContext fc = FileContext.getFileContext(histDirPath.toUri(),
        getConfig());
    //read the previous history file
    historyFile = fc.makeQualified(JobHistoryUtils.getStagingJobHistoryFile(
        histDirPath, jobName, (applicationAttemptId.getAttemptId() - 1)));
    LOG.info("History file is at " + historyFile);
    in = fc.open(historyFile);
    JobHistoryParser parser = new JobHistoryParser(in);
    jobInfo = parser.parse();
    Exception parseException = parser.getParseException();
    if (parseException != null) {
      LOG.info("Got an error parsing job-history file " + historyFile +


    String jobhistoryFileName = FileNameIndexUtils
        .getDoneFileName(jobIndexInfo);

    Path historyFilePath = new Path(jobhistoryDir, jobhistoryFileName);
    FSDataInputStream in = null;
    LOG.info("JobHistoryFile is: " + historyFilePath);
    FileContext fc = null;
    try {
      fc = FileContext.getFileContext(conf);
      in = fc.open(fc.makeQualified(historyFilePath));
    } catch (IOException ioe) {
      LOG.info("Can not open history file: " + historyFilePath, ioe);
      throw (new Exception("Can not open History File"));
    }

    String jobhistoryFileName = FileNameIndexUtils
        .getDoneFileName(jobIndexInfo);

    Path historyFilePath = new Path(jobhistoryDir, jobhistoryFileName);
    FSDataInputStream in = null;
    FileContext fc = null;
    try {
      fc = FileContext.getFileContext(conf);
      in = fc.open(fc.makeQualified(historyFilePath));
    } catch (IOException ioe) {
      LOG.info("Can not open history file: " + historyFilePath, ioe);
      throw (new Exception("Can not open History File"));
    }
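
The three fragments above share one pattern: resolve a FileContext for whatever filesystem the path's URI names, qualify the path, and open it. A stripped-down sketch of that pattern (the path is illustrative; on a real cluster it would typically be an hdfs:// URI):

    Configuration conf = new Configuration();
    // Illustrative location of a job history file.
    Path historyFilePath = new Path("file:///tmp/history/job_0001.jhist");

    // Bind the FileContext to the scheme in the path rather than to fs.defaultFS.
    FileContext fc = FileContext.getFileContext(historyFilePath.toUri(), conf);
    FSDataInputStream in = fc.open(fc.makeQualified(historyFilePath));
    try {
      // ... consume the stream, e.g. with JobHistoryParser ...
    } finally {
      in.close();
    }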

  public ConfInfo(Job job, Configuration conf) throws IOException {

    Path confPath = job.getConfFile();
    this.property = new ArrayList<ConfEntryInfo>();
    // Read in the configuration file and put it in a key/value table.
    FileContext fc = FileContext.getFileContext(confPath.toUri(), conf);
    Configuration jobConf = new Configuration(false);
    jobConf.addResource(fc.open(confPath));
    this.path = confPath.toString();
    for (Map.Entry<String, String> entry : jobConf) {
      this.property.add(new ConfEntryInfo(entry.getKey(), entry.getValue()));
    }
  }
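
The ConfInfo constructor above shows a second use of FileContext: Configuration#addResource(InputStream) accepts the stream returned by FileContext#open, so a job's configuration XML can be loaded from any Hadoop filesystem. A minimal sketch, assuming a Hadoop-style configuration XML already exists at the given (illustrative) path; method body only, plus java.util.Map and org.apache.hadoop.conf.Configuration imports:

    Configuration conf = new Configuration();
    Path confPath = new Path("/tmp/job.xml");  // illustrative; usually job.getConfFile()

    FileContext fc = FileContext.getFileContext(confPath.toUri(), conf);

    // Load only the entries from this file, not the default resources.
    Configuration jobConf = new Configuration(false);
    jobConf.addResource(fc.open(confPath));

    // Configuration is Iterable<Map.Entry<String, String>>.
    for (Map.Entry<String, String> entry : jobConf) {
      System.out.println(entry.getKey() + " = " + entry.getValue());
    }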

    delService.init(null);
    delService.start();

    AbstractFileSystem spylfs =
      spy(FileContext.getLocalFSFileContext().getDefaultFileSystem());
    FileContext lfs = FileContext.getFileContext(spylfs, conf);
    doNothing().when(spylfs).mkdir(
        isA(Path.class), isA(FsPermission.class), anyBoolean());

    List<Path> localDirs = new ArrayList<Path>();
    String[] sDirs = new String[4];
    for (int i = 0; i < 4; ++i) {
      localDirs.add(lfs.makeQualified(new Path(basedir, i + "")));
      sDirs[i] = localDirs.get(i).toString();
    }
    conf.setStrings(YarnConfiguration.NM_LOCAL_DIRS, sDirs);
    LocalDirsHandlerService diskhandler = new LocalDirsHandlerService();
    diskhandler.init(conf);

  @SuppressWarnings("unchecked") // mocked generics
  public void testResourceRelease() throws Exception {
    Configuration conf = new YarnConfiguration();
    AbstractFileSystem spylfs =
      spy(FileContext.getLocalFSFileContext().getDefaultFileSystem());
    final FileContext lfs = FileContext.getFileContext(spylfs, conf);
    doNothing().when(spylfs).mkdir(
        isA(Path.class), isA(FsPermission.class), anyBoolean());

    List<Path> localDirs = new ArrayList<Path>();
    String[] sDirs = new String[4];
    for (int i = 0; i < 4; ++i) {
      localDirs.add(lfs.makeQualified(new Path(basedir, i + "")));
      sDirs[i] = localDirs.get(i).toString();
    }
    conf.setStrings(YarnConfiguration.NM_LOCAL_DIRS, sDirs);

    Server ignore = mock(Server.class);

  @SuppressWarnings("unchecked") // mocked generics
  public void testLocalizationHeartbeat() throws Exception {
    Configuration conf = new YarnConfiguration();
    AbstractFileSystem spylfs =
      spy(FileContext.getLocalFSFileContext().getDefaultFileSystem());
    final FileContext lfs = FileContext.getFileContext(spylfs, conf);
    doNothing().when(spylfs).mkdir(
        isA(Path.class), isA(FsPermission.class), anyBoolean());

    List<Path> localDirs = new ArrayList<Path>();
    String[] sDirs = new String[4];
    for (int i = 0; i < 4; ++i) {
      localDirs.add(lfs.makeQualified(new Path(basedir, i + "")));
      sDirs[i] = localDirs.get(i).toString();
    }
    conf.setStrings(YarnConfiguration.NM_LOCAL_DIRS, sDirs);

    Server ignore = mock(Server.class);
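
The test fragments above all rely on the same trick: spy on the local AbstractFileSystem with Mockito, wrap the spy in a FileContext via FileContext.getFileContext(spylfs, conf), and stub mkdir so the test never touches the real disk. A condensed sketch of just that mechanism (method body only; assumes Mockito static imports for spy, doNothing, verify, isA and anyBoolean, and a test method declared to throw Exception):

    Configuration conf = new Configuration();

    // Spy on the real local AbstractFileSystem and wrap it in a FileContext.
    AbstractFileSystem spylfs =
        spy(FileContext.getLocalFSFileContext().getDefaultFileSystem());
    FileContext lfs = FileContext.getFileContext(spylfs, conf);

    // Swallow mkdir calls so no directories are actually created.
    doNothing().when(spylfs).mkdir(
        isA(Path.class), isA(FsPermission.class), anyBoolean());

    // Goes through the FileContext API but is intercepted by the stub.
    lfs.mkdir(new Path("/tmp/never-created"), FsPermission.getDefault(), true);
    verify(spylfs).mkdir(isA(Path.class), isA(FsPermission.class), anyBoolean());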

    private void writeCredentials(Path nmPrivateCTokensPath)
        throws IOException {
      DataOutputStream tokenOut = null;
      try {
        Credentials credentials = context.getCredentials();
        FileContext lfs = getLocalFileContext(getConfig());
        tokenOut =
            lfs.create(nmPrivateCTokensPath, EnumSet.of(CREATE, OVERWRITE));
        LOG.info("Writing credentials to the nmPrivate file "
            + nmPrivateCTokensPath.toString() + ". Credentials list: ");
        if (LOG.isDebugEnabled()) {
          for (Token<? extends TokenIdentifier> tk : credentials
              .getAllTokens()) {
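
writeCredentials pairs FileContext#create (with the CREATE and OVERWRITE flags) with Credentials#writeTokenStorageToStream; FSDataOutputStream is a DataOutputStream, so the credentials can be written straight to any Hadoop filesystem. A minimal local round trip, with an illustrative path and secret key (method body only; also imports Credentials, Text and StandardCharsets):

    FileContext lfs = FileContext.getLocalFSFileContext();
    Path tokensPath = lfs.makeQualified(new Path("/tmp/container.tokens"));  // illustrative

    Credentials credentials = new Credentials();
    credentials.addSecretKey(new Text("my.secret"),
        "s3cr3t".getBytes(StandardCharsets.UTF_8));

    // Write the credentials through the FileContext.
    FSDataOutputStream tokenOut = lfs.create(tokensPath,
        EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE),
        Options.CreateOpts.createParent());
    try {
      credentials.writeTokenStorageToStream(tokenOut);
    } finally {
      tokenOut.close();
    }

    // Read them back the same way.
    Credentials readBack = new Credentials();
    FSDataInputStream tokenIn = lfs.open(tokensPath);
    try {
      readBack.readTokenStorageStream(tokenIn);
    } finally {
      tokenIn.close();
    }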

    AbstractFileSystem spylfs =
      spy(FileContext.getLocalFSFileContext().getDefaultFileSystem());
    // don't actually create dirs
    doNothing().when(spylfs).mkdir(
        isA(Path.class), isA(FsPermission.class), anyBoolean());
    FileContext lfs = FileContext.getFileContext(spylfs, conf);
    final String user = "yak";
    final String appId = "app_RM_0";
    final String cId = "container_0";
    final InetSocketAddress nmAddr = new InetSocketAddress("foobar", 4344);
    final List<Path> localDirs = new ArrayList<Path>();
    for (int i = 0; i < 4; ++i) {
      localDirs.add(lfs.makeQualified(new Path(basedir, i + "")));
    }
    RecordFactory mockRF = getMockLocalizerRecordFactory();
    ContainerLocalizer concreteLoc = new ContainerLocalizer(lfs, user,
        appId, cId, localDirs, mockRF);
    ContainerLocalizer localizer = spy(concreteLoc);

    // return credential stream instead of opening local file
    final Random r = new Random();
    long seed = r.nextLong();
    r.setSeed(seed);
    System.out.println("SEED: " + seed);
    DataInputBuffer appTokens = createFakeCredentials(r, 10);
    Path tokenPath =
      lfs.makeQualified(new Path(
            String.format(ContainerLocalizer.TOKEN_FILE_NAME_FMT, cId)));
    doReturn(new FSDataInputStream(new FakeFSDataInputStream(appTokens))
        ).when(spylfs).open(tokenPath);

    // mock heartbeat responses from NM

    private final TFile.Reader.Scanner scanner;
    private final TFile.Reader reader;

    public LogReader(Configuration conf, Path remoteAppLogFile)
        throws IOException {
      FileContext fileContext = FileContext.getFileContext(conf);
      this.fsDataIStream = fileContext.open(remoteAppLogFile);
      reader =
          new TFile.Reader(this.fsDataIStream, fileContext.getFileStatus(
              remoteAppLogFile).getLen(), conf);
      this.scanner = reader.createScanner();
    }
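
TFile.Reader needs to know the stream length up front, which is why the LogReader constructor pairs FileContext#open with FileContext#getFileStatus(...).getLen(). The same open-plus-length pattern in isolation (illustrative path, method body only):

    FileContext fc = FileContext.getFileContext(new Configuration());
    Path p = fc.makeQualified(new Path("/tmp/fc-demo.txt"));  // illustrative

    long len = fc.getFileStatus(p).getLen();
    byte[] buf = new byte[(int) len];

    FSDataInputStream in = fc.open(p);
    try {
      in.readFully(0, buf);  // positioned read of the whole file
    } finally {
      in.close();
    }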
