
Example source code for org.apache.hadoop.mapred.DefaultJobHistoryParser

Project: hadoop-2.6.0-cdh5.4.3    File: PostExPerformanceDiagnoser.java
/**
 * read and populate job statistics information.
 */
private void readJobInformation(JobConf jobConf, JobInfo jobInfo) throws Exception {

  /*
   * Convert the input strings to URL
   */
  URL jobConfFileUrl = new URL(this._jobConfFile);
  URL jobHistoryFileUrl = new URL(this._jobHistoryFile);

  /*
   * Read the Job Configuration from the jobConfFile url
   */  
  jobConf.addResource(jobConfFileUrl);

  /* 
   * Read JobHistoryFile and build job counters to evaluate diagnostic rules
   */
  if (jobHistoryFileUrl.getProtocol().equals("hdfs")) {
    DefaultJobHistoryParser.parseJobTasks(jobHistoryFileUrl.getPath(), jobInfo, FileSystem.get(jobConf));
  } else if (jobHistoryFileUrl.getProtocol().equals("file")) {
    DefaultJobHistoryParser.parseJobTasks(jobHistoryFileUrl.getPath(), jobInfo, FileSystem.getLocal(jobConf));
  } else {
    throw new Exception("Malformed URL. Protocol: " + jobHistoryFileUrl.getProtocol());
  }
  }
}
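
For orientation, here is a minimal standalone sketch (not taken from any of the projects above) that drives the same pre-YARN API directly: it parses a local job history file with DefaultJobHistoryParser.parseJobTasks and reports how many tasks were recovered. The history file path and the job id string are placeholders.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.mapred.DefaultJobHistoryParser;
import org.apache.hadoop.mapred.JobHistory;

public class ParseHistorySketch {
  public static void main(String[] args) throws Exception {
    // Placeholder: path to a local pre-YARN job history file
    // (named <host>_<start-time>_<job-id>_<user>_<job-name>).
    String historyFile = args[0];

    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);

    // The JobInfo container is created for a specific job id (placeholder here).
    JobHistory.JobInfo job = new JobHistory.JobInfo("job_200904211745_0001");

    // Populate job-level values and per-task entries from the history log.
    DefaultJobHistoryParser.parseJobTasks(historyFile, job, fs);

    System.out.println("parsed tasks: " + job.getAllTasks().size());
  }
}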
Project: hadoop-EAR    File: PostExPerformanceDiagnoser.java
// readJobInformation(JobConf, JobInfo): same implementation as in the
// hadoop-2.6.0-cdh5.4.3 example above.
Project: hiped2    File: JobHistoryHelper.java
public static JobHistory.JobInfo getJobInfoFromHdfsOutputDir(String outputDir, Configuration conf)
    throws IOException {
  Path output = new Path(outputDir);
  Path historyLogDir = new Path(output, "_logs/history");
  FileSystem fs = output.getFileSystem(conf);
  if (!fs.exists(output)) {
    throw new IOException("History directory " + historyLogDir.toString()
        + " does not exist");
  }
  Path[] jobFiles = FileUtil.stat2Paths(fs.listStatus(historyLogDir, jobLogFileFilter));
  if (jobFiles.length == 0) {
    throw new IOException("Not a valid history directory "
        + historyLogDir.toString());
  }
  String[] jobDetails =
      JobHistory.JobInfo.decodeJobHistoryFileName(jobFiles[0].getName()).
          split("_");
  String jobId = jobDetails[2] + "_" + jobDetails[3] + "_" + jobDetails[4];
  JobHistory.JobInfo job = new JobHistory.JobInfo(jobId);
  DefaultJobHistoryParser.parseJobTasks(jobFiles[0].toString(), job, fs);
  return job;
}
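
A usage sketch for the helper above, assuming the job's output directory still contains the _logs/history subdirectory written by the pre-YARN JobTracker; the output path is a placeholder, and the lookups rely on the old org.apache.hadoop.mapred.JobHistory.Keys enum.

// Sketch: placeholder output path; Keys enum from the old mapred JobHistory API.
Configuration conf = new Configuration();
JobHistory.JobInfo job =
    JobHistoryHelper.getJobInfoFromHdfsOutputDir("/user/alice/wordcount-output", conf);

// Job-level values are exposed as a Keys -> String map on the JobInfo.
System.out.println("job id:   " + job.get(JobHistory.Keys.JOBID));
System.out.println("job name: " + job.get(JobHistory.Keys.JOBNAME));
System.out.println("tasks:    " + job.getAllTasks().size());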
Project: hadoop-on-lustre    File: PostExPerformanceDiagnoser.java
// readJobInformation(JobConf, JobInfo): same implementation as in the
// hadoop-2.6.0-cdh5.4.3 example above.
Project: RDFS    File: PostExPerformanceDiagnoser.java
// readJobInformation(JobConf, JobInfo): same implementation as in the
// hadoop-2.6.0-cdh5.4.3 example above.
Project: hadoop-0.20    File: PostExPerformanceDiagnoser.java
// readJobInformation(JobConf, JobInfo): same implementation as in the
// hadoop-2.6.0-cdh5.4.3 example above.
Project: hanoi-hadoop-2.0.0-cdh    File: PostExPerformanceDiagnoser.java
// readJobInformation(JobConf, JobInfo): same implementation as in the
// hadoop-2.6.0-cdh5.4.3 example above.
Project: hortonworks-extension    File: PostExPerformanceDiagnoser.java
// readJobInformation(JobConf, JobInfo): same implementation as in the
// hadoop-2.6.0-cdh5.4.3 example above.
Project: hadoop-gpu    File: PostExPerformanceDiagnoser.java
// readJobInformation(JobConf, JobInfo): same implementation as in the
// hadoop-2.6.0-cdh5.4.3 example above.
Project: hiped2    File: JobHistoryHelper.java
public static JobHistory.JobInfo getJobInfoFromLocalFile(String outputFile, Configuration conf)
    throws IOException {
  FileSystem fs = FileSystem.getLocal(conf);

  Path outputFilePath = new Path(outputFile);

  String[] jobDetails =
      JobHistory.JobInfo.decodeJobHistoryFileName(outputFilePath.getName()).
          split("_");
  String jobId = jobDetails[2] + "_" + jobDetails[3] + "_" + jobDetails[4];
  JobHistory.JobInfo job = new JobHistory.JobInfo(jobId);
  DefaultJobHistoryParser.parseJobTasks(outputFile, job, fs);
  return job;
}
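
Both helpers rebuild the job id from the decoded history file name, so a short worked example of that string handling may help; the file name below is made up but follows the <host>_<start-time>_<job-id>_<user>_<job-name> convention.

// Hypothetical encoded history file name.
String fileName = "localhost_1240334360530_job_200904211745_0001_alice_wordcount";

// decodeJobHistoryFileName un-escapes characters that were encoded when the
// file was written; it declares IOException, so callers handle or rethrow it.
String decoded = JobHistory.JobInfo.decodeJobHistoryFileName(fileName);

// Tokens: [host, start-time, "job", epoch, sequence, user, job-name, ...]
String[] jobDetails = decoded.split("_");

// Tokens 2..4 are exactly the three parts of the job id.
String jobId = jobDetails[2] + "_" + jobDetails[3] + "_" + jobDetails[4];
// jobId is now "job_200904211745_0001"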
Project: spork-streaming    File: HadoopJobHistoryLoader.java
@Override
public boolean nextKeyValue() throws IOException, InterruptedException {
    if (location != null) {
        LOG.info("load: " + location);  
        Path full = new Path(location);  
        String[] jobDetails = 
            JobInfo.decodeJobHistoryFileName(full.getName()).split("_");
        String jobId = jobDetails[2] + "_" + jobDetails[3] + "_"
                + jobDetails[4];
        JobHistory.JobInfo job = new JobHistory.JobInfo(jobId); 

        value = new MRJobInfo();

        FileSystem fs = full.getFileSystem(conf);
        FileStatus fstat = fs.getFileStatus(full);

        LOG.info("file size: " + fstat.getLen());
        DefaultJobHistoryParser.parseJobTasks(location, job, full.getFileSystem(conf));
        LOG.info("job history parsed successfully");
        HadoopJobHistoryLoader.parseJobHistory(conf,value);
        LOG.info("get parsed job history");

        // parse Hadoop job xml file
        Path parent = full.getParent();
        String jobXml = jobDetails[0] + "_" + jobDetails[1] + "_"
                + jobDetails[2] + "_" + jobDetails[3] + "_"
                + jobDetails[4] + "_conf.xml";
        Path p = new Path(parent, jobXml);

        FSDataInputStream fileIn = fs.open(p);
        Map<String,String> val = HadoopJobHistoryLoader
                .parseJobXML(fileIn);
        for (String key : val.keySet()) {
            value.job.put(key, val.get(key));
        }

        location = null;
        return true;
    }          
    value = null;
    return false;
}
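
The tail of the method reads the sibling <history-name>_conf.xml through the loader's own parseJobXML helper. If only the raw key/value pairs are needed, the same file can also be loaded through a plain Hadoop Configuration; this is a hedged alternative sketch, not what HadoopJobHistoryLoader itself does, and it reuses the fs, p, and value variables from the method above.

// Alternative sketch: read the job's _conf.xml into a Configuration object.
Configuration jobXmlConf = new Configuration(false); // skip default resources
jobXmlConf.addResource(fs.open(p));                  // addResource(InputStream)
for (java.util.Map.Entry<String, String> e : jobXmlConf) {
    value.job.put(e.getKey(), e.getValue());
}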
Project: spork    File: HadoopJobHistoryLoader.java
// nextKeyValue(): same implementation as in the spork-streaming example above,
// except that the job configuration file name is built from the first three
// tokens of the decoded history file name:
//   String jobXml = jobDetails[0] + "_" + jobDetails[1] + "_" + jobDetails[2] + "_conf.xml";
Project: sedge    File: HadoopJobHistoryLoader.java
// nextKeyValue(): essentially the same implementation as in the
// spork-streaming example above.
