ReadingHDFSFilesAsSideEffect.java
Uploaded by: quxuerui
Upload date: 2018-01-08
Archive size: 41811k
File size: 3k
Category: Grid Computing
Platform: Java
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.vaidya.postexdiagnosis.tests;

import org.apache.hadoop.vaidya.statistics.job.JobStatistics;
import org.apache.hadoop.vaidya.statistics.job.JobStatisticsInterface.JobKeys;
import org.apache.hadoop.vaidya.DiagnosticTest;
/**
 * Diagnostic test that detects map and/or reduce tasks reading HDFS files
 * as a side effect, i.e. HDFS reads beyond the declared map input.
 */
public class ReadingHDFSFilesAsSideEffect extends DiagnosticTest {

  private double _impact;
  private JobStatistics _job;

  public ReadingHDFSFilesAsSideEffect() {
  }
  /*
   * Evaluate the test
   */
  @Override
  public double evaluate(JobStatistics job) {
    this._job = job;

    /*
     * Calculate and return the impact.
     *
     * Check if the job-level aggregate bytes read from HDFS exceed the map
     * input bytes. Typically the two are equal, unless maps and/or reducers
     * are reading additional data from HDFS as a side effect.
     *
     * If side-effect HDFS reads are at least as large as the map input
     * itself (ratio >= 2), the impact is treated as maximum (1.0).
     * Otherwise the impact is the excess fraction (ratio - 1).
     */
    // Cast to double so that long/long division does not truncate the ratio.
    this._impact = (double) job.getLongValue(JobKeys.HDFS_BYTES_READ)
        / job.getLongValue(JobKeys.MAP_INPUT_BYTES);
    if (this._impact >= 2.0) {
      this._impact = 1;
    } else {
      this._impact -= 1;
    }
    return this._impact;
  }
  /* (non-Javadoc)
   * @see org.apache.hadoop.contrib.utils.perfadvisor.diagnostic_rules.DiagnosticRule#getAdvice()
   */
  @Override
  public String getPrescription() {
    return
      "Map and/or Reduce tasks are reading application-specific files from HDFS. Make sure the replication factor\n" +
      "of these HDFS files is high enough to avoid a data-read bottleneck. Typically the replication factor\n" +
      "can be the square root of the map/reduce task capacity of the allocated cluster.";
  }
  /* (non-Javadoc)
   * @see org.apache.hadoop.contrib.utils.perfadvisor.diagnostic_rules.DiagnosticRule#getReferenceDetails()
   */
  @Override
  public String getReferenceDetails() {
    String ref =
      "* Total HDFS Bytes read: " + this._job.getLongValue(JobKeys.HDFS_BYTES_READ) + "\n" +
      "* Total Map Input Bytes read: " + this._job.getLongValue(JobKeys.MAP_INPUT_BYTES) + "\n" +
      "* Impact: " + truncate(this._impact);
    return ref;
  }
}
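For reference, here is a minimal standalone sketch of the impact formula that evaluate() computes, tried against sample byte counts. The SideEffectImpactDemo class and its impact() method are hypothetical names used only for illustration; they are not part of Hadoop Vaidya.

// Hypothetical demo class, not part of Hadoop Vaidya: illustrates the
// impact formula from ReadingHDFSFilesAsSideEffect.evaluate().
public class SideEffectImpactDemo {

  // Ratio of total HDFS bytes read to map input bytes, shifted so that a
  // "clean" job (ratio == 1) scores 0, and capped at 1 once side-effect
  // reads reach the size of the map input itself (ratio >= 2).
  // Assumes mapInputBytes > 0, as the original test does.
  static double impact(long hdfsBytesRead, long mapInputBytes) {
    double ratio = (double) hdfsBytesRead / mapInputBytes;
    return (ratio >= 2.0) ? 1.0 : ratio - 1.0;
  }

  public static void main(String[] args) {
    System.out.println(impact(1_000L, 1_000L)); // 0.0 -> no side-effect reads
    System.out.println(impact(1_500L, 1_000L)); // 0.5 -> side-effect reads are half the map input
    System.out.println(impact(3_000L, 1_000L)); // 1.0 -> capped at maximum impact
  }
}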