Error: java.lang.NoClassDefFoundError: org/apache/hadoop/util/StopWatch

I am building my own custom input format, but I get the following exception when I run my program.

Error message

Exception in thread "main" java.lang.NoClassDefFoundError: org/apache/hadoop/util/StopWatch
  at org.apache.hadoop.mapreduce.lib.input.FileInputFormat.getSplits(FileInputFormat.java:381)
  at org.apache.hadoop.mapreduce.JobSubmitter.writeNewSplits(JobSubmitter.java:304)
  at org.apache.hadoop.mapreduce.JobSubmitter.writeSplits(JobSubmitter.java:321)
  at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:199)
  at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1290)
  at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1287)
  at java.security.AccessController.doPrivileged(Native Method)
  at javax.security.auth.Subject.doAs(Subject.java:415)
  at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1628)
  at org.apache.hadoop.mapreduce.Job.submit(Job.java:1287)
  at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:1308)
  at com.oodles.Hadoop.inputFormat.MyFile.main(MyFile.java:32)

MyFile

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
public class MyFile {
    public static void main(String[] args) throws IOException,
            ClassNotFoundException, InterruptedException {
        Configuration conf = new Configuration();
        Path outputPath = new Path("/home/oodles/output");
        // delete any previous output directory (recursive) so the job can be rerun
        outputPath.getFileSystem(conf).delete(outputPath, true);

        Job job = Job.getInstance(conf, "custom input format");
        job.setJarByClass(MyFile.class);
        job.setJobName("CustomTest");
        job.setNumReduceTasks(0);
        job.setMapperClass(MyMapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);
        job.setInputFormatClass(CustomeInputFormat.class);
        FileInputFormat.addInputPath(job, new Path("/home/oodles/input"));
        FileOutputFormat.setOutputPath(job, outputPath);
        job.waitForCompletion(true);
    }
}

CustomeInputFormat

import java.io.IOException;

import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;

public class CustomeInputFormat extends FileInputFormat<MyKey, MyValue> {
    @Override
    public RecordReader<MyKey, MyValue> createRecordReader(InputSplit split,
            TaskAttemptContext context) throws IOException, InterruptedException {
        return new MyRecordReader();
    }
}

MyMapper

import java.io.IOException;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class MyMapper extends Mapper<MyKey, MyValue, Text, Text> {
    @Override
    protected void map(MyKey key, MyValue value, Context context) throws IOException, InterruptedException {
        String sensor = key.getSensorType().toString();
        if (sensor.equalsIgnoreCase("a")) {
            context.write(value.getValue1(), value.getValue2());
        }
    }
}

MyKey

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;

public class MyKey implements WritableComparable<MyKey> {
    private Text SensorType,timestamp,status;
    public MyKey(){
        this.SensorType = new Text();
        this.timestamp = new Text();
        this.status = new Text();
    }
    public MyKey(Text SensorType,Text timestamp,Text status){
        this.SensorType = SensorType;
        this.timestamp = timestamp;
        this.status = status;       
    }
    public void readFields(DataInput in) throws IOException {
        SensorType.readFields(in);
        timestamp.readFields(in);
        status.readFields(in);
    }
    public void write(DataOutput out) throws IOException {
        SensorType.write(out);
        timestamp.write(out);
        status.write(out);
    }
    public int compareTo(MyKey other) {
        int cmp = SensorType.compareTo(other.SensorType);
        if(cmp != 0){
                return cmp;
        }
        cmp = timestamp.compareTo(other.timestamp);
        if(cmp != 0){
                return cmp;
        }
        return status.compareTo(other.status);
    }
    public Text getSensorType() {
        return SensorType;
    }
    public void setSensorType(Text sensorType) {
        SensorType = sensorType;
    }
    public Text getTimestamp() {
        return timestamp;
    }
    public void setTimestamp(Text timestamp) {
        this.timestamp = timestamp;
    }
    public Text getStatus() {
        return status;
    }
    public void setStatus(Text status) {
        this.status = status;
    }

}

MyValue

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;

public class MyValue implements WritableComparable<MyValue> {
    private Text value1,value2;
    public MyValue(){
        this.value1 = new Text();
        this.value2 = new Text();
    }
    public MyValue(Text value1,Text value2){
        this.value1 = value1;
        this.value2 = value2;
    }
    public void readFields(DataInput in) throws IOException {
        value1.readFields(in);
        value2.readFields(in);
    }
    public void write(DataOutput out) throws IOException {
        value1.write(out);
        value2.write(out);
    }
    public int compareTo(MyValue other) {
        int cmp = value1.compareTo(other.value1);
        if(cmp != 0){
                return cmp;
        }
        return value2.compareTo(other.value2);
    }
    public Text getValue1() {
        return value1;
    }
    public void setValue1(Text value1) {
        this.value1 = value1;
    }
    public Text getValue2() {
        return value2;
    }
    public void setValue2(Text value2) {
        this.value2 = value2;
    }

}

MyRecordReader

import java.io.IOException;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.LineRecordReader;

public class MyRecordReader extends RecordReader<MyKey, MyValue> {

    private MyKey key;
    private MyValue value;
    private LineRecordReader reader = new LineRecordReader();
    @Override
    public void close() throws IOException {
        reader.close();
    }
    @Override
    public MyKey getCurrentKey() throws IOException, InterruptedException {
        return key;
    }
    @Override
    public MyValue getCurrentValue() throws IOException, InterruptedException {
        return value;
    }
    @Override
    public float getProgress() throws IOException, InterruptedException {
        return reader.getProgress();
    }
    @Override
    public void initialize(InputSplit is, TaskAttemptContext tac)
            throws IOException, InterruptedException {
        reader.initialize(is, tac);
    }
    @Override
    public boolean nextKeyValue() throws IOException, InterruptedException {
        boolean gotNextKeyValue = reader.nextKeyValue();
        if(gotNextKeyValue){
            if(key==null){
                key = new MyKey();
            }
            if(value == null){
                value = new MyValue();
            }
            Text line = reader.getCurrentValue();
            // split the tab-separated record into sensorType, timestamp, status, value1, value2
            String[] tokens = line.toString().split("\t");
            key.setSensorType(new Text(tokens[0]));
            key.setTimestamp(new Text(tokens[1]));
            key.setStatus(new Text(tokens[2]));
            value.setValue1(new Text(tokens[3]));
            value.setValue2(new Text(tokens[4]));
        }
        else {
            key = null;
            value = null;
        }
        return gotNextKeyValue;
    }
}

I am using hadoop-client 2.6.0 and hadoop-mapreduce-client-core 2.7.0. I suspect it is because of the job.waitForCompletion(true); line in MyFile, but I am not sure!

<!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-mapreduce-client-core -->
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-mapreduce-client-core</artifactId>
    <version>0.23.1</version>
</dependency>

I added the above dependency to my Maven pom, and after that it worked fine for me.

I solved this problem: it was in fact a version mismatch, so I changed hadoop-client from 2.6.0 to 2.7.0.
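
For reference, a minimal sketch of what the aligned dependencies could look like in the pom after that change (the exact artifact list is an assumption; keep whatever else your project already declares):

<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-client</artifactId>
    <version>2.7.0</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-mapreduce-client-core</artifactId>
    <version>2.7.0</version>
</dependency>

With both artifacts on the same release line, the FileInputFormat class and the org.apache.hadoop.util.StopWatch class it references come from matching versions, which is what the NoClassDefFoundError was about.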
