Type mismatch error in a Hadoop program


import java.io.IOException;
import java.util.*;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class CommonFriends {

    public static class TokenizerMapper
            extends Mapper<Object, Text, Text, IntWritable> {

        private IntWritable friend = new IntWritable();
        private Text friends = new Text();

        public void map(Object key, Text value, Context context)
                throws IOException, InterruptedException {
            StringTokenizer itr = new StringTokenizer(value.toString(), "\n");
            while (itr.hasMoreTokens()) {
                // Each line: a person id followed by the ids of that person's friends.
                String[] line = itr.nextToken().split(" ");
                if (line.length > 2) {
                    int person = Integer.parseInt(line[0]);
                    for (int i = 1; i < line.length; i++) {
                        int ifriend = Integer.parseInt(line[i]);
                        // Key is the pair "smaller-larger", value is every other friend on the line.
                        friends.set(person < ifriend ? person + "-" + ifriend : ifriend + "-" + person);
                        for (int j = 1; j < line.length; j++) {
                            if (i != j) {
                                friend.set(Integer.parseInt(line[j]));
                                context.write(friends, friend);
                            }
                        }
                    }
                }
            }
        }
    }

    public static class IntSumReducer extends Reducer<Text, IntWritable, Text, Text> {

        private Text result = new Text();

        public void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            // Friends that appear more than once are common to both persons in the key pair.
            HashSet<IntWritable> duplicates = new HashSet<>();
            ArrayList<Integer> tmp = new ArrayList<>();
            for (IntWritable val : values) {
                if (duplicates.contains(val))
                    tmp.add(val.get());
                else
                    duplicates.add(val);
            }
            result.set(tmp.toString());
            context.write(key, result);
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "Common Friends");
        job.setJarByClass(CommonFriends.class);
        job.setMapperClass(TokenizerMapper.class);
        job.setCombinerClass(IntSumReducer.class);
        job.setReducerClass(IntSumReducer.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}

Error: java.io.IOException: wrong value class: class org.apache.hadoop.io.Text is not class org.apache.hadoop.io.IntWritable
    at org.apache.hadoop.mapred.IFile$Writer.append(IFile.java:194)
    at org.apache.hadoop.mapred.Task$CombineOutputCollector.collect(Task.java:1350)
    at org.apache.hadoop.mapred.Task$NewCombinerRunner$OutputConverter.write(Task.java:1667)
    at org.apache.hadoop.mapreduce.task.TaskInputOutputContextImpl.write(TaskInputOutputContextImpl.java:89)
    at org.apache.hadoop.mapreduce.lib.reduce.WrappedReducer$Context.write(WrappedReducer.java:105)
    at CommonFriends$IntSumReducer.reduce(CommonFriends.java:51)
    at CommonFriends$IntSumReducer.reduce(CommonFriends.java:38)
    at org.apache.hadoop.mapreduce.Reducer.run(Reducer.java:171)
    at org.apache.hadoop.mapred.Task$NewCombinerRunner.combine(Task.java:1688)
    at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.sortAndSpill(MapTask.java:1637)
    at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.flush(MapTask.java:1489)
    at org.apache.hadoop.mapred.MapTask$NewOutputCollector.close(MapTask.java:723)
    at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:793)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:164)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:158)

This is my code, and the error message is shown above. Any ideas? I think the problem is in the configuration of the mapper's and reducer's output classes. The input file is a list of numbers, and I can provide more details if needed. The program finds the common friends between friends.

Removing job.setCombinerClass(IntSumReducer.class); from the code fixes the problem.
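For reference, a minimal sketch of the driver with only that line removed (all other settings exactly as in the question); the reducer then receives the mapper's <Text, IntWritable> output directly and writes <Text, Text>, which matches the declared job output classes:

    // Sketch: same driver as in the question, with the combiner line removed.
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "Common Friends");
    job.setJarByClass(CommonFriends.class);
    job.setMapperClass(TokenizerMapper.class);
    // job.setCombinerClass(IntSumReducer.class);  // removed: its output value type (Text) does not match the map output value class (IntWritable)
    job.setReducerClass(IntSumReducer.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(IntWritable.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);
    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);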

I just looked at your code; it seems you are using the reducer class as the combiner class.

One thing you need to check:

Your combiner takes its input as <Text, IntWritable>, but because it is the same class as the reducer, it writes its output as <Text, Text>. A combiner's output must have the same types as the map output, which you declared as <Text, IntWritable> via setMapOutputValueClass, so the framework throws the "wrong value class" error as soon as the combiner's Text values are written into the map output spill (the IFile$Writer.append frame in the stack trace).

There are two things you can do:

1) You could consider changing the reducer's output types, so that when the same class runs as the combiner its output matches the map output types <Text, IntWritable>.

2) You could consider writing a separate combiner class; a sketch is given below.
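A minimal sketch of what such a separate combiner could look like, assuming it is added as a nested class of CommonFriends (the class name PassThroughCombiner is made up here). It only forwards each value unchanged, which keeps it type-compatible with the declared map output types <Text, IntWritable>. Whether any real local aggregation is safe depends on the algorithm: for common friends, dropping duplicates in the combiner would destroy exactly the repetitions the reducer looks for.

    // Hypothetical combiner: input and output are both <Text, IntWritable>,
    // so it matches setMapOutputKeyClass/setMapOutputValueClass.
    public static class PassThroughCombiner
            extends Reducer<Text, IntWritable, Text, IntWritable> {
        @Override
        public void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            for (IntWritable val : values) {
                // Forward every value unchanged; real pre-aggregation would go here,
                // but only if it preserves the duplicates the reducer needs to see.
                context.write(key, val);
            }
        }
    }

    // In the driver:
    // job.setCombinerClass(PassThroughCombiner.class);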
