import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import java.io.IOException;
/**
 * Map phase of the classic word-count job.
 *
 * <p>Input: {@code (byte offset, line of text)} pairs produced by the default
 * {@code TextInputFormat}. Output: one {@code (word, 1)} pair per tab-separated
 * token on the line; the reducer sums the 1s per word.
 */
public class WordCountMap extends Mapper<LongWritable, Text, Text, IntWritable> {

    // Hadoop Writables are mutable and designed for reuse: allocating them once
    // per mapper instead of once per word avoids needless object churn, since
    // the framework serializes the value at write() time.
    private final Text outKey = new Text();
    private static final IntWritable ONE = new IntWritable(1);

    /**
     * Splits each input line on tab characters and emits {@code (word, 1)}
     * for every token.
     *
     * @param key     byte offset of the line within the input split (unused)
     * @param value   the line of text
     * @param context sink for the emitted (word, count) pairs
     * @throws IOException          if the framework fails to write the output
     * @throws InterruptedException if the task is interrupted while writing
     */
    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        String line = value.toString();
        // BUG FIX: the original split on the literal string "/t" (slash-t),
        // which almost never occurs — entire lines were emitted as single
        // "words". "\t" is the tab character actually intended.
        String[] words = line.split("\t");
        for (String word : words) {
            outKey.set(word);
            context.write(outKey, ONE);
        }
    }
}