package com.sky.mr.wordcount;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import java.io.IOException;
public class WordcountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
// The map method is called once per line of input, so to avoid creating new
// objects on every call (wasting memory), the Text and IntWritable objects
// are created outside the map method and reused.
Text k = new Text();
IntWritable v = new IntWritable(1);
@Override
protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
// Get the text content of the current line
String line = value.toString();
// Split the line on spaces
String[] words = line.split(" ");
//轉(zhuǎn)換數(shù)據(jù)格式,輸出
for ( String word: words) {
k.set(word);
context.write(k, v);
}
}
}
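Before submitting anything to a cluster, the mapper can be checked in isolation. The sketch below is a minimal unit test assuming Apache MRUnit and JUnit are on the test classpath; the test class and method names are illustrative, not part of the original code.
package com.sky.mr.wordcount;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mrunit.mapreduce.MapDriver;
import org.junit.Test;
public class WordcountMapperTest {
    @Test
    public void emitsOnePairPerWord() throws Exception {
        // Feed one line to the mapper and assert it emits (word, 1) for every word, in order
        MapDriver.newMapDriver(new WordcountMapper())
                .withInput(new LongWritable(0), new Text("import java io IOException"))
                .withOutput(new Text("import"), new IntWritable(1))
                .withOutput(new Text("java"), new IntWritable(1))
                .withOutput(new Text("io"), new IntWritable(1))
                .withOutput(new Text("IOException"), new IntWritable(1))
                .runTest();
    }
}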
package com.sky.mr.wordcount;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
import java.io.IOException;
public class WordcountReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
IntWritable v = new IntWritable();
@Override
protected void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
// Sum the counts for this group of identical keys
int sum = 0;
for (IntWritable count : values) {
sum += count.get();
}
// Emit the total for this key
v.set(sum);
context.write(key, v);
}
}
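The reducer can be verified the same way; again a minimal MRUnit sketch with an illustrative class name, feeding one key with the grouped values the shuffle phase would deliver:
package com.sky.mr.wordcount;
import java.util.Arrays;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mrunit.mapreduce.ReduceDriver;
import org.junit.Test;
public class WordcountReducerTest {
    @Test
    public void sumsGroupedCounts() throws Exception {
        // The shuffle groups all (import, 1) pairs under one key;
        // the reducer should emit (import, 4)
        ReduceDriver.newReduceDriver(new WordcountReducer())
                .withInput(new Text("import"), Arrays.asList(
                        new IntWritable(1), new IntWritable(1),
                        new IntWritable(1), new IntWritable(1)))
                .withOutput(new Text("import"), new IntWritable(4))
                .runTest();
    }
}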
package com.sky.mr.wordcount;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import java.io.IOException;
public class WordcountDriver {
public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
// 1. Get the configuration and the job object
Configuration conf = new Configuration();
Job job = Job.getInstance(conf);
// 2. Set the jar path
job.setJarByClass(WordcountDriver.class);
// 3. Associate the custom mapper and reducer classes
job.setMapperClass(WordcountMapper.class);
job.setReducerClass(WordcountReducer.class);
// 4. Set the map output key and value types
job.setMapOutputKeyClass(Text.class);
job.setMapOutputValueClass(IntWritable.class);
// 5. Set the final output key and value types
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(IntWritable.class);
// 6. Set the input and output paths
FileInputFormat.setInputPaths(job, new Path(args[0]));
FileOutputFormat.setOutputPath(job, new Path(args[1]));
// 7. Submit the job object, which encapsulates the MapReduce run parameters, to the YARN cluster
boolean result = job.waitForCompletion(true);
System.exit(result ? 0 : 1);
}
}
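Once the three classes are packaged into a jar, the job is submitted with Hadoop's standard jar runner. The jar name and HDFS paths below are placeholders; args[0] and args[1] in the driver receive the last two arguments:

hadoop jar wordcount.jar com.sky.mr.wordcount.WordcountDriver /input/wordcount /output/wordcount

Note that the output directory must not exist before the run; FileOutputFormat refuses to overwrite an existing path and fails the job.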
Sample input (a plain text file with space-separated words):
import org apache hadoop io
import org apache hadoop io
import org apache hadoop
import java io IOException

創(chuàng)新互聯(lián)致力于互聯(lián)網(wǎng)網(wǎng)站建設(shè)與網(wǎng)站營(yíng)銷(xiāo),提供成都做網(wǎng)站、成都網(wǎng)站設(shè)計(jì)、網(wǎng)站開(kāi)發(fā)、seo優(yōu)化、網(wǎng)站排名、互聯(lián)網(wǎng)營(yíng)銷(xiāo)、重慶小程序開(kāi)發(fā)、公眾號(hào)商城、等建站開(kāi)發(fā),創(chuàng)新互聯(lián)網(wǎng)站建設(shè)策劃專(zhuān)家,為不同類(lèi)型的客戶(hù)提供良好的互聯(lián)網(wǎng)應(yīng)用定制解決方案,幫助客戶(hù)在新的全球化互聯(lián)網(wǎng)環(huán)境中保持優(yōu)勢(shì)。
Sample output (keys reach the reducer in sorted order, and Text compares bytes, so the uppercase IOException sorts before the lowercase words):
IOException 1
apache 3
hadoop 3
import 4
io 3
java 1
org 3
網(wǎng)頁(yè)標(biāo)題:MapReduce單詞統(tǒng)計(jì)
URL網(wǎng)址:http://chinadenli.net/article40/gdgeho.html
成都網(wǎng)站建設(shè)公司_創(chuàng)新互聯(lián),為您提供網(wǎng)站導(dǎo)航、面包屑導(dǎo)航、網(wǎng)站內(nèi)鏈、服務(wù)器托管、網(wǎng)站設(shè)計(jì)公司、企業(yè)網(wǎng)站制作
聲明:本網(wǎng)站發(fā)布的內(nèi)容(圖片、視頻和文字)以用戶(hù)投稿、用戶(hù)轉(zhuǎn)載內(nèi)容為主,如果涉及侵權(quán)請(qǐng)盡快告知,我們將會(huì)在第一時(shí)間刪除。文章觀點(diǎn)不代表本網(wǎng)站立場(chǎng),如需處理請(qǐng)聯(lián)系客服。電話:028-86922220;郵箱:631063699@qq.com。內(nèi)容未經(jīng)允許不得轉(zhuǎn)載,或轉(zhuǎn)載時(shí)需注明來(lái)源: 創(chuàng)新互聯(lián)