// hadoop-wordcount demo

package com.billstudy.hdfs;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * 单词统计
 * @author Bill
 * @since V1.0 2015年4月19日 - 上午11:11:06
 */
/**
 * Word count: counts the occurrences of each word in the input files
 * using a classic Hadoop MapReduce map/reduce pair.
 *
 * @author Bill
 * @since V1.0 2015-04-19
 */
public class WordCount {

	/** HDFS directory containing the input files to count. */
	private static final Path INPUT_PATH = new Path("hdfs://hadoop01:9000/word_count_in/");

	/** HDFS output directory; must not already exist when the job starts. */
	private static final Path OUTPUT_PATH = new Path("hdfs://hadoop01:9000/word_count_out4/");

	public static void main(String[] args) throws Exception {
		Configuration configuration = new Configuration();

		// 1. Create the job with a readable name.
		// Job.getInstance replaces the deprecated new Job(conf, name) constructor.
		Job job = Job.getInstance(configuration, WordCount.class.getSimpleName());

		// Ship the jar containing this class so worker nodes can load the
		// mapper/reducer classes (required when running on a real cluster).
		job.setJarByClass(WordCount.class);

		// 2. Input files.
		FileInputFormat.setInputPaths(job, INPUT_PATH);

		// 3. Output directory.
		FileOutputFormat.setOutputPath(job, OUTPUT_PATH);

		// 4. Mapper.
		job.setMapperClass(WordCountMapper.class);

		// 5. Reducer.
		job.setReducerClass(WordCountReduce.class);

		// 6. Reducer output types. These also serve as the map output types,
		// since no separate map output key/value classes are configured.
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(LongWritable.class);

		// Block until the job finishes; exit non-zero on failure so shell
		// scripts can detect it.
		int exitCode = job.waitForCompletion(true) ? 0 : 1;
		System.err.println("process finished, exit code is : " + exitCode);
		System.exit(exitCode);
	}

	/**
	 * Mapper: receives one line of the input per call (key = byte offset of
	 * the line within the file, value = the line text) and emits
	 * (word, 1) for every word in the line.
	 *
	 * @author Bill
	 * @since V1.0 2015-04-19
	 */
	static class WordCountMapper extends Mapper<LongWritable, Text, Text, LongWritable> {

		/** Constant count of one, reused for every emitted word. */
		private static final LongWritable DEFAULT_ONE_LONG = new LongWritable(1L);

		/** Reused output key to avoid allocating a new Text for every word. */
		private final Text outputWord = new Text();

		@Override
		protected void map(LongWritable key, Text value, Context context)
				throws IOException, InterruptedException {
			// Split on runs of whitespace rather than a single space so that
			// consecutive spaces/tabs do not produce empty "words".
			String[] words = value.toString().split("\\s+");
			for (String word : words) {
				if (word.isEmpty()) {
					continue; // leading whitespace yields one empty token
				}
				outputWord.set(word);
				context.write(outputWord, DEFAULT_ONE_LONG);
			}
		}
	}

	/**
	 * Reducer: sums the per-word counts emitted by the mapper and writes
	 * (word, total) to the output.
	 *
	 * @author Bill
	 * @since V1.0 2015-04-19
	 */
	static class WordCountReduce extends Reducer<Text, LongWritable, Text, LongWritable> {

		@Override
		protected void reduce(Text key, Iterable<LongWritable> values, Context context)
				throws IOException, InterruptedException {
			long times = 0L;
			for (LongWritable count : values) {
				times += count.get();
			}
			context.write(key, new LongWritable(times));
		}
	}
}

// Copyright notice: this is the blog author's original article; do not republish without permission.