Hadoop Practice (2): MapReduce Programming


MapReduce programming, using WordCount as the example: counting the occurrences of each word in a set of text files.

Create a Java project in Eclipse and add the JARs under Hadoop's lib directory, along with the JARs in the Hadoop installation directory, to the project's build path.

1. Create the WordCount class:

package org.scf.wordcount;

import java.io.IOException;
import java.util.*;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.util.*;

public class WordCount {

    // Mapper: emits (word, 1) for every whitespace-separated token in each input line.
    public static class Map extends MapReduceBase
            implements Mapper<LongWritable, Text, Text, IntWritable> {

        private final static IntWritable one = new IntWritable(1);
        private Text word = new Text();

        public void map(LongWritable key, Text value,
                        OutputCollector<Text, IntWritable> output, Reporter reporter)
                throws IOException {
            String line = value.toString();
            StringTokenizer tokenizer = new StringTokenizer(line);
            while (tokenizer.hasMoreTokens()) {
                word.set(tokenizer.nextToken());
                output.collect(word, one);
            }
        }
    }

    // Reducer (also used as the combiner): sums the counts emitted for each word.
    public static class Reduce extends MapReduceBase
            implements Reducer<Text, IntWritable, Text, IntWritable> {

        public void reduce(Text key, Iterator<IntWritable> values,
                           OutputCollector<Text, IntWritable> output, Reporter reporter)
                throws IOException {
            int sum = 0;
            while (values.hasNext()) {
                sum += values.next().get();
            }
            output.collect(key, new IntWritable(sum));
        }
    }

    public static void main(String[] args) throws Exception {
        JobConf conf = new JobConf(WordCount.class);
        conf.setJobName("wordcount");

        conf.setOutputKeyClass(Text.class);
        conf.setOutputValueClass(IntWritable.class);

        conf.setMapperClass(Map.class);
        conf.setCombinerClass(Reduce.class);  // combiner pre-aggregates map output locally
        conf.setReducerClass(Reduce.class);

        conf.setInputFormat(TextInputFormat.class);
        conf.setOutputFormat(TextOutputFormat.class);

        FileInputFormat.setInputPaths(conf, new Path(args[0]));
        FileOutputFormat.setOutputPath(conf, new Path(args[1]));

        JobClient.runJob(conf);
    }
}
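The class above uses the older org.apache.hadoop.mapred API. Hadoop 1.0.4 also ships the newer org.apache.hadoop.mapreduce API; purely for reference, a minimal sketch of the same job in that style could look like the following (the class names WordCountNew, TokenizerMapper and IntSumReducer are illustrative and not part of this article's code):

package org.scf.wordcount;

import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class WordCountNew {

    // Same tokenizing mapper, written against the new Mapper base class.
    public static class TokenizerMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
        private final static IntWritable one = new IntWritable(1);
        private Text word = new Text();

        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            StringTokenizer tokenizer = new StringTokenizer(value.toString());
            while (tokenizer.hasMoreTokens()) {
                word.set(tokenizer.nextToken());
                context.write(word, one);
            }
        }
    }

    // Same summing reducer, also usable as the combiner.
    public static class IntSumReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable val : values) {
                sum += val.get();
            }
            context.write(key, new IntWritable(sum));
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = new Job(conf, "wordcount");  // Job.getInstance(conf, ...) in later Hadoop versions
        job.setJarByClass(WordCountNew.class);
        job.setMapperClass(TokenizerMapper.class);
        job.setCombinerClass(IntSumReducer.class);
        job.setReducerClass(IntSumReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}

The build, packaging and run steps below work the same way for either version; only the class name passed to hadoop jar changes.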


2. Compile and run the class

# compile the class against the Hadoop core jar
cd /home/Hadoop/
mkdir wordcount_classes

javac -classpath /usr/hadoop-1.0.4/hadoop-core-1.0.4.jar -d /home/Hadoop/wordcount_classes WordCount.java

# package the compiled classes into a jar
jar -cvf /home/Hadoop/wordcount.jar -C /home/Hadoop/wordcount_classes/ .

# upload two test files to HDFS as the job input
hadoop dfs -put /home/Hadoop/test.txt /user/root/wordcount/input/file2
hadoop dfs -put /home/Hadoop/test1.txt /user/root/wordcount/input/file3

# run the job; the arguments are the HDFS input and output directories
hadoop jar /home/Hadoop/wordcount.jar org.scf.wordcount.WordCount /user/root/wordcount/input /user/root/wordcount/output

# inspect the result
hadoop dfs -ls /user/root/wordcount/output
hadoop dfs -cat /user/root/wordcount/output/part-00000
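Each line of part-00000 holds a word and its count separated by a tab (TextOutputFormat's default key/value separator), with the words in sorted key order. As a purely hypothetical illustration, if the two uploaded files contained the lines "Hello Hadoop" and "Hello World", the cat command would print:

Hadoop	1
Hello	2
World	1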












