本机的环境如下:

Eclipse 3.6

Hadoop-0.20.2

Hive-0.5.0-dev

1. 安装hadoop-0.20.2-eclipse-plugin的插件。注意:Hadoop目录中的\hadoop-0.20.2\contrib\eclipse-plugin\hadoop-0.20.2-eclipse-plugin.jar在Eclipse3.6下有问题,无法在 Hadoop Server上运行,可以从http://code.google.com/p/hadoop-eclipse-plugin/下载

2. 选择Map/Reduce视图:window ->  open pers.. ->  other.. ->  map/reduce

3. 增加DFS Locations:点击Map/Reduce Locations---> New Hadoop Location,填写对应的host和port

Map/Reduce Master:   
Host: 10.10.xx.xx 
Port: 9001   
DFS Master:   
Host: 10.10.xx.xx(选中 User M/R Master host即可)   
Port: 9000   
User name: root

更改Advance parameters 中的 hadoop.job.ugi, 默认是 DrWho,Tardis, 改成:root,Tardis。如果看不到选项,则使用Eclipse -clean重启Eclipse 
否则,可能会报错org.apache.hadoop.security.AccessControlException

4. 设置本机的Host:

10.10.xx.xx zw-hadoop-master. zw-hadoop-master   
   
#注意后面需要还有一个zw-hadoop-master.,否则运行Map/Reduce时会报错:   
java.lang.IllegalArgumentException: Wrong FS: hdfs://zw-hadoop-master:9000/user/root/oplog/out/_temporary/_attempt_201008051742_0135_m_000007_0, expected: hdfs://zw-hadoop-master.:9000   
    at org.apache.hadoop.fs.FileSystem.checkPath(FileSystem.java:352)   

5. 新建一个Map/Reduce Project,新建Mapper,Reducer,Driver类,注意,自动生成的代码是基于老版本的Hadoop,自己修改:

package com.sohu.hadoop.test;   
   
import java.util.StringTokenizer;   
import org.apache.hadoop.io.IntWritable;   
import org.apache.hadoop.io.Text;   
import org.apache.hadoop.mapreduce.Mapper;   
   
public class MapperTest extends Mapper<Object, Text, Text, IntWritable> {   
    private final static IntWritable one = new IntWritable(1);   
   
    public void map(Object key, Text value, Context context)   
            throws IOException, InterruptedException {   
        String userid = value.toString().split("[|]")[2];   
        context.write(new Text(userid), new IntWritable(1));   
    }   
}   
   
   
package com.sohu.hadoop.test;   
   
import java.io.IOException;   
import org.apache.hadoop.io.IntWritable;   
import org.apache.hadoop.io.Text;   
import org.apache.hadoop.mapreduce.Reducer;   
   
public class ReducerTest extends Reducer<Text, IntWritable, Text, IntWritable> {   
       
    private IntWritable result = new IntWritable();   
   
    public void reduce(Text key, Iterable<IntWritable> values, Context context)   
            throws IOException, InterruptedException {   
        int sum = 0;   
        for (IntWritable val : values) {   
            sum += val.get();   
        }   
        result.set(sum);   
        context.write(key, result);   
    }   
}   
   
   
package com.sohu.hadoop.test;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

/**
 * Job driver: wires MapperTest/ReducerTest into a MapReduce job that counts
 * records per user id and writes gzip-compressed output.
 *
 * Usage: DriverTest &lt;in&gt; &lt;out&gt;
 */
public class DriverTest {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        String[] otherArgs = new GenericOptionsParser(conf, args)
                .getRemainingArgs();
        if (otherArgs.length != 2) {
            System.err.println("Usage: DriverTest <in> <out>");
            System.exit(2);
        }

        // BUG FIX: compression must be configured BEFORE the Job is created.
        // Job's constructor takes a snapshot copy of the Configuration, so
        // mutations made to `conf` afterwards are silently ignored and the
        // output would never actually be compressed.
        conf.setBoolean("mapred.output.compress", true);
        conf.setClass("mapred.output.compression.codec", GzipCodec.class,
                CompressionCodec.class);

        Job job = new Job(conf, "Driver Test");
        job.setJarByClass(DriverTest.class);
        job.setMapperClass(MapperTest.class);
        // Summing is associative/commutative, so the reducer doubles as a
        // combiner to cut shuffle traffic.
        job.setCombinerClass(ReducerTest.class);
        job.setReducerClass(ReducerTest.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
        FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}

6. 在DriverTest上,点击Run As ---> Run on Hadoop,选择对应的Hadoop Location即可