package org.apache.hadoop.examples;

import java.io.IOException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class WordCount2 {

    public static void main(String[] args)
            throws IOException, ClassNotFoundException, InterruptedException {
        Job job = Job.getInstance();
        job.setJobName("WordCount2");
        job.setJarByClass(WordCount2.class);
        job.setMapperClass(doMapper.class);
        job.setReducerClass(doReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(NullWritable.class);
        Path in = new Path("hdfs://localhost:9000/user/hadoop/output2/part-r-00000");
        Path out = new Path("hdfs://localhost:9000/user/hadoop/output3");
        FileInputFormat.addInputPath(job, in);
        FileOutputFormat.setOutputPath(job, out);
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }

    public static class doMapper extends Mapper<Object, Text, Text, NullWritable> {
        public static Text word = new Text();

        @Override
        protected void map(Object key, Text value, Context context)
                throws IOException, InterruptedException {
            String line = value.toString();
            // Space-separated fields: arr[0]=ip, arr[1]=timestamp, arr[2]=traffic, arr[3]=type/id.
            String[] arr = line.split(" ");
            String[] arrtime = arr[1].split("/");          // dd / MMM / yyyy:HH:mm:ss
            String[] arrtimedate = arrtime[2].split(":");  // yyyy, HH, mm, ss
            // Map the month name to its number; only "Nov" is handled here,
            // so any other month is left as 0.
            int i = 0;
            if (arrtime[1].equals("Nov")) {
                i = 11;
            }
            // Rebuild the timestamp as yyyy-M-dd HH:mm:ss.
            String time = arrtimedate[0] + "-" + i + "-" + arrtime[0] + " "
                    + arrtimedate[1] + ":" + arrtimedate[2] + ":" + arrtimedate[3];
            String[] type = arr[3].split("/");             // e.g. video/12345 -> [video, 12345]
            word.set(arr[0] + " " + time + " " + arrtime[0] + " "
                    + arr[2] + " " + type[0] + " " + type[1]);
            context.write(word, NullWritable.get());
        }
    }

    // With NullWritable values, identical output lines share one key, so the
    // reducer writes each distinct record exactly once (deduplication).
    public static class doReducer extends Reducer<Text, NullWritable, Text, NullWritable> {
        @Override
        protected void reduce(Text key, Iterable<NullWritable> values, Context context)
                throws IOException, InterruptedException {
            context.write(key, NullWritable.get());
        }
    }
}
Run result:
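To make the parsing above concrete, here is a minimal, self-contained sketch of the same field handling applied to a single record. The sample line is hypothetical (the post does not show the raw log format), but it follows the layout the mapper expects: ip, timestamp, traffic, type/id.

public class MapperParseDemo {
    public static void main(String[] args) {
        // Hypothetical input record in the assumed layout.
        String line = "10.0.0.1 18/Nov/2016:06:25:24 5024 video/12345";
        String[] arr = line.split(" ");
        String[] arrtime = arr[1].split("/");           // ["18", "Nov", "2016:06:25:24"]
        String[] arrtimedate = arrtime[2].split(":");   // ["2016", "06", "25", "24"]
        int i = arrtime[1].equals("Nov") ? 11 : 0;      // only November is mapped
        String time = arrtimedate[0] + "-" + i + "-" + arrtime[0] + " "
                + arrtimedate[1] + ":" + arrtimedate[2] + ":" + arrtimedate[3];
        String[] type = arr[3].split("/");
        System.out.println(arr[0] + " " + time + " " + arrtime[0] + " "
                + arr[2] + " " + type[0] + " " + type[1]);
        // Prints: 10.0.0.1 2016-11-18 06:25:24 18 5024 video 12345
    }
}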
package org.apache.hadoop.examples;

import java.io.IOException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class WordCount3 {

    public static void main(String[] args)
            throws IOException, ClassNotFoundException, InterruptedException {
        Job job = Job.getInstance();
        job.setJobName("WordCount3");
        job.setJarByClass(WordCount3.class);
        job.setMapperClass(doMapper.class);
        job.setReducerClass(doReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(NullWritable.class);
        // Input is the previous job's output; the output must go to a fresh
        // directory (output4 is an assumed name), because a MapReduce job
        // cannot write into a directory that already exists, let alone its input.
        Path in = new Path("hdfs://localhost:9000/user/hadoop/output3/part-r-00000");
        Path out = new Path("hdfs://localhost:9000/user/hadoop/output4");
        FileInputFormat.addInputPath(job, in);
        FileOutputFormat.setOutputPath(job, out);
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }

    // The mapper and reducer repeat the same parsing and deduplication as in
    // WordCount2; note the parsing assumes its input lines still match the
    // raw log layout (ip timestamp traffic type/id).
    public static class doMapper extends Mapper<Object, Text, Text, NullWritable> {
        public static Text word = new Text();

        @Override
        protected void map(Object key, Text value, Context context)
                throws IOException, InterruptedException {
            String line = value.toString();
            String[] arr = line.split(" ");
            String[] arrtime = arr[1].split("/");
            String[] arrtimedate = arrtime[2].split(":");
            int i = 0;
            if (arrtime[1].equals("Nov")) {
                i = 11;
            }
            String time = arrtimedate[0] + "-" + i + "-" + arrtime[0] + " "
                    + arrtimedate[1] + ":" + arrtimedate[2] + ":" + arrtimedate[3];
            String[] type = arr[3].split("/");
            word.set(arr[0] + " " + time + " " + arrtime[0] + " "
                    + arr[2] + " " + type[0] + " " + type[1]);
            context.write(word, NullWritable.get());
        }
    }

    public static class doReducer extends Reducer<Text, NullWritable, Text, NullWritable> {
        @Override
        protected void reduce(Text key, Iterable<NullWritable> values, Context context)
                throws IOException, InterruptedException {
            context.write(key, NullWritable.get());
        }
    }
}
Run result:
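One operational note: FileOutputFormat requires that the output directory not exist when the job starts, so rerunning either job fails until the previous output is removed. A minimal sketch of clearing it programmatically with the FileSystem API, assuming the same hdfs://localhost:9000 namenode and directory names as above:

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ClearOutputDir {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Assumed to match the namenode and output directory used above.
        Path out = new Path("hdfs://localhost:9000/user/hadoop/output4");
        FileSystem fs = FileSystem.get(URI.create("hdfs://localhost:9000"), conf);
        if (fs.exists(out)) {
            fs.delete(out, true); // true = delete recursively
        }
    }
}

Running this (or the equivalent hdfs dfs -rm -r command) before resubmitting the job makes reruns repeatable.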