流程图
1363157985066 13726230503 00-FD-07-A4-72-B8:CMCC 120.196.100.82 i02.c.aliimg.com 24 27 2481 24681 200 1363157995052 13826544101 5C-0E-8B-C7-F1-E0:CMCC 120.197.40.4 4 0 264 0 200 1363157991076 13926435656 20-10-7A-28-CC-0A:CMCC 120.196.100.99 2 4 132 1512 200 1363154400022 13926251106 5C-0E-8B-8B-B1-50:CMCC 120.197.40.4 4 0 240 0 200 1363157993044 18211575961 94-71-AC-CD-E6-18:CMCC-EASY 120.196.100.99 iface.qiyi.com 视频网站 15 12 1527 2106 200 1363157995074 84138413 5C-0E-8B-8C-E8-20:7DaysInn 120.197.40.4 122.72.52.12 20 16 4116 1432 200 1363157993055 13560439658 C4-17-FE-BA-DE-D9:CMCC 120.196.100.99 18 15 1116 954 200 1363157995033 15920133257 5C-0E-8B-C7-BA-20:CMCC 120.197.40.4 sug.so.360.cn 信息安全 20 20 3156 2936 200 1363157983019 13719199419 68-A1-B7-03-07-B1:CMCC-EASY 120.196.100.82 4 0 240 0 200 1363157984041 13660577991 5C-0E-8B-92-5C-20:CMCC-EASY 120.197.40.4 s19.cnzz.com 站点统计 24 9 6960 690 200 1363157973098 15013685858 5C-0E-8B-C7-F7-90:CMCC 120.197.40.4 rank.ie.sogou.com 搜索引擎 28 27 3659 3538 200 1363157986029 15989002119 E8-99-C4-4E-93-E0:CMCC-EASY 120.196.100.99 www.umeng.com 站点统计 3 3 1938 180 200 1363157992093 13560439658 C4-17-FE-BA-DE-D9:CMCC 120.196.100.99 15 9 918 4938 200 1363157986041 13480253104 5C-0E-8B-C7-FC-80:CMCC-EASY 120.197.40.4 3 3 180 180 200 1363157984040 13602846565 5C-0E-8B-8B-B6-00:CMCC 120.197.40.4 2052.flash2-http.qq.com 综合门户 15 12 1938 2910 200 1363157995093 13922314466 00-FD-07-A2-EC-BA:CMCC 120.196.100.82 img.qfc.cn 12 12 3008 3720 200 1363157982040 13502468823 5C-0A-5B-6A-0B-D4:CMCC-EASY 120.196.100.99 y0.ifengimg.com 综合门户 57 102 7335 110349 200 1363157986072 18320173382 84-25-DB-4F-10-1A:CMCC-EASY 120.196.100.99 input.shouji.sogou.com 搜索引擎 21 18 9531 2412 200 1363157990043 13925057413 00-1F-64-E1-E6-9A:CMCC 120.196.100.55 t3.baidu.com 搜索引擎 69 63 11058 48243 200 1363157988072 13760778710 00-FD-07-A4-7B-08:CMCC 120.196.100.82 2 2 120 120 200 1363157985066 13726238888 00-FD-07-A4-72-B8:CMCC 120.196.100.82 i02.c.aliimg.com 24 27 2481 
24681 200 1363157993055 13560436666 C4-17-FE-BA-DE-D9:CMCC 120.196.100.99 18 15 1116 954 200
思路:map阶段:将每一行按tab切分成各字段,提取其中的手机号作为输出key,流量信息封装到FlowBean对象中,作为输出的value
要点:自定义类型如何实现Hadoop的序列化接口
FlowBean:这种自定义数据类型必须实现Hadoop的序列化接口:Writable
实现其中的两个方法:
1.readFields(in)——反序列化方法
2.write(out)——序列化方法
reduce阶段:遍历一组数据的全部value(flowbean),进行累加,然后以手机号作为key输出,以总流量信息bean作为value输出。
1.FlowBean
import org.apache.hadoop.io.Writable; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; /** * 本案例功能:演示自定义数据类型如何实现Hadoop的序列化接口 * 1,该类必定要保留空参构造器 * 2.write方法中输出字段二进制数据的顺序要与readFiles方法读取数据的顺序一致 */ public class FlowBean implements Writable { private int upFlow; private int dFlow; private String phone; private int amountFlow; public int getUpFlow() { return upFlow; } public void setUpFlow(int upFlow) { this.upFlow = upFlow; } public int getdFlow() { return dFlow; } public void setdFlow(int dFlow) { this.dFlow = dFlow; } public int getAmountFlow() { return amountFlow; } public void setAmountFlow(int amountFlow) { this.amountFlow = amountFlow; } public FlowBean() { } public FlowBean(int upFlow, int dFlow,String phone) { this.upFlow = upFlow; this.dFlow = dFlow; this.phone=phone; this.amountFlow=upFlow+dFlow; } /** * hadoop 系统在序列化该类的对象时要调用得方法 * @param dataOutput * @throws IOException */ public void write(DataOutput dataOutput) throws IOException { dataOutput.writeInt(upFlow); dataOutput.writeUTF(phone); dataOutput.writeInt(dFlow); dataOutput.writeInt(amountFlow); } /** * hadoop系统在反序列化时要调用的方法 * @param dataInput * @throws IOException */ public void readFields(DataInput dataInput) throws IOException { this.upFlow=dataInput.readInt(); this.phone=dataInput.readUTF(); this.dFlow=dataInput.readInt(); this.amountFlow=dataInput.readInt(); } @Override public String toString() { return this.upFlow+","+this.dFlow+","+this.amountFlow; } }
2.FlowCountMapper
import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Mapper; import java.io.IOException; public class FlowCountMapper extends Mapper<LongWritable, Text, Text, FlowBean> { @Override protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException { String line = value.toString(); String[] fields = line.split("\t"); String phone = fields[1]; int upFlow=Integer.parseInt(fields[fields.length-3]); int dFlow=Integer.parseInt(fields[fields.length-2]); context.write(new Text(phone),new FlowBean(upFlow,dFlow,phone)); } }
3.FlowCountReduce
import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Reducer; import java.io.IOException; public class FlowCountReduce extends Reducer<Text,FlowBean,Text,FlowBean> { /** * * @param key:手机号 * @param values:某个手机号所产生的全部访问记录中的流量数据 * @param context * @throws IOException * @throws InterruptedException */ @Override protected void reduce(Text key, Iterable<FlowBean> values, Context context) throws IOException, InterruptedException { int upSum=0; int dSum=0; for(FlowBean value:values){ upSum +=value.getUpFlow(); dSum +=value.getdFlow(); } context.write(key,new FlowBean(upSum,dSum,key.toString())); } }
4.JobSubmitter
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * Configures and submits the flow-count MapReduce job.
 *
 * Input/output paths may be supplied as the first and second command-line
 * arguments; when omitted, the original hard-coded local paths are used,
 * so existing invocations keep working.
 */
public class JobSubmitter {

    /** Defaults preserved from the original hard-coded paths. */
    private static final String DEFAULT_INPUT = "F:\\mrdata\\flow\\input";
    private static final String DEFAULT_OUTPUT = "F:\\mrdata\\flow\\output";

    public static void main(String[] args) throws Exception {
        String inputPath = args.length > 0 ? args[0] : DEFAULT_INPUT;
        String outputPath = args.length > 1 ? args[1] : DEFAULT_OUTPUT;

        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);
        job.setJarByClass(JobSubmitter.class);

        job.setMapperClass(FlowCountMapper.class);
        job.setReducerClass(FlowCountReduce.class);

        // Map output and final output use the same key/value types here.
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(FlowBean.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(FlowBean.class);

        FileInputFormat.setInputPaths(job, new Path(inputPath));
        FileOutputFormat.setOutputPath(job, new Path(outputPath));

        boolean res = job.waitForCompletion(true);
        System.exit(res ? 0 : -1);
    }
}
5.JobSubmitter程序运行统计结果【手机号 上行流量 下行流量 总流量】
13480253104 180,180,360 13502468823 7335,110349,117684 13560436666 1116,954,2070 13560439658 2034,5892,7926 13602846565 1938,2910,4848 13660577991 6960,690,7650 13719199419 240,0,240 13726230503 2481,24681,27162 13726238888 2481,24681,27162 13760778710 120,120,240 13826544101 264,0,264 13922314466 3008,3720,6728 13925057413 11058,48243,59301 13926251106 240,0,240 13926435656 132,1512,1644 15013685858 3659,3538,7197 15920133257 3156,2936,6092 15989002119 1938,180,2118 18211575961 1527,2106,3633 18320173382 9531,2412,11943 84138413 4116,1432,5548
流程图
代码实现
1.ProvinceParttioner 自定义分区算法
/**
 * Custom partitioner consumed by each MapTask: {@link #getPartition}
 * decides which reduce task receives a given &lt;phone, FlowBean&gt; pair,
 * based on the phone-number prefix (standing in for the province).
 */
public class ProvinceParttioner extends Partitioner<Text, FlowBean> {

    /** Partition index for phone prefixes not present in the table. */
    private static final int DEFAULT_PARTITION = 5;

    /** Phone-number prefix -> partition index (mock data); immutable after init. */
    private static final HashMap<String, Integer> codeMap = new HashMap<String, Integer>();

    static {
        codeMap.put("135", 0);
        codeMap.put("136", 1);
        codeMap.put("137", 2);
        codeMap.put("138", 3);
        codeMap.put("139", 4);
    }

    @Override
    public int getPartition(Text key, FlowBean value, int numPartitions) {
        // Look up the first three digits of the phone number; unknown
        // prefixes all fall into the catch-all partition.
        Integer code = codeMap.get(key.toString().substring(0, 3));
        return code == null ? DEFAULT_PARTITION : code;
    }
}
2.如果不指定,默认使用HashPartitioner进行分区
3.修改JobSubmitter,指定自定义分区算法
/**
 * Configures and submits the flow-count job with per-province partitioning.
 *
 * Input/output paths may be supplied as the first and second command-line
 * arguments; when omitted, the original hard-coded local paths are used,
 * so existing invocations keep working.
 */
public class JobSubmitter {

    /**
     * ProvinceParttioner produces 6 distinct partition indices (0-5),
     * so the job needs 6 reduce tasks to receive them.
     */
    private static final int NUM_PARTITIONS = 6;

    /** Defaults preserved from the original hard-coded paths. */
    private static final String DEFAULT_INPUT = "F:\\mrdata\\flow\\input";
    private static final String DEFAULT_OUTPUT = "F:\\mrdata\\flow\\province-output";

    public static void main(String[] args) throws Exception {
        String inputPath = args.length > 0 ? args[0] : DEFAULT_INPUT;
        String outputPath = args.length > 1 ? args[1] : DEFAULT_OUTPUT;

        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);
        job.setJarByClass(JobSubmitter.class);

        job.setMapperClass(FlowCountMapper.class);
        job.setReducerClass(FlowCountReduce.class);

        // Tell each map task which partitioning logic to use when
        // distributing its output; HashPartitioner is the default otherwise.
        job.setPartitionerClass(ProvinceParttioner.class);

        // One reduce task per partition the custom partitioner can emit.
        job.setNumReduceTasks(NUM_PARTITIONS);

        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(FlowBean.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(FlowBean.class);

        FileInputFormat.setInputPaths(job, new Path(inputPath));
        FileOutputFormat.setOutputPath(job, new Path(outputPath));

        boolean res = job.waitForCompletion(true);
        System.exit(res ? 0 : -1);
    }
}
结果输出
part-r-00000
13502468823 7335,110349,117684 13560436666 1116,954,2070 13560439658 2034,5892,7926
part-r-00001
13602846565 1938,2910,4848 13660577991 6960,690,7650
part-r-00002
13719199419 240,0,240 13726230503 2481,24681,27162 13726238888 2481,24681,27162 13760778710 120,120,240
part-r-00003
13826544101 264,0,264
part-r-00004
13922314466 3008,3720,6728 13925057413 11058,48243,59301 13926251106 240,0,240 13926435656 132,1512,1644
part-r-00005
13480253104 180,180,360 15013685858 3659,3538,7197 15920133257 3156,2936,6092 15989002119 1938,180,2118 18211575961 1527,2106,3633 18320173382 9531,2412,11943 84138413 4116,1432,5548