EduCoder (头歌): Shared-Bicycle Data Analysis


Level 1: Average daily usage time of shared bicycles

package com.educoder.bigData.sharedbicycle;
 
import java.io.IOException;
import java.text.ParseException;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Scanner;
import java.math.RoundingMode;
import java.math.BigDecimal;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.commons.lang3.time.FastDateFormat;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.Tool;
 
import com.educoder.bigData.util.HBaseUtil;
 
/**
 * Compute the average daily usage time of shared bicycles.
 */
public class AveragetTimeMapReduce extends Configured implements Tool {
 
	public static final byte[] family = "info".getBytes();
 
	public static class MyMapper extends TableMapper<Text, BytesWritable> {
		protected void map(ImmutableBytesWritable rowKey, Result result, Context context)
				throws IOException, InterruptedException {
			/********** Begin *********/
			// Read the trip's start and end timestamps (epoch millis stored as strings).
			long beginTime = Long.parseLong(Bytes.toString(result.getValue(family, "beginTime".getBytes())));
			long endTime = Long.parseLong(Bytes.toString(result.getValue(family, "endTime".getBytes())));
			// Emit a single key ("avgTime") with "date_duration" as the value.
			String format = DateFormatUtils.format(beginTime, "yyyy-MM-dd", Locale.CHINA);
			long useTime = endTime - beginTime;
			BytesWritable bytesWritable = new BytesWritable(Bytes.toBytes(format + "_" + useTime));
			context.write(new Text("avgTime"), bytesWritable);
			/********** End *********/
		}
	}
 
	public static class MyTableReducer extends TableReducer<Text, BytesWritable, ImmutableBytesWritable> {
		@Override
		public void reduce(Text key, Iterable<BytesWritable> values, Context context)
				throws IOException, InterruptedException {
			/********** Begin *********/
			// Accumulate total usage time per day, keyed by date.
			double sum = 0;
			int length = 0;
			Map<String, Long> map = new HashMap<String, Long>();
			for (BytesWritable value : values) {
				String[] split = Bytes.toString(value.copyBytes()).split("_");
				if (map.containsKey(split[0])) {
					map.put(split[0], map.get(split[0]) + Long.parseLong(split[1]));
				} else {
					map.put(split[0], Long.parseLong(split[1]));
				}
			}
			// Average the daily totals and convert milliseconds to seconds.
			Collection<Long> dailyTotals = map.values();
			for (Long i : dailyTotals) {
				length++;
				sum += i;
			}
			BigDecimal decimal = new BigDecimal(sum / length / 1000);
			BigDecimal setScale = decimal.setScale(2, RoundingMode.HALF_DOWN);
			Put put = new Put(Bytes.toBytes(key.toString()));
			put.addColumn(family, "avgTime".getBytes(), Bytes.toBytes(setScale.toString()));
			context.write(null, put);
			/********** End *********/
		}
 
	}
 
	public int run(String[] args) throws Exception {
		// Configure the job
		Configuration conf = HBaseUtil.conf;
		// Scanner sc = new Scanner(System.in);
		// String arg1 = sc.next();
		// String arg2 = sc.next();
		String arg1 = "t_shared_bicycle";
		String arg2 = "t_bicycle_avgtime";
		try {
			HBaseUtil.createTable(arg2, new String[] { "info" });
		} catch (Exception e) {
			// Table creation failed (it may already exist)
			e.printStackTrace();
		}
		Job job = configureJob(conf, new String[] { arg1, arg2 });
		return job.waitForCompletion(true) ? 0 : 1;
	}
 
	private Job configureJob(Configuration conf, String[] args) throws IOException {
		String tablename = args[0];
		String targetTable = args[1];
		Job job = Job.getInstance(conf, tablename);
		Scan scan = new Scan();
		scan.setCaching(300);
		scan.setCacheBlocks(false); // never enable block caching in a MapReduce scan
		// Initialize the table mapper
		TableMapReduceUtil.initTableMapperJob(tablename, scan, MyMapper.class, Text.class, BytesWritable.class, job);
		// Initialize the table reducer
		TableMapReduceUtil.initTableReducerJob(targetTable, // output table
				MyTableReducer.class, // reducer class
				job);
		job.setNumReduceTasks(1);
		return job;
	}
}
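
All five solutions implement Tool but define no main method; the EduCoder grader presumably supplies its own driver. As a minimal sketch of how such a job could be launched (JobDriver is a hypothetical class name, not part of the exercise scaffolding), Hadoop's ToolRunner does the work:

package com.educoder.bigData.sharedbicycle;

import org.apache.hadoop.util.ToolRunner;

public class JobDriver {
	public static void main(String[] args) throws Exception {
		// ToolRunner parses generic Hadoop options, then delegates to run(args).
		int exitCode = ToolRunner.run(new AveragetTimeMapReduce(), args);
		System.exit(exitCode);
	}
}

The same one-liner works for the other four classes below; only the Tool instance changes.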

Level 2: Average daily number of shared-bicycle uses at a specified location

package com.educoder.bigData.sharedbicycle;

import java.io.IOException;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Scanner;

import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.filter.SubstringComparator;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.Tool;

import com.educoder.bigData.util.HBaseUtil;

/**
 * Average daily number of shared-bicycle uses at 韩庄村 (Hanzhuang Village).
 */
public class AverageVehicleMapReduce extends Configured implements Tool {

    public static final byte[] family = "info".getBytes();

    public static class MyMapper extends TableMapper<Text, BytesWritable> {

        protected void map(ImmutableBytesWritable rowKey, Result result, Context context)
                throws IOException, InterruptedException {
            /********** Begin *********/
            // The scan filter has already narrowed rows to trips from 河北省保定市雄县
            // to 韩庄村, so just emit each trip's start date.
            String beginTime = Bytes.toString(result.getValue(family, "beginTime".getBytes()));
            String format = DateFormatUtils.format(Long.parseLong(beginTime), "yyyy-MM-dd", Locale.CHINA);
            BytesWritable bytesWritable = new BytesWritable(Bytes.toBytes(format));
            context.write(new Text("河北省保定市雄县-韩庄村"), bytesWritable);
            /********** End *********/
        }
    }

    public static class MyTableReducer extends TableReducer<Text, BytesWritable, ImmutableBytesWritable> {

        @Override
        public void reduce(Text key, Iterable<BytesWritable> values, Context context)
                throws IOException, InterruptedException {
            /********** Begin *********/
            // Count trips per day, then average the daily counts.
            double sum = 0;
            int length = 0;
            Map<String, Integer> map = new HashMap<String, Integer>();
            for (BytesWritable value : values) {
                String day = Bytes.toString(value.copyBytes());
                if (map.containsKey(day)) {
                    map.put(day, map.get(day) + 1);
                } else {
                    map.put(day, Integer.valueOf(1));
                }
            }
            Collection<Integer> dailyCounts = map.values();
            for (Integer i : dailyCounts) {
                length++;
                sum += i;
            }
            BigDecimal decimal = new BigDecimal(sum / length);
            BigDecimal setScale = decimal.setScale(2, RoundingMode.HALF_DOWN);
            Put put = new Put(Bytes.toBytes(key.toString()));
            put.addColumn(family, "avgNum".getBytes(), Bytes.toBytes(setScale.toString()));
            context.write(null, put);
            /********** End *********/
        }
    }

    public int run(String[] args) throws Exception {
        // Configure the job
        Configuration conf = HBaseUtil.conf;
        // Scanner sc = new Scanner(System.in);
        // String arg1 = sc.next();
        // String arg2 = sc.next();
        String arg1 = "t_shared_bicycle";
        String arg2 = "t_bicycle_avgnum";
        try {
            HBaseUtil.createTable(arg2, new String[] { "info" });
        } catch (Exception e) {
            // Table creation failed (it may already exist)
            e.printStackTrace();
        }
        Job job = configureJob(conf, new String[] { arg1, arg2 });
        return job.waitForCompletion(true) ? 0 : 1;
    }

    private Job configureJob(Configuration conf, String[] args) throws IOException {
        String tablename = args[0];
        String targetTable = args[1];
        Job job = Job.getInstance(conf, tablename);
        Scan scan = new Scan();
        scan.setCaching(300);
        scan.setCacheBlocks(false); // never enable block caching in a MapReduce scan
        /********** Begin *********/
        // Restrict the scan to trips departing 河北省保定市雄县 and arriving at 韩庄村.
        ArrayList<Filter> listForFilters = new ArrayList<Filter>();
        Filter destinationFilter = new SingleColumnValueFilter(Bytes.toBytes("info"), Bytes.toBytes("destination"),
                CompareOperator.EQUAL, new SubstringComparator("韩庄村"));
        Filter departure = new SingleColumnValueFilter(Bytes.toBytes("info"), Bytes.toBytes("departure"),
                CompareOperator.EQUAL, Bytes.toBytes("河北省保定市雄县"));
        listForFilters.add(departure);
        listForFilters.add(destinationFilter);
        Filter filters = new FilterList(listForFilters);
        scan.setFilter(filters);
        /********** End *********/
        // Initialize the table mapper
        TableMapReduceUtil.initTableMapperJob(tablename, scan, MyMapper.class, Text.class, BytesWritable.class, job);
        // Initialize the table reducer
        TableMapReduceUtil.initTableReducerJob(targetTable, // output table
                MyTableReducer.class, // reducer class
                job);
        job.setNumReduceTasks(1);
        return job;
    }
}
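
One caveat with SingleColumnValueFilter: rows that do not contain the tested column pass the filter by default. If the t_shared_bicycle data could contain rows without a departure or destination column, you would want to drop them explicitly; a sketch of the adjustment:

        SingleColumnValueFilter departure = new SingleColumnValueFilter(Bytes.toBytes("info"),
                Bytes.toBytes("departure"), CompareOperator.EQUAL, Bytes.toBytes("河北省保定市雄县"));
        // By default, rows missing the column are let through; this excludes them instead.
        departure.setFilterIfMissing(true);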

Level 3: Average idle time per use for a specified bicycle

package com.educoder.bigData.sharedbicycle;

import java.io.IOException;
import java.math.BigDecimal;
import java.math.RoundingMode;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.Tool;

import com.educoder.bigData.util.HBaseUtil;

/**
 * Average idle time between uses of shared bicycle 5996.
 */
public class FreeTimeMapReduce extends Configured implements Tool {

    public static final byte[] family = "info".getBytes();

    public static class MyMapper extends TableMapper<Text, BytesWritable> {

        protected void map(ImmutableBytesWritable rowKey, Result result, Context context)
                throws IOException, InterruptedException {
            /********** Begin *********/
            // The scan filter keeps only trips of bicycle 5996; emit "beginTime_endTime".
            long beginTime = Long.parseLong(Bytes.toString(result.getValue(family, "beginTime".getBytes())));
            long endTime = Long.parseLong(Bytes.toString(result.getValue(family, "endTime".getBytes())));
            BytesWritable bytesWritable = new BytesWritable(Bytes.toBytes(beginTime + "_" + endTime));
            context.write(new Text("5996"), bytesWritable);
            /********** End *********/
        }
    }

    public static class MyTableReducer extends TableReducer<Text, BytesWritable, ImmutableBytesWritable> {

        @Override
        public void reduce(Text key, Iterable<BytesWritable> values, Context context)
                throws IOException, InterruptedException {
            /********** Begin *********/
            // Idle time is the gap between one trip's end and the next trip's start.
            // This assumes the values arrive newest-first, so each gap is the
            // previously seen begin time minus the current record's end time.
            long freeTime = 0;
            long beginTime = 0;
            int length = 0;
            for (BytesWritable time : values) {
                String[] split = Bytes.toString(time.copyBytes()).split("_");
                if (beginTime == 0) {
                    // First record: just remember its begin time.
                    beginTime = Long.parseLong(split[0]);
                    continue;
                } else {
                    freeTime = freeTime + beginTime - Long.parseLong(split[1]);
                    beginTime = Long.parseLong(split[0]);
                    length++;
                }
            }
            // Average gap converted from milliseconds to hours (integer division).
            Put put = new Put(Bytes.toBytes(key.toString()));
            BigDecimal decimal = new BigDecimal(freeTime / length / 1000 / 60 / 60);
            BigDecimal setScale = decimal.setScale(2, RoundingMode.HALF_DOWN);
            put.addColumn(family, "freeTime".getBytes(), Bytes.toBytes(setScale.toString()));
            context.write(null, put);
            /********** End *********/
        }
    }

    public int run(String[] args) throws Exception {
        // Configure the job
        Configuration conf = HBaseUtil.conf;
        // Scanner sc = new Scanner(System.in);
        // String arg1 = sc.next();
        // String arg2 = sc.next();
        String arg1 = "t_shared_bicycle";
        String arg2 = "t_bicycle_freetime";
        try {
            HBaseUtil.createTable(arg2, new String[] { "info" });
        } catch (Exception e) {
            // Table creation failed (it may already exist)
            e.printStackTrace();
        }
        Job job = configureJob(conf, new String[] { arg1, arg2 });
        return job.waitForCompletion(true) ? 0 : 1;
    }

    private Job configureJob(Configuration conf, String[] args) throws IOException {
        String tablename = args[0];
        String targetTable = args[1];
        Job job = Job.getInstance(conf, tablename);
        Scan scan = new Scan();
        scan.setCaching(300);
        scan.setCacheBlocks(false); // never enable block caching in a MapReduce scan
        /********** Begin *********/
        // Keep only rows whose bicycleId is 5996.
        Filter filter = new SingleColumnValueFilter(Bytes.toBytes("info"), Bytes.toBytes("bicycleId"),
                CompareOperator.EQUAL, Bytes.toBytes("5996"));
        scan.setFilter(filter);
        /********** End *********/
        // Initialize the table mapper
        TableMapReduceUtil.initTableMapperJob(tablename, scan, MyMapper.class, Text.class, BytesWritable.class, job);
        // Initialize the table reducer
        TableMapReduceUtil.initTableReducerJob(targetTable, // output table
                MyTableReducer.class, // reducer class
                job);
        job.setNumReduceTasks(1);
        return job;
    }
}
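
Note that freeTime / length / 1000 / 60 / 60 performs integer division, so the average is truncated to whole hours before setScale runs; that matches the original answer. If fractional hours were wanted instead, a BigDecimal division would keep them (a sketch, not the graded answer):

            // Average idle gap in hours, keeping two decimal places.
            BigDecimal hours = BigDecimal.valueOf(freeTime)
                    .divide(BigDecimal.valueOf(length * 3600000L), 2, RoundingMode.HALF_DOWN);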

Level 4: Number of shared-bicycle uses within a specified time period

package com.educoder.bigData.sharedbicycle;

import java.io.IOException;
import java.util.ArrayList;

import org.apache.commons.lang3.time.FastDateFormat;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.Tool;

import com.educoder.bigData.util.HBaseUtil;

/**
 * Count of shared-bicycle uses in a specified time window.
 */
public class UsageRateMapReduce extends Configured implements Tool {

    public static final byte[] family = "info".getBytes();

    public static class MyMapper extends TableMapper<Text, IntWritable> {

        protected void map(ImmutableBytesWritable rowKey, Result result, Context context)
                throws IOException, InterruptedException {
            /********** Begin *********/
            // Every row that passes the scan's time-window filter counts as one use.
            IntWritable one = new IntWritable(1);
            context.write(new Text("departure"), one);
            /********** End *********/
        }
    }

    public static class MyTableReducer extends TableReducer<Text, IntWritable, ImmutableBytesWritable> {

        @Override
        public void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            /********** Begin *********/
            // Sum the ones emitted by the mapper.
            int totalNum = 0;
            for (IntWritable num : values) {
                totalNum += num.get();
            }
            Put put = new Put(Bytes.toBytes(key.toString()));
            put.addColumn(family, "usageRate".getBytes(), Bytes.toBytes(String.valueOf(totalNum)));
            context.write(null, put);
            /********** End *********/
        }
    }

    public int run(String[] args) throws Exception {
        // Configure the job
        Configuration conf = HBaseUtil.conf;
        // Scanner sc = new Scanner(System.in);
        // String arg1 = sc.next();
        // String arg2 = sc.next();
        String arg1 = "t_shared_bicycle";
        String arg2 = "t_bicycle_usagerate";
        try {
            HBaseUtil.createTable(arg2, new String[] { "info" });
        } catch (Exception e) {
            // Table creation failed (it may already exist)
            e.printStackTrace();
        }
        Job job = configureJob(conf, new String[] { arg1, arg2 });
        return job.waitForCompletion(true) ? 0 : 1;
    }

    private Job configureJob(Configuration conf, String[] args) throws IOException {
        String tablename = args[0];
        String targetTable = args[1];
        Job job = Job.getInstance(conf, tablename);
        ArrayList<Filter> listForFilters = new ArrayList<Filter>();
        FastDateFormat instance = FastDateFormat.getInstance("yyyy-MM-dd");
        Scan scan = new Scan();
        scan.setCaching(300);
        scan.setCacheBlocks(false); // never enable block caching in a MapReduce scan
        /********** Begin *********/
        // Keep only trips that start on or after 2017-08-01 and end on or before
        // 2017-09-01.
        try {
            Filter beginTimeFilter = new SingleColumnValueFilter(Bytes.toBytes("info"), Bytes.toBytes("beginTime"),
                    CompareOperator.GREATER_OR_EQUAL,
                    Bytes.toBytes(String.valueOf(instance.parse("2017-08-01").getTime())));
            Filter endTimeFilter = new SingleColumnValueFilter(Bytes.toBytes("info"), Bytes.toBytes("endTime"),
                    CompareOperator.LESS_OR_EQUAL,
                    Bytes.toBytes(String.valueOf(instance.parse("2017-09-01").getTime())));
            listForFilters.add(endTimeFilter);
            listForFilters.add(beginTimeFilter);
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        }
        Filter filters = new FilterList(listForFilters);
        scan.setFilter(filters);
        /********** End *********/
        // Initialize the table mapper
        TableMapReduceUtil.initTableMapperJob(tablename, scan, MyMapper.class, Text.class, IntWritable.class, job);
        // Initialize the table reducer
        TableMapReduceUtil.initTableReducerJob(targetTable, // output table
                MyTableReducer.class, // reducer class
                job);
        job.setNumReduceTasks(1);
        return job;
    }
}
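
The two filters compare epoch-millisecond timestamps as strings because that is how t_shared_bicycle stores them; byte-wise comparison agrees with numeric order only while the strings have equal length, which holds for 13-digit 2017 timestamps. Since this job only needs a single total, a Hadoop counter could also produce it (though the exercise writes its result to t_bicycle_usagerate); a sketch, with made-up group and counter names:

            // Inside MyMapper.map(): tally matching rows with a job counter.
            context.getCounter("bicycle", "ridesInWindow").increment(1);

            // After job.waitForCompletion(true), read the tally back in the driver:
            long rides = job.getCounters().findCounter("bicycle", "ridesInWindow").getValue();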

Level 5: Shared-bicycle route traffic statistics

package com.educoder.bigData.sharedbicycle;
 
import java.io.IOException;
 
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.Tool;
 
import com.educoder.bigData.util.HBaseUtil;
 
/**
 * Shared-bicycle route traffic statistics.
 */
public class LineTotalMapReduce extends Configured implements Tool {
 
	public static final byte[] family = "info".getBytes();
 
	public static class MyMapper extends TableMapper<Text, IntWritable> {
		protected void map(ImmutableBytesWritable rowKey, Result result, Context context)
				throws IOException, InterruptedException {
			/********** Begin *********/
			// Build a route key from start/stop coordinates plus place names,
			// then emit a 1 for each trip on that route.
			String start_latitude = Bytes.toString(result.getValue(family, "start_latitude".getBytes()));
			String start_longitude = Bytes.toString(result.getValue(family, "start_longitude".getBytes()));
			String stop_latitude = Bytes.toString(result.getValue(family, "stop_latitude".getBytes()));
			String stop_longitude = Bytes.toString(result.getValue(family, "stop_longitude".getBytes()));
			String departure = Bytes.toString(result.getValue(family, "departure".getBytes()));
			String destination = Bytes.toString(result.getValue(family, "destination".getBytes()));
			IntWritable one = new IntWritable(1);
			context.write(new Text(start_latitude + "-" + start_longitude + "_" + stop_latitude + "-"
					+ stop_longitude + "_" + departure + "-" + destination), one);
			/********** End *********/
		}
	}
 
	public static class MyTableReducer extends TableReducer<Text, IntWritable, ImmutableBytesWritable> {
		@Override
		public void reduce(Text key, Iterable<IntWritable> values, Context context)
				throws IOException, InterruptedException {
			/********** Begin *********/
			// Sum the per-trip ones for each route.
			int totalNum = 0;
			for (IntWritable num : values) {
				totalNum += num.get();
			}
			// Note: the count is appended to the route string to form the row key.
			Put put = new Put(Bytes.toBytes(key.toString() + totalNum));
			put.addColumn(family, "lineTotal".getBytes(), Bytes.toBytes(String.valueOf(totalNum)));
			context.write(null, put);
			/********** End *********/
		}
 
	}
 
	public int run(String[] args) throws Exception {
		// Configure the job
		Configuration conf = HBaseUtil.conf;
		// Scanner sc = new Scanner(System.in);
		// String arg1 = sc.next();
		// String arg2 = sc.next();
		String arg1 = "t_shared_bicycle";
		String arg2 = "t_bicycle_linetotal";
		try {
			HBaseUtil.createTable(arg2, new String[] { "info" });
		} catch (Exception e) {
			// Table creation failed (it may already exist)
			e.printStackTrace();
		}
		Job job = configureJob(conf, new String[] { arg1, arg2 });
		return job.waitForCompletion(true) ? 0 : 1;
	}
 
	private Job configureJob(Configuration conf, String[] args) throws IOException {
		String tablename = args[0];
		String targetTable = args[1];
		Job job = Job.getInstance(conf, tablename);
		Scan scan = new Scan();
		scan.setCaching(300);
		scan.setCacheBlocks(false); // never enable block caching in a MapReduce scan
		// Initialize the table mapper
		TableMapReduceUtil.initTableMapperJob(tablename, scan, MyMapper.class, Text.class, IntWritable.class, job);
		// Initialize the table reducer
		TableMapReduceUtil.initTableReducerJob(targetTable, // output table
				MyTableReducer.class, // reducer class
				job);
		job.setNumReduceTasks(1);
		return job;
	}
}
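
To spot-check any of the five result tables, a plain client-side scan is enough; a sketch reusing the jobs' HBaseUtil.conf (Connection, ConnectionFactory, ResultScanner, Table, and TableName come from the org.apache.hadoop.hbase packages):

	try (Connection conn = ConnectionFactory.createConnection(HBaseUtil.conf);
			Table table = conn.getTable(TableName.valueOf("t_bicycle_linetotal"));
			ResultScanner scanner = table.getScanner(new Scan())) {
		for (Result r : scanner) {
			// Row key is route + count; the lineTotal column holds the count.
			System.out.println(Bytes.toString(r.getRow()) + " -> "
					+ Bytes.toString(r.getValue("info".getBytes(), "lineTotal".getBytes())));
		}
	}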
