├── PM25city.txt ├── 空气质量分析报告.pdf ├── 需求及数据说明.txt ├── AQI ├── AQIReducer.java ├── IntWritableDecreasingComparator.java ├── AQIMapper.java ├── AQIRunner.java ├── CityAQISort.java └── AQIBean.java ├── AQIIndex ├── TextArrayWritable.java ├── AQIIndexMapper.java ├── AQIIndexRunner.java └── AQIIndexReducer.java ├── AQIClassify ├── AQIClassfyMapper.java ├── AQIClassfyRunner.java └── AQIClassfyReducer.java └── README.md /PM25city.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Mingyang-ZHAI/AQI_analysis/HEAD/PM25city.txt -------------------------------------------------------------------------------- /空气质量分析报告.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Mingyang-ZHAI/AQI_analysis/HEAD/空气质量分析报告.pdf -------------------------------------------------------------------------------- /需求及数据说明.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Mingyang-ZHAI/AQI_analysis/HEAD/需求及数据说明.txt -------------------------------------------------------------------------------- /AQI/AQIReducer.java: -------------------------------------------------------------------------------- 1 | 2 | package AQI; 3 | 4 | import java.io.IOException; 5 | import org.apache.hadoop.io.Text; 6 | import org.apache.hadoop.mapreduce.Reducer; 7 | 8 | public class AQIReducer extends Reducer{ 9 | 10 | @Override 11 | protected void reduce(Text key, Iterable values, Context context) throws IOException, InterruptedException { 12 | 13 | double IAQI_sum = 0; 14 | double count = 0; 15 | for (AQIBean value : values) { 16 | IAQI_sum += value.getIAQI(); 17 | count += 1; 18 | } 19 | double IAQI_average = IAQI_sum / count; 20 | 21 | context.write(new Text(key), new Text(String.valueOf(IAQI_average))); 22 | } 23 | } 24 | -------------------------------------------------------------------------------- 
// ============ AQI/IntWritableDecreasingComparator.java ============
package AQI;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;

/**
 * Sort comparator that negates the natural ascending order so job output is
 * sorted in decreasing key order.
 *
 * NOTE(review): AQIRunner's map-output key is {@code Text}, so this now
 * extends {@code Text.Comparator}. The original extended the deprecated
 * {@code UTF8.Comparator}, whose raw-byte compare assumes UTF8's 2-byte
 * length framing and mis-reads Text's vint-framed serialized keys.
 */
public class IntWritableDecreasingComparator extends Text.Comparator {

    /** Object-level compare: negate the ascending result. */
    @Override
    @SuppressWarnings("rawtypes")
    public int compare(WritableComparable a, WritableComparable b) {
        return -super.compare(a, b);
    }

    /** Raw-byte compare used during the shuffle sort: negate the ascending result. */
    @Override
    public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
        return -super.compare(b1, s1, l1, b2, s2, l2);
    }
}

// ======================== AQI/AQIMapper.java ========================
package AQI;

import java.io.IOException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

/**
 * Emits (city, AQIBean) for every CSV data row; the bean derives the PM2.5
 * sub-index (IAQI) from the concentration in column 3.
 */
public class AQIMapper extends Mapper<LongWritable, Text, Text, AQIBean> {

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // Skip the CSV header line, which sits at byte offset 0.
        // NOTE(review): this only skips the header when the header is the
        // first line of the (single) input split — confirm for split inputs.
        if (key.get() == 0) {
            return;
        }
        String[] fields = value.toString().split(",");
        double pm25 = Double.parseDouble(fields[3]);  // PM2.5 concentration
        String city = fields[16];                     // city name (last column)
        context.write(new Text(city), new AQIBean(pm25, city));
    }
}

// ================== AQIIndex/TextArrayWritable.java ==================
package AQIIndex;

import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

/**
 * ArrayWritable specialization holding Text elements, with a comma-joined
 * toString() suitable for TextOutputFormat.
 */
public class TextArrayWritable extends ArrayWritable {

    public TextArrayWritable() {
        super(Text.class);
    }

    public TextArrayWritable(String[] strings) {
        super(Text.class);
        Text[] texts = new Text[strings.length];
        for (int i = 0; i < strings.length; i++) {
            texts[i] = new Text(strings[i]);
        }
        set(texts);
    }

    /**
     * Returns the stored elements as a Text[].
     *
     * BUG FIX: the original returned a private, always-empty {@code Text[]}
     * field that {@code set()} never touched; it now reflects the values
     * actually held by ArrayWritable.
     */
    public Text[] getMyValue() {
        Writable[] raw = get();
        Text[] out = new Text[raw == null ? 0 : raw.length];
        for (int i = 0; i < out.length; i++) {
            out[i] = (Text) raw[i];
        }
        return out;
    }

    /**
     * Comma-joined element values.
     *
     * BUG FIX: the original iterated the empty private field, so toString()
     * always returned "".
     */
    @Override
    public String toString() {
        return String.join(",", toStrings());
    }
}

// ================ AQIClassify/AQIClassfyMapper.java ================
package AQIClassify;

import java.io.IOException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

/**
 * Filters rows to February 2019 for Beijing / Shanghai / Chengdu (the
 * Spring-Festival window) and emits (city, "&lt;date&gt; &lt;AQI&gt;") pairs
 * for the level-distribution reducer.
 */
public class AQIClassfyMapper extends Mapper<LongWritable, Text, Text, Text> {

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // Skip the CSV header line at byte offset 0.
        if (key.get() == 0) {
            return;
        }
        String[] fields = value.toString().split(",");
        double aqi = Double.parseDouble(fields[10]);  // hourly AQI
        String year = fields[12];
        String month = fields[13];
        String day = fields[14];
        // Unpadded concatenation is unambiguous here because the filter
        // below fixes month to "2".
        String date = year + month + day;
        String city = fields[16];

        // Restrict to February 2019 and the three target cities.
        if ("2019".equals(year) && "2".equals(month)
                && ("北京".equals(city) || "上海".equals(city) || "成都".equals(city))) {
            context.write(new Text(city), new Text(date + " " + aqi));
        }
    }
}

// ================ AQIClassify/AQIClassfyRunner.java ================
package AQIClassify;

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.log4j.BasicConfigurator;

/** Driver for the AQI level-distribution job (mapper + reducer above). */
public class AQIClassfyRunner {

    public static void main(String[] args)
            throws IOException, InterruptedException, ClassNotFoundException {
        BasicConfigurator.configure();  // simple log4j console logging
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);
        job.setJarByClass(AQIClassfyRunner.class);

        job.setMapperClass(AQIClassfyMapper.class);
        job.setReducerClass(AQIClassfyReducer.class);

        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);

        // NOTE(review): hard-coded HDFS paths; consider taking them from args.
        FileInputFormat.setInputPaths(job,
                new Path("hdfs://192.168.137.192:9000/user/root/AQIinput"));
        FileOutputFormat.setOutputPath(job,
                new Path("hdfs://192.168.137.192:9000/user/root/AQICountoutput"));

        job.waitForCompletion(true);
    }
}
// ======================== AQI/AQIRunner.java ========================
package AQI;

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.log4j.BasicConfigurator;

/**
 * Driver for the per-city PM2.5 IAQI comparison job
 * (AQIMapper -> AQIReducer), sorted by the decreasing-order comparator.
 */
public class AQIRunner {

    public static void main(String[] args)
            throws IOException, InterruptedException, ClassNotFoundException {
        BasicConfigurator.configure();  // simple log4j console logging
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);
        job.setJarByClass(AQIRunner.class);

        job.setMapperClass(AQIMapper.class);
        job.setReducerClass(AQIReducer.class);

        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(AQIBean.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);

        // NOTE(review): the map-output key is Text, so the sort comparator
        // must compare Text's serialized form; confirm that
        // IntWritableDecreasingComparator's base class matches Text encoding.
        job.setSortComparatorClass(IntWritableDecreasingComparator.class);

        // NOTE(review): hard-coded HDFS paths; consider taking them from args.
        FileInputFormat.setInputPaths(job,
                new Path("hdfs://192.168.137.192:9000/user/root/AQIinput"));
        FileOutputFormat.setOutputPath(job,
                new Path("hdfs://192.168.137.192:9000/user/root/AQIoutput10"));

        job.waitForCompletion(true);
    }
}

// ================== AQIIndex/AQIIndexMapper.java ==================
package AQIIndex;

import java.io.IOException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

/**
 * Emits (city, "&lt;date&gt;\tPM2.5\tPM10\tNO2\tSO2\tO3\tCO") for every data
 * row, feeding the comprehensive-index reducer.
 *
 * FIX: removed the unused {@code com.sun.corba.se.spi.orb.StringPair} import —
 * an internal JDK API that is unavailable on modern JDKs — together with the
 * other unused imports.
 */
public class AQIIndexMapper extends Mapper<LongWritable, Text, Text, Text> {

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // Skip the CSV header line at byte offset 0.
        if (key.get() == 0) {
            return;
        }
        String[] fields = value.toString().split(",");
        String year = fields[12];
        String month = fields[13];
        String day = fields[14];
        // NOTE(review): unpadded date concatenation can collide in general
        // (e.g. month 1 day 11 vs month 11 day 1); it appears safe for this
        // dataset's Aug-2018 .. Jun-2019 range — confirm.
        String date = year + month + day;
        String city = fields[16];
        String pm25 = fields[3];
        String pm10 = fields[4];
        String no2 = fields[5];
        String so2 = fields[6];
        String o3 = fields[8];   // O3-8h column; fields[7] (O3-1h) is skipped
        String co = fields[9];

        String dateAir = date + "\t" + pm25 + "\t" + pm10 + "\t" + no2
                + "\t" + so2 + "\t" + o3 + "\t" + co;
        context.write(new Text(city), new Text(dateAir));
    }
}
5 | */ 6 | package AQIIndex; 7 | 8 | import java.io.IOException; 9 | import org.apache.hadoop.conf.Configuration; 10 | import org.apache.hadoop.fs.Path; 11 | import org.apache.hadoop.io.Text; 12 | import org.apache.hadoop.mapreduce.Job; 13 | import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; 14 | import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; 15 | import org.apache.log4j.BasicConfigurator; 16 | 17 | 18 | public class AQIIndexRunner { 19 | public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException { 20 | 21 | BasicConfigurator.configure(); 22 | Configuration con = new Configuration(); 23 | Job job = Job.getInstance(con); 24 | job.setJarByClass(AQIIndexRunner.class); 25 | 26 | job.setMapperClass(AQIIndexMapper.class); 27 | job.setReducerClass(AQIIndexReducer.class); 28 | 29 | job.setMapOutputKeyClass(Text.class); 30 | job.setMapOutputValueClass(Text.class); 31 | 32 | job.setOutputKeyClass(Text.class); 33 | job.setOutputValueClass(Text.class); 34 | 35 | FileInputFormat.setInputPaths(job, new Path("hdfs://192.168.137.192:9000/user/root/AQIinput")); 36 | FileOutputFormat.setOutputPath(job, new Path("hdfs://192.168.137.192:9000/user/root/AQIIndexoutput")); 37 | 38 | job.waitForCompletion(true); 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /AQI/CityAQISort.java: -------------------------------------------------------------------------------- 1 | 2 | package AQI; 3 | 4 | import java.util.ArrayList; 5 | import java.util.Collections; 6 | import java.util.Comparator; 7 | import java.util.List; 8 | 9 | 10 | class CityAQISortTest{ 11 | 12 | public static void main(String[] args) { 13 | 14 | String[] cityArr = {"上海","乌鲁木齐","北京","厦门","呼和浩特","天津", 15 | "成都","昆明","济南","海口","郑州","青岛"}; 16 | Double[] AQIArr = {55.18360058440512,105.50207880024911,70.24525967238151,37.90453666222592, 17 | 
55.0762874278499,72.85804564495083,74.10958049198001,32.625129602441895,106.56531048165962, 18 | 22.56553826042011,119.8591981616064,78.55928168903378}; 19 | 20 | List list = new ArrayList(); 21 | for (int i=0;i() { 27 | @Override 28 | public int compare(CityAQI o1, CityAQI o2) { 29 | String s1 = o1.getcity(); 30 | String s2 = o2.getcity(); 31 | 32 | int temp = s1.compareTo(s2); 33 | 34 | if(temp != 0){ 35 | return temp; 36 | } 37 | 38 | double m1 = o1.getaqi(); 39 | double m2 = o2.getaqi(); 40 | 41 | return m2>m1?1:-1; 42 | } 43 | } 44 | 45 | System.out.println(list); 46 | } 47 | 48 | // 订单类 49 | public static class CityAQI{ 50 | 51 | public String city; 52 | public double aqi; 53 | 54 | public CityAQI(String city, double aqi) { 55 | this.city = city; 56 | this.aqi = aqi; 57 | } 58 | 59 | public String getcity() { 60 | return city; 61 | } 62 | 63 | public double getaqi() { 64 | return aqi; 65 | } 66 | 67 | @Override 68 | public String toString() { 69 | return "CityAQI{" + 70 | "city='" + city + '\'' + 71 | ", aqi=" + aqi + 72 | '}'; 73 | } 74 | } 75 | } 76 | -------------------------------------------------------------------------------- /AQI/AQIBean.java: -------------------------------------------------------------------------------- 1 | 2 | package AQI; 3 | 4 | import java.io.DataInput; 5 | import java.io.DataOutput; 6 | import java.io.IOException; 7 | import org.apache.hadoop.io.WritableComparable; 8 | 9 | public class AQIBean implements WritableComparable{ 10 | double PM25; 11 | double IAQI; 12 | String city; 13 | 14 | public AQIBean(){ 15 | } 16 | 17 | public AQIBean(double PM25, String city){ 18 | super(); 19 | this.PM25 = PM25; 20 | this.city = city; 21 | 22 | //数组初始化 23 | //pollutant_levels存储PM25浓度等级,IAQI_levels存储空气质量分指数IAQI等级 24 | int[] pollutant_levels = {0, 35, 75, 115, 150, 250, 350, 500}; 25 | int[] IAQI_levels = {0, 50, 100, 150, 200, 300, 400, 500}; 26 | 27 | //计算IAQI 28 | for (int i = 0; i < 7; i++){ 29 | if (PM25 > pollutant_levels[i] && PM25 < 
pollutant_levels[i+1]){ 30 | this.IAQI = (IAQI_levels[i+1] - IAQI_levels[i]) * (PM25 - pollutant_levels[i]) / (pollutant_levels[i+1] - pollutant_levels[i]) + IAQI_levels[i]; 31 | } 32 | } 33 | } 34 | 35 | public double getPM25() { 36 | return PM25; 37 | } 38 | 39 | public void setPM25(double PM25) { 40 | this.PM25 = PM25; 41 | } 42 | 43 | public double getIAQI() { 44 | return IAQI; 45 | } 46 | 47 | public void setIAQI(double IAQI) { 48 | this.IAQI = IAQI; 49 | } 50 | 51 | public String getCity() { 52 | return city; 53 | } 54 | 55 | public void setCity(String city) { 56 | this.city = city; 57 | } 58 | 59 | @Override 60 | public void write(DataOutput out) throws IOException { 61 | out.writeDouble(PM25); 62 | out.writeDouble(IAQI); 63 | out.writeUTF(city); 64 | } 65 | 66 | @Override 67 | public void readFields(DataInput in) throws IOException { 68 | PM25 = in.readDouble(); 69 | IAQI = in.readDouble(); 70 | city = in.readUTF(); 71 | } 72 | @Override 73 | public int compareTo(AQIBean a) { 74 | // TODO Auto-generated method stub 75 | return (this.IAQI > a.getIAQI())?-1:1; 76 | } 77 | 78 | @Override 79 | public String toString() { 80 | // TODO Auto-generated method stub 81 | return city+"\t"+IAQI; 82 | } 83 | 84 | } 85 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # AQI_analysis 2 | AQI air quality analysis is based on Hadoop MapReduce 3 | 4 | [TOC] 5 | 6 | ## Introduction 7 | 8 | Based on the MapReduce framework, analyze the air quality levels of each city. 9 | 10 | 11 | Research Objectives: 12 | 13 | 1. Using the Air Quality Sub Index (IAQI) of PM25 as a measurement indicator, compare the air quality levels of various cities from August 2018 to June 2019. 14 | 15 | 2. 
Taking Beijing, Shanghai, and Chengdu as examples, using AQI as the analysis indicator, **calculate the distribution of air quality levels** during the Spring Festival in these three cities. 16 | 17 | 3. **Construct a comprehensive air quality index system**, including all 12 cities in the analysis framework, and calculate a comprehensive score for each city. 18 | 19 | ## File Description: 20 | 21 | | File | Content | 22 | | -------------------- | ------------------------------ | 23 | | PM25city.txt | Dataset | 24 | | 需求及数据说明.txt | Requirement Description | 25 | | 空气质量分析报告.pdf | Analysis Report | 26 | | AQI | Comparison of air quality levels | 27 | | AQIClassify | Distribution of air quality levels | 28 | | AQIIndex | Air Quality Comprehensive Index System | 29 | 30 | 31 | 32 | ## Environment: 33 | 34 | > Hadoop Version: 2.6.5 35 | > 36 | > Virtual Machine Software: VMware 11.0.0 build-2305329 37 | > 38 | > Operating system: Linux master 2.6.32-504.el6.x86_64 #1 SMP Wed Oct 15 04:27:16 UTC 2014 x86_64 x86_64 x86_64 GNU/Linux 39 | > 40 | > Linux Version: CentOS release 6.6 (Final) 41 | > 42 | > Hadoop cluster: one master and two slave nodes (hostnames slaver1, slaver2) 43 | 44 | 45 | 46 | ## Data Description: 47 | This experiment mainly relies on air quality data, which is sourced from the internet and has a data scale of 400000 rows. The period is from 0:00 on August 1, 2018, to 23:00 on June 10, 2019, with 17 fields. The fields are station number, longitude, latitude, PM25, PM10, NO2, SO2, O3-1, O3-8h, CO, AQI, level, year, month, day, hour, and city. Fields are separated by ",". Among them, the "city" field is a string type; the rest are numerical data. 48 | 49 | In addition, the cities are Beijing, Shanghai, Tianjin, Qingdao, Jinan, Xiamen, Zhengzhou, Urumqi, Chengdu, Hohhot, Haikou, and Kunming, totalling 12 cities. Each city has a different number of air quality monitoring stations, with a collection frequency of one record per hour. 
// =============== AQIClassify/AQIClassfyReducer.java ===============
package AQIClassify;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

/**
 * For one city (the key), receives "&lt;date&gt; &lt;hourlyAQI&gt;" values
 * (restricted by the mapper to February 2019), averages the hourly AQI per
 * day, and counts how many days fall into each of the six national AQI level
 * bands: 0-50, 51-100, 101-150, 151-200, 201-300, &gt;300.
 *
 * NOTE(review): the original kept a 3x6 per-city count matrix and a
 * results[3] array, although each reduce() call only ever sees one city; it
 * also smuggled the daily average back into the hourly-readings list. Both
 * are replaced with a single per-call count array. The original's exact
 * output formatting was partially unreadable in this source; counts are
 * emitted tab-separated — confirm against the analysis report.
 */
public class AQIClassfyReducer extends Reducer<Text, Text, Text, Text> {

    @Override
    protected void reduce(Text key, Iterable<Text> values, Context context)
            throws IOException, InterruptedException {
        // date -> hourly AQI readings observed for that date
        HashMap<String, ArrayList<Double>> dateAqi = new HashMap<>();
        for (Text val : values) {
            String[] parts = val.toString().split(" ");  // [date, aqi]
            dateAqi.computeIfAbsent(parts[0], d -> new ArrayList<>())
                    .add(Double.parseDouble(parts[1]));
        }

        // Count days per AQI level band, judged by the daily mean AQI.
        int[] levelCount = new int[6];
        for (Map.Entry<String, ArrayList<Double>> entry : dateAqi.entrySet()) {
            double sum = 0;
            for (double aqi : entry.getValue()) {
                sum += aqi;
            }
            double dailyAvg = sum / entry.getValue().size();
            levelCount[levelOf(dailyAvg)]++;
        }

        StringBuilder result = new StringBuilder();
        for (int i = 0; i < levelCount.length; i++) {
            if (i > 0) {
                result.append('\t');
            }
            result.append(levelCount[i]);
        }
        context.write(key, new Text(result.toString()));
    }

    /** Maps a daily average AQI to its national level band index (0..5). */
    private static int levelOf(double aqi) {
        if (aqi <= 50) {
            return 0;
        } else if (aqi <= 100) {
            return 1;
        } else if (aqi <= 150) {
            return 2;
        } else if (aqi <= 200) {
            return 3;
        } else if (aqi <= 300) {
            return 4;
        } else {
            return 5;
        }
    }
}

// ================= AQIIndex/AQIIndexReducer.java =================
package AQIIndex;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

/**
 * Builds a comprehensive air-quality score for one city from its
 * "&lt;date&gt;\tPM2.5\tPM10\tNO2\tSO2\tO3\tCO" values: each pollutant's
 * daily mean concentration is divided by its daily limit (a single-item
 * index), the six indices are summed per day, and the daily sums are
 * averaged over all days.
 */
public class AQIIndexReducer extends Reducer<Text, Text, Text, Text> {

    // Daily concentration limits for PM2.5, PM10, NO2, SO2, O3, CO (in that
    // order, matching the mapper's value layout).
    private static final double[] LIMITS = {75.0, 150.0, 80.0, 150.0, 160.0, 4.0};

    @Override
    protected void reduce(Text key, Iterable<Text> values, Context context)
            throws IOException, InterruptedException {
        // date -> list of [PM2.5, PM10, NO2, SO2, O3, CO] hourly readings
        HashMap<String, ArrayList<double[]>> dateReadings = new HashMap<>();
        for (Text val : values) {
            String[] f = val.toString().split("\t");  // [date, 6 pollutants]
            double[] reading = new double[LIMITS.length];
            for (int j = 0; j < LIMITS.length; j++) {
                reading[j] = Double.parseDouble(f[j + 1]);
            }
            dateReadings.computeIfAbsent(f[0], d -> new ArrayList<>()).add(reading);
        }

        // Per day: average each pollutant over its hourly readings, divide by
        // the limit, and sum the six single-item indices.
        double totalDailyIndex = 0;
        for (Map.Entry<String, ArrayList<double[]>> entry : dateReadings.entrySet()) {
            ArrayList<double[]> readings = entry.getValue();
            double dayIndex = 0;
            for (int j = 0; j < LIMITS.length; j++) {
                double sum = 0;
                for (double[] r : readings) {
                    sum += r[j];
                }
                dayIndex += (sum / readings.size()) / LIMITS[j];
            }
            totalDailyIndex += dayIndex;
        }
        double score = dateReadings.isEmpty() ? 0 : totalDailyIndex / dateReadings.size();

        // BUG FIX / NOTE(review): the original's final loop assigned every
        // single-item index into the same scalar slot
        // (city_index[...] = ap_avg_index[t];) without accumulating, so the
        // emitted "comprehensive" score degenerated to the last pollutant's
        // index of the last date iterated. The aggregation above sums the six
        // indices per day and averages across days — confirm this matches the
        // index definition intended in the analysis report.
        context.write(key, new Text(String.valueOf(score)));
    }
}