├── .DS_Store
├── README.md
└── hadoop
    ├── .DS_Store
    ├── target
    │   ├── maven-status
    │   │   └── maven-compiler-plugin
    │   │       ├── testCompile
    │   │       │   └── default-testCompile
    │   │       │       ├── createdFiles.lst
    │   │       │       └── inputFiles.lst
    │   │       └── compile
    │   │           └── default-compile
    │   │               ├── createdFiles.lst
    │   │               └── inputFiles.lst
    │   ├── HW3-1.0-SNAPSHOT.jar
    │   ├── test-classes
    │   │   └── HW3
    │   │       └── AppTest.class
    │   ├── maven-archiver
    │   │   └── pom.properties
    │   └── surefire-reports
    │       ├── HW3.AppTest.txt
    │       └── TEST-HW3.AppTest.xml
    ├── src
    │   └── main
    │       └── java
    │           ├── parser
    │           │   ├── Parser.java
    │           │   └── ParserImpl.java
    │           ├── enums
    │           │   └── PageRankEnums.java
    │           ├── topk
    │           │   ├── TopK.java
    │           │   ├── TopKReducer.java
    │           │   └── TopKMapper.java
    │           ├── driver
    │           │   └── DriverProgram.java
    │           ├── pagerank
    │           │   ├── PageRankImpl.java
    │           │   ├── PageRankReducer.java
    │           │   └── PageRankMapper.java
    │           ├── model
    │           │   └── Node.java
    │           └── parserjob
    │               └── ParserJob.java
    ├── Readme.txt
    ├── pom.xml
    ├── Makefile
    └── HW3.iml
/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/manthanthakker/hadoop-page-rank/master/.DS_Store
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # PageRankMapReduce
2 | PageRank implementation for MapReduce in Hadoop and Apache Spark
3 |
--------------------------------------------------------------------------------
/hadoop/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/manthanthakker/hadoop-page-rank/master/hadoop/.DS_Store
--------------------------------------------------------------------------------
/hadoop/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/createdFiles.lst:
--------------------------------------------------------------------------------
1 | HW3/AppTest.class
2 |
--------------------------------------------------------------------------------
/hadoop/target/HW3-1.0-SNAPSHOT.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/manthanthakker/hadoop-page-rank/master/hadoop/target/HW3-1.0-SNAPSHOT.jar
--------------------------------------------------------------------------------
/hadoop/target/test-classes/HW3/AppTest.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/manthanthakker/hadoop-page-rank/master/hadoop/target/test-classes/HW3/AppTest.class
--------------------------------------------------------------------------------
/hadoop/target/maven-archiver/pom.properties:
--------------------------------------------------------------------------------
1 | #Generated by Maven
2 | #Sun Feb 25 13:59:30 EST 2018
3 | version=1.0-SNAPSHOT
4 | groupId=HW3
5 | artifactId=HW3
6 |
--------------------------------------------------------------------------------
/hadoop/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/inputFiles.lst:
--------------------------------------------------------------------------------
1 | /Users/trailbrazer/Desktop/MR/git/MR/MR/HW3/src/test/java/HW3/AppTest.java
2 |
--------------------------------------------------------------------------------
/hadoop/src/main/java/parser/Parser.java:
--------------------------------------------------------------------------------
1 | package parser;
2 |
3 | /**
4 | * @author Manthan Thakker
5 | * @project HW3
6 | * @date 2/19/18
7 | * @email thakker.m@husky.neu.edu
8 | */
9 | public interface Parser {
10 | }
11 |
--------------------------------------------------------------------------------
/hadoop/target/surefire-reports/HW3.AppTest.txt:
--------------------------------------------------------------------------------
1 | -------------------------------------------------------------------------------
2 | Test set: HW3.AppTest
3 | -------------------------------------------------------------------------------
4 | Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.011 sec
5 |
--------------------------------------------------------------------------------
/hadoop/Readme.txt:
--------------------------------------------------------------------------------
1 | 3 simple steps to run the job (uses Joe's file):
2 | 
3 | 1. Open the Makefile and configure:
4 | 
5 | local.input=   ### INPUT PATH LOCATION
6 | local.output=  ### OUTPUT PATH LOCATION
7 | 
8 | 2. Type make (with no target) on the terminal.
9 | 
10 | 3. The output folder topkresults will appear in the configured output location.
11 | 
--------------------------------------------------------------------------------
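
A note on invocation: the Makefile itself is not shown in this section, so as a minimal sketch, the pipeline can also be started directly from Java. The class below and its paths are illustrative placeholders, not project defaults:

    import driver.DriverProgram;

    /** Hypothetical entry point: runs parse -> pagerank -> top-k in one go. */
    public class LocalRun {
        public static void main(String[] args) throws Exception {
            // DriverProgram expects args[0] = input path, args[1] = working/output path.
            DriverProgram.main(new String[]{"local/input", "local/output"});
        }
    }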
/hadoop/src/main/java/enums/PageRankEnums.java:
--------------------------------------------------------------------------------
1 | package enums;
2 |
3 | /**
4 | * @author Manthan Thakker
5 | * @project HW3
6 | * @date 2/23/18
7 | * @email thakker.m@husky.neu.edu
8 | */
9 |
10 | /**
11 | * Global counters shared across the MapReduce jobs.
12 | */
13 | public enum PageRankEnums {
14 | UNIQUEPAGES,
15 | DANGLINGNODESNEW,
16 | K
17 | }
18 |
--------------------------------------------------------------------------------
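
These counters are incremented inside tasks and read back in the driver once a job completes. A minimal sketch of the driver-side read, assuming an already-finished Job (the helper class name is illustrative; PageRankImpl below uses the same pattern):

    import java.io.IOException;
    import org.apache.hadoop.mapreduce.Job;
    import enums.PageRankEnums;

    /** Illustrative helper: read an enum-backed global counter after a job finishes. */
    final class CounterRead {
        static long uniquePages(Job completedJob) throws IOException {
            return completedJob.getCounters()
                    .findCounter(PageRankEnums.UNIQUEPAGES).getValue();
        }
    }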
/hadoop/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst:
--------------------------------------------------------------------------------
1 | parserjob/ParserJob$ParserMapper.class
2 | model/Value.class
3 | HW3/App.class
4 | parser/ParserImpl$WikiParser.class
5 | parser/Parser.class
6 | topk/TopKMapper$1.class
7 | pagerank/PageRankImpl.class
8 | parserjob/ParserJob$ParserMapper$WikiParser.class
9 | parser/ParserImpl.class
10 | topk/TopKReducer$1.class
11 | pagerank/PageRankMapper.class
12 | Enums/PageRankEnums.class
13 | driver/DriverProgram.class
14 | pagerank/PageRankReducer.class
15 | pagerank/PageRank.class
16 | topk/TopKReducer.class
17 | parserjob/ParserJob.class
18 | topk/TopKMapper.class
19 | topk/TopK.class
20 |
--------------------------------------------------------------------------------
/hadoop/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst:
--------------------------------------------------------------------------------
1 | /Users/trailbrazer/Desktop/MR/git/MR/MR/HW3/src/main/java/pagerank/PageRank.java
2 | /Users/trailbrazer/Desktop/MR/git/MR/MR/HW3/src/main/java/pagerank/PageRankMapper.java
3 | /Users/trailbrazer/Desktop/MR/git/MR/MR/HW3/src/main/java/topk/TopKMapper.java
4 | /Users/trailbrazer/Desktop/MR/git/MR/MR/HW3/src/main/java/pagerank/PageRankReducer.java
5 | /Users/trailbrazer/Desktop/MR/git/MR/MR/HW3/src/main/java/parserjob/ParserJob.java
6 | /Users/trailbrazer/Desktop/MR/git/MR/MR/HW3/src/main/java/driver/DriverProgram.java
7 | /Users/trailbrazer/Desktop/MR/git/MR/MR/HW3/src/main/java/parser/Parser.java
8 | /Users/trailbrazer/Desktop/MR/git/MR/MR/HW3/src/main/java/topk/TopKReducer.java
9 | /Users/trailbrazer/Desktop/MR/git/MR/MR/HW3/src/main/java/topk/TopK.java
10 | /Users/trailbrazer/Desktop/MR/git/MR/MR/HW3/src/main/java/model/Value.java
11 | /Users/trailbrazer/Desktop/MR/git/MR/MR/HW3/src/main/java/parser/ParserImpl.java
12 | /Users/trailbrazer/Desktop/MR/git/MR/MR/HW3/src/main/java/Enums/PageRankEnums.java
13 | /Users/trailbrazer/Desktop/MR/git/MR/MR/HW3/src/main/java/HW3/App.java
14 | /Users/trailbrazer/Desktop/MR/git/MR/MR/HW3/src/main/java/pagerank/PageRankImpl.java
15 |
--------------------------------------------------------------------------------
/hadoop/src/main/java/topk/TopK.java:
--------------------------------------------------------------------------------
1 | package topk;
2 |
3 | import model.Node;
4 |
5 | import org.apache.hadoop.conf.Configuration;
6 | import org.apache.hadoop.fs.Path;
7 | import org.apache.hadoop.io.NullWritable;
8 | import org.apache.hadoop.io.Text;
9 | import org.apache.hadoop.mapreduce.Job;
10 | import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
11 | import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
12 |
13 | /**
14 | * @author Manthan Thakker
15 | * @project HW3
16 | * @date 2/23/18
17 | * @email thakker.m@husky.neu.edu
18 | */
19 | public class TopK {
20 |
21 |
22 | public static void main(String[] args) throws Exception {
23 |
24 | Configuration conf = new Configuration();
25 | Job job = Job.getInstance(conf, "Top K");
26 | job.getConfiguration().set("UNIQUEPAGES", args[2]);
27 | job.getConfiguration().set("K", args[3]);
28 | 
29 | // Setup
30 | job.setJarByClass(TopK.class);
31 | 
32 | // Mapper
33 | job.setMapperClass(TopKMapper.class);
34 | job.setMapOutputKeyClass(NullWritable.class);
35 | job.setMapOutputValueClass(Node.class);
36 | 
37 | // Reducer
38 | job.setReducerClass(TopKReducer.class);
39 | job.setOutputKeyClass(Text.class);
40 | job.setOutputValueClass(Text.class);
41 |
42 | FileInputFormat.addInputPath(job, new Path(args[0]));
43 | FileOutputFormat.setOutputPath(job, new Path(args[1]));
44 | System.exit(job.waitForCompletion(true) ? 0 : 1);
45 | }
46 |
47 | }
48 |
--------------------------------------------------------------------------------
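
A design note: TopKMapper keys every record with NullWritable, so the correctness of the top-k selection relies on all nodes reaching a single reduce call. The job above does not pin the reducer count; a hedged sketch of making that guarantee explicit (this tuning is not in the original):

    import org.apache.hadoop.mapreduce.Job;

    /** Illustrative tweak: force one reducer so a single reduce call sees every node. */
    final class TopKJobTuning {
        static void pinSingleReducer(Job job) {
            job.setNumReduceTasks(1);
        }
    }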
/hadoop/src/main/java/driver/DriverProgram.java:
--------------------------------------------------------------------------------
1 | package driver;
2 |
3 | import pagerank.PageRankImpl;
4 | import parserjob.ParserJob;
5 | import topk.TopK;
6 |
7 | import java.io.FileInputStream;
8 | import java.io.InputStream;
9 | import java.util.Properties;
10 |
11 | /**
12 | * @author Manthan Thakker
13 | * @project HW3
14 | * @date 2/22/18
15 | * @email thakker.m@husky.neu.edu
16 | */
17 | public class DriverProgram {
18 |
19 | /**
20 | * Initiates the execution
21 | * @param args: the input and the output paths
22 | * @throws Exception
23 | */
24 | public static void main(String[] args) throws Exception {
25 | 
26 | long UNIQUEPAGES;
27 | final long K = 10;
28 | final String topKInput = args[1] + "/10";
29 | final String topKOutput = args[1] + "/output";
30 | 
31 | // Phase 1: parse the raw data set and count the unique pages.
32 | String[] commandLine = new String[4];
33 | commandLine[0] = args[0];
34 | commandLine[1] = args[1];
35 | UNIQUEPAGES = ParserJob.main(commandLine);
36 | 
37 | // Phase 2: run the PageRank iterations.
38 | commandLine[0] = args[1];
39 | commandLine[2] = UNIQUEPAGES + "";
40 | PageRankImpl.main(commandLine);
41 | 
42 | // Phase 3: extract the top K pages.
43 | commandLine[0] = topKInput;
44 | commandLine[1] = topKOutput;
45 | commandLine[2] = UNIQUEPAGES + "";
46 | commandLine[3] = K + "";
47 | TopK.main(commandLine);
48 | }
49 | }
50 | 
--------------------------------------------------------------------------------
/hadoop/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0"
3 |   xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
4 |   xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
5 |   <modelVersion>4.0.0</modelVersion>
6 | 
7 |   <groupId>HW3</groupId>
8 |   <artifactId>HW3</artifactId>
9 |   <version>1.0-SNAPSHOT</version>
10 |   <packaging>jar</packaging>
11 | 
12 |   <name>HW3</name>
13 |   <url>http://maven.apache.org</url>
14 | 
15 |   <properties>
16 |     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
17 |   </properties>
18 | 
19 |   <dependencies>
20 | 
21 |     <dependency>
22 |       <groupId>org.apache.hadoop</groupId>
23 |       <artifactId>hadoop-common</artifactId>
24 |       <version>2.7.3</version>
25 |       <scope>provided</scope>
26 |     </dependency>
27 | 
28 |     <dependency>
29 |       <groupId>org.apache.hadoop</groupId>
30 |       <artifactId>hadoop-mapreduce-client-core</artifactId>
31 |       <version>3.0.0</version>
32 |     </dependency>
33 | 
34 |     <dependency>
35 |       <groupId>org.apache.hadoop</groupId>
36 |       <artifactId>hadoop-client</artifactId>
37 |       <version>2.2.0</version>
38 |     </dependency>
39 | 
40 |     <dependency>
41 |       <groupId>org.apache.hadoop</groupId>
42 |       <artifactId>hadoop-core</artifactId>
43 |       <version>1.2.1</version>
44 |     </dependency>
45 | 
46 |     <dependency>
47 |       <groupId>junit</groupId>
48 |       <artifactId>junit</artifactId>
49 |       <version>3.8.1</version>
50 |       <scope>test</scope>
51 |     </dependency>
52 | 
53 |   </dependencies>
54 | </project>
55 | 
--------------------------------------------------------------------------------
/hadoop/src/main/java/pagerank/PageRankImpl.java:
--------------------------------------------------------------------------------
1 | package pagerank;
2 |
3 | import enums.PageRankEnums;
4 | import model.Node;
5 | import org.apache.hadoop.conf.Configuration;
6 | import org.apache.hadoop.fs.Path;
7 | import org.apache.hadoop.io.Text;
8 | import org.apache.hadoop.mapreduce.Job;
9 | import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
10 | import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
11 |
12 |
13 | /**
14 | * @author Manthan Thakker
15 | * @project HW3
16 | * @date 2/20/18
17 | * @email thakker.m@husky.neu.edu
18 | */
19 | public class PageRankImpl {
20 |
21 |
22 | public static void main(String[] args) throws Exception {
23 |
24 | long deltaNew = 0L;
25 | 
26 | // Run ten iterations; iteration i reads folder (i-1) and writes folder i.
27 | for (int i = 1; i < 11; i++) {
28 | 
29 | Configuration conf = new Configuration();
30 | Job job = Job.getInstance(conf, "Page Rank");
31 | job.setJarByClass(PageRankImpl.class);
32 | 
33 | // SETTING CONTEXT VARIABLES: last iteration's dangling mass and the page count
34 | job.getConfiguration().set("deltaOld", deltaNew + "");
35 | job.getConfiguration().set("UNIQUEPAGES", args[2]);
36 | 
37 | // Mapper
38 | job.setMapperClass(PageRankMapper.class);
39 | job.setMapOutputKeyClass(Text.class);
40 | job.setMapOutputValueClass(Node.class);
41 | 
42 | // Reducer
43 | job.setReducerClass(PageRankReducer.class);
44 | job.setOutputKeyClass(Text.class);
45 | job.setOutputValueClass(Text.class);
46 | 
47 | FileInputFormat.addInputPath(job, new Path(args[0] + "/" + (i - 1)));
48 | FileOutputFormat.setOutputPath(job, new Path(args[0] + "/" + i));
49 | job.waitForCompletion(true);
50 | 
51 | // Carry this run's dangling-node mass (a scaled long counter) into the next iteration.
52 | deltaNew = job.getCounters().findCounter(PageRankEnums.DANGLINGNODESNEW).getValue();
53 | }
54 | }
55 | 
56 | }
57 | 
--------------------------------------------------------------------------------
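
In math terms, each iteration applies the damped PageRank update with $d = 0.85$ and $N$ = UNIQUEPAGES. The dangling mass $\delta_i$ is ferried between jobs through the deltaOld setting; since PageRankMapper is only partially shown in this section, the exact point where $\delta_i$ re-enters the ranks is an assumption:

    PR_{i+1}(p) = \frac{1-d}{N} + d \left( \sum_{q \to p} \frac{PR_i(q)}{|\mathrm{out}(q)|} + \frac{\delta_i}{N} \right)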
/hadoop/src/main/java/model/Node.java:
--------------------------------------------------------------------------------
1 | package model;
2 |
3 | import org.apache.hadoop.io.WritableComparable;
4 |
5 | import java.io.DataInput;
6 | import java.io.DataOutput;
7 | import java.io.IOException;
8 | import java.util.ArrayList;
9 | import java.util.List;
10 |
11 | /**
12 | * @author Manthan Thakker
13 | * @project HW3
14 | * @date 2/20/18
15 | * @email thakker.m@husky.neu.edu
16 | */
17 | public class Node implements WritableComparable<Node> {
18 | 
19 | public String id = "DEFAULT";
20 | public Double pageRank = -1.0;
21 | public List<String> neighbors;
22 | public boolean isNode = false;
23 | public static final long SCALEUP = 1000000000L;
24 | 
25 | public Node() {
26 | neighbors = new ArrayList<String>();
27 | isNode = true;
28 | }
29 | 
30 | public Node(String id) {
31 | this.id = id.trim();
32 | neighbors = new ArrayList<String>();
33 | isNode = true;
34 | }
35 | 
36 | public Node(String id, Double pageRank) {
37 | this.id = id.trim();
38 | this.pageRank = pageRank;
39 | neighbors = new ArrayList<String>();
40 | isNode = false;
41 | }
42 | 
43 | // SERIALIZATION AND DESERIALIZATION METHODS
44 | 
45 | public void write(DataOutput dataOutput) throws IOException {
46 | dataOutput.writeUTF(id.trim());
47 | dataOutput.writeBoolean(isNode);
48 | dataOutput.writeDouble(pageRank);
49 | // Encode the adjacency list as comma-separated text, dropping the trailing comma.
50 | StringBuilder accumulate = new StringBuilder();
51 | for (String neighbor : neighbors)
52 | accumulate.append(neighbor.trim()).append(",");
53 | if (accumulate.length() > 0)
54 | dataOutput.writeUTF(accumulate.substring(0, accumulate.length() - 1));
55 | else
56 | dataOutput.writeUTF("");
57 | }
58 | 
59 | public void readFields(DataInput dataInput) throws IOException {
60 | id = dataInput.readUTF().trim();
61 | isNode = dataInput.readBoolean();
62 | pageRank = dataInput.readDouble();
63 | neighbors = new ArrayList<String>();
64 | String nei = dataInput.readUTF();
65 | // An empty string means no neighbors; splitting it would add one bogus empty entry.
66 | if (!nei.isEmpty()) {
67 | for (String neighbor : nei.split(",")) {
68 | neighbors.add(neighbor.trim());
69 | }
70 | }
71 | }
72 | 
73 | @Override
74 | public String toString() {
75 | return "#" + pageRank + "#" + neighbors + "#" + isNode;
76 | }
77 | 
78 | public int compareTo(Node o) {
79 | return pageRank.compareTo(o.pageRank);
80 | }
81 | }
82 | 
--------------------------------------------------------------------------------
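
Because Node hand-rolls its Writable serialization, a round-trip check catches encoding mistakes early. A minimal sketch using only the class above and standard java.io streams (the test class name is illustrative, not part of the project):

    import java.io.*;
    import model.Node;

    /** Illustrative check: a Node should survive a write/readFields round trip. */
    public class NodeRoundTrip {
        public static void main(String[] args) throws IOException {
            Node original = new Node("A");
            original.pageRank = 0.25;
            original.neighbors.add("B");
            original.neighbors.add("C");

            // Serialize into an in-memory buffer.
            ByteArrayOutputStream buffer = new ByteArrayOutputStream();
            original.write(new DataOutputStream(buffer));

            // Deserialize into a fresh instance and compare the fields.
            Node copy = new Node();
            copy.readFields(new DataInputStream(new ByteArrayInputStream(buffer.toByteArray())));
            System.out.println(copy.id.equals(original.id)
                    && copy.pageRank.equals(original.pageRank)
                    && copy.neighbors.equals(original.neighbors));
        }
    }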
/hadoop/src/main/java/pagerank/PageRankReducer.java:
--------------------------------------------------------------------------------
1 | package pagerank;
2 |
3 | import enums.PageRankEnums;
4 | import model.Node;
5 | import org.apache.hadoop.conf.Configuration;
6 | import org.apache.hadoop.io.Text;
7 | import org.apache.hadoop.mapreduce.Reducer;
8 |
9 | import java.io.IOException;
10 | import java.util.Iterator;
11 |
12 | /**
13 | * @author Manthan Thakker
14 | * @project HW3
15 | * @date 2/23/18
16 | * @email thakker.m@husky.neu.edu
17 | */
18 | public class PageRankReducer extends Reducer<Text, Node, Text, Text> {
19 | 
20 | Long numberOfNodes;
21 | Configuration configuration;
22 | private final long SCALE_FACTOR = 1000000000000L;
23 | 
24 | /**
25 | * Initializes the state variables.
26 | * @param context
27 | */
28 | public void setup(Context context) {
29 | configuration = context.getConfiguration();
30 | numberOfNodes = Long.parseLong(context.getConfiguration().get("UNIQUEPAGES"));
31 | }
32 | 
33 | /**
34 | * @param key: the node id
35 | * @param values: list of Nodes and PageRank contributions (isNode is false for contributions)
36 | * @param context: Context
37 | * @throws IOException
38 | * @throws InterruptedException
39 | * All partial PageRank contributions for the same node id are routed to the same reduce call,
40 | * along with a copy of the Node itself. We add up the partial contributions and emit the
41 | * node with its updated PageRank.
42 | */
43 | public void reduce(Text key, Iterable<Node> values, Context context) throws IOException, InterruptedException {
44 | 
45 | // Initializing variables
46 | Double pageRankTotal = 0.0;
47 | Iterator<Node> iterator = values.iterator();
48 | Node M = null;
49 | 
50 | // Preparing the string to be output
51 | String MString = "";
52 | 
53 | while (iterator.hasNext()) {
54 | Node node = iterator.next();
55 | // Distinguish the actual node from a PageRank contribution.
56 | if (node.isNode) {
57 | M = node;
58 | MString = "#" + M.neighbors + "#" + M.isNode;
59 | } else {
60 | pageRankTotal += node.pageRank;
61 | }
62 | }
63 | 
64 | // The PageRank formula
65 | pageRankTotal = (0.15 / numberOfNodes) + (0.85 * pageRankTotal);
66 | 
67 | if (M != null) {
68 | context.write(new Text(M.id.trim()), new Text("#" + pageRankTotal + MString));
69 | } else {
70 | // No node record arrived for this id: treat it as dangling and bank its mass
71 | // in a counter (scaled, since Hadoop counters only hold longs).
72 | long pageRank = Double.valueOf(pageRankTotal * SCALE_FACTOR).longValue();
73 | context.getCounter(PageRankEnums.DANGLINGNODESNEW).increment(pageRank);
74 | context.write(key, new Text("#" + pageRankTotal + "#[]#true"));
75 | }
76 | }
77 | }
78 | 
--------------------------------------------------------------------------------
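
One detail worth spelling out: Hadoop counters only hold longs, so the reducer banks the dangling mass as a scaled integer using the SCALE_FACTOR above, and the driver reads the raw long back. A worked example of the scaling:

    \text{counter} = \lfloor \text{mass} \times 10^{12} \rfloor, \qquad 0.0375 \mapsto 37\,500\,000\,000

Dividing by the same factor recovers the double; where that unscaling happens is not visible here, since PageRankMapper is truncated in this section.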
/hadoop/src/main/java/topk/TopKReducer.java:
--------------------------------------------------------------------------------
1 | package topk;
2 |
3 | import enums.PageRankEnums;
4 | import model.Node;
5 | import org.apache.hadoop.io.NullWritable;
6 | import org.apache.hadoop.io.Text;
7 | import org.apache.hadoop.mapreduce.Reducer;
8 |
9 | import java.io.IOException;
10 | import java.util.*;
11 |
12 | /**
13 | * @author Manthan Thakker
14 | * @project HW3
15 | * @date 2/23/18
16 | * @email thakker.m@husky.neu.edu
17 | */
18 | public class TopKReducer extends Reducer<NullWritable, Node, Text, Text> {
19 | private Map<String, Node> pages;
20 | private long topK;
21 | 
22 | /**
23 | * Initializes all the variables.
24 | *
25 | * @param context
26 | */
27 | public void setup(Context context) {
28 | pages = new HashMap<String, Node>();
29 | this.topK = Long.parseLong(context.getConfiguration().get("K"));
30 | }
31 | 
32 | /**
33 | * @param key: the shared NullWritable key, so all records route to the same reduce call
34 | * @param nodeIterator: list of Nodes
35 | * @param context: Context
36 | * Inserts each node, then sorts and emits the top K results.
37 | */
38 | public void reduce(NullWritable key, Iterable<Node> nodeIterator, Context context) throws IOException, InterruptedException {
39 | for (Node node : nodeIterator) {
40 | pages.put(node.pageRank + "#" + node.id, node);
41 | }
42 | pages = sortByComparator(pages, false);
43 | int i = 0;
44 | for (String page : pages.keySet()) {
45 | context.write(new Text(""), new Text(page));
46 | i++;
47 | if (i >= topK)
48 | break;
49 | }
50 | }
51 | 
52 | /**
53 | * Sorts the given unsorted map by PageRank values.
54 | *
55 | * @param unsortMap: the map to be sorted
56 | * @param order: false for descending order
57 | * @return the sorted map
58 | * Picked up sorting from the IR project done last semester.
59 | */
60 | private static Map<String, Node> sortByComparator(Map<String, Node> unsortMap, final boolean order) {
61 | 
62 | List<Map.Entry<String, Node>> list = new LinkedList<Map.Entry<String, Node>>(unsortMap.entrySet());
63 | 
64 | // Sorting the list based on PageRank values
65 | Collections.sort(list, new Comparator<Map.Entry<String, Node>>() {
66 | public int compare(Map.Entry<String, Node> o1, Map.Entry<String, Node> o2) {
67 | double p1 = Double.parseDouble(o1.getKey().split("#")[0]);
68 | double p2 = Double.parseDouble(o2.getKey().split("#")[0]);
69 | return order ? Double.compare(p1, p2) : Double.compare(p2, p1);
70 | }
71 | });
72 | 
73 | // A LinkedHashMap preserves the sorted insertion order.
74 | Map<String, Node> sortedMap = new LinkedHashMap<String, Node>();
75 | for (Map.Entry<String, Node> entry : list) {
76 | sortedMap.put(entry.getKey(), entry.getValue());
77 | }
78 | 
79 | return sortedMap;
80 | }
81 | }
--------------------------------------------------------------------------------
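
A design note on the reducer above: it materializes every node in memory and sorts all of them, which is O(n log n) in the size of the full graph. A common alternative keeps only k candidates in a bounded min-heap, O(n log k); a minimal sketch against the same Node model (class and method names are illustrative, not part of the project):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.PriorityQueue;
    import model.Node;

    /** Illustrative alternative: top-k by PageRank with a size-bounded min-heap. */
    final class TopKHeap {
        static List<Node> topK(Iterable<Node> nodes, int k) {
            // Min-heap on pageRank: the smallest of the current top k sits at the head.
            PriorityQueue<Node> heap =
                    new PriorityQueue<Node>(k, (a, b) -> a.pageRank.compareTo(b.pageRank));
            for (Node node : nodes) {
                heap.offer(node);
                if (heap.size() > k) heap.poll(); // evict the current minimum
            }
            List<Node> result = new ArrayList<Node>(heap);
            result.sort((a, b) -> b.pageRank.compareTo(a.pageRank)); // highest first
            return result;
        }
    }

One caveat if this were dropped into a real reduce call: Hadoop reuses the value object across the iterator, so each Node would need to be deep-copied before entering the heap.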
/hadoop/src/main/java/pagerank/PageRankMapper.java:
--------------------------------------------------------------------------------
1 | package pagerank;
2 |
3 | import enums.PageRankEnums;
4 | import model.Node;
5 | import org.apache.hadoop.conf.Configuration;
6 | import org.apache.hadoop.io.Text;
7 | import org.apache.hadoop.mapreduce.Mapper;
8 |
9 | import java.io.IOException;
10 | import java.util.Arrays;
11 |
12 | /**
13 | * @author Manthan Thakker
14 | * @project HW3
15 | * @date 2/23/18
16 | * @email thakker.m@husky.neu.edu
17 | */
18 |
19 | public class PageRankMapper extends Mapper<Object, Text, Text, Node> {