├── .classpath
├── .project
├── README.md
├── bin
├── twitter_driver.class
├── twitter_mapper.class
└── twitter_reducer.class
├── input.PNG
├── src
├── twitter_driver.java
├── twitter_mapper.java
└── twitter_reducer.java
├── twitter stream.PNG
└── twitterhdfs.PNG
/.classpath:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 |
121 |
122 |
123 |
124 |
125 |
126 |
127 |
128 |
129 |
130 |
131 |
132 |
133 |
134 |
135 |
136 |
137 |
138 |
139 |
140 |
141 |
142 |
143 |
144 |
145 |
146 |
147 |
148 |
149 |
150 |
151 |
152 |
153 |
154 |
155 |
156 |
157 |
158 |
159 |
160 |
161 |
162 |
163 |
164 |
165 |
166 |
167 |
168 |
169 |
170 |
171 |
172 |
173 |
174 |
175 |
176 |
177 |
178 |
179 |
180 |
181 |
182 |
183 |
184 |
185 |
186 |
187 |
188 |
189 |
190 |
191 |
192 |
--------------------------------------------------------------------------------
/.project:
--------------------------------------------------------------------------------
1 |
2 |
3 | Twitter
4 |
5 |
6 |
7 |
8 |
9 | org.eclipse.jdt.core.javabuilder
10 |
11 |
12 |
13 |
14 |
15 | org.apache.hadoop.eclipse.Nature
16 | org.eclipse.jdt.core.javanature
17 |
18 |
19 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Twitter-Sentiment-Analysis
2 | Made by Adriealle Dsouza and Anant Mehra
3 | ## 1. PROJECT OBJECTIVE
4 | Sentiment Analysis on twitter streamed data using flume and hadoop
5 |
6 | ## 2. Project Description
7 |
8 | ### 2.1 Flume Introduction
9 |
10 | Apache Flume is a distributed, reliable, and available service for efficiently collecting, aggregating, and moving large amounts of log data. It has a simple and flexible architecture based on streaming data flows. It is robust and fault tolerant with tunable reliability mechanisms and many failover and recovery mechanisms. It uses a simple extensible data model that allows for online analytic application.
11 |
12 | ### 2.2 FLUME INSTALLATION
13 |
14 | * Download Flume
15 | Download apache-flume-1.7.0-bin.tar.gz
16 |
17 |
18 | * Extract using 7-Zip
19 | Move to C:\flume\apache-flume-1.7.0-bin directory
20 |
21 |
22 | * Set Path and Classpath for Flume
23 | FLUME_HOME=C:\flume\apache-flume-1.7.0-bin
24 |
25 | FLUME_CONF=%FLUME_HOME%\conf
26 |
27 | CLASSPATH=%FLUME_HOME%\lib\*
28 |
29 | PATH=C:\flume\apache-flume-1.7.0-bin\bin
30 |
31 |
32 | * Edit log4j.properties file
33 | flume.root.logger=DEBUG,console
34 |
35 | #flume.root.logger=INFO,LOGFILE
36 |
37 | * Copy flume-env.ps1.template as flume-env.ps1.
38 | Add below configuration:
39 |
40 | $JAVA_OPTS="-Xms500m -Xmx1000m -Dcom.sun.management.jmxremote"
41 |
42 |
43 |
44 | * Twitter keys generation
45 |
46 | Make a twitter application in twitter developer app and generate the necessary keys which will be used to extract the twitter data using flume.
47 |
48 |
49 | * Make a twitter.conf file in conf folder
50 |
51 | Write the details about the source, sink, and channel of Twitter in the twitter.conf file along with the Twitter keys we have generated.
52 |
53 |
54 | ### 2.3 WHAT IS SENTIMENT ANALYSIS ?
55 |
56 | Sentiment analysis is a process of computationally identifying and categorizing opinions expressed in a piece of text, especially in order to determine whether the writer's attitude towards a particular topic, product, etc. is positive, negative, or neutral.
57 | In this project we perform sentiment analysis on Twitter data to analyse whether the tweets posted by people are positive, negative, or neutral by checking the tweets against the AFINN dictionary, which has a set of about 2,500 words, each assigned a value ranging from -5 to +5 denoting how negative or positive the word is.
58 |
59 |
60 | ### 2.4 HOW IS SENTIMENT ANALYSIS WORKING ?
61 |
62 | Here are the 4 steps we have followed to perform Sentiment Analysis:
63 |
64 |
65 | 1. Implementing Distributed Caching
66 | 2. Writing a mapper class to calculate the sentiments
67 | 3. Writing a reducer class to display all the mapper output
68 | 4. Writing a Driver class for our mapreduce program
69 |
70 | #### Implementing Distributed Caching
71 |
72 | In Map Reduce, map-side joins are carried out by distributed cache. Distributed cache is applied when we have two datasets, where the smaller dataset size is limited to the cache memory of the cluster. Here, the dictionary is the smaller dataset, so we are using distributed cache. Here is the implementation of the distributed cache.
73 |
74 | #### Mapper Class
75 | The map method takes each record as input, and the record is converted into a string using the toString method. After this, we create a JSONParser object called jsonParser, which parses each record, which is in JSON format.
76 |
77 | #### Reducer Class
78 | In the reducer class, we are just passing the input of the mapper as its output.
79 |
80 | #### Driver Class
81 | In the Driver class, we need to provide the path for the cached dataset, using the below line.We also need to provide the input(tweets_folder) path and the output folder path as arguments.
82 |
83 | ### 2.5 FLUME WORKING COMMANDS TO PERFORM SENTIMENT ANALYSIS
84 |
85 |
86 | * ##### cd %FLUME_HOME%/bin
87 |
88 |
89 | * ##### flume-ng agent --conf %FLUME_CONF% --conf-file %FLUME_CONF%/flume-conf.properties.template --name agent
90 |
91 |
92 | * #### flume-ng agent --conf %FLUME_CONF% --conf-file %FLUME_CONF%/flume-conf.properties --name TwitterAgent
93 |
94 |
95 | #### Fig: Twitter stream command on cmd
96 |
97 | 
98 | #### Fig: Scrubbed data retrieved
99 |
100 |
101 |
102 | 
103 | #### Fig.Data streaming from twitter
104 |
105 |
106 | 
107 | #### Fig. Data into HDFS (Log files)
108 |
109 |
110 | ### 3. Technologies Used
111 | In the above project we have used apache Flume – 1.7.0 to stream the data from twitter in the form of log files and then store it in HDFS to apply sentiment analysis on that data.
112 |
113 |
114 | ### 4. Java Code with result snapshots
115 |
116 | #### 4.1 Mapper
117 |
118 |
119 | ```java
120 |
121 | import org.apache.hadoop.io.LongWritable;
122 | import org.apache.hadoop.io.Text;
123 | import org.apache.hadoop.mapreduce.Mapper;
124 | import java.io.BufferedReader;
125 | import java.io.IOException;
126 | import java.io.InputStreamReader;
127 | import java.net.URI;
128 | import java.util.HashMap;
129 | import org.apache.hadoop.conf.Configuration;
130 | import org.apache.hadoop.conf.Configured;
131 | import org.apache.hadoop.filecache.DistributedCache;
132 | import org.apache.hadoop.fs.FSDataInputStream;
133 | import org.apache.hadoop.fs.FileSystem;
134 | import org.apache.hadoop.fs.Path;
135 | import org.apache.hadoop.io.LongWritable;
136 | import org.apache.hadoop.io.NullWritable;
137 | import org.apache.hadoop.io.Text;
138 | import org.apache.hadoop.mapreduce.Job;
139 | import org.apache.hadoop.mapreduce.Mapper;
140 | import org.apache.hadoop.mapreduce.Reducer;
141 | import org.apache.hadoop.mapreduce.Mapper.Context;
142 | import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
143 | import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
144 | import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
145 | import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
146 | import org.apache.hadoop.util.Tool;
147 | import org.apache.hadoop.util.ToolRunner;
148 | import org.json.simple.JSONObject;
149 | import org.json.simple.parser.JSONParser;
150 |
151 |
152 | public class twitter_mapper extends Mapper {
153 |
154 |
155 | private URI[] files;
156 |
157 | private HashMap AFINN_map = new HashMap();
158 |
159 | @Override
160 |
161 | public void setup(Context context) throws IOException
162 |
163 | {
164 |
165 |
166 |
167 | files = DistributedCache.getCacheFiles(context.getConfiguration());
168 |
169 | System.out.println("files:"+ files);
170 |
171 | Path path = new Path(files[0]);
172 |
173 | FileSystem fs = FileSystem.get(context.getConfiguration());
174 |
175 | FSDataInputStream in = fs.open(path);
176 |
177 | BufferedReader br = new BufferedReader(new InputStreamReader(in));
178 |
179 | String line="";
180 |
181 | while((line = br.readLine())!=null)
182 |
183 | {
184 |
185 | String splits[] = line.split("\t");
186 |
187 | AFINN_map.put(splits[0], splits[1]);
188 |
189 | }
190 |
191 | br.close();
192 |
193 | in.close();
194 |
195 | }
196 |
197 | public void map(LongWritable key, Text value, Context context)
198 | throws IOException, InterruptedException {
199 |
200 | String name;
201 | String twt;
202 | String line = value.toString();
203 | String[] tuple = line.split("\\n");
204 | JSONParser jsonParser = new JSONParser();
205 |
206 | try{
207 |
208 | for(int i=0;i\t"+new Text(Integer.toString(sentiment_sum))));
235 |
236 | }
237 |
238 | }catch(Exception e){
239 |
240 | e.printStackTrace();
241 |
242 | }
243 |
244 | }
245 |
246 | }
247 |
248 | ```
249 |
250 |
251 | #### 4.2 Reducer
252 |
253 | ```java
254 | import java.io.IOException;
255 |
256 | import org.apache.hadoop.io.Text;
257 | import org.apache.hadoop.mapreduce.Reducer;
258 |
259 |
260 | public class twitter_reducer extends Reducer {
261 |
262 | public void reduce(Text key,Text value, Context context)
263 | throws IOException, InterruptedException {
264 | // process values
265 |
266 | context.write(key,value);
267 |
268 | }
269 |
270 | }
271 |
272 | ```
273 |
274 | #### 4.3 MapReduce Driver
275 | ```java
276 |
277 | import java.util.Map;
278 | import java.io.BufferedReader;
279 | import java.io.IOException;
280 | import java.io.InputStreamReader;
281 | import java.net.URI;
282 | import java.util.*;
283 | import org.apache.hadoop.conf.Configuration;
284 | import org.apache.hadoop.conf.Configured;
285 | import org.apache.hadoop.filecache.DistributedCache;
286 | import org.apache.hadoop.fs.FSDataInputStream;
287 | import org.apache.hadoop.fs.FileSystem;
288 | import org.apache.hadoop.fs.Path;
289 | import org.apache.hadoop.io.LongWritable;
290 | import org.apache.hadoop.io.NullWritable;
291 | import org.apache.hadoop.io.Text;
292 | import org.apache.hadoop.mapreduce.Job;
293 | import org.apache.hadoop.mapreduce.Mapper;
294 | import org.apache.hadoop.mapreduce.Reducer;
295 | import org.apache.hadoop.mapreduce.Mapper.Context;
296 | import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
297 | import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
298 | import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
299 | import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
300 | import org.apache.hadoop.util.Tool;
301 | import org.apache.hadoop.util.ToolRunner;
302 | import org.json.simple.JSONObject;
303 | import org.json.simple.parser.JSONParser;
304 | import org.apache.hadoop.fs.Path;
305 | import org.apache.hadoop.io.Text;
306 | import org.apache.hadoop.mapreduce.Job;
307 | import org.apache.hadoop.mapreduce.Mapper;
308 | import org.apache.hadoop.mapreduce.Reducer;
309 | import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
310 | import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
311 |
312 |
313 | public class twitter_driver implements Tool {
314 |
315 | public static void main(String[] args) throws Exception {
316 |
317 |
318 | //public static void main(String[] args) throws Exception {
319 | //ToolRunner.run(new twitter_driver(),args);
320 |
321 |
322 | //public int run(String[] args) throws Exception {
323 |
324 | //TODO Auto-generated method stub
325 |
326 | {
327 | Configuration conf =new Configuration();
328 |
329 |
330 |
331 | //DistributedCache.addCacheFile(new URI("C:/flume/apache-flume-1.7.0-bin/bin/AFINN.txt"),conf);
332 |
333 | Job job =Job.getInstance(conf,"Sentiment Analysis");
334 | job.addCacheFile(new Path("hdfs://localhost:9000/user/AFINN.txt").toUri());
335 |
336 | job.setJarByClass(twitter_driver.class);
337 |
338 | job.setMapperClass(twitter_mapper.class);
339 |
340 | job.setReducerClass(twitter_reducer.class);
341 |
342 | job.setMapOutputKeyClass(Text.class);
343 |
344 | job.setMapOutputValueClass(Text.class);
345 |
346 | job.setOutputKeyClass(NullWritable.class);
347 |
348 | job.setOutputValueClass(Text.class);
349 |
350 | job.setInputFormatClass(TextInputFormat.class);
351 |
352 | job.setOutputFormatClass(TextOutputFormat.class);
353 |
354 | FileInputFormat.addInputPath(job, new Path("hdfs://localhost:9000/user/test.txt"));
355 |
356 | FileOutputFormat.setOutputPath(job, new Path("hdfs://localhost:9000/user/o2.txt"));
357 |
358 | System.exit(job.waitForCompletion(true) ? 0 : 1);
359 | }
360 | }
361 |
362 |
363 | public Configuration getConf() {
364 | // TODO Auto-generated method stub
365 | return null;
366 | }
367 |
368 | public void setConf(Configuration arg0) {
369 | // TODO Auto-generated method stub
370 |
371 | }
372 |
373 |
374 |
375 | @Override
376 | public int run(String[] arg0) throws Exception {
377 | // TODO Auto-generated method stub
378 | return 0;
379 | }
380 | }
381 | ```
382 |
--------------------------------------------------------------------------------
/bin/twitter_driver.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/adriel1997/Twitter-Sentiment-Analysis/db7299ae275c3a322c64bfa98305ae5942c08321/bin/twitter_driver.class
--------------------------------------------------------------------------------
/bin/twitter_mapper.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/adriel1997/Twitter-Sentiment-Analysis/db7299ae275c3a322c64bfa98305ae5942c08321/bin/twitter_mapper.class
--------------------------------------------------------------------------------
/bin/twitter_reducer.class:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/adriel1997/Twitter-Sentiment-Analysis/db7299ae275c3a322c64bfa98305ae5942c08321/bin/twitter_reducer.class
--------------------------------------------------------------------------------
/input.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/adriel1997/Twitter-Sentiment-Analysis/db7299ae275c3a322c64bfa98305ae5942c08321/input.PNG
--------------------------------------------------------------------------------
/src/twitter_driver.java:
--------------------------------------------------------------------------------
1 | import java.util.Map;
2 | import java.io.BufferedReader;
3 | import java.io.IOException;
4 | import java.io.InputStreamReader;
5 | import java.net.URI;
6 | import java.util.*;
7 | import org.apache.hadoop.conf.Configuration;
8 | import org.apache.hadoop.conf.Configured;
9 | import org.apache.hadoop.filecache.DistributedCache;
10 | import org.apache.hadoop.fs.FSDataInputStream;
11 | import org.apache.hadoop.fs.FileSystem;
12 | import org.apache.hadoop.fs.Path;
13 | import org.apache.hadoop.io.LongWritable;
14 | import org.apache.hadoop.io.NullWritable;
15 | import org.apache.hadoop.io.Text;
16 | import org.apache.hadoop.mapreduce.Job;
17 | import org.apache.hadoop.mapreduce.Mapper;
18 | import org.apache.hadoop.mapreduce.Reducer;
19 | import org.apache.hadoop.mapreduce.Mapper.Context;
20 | import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
21 | import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
22 | import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
23 | import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
24 | import org.apache.hadoop.util.Tool;
25 | import org.apache.hadoop.util.ToolRunner;
26 | import org.json.simple.JSONObject;
27 | import org.json.simple.parser.JSONParser;
28 | import org.apache.hadoop.fs.Path;
29 | import org.apache.hadoop.io.Text;
30 | import org.apache.hadoop.mapreduce.Job;
31 | import org.apache.hadoop.mapreduce.Mapper;
32 | import org.apache.hadoop.mapreduce.Reducer;
33 | import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
34 | import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
35 |
36 |
37 | public class twitter_driver implements Tool {
38 |
39 | public static void main(String[] args) throws Exception {
40 |
41 |
42 | //public static void main(String[] args) throws Exception {
43 | ToolRunner.run(new twitter_driver(),args);
44 | //}
45 |
46 | //public int run(String[] args) throws Exception {
47 |
48 | //TODO Auto-generated method stub
49 |
50 | //{
51 | Configuration conf =new Configuration();
52 |
53 |
54 |
55 | //DistributedCache.addCacheFile(new URI("C:/Users/Adriel/Downloads/AFINN.txt"),conf);
56 |
57 | Job job =Job.getInstance(conf,"Sentiment Analysis");
58 |
59 | job.addCacheFile(new Path("C:/Users/Adriel/Downloads/AFINN.txt").toUri());
60 |
61 | job.setJarByClass(twitter_driver.class);
62 |
63 | job.setMapperClass(twitter_mapper.class);
64 |
65 | job.setReducerClass(twitter_reducer.class);
66 |
67 | job.setMapOutputKeyClass(Text.class);
68 |
69 | job.setMapOutputValueClass(Text.class);
70 |
71 | job.setOutputKeyClass(NullWritable.class);
72 |
73 | job.setOutputValueClass(Text.class);
74 |
75 | job.setInputFormatClass(TextInputFormat.class);
76 |
77 | job.setOutputFormatClass(TextOutputFormat.class);
78 |
79 | FileInputFormat.addInputPath(job, new Path(args[0]));
80 |
81 | FileOutputFormat.setOutputPath(job, new Path(args[1]));
82 |
83 | System.exit(job.waitForCompletion(true) ? 0 : 1);
84 |
85 | //return 0;
86 | }
87 |
88 | //}
89 |
90 | @Override
91 | public Configuration getConf() {
92 | // TODO Auto-generated method stub
93 | return null;
94 | }
95 |
96 | @Override
97 | public void setConf(Configuration arg0) {
98 | // TODO Auto-generated method stub
99 |
100 | }
101 |
102 | @Override
103 | public int run(String[] arg0) throws Exception {
104 | // TODO Auto-generated method stub
105 | return 0;
106 | }
107 | }
--------------------------------------------------------------------------------
/src/twitter_mapper.java:
--------------------------------------------------------------------------------
1 | import java.io.IOException;
2 |
3 |
4 |
5 | import org.apache.hadoop.io.LongWritable;
6 | import org.apache.hadoop.io.Text;
7 | import org.apache.hadoop.mapreduce.Mapper;
8 | import java.io.BufferedReader;
9 | import java.io.IOException;
10 | import java.io.InputStreamReader;
11 | import java.net.URI;
12 | import java.util.HashMap;
13 | import org.apache.hadoop.conf.Configuration;
14 | import org.apache.hadoop.conf.Configured;
15 | import org.apache.hadoop.filecache.DistributedCache;
16 | import org.apache.hadoop.fs.FSDataInputStream;
17 | import org.apache.hadoop.fs.FileSystem;
18 | import org.apache.hadoop.fs.Path;
19 | import org.apache.hadoop.io.LongWritable;
20 | import org.apache.hadoop.io.NullWritable;
21 | import org.apache.hadoop.io.Text;
22 | import org.apache.hadoop.mapreduce.Job;
23 | import org.apache.hadoop.mapreduce.Mapper;
24 | import org.apache.hadoop.mapreduce.Reducer;
25 | import org.apache.hadoop.mapreduce.Mapper.Context;
26 | import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
27 | import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
28 | import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
29 | import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
30 | import org.apache.hadoop.util.Tool;
31 | import org.apache.hadoop.util.ToolRunner;
32 | import org.json.simple.JSONObject;
33 | import org.json.simple.parser.JSONParser;
34 |
35 |
// Map-side sentiment scorer: loads the AFINN word->score dictionary from the
// Hadoop distributed cache in setup(), then scores each incoming JSON tweet
// record in map().
// NOTE(review): the generic type parameters (e.g. Mapper<LongWritable, Text,
// Text, Text>) appear to have been stripped from this listing by export
// mangling — confirm against the original source before compiling.
public class twitter_mapper extends Mapper {

    // Cache-file URIs registered by the driver; files[0] is expected to be
    // the AFINN dictionary.
    private URI[] files;

    // word -> sentiment score, both kept as Strings exactly as read from the
    // dictionary file.
    // NOTE(review): raw HashMap — presumably HashMap<String, String> before
    // the generics were stripped.
    private HashMap AFINN_map = new HashMap();

    // Runs once per task before any map() call: opens the cached AFINN file
    // (one "word<TAB>score" entry per line) and loads it into AFINN_map.
    // Throws IOException if the cache file cannot be opened or read.
    @Override
    public void setup(Context context) throws IOException
    {
        files = DistributedCache.getCacheFiles(context.getConfiguration());
        System.out.println("files:"+ files);
        // NOTE(review): no null/empty check — a missing cache file causes an
        // unguarded NullPointerException / ArrayIndexOutOfBoundsException here.
        Path path = new Path(files[0]);
        FileSystem fs = FileSystem.get(context.getConfiguration());
        FSDataInputStream in = fs.open(path);
        BufferedReader br = new BufferedReader(new InputStreamReader(in));
        String line="";
        while((line = br.readLine())!=null)
        {
            // Dictionary format: word, tab, integer score in [-5, +5].
            String splits[] = line.split("\t");
            AFINN_map.put(splits[0], splits[1]);
        }
        br.close();
        in.close();
    }

    // Scores one input record (a JSON tweet) and emits its sentiment sum.
    // NOTE(review): the body below is visibly truncated in this listing — the
    // JSON-parsing loop (original listing lines ~90-116) has been collapsed
    // onto a single garbled line. Recover the full method from the original
    // source; this text will not compile as-is.
    public void map(LongWritable ikey, Text ivalue, Context context) throws IOException, InterruptedException {

        String name;
        String twt;
        String line = ivalue.toString();
        // Each record may contain several newline-separated JSON documents.
        String[] tuple = line.split("\\n");
        JSONParser jsonParser = new JSONParser();

        try{

            for(int i=0;i\t"+new Text(Integer.toString(sentiment_sum))));

        }

        }catch(Exception e){
            // Best-effort: malformed JSON records are logged and skipped
            // rather than failing the task.
            e.printStackTrace();
        }
    }

}
131 |
132 |
133 |
134 |
--------------------------------------------------------------------------------
/src/twitter_reducer.java:
--------------------------------------------------------------------------------
1 | import java.io.IOException;
2 |
3 | import org.apache.hadoop.io.Text;
4 | import org.apache.hadoop.mapreduce.Reducer;
5 |
// Identity reducer: forwards each mapper (key, value) pair to the output
// unchanged.
// NOTE(review): generic type parameters appear to have been stripped from
// this listing (raw "extends Reducer"). Also, the method below takes a single
// Text value rather than Iterable<Text> values, so it does NOT override
// Reducer.reduce() — under Hadoop it would never be invoked and the
// framework's default identity reduce would run instead. Confirm the intended
// signature is reduce(Text key, Iterable<Text> values, Context context).
public class twitter_reducer extends Reducer {

    public void reduce(Text key,Text value, Context context) throws IOException, InterruptedException {
        // process value
        context.write(key,value);
    }

}
17 |
--------------------------------------------------------------------------------
/twitter stream.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/adriel1997/Twitter-Sentiment-Analysis/db7299ae275c3a322c64bfa98305ae5942c08321/twitter stream.PNG
--------------------------------------------------------------------------------
/twitterhdfs.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/adriel1997/Twitter-Sentiment-Analysis/db7299ae275c3a322c64bfa98305ae5942c08321/twitterhdfs.PNG
--------------------------------------------------------------------------------