
Commit

Add files via upload
keyur9 authored Nov 29, 2016
1 parent 4218d86 commit 7b96600
Showing 23 changed files with 642 additions and 0 deletions.
35 changes: 35 additions & 0 deletions PageRank/pom.xml
@@ -0,0 +1,35 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>edu.stevens.cs549</groupId>
<artifactId>PageRank</artifactId>
<version>1.0.0</version>
<name>PageRank</name>
<build>
<sourceDirectory>src</sourceDirectory>
<resources>
<resource>
<directory>src</directory>
<excludes>
<exclude>**/*.java</exclude>
</excludes>
</resource>
</resources>
<plugins>
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.1</version>
<configuration>
<source>1.7</source>
<target>1.7</target>
</configuration>
</plugin>
</plugins>
</build>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>2.5.1</version>
</dependency>
</dependencies>
</project>
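Build note: this POM targets Java 7 source/target and depends on hadoop-client 2.5.1. Running mvn package from the PageRank directory should compile the sources under src and produce a jar under target/ that can be submitted with hadoop jar; the entry point is presumably the PageRankDriver class referenced by the reducers below.
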
24 changes: 24 additions & 0 deletions PageRank/src/edu/stevens/cs549/hadoop/pagerank/DiffMap1.java
@@ -0,0 +1,24 @@
package edu.stevens.cs549.hadoop.pagerank;

import java.io.IOException;

import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.io.*;

public class DiffMap1 extends Mapper<LongWritable, Text, Text, Text> {

public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException,
IllegalArgumentException {
String line = value.toString(); // Converts Line to a String
String[] sections = line.split("\t"); // Splits each line
if (sections.length > 2) // checks for incorrect data format
{
throw new IOException("Incorrect data format");
}
/**
* TODO: read node-rank pair and emit: key:node, value:rank
*/

}

}
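A minimal sketch of how the DiffMap1 TODO might be completed, meant to slot in after the format check above. It assumes sections[0] carries the node and its rank joined as "node:rank"; the ':' delimiter is an illustrative choice used consistently across the sketches in this diff, not something fixed by the skeleton.

// Sketch (assumption): sections[0] has the form "node:rank".
String[] nodeRank = sections[0].split(":");
if (nodeRank.length == 2) {
    // key: node, value: rank
    context.write(new Text(nodeRank[0].trim()), new Text(nodeRank[1].trim()));
}

If the iteration output instead stores node and rank as separate tab-separated fields, the pair is simply sections[0] and sections[1].
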
20 changes: 20 additions & 0 deletions PageRank/src/edu/stevens/cs549/hadoop/pagerank/DiffMap2.java
@@ -0,0 +1,20 @@
package edu.stevens.cs549.hadoop.pagerank;

import java.io.IOException;

import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.io.*;

public class DiffMap2 extends Mapper<LongWritable, Text, Text, Text> {

public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException,
IllegalArgumentException {
String s = value.toString(); // Converts Line to a String

/*
* TODO: emit: key:"Difference" value:difference calculated in DiffRed1
*/

}

}
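A sketch for the DiffMap2 TODO, assuming each line of DiffRed1 output is the tab-separated pair node, difference (see the DiffRed1 sketch below).

// Sketch (assumption): s is "node \t difference", as emitted by the DiffRed1 sketch below.
String[] parts = s.split("\t");
if (parts.length == 2) {
    // A single constant key routes every difference to one reducer call.
    context.write(new Text("Difference"), new Text(parts[1].trim()));
}
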
17 changes: 17 additions & 0 deletions PageRank/src/edu/stevens/cs549/hadoop/pagerank/DiffRed1.java
@@ -0,0 +1,17 @@
package edu.stevens.cs549.hadoop.pagerank;

import java.io.*;

import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.io.*;

public class DiffRed1 extends Reducer<Text, Text, Text, Text> {

public void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
double[] ranks = new double[2];
/*
* TODO: The list of values should contain two ranks. Compute and output their difference.
*/

}
}
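One way the DiffRed1 TODO could be completed, assuming the grouped values for each node are its ranks from two successive iterations.

// Sketch: collect the (expected) two ranks and emit their absolute difference.
int i = 0;
for (Text value : values) {
    if (i < 2) {
        ranks[i++] = Double.parseDouble(value.toString().trim());
    }
}
if (i == 2) {
    context.write(key, new Text(String.valueOf(Math.abs(ranks[0] - ranks[1]))));
}
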
17 changes: 17 additions & 0 deletions PageRank/src/edu/stevens/cs549/hadoop/pagerank/DiffRed2.java
@@ -0,0 +1,17 @@
package edu.stevens.cs549.hadoop.pagerank;

import java.io.*;

import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.io.*;

public class DiffRed2 extends Reducer<Text, Text, Text, Text> {

public void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
double diff_max = 0.0; // sets diff_max to a default value
/*
* TODO: Compute and emit the maximum of the differences
*/

}
}
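A sketch for the DiffRed2 TODO: all differences arrive under the single "Difference" key emitted by DiffMap2, so the reducer only has to keep the maximum.

// Sketch: scan every difference and keep the largest as the convergence measure.
for (Text value : values) {
    double diff = Double.parseDouble(value.toString().trim());
    if (diff > diff_max) {
        diff_max = diff;
    }
}
context.write(key, new Text(String.valueOf(diff_max)));
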
20 changes: 20 additions & 0 deletions PageRank/src/edu/stevens/cs549/hadoop/pagerank/FinMapper.java
@@ -0,0 +1,20 @@
package edu.stevens.cs549.hadoop.pagerank;

import java.io.IOException;

import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.io.*;

public class FinMapper extends Mapper<LongWritable, Text, DoubleWritable, Text> {

public void map(LongWritable key, Text value, Context context)
throws IOException, InterruptedException, IllegalArgumentException {
String line = value.toString(); // Converts Line to a String
/*
* TODO output key:-rank, value: node
* See IterMapper for hints on parsing the output of IterReducer.
*/

}

}
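A sketch of the FinMapper TODO under the same formatting assumption as the other sketches: each IterReducer output line is "node:rank", a tab, then the adjacency list.

// Sketch (assumption): line is "node:rank \t adjacency list".
String[] sections = line.split("\t");
String[] nodeRank = sections[0].split(":");
if (nodeRank.length == 2) {
    double rank = Double.parseDouble(nodeRank[1].trim());
    // key: -rank, so the shuffle sorts nodes in descending order of rank
    context.write(new DoubleWritable(-rank), new Text(nodeRank[0].trim()));
}
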
18 changes: 18 additions & 0 deletions PageRank/src/edu/stevens/cs549/hadoop/pagerank/FinReducer.java
@@ -0,0 +1,18 @@
package edu.stevens.cs549.hadoop.pagerank;

import java.io.IOException;

import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class FinReducer extends Reducer<DoubleWritable, Text, Text, Text> {

public void reduce(DoubleWritable key, Iterable<Text> values, Context context) throws IOException,
InterruptedException {
/*
* TODO: For each value, emit: key:value, value:-rank
*/

}
}
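A sketch for the FinReducer TODO; here the incoming key is the negated rank produced by FinMapper.

// Sketch: emit each node (the value) with its rank.
for (Text value : values) {
    // Negating the key recovers the positive rank; keep key.get() unchanged
    // if the literal "-rank" wording of the TODO is what is intended.
    context.write(new Text(value.toString()), new Text(String.valueOf(-key.get())));
}
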
20 changes: 20 additions & 0 deletions PageRank/src/edu/stevens/cs549/hadoop/pagerank/InitMapper.java
@@ -0,0 +1,20 @@
package edu.stevens.cs549.hadoop.pagerank;

import java.io.IOException;

import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.io.*;

public class InitMapper extends Mapper<LongWritable, Text, Text, Text> {

public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException,
IllegalArgumentException {
String line = value.toString(); // Converts Line to a String
/*
* TODO: Just echo the input, since it is already in adjacency list format.
*/


}

}
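A sketch for the InitMapper TODO, assuming each input line is a node id, a tab, then its space-separated adjacency list.

// Sketch (assumption): line is "node \t adjacency list".
String[] sections = line.split("\t", 2);
String node = sections[0].trim();
String adjList = (sections.length == 2) ? sections[1].trim() : "";
// Echo the input: key: node, value: adjacency list
context.write(new Text(node), new Text(adjList));
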
16 changes: 16 additions & 0 deletions PageRank/src/edu/stevens/cs549/hadoop/pagerank/InitReducer.java
@@ -0,0 +1,16 @@
package edu.stevens.cs549.hadoop.pagerank;

import java.io.*;

import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.io.*;

public class InitReducer extends Reducer<Text, Text, Text, Text> {

public void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
/*
* TODO: Output key: node+rank, value: adjacency list
*/

}
}
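A sketch for the InitReducer TODO. Two assumptions are baked in: the initial rank is 1.0 (some PageRank variants start from 1/N instead), and node and rank are joined as "node:rank", matching the delimiter assumed in the other sketches in this diff.

// Sketch: attach an initial rank to each node and pass its adjacency list through.
for (Text value : values) {
    context.write(new Text(key.toString() + ":" + 1.0), new Text(value.toString()));
}
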
33 changes: 33 additions & 0 deletions PageRank/src/edu/stevens/cs549/hadoop/pagerank/IterMapper.java
@@ -0,0 +1,33 @@
package edu.stevens.cs549.hadoop.pagerank;

import java.io.IOException;

import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.io.*;

public class IterMapper extends Mapper<LongWritable, Text, Text, Text> {

public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException,
IllegalArgumentException {
String line = value.toString(); // Converts Line to a String
String[] sections = line.split("\t"); // Splits it into two parts. Part 1: node+rank | Part 2: adj list

if (sections.length > 2) // Checks if the data is in the incorrect format
{
throw new IOException("Incorrect data format");
}
if (sections.length != 2) {
return;
}

/*
* TODO: emit key: adj vertex, value: computed weight.
*
* Remember to also emit the input adjacency list for this node!
* Put a marker on the string value to indicate it is an adjacency list.
*/


}

}
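A sketch for the IterMapper TODO, continuing the "node:rank" assumption; the "ADJ " marker on the pass-through value is likewise an illustrative choice, not something fixed by the skeleton.

// Sketch (assumptions): sections[0] is "node:rank", sections[1] is a space-separated adjacency list.
String[] nodeRank = sections[0].split(":");
String node = nodeRank[0].trim();
double rank = Double.parseDouble(nodeRank[1].trim());
String[] targets = sections[1].trim().split("\\s+");

// Each outgoing edge contributes rank / out-degree to its target.
double weight = rank / targets.length;
for (String target : targets) {
    context.write(new Text(target), new Text(String.valueOf(weight)));
}

// Pass the adjacency list through, marked so IterReducer can tell it apart from weights.
context.write(new Text(node), new Text("ADJ " + sections[1].trim()));
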
19 changes: 19 additions & 0 deletions PageRank/src/edu/stevens/cs549/hadoop/pagerank/IterReducer.java
@@ -0,0 +1,19 @@
package edu.stevens.cs549.hadoop.pagerank;

import java.io.*;
import java.util.*;

import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.io.*;

public class IterReducer extends Reducer<Text, Text, Text, Text> {

public void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
double d = PageRankDriver.DECAY; // Decay factor
/*
* TODO: emit key:node+rank, value: adjacency list
* Use PageRank algorithm to compute rank from weights contributed by incoming edges.
* Remember that one of the values will be marked as the adjacency list for the node.
*/
}
}
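A sketch for the IterReducer TODO, assuming the simplified update rank = (1 - d) + d * (sum of incoming weights); some formulations divide (1 - d) by the total number of nodes. It also relies on the "ADJ " marker and "node:rank" delimiter assumed in the IterMapper sketch above.

// Sketch: separate the marked adjacency list from the incoming weights, then apply the update.
double sum = 0.0;
String adjList = "";
for (Text value : values) {
    String v = value.toString();
    if (v.startsWith("ADJ ")) {
        adjList = v.substring(4); // recover this node's adjacency list
    } else {
        sum += Double.parseDouble(v.trim());
    }
}
double rank = (1 - d) + d * sum;
// key: node+rank, value: adjacency list
context.write(new Text(key.toString() + ":" + rank), new Text(adjList));
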
