-
Notifications
You must be signed in to change notification settings - Fork 5
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
13 changed files
with
277 additions
and
69 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
91 changes: 91 additions & 0 deletions
91
qgen/src/main/java/bigframe/qgen/engineDriver/HiveGiraphEngineDriver.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,91 @@ | ||
package bigframe.qgen.engineDriver; | ||
|
||
import java.sql.Connection; | ||
import java.sql.DriverManager; | ||
import java.sql.SQLException; | ||
import java.util.ArrayList; | ||
import java.util.List; | ||
|
||
import org.apache.hadoop.fs.Path; | ||
import org.apache.hadoop.hive.conf.HiveConf; | ||
import org.apache.log4j.Logger; | ||
|
||
import bigframe.bigif.BigConfConstants; | ||
import bigframe.bigif.WorkflowInputFormat; | ||
import bigframe.workflows.runnable.HiveGiraphRunnable; | ||
import bigframe.workflows.runnable.HiveRunnable; | ||
|
||
public class HiveGiraphEngineDriver extends EngineDriver { | ||
|
||
private HiveConf hive_config; | ||
private static final Logger LOG = Logger.getLogger(HadoopEngineDriver.class); | ||
private List<HiveGiraphRunnable> queries = new ArrayList<HiveGiraphRunnable>(); | ||
|
||
private Connection connection; | ||
private static String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver"; | ||
|
||
public HiveGiraphEngineDriver(WorkflowInputFormat workIF) { | ||
super(workIF); | ||
hive_config = new HiveConf(); | ||
hive_config.addResource(new Path(workIF.getHiveHome() | ||
+ "/conf/hive-site.xml")); | ||
// TODO Auto-generated constructor stub | ||
} | ||
|
||
@Override | ||
public int numOfQueries() { | ||
// TODO Auto-generated method stub | ||
return queries.size(); | ||
} | ||
|
||
@Override | ||
public void init() { | ||
try { | ||
Class.forName(driverName); | ||
} catch (ClassNotFoundException e) { | ||
// TODO Auto-generated catch block | ||
e.printStackTrace(); | ||
System.exit(1); | ||
} | ||
|
||
try { | ||
LOG.info("Connectiong to Hive JDBC server!!!"); | ||
connection = DriverManager.getConnection(workIF.getHiveJDBCServer(), "", ""); | ||
if(connection == null) { | ||
LOG.error("Cannot connect to JDBC server! " + | ||
"Make sure the HiveServer is running!"); | ||
System.exit(1); | ||
} | ||
else | ||
LOG.info("Successful!!!"); | ||
|
||
for(HiveGiraphRunnable query : queries) { | ||
LOG.info("Prepare tables..."); | ||
query.prepareHiveGiraphTables(connection); | ||
} | ||
|
||
} catch (SQLException e) { | ||
// TODO Auto-generated catch block | ||
e.printStackTrace(); | ||
System.exit(1); | ||
} | ||
} | ||
|
||
@Override | ||
public void run() { | ||
for(HiveGiraphRunnable query : queries) { | ||
query.runGiraph(hive_config); | ||
} | ||
|
||
} | ||
|
||
@Override | ||
public void cleanup() { | ||
// TODO Auto-generated method stub | ||
|
||
} | ||
|
||
public void addQuery(HiveGiraphRunnable query) { | ||
queries.add(query); | ||
} | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
37 changes: 37 additions & 0 deletions
37
...n/scala/bigframe/workflows/BusinessIntelligence/graph/exploratory/TwitterRankVertex.scala
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,37 @@ | ||
package bigframe.workflows.BusinessIntelligence.graph.exploratory | ||
|
||
import org.apache.giraph.graph.Vertex | ||
|
||
import org.apache.hadoop.io.Writable | ||
import org.apache.hadoop.io.DoubleWritable | ||
import org.apache.hadoop.io.Text | ||
|
||
import scala.collection.JavaConversions._ | ||
|
||
/**
 * Giraph vertex computing TwitterRank scores. Each superstep (after the
 * first) replaces the vertex value with the sum of incoming rank messages,
 * then propagates its value to all out-edges, weighted by edge value.
 * Vertices halt after superstep 10.
 */
class TwitterRankVertex extends Vertex[Text, DoubleWritable, DoubleWritable, DoubleWritable] {

  override def compute(messages: java.lang.Iterable[DoubleWritable]): Unit = {
    // Superstep 0 has no incoming messages; keep the initial vertex value.
    if (getSuperstep() > 0) {
      val incomingRank = messages.foldLeft(0.0)((acc, msg) => acc + msg.get)
      setValue(new DoubleWritable(incomingRank))
    }

    // Fixed iteration count: propagate for 10 supersteps, then halt.
    if (getSuperstep() < 10)
      sendMessageToAllEdges(new DoubleWritable(getValue.get))
    else
      voteToHalt()
  }

  /**
   * Sends `message` along every out-edge, scaled by that edge's weight
   * (the transition probability to the target vertex).
   */
  override def sendMessageToAllEdges(message: DoubleWritable): Unit = {
    for (edge <- getEdges.iterator) {
      val target = edge.getTargetVertexId
      sendMessage(target, new DoubleWritable(message.get * getEdgeValue(target).get))
    }
  }

}
Oops, something went wrong.