Skip to content
Browse files

Added local TDB support.

  • Loading branch information...
1 parent 48d1ce0 commit a6e56defe276159c30a36b1a2b776fdf58fbfbb9 @castagna committed Feb 22, 2011
Showing with 641 additions and 29 deletions.
  1. +294 −0 bsbm-2011-02-18.sh
  2. +20 −19 bsbm.sh
  3. +284 −0 bsbmtools.patch
  4. +7 −1 bsbmtools.sh
  5. +8 −8 fuseki.sh
  6. +28 −1 tdb.sh
View
294 bsbm-2011-02-18.sh
@@ -0,0 +1,294 @@
+#!/bin/bash
+
+##
+# Copyright © 2011 Talis Systems Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##
+
# ---------------------------------------------------------------------------
# Configuration.
# ---------------------------------------------------------------------------

# Directory the script was launched from (e.g. where bsbmtools.patch lives).
ROOT_PATH=$(pwd)
# Working area for checkouts, generated datasets and benchmark results.
BSBM_ROOT_PATH=/tmp/bsbm

# Benchmark parameters.
BSBM_NUM_QUERY_MIXES=500
BSBM_NUM_QUERY_WARM_UP=50
BSBM_SCALE_FACTOR_VALUES=( 1001 2002 )
BSBM_CONCURRENT_CLIENTS_VALUES=( 1 2 )

BSBM_SEED=1212123
BSBM_QUERY_TIMEOUT=0   # 0 = no per-query timeout

TDB_LOADER=tdbloader
FUSEKI_SPARQL_QUERY_URL="http://127.0.0.1:3030/bsbm/query"
FUSEKI_SPARQL_UPDATE_URL="http://127.0.0.1:3030/bsbm/update"

# Create the working area and results directory up front.
# mkdir -p is idempotent, replacing the previous explicit existence checks.
mkdir -p "$BSBM_ROOT_PATH/results"
+
+
free_os_caches() {
  # Flush pending writes, then ask the kernel to drop its page, dentry and
  # inode caches so every benchmark run starts cold (requires sudo).
  printf '%s\n' "== Freeing OS caches..."
  sync
  sudo bash -c "echo 3 > /proc/sys/vm/drop_caches"
  printf '%s\n' "== Done."
}
+
+
setup_bsbmtools() {
  # Check out and build the BSBM Tools test driver from SVN trunk.
  # Skipped if the checkout directory already exists.
  if [ ! -d "$BSBM_ROOT_PATH/bsbmtools" ]; then
    echo "==== Checking out and compiling BSBM Tools source code ..."
    echo "== Start: $(date +"%Y-%m-%d %H:%M:%S")"
    # Guard the cd: on failure the checkout/build would otherwise run in
    # whatever directory we happened to be in.
    cd "$BSBM_ROOT_PATH" || return 1
    svn co https://bsbmtools.svn.sourceforge.net/svnroot/bsbmtools/trunk bsbmtools
    cd "$BSBM_ROOT_PATH/bsbmtools" || return 1
    ant jar
    echo "== Finish: $(date +"%Y-%m-%d %H:%M:%S")"
  else
    echo "==== [skipped] Checking out and compiling BSBM Tools source code ..."
  fi
}
+
+
generate_bsbmtools_dataset() {
  # Generate a BSBM dataset (N-Triples file, update stream, test-driver
  # data) for the current $BSBM_SCALE_FACTOR and move it under datasets/.
  # Skipped if that dataset directory already exists.
  if [ ! -d "$BSBM_ROOT_PATH/datasets/bsbm-dataset-$BSBM_SCALE_FACTOR" ]; then
    echo "==== Generating dataset: scale=$BSBM_SCALE_FACTOR ..."
    echo "== Start: $(date +"%Y-%m-%d %H:%M:%S")"
    cd "$BSBM_ROOT_PATH/bsbmtools" || return 1
    CMD="-Xmx1024M -server benchmark.generator.Generator -fc -ud -pc $BSBM_SCALE_FACTOR -s nt"
    echo "== java -cp \"lib/*\" $CMD"
    # $CMD is intentionally unquoted: it must word-split into arguments.
    java -cp "lib/*" $CMD > "$BSBM_ROOT_PATH/results/bsbmtools-generator-$BSBM_SCALE_FACTOR.txt"
    # mkdir -p creates datasets/ and the per-scale directory in one step,
    # replacing the previous explicit existence check.
    mkdir -p "$BSBM_ROOT_PATH/datasets/bsbm-dataset-$BSBM_SCALE_FACTOR"
    mv "$BSBM_ROOT_PATH"/bsbmtools/dataset* "$BSBM_ROOT_PATH/datasets/bsbm-dataset-$BSBM_SCALE_FACTOR/"
    mv "$BSBM_ROOT_PATH/bsbmtools/td_data" "$BSBM_ROOT_PATH/datasets/bsbm-dataset-$BSBM_SCALE_FACTOR/"
    echo "== Finish: $(date +"%Y-%m-%d %H:%M:%S")"
  else
    echo "==== [skipped] Generating dataset: scale=$BSBM_SCALE_FACTOR ..."
  fi
}
+
+
run_bsbmtools_rampup() {
  # Run the BSBM test driver in ramp-up mode (8000 untimed query mixes)
  # to warm the system under test before the measured run.
  #   $1 - system-under-test name (lowercased, used in result file names)
  #   $2 - SPARQL query endpoint URL
  #   $3 - SPARQL Update endpoint URL
  #   $4 - use case: "explore" (default), "update" or "bi"
  # Skipped if the ramp-up result file for this configuration exists.
  SYSTEM_UNDER_TEST=$(echo "$1" | tr '[:upper:]' '[:lower:]')
  SPARQL_QUERY_URL=$2
  SPARQL_UPDATE_URL=$3
  USE_CASE=$4

  # Map the use case to its query-mix definition file; "explore" and any
  # unknown value share the same file.  NOTE(review): the ramp-up command
  # below does not pass -ucf, so this is only set for symmetry with
  # run_bsbmtools.
  case "$4" in
    bi)     USE_CASE_FILENAME="usecases/businessIntelligence/sparql.txt" ;;
    update) USE_CASE_FILENAME="usecases/exploreAndUpdate/sparql.txt" ;;
    *)      USE_CASE_FILENAME="usecases/explore/sparql.txt" ;;
  esac

  if [ ! -f "$BSBM_ROOT_PATH/results/$BSBM_SCALE_FACTOR-$SYSTEM_UNDER_TEST-$USE_CASE-$BSBM_CONCURRENT_CLIENTS-rampup.txt" ]; then
    echo "==== Running BSBM: sut=$SYSTEM_UNDER_TEST, scale=$BSBM_SCALE_FACTOR, clients=$BSBM_CONCURRENT_CLIENTS, usecase=$USE_CASE ..."
    echo "== Start: $(date +"%Y-%m-%d %H:%M:%S")"
    cd "$BSBM_ROOT_PATH/bsbmtools" || return 1
    RESULT_FILENAME=$BSBM_SCALE_FACTOR-$SYSTEM_UNDER_TEST-$USE_CASE-$BSBM_CONCURRENT_CLIENTS-rampup
    # Use the configured $BSBM_SEED (same value) instead of a hard-coded
    # copy that would silently diverge if the configuration changed.
    CMD="-Xmx256M benchmark.testdriver.TestDriver -rampup -runs 8000 -seed $BSBM_SEED -idir $BSBM_ROOT_PATH/datasets/bsbm-dataset-$BSBM_SCALE_FACTOR/td_data -u $SPARQL_UPDATE_URL -udataset $BSBM_ROOT_PATH/datasets/bsbm-dataset-$BSBM_SCALE_FACTOR/dataset_update.nt -o $BSBM_ROOT_PATH/results/$RESULT_FILENAME.xml $SPARQL_QUERY_URL"
    echo "== java -cp \"lib/*\" $CMD"
    # $CMD is intentionally unquoted: it must word-split into arguments.
    java -cp "lib/*" $CMD > "$BSBM_ROOT_PATH/results/$RESULT_FILENAME.txt"
    echo "== Finish: $(date +"%Y-%m-%d %H:%M:%S")"
  fi
}
+
+
run_bsbmtools() {
  # Run the measured BSBM benchmark against a SPARQL endpoint.
  #   $1 - system-under-test name (lowercased, used in result file names)
  #   $2 - SPARQL query endpoint URL
  #   $3 - SPARQL Update endpoint URL
  #   $4 - use case: "explore" (default), "update" or "bi"
  # Skipped if the result file for this configuration already exists.
  SYSTEM_UNDER_TEST=$(echo "$1" | tr '[:upper:]' '[:lower:]')
  SPARQL_QUERY_URL=$2
  SPARQL_UPDATE_URL=$3
  USE_CASE=$4

  # Map the use case to its query-mix definition file; "explore" and any
  # unknown value share the same file.
  case "$4" in
    bi)     USE_CASE_FILENAME="usecases/businessIntelligence/sparql.txt" ;;
    update) USE_CASE_FILENAME="usecases/exploreAndUpdate/sparql.txt" ;;
    *)      USE_CASE_FILENAME="usecases/explore/sparql.txt" ;;
  esac

  if [ ! -f "$BSBM_ROOT_PATH/results/$BSBM_SCALE_FACTOR-$SYSTEM_UNDER_TEST-$USE_CASE-$BSBM_CONCURRENT_CLIENTS.txt" ]; then
    echo "==== Running BSBM: sut=$SYSTEM_UNDER_TEST, scale=$BSBM_SCALE_FACTOR, clients=$BSBM_CONCURRENT_CLIENTS, usecase=$USE_CASE ..."
    echo "== Start: $(date +"%Y-%m-%d %H:%M:%S")"
    cd "$BSBM_ROOT_PATH/bsbmtools" || return 1
    RESULT_FILENAME=$BSBM_SCALE_FACTOR-$SYSTEM_UNDER_TEST-$USE_CASE-$BSBM_CONCURRENT_CLIENTS
    CMD="-Xmx256M benchmark.testdriver.TestDriver -runs $BSBM_NUM_QUERY_MIXES -w $BSBM_NUM_QUERY_WARM_UP -mt $BSBM_CONCURRENT_CLIENTS -t $BSBM_QUERY_TIMEOUT -ucf $USE_CASE_FILENAME -seed $BSBM_SEED -idir $BSBM_ROOT_PATH/datasets/bsbm-dataset-$BSBM_SCALE_FACTOR/td_data -u $SPARQL_UPDATE_URL -udataset $BSBM_ROOT_PATH/datasets/bsbm-dataset-$BSBM_SCALE_FACTOR/dataset_update.nt -o $BSBM_ROOT_PATH/results/$RESULT_FILENAME.xml $SPARQL_QUERY_URL"
    echo "== java -cp \"lib/*\" $CMD"
    # $CMD is intentionally unquoted: it must word-split into arguments.
    java -cp "lib/*" $CMD > "$BSBM_ROOT_PATH/results/$RESULT_FILENAME.txt"
    echo "== Finish: $(date +"%Y-%m-%d %H:%M:%S")"
  fi
}
+
+
load_tdb() {
  # Bulk-load the generated N-Triples dataset into a fresh TDB store for
  # the current $BSBM_SCALE_FACTOR, then capture dataset statistics
  # (stats.opt) for the TDB optimizer.  Skipped if the store exists.
  if [ ! -d "$BSBM_ROOT_PATH/datasets/tdb-$BSBM_SCALE_FACTOR" ]; then
    echo "==== Loading data in TDB: scale=$BSBM_SCALE_FACTOR ..."
    echo "== Start: $(date +"%Y-%m-%d %H:%M:%S")"
    free_os_caches
    # Make the freshly-built TDB command-line tools available.
    export TDBROOT=$BSBM_ROOT_PATH/tdb
    export PATH=$PATH:$BSBM_ROOT_PATH/tdb/bin2
    mkdir -p "$BSBM_ROOT_PATH/datasets/tdb-$BSBM_SCALE_FACTOR"
    # mkdir -p replaces the previous explicit existence check.
    mkdir -p "$BSBM_ROOT_PATH/results"
    "$TDB_LOADER" -v --loc="$BSBM_ROOT_PATH/datasets/tdb-$BSBM_SCALE_FACTOR/TDB" "$BSBM_ROOT_PATH/datasets/bsbm-dataset-$BSBM_SCALE_FACTOR/dataset.nt" > "$BSBM_ROOT_PATH/results/tdb-$BSBM_SCALE_FACTOR-tdbload.txt"
    tdbstats --loc "$BSBM_ROOT_PATH/datasets/tdb-$BSBM_SCALE_FACTOR/TDB" > "$BSBM_ROOT_PATH/datasets/tdb-$BSBM_SCALE_FACTOR/TDB/stats.opt"
    echo "== Finish: $(date +"%Y-%m-%d %H:%M:%S")"
  else
    echo "==== [skipped] Loading data in TDB: scale=$BSBM_SCALE_FACTOR ..."
  fi
}
+
+
setup_tdb() {
  # Check out and build TDB from SVN trunk with Maven.
  # Skipped if the checkout directory already exists.
  if [ ! -d "$BSBM_ROOT_PATH/tdb" ]; then
    echo "==== Checking-out and compiling TDB source code ..."
    echo "== Start: $(date +"%Y-%m-%d %H:%M:%S")"
    # Guard the cd so the checkout/build cannot run in the wrong directory.
    cd "$BSBM_ROOT_PATH" || return 1
    svn co https://jena.svn.sourceforge.net/svnroot/jena/TDB/trunk/ tdb
    cd "$BSBM_ROOT_PATH/tdb" || return 1
    mvn package
    echo "== Finish: $(date +"%Y-%m-%d %H:%M:%S")"
  else
    echo "==== [skipped] Checking-out and compiling TDB source code ..."
  fi
}
+
+
setup_fuseki() {
  # Check out and build Fuseki from SVN trunk with Maven.
  # Skipped if the checkout directory already exists.
  if [ ! -d "$BSBM_ROOT_PATH/fuseki" ]; then
    echo "==== Checking-out and compiling Fuseki source code ..."
    echo "== Start: $(date +"%Y-%m-%d %H:%M:%S")"
    # Guard the cd so the checkout/build cannot run in the wrong directory.
    cd "$BSBM_ROOT_PATH" || return 1
    svn co http://jena.svn.sourceforge.net/svnroot/jena/Fuseki/trunk fuseki
    cd "$BSBM_ROOT_PATH/fuseki" || return 1
    mvn package
    echo "== Finish: $(date +"%Y-%m-%d %H:%M:%S")"
  else
    echo "==== [skipped] Checking-out and compiling Fuseki source code ..."
  fi
}
+
+
run_fuseki() {
  # Start Fuseki in the background, serving /bsbm (with SPARQL Update
  # enabled) from the TDB store for the current scale factor.
  #   $1 - use case label, used only to name the server log file
  echo "== Starting Fuseki ..."
  java -jar "$BSBM_ROOT_PATH/fuseki/target/fuseki-0.2.0-SNAPSHOT-sys.jar" --update --loc="$BSBM_ROOT_PATH/datasets/tdb-$BSBM_SCALE_FACTOR/TDB" /bsbm &>> "$BSBM_ROOT_PATH/results/$BSBM_SCALE_FACTOR-fuseki-$1-$BSBM_CONCURRENT_CLIENTS.log" &
  # Fixed grace period for the server to bind its port before the test
  # driver connects; there is no readiness probe.
  sleep 4
  echo "== Done."
}
+
+
shutdown_fuseki() {
  # Stop any running Fuseki instance.  pgrep -f replaces the fragile
  # "ps -ef | grep fuseki | grep -v grep | awk" pipeline, and the guard
  # avoids invoking kill with an empty argument list (an error) when no
  # Fuseki process is running.
  echo "== Shutting down Fuseki ..."
  local pids
  pids=$(pgrep -f fuseki)
  if [ -n "$pids" ]; then
    # Intentionally unquoted: pgrep may return several PIDs, one per line.
    kill $pids
  fi
  sleep 1
  echo "== Done."
}
+
+
test_fuseki() {
  # Benchmark Fuseki for every use case at the current scale factor and
  # client count.  Starts with a defensive shutdown in case a previous run
  # left a server behind.  Each use case is skipped when its result file
  # already exists, so interrupted runs can be resumed.
  shutdown_fuseki

  # The three use cases followed an identical start/warm-up/run/stop
  # pattern; a single loop replaces the copy-pasted blocks.
  local use_case
  for use_case in explore update bi; do
    if [ ! -f "$BSBM_ROOT_PATH/results/$BSBM_SCALE_FACTOR-fuseki-$use_case-$BSBM_CONCURRENT_CLIENTS.txt" ]; then
      run_fuseki "$use_case"
      free_os_caches
      run_bsbmtools_rampup "fuseki" "$FUSEKI_SPARQL_QUERY_URL" "$FUSEKI_SPARQL_UPDATE_URL" "$use_case"
      run_bsbmtools "fuseki" "$FUSEKI_SPARQL_QUERY_URL" "$FUSEKI_SPARQL_UPDATE_URL" "$use_case"
      shutdown_fuseki
    else
      echo "==== [skipped] Running BSBM: sut=Fuseki, scale=$BSBM_SCALE_FACTOR, clients=$BSBM_CONCURRENT_CLIENTS, usecase=$use_case ..."
    fi
  done
}
+
+
# Main driver: build everything once, generate and load every dataset,
# then benchmark each (scale factor, client count) combination.
setup_bsbmtools
setup_tdb
setup_fuseki


for BSBM_SCALE_FACTOR in "${BSBM_SCALE_FACTOR_VALUES[@]}"
do
  generate_bsbmtools_dataset
  load_tdb
done


for BSBM_SCALE_FACTOR in "${BSBM_SCALE_FACTOR_VALUES[@]}"
do
  for BSBM_CONCURRENT_CLIENTS in "${BSBM_CONCURRENT_CLIENTS_VALUES[@]}"
  do
    test_fuseki
  done
done
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
View
39 bsbm.sh
@@ -31,8 +31,8 @@ BSBM_SEED=1212123
BSBM_QUERY_TIMEOUT=60000
source common.sh
+source tdb.sh # TDB must be before BSBM Tools (since we patch it to run local TDB tests)
source bsbmtools.sh
-source tdb.sh
source fuseki.sh
source joseki.sh
source sesame2.sh
@@ -43,40 +43,41 @@ fi
setup_bsbmtools
setup_tdb
setup_fuseki
-setup_joseki
-setup_sesame2
-if [[ -n $BIGOWLIM_HOME ]] ; then
- setup_bigowlim
-fi
+#setup_joseki
+#setup_sesame2
+#if [[ -n $BIGOWLIM_HOME ]] ; then
+# setup_bigowlim
+#fi
-BSBM_SCALE_FACTOR_VALUES=( 1001 2002 )
-BSBM_CONCURRENT_CLIENTS_VALUES=( 1 )
+BSBM_SCALE_FACTOR_VALUES=( 1000 10000 100000 )
+BSBM_CONCURRENT_CLIENTS_VALUES=( 1 4 8 32 )
-run_sesame2
+#run_sesame2
for BSBM_SCALE_FACTOR in ${BSBM_SCALE_FACTOR_VALUES[@]}
do
generate_bsbmtools_dataset
load_tdb
- load_sesame2
- if [[ -n $BIGOWLIM_HOME ]] ; then
- load_bigowlim
- fi
+# load_sesame2
+# if [[ -n $BIGOWLIM_HOME ]] ; then
+# load_bigowlim
+# fi
done
-shutdown_sesame2
+#shutdown_sesame2
for BSBM_SCALE_FACTOR in ${BSBM_SCALE_FACTOR_VALUES[@]}
do
for BSBM_CONCURRENT_CLIENTS in ${BSBM_CONCURRENT_CLIENTS_VALUES[@]}
do
+ test_tdb
test_fuseki
- test_joseki
- test_sesame2
- if [[ -n $BIGOWLIM_HOME ]] ; then
- test_bigowlim
- fi
+# test_joseki
+# test_sesame2
+# if [[ -n $BIGOWLIM_HOME ]] ; then
+# test_bigowlim
+# fi
done
done
View
284 bsbmtools.patch
@@ -0,0 +1,284 @@
+Index: src/benchmark/testdriver/TestDriver.java
+===================================================================
+--- src/benchmark/testdriver/TestDriver.java (revision 77)
++++ src/benchmark/testdriver/TestDriver.java (working copy)
+@@ -36,6 +36,8 @@
+ import org.apache.log4j.Logger;
+ import org.apache.log4j.Level;
+
++import com.talis.labs.bsbm.LocalTDBConnection;
++
+ import java.io.*;
+ import java.util.StringTokenizer;
+
+@@ -124,9 +126,14 @@
+ if (doSQL)
+ server = new SQLConnection(sparqlEndpoint, timeout,
+ driverClassName);
+- else
+- server = new SPARQLConnection(sparqlEndpoint,
+- sparqlUpdateEndpoint, defaultGraph, timeout);
++ else {
++ if ( sparqlEndpoint.startsWith(LocalTDBConnection.TDB_PREFIX) ) {
++ server = new LocalTDBConnection(sparqlEndpoint);
++ } else {
++ server = new SPARQLConnection(sparqlEndpoint,
++ sparqlUpdateEndpoint, defaultGraph, timeout);
++ }
++ }
+ } else if (multithreading) {
+ // do nothing
+ } else {
+Index: src/benchmark/testdriver/ClientManager.java
+===================================================================
+--- src/benchmark/testdriver/ClientManager.java (revision 77)
++++ src/benchmark/testdriver/ClientManager.java (working copy)
+@@ -1,5 +1,9 @@
+ package benchmark.testdriver;
+
++import com.hp.hpl.jena.query.Dataset;
++import com.hp.hpl.jena.tdb.TDBFactory;
++import com.talis.labs.bsbm.LocalTDBConnection;
++
+ public class ClientManager {
+ private int activeThreadsInWarmup;
+ private int activeThreadsInRun;
+@@ -12,6 +16,7 @@
+ protected boolean[] ignoreQueries;
+ private ClientThread[] clients;
+ private TestDriver parent;
++ private static Dataset dataset = null;
+
+ ClientManager(AbstractParameterPool pool, TestDriver parent) {
+ activeThreadsInWarmup = 0;
+@@ -34,10 +39,18 @@
+ if(parent.doSQL)
+ sConn = new SQLConnection(parent.sparqlEndpoint, parent.timeout, parent.driverClassName);
+ else {
+- if(parent.sparqlUpdateEndpoint==null)
+- sConn = new SPARQLConnection(parent.sparqlEndpoint, parent.defaultGraph, parent.timeout);
+- else
+- sConn = new SPARQLConnection(parent.sparqlEndpoint, parent.sparqlUpdateEndpoint, parent.defaultGraph, parent.timeout);
++ if ( parent.sparqlEndpoint.startsWith(LocalTDBConnection.TDB_PREFIX) ) {
++ if ( dataset == null ) {
++ String location = parent.sparqlEndpoint.substring(LocalTDBConnection.TDB_PREFIX.length()) ;
++ dataset = TDBFactory.createDataset(location) ;
++ }
++ sConn = new LocalTDBConnection(dataset);
++ } else {
++ if(parent.sparqlUpdateEndpoint==null)
++ sConn = new SPARQLConnection(parent.sparqlEndpoint, parent.defaultGraph, parent.timeout);
++ else
++ sConn = new SPARQLConnection(parent.sparqlEndpoint, parent.sparqlUpdateEndpoint, parent.defaultGraph, parent.timeout);
++ }
+ }
+
+ clients[i] = new ClientThread(pool, sConn, ignoreQueries.length, this, i+1);
+Index: src/com/talis/labs/bsbm/LocalTDBConnection.java
+===================================================================
+--- src/com/talis/labs/bsbm/LocalTDBConnection.java (revision 0)
++++ src/com/talis/labs/bsbm/LocalTDBConnection.java (revision 0)
+@@ -0,0 +1,202 @@
++/*
++ * Copyright © 2011 Talis Systems Ltd.
++ *
++ * Licensed under the Apache License, Version 2.0 (the "License");
++ * you may not use this file except in compliance with the License.
++ * You may obtain a copy of the License at
++ *
++ * http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++
++package com.talis.labs.bsbm;
++
++import org.apache.log4j.Level;
++import org.apache.log4j.Logger;
++import org.openjena.atlas.lib.NotImplemented;
++import org.xml.sax.Attributes;
++import org.xml.sax.SAXException;
++import org.xml.sax.helpers.DefaultHandler;
++
++import benchmark.qualification.QueryResult;
++import benchmark.testdriver.CompiledQuery;
++import benchmark.testdriver.CompiledQueryMix;
++import benchmark.testdriver.Query;
++import benchmark.testdriver.QueryMix;
++import benchmark.testdriver.ServerConnection;
++
++import com.hp.hpl.jena.query.Dataset;
++import com.hp.hpl.jena.query.QueryExecution;
++import com.hp.hpl.jena.query.QueryExecutionFactory;
++import com.hp.hpl.jena.query.QueryFactory;
++import com.hp.hpl.jena.query.ResultSetFormatter;
++import com.hp.hpl.jena.sparql.util.Timer;
++import com.hp.hpl.jena.tdb.TDBFactory;
++
++public class LocalTDBConnection implements ServerConnection {
++
++ public static final String TDB_PREFIX = "tdb://";
++ private static Logger logger = Logger.getLogger( LocalTDBConnection.class );
++ private Dataset dataset ;
++
++ public LocalTDBConnection(String sparqlEndpoint) {
++ String location = sparqlEndpoint.substring(TDB_PREFIX.length()) ;
++ this.dataset = TDBFactory.createDataset(location) ;
++ }
++
++ public LocalTDBConnection(Dataset dataset) {
++ this.dataset = dataset;
++ }
++
++ @Override
++ public void executeQuery(Query query, byte queryType) {
++ executeQuery(query.getQueryString(), queryType, query.getNr(), query.getQueryMix());
++ }
++
++ @Override
++ public QueryResult executeValidation(Query query, byte queryType) {
++ throw new NotImplemented();
++ }
++
++
++ @Override
++ public void executeQuery(CompiledQuery query, CompiledQueryMix queryMix) {
++ executeQuery(query.getQueryString(), query.getQueryType(), query.getNr(), queryMix);
++ }
++
++ @Override
++ public void close() {
++ }
++
++ private void executeQuery(String queryString, byte queryType, int queryNr, QueryMix queryMix) {
++ double timeInSeconds = 0.0 ;
++ int queryMixRun = queryMix.getRun() + 1;
++
++ int resultCount = 0;
++ Timer timer = new Timer() ;
++ timer.startTimer() ;
++ try {
++ resultCount = executeQuery1(queryString, dataset) ;
++ } catch (Throwable th) {
++ System.err.println("Throwable: "+th.getMessage()) ;
++ long timeMilli = timer.readTimer() ;
++ System.out.println("Query " + queryNr + ": " + (timeMilli/1000.0) + " seconds timeout!");
++ queryMix.reportTimeOut(); //inc. timeout counter
++ queryMix.setCurrent(0, 1.0*timeMilli);
++
++ }
++ long timeMilli = timer.endTimer() ;
++ timeInSeconds = timeMilli/1000.0 ; // qe.getExecutionTimeInSeconds();
++
++ if(logger.isEnabledFor( Level.ALL ) && queryMixRun > 0)
++ logResultInfo(queryNr, queryMixRun, timeInSeconds, queryString, queryType, resultCount);
++ queryMix.setCurrent(resultCount, timeInSeconds);
++ }
++
++ private void executeQuery(String queryString, byte queryType, int queryNr, CompiledQueryMix queryMix) {
++ double timeInSeconds = 0.0 ;
++ int queryMixRun = queryMix.getRun() + 1;
++
++ int resultCount = 0;
++ Timer timer = new Timer() ;
++ timer.startTimer() ;
++ try {
++ resultCount = executeQuery1(queryString, dataset) ;
++ } catch (Throwable th) {
++ System.err.println("Throwable: "+th.getMessage()) ;
++ long timeMilli = timer.readTimer() ;
++ System.out.println("Query " + queryNr + ": " + (timeMilli/1000.0) + " seconds timeout!");
++ queryMix.reportTimeOut(); //inc. timeout counter
++ queryMix.setCurrent(0, 1.0*timeMilli);
++
++ }
++ long timeMilli = timer.endTimer() ;
++ timeInSeconds = timeMilli/1000.0 ; // qe.getExecutionTimeInSeconds();
++
++ if(logger.isEnabledFor( Level.ALL ) && queryMixRun > 0)
++ logResultInfo(queryNr, queryMixRun, timeInSeconds, queryString, queryType, resultCount);
++ queryMix.setCurrent(resultCount, timeInSeconds);
++ }
++
++ private static int executeQuery1(String queryString, Dataset dataset) {
++ com.hp.hpl.jena.query.Query query = QueryFactory.create(queryString) ;
++ QueryExecution queryExecution = QueryExecutionFactory.create(query, dataset) ;
++ if ( query.isSelectType() )
++ return doSelectQuery(query, queryExecution) ;
++ if ( query.isDescribeType() )
++ doDescribeQuery(query, queryExecution) ;
++ else if ( query.isConstructType() )
++ doConstructQuery(query, queryExecution) ;
++// else if ( query.isAskType() )
++// doAskQuery(query, queryExecution) ;
++ else
++ System.err.println("Unsupported query type: "+query.getQueryType()) ;
++ queryExecution.close() ;
++ return -1 ;
++ }
++
++ private static int doSelectQuery(com.hp.hpl.jena.query.Query query, QueryExecution queryExecution) {
++ try {
++ return ResultSetFormatter.consume(queryExecution.execSelect()) ;
++ } finally { queryExecution.close(); }
++ }
++
++ private static void doConstructQuery(com.hp.hpl.jena.query.Query query, QueryExecution queryExecution) {
++ try {
++ queryExecution.execConstruct() ;
++ } finally { queryExecution.close(); }
++ }
++
++ private static void doDescribeQuery(com.hp.hpl.jena.query.Query query, QueryExecution queryExecution) {
++ try {
++ queryExecution.execDescribe() ;
++ } finally { queryExecution.close(); }
++ }
++
++ private void logResultInfo(int queryNr, int queryMixRun, double timeInSeconds, String queryString, byte queryType, int resultCount) {
++ StringBuffer sb = new StringBuffer(1000);
++ sb.append("\n\n\tQuery " + queryNr + " of run " + queryMixRun + " has been executed ");
++ sb.append("in " + String.format("%.6f",timeInSeconds) + " seconds.\n" );
++ sb.append("\n\tQuery string:\n\n");
++ sb.append(queryString);
++ sb.append("\n\n");
++
++ //Log results
++ if(queryType==Query.DESCRIBE_TYPE)
++ sb.append("\tQuery(Describe) result (" + resultCount + " Bytes): \n\n");
++ else if(queryType==Query.CONSTRUCT_TYPE)
++ sb.append("\tQuery(Construct) result (" + resultCount + " Bytes): \n\n");
++ else
++ sb.append("\tQuery results (" + resultCount + " results): \n\n");
++
++
++ sb.append("\n__________________________________________________________________________________\n");
++ logger.log(Level.ALL, sb.toString());
++ }
++
++ class ResultHandler extends DefaultHandler {
++ private int count;
++
++ ResultHandler() {
++ count = 0;
++ }
++
++ @Override
++ public void startElement( String namespaceURI, String localName, String qName, Attributes attrs ) throws SAXException {
++ if(qName.equals("result"))
++ count++;
++ }
++
++ public int getCount() {
++ return count;
++ }
++ }
++
++}
++
++
+
View
8 bsbmtools.sh
@@ -24,6 +24,11 @@ setup_bsbmtools() {
cd $BSBM_ROOT_PATH
svn co https://bsbmtools.svn.sourceforge.net/svnroot/bsbmtools/trunk bsbmtools
cd $BSBM_ROOT_PATH/bsbmtools
+ # Patching BSBM Tools to run TDB locally
+ cp $BSBM_ROOT_PATH/tdb/lib/*.jar $BSBM_ROOT_PATH/bsbmtools/lib
+ cp $BSBM_ROOT_PATH/tdb/target/*.jar $BSBM_ROOT_PATH/bsbmtools/lib
+ patch -p0 < $ROOT_PATH/bsbmtools.patch
+ # Compiling BSBM Tools
ant jar
echo "== Finish: $(date +"%Y-%m-%d %H:%M:%S")"
else
@@ -43,6 +48,7 @@ generate_bsbmtools_dataset() {
fi
mkdir $BSBM_ROOT_PATH/datasets/bsbm-dataset-$BSBM_SCALE_FACTOR
mv $BSBM_ROOT_PATH/bsbmtools/dataset* $BSBM_ROOT_PATH/datasets/bsbm-dataset-$BSBM_SCALE_FACTOR
+ mv $BSBM_ROOT_PATH/bsbmtools/td_data $BSBM_ROOT_PATH/datasets/bsbm-dataset-$BSBM_SCALE_FACTOR
echo "== Finish: $(date +"%Y-%m-%d %H:%M:%S")"
else
echo "==== [skipped] Generating dataset: scale=$BSBM_SCALE_FACTOR ..."
@@ -75,7 +81,7 @@ run_bsbmtools() {
echo "== Start: $(date +"%Y-%m-%d %H:%M:%S")"
cd $BSBM_ROOT_PATH/bsbmtools
RESULT_FILENAME=$BSBM_SCALE_FACTOR-$SYSTEM_UNDER_TEST-$USE_CASE-$BSBM_CONCURRENT_CLIENTS
- CMD="-Xmx256M benchmark.testdriver.TestDriver -runs $BSBM_NUM_QUERY_MIXES -w $BSBM_NUM_QUERY_WARM_UP -mt $BSBM_CONCURRENT_CLIENTS -t $BSBM_QUERY_TIMEOUT -ucf $USE_CASE_FILENAME -seed $BSBM_SEED -u $SPARQL_UPDATE_URL -udataset $BSBM_ROOT_PATH/datasets/bsbm-dataset-$BSBM_SCALE_FACTOR/dataset_update.nt -o $BSBM_ROOT_PATH/results/$RESULT_FILENAME.xml $SPARQL_QUERY_URL"
+ CMD="-Xmx256M benchmark.testdriver.TestDriver -idir $BSBM_ROOT_PATH/datasets/bsbm-dataset-$BSBM_SCALE_FACTOR/td_data -runs $BSBM_NUM_QUERY_MIXES -w $BSBM_NUM_QUERY_WARM_UP -mt $BSBM_CONCURRENT_CLIENTS -t $BSBM_QUERY_TIMEOUT -ucf $USE_CASE_FILENAME -seed $BSBM_SEED -u $SPARQL_UPDATE_URL -udataset $BSBM_ROOT_PATH/datasets/bsbm-dataset-$BSBM_SCALE_FACTOR/dataset_update.nt -o $BSBM_ROOT_PATH/results/$RESULT_FILENAME.xml $SPARQL_QUERY_URL"
echo "== java -cp \"lib/*\" $CMD"
java -cp "lib/*" $CMD > $BSBM_ROOT_PATH/results/$RESULT_FILENAME.txt
echo "== Finish: $(date +"%Y-%m-%d %H:%M:%S")"
View
16 fuseki.sh
@@ -74,14 +74,14 @@ test_fuseki() {
# echo "==== [skipped] Running BSBM: sut=Fuseki, scale=$BSBM_SCALE_FACTOR, clients=$BSBM_CONCURRENT_CLIENTS, usecase=update ..."
# fi
- if [ ! -f "$BSBM_ROOT_PATH/results/$BSBM_SCALE_FACTOR-fuseki-bi-$BSBM_CONCURRENT_CLIENTS.txt" ]; then
- run_fuseki "bi"
- free_os_caches
- run_bsbmtools "fuseki" $FUSEKI_SPARQL_QUERY_URL $FUSEKI_SPARQL_UPDATE_URL "bi"
- shutdown_fuseki
- else
- echo "==== [skipped] Running BSBM: sut=Fuseki, scale=$BSBM_SCALE_FACTOR, clients=$BSBM_CONCURRENT_CLIENTS, usecase=bi ..."
- fi
+# if [ ! -f "$BSBM_ROOT_PATH/results/$BSBM_SCALE_FACTOR-fuseki-bi-$BSBM_CONCURRENT_CLIENTS.txt" ]; then
+# run_fuseki "bi"
+# free_os_caches
+# run_bsbmtools "fuseki" $FUSEKI_SPARQL_QUERY_URL $FUSEKI_SPARQL_UPDATE_URL "bi"
+# shutdown_fuseki
+# else
+# echo "==== [skipped] Running BSBM: sut=Fuseki, scale=$BSBM_SCALE_FACTOR, clients=$BSBM_CONCURRENT_CLIENTS, usecase=bi ..."
+# fi
}
View
29 tdb.sh
@@ -19,7 +19,6 @@
TDB_LOADER=tdbloader
-
load_tdb() {
if [ ! -d "$BSBM_ROOT_PATH/datasets/tdb-$BSBM_SCALE_FACTOR" ]; then
echo "==== Loading data in TDB: scale=$BSBM_SCALE_FACTOR ..."
@@ -53,3 +52,31 @@ setup_tdb() {
echo "==== [skipped] Checking-out and compiling TDB source code ..."
fi
}
+
+
+test_tdb() {
+
+ TDB_SPARQL_QUERY_URL="tdb://$BSBM_ROOT_PATH/datasets/tdb-$BSBM_SCALE_FACTOR/TDB"
+ TDB_SPARQL_UPDATE_URL="tdb://$BSBM_ROOT_PATH/datasets/tdb-$BSBM_SCALE_FACTOR/TDB"
+
+ if [ ! -f "$BSBM_ROOT_PATH/results/$BSBM_SCALE_FACTOR-tdb-explore-$BSBM_CONCURRENT_CLIENTS.txt" ]; then
+ free_os_caches
+ run_bsbmtools "tdb" $TDB_SPARQL_QUERY_URL $TDB_SPARQL_UPDATE_URL "explore"
+ else
+ echo "==== [skipped] Running BSBM: sut=TDB, scale=$BSBM_SCALE_FACTOR, clients=$BSBM_CONCURRENT_CLIENTS, usecase=explore ..."
+ fi
+
+# if [ ! -f "$BSBM_ROOT_PATH/results/$BSBM_SCALE_FACTOR-fuseki-update-$BSBM_CONCURRENT_CLIENTS.txt" ]; then
+# free_os_caches
+# run_bsbmtools "tdb" $TDB_SPARQL_QUERY_URL $TDB_SPARQL_UPDATE_URL "update"
+# else
+# echo "==== [skipped] Running BSBM: sut=TDB, scale=$BSBM_SCALE_FACTOR, clients=$BSBM_CONCURRENT_CLIENTS, usecase=update ..."
+# fi
+
+# if [ ! -f "$BSBM_ROOT_PATH/results/$BSBM_SCALE_FACTOR-fuseki-bi-$BSBM_CONCURRENT_CLIENTS.txt" ]; then
+# free_os_caches
+# run_bsbmtools "tdb" $TDB_SPARQL_QUERY_URL $TDB_SPARQL_UPDATE_URL "bi"
+# else
+# echo "==== [skipped] Running BSBM: sut=TDB, scale=$BSBM_SCALE_FACTOR, clients=$BSBM_CONCURRENT_CLIENTS, usecase=bi ..."
+# fi
+}

0 comments on commit a6e56de

Please sign in to comment.
Something went wrong with that request. Please try again.