
Add linter to Arcanist config. Fix whitespace errors

Summary:
Add an Arcanist text linter to detect basic formatting problems such as
tabs and trailing whitespace.

Also clean up existing whitespace errors so that future diffs are not
cluttered by incidental whitespace fixes.

Test Plan: Still compiles ok

Reviewers: emayanke, dhruba

Reviewed By: emayanke

Differential Revision: https://reviews.facebook.net/D8301
1 parent e9c7573 · commit 6e201764a5de5b477442309586e1784fac1c7906 · committed by @timarmstrong
Showing with 1,238 additions and 1,235 deletions.
  1. +3 −1 .arcconfig
  2. +12 −12 src/java/com/facebook/LinkBench/Config.java
  3. +33 −33 src/java/com/facebook/LinkBench/ConfigUtil.java
  4. +1 −1 src/java/com/facebook/LinkBench/GraphStore.java
  5. +10 −10 src/java/com/facebook/LinkBench/InvertibleShuffler.java
  6. +5 −5 src/java/com/facebook/LinkBench/Link.java
  7. +70 −71 src/java/com/facebook/LinkBench/LinkBenchDriver.java
  8. +8 −8 src/java/com/facebook/LinkBench/LinkBenchDriverMR.java
  9. +64 −64 src/java/com/facebook/LinkBench/LinkBenchLoad.java
  10. +3 −3 src/java/com/facebook/LinkBench/LinkBenchOp.java
  11. +115 −115 src/java/com/facebook/LinkBench/LinkBenchRequest.java
  12. +1 −1 src/java/com/facebook/LinkBench/LinkCount.java
  13. +23 −23 src/java/com/facebook/LinkBench/LinkStore.java
  14. +69 −69 src/java/com/facebook/LinkBench/LinkStoreMysql.java
  15. +38 −38 src/java/com/facebook/LinkBench/LinkStoreRocksDb.java
  16. +34 −34 src/java/com/facebook/LinkBench/MemoryLinkStore.java
  17. +2 −2 src/java/com/facebook/LinkBench/Node.java
  18. +31 −31 src/java/com/facebook/LinkBench/NodeLoader.java
  19. +13 −13 src/java/com/facebook/LinkBench/NodeStore.java
  20. +22 −22 src/java/com/facebook/LinkBench/RealDistribution.java
  21. +7 −7 src/java/com/facebook/LinkBench/Shuffler.java
  22. +2 −2 src/java/com/facebook/LinkBench/Timer.java
  23. +16 −16 src/java/com/facebook/LinkBench/distributions/AccessDistributions.java
  24. +3 −3 src/java/com/facebook/LinkBench/distributions/ApproxHarmonic.java
  25. +10 −10 src/java/com/facebook/LinkBench/distributions/GeometricDistribution.java
  26. +1 −1 src/java/com/facebook/LinkBench/distributions/Harmonic.java
  27. +29 −29 src/java/com/facebook/LinkBench/distributions/ID2Chooser.java
  28. +5 −5 src/java/com/facebook/LinkBench/distributions/LinkDistributions.java
  29. +8 −8 src/java/com/facebook/LinkBench/distributions/LogNormalDistribution.java
  30. +24 −24 src/java/com/facebook/LinkBench/distributions/PiecewiseLinearDistribution.java
  31. +7 −7 src/java/com/facebook/LinkBench/distributions/ProbabilityDistribution.java
  32. +14 −14 src/java/com/facebook/LinkBench/distributions/UniformDistribution.java
  33. +27 −27 src/java/com/facebook/LinkBench/distributions/ZipfDistribution.java
  34. +3 −3 src/java/com/facebook/LinkBench/generators/DataGenerator.java
  35. +27 −26 src/java/com/facebook/LinkBench/generators/MotifDataGenerator.java
  36. +6 −6 src/java/com/facebook/LinkBench/generators/UniformDataGenerator.java
  37. +40 −40 src/java/com/facebook/LinkBench/stats/LatencyStats.java
  38. +6 −6 src/java/com/facebook/LinkBench/stats/RunningMean.java
  39. +19 −19 src/java/com/facebook/LinkBench/stats/SampledStats.java
  40. +4 −4 src/java/com/facebook/LinkBench/util/ClassLoadUtil.java
  41. +1 −1 src/java/com/facebook/rocks/swift/Code.java
  42. +1 −1 src/java/com/facebook/rocks/swift/CompressionType.java
  43. +1 −1 src/java/com/facebook/rocks/swift/IOError.java
  44. +1 −1 src/java/com/facebook/rocks/swift/ReadOptions.java
  45. +1 −1 src/java/com/facebook/rocks/swift/ResultSnapshot.java
  46. +1 −1 src/java/com/facebook/rocks/swift/RocksIterateResponse.java
  47. +1 −1 src/java/com/facebook/rocks/swift/Snapshot.java
  48. +1 −1 src/java/com/facebook/rocks/swift/WriteOptions.java
  49. +39 −39 src/test/java/com/facebook/LinkBench/DistributionTestBase.java
  50. +10 −10 src/test/java/com/facebook/LinkBench/DummyLinkStore.java
  51. +2 −1 src/test/java/com/facebook/LinkBench/DummyLinkStoreTest.java
  52. +8 −8 src/test/java/com/facebook/LinkBench/GeneratedDataDump.java
  53. +1 −1 src/test/java/com/facebook/LinkBench/GeomDistTest.java
  54. +45 −45 src/test/java/com/facebook/LinkBench/GraphStoreTestBase.java
  55. +4 −4 src/test/java/com/facebook/LinkBench/HarmonicTest.java
  56. +20 −20 src/test/java/com/facebook/LinkBench/ID2ChooserTest.java
  57. +13 −13 src/test/java/com/facebook/LinkBench/InvertibleShufflerTest.java
  58. +130 −130 src/test/java/com/facebook/LinkBench/LinkStoreTestBase.java
  59. +3 −3 src/test/java/com/facebook/LinkBench/LogNormalTest.java
  60. +6 −6 src/test/java/com/facebook/LinkBench/MemoryLinkStoreTest.java
  61. +1 −1 src/test/java/com/facebook/LinkBench/MySqlGraphStoreTest.java
  62. +6 −6 src/test/java/com/facebook/LinkBench/MySqlLinkStoreTest.java
  63. +1 −1 src/test/java/com/facebook/LinkBench/MySqlNodeStoreTest.java
  64. +11 −11 src/test/java/com/facebook/LinkBench/MySqlTestConfig.java
  65. +27 −27 src/test/java/com/facebook/LinkBench/NodeStoreTestBase.java
  66. +6 −6 src/test/java/com/facebook/LinkBench/PiecewiseDistTest.java
  67. +13 −13 src/test/java/com/facebook/LinkBench/TestAccessDistribution.java
  68. +27 −27 src/test/java/com/facebook/LinkBench/TestDataGen.java
  69. +21 −21 src/test/java/com/facebook/LinkBench/TestRealDistribution.java
  70. +1 −1 src/test/java/com/facebook/LinkBench/TestStats.java
  71. +11 −11 src/test/java/com/facebook/LinkBench/TimerTest.java
  72. +2 −2 src/test/java/com/facebook/LinkBench/UniformDistTest.java
  73. +7 −7 src/test/java/com/facebook/LinkBench/ZipfDistTest.java
4 .arcconfig
@@ -1,6 +1,8 @@
{
"project_id" : "linkbench",
"conduit_uri" : "https://reviews.facebook.net/",
- "copyright_holder" : ""
+ "copyright_holder" : "",
+ "lint.engine" : "ArcanistSingleLintEngine",
+ "lint.engine.single.linter" : "ArcanistTextLinter"
}
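
For reference, the resulting .arcconfig after this change, assembled from the hunk above, is:

{
  "project_id" : "linkbench",
  "conduit_uri" : "https://reviews.facebook.net/",
  "copyright_holder" : "",
  "lint.engine" : "ArcanistSingleLintEngine",
  "lint.engine.single.linter" : "ArcanistTextLinter"
}

With ArcanistSingleLintEngine dispatching every file to ArcanistTextLinter, running arc lint (typically also run as part of arc diff) should flag tabs, trailing whitespace, and similar basic formatting problems in changed files, which is what motivates the bulk whitespace cleanup in the remaining files below.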
24 src/java/com/facebook/LinkBench/Config.java
@@ -24,17 +24,17 @@
public class Config {
public static final String DEBUGLEVEL = "debuglevel";
-
+
/* Control store implementations used */
public static final String LINKSTORE_CLASS = "linkstore";
public static final String NODESTORE_CLASS = "nodestore";
-
+
/* Schema and tables used */
public static final String DBID = "dbid";
public static final String LINK_TABLE = "linktable";
public static final String COUNT_TABLE = "counttable";
public static final String NODE_TABLE = "nodetable";
-
+
/* Control graph structure */
public static final String LOAD_RANDOM_SEED = "load_random_seed";
public static final String MIN_ID = "startid1";
@@ -46,7 +46,7 @@
public static final String NLINKS_CONFIG = "nlinks_config";
public static final String NLINKS_DEFAULT = "nlinks_default";
public static final String LINK_TYPE_COUNT ="link_type_count";
-
+
/* Data generation */
public static final String LINK_DATASIZE = "link_datasize";
public static final String NODE_DATASIZE = "node_datasize";
@@ -69,11 +69,11 @@
/* Loading performance tuning */
public static final String NUM_LOADERS = "loaders";
public static final String LOADER_CHUNK_SIZE = "loader_chunk_size";
-
+
/* Request workload */
public static final String NUM_REQUESTERS = "requesters";
public static final String REQUEST_RANDOM_SEED = "request_random_seed";
-
+
// Distribution of accesses to IDs
public static final String READ_CONFIG_PREFIX = "read_";
public static final String WRITE_CONFIG_PREFIX = "write_";
@@ -93,11 +93,11 @@
public static final String WRITE_UNCORR_FUNCTION = WRITE_UNCORR_CONFIG_PREFIX
+ ACCESS_FUNCTION_SUFFIX;
public static final String BLEND_SUFFIX = "blend";
- public static final String READ_UNCORR_BLEND = READ_UNCORR_CONFIG_PREFIX
+ public static final String READ_UNCORR_BLEND = READ_UNCORR_CONFIG_PREFIX
+ BLEND_SUFFIX;
public static final String WRITE_UNCORR_BLEND = WRITE_UNCORR_CONFIG_PREFIX
+ BLEND_SUFFIX;
-
+
// Probability of different operations
public static final String PR_ADD_LINK = "addlink";
public static final String PR_DELETE_LINK = "deletelink";
@@ -120,20 +120,20 @@
public static final String LINK_MULTIGET_DIST_MIN = "link_multiget_dist_min";
public static final String LINK_MULTIGET_DIST_MAX = "link_multiget_dist_max";
public static final String LINK_MULTIGET_DIST_PREFIX = "link_multiget_dist_";
-
+
/* Probability distribution parameters */
public static final String PROB_MEAN = "mean";
-
+
/* Statistics collection and reporting */
public static final String MAX_STAT_SAMPLES = "maxsamples";
public static final String DISPLAY_FREQ = "displayfreq";
public static final String MAPRED_REPORT_PROGRESS = "reportprogress";
public static final String PROGRESS_FREQ = "progressfreq";
-
+
/* Reporting for progress indicators */
public static String REQ_PROG_INTERVAL = "req_progress_interval";
public static String LOAD_PROG_INTERVAL = "load_progress_interval";
-
+
/* MapReduce specific configuration */
public static final String TEMPDIR = "tempdir";
public static final String LOAD_DATA = "loaddata";
66 src/java/com/facebook/LinkBench/ConfigUtil.java
@@ -29,9 +29,9 @@
public class ConfigUtil {
public static final String linkbenchHomeEnvVar = "LINKBENCH_HOME";
public static final String LINKBENCH_LOGGER = "com.facebook.linkbench";
-
+
/**
- * @return null if not set, or if not valid path
+ * @return null if not set, or if not valid path
*/
public static String findLinkBenchHome() {
String linkBenchHome = System.getenv("LINKBENCH_HOME");
@@ -43,18 +43,18 @@ public static String findLinkBenchHome() {
}
return null;
}
-
+
public static Level getDebugLevel(Properties props)
throws LinkBenchConfigError {
if (props == null) {
return Level.DEBUG;
}
String levStr = props.getProperty(Config.DEBUGLEVEL);
-
+
if (levStr == null) {
return Level.DEBUG;
}
-
+
try {
int level = Integer.parseInt(levStr);
if (level <= 0) {
@@ -69,19 +69,19 @@ public static Level getDebugLevel(Properties props)
if (lev != null) {
return lev;
} else {
- throw new LinkBenchConfigError("Invalid setting for debug level: " +
+ throw new LinkBenchConfigError("Invalid setting for debug level: " +
levStr);
- }
+ }
}
}
-
+
/**
* Setup log4j to log to stderr with a timestamp and thread id
* Could add in configuration from file later if it was really necessary
- * @param props
+ * @param props
* @param logFile if not null, info logging will be diverted to this file
- * @throws IOException
- * @throws Exception
+ * @throws IOException
+ * @throws Exception
*/
public static void setupLogging(Properties props, String logFile)
throws LinkBenchConfigError, IOException {
@@ -91,9 +91,9 @@ public static void setupLogging(Properties props, String logFile)
Logger lbLogger = Logger.getLogger(LINKBENCH_LOGGER);
lbLogger.setLevel(logLevel);
ConsoleAppender console = new ConsoleAppender(fmt, "System.err");
-
- /* If logfile is specified, put full stream in logfile and only
- * print important messages to terminal
+
+ /* If logfile is specified, put full stream in logfile and only
+ * print important messages to terminal
*/
if (logFile != null) {
console.setThreshold(Level.WARN); // Only print salient messages
@@ -101,7 +101,7 @@ public static void setupLogging(Properties props, String logFile)
}
lbLogger.addAppender(console);
}
-
+
/**
* Look up key in props, failing if not present
* @param props
@@ -118,12 +118,12 @@ public static String getPropertyRequired(Properties props, String key)
}
return v;
}
-
+
public static int getInt(Properties props, String key)
throws LinkBenchConfigError {
return getInt(props, key, null);
}
-
+
/**
* Retrieve a config key and convert to integer
* @param props
@@ -137,19 +137,19 @@ public static int getInt(Properties props, String key, Integer defaultVal)
return defaultVal;
}
String v = getPropertyRequired(props, key);
- try {
+ try {
return Integer.parseInt(v);
} catch (NumberFormatException e) {
- throw new LinkBenchConfigError("Expected configuration key " + key +
+ throw new LinkBenchConfigError("Expected configuration key " + key +
" to be integer, but was '" + v + "'");
}
}
-
+
public static long getLong(Properties props, String key)
throws LinkBenchConfigError {
- return getLong(props, key, null);
+ return getLong(props, key, null);
}
-
+
/**
* Retrieve a config key and convert to long integer
* @param props
@@ -164,20 +164,20 @@ public static long getLong(Properties props, String key, Long defaultVal)
return defaultVal;
}
String v = getPropertyRequired(props, key);
- try {
+ try {
return Long.parseLong(v);
} catch (NumberFormatException e) {
- throw new LinkBenchConfigError("Expected configuration key " + key +
+ throw new LinkBenchConfigError("Expected configuration key " + key +
" to be long integer, but was '" + v + "'");
}
}
-
-
- public static double getDouble(Properties props, String key)
+
+
+ public static double getDouble(Properties props, String key)
throws LinkBenchConfigError {
return getDouble(props, key, null);
}
-
+
/**
* Retrieve a config key and convert to double
* @param props
@@ -192,17 +192,17 @@ public static double getDouble(Properties props, String key,
return defaultVal;
}
String v = getPropertyRequired(props, key);
- try {
+ try {
return Double.parseDouble(v);
} catch (NumberFormatException e) {
- throw new LinkBenchConfigError("Expected configuration key " + key +
+ throw new LinkBenchConfigError("Expected configuration key " + key +
" to be double, but was '" + v + "'");
}
}
-
+
/**
* Retrieve a config key and convert to boolean.
- * Valid boolean strings are "true" or "false", case insensitive
+ * Valid boolean strings are "true" or "false", case insensitive
* @param props
* @param key
* @return
@@ -217,7 +217,7 @@ public static boolean getBool(Properties props, String key)
} else if (v.equals("false")) {
return false;
} else {
- throw new LinkBenchConfigError("Expected configuration key " + key +
+ throw new LinkBenchConfigError("Expected configuration key " + key +
" to be true or false, but was '" + v + "'");
}
}
2 src/java/com/facebook/LinkBench/GraphStore.java
@@ -18,7 +18,7 @@
import java.util.List;
/**
- * An abstract class for storing both nodes and edges
+ * An abstract class for storing both nodes and edges
* @author tarmstrong
*/
public abstract class GraphStore extends LinkStore implements NodeStore {
20 src/java/com/facebook/LinkBench/InvertibleShuffler.java
@@ -21,13 +21,13 @@
* Shuffler designed to make computing permutation and inverse easy
*/
public class InvertibleShuffler {
- private final long[] params;
+ private final long[] params;
private final int shuffleGroups;
long n;
long nRoundedUp; // n rounded up to next multiple of shuffleGroups
long nRoundedDown; // n rounded down to next multiple of shuffleGroups
int minGroupSize;
-
+
public InvertibleShuffler(long seed, int shuffleGroups, long n) {
this(new Random(seed), shuffleGroups, n);
}
@@ -40,23 +40,23 @@ public InvertibleShuffler(Random rng, int shuffleGroups, long n) {
this.n = n;
this.params = new long[shuffleGroups];
this.minGroupSize = (int)n / shuffleGroups;
-
+
for (int i = 0; i < shuffleGroups; i++) {
// Positive long
params[i] = Math.abs(rng.nextInt(minGroupSize));
}
- this.nRoundedDown = (n / shuffleGroups) * shuffleGroups;
+ this.nRoundedDown = (n / shuffleGroups) * shuffleGroups;
this.nRoundedUp = n == nRoundedDown ? n : nRoundedDown + shuffleGroups;
}
-
+
public long permute(long i) {
return permute(i, false);
}
-
+
public long invertPermute(long i) {
return permute(i, true);
}
-
+
public long permute(long i, boolean inverse) {
if (i < 0 || i >= n) {
throw new IllegalArgumentException("Bad index to permute: " + i
@@ -64,10 +64,10 @@ public long permute(long i, boolean inverse) {
}
// Number of the group
int group = (int) (i % shuffleGroups);
-
+
// Whether this is a big or small group
boolean bigGroup = group < n % shuffleGroups;
-
+
// Calculate the (positive) rotation
long rotate = params[group];
if (inverse) {
@@ -79,7 +79,7 @@ public long permute(long i, boolean inverse) {
}
assert(rotate >= 0);
}
-
+
long j = (i + shuffleGroups * rotate);
long result;
if (j < n) {
10 src/java/com/facebook/LinkBench/Link.java
@@ -48,14 +48,14 @@ public boolean equals(Object other) {
return false;
}
}
-
+
public String toString() {
return String.format("Link(id1=%d, id2=%d, link_type=%d," +
- "visibility=%d, version=%d," +
- "time=%d, data=%s", id1, id2, link_type,
- visibility, version, time, data.toString());
+ "visibility=%d, version=%d," +
+ "time=%d, data=%s", id1, id2, link_type,
+ visibility, version, time, data.toString());
}
-
+
/**
* Clone an existing link
* @param l
141 src/java/com/facebook/LinkBench/LinkBenchDriver.java
@@ -60,10 +60,10 @@ Then does read and write requests of various types (addlink, deletelink,
*/
public class LinkBenchDriver {
-
+
public static final int EXIT_BADARGS = 1;
public static final int EXIT_BADCONFIG = 2;
-
+
/* Command line arguments */
private static String configFile = null;
private static String workloadConfigFile = null;
@@ -75,10 +75,10 @@ Then does read and write requests of various types (addlink, deletelink,
private static PrintStream csvStreamFile = null;
private static boolean doLoad = false;
private static boolean doRequest = false;
-
+
private Properties props;
-
- private final Logger logger = Logger.getLogger(ConfigUtil.LINKBENCH_LOGGER);
+
+ private final Logger logger = Logger.getLogger(ConfigUtil.LINKBENCH_LOGGER);
LinkBenchDriver(String configfile, Properties
overrideProps, String logFile)
@@ -89,9 +89,9 @@ Then does read and write requests of various types (addlink, deletelink,
for (String key: overrideProps.stringPropertyNames()) {
props.setProperty(key, overrideProps.getProperty(key));
}
-
+
loadWorkloadProps();
-
+
ConfigUtil.setupLogging(props, logFile);
logger.info("Config file: " + configfile);
@@ -146,7 +146,7 @@ private Stores initStores()
throws Exception {
LinkStore linkStore = createLinkStore();
NodeStore nodeStore = createNodeStore(linkStore);
-
+
return new Stores(linkStore, nodeStore);
}
@@ -156,20 +156,20 @@ private LinkStore createLinkStore() throws Exception, IOException {
// for easy access:
// LinkStoreMysql : run benchmark on mySQL
// LinkStoreHBaseGeneralAtomicityTesting : atomicity testing on HBase.
-
- String linkStoreClassName = ConfigUtil.getPropertyRequired(props,
+
+ String linkStoreClassName = ConfigUtil.getPropertyRequired(props,
Config.LINKSTORE_CLASS);
-
+
logger.debug("Using LinkStore implementation: " + linkStoreClassName);
-
+
LinkStore linkStore;
try {
- linkStore = ClassLoadUtil.newInstance(linkStoreClassName,
+ linkStore = ClassLoadUtil.newInstance(linkStoreClassName,
LinkStore.class);
} catch (ClassNotFoundException nfe) {
throw new IOException("Cound not find class for " + linkStoreClassName);
}
-
+
return linkStore;
}
@@ -188,12 +188,12 @@ private NodeStore createNodeStore(LinkStore linkStore) throws Exception,
} else {
logger.debug("Using NodeStore implementation: " + nodeStoreClassName);
}
-
+
if (linkStore != null && linkStore.getClass().getName().equals(
nodeStoreClassName)) {
// Same class, reuse object
if (!NodeStore.class.isAssignableFrom(linkStore.getClass())) {
- throw new Exception("Specified NodeStore class " + nodeStoreClassName
+ throw new Exception("Specified NodeStore class " + nodeStoreClassName
+ " is not a subclass of NodeStore");
}
return (NodeStore)linkStore;
@@ -217,39 +217,39 @@ void load() throws IOException, InterruptedException, Throwable {
}
// load data
int nLinkLoaders = ConfigUtil.getInt(props, Config.NUM_LOADERS);
-
+
boolean bulkLoad = true;
BlockingQueue<LoadChunk> chunk_q = new LinkedBlockingQueue<LoadChunk>();
-
+
// max id1 to generate
long maxid1 = ConfigUtil.getLong(props, Config.MAX_ID);
// id1 at which to start
long startid1 = ConfigUtil.getLong(props, Config.MIN_ID);
-
+
// Create loaders
logger.info("Starting loaders " + nLinkLoaders);
logger.debug("Bulk Load setting: " + bulkLoad);
-
+
Random masterRandom = createMasterRNG(props, Config.LOAD_RANDOM_SEED);
-
+
boolean genNodes = ConfigUtil.getBool(props, Config.GENERATE_NODES);
int nTotalLoaders = genNodes ? nLinkLoaders + 1 : nLinkLoaders;
-
+
LatencyStats latencyStats = new LatencyStats(nTotalLoaders);
List<Runnable> loaders = new ArrayList<Runnable>(nTotalLoaders);
-
- LoadProgress loadTracker = LoadProgress.create(logger, props);
+
+ LoadProgress loadTracker = LoadProgress.create(logger, props);
for (int i = 0; i < nLinkLoaders; i++) {
LinkStore linkStore = createLinkStore();
-
+
bulkLoad = bulkLoad && linkStore.bulkLoadBatchSize() > 0;
- LinkBenchLoad l = new LinkBenchLoad(linkStore, props, latencyStats,
+ LinkBenchLoad l = new LinkBenchLoad(linkStore, props, latencyStats,
csvStreamFile, i, maxid1 == startid1 + 1, chunk_q, loadTracker);
loaders.add(l);
}
-
+
if (genNodes) {
logger.info("Will generate graph nodes during loading");
int loaderId = nTotalLoaders - 1;
@@ -258,7 +258,7 @@ void load() throws IOException, InterruptedException, Throwable {
loaders.add(new NodeLoader(props, logger, nodeStore, rng,
latencyStats, csvStreamFile, loaderId));
}
- enqueueLoadWork(chunk_q, startid1, maxid1, nLinkLoaders,
+ enqueueLoadWork(chunk_q, startid1, maxid1, nLinkLoaders,
new Random(masterRandom.nextLong()));
// run loaders
loadTracker.startTimer();
@@ -277,15 +277,15 @@ void load() throws IOException, InterruptedException, Throwable {
}
latencyStats.displayLatencyStats();
-
+
if (csvStatsFile != null) {
latencyStats.printCSVStats(csvStatsFile, true);
}
-
+
double loadTime_s = (loadTime/1000.0);
logger.info(String.format("LOAD PHASE COMPLETED. " +
" Loaded %d nodes (Expected %d)." +
- " Loaded %d links (%.2f links per node). " +
+ " Loaded %d links (%.2f links per node). " +
" Took %.1f seconds. Links/second = %d",
actualNodes, expectedNodes, actualLinks,
actualLinks / (double) actualNodes, loadTime_s,
@@ -307,22 +307,22 @@ private Random createMasterRNG(Properties props, String configKey) {
logger.info("Using configured random seed " + configKey + "=" + seed);
} else {
seed = System.nanoTime() ^ (long)configKey.hashCode();
- logger.info("Using random seed " + seed + " since " + configKey
+ logger.info("Using random seed " + seed + " since " + configKey
+ " not specified");
}
-
+
SecureRandom masterRandom;
try {
masterRandom = SecureRandom.getInstance("SHA1PRNG");
} catch (NoSuchAlgorithmException e) {
logger.warn("SHA1PRNG not available, defaulting to default SecureRandom" +
- " implementation");
+ " implementation");
masterRandom = new SecureRandom();
}
masterRandom.setSeed(ByteBuffer.allocate(8).putLong(seed).array());
-
+
// Can be used to check that rng is behaving as expected
- logger.debug("First number generated by master " + configKey +
+ logger.debug("First number generated by master " + configKey +
": " + masterRandom.nextLong());
return masterRandom;
}
@@ -331,20 +331,20 @@ private void enqueueLoadWork(BlockingQueue<LoadChunk> chunk_q, long startid1,
long maxid1, int nloaders, Random rng) {
// Enqueue work chunks. Do it in reverse order as a heuristic to improve
// load balancing, since queue is FIFO and later chunks tend to be larger
-
+
int chunkSize = ConfigUtil.getInt(props, Config.LOADER_CHUNK_SIZE, 2048);
long chunk_num = 0;
ArrayList<LoadChunk> stack = new ArrayList<LoadChunk>();
for (long id1 = startid1; id1 < maxid1; id1 += chunkSize) {
- stack.add(new LoadChunk(chunk_num, id1,
+ stack.add(new LoadChunk(chunk_num, id1,
Math.min(id1 + chunkSize, maxid1), rng));
chunk_num++;
}
-
+
for (int i = stack.size() - 1; i >= 0; i--) {
chunk_q.add(stack.get(i));
}
-
+
for (int i = 0; i < nloaders; i++) {
// Add a shutdown signal for each loader
chunk_q.add(LoadChunk.SHUTDOWN);
@@ -368,14 +368,14 @@ void sendrequests() throws IOException, InterruptedException, Throwable {
List<LinkBenchRequest> requesters = new LinkedList<LinkBenchRequest>();
RequestProgress progress = LinkBenchRequest.createProgress(logger, props);
-
+
Random masterRandom = createMasterRNG(props, Config.REQUEST_RANDOM_SEED);
-
+
// create requesters
for (int i = 0; i < nrequesters; i++) {
Stores stores = initStores();
LinkBenchRequest l = new LinkBenchRequest(stores.linkStore,
- stores.nodeStore, props, latencyStats, csvStreamFile,
+ stores.nodeStore, props, latencyStats, csvStreamFile,
progress, new Random(masterRandom.nextLong()), i, nrequesters);
requesters.add(l);
}
@@ -384,8 +384,8 @@ void sendrequests() throws IOException, InterruptedException, Throwable {
concurrentExec(requesters);
long finishTime = System.currentTimeMillis();
// Calculate duration accounting for warmup time
- long benchmarkTime = finishTime - progress.getBenchmarkStartTime();
-
+ long benchmarkTime = finishTime - progress.getBenchmarkStartTime();
+
long requestsdone = 0;
int abortedRequesters = 0;
// wait for requesters
@@ -397,18 +397,18 @@ void sendrequests() throws IOException, InterruptedException, Throwable {
}
latencyStats.displayLatencyStats();
-
+
if (csvStatsFile != null) {
latencyStats.printCSVStats(csvStatsFile, true);
}
-
+
logger.info("REQUEST PHASE COMPLETED. " + requestsdone +
" requests done in " + (benchmarkTime/1000) + " seconds." +
" Requests/second = " + (1000*requestsdone)/benchmarkTime);
if (abortedRequesters > 0) {
logger.error(String.format("Benchmark did not complete cleanly: %d/%d " +
- "request threads aborted. See error log entries for details.",
- abortedRequesters, nrequesters));
+ "request threads aborted. See error log entries for details.",
+ abortedRequesters, nrequesters));
}
}
@@ -439,8 +439,7 @@ public void run() {
task.run();
} catch (Throwable e) {
Logger threadLog = Logger.getLogger(ConfigUtil.LINKBENCH_LOGGER);
- threadLog.error("Unrecoverable exception in" +
- " worker thread:", e);
+ threadLog.error("Unrecoverable exception in worker thread:", e);
Runtime.getRuntime().halt(1);
}
doneSignal.countDown();
@@ -460,7 +459,7 @@ void drive() throws IOException, InterruptedException, Throwable {
public static void main(String[] args)
throws IOException, InterruptedException, Throwable {
processArgs(args);
- LinkBenchDriver d = new LinkBenchDriver(configFile,
+ LinkBenchDriver d = new LinkBenchDriver(configFile,
cmdLineProps, logFile);
try {
d.drive();
@@ -481,26 +480,26 @@ private static Options initializeOptions() {
Option config = new Option("c", true, "Linkbench config file");
config.setArgName("file");
options.addOption(config);
-
+
Option log = new Option("L", true, "Log to this file");
log.setArgName("file");
options.addOption(log);
-
- Option csvStats = new Option("csvstats", "csvstats", true,
+
+ Option csvStats = new Option("csvstats", "csvstats", true,
"CSV stats output");
csvStats.setArgName("file");
options.addOption(csvStats);
-
- Option csvStream = new Option("csvstream", "csvstream", true,
+
+ Option csvStream = new Option("csvstream", "csvstream", true,
"CSV streaming stats output");
csvStream.setArgName("file");
options.addOption(csvStream);
-
+
options.addOption("l", false,
"Execute loading stage of benchmark");
options.addOption("r", false,
"Execute request stage of benchmark");
-
+
// Java-style properties to override config file
// -Dkey=value
Option property = new Option("D", "Override a config setting");
@@ -508,10 +507,10 @@ private static Options initializeOptions() {
property.setArgName("property=value");
property.setValueSeparator('=');
options.addOption(property);
-
+
return options;
}
-
+
/**
* Process command line arguments and set static variables
* exits program if invalid arguments provided
@@ -522,7 +521,7 @@ private static Options initializeOptions() {
private static void processArgs(String[] args)
throws ParseException {
Options options = initializeOptions();
-
+
CommandLine cmd = null;
try {
CommandLineParser parser = new GnuParser();
@@ -533,8 +532,8 @@ private static void processArgs(String[] args)
printUsage(options);
System.exit(EXIT_BADARGS);
}
-
- /*
+
+ /*
* Apache CLI validates arguments, so can now assume
* all required options are present, etc
*/
@@ -547,14 +546,14 @@ private static void processArgs(String[] args)
System.err.println();
printUsage(options);
System.exit(EXIT_BADARGS);
- }
+ }
// Set static option variables
doLoad = cmd.hasOption('l');
doRequest = cmd.hasOption('r');
-
+
logFile = cmd.getOptionValue('L'); // May be null
-
+
configFile = cmd.getOptionValue('c');
if (configFile == null) {
// Try to find in usual location
@@ -570,7 +569,7 @@ private static void processArgs(String[] args)
System.exit(EXIT_BADARGS);
}
}
-
+
String csvStatsFileName = cmd.getOptionValue("csvstats"); // May be null
if (csvStatsFileName != null) {
try {
@@ -582,7 +581,7 @@ private static void processArgs(String[] args)
System.exit(EXIT_BADARGS);
}
}
-
+
String csvStreamFileName = cmd.getOptionValue("csvstream"); // May be null
if (csvStreamFileName != null) {
try {
@@ -597,9 +596,9 @@ private static void processArgs(String[] args)
System.exit(EXIT_BADARGS);
}
}
-
+
cmdLineProps = cmd.getOptionProperties("D");
-
+
if (!(doLoad || doRequest)) {
System.err.println("Did not select benchmark mode");
printUsage(options);
16 src/java/com/facebook/LinkBench/LinkBenchDriverMR.java
@@ -72,7 +72,7 @@
private static final Logger logger =
Logger.getLogger(ConfigUtil.LINKBENCH_LOGGER);
-
+
static enum Counters { LINK_LOADED, REQUEST_DONE }
private static Properties props;
@@ -324,15 +324,15 @@ public void map(IntWritable loaderid,
long maxid1 = ConfigUtil.getLong(props, Config.MAX_ID);
long startid1 = ConfigUtil.getLong(props, Config.MIN_ID);
-
+
LoadProgress prog_tracker = LoadProgress.create(
Logger.getLogger(ConfigUtil.LINKBENCH_LOGGER), props);
-
- LinkBenchLoad loader = new LinkBenchLoad(store, props, latencyStats,
+
+ LinkBenchLoad loader = new LinkBenchLoad(store, props, latencyStats,
null,
loaderid.get(), maxid1 == startid1 + 1,
nloaders.get(), prog_tracker, new Random());
-
+
LinkedList<LinkBenchLoad> tasks = new LinkedList<LinkBenchLoad>();
tasks.add(loader);
long linksloaded = 0;
@@ -367,13 +367,13 @@ public void map(IntWritable requesterid,
LatencyStats latencyStats = new LatencyStats(nrequesters.get());
RequestProgress progress =
LinkBenchRequest.createProgress(logger, props);
- progress.startTimer();
+ progress.startTimer();
// TODO: Don't support NodeStore yet
final LinkBenchRequest requester =
new LinkBenchRequest(store, null, props, latencyStats, null, progress,
new Random(), requesterid.get(), nrequesters.get());
-
-
+
+
// Wrap in runnable to handle error
Thread t = new Thread(new Runnable() {
public void run() {
128 src/java/com/facebook/LinkBench/LinkBenchLoad.java
@@ -41,7 +41,7 @@
* Multi-threaded loader for loading graph edges (but not nodes) into
* LinkStore. The range from startid1 to maxid1 is chunked up into equal sized
* disjoint ranges. These are then enqueued for processing by a number
- * of loader threads to be loaded in parallel. The #links generated for
+ * of loader threads to be loaded in parallel. The #links generated for
* an id1 is based on the configured distribution. The # of link types,
* and link payload data is also controlled by the configuration file.
* The actual counts of #links generated is tracked in nlinks_counts.
@@ -49,7 +49,7 @@
public class LinkBenchLoad implements Runnable {
- private final Logger logger = Logger.getLogger(ConfigUtil.LINKBENCH_LOGGER);
+ private final Logger logger = Logger.getLogger(ConfigUtil.LINKBENCH_LOGGER);
private long maxid1; // max id1 to generate
private long startid1; // id1 at which to start
@@ -64,16 +64,16 @@
Level debuglevel;
String dbid;
-
+
private ID2Chooser id2chooser;
// Counters for load statistics
long sameShuffle;
long diffShuffle;
long linksloaded;
-
- /**
- * special case for single hot row benchmark. If singleAssoc is set,
+
+ /**
+ * special case for single hot row benchmark. If singleAssoc is set,
* then make this method not print any statistics message, all statistics
* are collected at a higher layer. */
boolean singleAssoc;
@@ -103,13 +103,13 @@ public LinkBenchLoad(LinkStore store, Properties props,
int nloaders, LoadProgress prog_tracker, Random rng) {
this(store, props, latencyStats, csvStreamOut, loaderID, singleAssoc,
new ArrayBlockingQueue<LoadChunk>(2), prog_tracker);
-
+
// Just add a single chunk to the queue
chunk_q.add(new LoadChunk(loaderID, startid1, maxid1, rng));
chunk_q.add(LoadChunk.SHUTDOWN);
}
-
+
public LinkBenchLoad(LinkStore linkStore,
Properties props,
LatencyStats latencyStats,
@@ -128,27 +128,27 @@ public LinkBenchLoad(LinkStore linkStore,
this.singleAssoc = singleAssoc;
this.chunk_q = chunk_q;
this.prog_tracker = prog_tracker;
-
-
+
+
/*
* Load settings from properties
*/
maxid1 = ConfigUtil.getLong(props, Config.MAX_ID);
startid1 = ConfigUtil.getLong(props, Config.MIN_ID);
-
+
// math functions may cause problems for id1 = 0. Start at 1.
if (startid1 <= 0) {
throw new LinkBenchConfigError("startid1 must be >= 1");
}
debuglevel = ConfigUtil.getDebugLevel(props);
-
+
double medianLinkDataSize = ConfigUtil.getDouble(props,
Config.LINK_DATASIZE);
linkDataSize = new LogNormalDistribution();
linkDataSize.init(0, LinkStore.MAX_LINK_DATA, medianLinkDataSize,
Config.LINK_DATASIZE_SIGMA);
-
+
try {
linkDataGen = ClassLoadUtil.newInstance(
ConfigUtil.getPropertyRequired(props, Config.LINK_ADD_DATAGEN),
@@ -156,15 +156,15 @@ public LinkBenchLoad(LinkStore linkStore,
linkDataGen.init(props, Config.LINK_ADD_DATAGEN_PREFIX);
} catch (ClassNotFoundException ex) {
logger.error(ex);
- throw new LinkBenchConfigError("Error loading data generator class: "
+ throw new LinkBenchConfigError("Error loading data generator class: "
+ ex.getMessage());
}
-
+
displayFreq_ms = ConfigUtil.getLong(props, Config.DISPLAY_FREQ) * 1000;
int maxsamples = ConfigUtil.getInt(props, Config.MAX_STAT_SAMPLES);
-
+
dbid = ConfigUtil.getPropertyRequired(props, Config.DBID);
-
+
/*
* Initialize statistics
*/
@@ -172,7 +172,7 @@ public LinkBenchLoad(LinkStore linkStore,
sameShuffle = 0;
diffShuffle = 0;
stats = new SampledStats(loaderID, maxsamples, csvStreamOut);
-
+
id2chooser = new ID2Chooser(props, startid1, maxid1, 1, 1);
}
@@ -188,7 +188,7 @@ public void run() {
logger.error("Error while initializing store", e);
throw new RuntimeException(e);
}
-
+
int bulkLoadBatchSize = store.bulkLoadBatchSize();
boolean bulkLoad = bulkLoadBatchSize > 0;
ArrayList<Link> loadBuffer = null;
@@ -200,17 +200,17 @@ public void run() {
logger.info("Starting loader thread #" + loaderID + " loading links");
lastDisplayTime = System.currentTimeMillis();
-
+
while (true) {
LoadChunk chunk;
try {
chunk = chunk_q.take();
- //logger.info("chunk end="+chunk.end);
+ //logger.info("chunk end="+chunk.end);
} catch (InterruptedException ie) {
logger.warn("InterruptedException not expected, try again", ie);
continue;
}
-
+
// Shutdown signal is received though special chunk type
if (chunk.shutdown) {
break;
@@ -220,19 +220,19 @@ public void run() {
processChunk(chunk, bulkLoad, bulkLoadBatchSize,
loadBuffer, countLoadBuffer);
}
-
+
if (bulkLoad) {
// Load any remaining links or counts
loadLinks(loadBuffer);
loadCounts(countLoadBuffer);
}
-
+
if (!singleAssoc) {
logger.debug(" Same shuffle = " + sameShuffle +
" Different shuffle = " + diffShuffle);
displayStats(lastDisplayTime, bulkLoad);
}
-
+
store.close();
}
@@ -240,7 +240,7 @@ public void run() {
private void displayStats(long startTime, boolean bulkLoad) {
long endTime = System.currentTimeMillis();
if (bulkLoad) {
- stats.displayStats(startTime, endTime,
+ stats.displayStats(startTime, endTime,
Arrays.asList(LinkBenchOp.LOAD_LINKS_BULK,
LinkBenchOp.LOAD_COUNTS_BULK, LinkBenchOp.LOAD_LINKS_BULK_NLINKS,
LinkBenchOp.LOAD_COUNTS_BULK_NLINKS));
@@ -257,7 +257,7 @@ private void processChunk(LoadChunk chunk, boolean bulkLoad,
logger.debug("Loader thread #" + loaderID + " processing "
+ chunk.toString());
}
-
+
// Counter for total number of links loaded in chunk;
long links_in_chunk = 0;
@@ -267,13 +267,13 @@ private void processChunk(LoadChunk chunk, boolean bulkLoad,
// otherwise reuse object
link = initLink();
}
-
+
long prevPercentPrinted = 0;
for (long id1 = chunk.start; id1 < chunk.end; id1 += chunk.step) {
long added_links= createOutLinks(chunk.rng, link, loadBuffer, countLoadBuffer,
id1, singleAssoc, bulkLoad, bulkLoadBatchSize);
links_in_chunk += added_links;
-
+
if (!singleAssoc) {
long nloaded = (id1 - chunk.start) / chunk.step;
if (bulkLoad) {
@@ -285,7 +285,7 @@ private void processChunk(LoadChunk chunk, boolean bulkLoad,
prevPercentPrinted = percent;
}
}
-
+
// Check if stats should be flushed and reset
long now = System.currentTimeMillis();
if (lastDisplayTime + displayFreq_ms <= now) {
@@ -294,7 +294,7 @@ private void processChunk(LoadChunk chunk, boolean bulkLoad,
lastDisplayTime = now;
}
}
-
+
// Update progress and maybe print message
prog_tracker.update(chunk.size, links_in_chunk);
}
@@ -319,7 +319,7 @@ private long createOutLinks(Random rng,
linkTypeCounts = new HashMap<Long, LinkCount>();
}
long nlinks_total = 0;
-
+
for (long link_type: id2chooser.getLinkTypes()) {
long nlinks = id2chooser.calcLinkCount(id1, link_type);
nlinks_total += nlinks;
@@ -328,7 +328,7 @@ private long createOutLinks(Random rng,
} else {
diffShuffle++;
}
-
+
if (Level.TRACE.isGreaterOrEqual(debuglevel)) {
logger.trace("id1 = " + id1 + " link_type = " + link_type +
" nlinks = " + nlinks);
@@ -339,15 +339,15 @@ private long createOutLinks(Random rng,
link = initLink();
}
constructLink(rng, link, id1, link_type, j, singleAssoc);
-
+
if (bulkLoad) {
loadBuffer.add(link);
if (loadBuffer.size() >= bulkLoadBatchSize) {
loadLinks(loadBuffer);
}
-
+
// Update link counts for this type
- LinkCount count = linkTypeCounts.get(link.link_type);
+ LinkCount count = linkTypeCounts.get(link.link_type);
if (count == null) {
count = new LinkCount(id1, link.link_type,
link.time, link.version, 1);
@@ -361,7 +361,7 @@ private long createOutLinks(Random rng,
loadLink(link, j, nlinks, singleAssoc);
}
}
-
+
}
// Maintain the counts separately
@@ -398,7 +398,7 @@ private void constructLink(Random rng, Link link, long id1,
long link_type, long outlink_ix, boolean singleAssoc) {
link.id1 = id1;
link.link_type = link_type;
-
+
// Using random number generator for id2 means we won't know
// which id2s exist. So link id1 to
// maxid1 + id1 + 1 thru maxid1 + id1 + nlinks(id1) UNLESS
@@ -428,7 +428,7 @@ private long chooseInitialTimestamp(Random rng) {
/**
* Load an individual link into the db.
- *
+ *
* If an error occurs during loading, this method will log it,
* add stats, and reset the connection.
* @param link
@@ -442,22 +442,22 @@ private void loadLink(Link link, long outlink_ix, long nlinks,
if (!singleAssoc) {
timestart = System.nanoTime();
}
-
+
try {
// no inverses for now
store.addLink(dbid, link, true);
linksloaded++;
-
+
if (!singleAssoc && outlink_ix == nlinks - 1) {
long timetaken = (System.nanoTime() - timestart);
-
+
// convert to microseconds
stats.addStats(LinkBenchOp.LOAD_LINK, timetaken/1000, false);
-
- latencyStats.recordLatency(loaderID,
+
+ latencyStats.recordLatency(loaderID,
LinkBenchOp.LOAD_LINK, timetaken);
}
-
+
} catch (Throwable e){//Catch exception if any
long endtime2 = System.nanoTime();
long timetaken2 = (endtime2 - timestart)/1000;
@@ -475,13 +475,13 @@ private void loadLinks(ArrayList<Link> loadBuffer) {
store.addBulkLinks(dbid, loadBuffer, true);
linksloaded += nlinks;
loadBuffer.clear();
-
+
long timetaken = (System.nanoTime() - timestart);
-
+
// convert to microseconds
stats.addStats(LinkBenchOp.LOAD_LINKS_BULK, timetaken/1000, false);
stats.addStats(LinkBenchOp.LOAD_LINKS_BULK_NLINKS, nlinks, false);
-
+
latencyStats.recordLatency(loaderID, LinkBenchOp.LOAD_LINKS_BULK,
timetaken);
} catch (Throwable e){//Catch exception if any
@@ -492,22 +492,22 @@ private void loadLinks(ArrayList<Link> loadBuffer) {
store.clearErrors(loaderID);
}
}
-
+
private void loadCounts(ArrayList<LinkCount> loadBuffer) {
long timestart = System.nanoTime();
-
+
try {
// no inverses for now
int ncounts = loadBuffer.size();
store.addBulkCounts(dbid, loadBuffer);
loadBuffer.clear();
-
+
long timetaken = (System.nanoTime() - timestart);
-
+
// convert to microseconds
stats.addStats(LinkBenchOp.LOAD_COUNTS_BULK, timetaken/1000, false);
stats.addStats(LinkBenchOp.LOAD_COUNTS_BULK_NLINKS, ncounts, false);
-
+
latencyStats.recordLatency(loaderID, LinkBenchOp.LOAD_COUNTS_BULK,
timetaken);
} catch (Throwable e){//Catch exception if any
@@ -518,7 +518,7 @@ private void loadCounts(ArrayList<LinkCount> loadBuffer) {
store.clearErrors(loaderID);
}
}
-
+
/**
* Represents a portion of the id space, starting with
* start, going up until end (non-inclusive) with step size
@@ -550,7 +550,7 @@ public LoadChunk(boolean shutdown,
public final long step;
public final long size;
public Random rng;
-
+
public String toString() {
if (shutdown) {
return "chunk SHUTDOWN";
@@ -567,7 +567,7 @@ public String toString() {
public static class LoadProgress {
/** report progress at intervals of progressReportInterval links */
private final long progressReportInterval;
-
+
public LoadProgress(Logger progressLogger,
long id1s_total, long progressReportInterval) {
super();
@@ -578,27 +578,27 @@ public LoadProgress(Logger progressLogger,
this.id1s_loaded = new AtomicLong();
this.links_loaded = new AtomicLong();
}
-
+
public static LoadProgress create(Logger progressLogger, Properties props) {
long maxid1 = ConfigUtil.getLong(props, Config.MAX_ID);
long startid1 = ConfigUtil.getLong(props, Config.MIN_ID);
long nids = maxid1 - startid1;
- long progressReportInterval = ConfigUtil.getLong(props,
+ long progressReportInterval = ConfigUtil.getLong(props,
Config.LOAD_PROG_INTERVAL, 50000L);
return new LoadProgress(progressLogger, nids, progressReportInterval);
}
-
+
private final Logger progressLogger;
private final AtomicLong id1s_loaded; // progress
private final AtomicLong links_loaded; // progress
private final long id1s_total; // goal
private long starttime_ms;
-
+
/** Mark current time as start time for load */
public void startTimer() {
starttime_ms = System.currentTimeMillis();
}
-
+
/**
* Update progress
* @param id1_incr number of additional id1s loaded since last call
@@ -606,10 +606,10 @@ public void startTimer() {
*/
public void update(long id1_incr, long links_incr) {
long curr_id1s = id1s_loaded.addAndGet(id1_incr);
-
+
long curr_links = links_loaded.addAndGet(links_incr);
long prev_links = curr_links - links_incr;
-
+
if ((curr_links / progressReportInterval) >
(prev_links / progressReportInterval) || curr_id1s == id1s_total) {
double percentage = (curr_id1s / (double)id1s_total) * 100.0;
@@ -620,7 +620,7 @@ public void update(long id1_incr, long links_incr) {
double id1_rate = ((curr_id1s) / ((double) now - starttime_ms))*1000;
progressLogger.info(String.format(
"%d/%d id1s loaded (%.1f%% complete) at %.2f id1s/sec avg. " +
- "%d links loaded at %.2f links/sec avg.",
+ "%d links loaded at %.2f links/sec avg.",
curr_id1s, id1s_total, percentage, id1_rate,
curr_links, link_rate));
}
6 src/java/com/facebook/LinkBench/LinkBenchOp.java
@@ -32,13 +32,13 @@
LOAD_LINKS_BULK,
LOAD_COUNTS_BULK,
// Although the following are not truly operations, we need stats
- // for them
+ // for them
RANGE_SIZE, // how big range scans are
LOAD_LINKS_BULK_NLINKS, // how many links inserted in bulk
LOAD_COUNTS_BULK_NLINKS, // how many counts inserted in bulk
UNKNOWN;
-
+
public String displayName() {
return name();
}
-}
+}
230 src/java/com/facebook/LinkBench/LinkBenchRequest.java
@@ -43,22 +43,22 @@
Properties props;
LinkStore linkStore;
NodeStore nodeStore;
-
+
RequestProgress progressTracker;
long numRequests;
/** Requests per second: <= 0 for unlimited rate */
private long requestrate;
-
+
/** Maximum number of failed requests: < 0 for unlimited */
private long maxFailedRequests;
-
- /**
+
+ /**
* Time to run benchmark for before collecting stats. Allows
* caches, etc to warm up.
*/
private long warmupTime;
-
+
/** Maximum time to run benchmark for, not including warmup time */
long maxTime;
int nrequesters;
@@ -78,7 +78,7 @@
private LogNormalDistribution nodeDataSize;
private DataGenerator nodeAddDataGen;
private DataGenerator nodeUpDataGen;
-
+
// cummulative percentages
double pc_addlink;
double pc_deletelink;
@@ -90,10 +90,10 @@
double pc_deletenode;
double pc_updatenode;
double pc_getnode;
-
+
// Chance of doing historical range query
double p_historical_getlinklist;
-
+
private static class HistoryKey {
public final long id1;
public final long link_type;
@@ -102,11 +102,11 @@ public HistoryKey(long id1, long link_type) {
this.id1 = id1;
this.link_type = link_type;
}
-
+
public HistoryKey(Link l) {
this(l.id1, l.link_type);
}
-
+
@Override
public int hashCode() {
final int prime = 31;
@@ -115,7 +115,7 @@ public int hashCode() {
result = prime * result + (int) (link_type ^ (link_type >>> 32));
return result;
}
-
+
@Override
public boolean equals(Object obj) {
if (!(obj instanceof HistoryKey))
@@ -123,21 +123,21 @@ public boolean equals(Object obj) {
HistoryKey other = (HistoryKey) obj;
return id1 == other.id1 && link_type == other.link_type;
}
-
+
}
-
+
// Cache of last link in lists where full list wasn't retrieved
ArrayList<Link> listTailHistory;
-
+
// Index of history to avoid duplicates
HashMap<HistoryKey, Integer> listTailHistoryIndex;
-
+
// Limit of cache size
private int listTailHistoryLimit;
-
+
// Probability distribution for ids in multiget
ProbabilityDistribution multigetDist;
-
+
// Statistics
SampledStats stats;
LatencyStats latencyStats;
@@ -146,17 +146,17 @@ public boolean equals(Object obj) {
long numfound = 0;
long numnotfound = 0;
long numHistoryQueries = 0;
-
- /**
+
+ /**
* Random number generator use for generating workload. If
* initialized with same seed, should generate same sequence of requests
- * so that tests and benchmarks are repeatable.
+ * so that tests and benchmarks are repeatable.
*/
Random rng;
-
+
// Last node id accessed
long lastNodeId;
-
+
long requestsDone = 0;
long errors = 0;
boolean aborted;
@@ -171,7 +171,7 @@ public boolean equals(Object obj) {
private AccessDistribution nodeReadDist; // node reads
private AccessDistribution nodeUpdateDist; // node writes
private AccessDistribution nodeDeleteDist; // node deletes
-
+
private ID2Chooser id2chooser;
public LinkBenchRequest(LinkStore linkStore,
NodeStore nodeStore,
@@ -184,10 +184,10 @@ public LinkBenchRequest(LinkStore linkStore,
int nrequesters) {
assert(linkStore != null);
if (requesterID < 0 || requesterID >= nrequesters) {
- throw new IllegalArgumentException("Bad requester id "
+ throw new IllegalArgumentException("Bad requester id "
+ requesterID + "/" + nrequesters);
}
-
+
this.linkStore = linkStore;
this.nodeStore = nodeStore;
this.props = props;
@@ -238,13 +238,13 @@ public LinkBenchRequest(LinkStore linkStore,
progressFreq_ms = ConfigUtil.getLong(props, Config.PROGRESS_FREQ, 6L) * 1000;
int maxsamples = ConfigUtil.getInt(props, Config.MAX_STAT_SAMPLES);
stats = new SampledStats(requesterID, maxsamples, csvStreamOut);
-
+
listTailHistoryLimit = 2048; // Hardcoded limit for now
listTailHistory = new ArrayList<Link>(listTailHistoryLimit);
listTailHistoryIndex = new HashMap<HistoryKey, Integer>();
p_historical_getlinklist = ConfigUtil.getDouble(props,
- Config.PR_GETLINKLIST_HISTORY, 0.0) / 100;
-
+ Config.PR_GETLINKLIST_HISTORY, 0.0) / 100;
+
lastNodeId = startid1;
}
@@ -255,49 +255,49 @@ private void initRequestProbabilities(Properties props) {
pc_countlink = pc_updatelink + ConfigUtil.getDouble(props, Config.PR_COUNT_LINKS);
pc_getlink = pc_countlink + ConfigUtil.getDouble(props, Config.PR_GET_LINK);
pc_getlinklist = pc_getlink + ConfigUtil.getDouble(props, Config.PR_GET_LINK_LIST);
-
+
pc_addnode = pc_getlinklist + ConfigUtil.getDouble(props, Config.PR_ADD_NODE, 0.0);
pc_updatenode = pc_addnode + ConfigUtil.getDouble(props, Config.PR_UPDATE_NODE, 0.0);
pc_deletenode = pc_updatenode + ConfigUtil.getDouble(props, Config.PR_DELETE_NODE, 0.0);
pc_getnode = pc_deletenode + ConfigUtil.getDouble(props, Config.PR_GET_NODE, 0.0);
-
+
if (Math.abs(pc_getnode - 100.0) > 1e-5) {//compare real numbers
- throw new LinkBenchConfigError("Percentages of request types do not " +
+ throw new LinkBenchConfigError("Percentages of request types do not " +
"add to 100, only " + pc_getnode + "!");
}
}
private void initLinkRequestDistributions(Properties props, int requesterID,
int nrequesters) {
- writeDist = AccessDistributions.loadAccessDistribution(props,
+ writeDist = AccessDistributions.loadAccessDistribution(props,
startid1, maxid1, DistributionType.LINK_WRITES);
- readDist = AccessDistributions.loadAccessDistribution(props,
+ readDist = AccessDistributions.loadAccessDistribution(props,
startid1, maxid1, DistributionType.LINK_READS);
-
+
// Load uncorrelated distributions for blending if needed
writeDistUncorr = null;
if (props.containsKey(Config.WRITE_UNCORR_BLEND)) {
// Ratio of queries to use uncorrelated. Convert from percentage
- writeDistUncorrBlend = ConfigUtil.getDouble(props,
+ writeDistUncorrBlend = ConfigUtil.getDouble(props,
Config.WRITE_UNCORR_BLEND) / 100.0;
if (writeDistUncorrBlend > 0.0) {
- writeDistUncorr = AccessDistributions.loadAccessDistribution(props,
+ writeDistUncorr = AccessDistributions.loadAccessDistribution(props,
startid1, maxid1, DistributionType.LINK_WRITES_UNCORR);
}
}
-
+
readDistUncorr = null;
if (props.containsKey(Config.READ_UNCORR_BLEND)) {
// Ratio of queries to use uncorrelated. Convert from percentage
- readDistUncorrBlend = ConfigUtil.getDouble(props,
+ readDistUncorrBlend = ConfigUtil.getDouble(props,
Config.READ_UNCORR_BLEND) / 100.0;
if (readDistUncorrBlend > 0.0) {
- readDistUncorr = AccessDistributions.loadAccessDistribution(props,
+ readDistUncorr = AccessDistributions.loadAccessDistribution(props,
startid1, maxid1, DistributionType.LINK_READS_UNCORR);
}
}
-
- id2chooser = new ID2Chooser(props, startid1, maxid1,
+
+ id2chooser = new ID2Chooser(props, startid1, maxid1,
nrequesters, requesterID);
// Distribution of #id2s per multiget
@@ -308,11 +308,11 @@ private void initLinkRequestDistributions(Properties props, int requesterID,
try {
multigetDist = ClassLoadUtil.newInstance(multigetDistClass,
ProbabilityDistribution.class);
- multigetDist.init(multigetMin, multigetMax, props,
+ multigetDist.init(multigetMin, multigetMax, props,
Config.LINK_MULTIGET_DIST_PREFIX);
} catch (ClassNotFoundException e) {
logger.error(e);
- throw new LinkBenchConfigError("Class" + multigetDistClass +
+ throw new LinkBenchConfigError("Class" + multigetDistClass +
" could not be loaded as ProbabilityDistribution");
}
} else {
@@ -322,7 +322,7 @@ private void initLinkRequestDistributions(Properties props, int requesterID,
private void initLinkDataGeneration(Properties props) {
try {
- double medLinkDataSize = ConfigUtil.getDouble(props,
+ double medLinkDataSize = ConfigUtil.getDouble(props,
Config.LINK_DATASIZE);
linkDataSize = new LogNormalDistribution();
linkDataSize.init(0, LinkStore.MAX_LINK_DATA, medLinkDataSize,
@@ -331,21 +331,21 @@ private void initLinkDataGeneration(Properties props) {
ConfigUtil.getPropertyRequired(props, Config.LINK_ADD_DATAGEN),
DataGenerator.class);
linkAddDataGen.init(props, Config.LINK_ADD_DATAGEN_PREFIX);
-
+
linkUpDataGen = ClassLoadUtil.newInstance(
ConfigUtil.getPropertyRequired(props, Config.LINK_UP_DATAGEN),
DataGenerator.class);
linkUpDataGen.init(props, Config.LINK_UP_DATAGEN_PREFIX);
} catch (ClassNotFoundException ex) {
logger.error(ex);
- throw new LinkBenchConfigError("Error loading data generator class: "
+ throw new LinkBenchConfigError("Error loading data generator class: "
+ ex.getMessage());
}
}
private void initNodeRequestDistributions(Properties props) {
try {
- nodeReadDist = AccessDistributions.loadAccessDistribution(props,
+ nodeReadDist = AccessDistributions.loadAccessDistribution(props,
startid1, maxid1, DistributionType.NODE_READS);
} catch (LinkBenchConfigError e) {
// Not defined
@@ -354,9 +354,9 @@ private void initNodeRequestDistributions(Properties props) {
throw new LinkBenchConfigError("Node read distribution not " +
"configured but node read operations have non-zero probability");
}
-
+
try {
- nodeUpdateDist = AccessDistributions.loadAccessDistribution(props,
+ nodeUpdateDist = AccessDistributions.loadAccessDistribution(props,
startid1, maxid1, DistributionType.NODE_UPDATES);
} catch (LinkBenchConfigError e) {
// Not defined
@@ -365,9 +365,9 @@ private void initNodeRequestDistributions(Properties props) {
throw new LinkBenchConfigError("Node write distribution not " +
"configured but node write operations have non-zero probability");
}
-
+
try {
- nodeDeleteDist = AccessDistributions.loadAccessDistribution(props,
+ nodeDeleteDist = AccessDistributions.loadAccessDistribution(props,
startid1, maxid1, DistributionType.NODE_DELETES);
} catch (LinkBenchConfigError e) {
// Not defined
@@ -379,27 +379,27 @@ private void initNodeRequestDistributions(Properties props) {
}
private void initNodeDataGeneration(Properties props) {
- try {
- double medNodeDataSize = ConfigUtil.getDouble(props,
+ try {
+ double medNodeDataSize = ConfigUtil.getDouble(props,
Config.NODE_DATASIZE);
nodeDataSize = new LogNormalDistribution();
nodeDataSize.init(0, NodeStore.MAX_NODE_DATA, medNodeDataSize,
Config.NODE_DATASIZE_SIGMA);
- String dataGenClass = ConfigUtil.getPropertyRequired(props,
+ String dataGenClass = ConfigUtil.getPropertyRequired(props,
Config.NODE_ADD_DATAGEN);
nodeAddDataGen = ClassLoadUtil.newInstance(dataGenClass,
DataGenerator.class);
nodeAddDataGen.init(props, Config.NODE_ADD_DATAGEN_PREFIX);
-
- dataGenClass = ConfigUtil.getPropertyRequired(props,
+
+ dataGenClass = ConfigUtil.getPropertyRequired(props,
Config.NODE_UP_DATAGEN);
nodeUpDataGen = ClassLoadUtil.newInstance(dataGenClass,
DataGenerator.class);
nodeUpDataGen.init(props, Config.NODE_UP_DATAGEN_PREFIX);
} catch (ClassNotFoundException ex) {
logger.error(ex);
- throw new LinkBenchConfigError("Error loading data generator class: "
+ throw new LinkBenchConfigError("Error loading data generator class: "
+ ex.getMessage());
}
}
@@ -407,7 +407,7 @@ private void initNodeDataGeneration(Properties props) {
public long getRequestsDone() {
return requestsDone;
}
-
+
public boolean didAbort() {
return aborted;
}
@@ -455,7 +455,7 @@ private long chooseRequestID(DistributionType type, long previousId1) {
" for access distribution: " + dist.getClass().getName() + ": " +
dist.toString());
}
-
+
if (dist.getShuffler() != null) {
// Shuffle to go from position in space ranked from most to least accessed,
// to the real id space
@@ -490,7 +490,7 @@ private boolean oneRequest(boolean recordStats) {
link.visibility = LinkStore.VISIBILITY_DEFAULT;
link.version = 0;
link.time = System.currentTimeMillis();
- link.data = linkAddDataGen.fill(rng,
+ link.data = linkAddDataGen.fill(rng,
new byte[(int)linkDataSize.choose(rng)]);
starttime = System.nanoTime();
@@ -499,9 +499,9 @@ private boolean oneRequest(boolean recordStats) {
boolean added = !alreadyExists;
endtime = System.nanoTime();
if (Level.TRACE.isGreaterOrEqual(debuglevel)) {
- logger.trace("addLink id1=" + link.id1 + " link_type="
+ logger.trace("addLink id1=" + link.id1 + " link_type="
+ link.link_type + " id2=" + link.id2 + " added=" + added);
- }
+ }
} else if (r <= pc_deletelink) {
type = LinkBenchOp.DELETE_LINK;
long id1 = chooseRequestID(DistributionType.LINK_WRITES, link.id1);
@@ -513,9 +513,9 @@ private boolean oneRequest(boolean recordStats) {
false);
endtime = System.nanoTime();
if (Level.TRACE.isGreaterOrEqual(debuglevel)) {
- logger.trace("deleteLink id1=" + id1 + " link_type=" + link_type
+ logger.trace("deleteLink id1=" + id1 + " link_type=" + link_type
+ " id2=" + id2);
- }
+ }
} else if (r <= pc_updatelink) {
type = LinkBenchOp.UPDATE_LINK;
link.id1 = chooseRequestID(DistributionType.LINK_WRITES, link.id1);
@@ -526,8 +526,8 @@ private boolean oneRequest(boolean recordStats) {
link.visibility = LinkStore.VISIBILITY_DEFAULT;
link.version = 0;
link.time = System.currentTimeMillis();
- link.data = linkUpDataGen.fill(rng,
- new byte[(int)linkDataSize.choose(rng)]);
+ link.data = linkUpDataGen.fill(rng,
+ new byte[(int)linkDataSize.choose(rng)]);
starttime = System.nanoTime();
// no inverses for now
@@ -535,9 +535,9 @@ private boolean oneRequest(boolean recordStats) {
boolean found = found1;
endtime = System.nanoTime();
if (Level.TRACE.isGreaterOrEqual(debuglevel)) {
- logger.trace("updateLink id1=" + link.id1 + " link_type="
+ logger.trace("updateLink id1=" + link.id1 + " link_type="
+ link.link_type + " id2=" + link.id2 + " found=" + found);
- }
+ }
} else if (r <= pc_countlink) {
type = LinkBenchOp.COUNT_LINK;
@@ -548,9 +548,9 @@ private boolean oneRequest(boolean recordStats) {
long count = linkStore.countLinks(dbid, id1, link_type);
endtime = System.nanoTime();
if (Level.TRACE.isGreaterOrEqual(debuglevel)) {
- logger.trace("countLink id1=" + id1 + " link_type=" + link_type
+ logger.trace("countLink id1=" + id1 + " link_type=" + link_type
+ " count=" + count);
- }
+ }
} else if (r <= pc_getlink) {
type = LinkBenchOp.MULTIGET_LINK;
@@ -558,7 +558,7 @@ private boolean oneRequest(boolean recordStats) {
long id1 = chooseRequestID(DistributionType.LINK_READS, link.id1);
long link_type = id2chooser.chooseRandomLinkType(rng);
int nid2s = 1;
- if (multigetDist != null) {
+ if (multigetDist != null) {
nid2s = (int)multigetDist.choose(rng);
}
long id2s[] = id2chooser.chooseMultipleForOp(rng, id1, link_type, nid2s,
@@ -579,7 +579,7 @@ private boolean oneRequest(boolean recordStats) {
type = LinkBenchOp.GET_LINKS_LIST;
Link links[];
-
+
if (rng.nextDouble() < p_historical_getlinklist &&
!this.listTailHistory.isEmpty()) {
links = getLinkListTail();
@@ -590,7 +590,7 @@ private boolean oneRequest(boolean recordStats) {
links = getLinkList(id1, link_type);
endtime = System.nanoTime();
}
-
+
int count = ((links == null) ? 0 : links.length);
if (recordStats) {
stats.addStats(LinkBenchOp.RANGE_SIZE, count, false);
@@ -608,21 +608,21 @@ private boolean oneRequest(boolean recordStats) {
type = LinkBenchOp.UPDATE_NODE;
// Choose an id that has previously been created (but might have
// been since deleted
- long upId = chooseRequestID(DistributionType.NODE_UPDATES,
+ long upId = chooseRequestID(DistributionType.NODE_UPDATES,
lastNodeId);
// Generate new data randomly
Node newNode = createUpdateNode(upId);
-
+
starttime = System.nanoTime();
boolean changed = nodeStore.updateNode(dbid, newNode);
endtime = System.nanoTime();
lastNodeId = upId;
if (Level.TRACE.isGreaterOrEqual(debuglevel)) {
logger.trace("updateNode " + newNode + " changed=" + changed);
- }
+ }
} else if (r <= pc_deletenode) {
type = LinkBenchOp.DELETE_NODE;
- long idToDelete = chooseRequestID(DistributionType.NODE_DELETES,
+ long idToDelete = chooseRequestID(DistributionType.NODE_DELETES,
lastNodeId);
starttime = System.nanoTime();
boolean deleted = nodeStore.deleteNode(dbid, LinkStore.DEFAULT_NODE_TYPE,