LPS-27277 Source formatting

commit 9189c30122c0f4205a175ae8657c8ea59cf7a20b (1 parent: fc9802c)
Authored by @brianchandotcom

benchmarks/benchmarks.properties (63 lines changed)
@@ -10,16 +10,6 @@
##
#
- # Specify the directory to output the generated SQL files.
- #
- sample.sql.output.dir=.
-
- #
- # Specify whether the output should be merged into a single SQL file.
- #
- sample.sql.output.merge=true
-
- #
# Specify the database type of the generated SQL files.
#
#sample.sql.db.type=db2
@@ -40,102 +30,115 @@
#
# Specify the number of comments to generate per blogs entry.
#
- sample.sql.blogs.entry.comment.count=10
+ sample.sql.max.blogs.entry.comment.count=10
#
# Specify the number of blogs entries to generate per community.
#
- sample.sql.blogs.entry.count=10
+ sample.sql.max.blogs.entry.count=10
#
# Specify the number of records to generate per dynamic data lists record
# set.
#
- sample.sql.ddl.record.count=10
+ sample.sql.max.ddl.record.count=10
#
# Specify the number of dynamic data lists record sets to generate per
# community.
#
- sample.sql.ddl.record.set.count=1
+ sample.sql.max.ddl.record.set.count=1
#
# Specify the number of file entries to generate per document library
# folder.
#
- sample.sql.dl.file.entry.count=10
+ sample.sql.max.dl.file.entry.count=10
#
# Specify the size of the generated document library file entries.
#
- sample.sql.dl.file.entry.size=10240
+ sample.sql.max.dl.file.entry.size=10240
#
# Specify the number of document library folders to generate per depth in a
# community.
#
- sample.sql.dl.folder.count=10
+ sample.sql.max.dl.folder.count=10
#
# Specify the depth of document library folders to generate per community.
#
- sample.sql.dl.folder.depth=3
+ sample.sql.max.dl.folder.depth=3
#
# Specify the number of sites to generate.
#
- sample.sql.group.count=10
+ sample.sql.max.group.count=10
#
# Specify the number of journal articles to generate per community.
#
- sample.sql.journal.article.count=6
+ sample.sql.max.journal.article.count=6
#
# Specify the size of the generated journal articles.
#
- sample.sql.journal.article.size=30
+ sample.sql.max.journal.article.size=30
#
# Specify the number of message boards categories to generate per community.
#
- sample.sql.mb.category.count=10
+ sample.sql.max.mb.category.count=10
#
# Specify the number of messages to generate per message boards thread.
#
- sample.sql.mb.message.count=10
+ sample.sql.max.mb.message.count=10
#
# Specify the number of threads to generate per message boards category.
#
- sample.sql.mb.thread.count=10
+ sample.sql.max.mb.thread.count=10
#
# Specify the number of users to generate per community.
#
- sample.sql.user.count=10
+ sample.sql.max.user.count=10
#
# All generated users are associated with the Guest community. Set this
# property to specify how many generated sites a user will be associated
# with.
#
- sample.sql.user.to.group.count=2
+ sample.sql.max.user.to.group.count=2
#
# Specify the number of wiki nodes to generate per community.
#
- sample.sql.wiki.node.count=10
+ sample.sql.max.wiki.node.count=10
#
# Specify the number of comments to generate per wiki page.
#
- sample.sql.wiki.page.comment.count=10
+ sample.sql.max.wiki.page.comment.count=10
#
# Specify the number of pages to generate per wiki node.
#
- sample.sql.wiki.page.count=10
+ sample.sql.max.wiki.page.count=10
+
+ #
+ # Specify the optimize buffer size.
+ #
+ sample.sql.optimize.buffer.size=8192
+
+ #
+ # Specify the directory to output the generated SQL files.
+ #
+ sample.sql.output.dir=.
- sample.sql.optimize.buffer.size=8192
+ #
+ # Specify whether the output should be merged into a single SQL file.
+ #
+ sample.sql.output.merge=true
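
The diff above renames the sample data properties from sample.sql.<name> to sample.sql.max.<name> and moves the sample.sql.output.* entries after sample.sql.optimize.buffer.size so the keys stay in alphabetical order. As an illustrative sketch only (plain java.util.Properties rather than Liferay's own utilities, and with an assumed path to the properties file), the renamed keys can be read like this:

    import java.io.FileInputStream;
    import java.io.IOException;
    import java.util.Properties;

    public class BenchmarksPropertiesReader {

        public static void main(String[] args) throws IOException {
            Properties properties = new Properties();

            // The path is an assumption; point it at benchmarks.properties in
            // your checkout.
            try (FileInputStream in =
                    new FileInputStream("benchmarks/benchmarks.properties")) {

                properties.load(in);
            }

            // The keys use the new "sample.sql.max.*" names introduced by this
            // commit; the fallback defaults match the values shipped in the file.
            int maxGroupCount = Integer.parseInt(
                properties.getProperty("sample.sql.max.group.count", "10"));
            int maxUserCount = Integer.parseInt(
                properties.getProperty("sample.sql.max.user.count", "10"));

            System.out.println(
                "groups=" + maxGroupCount + ", users per community=" +
                    maxUserCount);
        }

    }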

benchmarks/build.xml (42 lines changed)
@@ -16,29 +16,29 @@
>
<arg value="-Dexternal-properties=com/liferay/portal/tools/dependencies/portal-tools.properties" />
<arg value="sample.sql.base.dir=${basedir}" />
- <arg value="sample.sql.output.dir=${sample.sql.output.dir}" />
- <arg value="sample.sql.output.merge=${sample.sql.output.merge}" />
<arg value="sample.sql.db.type=${sample.sql.db.type}" />
- <arg value="sample.sql.blogs.entry.comment.count=${sample.sql.blogs.entry.comment.count}" />
- <arg value="sample.sql.blogs.entry.count=${sample.sql.blogs.entry.count}" />
- <arg value="sample.sql.ddl.record.count=${sample.sql.ddl.record.count}" />
- <arg value="sample.sql.ddl.record.set.count=${sample.sql.ddl.record.set.count}" />
- <arg value="sample.sql.dl.file.entry.count=${sample.sql.dl.file.entry.count}" />
- <arg value="sample.sql.dl.file.entry.size=${sample.sql.dl.file.entry.size}" />
- <arg value="sample.sql.dl.folder.count=${sample.sql.dl.folder.count}" />
- <arg value="sample.sql.dl.folder.depth=${sample.sql.dl.folder.depth}" />
- <arg value="sample.sql.group.count=${sample.sql.group.count}" />
- <arg value="sample.sql.journal.article.count=${sample.sql.journal.article.count}" />
- <arg value="sample.sql.journal.article.size=${sample.sql.journal.article.size}" />
- <arg value="sample.sql.mb.category.count=${sample.sql.mb.category.count}" />
- <arg value="sample.sql.mb.message.count=${sample.sql.mb.message.count}" />
- <arg value="sample.sql.mb.thread.count=${sample.sql.mb.thread.count}" />
- <arg value="sample.sql.user.count=${sample.sql.user.count}" />
- <arg value="sample.sql.user.to.group.count=${sample.sql.user.to.group.count}" />
- <arg value="sample.sql.wiki.node.count=${sample.sql.wiki.node.count}" />
- <arg value="sample.sql.wiki.page.comment.count=${sample.sql.wiki.page.comment.count}" />
- <arg value="sample.sql.wiki.page.count=${sample.sql.wiki.page.count}" />
+ <arg value="sample.sql.max.blogs.entry.comment.count=${sample.sql.max.blogs.entry.comment.count}" />
+ <arg value="sample.sql.max.blogs.entry.count=${sample.sql.max.blogs.entry.count}" />
+ <arg value="sample.sql.max.ddl.record.count=${sample.sql.max.ddl.record.count}" />
+ <arg value="sample.sql.max.ddl.record.set.count=${sample.sql.max.ddl.record.set.count}" />
+ <arg value="sample.sql.max.dl.file.entry.count=${sample.sql.max.dl.file.entry.count}" />
+ <arg value="sample.sql.max.dl.file.entry.size=${sample.sql.max.dl.file.entry.size}" />
+ <arg value="sample.sql.max.dl.folder.count=${sample.sql.max.dl.folder.count}" />
+ <arg value="sample.sql.max.dl.folder.depth=${sample.sql.max.dl.folder.depth}" />
+ <arg value="sample.sql.max.group.count=${sample.sql.max.group.count}" />
+ <arg value="sample.sql.max.journal.article.count=${sample.sql.max.journal.article.count}" />
+ <arg value="sample.sql.max.journal.article.size=${sample.sql.max.journal.article.size}" />
+ <arg value="sample.sql.max.mb.category.count=${sample.sql.max.mb.category.count}" />
+ <arg value="sample.sql.max.mb.message.count=${sample.sql.max.mb.message.count}" />
+ <arg value="sample.sql.max.mb.thread.count=${sample.sql.max.mb.thread.count}" />
+ <arg value="sample.sql.max.user.count=${sample.sql.max.user.count}" />
+ <arg value="sample.sql.max.user.to.group.count=${sample.sql.max.user.to.group.count}" />
+ <arg value="sample.sql.max.wiki.node.count=${sample.sql.max.wiki.node.count}" />
+ <arg value="sample.sql.max.wiki.page.comment.count=${sample.sql.max.wiki.page.comment.count}" />
+ <arg value="sample.sql.max.wiki.page.count=${sample.sql.max.wiki.page.count}" />
<arg value="sample.sql.optimize.buffer.size=${sample.sql.optimize.buffer.size}" />
+ <arg value="sample.sql.output.dir=${sample.sql.output.dir}" />
+ <arg value="sample.sql.output.merge=${sample.sql.output.merge}" />
</java>
</target>
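
Each <arg> above is passed to SampleSQLBuilder.main as a key=value string and ends up in the arguments map the constructor reads. A minimal sketch of that kind of parsing, using only the standard library and not claiming to mirror Liferay's actual argument handling, might look like:

    import java.util.HashMap;
    import java.util.Map;

    public class ArgumentsParser {

        // Splits "key=value" program arguments on the first '=' so that values
        // containing '=' survive intact. Illustrative only; Liferay's real
        // parsing may differ.
        public static Map<String, String> parse(String[] args) {
            Map<String, String> arguments = new HashMap<String, String>();

            for (String arg : args) {
                int pos = arg.indexOf('=');

                if (pos > 0) {
                    arguments.put(arg.substring(0, pos), arg.substring(pos + 1));
                }
            }

            return arguments;
        }

        public static void main(String[] args) {
            Map<String, String> arguments = parse(
                new String[] {
                    "sample.sql.db.type=hypersonic",
                    "sample.sql.max.user.count=10"
                });

            // Prints "10".
            System.out.println(arguments.get("sample.sql.max.user.count"));
        }

    }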

portal-impl/src/com/liferay/portal/tools/samplesqlbuilder/SampleSQLBuilder.java (72 lines changed)
@@ -80,77 +80,75 @@ public static void main(String[] args) {
InitUtil.initWithSpring();
String baseDir = arguments.get("sample.sql.base.dir");
- String outputDir = arguments.get("sample.sql.output.dir");
- boolean outputMerge = GetterUtil.getBoolean(
- arguments.get("sample.sql.output.merge"));
String dbType = arguments.get("sample.sql.db.type");
int maxBlogsEntryCommentCount = GetterUtil.getInteger(
- arguments.get("sample.sql.blogs.entry.comment.count"));
+ arguments.get("sample.sql.max.blogs.entry.comment.count"));
int maxBlogsEntryCount = GetterUtil.getInteger(
- arguments.get("sample.sql.blogs.entry.count"));
+ arguments.get("sample.sql.max.blogs.entry.count"));
int maxDDLRecordCount = GetterUtil.getInteger(
- arguments.get("sample.sql.ddl.record.count"));
+ arguments.get("sample.sql.max.ddl.record.count"));
int maxDDLRecordSetCount = GetterUtil.getInteger(
- arguments.get("sample.sql.ddl.record.set.count"));
+ arguments.get("sample.sql.max.ddl.record.set.count"));
int maxDLFileEntryCount = GetterUtil.getInteger(
- arguments.get("sample.sql.dl.file.entry.count"));
+ arguments.get("sample.sql.max.dl.file.entry.count"));
int maxDLFileEntrySize = GetterUtil.getInteger(
- arguments.get("sample.sql.dl.file.entry.size"));
+ arguments.get("sample.sql.max.dl.file.entry.size"));
int maxDLFolderCount = GetterUtil.getInteger(
- arguments.get("sample.sql.dl.folder.count"));
+ arguments.get("sample.sql.max.dl.folder.count"));
int maxDLFolderDepth = GetterUtil.getInteger(
- arguments.get("sample.sql.dl.folder.depth"));
+ arguments.get("sample.sql.max.dl.folder.depth"));
int maxGroupCount = GetterUtil.getInteger(
- arguments.get("sample.sql.group.count"));
+ arguments.get("sample.sql.max.group.count"));
int maxJournalArticleCount = GetterUtil.getInteger(
- arguments.get("sample.sql.journal.article.count"));
+ arguments.get("sample.sql.max.journal.article.count"));
int maxJournalArticleSize = GetterUtil.getInteger(
- arguments.get("sample.sql.journal.article.size"));
+ arguments.get("sample.sql.max.journal.article.size"));
int maxMBCategoryCount = GetterUtil.getInteger(
- arguments.get("sample.sql.mb.category.count"));
+ arguments.get("sample.sql.max.mb.category.count"));
int maxMBMessageCount = GetterUtil.getInteger(
- arguments.get("sample.sql.mb.message.count"));
+ arguments.get("sample.sql.max.mb.message.count"));
int maxMBThreadCount = GetterUtil.getInteger(
- arguments.get("sample.sql.mb.thread.count"));
+ arguments.get("sample.sql.max.mb.thread.count"));
int maxUserCount = GetterUtil.getInteger(
- arguments.get("sample.sql.user.count"));
+ arguments.get("sample.sql.max.user.count"));
int maxUserToGroupCount = GetterUtil.getInteger(
- arguments.get("sample.sql.user.to.group.count"));
+ arguments.get("sample.sql.max.user.to.group.count"));
int maxWikiNodeCount = GetterUtil.getInteger(
- arguments.get("sample.sql.wiki.node.count"));
+ arguments.get("sample.sql.max.wiki.node.count"));
int maxWikiPageCommentCount = GetterUtil.getInteger(
- arguments.get("sample.sql.wiki.page.comment.count"));
+ arguments.get("sample.sql.max.wiki.page.comment.count"));
int maxWikiPageCount = GetterUtil.getInteger(
- arguments.get("sample.sql.wiki.page.count"));
+ arguments.get("sample.sql.max.wiki.page.count"));
int optimizeBufferSize = GetterUtil.getInteger(
arguments.get("sample.sql.optimize.buffer.size"));
+ String outputDir = arguments.get("sample.sql.output.dir");
+ boolean outputMerge = GetterUtil.getBoolean(
+ arguments.get("sample.sql.output.merge"));
new SampleSQLBuilder(
- arguments, baseDir, outputDir, outputMerge, dbType,
- maxBlogsEntryCommentCount, maxBlogsEntryCount, maxDDLRecordCount,
- maxDDLRecordSetCount, maxDLFileEntryCount, maxDLFileEntrySize,
- maxDLFolderCount, maxDLFolderDepth, maxGroupCount,
- maxJournalArticleCount, maxJournalArticleSize, maxMBCategoryCount,
- maxMBMessageCount, maxMBThreadCount, maxUserCount,
- maxUserToGroupCount, maxWikiNodeCount, maxWikiPageCommentCount,
- maxWikiPageCount, optimizeBufferSize);
+ arguments, baseDir, dbType, maxBlogsEntryCommentCount,
+ maxBlogsEntryCount, maxDDLRecordCount, maxDDLRecordSetCount,
+ maxDLFileEntryCount, maxDLFileEntrySize, maxDLFolderCount,
+ maxDLFolderDepth, maxGroupCount, maxJournalArticleCount,
+ maxJournalArticleSize, maxMBCategoryCount, maxMBMessageCount,
+ maxMBThreadCount, maxUserCount, maxUserToGroupCount,
+ maxWikiNodeCount, maxWikiPageCommentCount, maxWikiPageCount,
+ optimizeBufferSize, outputDir, outputMerge);
}
public SampleSQLBuilder(
- Map<String, String> arguments, String baseDir, String outputDir,
- boolean outputMerge, String dbType, int maxBlogsEntryCommentCount,
- int maxBlogsEntryCount, int maxDDLRecordCount, int maxDDLRecordSetCount,
+ Map<String, String> arguments, String baseDir, String dbType,
+ int maxBlogsEntryCommentCount, int maxBlogsEntryCount,
+ int maxDDLRecordCount, int maxDDLRecordSetCount,
int maxDLFileEntryCount, int maxDLFileEntrySize, int maxDLFolderCount,
int maxDLFolderDepth, int maxGroupCount, int maxJournalArticleCount,
int maxJournalArticleSize, int maxMBCategoryCount,
int maxMBMessageCount, int maxMBThreadCount, int maxUserCount,
int maxUserToGroupCount, int maxWikiNodeCount,
int maxWikiPageCommentCount, int maxWikiPageCount,
- int optimizeBufferSize) {
+ int optimizeBufferSize, String outputDir, boolean outputMerge) {
try {
- _outputDir = outputDir;
- _outputMerge = outputMerge;
_dbType = dbType;
_maxBlogsEntryCommentCount = maxBlogsEntryCommentCount;
_maxBlogsEntryCount = maxBlogsEntryCount;
@@ -171,6 +169,8 @@ public SampleSQLBuilder(
_maxWikiPageCommentCount = maxWikiPageCommentCount;
_maxWikiPageCount = maxWikiPageCount;
_optimizeBufferSize = optimizeBufferSize;
+ _outputDir = outputDir;
+ _outputMerge = outputMerge;
int totalMThreadCount = maxMBCategoryCount * maxMBThreadCount;
int totalMBMessageCount = totalMThreadCount * maxMBMessageCount;
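
The constructor now takes the max* counts in the same order as the renamed properties, with outputDir and outputMerge moved to the end of the parameter list. The derived totals in the last two context lines simply multiply the per-parent maximums; with the defaults from benchmarks.properties (10 categories, 10 threads per category, 10 messages per thread) the arithmetic works out as in this self-contained sketch, which is not the class itself:

    public class MessageBoardsTotals {

        public static void main(String[] args) {
            // Defaults from benchmarks.properties.
            int maxMBCategoryCount = 10;
            int maxMBThreadCount = 10;
            int maxMBMessageCount = 10;

            // Same arithmetic as the constructor shown above.
            int totalMBThreadCount = maxMBCategoryCount * maxMBThreadCount;
            int totalMBMessageCount = totalMBThreadCount * maxMBMessageCount;

            // Prints "threads=100, messages=1000".
            System.out.println(
                "threads=" + totalMBThreadCount + ", messages=" +
                    totalMBMessageCount);
        }

    }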

sql/build-parent.xml (41 lines changed)
@@ -158,28 +158,29 @@
<arg value="-Dexternal-properties=com/liferay/portal/tools/dependencies/portal-tools.properties" />
<arg value="sql.dir=${basedir}" />
<arg value="sample.sql.base.dir=${basedir}/../benchmarks" />
+ <arg value="sample.sql.db.type=hypersonic" />
+ <arg value="sample.sql.max.blogs.entry.comment.count=1" />
+ <arg value="sample.sql.max.blogs.entry.count=1" />
+ <arg value="sample.sql.max.ddl.record.count=1" />
+ <arg value="sample.sql.max.ddl.record.set.count=1" />
+ <arg value="sample.sql.max.dl.file.entry.count=1" />
+ <arg value="sample.sql.max.dl.file.entry.size=1" />
+ <arg value="sample.sql.max.dl.folder.count=1" />
+ <arg value="sample.sql.max.dl.folder.depth=1" />
+ <arg value="sample.sql.max.group.count=1" />
+ <arg value="sample.sql.max.journal.article.count=1" />
+ <arg value="sample.sql.max.journal.article.size=1" />
+ <arg value="sample.sql.max.mb.category.count=1" />
+ <arg value="sample.sql.max.mb.message.count=1" />
+ <arg value="sample.sql.max.mb.thread.count=1" />
+ <arg value="sample.sql.max.user.count=1" />
+ <arg value="sample.sql.max.user.to.group.count=1" />
+ <arg value="sample.sql.max.wiki.node.count=1" />
+ <arg value="sample.sql.max.wiki.page.comment.count=1" />
+ <arg value="sample.sql.max.wiki.page.count=1" />
+ <arg value="sample.sql.optimize.buffer.size=8192" />
<arg value="sample.sql.output.dir=${tstamp.value}" />
<arg value="sample.sql.output.merge=true" />
- <arg value="sample.sql.db.type=hypersonic" />
- <arg value="sample.sql.blogs.entry.comment.count=1" />
- <arg value="sample.sql.blogs.entry.count=1" />
- <arg value="sample.sql.ddl.record.count=1" />
- <arg value="sample.sql.ddl.record.set.count=1" />
- <arg value="sample.sql.dl.file.entry.count=1" />
- <arg value="sample.sql.dl.file.entry.size=1" />
- <arg value="sample.sql.dl.folder.count=1" />
- <arg value="sample.sql.dl.folder.depth=1" />
- <arg value="sample.sql.group.count=1" />
- <arg value="sample.sql.journal.article.count=${sample.sql.journal.article.count}" />
- <arg value="sample.sql.journal.article.size=${sample.sql.journal.article.size}" />
- <arg value="sample.sql.mb.category.count=1" />
- <arg value="sample.sql.mb.message.count=1" />
- <arg value="sample.sql.mb.thread.count=1" />
- <arg value="sample.sql.user.count=1" />
- <arg value="sample.sql.user.to.group.count=1" />
- <arg value="sample.sql.wiki.node.count=1" />
- <arg value="sample.sql.wiki.page.comment.count=1" />
- <arg value="sample.sql.wiki.page.count=1" />
</java>
<delete dir="${tstamp.value}" />