Skip to content

Commit

Permalink
LPS-34467 Add properties to benchmark.properties.
Browse files Browse the repository at this point in the history
  • Loading branch information
tinatian authored and brianchandotcom committed Jun 6, 2013
1 parent 18d957c commit 73f67fa
Show file tree
Hide file tree
Showing 5 changed files with 64 additions and 7 deletions.
24 changes: 22 additions & 2 deletions benchmarks/benchmarks.properties
Original file line number Diff line number Diff line change
Expand Up @@ -28,10 +28,20 @@
#sample.sql.db.type=sybase

#
# Specify the number of asset category to generate.
# Specify the number of asset categories per vocabulary to generate.
#
sample.sql.max.asset.category.count=0

#
# Specify the number of asset tags to generate.
#
sample.sql.max.asset.tag.count=0

#
# Specify the number of asset vocabularies to generate.
#
sample.sql.max.asset.vocabulary.count=0

#
# Specify the number of comments to generate per blogs entry.
#
Expand Down Expand Up @@ -162,4 +172,14 @@
#
# Specify whether the output should be merged into a single SQL file.
#
sample.sql.output.merge=true
sample.sql.output.merge=true

#
# Specify the number of asset categories per asset entry to generate.
#
sample.sql.per.asset.entry.max.asset.category.count=2

#
# Specify the number of asset tags per asset entry to generate.
#
sample.sql.per.asset.entry.max.asset.tag.count=5
4 changes: 4 additions & 0 deletions benchmarks/build.xml
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,8 @@
<arg value="sample.sql.base.dir=${basedir}" />
<arg value="sample.sql.db.type=${sample.sql.db.type}" />
<arg value="sample.sql.max.asset.category.count=${sample.sql.max.asset.category.count}" />
<arg value="sample.sql.max.asset.tag.count=${sample.sql.max.asset.tag.count}" />
<arg value="sample.sql.max.asset.vocabulary.count=${sample.sql.max.asset.vocabulary.count}" />
<arg value="sample.sql.max.blogs.entry.comment.count=${sample.sql.max.blogs.entry.comment.count}" />
<arg value="sample.sql.max.blogs.entry.count=${sample.sql.max.blogs.entry.count}" />
<arg value="sample.sql.max.ddl.custom.field.count=${sample.sql.max.ddl.custom.field.count}" />
Expand All @@ -37,6 +39,8 @@
<arg value="sample.sql.optimize.buffer.size=${sample.sql.optimize.buffer.size}" />
<arg value="sample.sql.output.dir=${sample.sql.output.dir}" />
<arg value="sample.sql.output.merge=${sample.sql.output.merge}" />
<arg value="sample.sql.per.asset.entry.max.asset.category.count=${sample.sql.per.asset.entry.max.asset.category.count}" />
<arg value="sample.sql.per.asset.entry.max.asset.tag.count=${sample.sql.per.asset.entry.max.asset.tag.count}" />
</java>
</target>
</project>
Original file line number Diff line number Diff line change
Expand Up @@ -169,15 +169,23 @@
public class DataFactory {

public DataFactory(
String baseDir, int maxAssetCategoryCount, int maxBlogsEntryCount,
int maxDDLCustomFieldCount, int maxGroupsCount,
int maxJournalArticleCount, int maxJournalArticleSize,
int maxMBCategoryCount, int maxMBThreadCount, int maxMBMessageCount,
String baseDir, int maxAssetCategoryCount,
int maxAssetCategoryPerAssetEntryCount, int maxAssetTagCount,
int maxAssetTagPerAssetEntryCount, int maxAssetVocabularyCount,
int maxBlogsEntryCount, int maxDDLCustomFieldCount,
int maxGroupsCount, int maxJournalArticleCount,
int maxJournalArticleSize, int maxMBCategoryCount,
int maxMBThreadCount, int maxMBMessageCount,
int maxUserToGroupCount)
throws Exception {

_baseDir = baseDir;
_maxAssetCategoryCount = maxAssetCategoryCount;
_maxAssetCategoryPerAssetEntryCount =
maxAssetCategoryPerAssetEntryCount;
_maxAssetTagCount = maxAssetTagCount;
_maxAssetTagPerAssetEntryCount = maxAssetTagPerAssetEntryCount;
_maxAssetVocabularyCount = maxAssetVocabularyCount;
_maxBlogsEntryCount = maxBlogsEntryCount;
_maxDDLCustomFieldCount = maxDDLCustomFieldCount;
_maxGroupsCount = maxGroupsCount;
Expand Down Expand Up @@ -2192,6 +2200,10 @@ protected Date nextFutureDate() {
private Map<Long, SimpleCounter> _layoutCounters =
new HashMap<Long, SimpleCounter>();
private int _maxAssetCategoryCount;
private int _maxAssetCategoryPerAssetEntryCount;
private int _maxAssetTagCount;
private int _maxAssetTagPerAssetEntryCount;
private int _maxAssetVocabularyCount;
private int _maxBlogsEntryCount;
private int _maxDDLCustomFieldCount;
private int _maxDLFileEntrySize;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -78,6 +78,10 @@ public SampleSQLBuilder(Map<String, String> arguments) throws Exception {
_dbType = arguments.get("sample.sql.db.type");
_maxAssetCategoryCount = GetterUtil.getInteger(
arguments.get("sample.sql.max.asset.category.count"));
_maxAssetTagCount = GetterUtil.getInteger(
arguments.get("sample.sql.max.asset.tag.count"));
_maxAssetVocabularyCount = GetterUtil.getInteger(
arguments.get("sample.sql.max.asset.vocabulary.count"));
_maxBlogsEntryCommentCount = GetterUtil.getInteger(
arguments.get("sample.sql.max.blogs.entry.comment.count"));
_maxBlogsEntryCount = GetterUtil.getInteger(
Expand Down Expand Up @@ -127,9 +131,16 @@ public SampleSQLBuilder(Map<String, String> arguments) throws Exception {
_outputDir = arguments.get("sample.sql.output.dir");
_outputMerge = GetterUtil.getBoolean(
arguments.get("sample.sql.output.merge"));
_maxAssetCategoryPerAssetEntry = GetterUtil.getInteger(
arguments.get(
"sample.sql.per.asset.entry.max.asset.category.count"));
_maxAssetTagPerAssetEntryCount = GetterUtil.getInteger(
arguments.get("sample.sql.per.asset.entry.max.asset.tag.count"));

_dataFactory = new DataFactory(
baseDir, _maxAssetCategoryCount, _maxBlogsEntryCount,
baseDir, _maxAssetCategoryCount, _maxAssetCategoryPerAssetEntry,
_maxAssetTagCount, _maxAssetTagPerAssetEntryCount,
_maxAssetVocabularyCount, _maxBlogsEntryCount,
_maxDDLCustomFieldCount, _maxGroupCount, _maxJournalArticleCount,
_maxJournalArticleSize, _maxMBCategoryCount, _maxMBThreadCount,
_maxMBMessageCount, _maxUserToGroupCount);
Expand Down Expand Up @@ -339,6 +350,8 @@ protected Map<String, Object> getContext() {
put(context, "counter", _dataFactory.getCounter());
put(context, "dataFactory", _dataFactory);
put(context, "maxAssetCategoryCount", _maxAssetCategoryCount);
put(context, "maxAssetTagCount", _maxAssetTagCount);
put(context, "maxAssetVocabularyCount", _maxAssetVocabularyCount);
put(context, "maxDLFileEntrySize", _maxDLFileEntrySize);
put(context, "maxBlogsEntryCommentCount", _maxBlogsEntryCommentCount);
put(context, "maxBlogsEntryCount", _maxBlogsEntryCount);
Expand Down Expand Up @@ -492,6 +505,10 @@ protected void writeToInsertSQLFile(String tableName, String sql)
private Map<String, Writer> _insertSQLWriters =
new ConcurrentHashMap<String, Writer>();
private int _maxAssetCategoryCount;
private int _maxAssetCategoryPerAssetEntry;
private int _maxAssetTagCount;
private int _maxAssetTagPerAssetEntryCount;
private int _maxAssetVocabularyCount;
private int _maxBlogsEntryCommentCount;
private int _maxBlogsEntryCount;
private int _maxDDLCustomFieldCount;
Expand Down
4 changes: 4 additions & 0 deletions sql/build-parent.xml
Original file line number Diff line number Diff line change
Expand Up @@ -173,6 +173,8 @@
<arg value="sample.sql.base.dir=${basedir}/../benchmarks" />
<arg value="sample.sql.db.type=hypersonic" />
<arg value="sample.sql.max.asset.category.count=1" />
<arg value="sample.sql.max.asset.tag.count=1" />
<arg value="sample.sql.max.asset.vocabulary.count=1" />
<arg value="sample.sql.max.blogs.entry.comment.count=1" />
<arg value="sample.sql.max.blogs.entry.count=1" />
<arg value="sample.sql.max.ddl.custom.field.count=1" />
Expand All @@ -198,6 +200,8 @@
<arg value="sample.sql.optimize.buffer.size=8192" />
<arg value="sample.sql.output.dir=${tstamp.value}" />
<arg value="sample.sql.output.merge=true" />
<arg value="sample.sql.per.asset.entry.max.asset.category.count=1" />
<arg value="sample.sql.per.asset.entry.max.asset.tag.count=1" />
</java>

<delete dir="${tstamp.value}" />
Expand Down

0 comments on commit 73f67fa

Please sign in to comment.