
Comparing changes

  • 3 commits
  • 2 files changed
  • 0 commit comments
  • 1 contributor
Commits on May 16, 2012
@ffissore ffissore "internal" is not the default data segment name d3f68a8
@ffissore ffissore - "internal" is not the default data segment name
- updated configuration parsing (handles versions)
0e7062a
@ffissore ffissore Merge pull request #38 from ffissore/master
default datasegment name and configuration parsing update
d51ae10
Showing with 69 additions and 15 deletions.
  1. +32 −14 lib/orientdb/connection/parser.js
  2. +37 −1 test/parser/configuration.js
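For context on the hunks below: the storage configuration arrives as one pipe-delimited record, and the parser walks it with a cursor (read(values[index++])), with a lone space standing for an empty field. Here is a minimal sketch of the version-3 header, using the short record from the new tests; the split on "|" and the null handling in read() are assumptions drawn from the test strings and assertions, not the library's exact code:

    // Sketch only: mirrors the field order the parser consumes, per the new tests.
    var configAsString = "3| |#2:0| |#0:1|it|IT|yyyy-MM-dd|yyyy-MM-dd HH:mm:ss| |0|mmap|500Kb|500Mb|50%|auto|0|0|0| |mmap|512mb|false|true|0|";
    var values = configAsString.split("|");
    var index = 0;
    // Assumption from the tests: a field containing a single space comes back as null.
    var read = function(value) {
        return value === " " ? null : value;
    };
    var config = {};
    config.version = parseInt(read(values[index++]));     // 3
    config.name = read(values[index++]);                   // null
    config.schemaRecordId = read(values[index++]);         // "#2:0"
    config.dictionaryRecordId = read(values[index++]);     // null
    config.indexMgrRecordId = read(values[index++]);       // "#0:1"
    config.localeLanguage = read(values[index++]);         // "it"
    config.localeCountry = read(values[index++]);          // "IT"
    config.dateFormat = read(values[index++]);             // "yyyy-MM-dd"
    config.dateTimeFormat = read(values[index++]);         // "yyyy-MM-dd HH:mm:ss"
    // The next field (" ") is the file template location, which the first
    // parser.js hunk below only reads when version > 2.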
lib/orientdb/connection/parser.js (46 changed lines)
@@ -628,8 +628,13 @@ var parseConfiguration = function(configString) {
return value;
};
- var phySegmentFromStream = function(config, values, index) {
+ var phySegmentFromStream = function(config, version, values, index) {
var fileTemplate = { };
+ if (version > 2) {
+ fileTemplate.location = read(values[index++]);
+ } else {
+ fileTemplate.location = null;
+ }
fileTemplate.maxSize = read(values[index++]);
fileTemplate.fileType = read(values[index++]);
fileTemplate.fileStartSize = read(values[index++]);
@@ -640,10 +645,21 @@ var parseConfiguration = function(configString) {
fileTemplate.infoFiles = [];
var size = parseInt(read(values[index++]));
for (var i = 0; i < size; i++) {
+ var fileName = read(values[index++]);
+
+ if (fileName.indexOf("$") === -1) {
+ // @COMPATIBILITY 0.9.25
+ var pos = fileName.indexOf("/databases");
+ if (pos > -1) {
+ fileName = "${ORIENTDB_HOME}" + fileName.substring(pos);
+ }
+ }
+
var infoFile = { };
- infoFile.fileName = read(values[index++]);
- infoFile.path = read(values[index++]);
+ infoFile.path = fileName;
infoFile.type = read(values[index++]);
+ infoFile.maxSize = read(values[index++]);
+ infoFile.incrementSize = fileTemplate.fileIncrementSize;
fileTemplate.infoFiles.push(infoFile);
}
@@ -652,7 +668,7 @@ var parseConfiguration = function(configString) {
return index;
};
- var clustersFromStream = function(config, values, index) {
+ var clustersFromStream = function(config, version, values, index) {
config.clusters = [];
var size = parseInt(read(values[index++]));
for (var i = 0; i < size; i++) {
@@ -664,32 +680,34 @@ var parseConfiguration = function(configString) {
var cluster = { };
cluster.clusterId = clusterId;
cluster.clusterName = read(values[index++]);
+ cluster.dataSegmentId = version >= 3 ? parseInt(read(values[index++])) : 0;
cluster.clusterType = read(values[index++]);
-
if (cluster.clusterType === "p") {
- index = phySegmentFromStream(cluster, values, index);
+ index = phySegmentFromStream(cluster, version, values, index);
+
cluster.holeFile = {};
cluster.holeFile.path = read(values[index++]);
cluster.holeFile.type = read(values[index++]);
cluster.holeFile.maxSize = read(values[index++]);
- } else if (cluster.clusterType === "l") {
- cluster.physicalClusterId = parseInt(read(values[index++]));
- cluster.map = read(values[index++]);
+ } else if (cluster.clusterType === "m") {
+ //nothing
+ } else {
+ throw new Error("Unknown cluster type: " + cluster.clusterType);
}
config.clusters.push(cluster);
}
return index;
};
- var dataSegmentsFromStream = function(config, values, index) {
+ var dataSegmentsFromStream = function(config, version, values, index) {
config.dataSegments = [];
var size = parseInt(read(values[index++]));
for (var i = 0; i < size; i++) {
var dataSegment = { };
dataSegment.dataId = parseInt(read(values[index++]));
dataSegment.dataName = read(values[index++]);
- index = phySegmentFromStream(dataSegment, values, index);
+ index = phySegmentFromStream(dataSegment, version, values, index);
dataSegment.holeFile = {};
dataSegment.holeFile.path = read(values[index++]);
dataSegment.holeFile.type = read(values[index++]);
@@ -727,10 +745,10 @@ var parseConfiguration = function(configString) {
config.dateFormat = read(values[index++]);
config.dateTimeFormat = read(values[index++]);
if (config.version > 1) {
- index = phySegmentFromStream(config, values, index);
+ index = phySegmentFromStream(config, config.version, values, index);
}
- index = clustersFromStream(config, values, index);
- index = dataSegmentsFromStream(config, values, index);
+ index = clustersFromStream(config, config.version, values, index);
+ index = dataSegmentsFromStream(config, config.version, values, index);
config.txSegment = { };
config.txSegment.path = read(values[index++]);
config.txSegment.type = read(values[index++]);
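The infoFiles loop above also gains a @COMPATIBILITY 0.9.25 normalization: configurations written by old servers store an absolute path rather than a ${...} variable, so anything from "/databases" onward is re-rooted under ${ORIENTDB_HOME}. A standalone sketch of that rewrite follows (the helper name is hypothetical; the body mirrors the hunk above):

    // Hypothetical helper; the logic mirrors the compatibility branch above.
    var normalizeFileName = function(fileName) {
        if (fileName.indexOf("$") === -1) {
            // @COMPATIBILITY 0.9.25: absolute path, re-root it under ORIENTDB_HOME
            var pos = fileName.indexOf("/databases");
            if (pos > -1) {
                fileName = "${ORIENTDB_HOME}" + fileName.substring(pos);
            }
        }
        return fileName;
    };
    // Absolute path, as found in the data segment of the new test record below:
    normalizeFileName("/home/federico/materiale/works_My/orientdb-graphed-1.0/databases/presentz/default.odh");
    // -> "${ORIENTDB_HOME}/databases/presentz/default.odh"
    normalizeFileName("${STORAGE_PATH}/internal.0.ocl");
    // -> unchanged: it already uses a variable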
test/parser/configuration.js (38 changed lines)
@@ -34,4 +34,40 @@ configAsString = "2| |#0:1| |#0:2|it|IT|yyyy-MM-dd|yyyy-MM-dd HH:mm:ss|0|mmap|50
config = parser.parseConfiguration(configAsString);
assert.equal(6, config.clusters.length);
assert.equal(5, config.clusters[5].clusterId);
-assert.equal("${STORAGE_PATH}/internal.0.ocl", config.clusters[0].fileTemplate.infoFiles[0].fileName);
+assert.equal("${STORAGE_PATH}/internal.0.ocl", config.clusters[0].fileTemplate.infoFiles[0].path);
+
+configAsString = "3| |#2:0| |#0:1|it|IT|yyyy-MM-dd|yyyy-MM-dd HH:mm:ss| |0|mmap|500Kb|500Mb|50%|auto|0|0|0| |mmap|512mb|false|true|0|";
+config = parser.parseConfiguration(configAsString);
+
+assert.equal(3, config.version);
+assert.equal(null, config.name);
+assert.equal("#2:0", config.schemaRecordId);
+assert.equal(null, config.dictionaryRecordId);
+assert.equal("#0:1", config.indexMgrRecordId);
+assert.equal("it", config.localeLanguage);
+assert.equal("IT", config.localeCountry);
+assert.equal("yyyy-MM-dd", config.dateFormat);
+assert.equal("yyyy-MM-dd HH:mm:ss", config.dateTimeFormat);
+assert.equal("0", config.fileTemplate.maxSize);
+assert.equal("mmap", config.fileTemplate.fileType);
+assert.equal("500Kb", config.fileTemplate.fileStartSize);
+assert.equal("500Mb", config.fileTemplate.fileMaxSize);
+assert.equal("50%", config.fileTemplate.fileIncrementSize);
+assert.equal("auto", config.fileTemplate.defrag);
+assert.equal(0, config.fileTemplate.infoFiles);
+assert.equal(0, config.clusters.length);
+assert.equal(0, config.dataSegments.length);
+assert.equal(null, config.txSegment.path);
+assert.equal("mmap", config.txSegment.type);
+assert.equal("512mb", config.txSegment.maxSize);
+assert.equal(false, config.txSegment.synchRecord);
+assert.equal(true, config.txSegment.synchTx);
+assert.equal(0, config.properties.length);
+
+
+configAsString = "3| |#2:0| |#0:1|it|IT|yyyy-MM-dd|yyyy-MM-dd HH:mm:ss| |0|mmap|500Kb|500Mb|50%|auto|0|6|0|internal|0|p| |0|mmap|1Mb|500Mb|50%|auto|1|${STORAGE_PATH}/internal.0.ocl|mmap|500Mb|${STORAGE_PATH}/internal.och|mmap|500Mb|1|index|0|p| |0|mmap|1Mb|500Mb|50%|auto|1|${STORAGE_PATH}/index.0.ocl|mmap|500Mb|${STORAGE_PATH}/index.och|mmap|500Mb|2|default|0|p| |0|mmap|1Mb|500Mb|50%|auto|1|${STORAGE_PATH}/default.0.ocl|mmap|500Mb|${STORAGE_PATH}/default.och|mmap|500Mb|3|orole|0|p| |0|mmap|1Mb|500Mb|50%|auto|1|${STORAGE_PATH}/orole.0.ocl|mmap|500Mb|${STORAGE_PATH}/orole.och|mmap|500Mb|4|ouser|0|p| |0|mmap|1Mb|500Mb|50%|auto|1|${STORAGE_PATH}/ouser.0.ocl|mmap|500Mb|${STORAGE_PATH}/ouser.och|mmap|500Mb|5|orids|0|p| |0|mmap|1Mb|500Mb|50%|auto|1|${STORAGE_PATH}/orids.0.ocl|mmap|500Mb|${STORAGE_PATH}/orids.och|mmap|500Mb|1|0|default| |0|mmap|1Mb|500Mb|100%|auto|1|${STORAGE_PATH}/default.0.oda|mmap|500Mb|/home/federico/materiale/works_My/orientdb-graphed-1.0/databases/presentz/default.odh|mmap|0|${STORAGE_PATH}/txlog.otx|mmap|512mb|false|true|0|";
+config = parser.parseConfiguration(configAsString);
+assert.equal(3, config.version);
+assert.equal(6, config.clusters.length);
+assert.equal(5, config.clusters[5].clusterId);
+assert.equal("${STORAGE_PATH}/internal.0.ocl", config.clusters[0].fileTemplate.infoFiles[0].path);
