
Reference data set and generator #63

2 changes: 2 additions & 0 deletions README.md
@@ -43,6 +43,8 @@ __IMPORTANT NOTICE:__ The contents of this repository currectly reflect a __DRAF
 1. [Confidence Level Joining](./usage-examples/confidence-level-joining.md)
 1. [Delivery Interface](./usage-examples/delivery-interface.md)
 1. [Build Avoidance](./usage-examples/build-avoidance.md)
+1. Reference Data Sets
+    1. [Default](./usage-examples/reference-data-sets/default.md)
 1. Customization
     1. [Custom Events](./customization/custom-events.md)
     1. [Custom Data](./customization/custom-data.md)
6 changes: 3 additions & 3 deletions examples/flows/confidence-level-joining/events.json
@@ -111,7 +111,7 @@
     },
     "data": {
       "name": "Act1",
-      "category": "Test Activity",
+      "categories": ["Test Activity"],
       "triggers": [
         {
           "type": "EIFFEL_EVENT"
@@ -138,7 +138,7 @@
     },
     "data": {
       "name": "Act2",
-      "category": "Test Activity",
+      "categories": ["Test Activity"],
       "triggers": [
         {
           "type": "EIFFEL_EVENT"
@@ -401,7 +401,7 @@
     },
     "data": {
       "outcome": {
-        "conclusion": "SUCCESS"
+        "conclusion": "SUCCESSFUL"
      }
     },
     "links": [
Binary file not shown.
566 changes: 566 additions & 0 deletions examples/reference-data-sets/default/generator.py

Large diffs are not rendered by default.

118 changes: 78 additions & 40 deletions examples/validate.py
@@ -5,51 +5,89 @@
 import fnmatch
 from jsonschema import validate

-def applySchema(schemaFileName):
-    print(" - Applying", schemaFileName, "to ...")
-    global schemas
-    schemas +=1
-    eventTypeName = schemaFileName[:-5]
-    eventTypeDirName = "examples/events/" + eventTypeName
-    try:
-        with open("schemas/" + schemaFileName, "r") as f:
-            schema = json.load(f)
+def loadAllJsonObjects(dir):
+    objects = []
+    badFiles = []
+
+    for root, dirNames, fileNames in os.walk(dir):
+        for fileName in fnmatch.filter(fileNames, "*.json"):
+            try:
+                path = os.path.join(root, fileName)
+                with open(path, "r") as f:
+                    loadedObject = json.load(f)
+                    if(isinstance(loadedObject, list)):
+                        for o in loadedObject:
+                            objects.append((path, fileName, o))
+                    else:
+                        objects.append((path, fileName, loadedObject))
+            except Exception as e:
+                print(e)
+                badFiles.append(path)
+
+    return objects, badFiles
+
+def loadSchemas():
+    schemaTuples, badSchemaFiles = loadAllJsonObjects("schemas")
+    schemas = {}
+    for path, fileName, o in schemaTuples:
+        schemas[fileName[:-5]] = o
+    return schemas, badSchemaFiles

-        for root, dirnames, filenames in os.walk(eventTypeDirName):
-            for exampleFileName in fnmatch.filter(filenames, "*.json"):
-                validateExample(schema, os.path.join(root, exampleFileName))
-    except Exception as e:
-        reportFailure(e)
-
-def validateExample(schema, exampleFilePath):
-    print(" ... ", exampleFilePath)
-    global examples
-    examples +=1
-    exception = False
-    try:
-        with open(exampleFilePath, "r") as f:
-            example = json.load(f)
+def loadExamples():
+    exampleTuples, badExampleFiles = loadAllJsonObjects("examples")
+    examples = []
+    for path, fileName, o in exampleTuples:
+        examples.append((path, o["meta"]["type"], o["meta"]["id"], o))
+    return examples, badExampleFiles
+
+def validateExamples(examples, schemas):
+    failures = []
+    numberOfSuccessfulValidations = 0
+    unchecked = []
+
+    for path, type, id, json in examples:
+        if type in schemas:
+            try:
+                validate(json, schemas[type])
+                numberOfSuccessfulValidations += 1
+            except Exception as e:
+                failures.append((path, type, id, e))
+        else:
+            unchecked.append((path, type, id, json))

-            validate(example, schema)
-            print(" PASS")
-    except Exception as e:
-        reportFailure(e)
+    return failures, unchecked, numberOfSuccessfulValidations

+def report(unchecked,failures,badSchemaFiles,badExampleFiles,numberOfSuccessfulValidations):
+    for path, type, id, o in unchecked:
+        print("WARNING: Missing schema for " + id + "(" + type + ") in " + path + ".")
+
+    for badSchemaFile in badSchemaFiles:
+        print("ERROR: Failed to load schema from file", badSchemaFile)
+
+    for badExampleFile in badExampleFiles:
+        print("ERROR: Failed to load example from file", badExampleFile)
+
+    for path, type, id, e in failures:
+        print("ERROR: Validation failed for " + id + "(" + type + ") in " + path + ": " + str(e))

-def reportFailure(exception):
-    global failures
-    failures += 1
-    print(" FAIL:", type(exception).__name__)
-    print(" ", exception)
+    print("")
+    print("===SUMMARY===")
+    print("Bad schema files: ", len(badSchemaFiles))
+    print("Bad example files: ", len(badExampleFiles))
+    print("Successful validations: ", numberOfSuccessfulValidations)
+    print("Failed validations: ", len(failures))
+    print("Unchecked examples: ", len(unchecked))
+    print("=============")

+schemas, badSchemaFiles = loadSchemas()
+print("Loaded", len(schemas), "schemas.")

-failures = 0
-schemas = 0
-examples = 0
+examples, badExampleFiles = loadExamples()
+print("Loaded", len(examples), "examples.")

-for root, dirNames, fileNames in os.walk("schemas"):
-    for schemaFileName in fnmatch.filter(fileNames, "*.json"):
-        applySchema(schemaFileName)
+failures, unchecked, numberOfSuccessfulValidations = validateExamples(examples, schemas)

-print("Encountered", failures, "validation failures through application of", schemas, "schemas to", examples, "examples.")
+report(unchecked, failures, badSchemaFiles, badExampleFiles, numberOfSuccessfulValidations)

-if failures > 0:
+if len(badSchemaFiles) > 0 or len(badExampleFiles) > 0 or len(failures) > 0:
     sys.exit("Validation failed.")
1 change: 1 addition & 0 deletions usage-examples/reference-data-sets/default.gliffy

Large diffs are not rendered by default.

8 changes: 8 additions & 0 deletions usage-examples/reference-data-sets/default.md
@@ -0,0 +1,8 @@
# Default Reference Data Set
This is a description of the "Default" reference data set, found [here](../../examples/reference-data-sets/default/events.zip).

The purpose of a reference data set is to serve as supporting documentation, but also to enable functional and non-functional testing as well as benchmarking of implementations.

This particular data set combines three usage examples: [Confidence Level Joining](../confidence-level-joining.md), [Delivery Interface](../delivery-interface.md) and [Build Avoidance](../build-avoidance.md). It consists of multiple iterations through the represented continuous integration and delivery system, with certain random elements (e.g. sometimes tests fail, sometimes they succeed). A single iteration is represented graphically below.
![Single iteration of the Default Reference Data Set](./default.png "Single iteration of the Default Reference Data Set")
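
As a rough illustration of the iteration structure described above, the sketch below shows one way such a data set could be generated. It is not the bundled generator.py (whose diff is not rendered above); the event types, link type, field names and the event() helper are illustrative assumptions. Only the overall idea of repeated iterations with random outcomes, and the meta.type/meta.id fields that the updated validate.py reads, are taken from this pull request.

```python
import json
import random
import time
import uuid

def event(event_type, data, links=None):
    # Illustrative helper: wraps payload data in a minimal Eiffel-style meta envelope.
    return {
        "meta": {
            "type": event_type,
            "version": "1.0.0",
            "id": str(uuid.uuid4()),
            "time": int(time.time() * 1000),
        },
        "data": data,
        "links": links or [],
    }

events = []
for iteration in range(10):
    # One pass through the represented continuous integration and delivery system.
    activity = event("EiffelActivityTriggeredEvent",
                     {"name": "Act1", "categories": ["Test Activity"]})
    # Random element: sometimes the activity succeeds, sometimes it does not.
    conclusion = random.choice(["SUCCESSFUL", "UNSUCCESSFUL"])
    finished = event("EiffelActivityFinishedEvent",
                     {"outcome": {"conclusion": conclusion}},
                     links=[{"type": "ACTIVITY_EXECUTION", "target": activity["meta"]["id"]}])
    events.extend([activity, finished])

with open("events.json", "w") as f:
    json.dump(events, f, indent=2)
```

The resulting events.json follows the same meta.type/meta.id convention that the updated validate.py relies on when matching examples to schemas.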

Binary file added usage-examples/reference-data-sets/default.png