diff --git a/cloudpg/src/main/java/io/clouditor/graph/App.kt b/cloudpg/src/main/java/io/clouditor/graph/App.kt index 658ab9f..6dcb660 100644 --- a/cloudpg/src/main/java/io/clouditor/graph/App.kt +++ b/cloudpg/src/main/java/io/clouditor/graph/App.kt @@ -174,6 +174,7 @@ object App : Callable { .registerPass(GormDatabasePass()) .registerPass(PyMongoPass()) .registerPass(Psycopg2Pass()) + .registerPass(PythonFileWritePass()) .processAnnotations(true) if (labelsEnabled) { diff --git a/cloudpg/src/main/java/io/clouditor/graph/nodes/labels/PseudoIdentifier.kt b/cloudpg/src/main/java/io/clouditor/graph/nodes/labels/PseudoIdentifier.kt index 851507d..e0cde80 100644 --- a/cloudpg/src/main/java/io/clouditor/graph/nodes/labels/PseudoIdentifier.kt +++ b/cloudpg/src/main/java/io/clouditor/graph/nodes/labels/PseudoIdentifier.kt @@ -4,6 +4,7 @@ import de.fraunhofer.aisec.cpg.graph.Node open class PseudoIdentifier(labeledNode: Node) : DataLabel(labeledNode) { override fun areMergeable(l: Label): Boolean { - return l::class == PseudoIdentifier::class + return false + //return l::class == PseudoIdentifier::class } } diff --git a/cloudpg/src/main/java/io/clouditor/graph/passes/DatabaseOperationPass.kt b/cloudpg/src/main/java/io/clouditor/graph/passes/DatabaseOperationPass.kt index ee08ad1..23c5a00 100644 --- a/cloudpg/src/main/java/io/clouditor/graph/passes/DatabaseOperationPass.kt +++ b/cloudpg/src/main/java/io/clouditor/graph/passes/DatabaseOperationPass.kt @@ -41,9 +41,10 @@ abstract class DatabaseOperationPass : Pass() { connect: DatabaseConnect, storage: List, calls: List, - app: Application? 
+ app: Application?, + type: String ): DatabaseQuery { - val op = DatabaseQuery(modify, calls, storage, connect.to) + val op = DatabaseQuery(modify, type, calls, storage, connect.to) op.location = app?.location storage.forEach { diff --git a/cloudpg/src/main/java/io/clouditor/graph/passes/FileWritePass.kt b/cloudpg/src/main/java/io/clouditor/graph/passes/FileWritePass.kt new file mode 100644 index 0000000..2667231 --- /dev/null +++ b/cloudpg/src/main/java/io/clouditor/graph/passes/FileWritePass.kt @@ -0,0 +1,25 @@ +package io.clouditor.graph.passes + +import de.fraunhofer.aisec.cpg.TranslationResult +import de.fraunhofer.aisec.cpg.graph.statements.expressions.CallExpression +import de.fraunhofer.aisec.cpg.passes.Pass +import io.clouditor.graph.Application +import io.clouditor.graph.FileWrite +import io.clouditor.graph.plusAssign + +abstract class FileWritePass: Pass() { + + protected fun createFileWrite( + t: TranslationResult, + call: CallExpression, + app: Application? + ): FileWrite { + // Create node + val fileWriteNode = FileWrite(call) + // Add to functionalities if necessary + app?.functionalities?.plusAssign(fileWriteNode) + // Add to translation result + t += fileWriteNode + return fileWriteNode + } +} \ No newline at end of file diff --git a/cloudpg/src/main/java/io/clouditor/graph/passes/GormDatabasePass.kt b/cloudpg/src/main/java/io/clouditor/graph/passes/GormDatabasePass.kt index c957832..8d78547 100644 --- a/cloudpg/src/main/java/io/clouditor/graph/passes/GormDatabasePass.kt +++ b/cloudpg/src/main/java/io/clouditor/graph/passes/GormDatabasePass.kt @@ -13,6 +13,7 @@ import de.fraunhofer.aisec.cpg.processing.IVisitor import de.fraunhofer.aisec.cpg.processing.strategy.Strategy import io.clouditor.graph.* import io.clouditor.graph.nodes.getStorageOrCreate +import io.clouditor.graph.utils.DatabaseQueryType class GormDatabasePass : DatabaseOperationPass() { override fun accept(t: TranslationResult) { @@ -116,7 +117,16 @@ class GormDatabasePass : 
DatabaseOperationPass() { val op = app?.functionalities?.filterIsInstance()?.firstOrNull()?.let { - val op = createDatabaseQuery(result, false, it, mutableListOf(), calls, app) + val op = + createDatabaseQuery( + result, + false, + it, + mutableListOf(), + calls, + app, + DatabaseQueryType.READ.toString() + ) op.name = call.name // loop through the calls and set DFG edges @@ -144,7 +154,8 @@ class GormDatabasePass : DatabaseOperationPass() { it, mutableListOf(), mutableListOf(call), - app + app, + DatabaseQueryType.CREATE.toString() ) op.name = call.name @@ -165,7 +176,8 @@ class GormDatabasePass : DatabaseOperationPass() { it, mutableListOf(), mutableListOf(call), - app + app, + DatabaseQueryType.UPDATE.toString() ) op.name = call.name diff --git a/cloudpg/src/main/java/io/clouditor/graph/passes/HttpClientPass.kt b/cloudpg/src/main/java/io/clouditor/graph/passes/HttpClientPass.kt index 4b9a016..6c7e5b0 100644 --- a/cloudpg/src/main/java/io/clouditor/graph/passes/HttpClientPass.kt +++ b/cloudpg/src/main/java/io/clouditor/graph/passes/HttpClientPass.kt @@ -16,7 +16,7 @@ abstract class HttpClientPass : Pass() { app: Application? 
): HttpRequest { val endpoints = getEndpointsForUrl(t, url, method) - val request = HttpRequest(call, body, endpoints) + val request = HttpRequest(call, body, endpoints, url) request.name = method request.location = call.location diff --git a/cloudpg/src/main/java/io/clouditor/graph/passes/golang/GoFileWritePass.kt b/cloudpg/src/main/java/io/clouditor/graph/passes/golang/GoFileWritePass.kt new file mode 100644 index 0000000..d410f09 --- /dev/null +++ b/cloudpg/src/main/java/io/clouditor/graph/passes/golang/GoFileWritePass.kt @@ -0,0 +1,35 @@ +package io.clouditor.graph.passes.golang + +import de.fraunhofer.aisec.cpg.ExperimentalGolang +import de.fraunhofer.aisec.cpg.TranslationResult +import de.fraunhofer.aisec.cpg.graph.Node +import de.fraunhofer.aisec.cpg.graph.statements.expressions.MemberCallExpression +import de.fraunhofer.aisec.cpg.processing.IVisitor +import de.fraunhofer.aisec.cpg.processing.strategy.Strategy +import io.clouditor.graph.findApplicationByTU +import io.clouditor.graph.passes.FileWritePass + +@ExperimentalGolang +class GoFileWritePass: FileWritePass() { + override fun accept(t: TranslationResult) { + for (tu in t.translationUnits) { + tu.accept( + Strategy::AST_FORWARD, + object : IVisitor() { + // check all MemberCallExpressions + fun visit(r: MemberCallExpression) { + // look for WriteFile() call of the os library + if (r.name == "WriteFile" && r.base.name == "os") { + createFileWrite(t, r, t.findApplicationByTU(tu)) + } + } + } + ) + } + } + + override fun cleanup() { + // Nothing to do + } + +} \ No newline at end of file diff --git a/cloudpg/src/main/java/io/clouditor/graph/passes/python/Psycopg2Pass.kt b/cloudpg/src/main/java/io/clouditor/graph/passes/python/Psycopg2Pass.kt index 3b3efc9..cc5453e 100644 --- a/cloudpg/src/main/java/io/clouditor/graph/passes/python/Psycopg2Pass.kt +++ b/cloudpg/src/main/java/io/clouditor/graph/passes/python/Psycopg2Pass.kt @@ -9,6 +9,7 @@ import de.fraunhofer.aisec.cpg.processing.strategy.Strategy import 
io.clouditor.graph.* import io.clouditor.graph.nodes.getStorageOrCreate import io.clouditor.graph.passes.DatabaseOperationPass +import io.clouditor.graph.utils.DatabaseQueryType class Psycopg2Pass : DatabaseOperationPass() { @@ -148,7 +149,16 @@ class Psycopg2Pass : DatabaseOperationPass() { val dbName = dbStorage.firstOrNull()?.name val storage = connect.to.map { it.getStorageOrCreate(table ?: "", dbName) } - val op = createDatabaseQuery(result, false, connect, storage, mutableListOf(call), app) + val op = + createDatabaseQuery( + result, + false, + connect, + storage, + mutableListOf(call), + app, + DatabaseQueryType.UNKNOWN.toString() + ) op.name = call.name // in the select case, the arguments are just arguments to the query itself and flow diff --git a/cloudpg/src/main/java/io/clouditor/graph/passes/python/PyMongoPass.kt b/cloudpg/src/main/java/io/clouditor/graph/passes/python/PyMongoPass.kt index f35c7e1..7b1de07 100644 --- a/cloudpg/src/main/java/io/clouditor/graph/passes/python/PyMongoPass.kt +++ b/cloudpg/src/main/java/io/clouditor/graph/passes/python/PyMongoPass.kt @@ -10,6 +10,7 @@ import de.fraunhofer.aisec.cpg.processing.strategy.Strategy import io.clouditor.graph.* import io.clouditor.graph.nodes.getStorageOrCreate import io.clouditor.graph.passes.DatabaseOperationPass +import io.clouditor.graph.utils.DatabaseQueryType import java.net.URI class PyMongoPass : DatabaseOperationPass() { @@ -168,14 +169,68 @@ class PyMongoPass : DatabaseOperationPass() { var (connect, storage) = pair var op: DatabaseQuery? 
= null if (mce.name == "insert_one") { - op = createDatabaseQuery(t, true, connect, storage, listOf(mce), app) + op = + createDatabaseQuery( + t, + true, + connect, + storage, + listOf(mce), + app, + DatabaseQueryType.CREATE.toString() + ) // data flows from first argument to op mce.arguments.firstOrNull()?.addNextDFG(op) } if (mce.name == "find" || mce.name == "find_one") { - op = createDatabaseQuery(t, false, connect, storage, listOf(mce), app) + op = + createDatabaseQuery( + t, + false, + connect, + storage, + listOf(mce), + app, + DatabaseQueryType.READ.toString() + ) + // data flows from first argument to op + mce.arguments.firstOrNull()?.addNextDFG(op) + + // and towards the DFG target(s) of the call + mce.nextDFG.forEach { op!!.addNextDFG(it) } + } + + if (mce.name == "delete_one" || mce.name == "delete_many") { + op = + createDatabaseQuery( + t, + true, + connect, + storage, + listOf(mce), + app, + DatabaseQueryType.DELETE.toString() + ) + // data flows from first argument to op + mce.arguments.firstOrNull()?.addNextDFG(op) + + // and towards the DFG target(s) of the call + mce.nextDFG.forEach { op!!.addNextDFG(it) } + } + + if (mce.name == "update_one" || mce.name == "update_many") { + op = + createDatabaseQuery( + t, + true, + connect, + storage, + listOf(mce), + app, + DatabaseQueryType.UPDATE.toString() + ) // data flows from first argument to op mce.arguments.firstOrNull()?.addNextDFG(op) diff --git a/cloudpg/src/main/java/io/clouditor/graph/passes/python/PythonFileWritePass.kt b/cloudpg/src/main/java/io/clouditor/graph/passes/python/PythonFileWritePass.kt new file mode 100644 index 0000000..1dfdee9 --- /dev/null +++ b/cloudpg/src/main/java/io/clouditor/graph/passes/python/PythonFileWritePass.kt @@ -0,0 +1,34 @@ +package io.clouditor.graph.passes.python + +import de.fraunhofer.aisec.cpg.ExperimentalPython +import de.fraunhofer.aisec.cpg.TranslationResult +import de.fraunhofer.aisec.cpg.graph.Node +import 
de.fraunhofer.aisec.cpg.graph.statements.expressions.MemberCallExpression +import de.fraunhofer.aisec.cpg.processing.IVisitor +import de.fraunhofer.aisec.cpg.processing.strategy.Strategy +import io.clouditor.graph.findApplicationByTU +import io.clouditor.graph.passes.FileWritePass + +@ExperimentalPython +class PythonFileWritePass: FileWritePass() { + override fun accept(t: TranslationResult) { + for (tu in t.translationUnits) { + tu.accept( + Strategy::AST_FORWARD, + object : IVisitor() { + fun visit(r: MemberCallExpression) { + // look for write() call + if (r.name == "write") { + createFileWrite(t, r, t.findApplicationByTU(tu)) + } + } + } + ) + } + } + + override fun cleanup() { + // Nothing to do + } + +} \ No newline at end of file diff --git a/cloudpg/src/main/java/io/clouditor/graph/passes/python/RequestsPass.kt b/cloudpg/src/main/java/io/clouditor/graph/passes/python/RequestsPass.kt index c77540f..4a4bea3 100644 --- a/cloudpg/src/main/java/io/clouditor/graph/passes/python/RequestsPass.kt +++ b/cloudpg/src/main/java/io/clouditor/graph/passes/python/RequestsPass.kt @@ -29,6 +29,10 @@ class RequestsPass : HttpClientPass() { handleClientRequest(tu, t, r, "GET") } else if (r.name == "post" && r.base.name == "requests") { handleClientRequest(tu, t, r, "POST") + } else if (r.name == "delete" && r.base.name == "requests") { + handleClientRequest(tu, t, r, "DELETE") + } else if (r.name == "put" && r.base.name == "requests") { + handleClientRequest(tu, t, r, "PUT") } } } diff --git a/cloudpg/src/main/java/io/clouditor/graph/utils/DatabaseQueryType.kt b/cloudpg/src/main/java/io/clouditor/graph/utils/DatabaseQueryType.kt new file mode 100644 index 0000000..b2114ad --- /dev/null +++ b/cloudpg/src/main/java/io/clouditor/graph/utils/DatabaseQueryType.kt @@ -0,0 +1,9 @@ +package io.clouditor.graph.utils + +enum class DatabaseQueryType { + CREATE, + READ, + UPDATE, + DELETE, + UNKNOWN +} diff --git a/cloudpg/src/test/java/io/clouditor/graph/GDPRComplianceChecks.kt 
b/cloudpg/src/test/java/io/clouditor/graph/GDPRComplianceChecks.kt new file mode 100644 index 0000000..c49b2a8 --- /dev/null +++ b/cloudpg/src/test/java/io/clouditor/graph/GDPRComplianceChecks.kt @@ -0,0 +1,697 @@ +package io.clouditor.graph + +import io.clouditor.graph.utils.DatabaseQueryType +import kotlin.io.path.Path +import kotlin.test.assertEquals +import kotlin.test.assertNotEquals +import org.junit.Test +import org.junit.jupiter.api.Tag +import org.neo4j.driver.internal.InternalPath +import kotlin.test.assertFalse +import kotlin.test.assertTrue + +@Tag("TestingLibrary") +open class GDPRComplianceChecks { + + @Test + fun checkComplianceToArticle16() { + val result = + executePPGAndQuery( + Path( + System.getProperty("user.dir") + + "/../ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python" + ), + listOf(Path(".")), + "MATCH path1=(ps1:PseudoIdentifier)--()-[:DFG*]->(hr1:HttpRequest {name: 'POST'})-[:TO]->(he1:HttpEndpoint)--()-[:DFG*]->(d1:DatabaseQuery {type: 'CREATE'}) WHERE NOT EXISTS { MATCH path2=(ps1)--()-[:DFG*]->(hr3:HttpRequest {name: 'PUT'})-[:TO]->(he3:HttpEndpoint {method: 'PUT'})--()-[:DFG*]->(d2:DatabaseQuery) WHERE (d2.type='UPDATE') AND (d1)-[:STORAGE]->(:DatabaseStorage)<-[:STORAGE]-(d2) } RETURN path1" + ) + + // create a list for all pseudoidentifiers with no update call connected to them via a data flow + val listOfAllPseudoIdentifierWithNoUpdateByIdentity = mutableListOf() + // iterate over all paths and add to the list + result.forEach { + var path = it.get("path1") as Array<*> + + // the first node is the pseudoidentifier because of the query + val firstNode = (path.first() as InternalPath.SelfContainedSegment).start() + if (firstNode.labels().contains("PseudoIdentifier")) { + // add the pseudoidentifier to the list if it is not already in it + if (!listOfAllPseudoIdentifierWithNoUpdateByIdentity.contains(firstNode.id())) + listOfAllPseudoIdentifierWithNoUpdateByIdentity.add(firstNode.id()) + } + } + // if the code is 
compliant to article 16, the list should be empty + assertEquals(0, listOfAllPseudoIdentifierWithNoUpdateByIdentity.size) + } + + @Test + fun checkComplianceToArticle16_validation() { + val result = + executePPGAndQuery( + Path( + System.getProperty("user.dir") + + "/../ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python_validation" + ), + listOf(Path(".")), + "MATCH path1=(ps1:PseudoIdentifier)--()-[:DFG*]->(hr1:HttpRequest {name: 'POST'})-[:TO]->(he1:HttpEndpoint)--()-[:DFG*]->(d1:DatabaseQuery {type: 'CREATE'}) WHERE NOT EXISTS { MATCH path2=(ps1)--()-[:DFG*]->(hr3:HttpRequest {name: 'PUT'})-[:TO]->(he3:HttpEndpoint {method: 'PUT'})--()-[:DFG*]->(d2:DatabaseQuery) WHERE (d2.type='UPDATE') AND (d1)-[:STORAGE]->(:DatabaseStorage)<-[:STORAGE]-(d2) } RETURN path1" + ) + + // create a list for all pseudoidentifiers with no update call connected to them via a data + // flow + val listOfAllPseudoIdentifierWithNoUpdateByIdentity = mutableListOf() + // iterate over all paths and add to the list + result.forEach { + var path = it.get("path1") as Array<*> + + // the first node is the pseudoidentifier because of the query + val firstNode = (path.first() as InternalPath.SelfContainedSegment).start() + if (firstNode.labels().contains("PseudoIdentifier")) { + // add the pseudoidentifier to the list if it is not already in it + if (!listOfAllPseudoIdentifierWithNoUpdateByIdentity.contains(firstNode.id())) + listOfAllPseudoIdentifierWithNoUpdateByIdentity.add(firstNode.id()) + } + } + // if the code is compliant to article 16, the list should be empty + assertNotEquals(0, listOfAllPseudoIdentifierWithNoUpdateByIdentity.size) + } + + @Test + fun checkComplianceToArticle16_same_personal_data_different_location() { + val result = + executePPGAndQuery( + Path( + System.getProperty("user.dir") + + "/../ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python_same_personal_data_different_location" + ), + listOf(Path(".")), + "MATCH 
path1=(ps1:PseudoIdentifier)--()-[:DFG*]->(hr1:HttpRequest {name: 'POST'})-[:TO]->(he1:HttpEndpoint)--()-[:DFG*]->(d1:DatabaseQuery {type: 'CREATE'}) WHERE NOT EXISTS { MATCH path2=(ps1)--()-[:DFG*]->(hr3:HttpRequest {name: 'PUT'})-[:TO]->(he3:HttpEndpoint {method: 'PUT'})--()-[:DFG*]->(d2:DatabaseQuery) WHERE (d2.type='UPDATE') AND (d1)-[:STORAGE]->(:DatabaseStorage)<-[:STORAGE]-(d2) } RETURN path1" + ) + + // create a list for all pseudoidentifiers with no update call connected to them via a data flow + val listOfAllPseudoIdentifierWithNoUpdateByIdentity = mutableListOf() + // iterate over all paths and add to the list + result.forEach { + var path = it.get("path1") as Array<*> + + // the first node is the pseudoidentifier because of the query + val firstNode = (path.first() as InternalPath.SelfContainedSegment).start() + if (firstNode.labels().contains("PseudoIdentifier")) { + // add the pseudoidentifier to the list if it is not already in it + if (!listOfAllPseudoIdentifierWithNoUpdateByIdentity.contains(firstNode.id())) + listOfAllPseudoIdentifierWithNoUpdateByIdentity.add(firstNode.id()) + } + } + // if the code is compliant to article 16, the list should be empty + assertEquals(0, listOfAllPseudoIdentifierWithNoUpdateByIdentity.size) + } + + @Test + fun checkComplianceToArticle17_paragraph_1() { + val result = + executePPGAndQuery( + Path( + System.getProperty("user.dir") + + "/../ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python" + ), + listOf(Path(".")), + "MATCH path1=(ps1:PseudoIdentifier)--()-[:DFG*]->(hr1:HttpRequest {name: 'POST'})-[:TO]->(he1:HttpEndpoint)--()-[:DFG*]->(d1:DatabaseQuery {type: 'CREATE'}) WHERE NOT EXISTS { MATCH path2=(ps1)--()-[:DFG*]->(hr3:HttpRequest {name: 'DELETE'})-[:TO]->(he3:HttpEndpoint {method: 'DELETE'})--()-[:DFG*]->(d2:DatabaseQuery) WHERE (d2.type='DELETE') AND (d1)-[:STORAGE]->(:DatabaseStorage)<-[:STORAGE]-(d2) } RETURN path1" + ) + // create a list for all pseudoidentifiers with no delete call connected 
to them via a data flow + val listOfAllPseudoIdentifierWithNoDeleteByIdentity = mutableListOf() + // iterate over all paths and add to the list + result.forEach { + var path = it.get("path1") as Array<*> + + // the first node is the pseudoidentifier because of the query + val firstNode = (path.first() as InternalPath.SelfContainedSegment).start() + if (firstNode.labels().contains("PseudoIdentifier")) { + // add the pseudoidentifier to the list if it is not already in it + if (!listOfAllPseudoIdentifierWithNoDeleteByIdentity.contains(firstNode.id())) + listOfAllPseudoIdentifierWithNoDeleteByIdentity.add(firstNode.id()) + } + } + // if the code is compliant to article 17(1), the list should be empty + assertEquals(0, listOfAllPseudoIdentifierWithNoDeleteByIdentity.size) + } + + @Test + fun checkComplianceToArticle17_paragraph_1_validation() { + val result = + executePPGAndQuery( + Path( + System.getProperty("user.dir") + + "/../ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python_validation" + ), + listOf(Path(".")), + "MATCH path1=(ps1:PseudoIdentifier)--()-[:DFG*]->(hr1:HttpRequest {name: 'POST'})-[:TO]->(he1:HttpEndpoint)--()-[:DFG*]->(d1:DatabaseQuery {type: 'CREATE'}) WHERE NOT EXISTS { MATCH path2=(ps1)--()-[:DFG*]->(hr3:HttpRequest {name: 'DELETE'})-[:TO]->(he3:HttpEndpoint {method: 'DELETE'})--()-[:DFG*]->(d2:DatabaseQuery) WHERE (d2.type='DELETE') AND (d1)-[:STORAGE]->(:DatabaseStorage)<-[:STORAGE]-(d2) } RETURN path1" + ) + // create a list for all pseudoidentifiers with no delete call connected to them via a data flow + val listOfAllPseudoIdentifierWithNoDeleteByIdentity = mutableListOf() + // iterate over all paths and add to the list + result.forEach { + var path = it.get("path1") as Array<*> + + // the first node is the pseudoidentifier because of the query + val firstNode = (path.first() as InternalPath.SelfContainedSegment).start() + if (firstNode.labels().contains("PseudoIdentifier")) { + // add the pseudoidentifier to the list if it is not 
already in it + if (!listOfAllPseudoIdentifierWithNoDeleteByIdentity.contains(firstNode.id())) + listOfAllPseudoIdentifierWithNoDeleteByIdentity.add(firstNode.id()) + } + } + // if the code is compliant to article 17(1), the list should be empty + assertNotEquals(0, listOfAllPseudoIdentifierWithNoDeleteByIdentity.size) + } + + @Test + fun checkComplianceToArticle17_paragraph_1_same_personal_data_different_location() { + val result = + executePPGAndQuery( + Path( + System.getProperty("user.dir") + + "/../ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python_same_personal_data_different_location" + ), + listOf(Path(".")), + "MATCH path1=(ps1:PseudoIdentifier)--()-[:DFG*]->(hr1:HttpRequest {name: 'POST'})-[:TO]->(he1:HttpEndpoint)--()-[:DFG*]->(d1:DatabaseQuery {type: 'CREATE'}) WHERE NOT EXISTS { MATCH path2=(ps1)--()-[:DFG*]->(hr3:HttpRequest {name: 'DELETE'})-[:TO]->(he3:HttpEndpoint {method: 'DELETE'})--()-[:DFG*]->(d2:DatabaseQuery) WHERE (d2.type='DELETE') AND (d1)-[:STORAGE]->(:DatabaseStorage)<-[:STORAGE]-(d2) } RETURN path1" + ) + // create a list for all pseudoidentifiers with no delete call connected to them via a data flow + val listOfAllPseudoIdentifierWithNoDeleteByIdentity = mutableListOf() + // iterate over all paths and add to the list + result.forEach { + var path = it.get("path1") as Array<*> + + // the first node is the pseudoidentifier because of the query + val firstNode = (path.first() as InternalPath.SelfContainedSegment).start() + if (firstNode.labels().contains("PseudoIdentifier")) { + // add the pseudoidentifier to the list if it is not already in it + if (!listOfAllPseudoIdentifierWithNoDeleteByIdentity.contains(firstNode.id())) + listOfAllPseudoIdentifierWithNoDeleteByIdentity.add(firstNode.id()) + } + } + // if the code is compliant to article 17(1), the list should be empty + assertEquals(0, listOfAllPseudoIdentifierWithNoDeleteByIdentity.size) + } + + @Test + fun checkComplianceToArticle17_paragraph_2() { + val result = + 
executePPGAndQuery( + Path( + System.getProperty("user.dir") + + "/../ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python" + ), + listOf(Path(".")), + "MATCH (hr1:HttpRequest), path1=(ps1:PseudoIdentifier)--()-[:DFG*]->(hr1) WHERE NOT (hr1)-[:TO]-(:HttpEndpoint) AND NOT EXISTS { MATCH path2=(ps1)--()-[:DFG*]->(hr2:HttpRequest {name: 'DELETE'})-[:TO]-(he2:HttpEndpoint {method: 'DELETE'})--()-[:DFG*]->(hr3:HttpRequest) WHERE (hr3.name='DELETE') AND (hr3.url = hr1.url) AND NOT (hr3)-[:TO]-(:HttpEndpoint) } RETURN path1" + ) + // create a list for all pseudoidentifiers, which are communicated to extern with no delete call to extern connected to them via a data flow + val listOfAllPseudoIdentifierWithNoDeleteToExternByIdentity = mutableListOf() + // iterate over all paths and add to the list + result.forEach { + var path = it.get("path1") as Array<*> + + // the first node is the pseudoidentifier because of the query + val firstNode = (path.first() as InternalPath.SelfContainedSegment).start() + if (firstNode.labels().contains("PseudoIdentifier")) { + // add the pseudoidentifier to the list if it is not already in it + if (!listOfAllPseudoIdentifierWithNoDeleteToExternByIdentity.contains( + firstNode.id() + ) + ) + listOfAllPseudoIdentifierWithNoDeleteToExternByIdentity.add(firstNode.id()) + } + } + // if the code is compliant to article 17(2), the list should be empty + assertEquals(0, listOfAllPseudoIdentifierWithNoDeleteToExternByIdentity.size) + } + + @Test + fun checkComplianceToArticle17_paragraph_2_validation() { + val result = + executePPGAndQuery( + Path( + System.getProperty("user.dir") + + "/../ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python_validation" + ), + listOf(Path(".")), + "MATCH (hr1:HttpRequest), path1=(ps1:PseudoIdentifier)--()-[:DFG*]->(hr1) WHERE NOT (hr1)-[:TO]-(:HttpEndpoint) AND NOT EXISTS { MATCH path2=(ps1)--()-[:DFG*]->(hr2:HttpRequest {name: 'DELETE'})-[:TO]-(he2:HttpEndpoint {method: 
'DELETE'})--()-[:DFG*]->(hr3:HttpRequest) WHERE (hr3.name='DELETE') AND (hr3.url = hr1.url) AND NOT (hr3)-[:TO]-(:HttpEndpoint) } RETURN path1" + ) + // create a list for all pseudoidentifiers, which are communicated to extern with no delete call to extern connected to them via a data flow + val listOfAllPseudoIdentifierWithNoDeleteToExternByIdentity = mutableListOf() + // iterate over all paths and add to the list + result.forEach { + var path = it.get("path1") as Array<*> + + // the first node is the pseudoidentifier because of the query + val firstNode = (path.first() as InternalPath.SelfContainedSegment).start() + if (firstNode.labels().contains("PseudoIdentifier")) { + // add the pseudoidentifier to the list if it is not already in it + if (!listOfAllPseudoIdentifierWithNoDeleteToExternByIdentity.contains( + firstNode.id() + ) + ) + listOfAllPseudoIdentifierWithNoDeleteToExternByIdentity.add(firstNode.id()) + } + } + // if the code is compliant to article 17(2), the list should be empty + assertNotEquals(0, listOfAllPseudoIdentifierWithNoDeleteToExternByIdentity.size) + } + + @Test + fun checkComplianceToArticle17_paragraph_2_same_personal_data_different_location() { + val result = + executePPGAndQuery( + Path( + System.getProperty("user.dir") + + "/../ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python_same_personal_data_different_location" + ), + listOf(Path(".")), + "MATCH (hr1:HttpRequest), path1=(ps1:PseudoIdentifier)--()-[:DFG*]->(hr1) WHERE NOT (hr1)-[:TO]-(:HttpEndpoint) AND NOT EXISTS { MATCH path2=(ps1)--()-[:DFG*]->(hr2:HttpRequest {name: 'DELETE'})-[:TO]-(he2:HttpEndpoint {method: 'DELETE'})--()-[:DFG*]->(hr3:HttpRequest) WHERE (hr3.name='DELETE') AND (hr3.url = hr1.url) AND NOT (hr3)-[:TO]-(:HttpEndpoint) } RETURN path1" + ) + // create a list for all pseudoidentifiers, which are communicated to extern with no delete call to extern connected to them via a data flow + val listOfAllPseudoIdentifierWithNoDeleteToExternByIdentity = 
mutableListOf() + // iterate over all paths and add to the list + result.forEach { + var path = it.get("path1") as Array<*> + + // the first node is the pseudoidentifier because of the query + val firstNode = (path.first() as InternalPath.SelfContainedSegment).start() + if (firstNode.labels().contains("PseudoIdentifier")) { + // add the pseudoidentifier to the list if it is not already in it + if (!listOfAllPseudoIdentifierWithNoDeleteToExternByIdentity.contains( + firstNode.id() + ) + ) + listOfAllPseudoIdentifierWithNoDeleteToExternByIdentity.add(firstNode.id()) + } + } + // if the code is compliant to article 17(2), the list should be empty + assertEquals(0, listOfAllPseudoIdentifierWithNoDeleteToExternByIdentity.size) + } + + @Test + fun checkComplianceToArticle19() { + val result_data_flows = + executePPGAndQuery( + Path( + System.getProperty("user.dir") + + "/../ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python" + ), + listOf(Path(".")), + "MATCH path1=(ps1:PseudoIdentifier)--()-[:DFG*]->(hr1:HttpRequest)-[:TO]->(he1:HttpEndpoint)--()-[:DFG*]->(hr2:HttpRequest) WHERE NOT (hr2)-[:TO]->(:HttpEndpoint) AND NOT EXISTS { MATCH path2=(ps1)--()-[:DFG*]->(hr3:HttpRequest)-[:TO]->(he3:HttpEndpoint)--()-[:DFG*]->(hr4:HttpRequest) WHERE NOT (hr4)-[:TO]->(:HttpEndpoint) AND ((hr4.name='DELETE') OR (hr4.name='PUT')) AND (hr4.url = hr2.url) } RETURN path1" + ) + // create a list for all pseudoidentifiers, which are communicated to extern with no delete or update call to extern connected to them via a data flow + val listOfAllPseudoIdentifierWithNoDeleteOrUpdateToExternByIdentity = mutableListOf() + // iterate over all paths and add to the list + result_data_flows.forEach { + var path = it.get("path1") as Array<*> + + // the first node is the pseudoidentifier because of the query + val firstNode = (path.first() as InternalPath.SelfContainedSegment).start() + if (firstNode.labels().contains("PseudoIdentifier")) { + // add the pseudoidentifier to the list 
if it is not already in it + if (!listOfAllPseudoIdentifierWithNoDeleteOrUpdateToExternByIdentity.contains( + firstNode.id() + ) + ) + listOfAllPseudoIdentifierWithNoDeleteOrUpdateToExternByIdentity.add( + firstNode.id() + ) + } + } + // if the code is compliant to article 19, the list should be empty + assertEquals(0, listOfAllPseudoIdentifierWithNoDeleteOrUpdateToExternByIdentity.size) + + val result_data_storage = + executePPGAndQuery( + Path( + System.getProperty("user.dir") + + "/../ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python" + ), + listOf(Path(".")), + "MATCH path1=(ps1:PseudoIdentifier)--()-[:DFG*]->(hr1:HttpRequest)-[:TO]->(he1:HttpEndpoint)--()-[:DFG*]-(hr2:HttpRequest) WHERE NOT (hr2)-[:TO]-(:HttpEndpoint) WITH COLLECT(DISTINCT hr2.url) as externalDataRecipients MATCH path2=(:FileWrite)-[:CALLS]->(m:MemberCallExpression)-[:ARGUMENTS]->()<-[:DFG*]-(l2:Literal) WHERE ALL(recipient IN externalDataRecipients WHERE l2.value CONTAINS recipient) RETURN path2" + ) + // iterate over all found paths and check if the first node is a FileWrite + result_data_storage.forEach { + val path = it.get("path2") as Array<*> + // the first node is the literal, which contains the name of the personal data recipient + val firstNode = (path.first() as InternalPath.SelfContainedSegment).start() + // if the first node is a FileWrite => A call expression that writes a literal containing information of the data recipients could be found => the code is compliant to article 19 + assertTrue(firstNode.labels().contains("FileWrite")) + } + } + + @Test + fun checkComplianceToArticle19_validation() { + val result_data_flows = + executePPGAndQuery( + Path( + System.getProperty("user.dir") + + "/../ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python_validation" + ), + listOf(Path(".")), + "MATCH path1=(ps1:PseudoIdentifier)--()-[:DFG*]->(hr1:HttpRequest)-[:TO]->(he1:HttpEndpoint)--()-[:DFG*]->(hr2:HttpRequest) WHERE NOT 
(hr2)-[:TO]->(:HttpEndpoint) AND NOT EXISTS { MATCH path2=(ps1)--()-[:DFG*]->(hr3:HttpRequest)-[:TO]->(he3:HttpEndpoint)--()-[:DFG*]->(hr4:HttpRequest) WHERE NOT (hr4)-[:TO]->(:HttpEndpoint) AND ((hr4.name='DELETE') OR (hr4.name='PUT')) AND (hr4.url = hr2.url) } RETURN path1" + ) + // create a list for all pseudoidentifiers, which are communicated to extern with no delete + // or update call to extern connected to them via a data flow + val listOfAllPseudoIdentifierWithNoDeleteOrUpdateToExternByIdentity = mutableListOf() + // iterate over all paths and add to the list + result_data_flows.forEach { + var path = it.get("path1") as Array<*> + + // the first node is the pseudoidentifier because of the query + val firstNode = (path.first() as InternalPath.SelfContainedSegment).start() + if (firstNode.labels().contains("PseudoIdentifier")) { + // add the pseudoidentifier to the list if it is not already in it + if (!listOfAllPseudoIdentifierWithNoDeleteOrUpdateToExternByIdentity.contains( + firstNode.id() + ) + ) + listOfAllPseudoIdentifierWithNoDeleteOrUpdateToExternByIdentity.add( + firstNode.id() + ) + } + } + // check if the code is not compliant to article 19 + assertNotEquals(0, listOfAllPseudoIdentifierWithNoDeleteOrUpdateToExternByIdentity.size) + + val result_data_storage = + executePPGAndQuery( + Path( + System.getProperty("user.dir") + + "/../ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python_validation" + ), + listOf(Path(".")), + "MATCH path1=(ps1:PseudoIdentifier)--()-[:DFG*]->(hr1:HttpRequest)-[:TO]->(he1:HttpEndpoint)--()-[:DFG*]-(hr2:HttpRequest) WHERE NOT (hr2)-[:TO]-(:HttpEndpoint) WITH COLLECT(DISTINCT hr2.url) as externalDataRecipients MATCH path2=(:FileWrite)-[:CALLS]->(m:MemberCallExpression)-[:ARGUMENTS]->()<-[:DFG*]-(l2:Literal) WHERE ALL(recipient IN externalDataRecipients WHERE l2.value CONTAINS recipient) RETURN path2" + ) + // iterate over all found paths and check if the first node is a FileWrite + 
result_data_storage.forEach { + val path = it.get("path2") as Array<*> + // the first node is the literal, which contains the name of the personal data recipient + val firstNode = (path.first() as InternalPath.SelfContainedSegment).start() + // if the first node is a FileWrite => A call expression that writes a literal containing information of the data recipients could be found => the code is compliant to article 19 + assertFalse(firstNode.labels().contains("FileWrite")) + } + } + + @Test + fun checkComplianceToArticle19_same_personal_data_different_location() { + val result_data_flows = + executePPGAndQuery( + Path( + System.getProperty("user.dir") + + "/../ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python_same_personal_data_different_location" + ), + listOf(Path(".")), + "MATCH path1=(ps1:PseudoIdentifier)--()-[:DFG*]->(hr1:HttpRequest)-[:TO]->(he1:HttpEndpoint)--()-[:DFG*]->(hr2:HttpRequest) WHERE NOT (hr2)-[:TO]->(:HttpEndpoint) AND NOT EXISTS { MATCH path2=(ps1)--()-[:DFG*]->(hr3:HttpRequest)-[:TO]->(he3:HttpEndpoint)--()-[:DFG*]->(hr4:HttpRequest) WHERE NOT (hr4)-[:TO]->(:HttpEndpoint) AND ((hr4.name='DELETE') OR (hr4.name='PUT')) AND (hr4.url = hr2.url) } RETURN path1" + ) + // create a list for all pseudoidentifiers, which are communicated to extern with no delete or update call to extern connected to them via a data flow + val listOfAllPseudoIdentifierWithNoDeleteOrUpdateToExternByIdentity = mutableListOf() + // iterate over all paths and add to the list + result_data_flows.forEach { + var path = it.get("path1") as Array<*> + + // the first node is the pseudoidentifier because of the query + val firstNode = (path.first() as InternalPath.SelfContainedSegment).start() + if (firstNode.labels().contains("PseudoIdentifier")) { + // add the pseudoidentifier to the list if it is not already in it + if (!listOfAllPseudoIdentifierWithNoDeleteOrUpdateToExternByIdentity.contains( + firstNode.id() + ) + ) + 
listOfAllPseudoIdentifierWithNoDeleteOrUpdateToExternByIdentity.add( + firstNode.id() + ) + } + } + // if the code is compliant to article 19, the list should be empty + assertEquals(0, listOfAllPseudoIdentifierWithNoDeleteOrUpdateToExternByIdentity.size) + + val result_data_storage = + executePPGAndQuery( + Path( + System.getProperty("user.dir") + + "/../ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python" + ), + listOf(Path(".")), + "MATCH path1=(ps1:PseudoIdentifier)--()-[:DFG*]->(hr1:HttpRequest)-[:TO]->(he1:HttpEndpoint)--()-[:DFG*]-(hr2:HttpRequest) WHERE NOT (hr2)-[:TO]-(:HttpEndpoint) WITH COLLECT(DISTINCT hr2.url) as externalDataRecipients MATCH path2=(:FileWrite)-[:CALLS]->(m:MemberCallExpression)-[:ARGUMENTS]->()<-[:DFG*]-(l2:Literal) WHERE ALL(recipient IN externalDataRecipients WHERE l2.value CONTAINS recipient) RETURN path2" + ) + // iterate over all found paths and check if the first node is a FileWrite + result_data_storage.forEach { + val path = it.get("path2") as Array<*> + // the first node is the literal, which contains the name of the personal data recipient + val firstNode = (path.first() as InternalPath.SelfContainedSegment).start() + // if the first node is a FileWrite => A call expression that writes a literal containing information of the data recipients could be found => the code is compliant to article 19 + assertTrue(firstNode.labels().contains("FileWrite")) + } + } + + @Test + fun checkComplianceToArticle20_paragraph_1() { + val result = + executePPGAndQuery( + Path( + System.getProperty("user.dir") + + "/../ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python" + ), + listOf(Path(".")), + "MATCH path1=(psi:PseudoIdentifier)--()-[:DFG*]->(hr1:HttpRequest {name: \"POST\"})-[:TO]->(he1:HttpEndpoint)-[:DFG*]->(d1:DatabaseQuery {type:\"CREATE\"}) WHERE NOT EXISTS { MATCH path2=(psi)--()-[:DFG*]->(hr2:HttpRequest {name: \"GET\"})-[:TO]->(he2:HttpEndpoint {method: \"GET\"})-[:DFG*]->(d2:DatabaseQuery 
{type:\"READ\"})-[:DFG*]->({name: \"HttpStatus.OK\"}), path3=(:FileWrite)-[:CALLS]->(m:MemberCallExpression)-[:ARGUMENTS]->(:Node)<-[:DFG*]-(hr2) WHERE (d1)-[:STORAGE]->(:DatabaseStorage)<-[:STORAGE]-(d2) } RETURN path1" + ) + // create a list for all pseudoidentifiers with no compliant data portability + val listOfAllPseudoIdentifierWithNoCompliantDataPortabilityByIdentity = + mutableListOf() + // iterate over all paths and add to the list + result.forEach { + val path = it.get("path1") as Array<*> + + // the first node is the pseudoidentifier because of the query + val firstNode = (path.first() as InternalPath.SelfContainedSegment).start() + if (firstNode.labels().contains("PseudoIdentifier")) { + // add the pseudoidentifier to the list if it is not already in it + if (!listOfAllPseudoIdentifierWithNoCompliantDataPortabilityByIdentity.contains( + firstNode.id() + ) + ) + listOfAllPseudoIdentifierWithNoCompliantDataPortabilityByIdentity.add( + firstNode.id() + ) + } + } + // if the code is compliant to article 20(1), the list should be empty + assertEquals(0, listOfAllPseudoIdentifierWithNoCompliantDataPortabilityByIdentity.size) + } + + @Test + fun checkComplianceToArticle20_paragraph_1_validation() { + val result = + executePPGAndQuery( + Path( + System.getProperty("user.dir") + + "/../ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_validation" + ), + listOf(Path(".")), + "MATCH path1=(psi:PseudoIdentifier)--()-[:DFG*]->(hr1:HttpRequest {name: \"POST\"})-[:TO]->(he1:HttpEndpoint)-[:DFG*]->(d1:DatabaseQuery {type:\"CREATE\"}) WHERE NOT EXISTS { MATCH path2=(psi)--()-[:DFG*]->(hr2:HttpRequest {name: \"GET\"})-[:TO]->(he2:HttpEndpoint {method: \"GET\"})-[:DFG*]->(d2:DatabaseQuery {type:\"READ\"})-[:DFG*]->({name: \"HttpStatus.OK\"}), path3=(:FileWrite)-[:CALLS]->(m:MemberCallExpression)-[:ARGUMENTS]->(:Node)<-[:DFG*]-(hr2) WHERE (d1)-[:STORAGE]->(:DatabaseStorage)<-[:STORAGE]-(d2) } RETURN path1" + ) + // create a list for all 
pseudoidentifiers with no compliant data portability + val listOfAllPseudoIdentifierWithNoCompliantDataPortabilityByIdentity = + mutableListOf() + // iterate over all paths and add to the list + result.forEach { + val path = it.get("path1") as Array<*> + + // the first node is the pseudoidentifier because of the query + val firstNode = (path.first() as InternalPath.SelfContainedSegment).start() + if (firstNode.labels().contains("PseudoIdentifier")) { + // add the pseudoidentifier to the list if it is not already in it + if (!listOfAllPseudoIdentifierWithNoCompliantDataPortabilityByIdentity.contains( + firstNode.id() + ) + ) + listOfAllPseudoIdentifierWithNoCompliantDataPortabilityByIdentity.add( + firstNode.id() + ) + } + } + // if the code is compliant to article 20(1), the list should be empty + assertNotEquals(0, listOfAllPseudoIdentifierWithNoCompliantDataPortabilityByIdentity.size) + } + + @Test + fun checkComplianceToArticle20_paragraph_1_same_personal_data_different_location() { + val result = + executePPGAndQuery( + Path( + System.getProperty("user.dir") + + "/../ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_same_personal_data_different_location" + ), + listOf(Path(".")), + "MATCH path1=(psi:PseudoIdentifier)--()-[:DFG*]->(hr1:HttpRequest {name: \"POST\"})-[:TO]->(he1:HttpEndpoint)-[:DFG*]->(d1:DatabaseQuery {type:\"CREATE\"}) WHERE NOT EXISTS { MATCH path2=(psi)--()-[:DFG*]->(hr2:HttpRequest {name: \"GET\"})-[:TO]->(he2:HttpEndpoint {method: \"GET\"})-[:DFG*]->(d2:DatabaseQuery {type:\"READ\"})-[:DFG*]->({name: \"HttpStatus.OK\"}), path3=(:FileWrite)-[:CALLS]->(m:MemberCallExpression)-[:ARGUMENTS]->(:Node)<-[:DFG*]-(hr2) WHERE (d1)-[:STORAGE]->(:DatabaseStorage)<-[:STORAGE]-(d2) } RETURN path1" + ) + // create a list for all pseudoidentifiers with no compliant data portability + val listOfAllPseudoIdentifierWithNoCompliantDataPortabilityByIdentity = + mutableListOf() + // iterate over all paths and add to the list + result.forEach 
{ + val path = it.get("path1") as Array<*> + + // the first node is the pseudoidentifier because of the query + val firstNode = (path.first() as InternalPath.SelfContainedSegment).start() + if (firstNode.labels().contains("PseudoIdentifier")) { + // add the pseudoidentifier to the list if it is not already in it + if (!listOfAllPseudoIdentifierWithNoCompliantDataPortabilityByIdentity.contains( + firstNode.id() + ) + ) + listOfAllPseudoIdentifierWithNoCompliantDataPortabilityByIdentity.add( + firstNode.id() + ) + } + } + // if the code is compliant to article 20(1), the list should be empty + assertEquals(0, listOfAllPseudoIdentifierWithNoCompliantDataPortabilityByIdentity.size) + } + + @Test + fun checkComplianceToArticle20_paragraph_1_no_machine_readable_format() { + val result = + executePPGAndQuery( + Path( + System.getProperty("user.dir") + + "/../ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_no_machine_readable_format" + ), + listOf(Path(".")), + "MATCH path1=(psi:PseudoIdentifier)--()-[:DFG*]->(hr1:HttpRequest {name: \"POST\"})-[:TO]->(he1:HttpEndpoint)-[:DFG*]->(d1:DatabaseQuery {type:\"CREATE\"}) WHERE NOT EXISTS { MATCH path2=(psi)--()-[:DFG*]->(hr2:HttpRequest {name: \"GET\"})-[:TO]->(he2:HttpEndpoint {method: \"GET\"})-[:DFG*]->(d2:DatabaseQuery {type:\"READ\"})-[:DFG*]->({name: \"HttpStatus.OK\"}), path3=(:FileWrite)-[:CALLS]->(m:MemberCallExpression)-[:ARGUMENTS]->(:Node)<-[:DFG*]-(hr2) WHERE (d1)-[:STORAGE]->(:DatabaseStorage)<-[:STORAGE]-(d2) } RETURN path1" + ) + // create a list for all pseudoidentifiers with no compliant data portability + val listOfAllPseudoIdentifierWithNoCompliantDataPortabilityByIdentity = + mutableListOf() + // iterate over all paths and add to the list + result.forEach { + val path = it.get("path1") as Array<*> + + // the first node is the pseudoidentifier because of the query + val firstNode = (path.first() as InternalPath.SelfContainedSegment).start() + if 
(firstNode.labels().contains("PseudoIdentifier")) { + // add the pseudoidentifier to the list if it is not already in it + if (!listOfAllPseudoIdentifierWithNoCompliantDataPortabilityByIdentity.contains( + firstNode.id() + ) + ) + listOfAllPseudoIdentifierWithNoCompliantDataPortabilityByIdentity.add( + firstNode.id() + ) + } + } + // if the code is compliant to article 20(1), the list should be empty => In this case we except the list is not empty, because no machine-readadble format is used + assertNotEquals(0, listOfAllPseudoIdentifierWithNoCompliantDataPortabilityByIdentity.size) + } + + @Test + fun checkComplianceToArticle20_paragraph_2() { + val result = + executePPGAndQuery( + Path( + System.getProperty("user.dir") + + "/../ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python" + ), + listOf(Path(".")), + "MATCH path1=(psi:PseudoIdentifier)--()-[:DFG*]->(hr1:HttpRequest {name: \"POST\"})-[:TO]->(he1:HttpEndpoint)--()-[:DFG*]->(d1:DatabaseQuery {type: \"CREATE\"}) WHERE NOT EXISTS { MATCH path2=(psi)--()-[:DFG*]->(hr2:HttpRequest {name: \"GET\"})-[:TO]->(he2:HttpEndpoint)--()-[:DFG*]->(d2:DatabaseQuery {type:\"READ\"})-[:DFG*]->(hr3:HttpRequest {name: \"PUT\"}) WHERE NOT (hr3)-[:TO]-(:HttpEndpoint) AND (d1)-[:STORAGE]->(:DatabaseStorage)<-[:STORAGE]-(d2) } RETURN path1" + ) + // create a list for all pseudoidentifiers with no compliant data portability (to external + // service) + val listOfAllPseudoIdentifierWithNoCompliantDataPortabilityToExternalServiceByIdentity = + mutableListOf() + // iterate over all paths and add to the list + result.forEach { + var path = it.get("path1") as Array<*> + + // the first node is the pseudoidentifier because of the query + val firstNode = (path.first() as InternalPath.SelfContainedSegment).start() + if (firstNode.labels().contains("PseudoIdentifier")) { + // add the pseudoidentifier to the list if it is not already in it + if 
(!listOfAllPseudoIdentifierWithNoCompliantDataPortabilityToExternalServiceByIdentity + .contains(firstNode.id()) + ) + listOfAllPseudoIdentifierWithNoCompliantDataPortabilityToExternalServiceByIdentity + .add(firstNode.id()) + } + } + // if the code is compliant to article 20(2), the list should be empty + assertEquals( + 0, + listOfAllPseudoIdentifierWithNoCompliantDataPortabilityToExternalServiceByIdentity.size + ) + } + + @Test + fun checkComplianceToArticle20_paragraph_2_validation() { + val result = + executePPGAndQuery( + Path( + System.getProperty("user.dir") + + "/../ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_validation" + ), + listOf(Path(".")), + "MATCH path1=(psi:PseudoIdentifier)--()-[:DFG*]->(hr1:HttpRequest {name: \"POST\"})-[:TO]->(he1:HttpEndpoint)--()-[:DFG*]->(d1:DatabaseQuery {type: \"CREATE\"}) WHERE NOT EXISTS { MATCH path2=(psi)--()-[:DFG*]->(hr2:HttpRequest {name: \"GET\"})-[:TO]->(he2:HttpEndpoint)--()-[:DFG*]->(d2:DatabaseQuery {type:\"READ\"})-[:DFG*]->(hr3:HttpRequest {name: \"PUT\"}) WHERE NOT (hr3)-[:TO]-(:HttpEndpoint) AND (d1)-[:STORAGE]->(:DatabaseStorage)<-[:STORAGE]-(d2) } RETURN path1" + ) + // create a list for all pseudoidentifiers with no compliant data portability (to external + // service) + val listOfAllPseudoIdentifierWithNoCompliantDataPortabilityToExternalServiceByIdentity = + mutableListOf() + // iterate over all paths and add to the list + result.forEach { + var path = it.get("path1") as Array<*> + + // the first node is the pseudoidentifier because of the query + val firstNode = (path.first() as InternalPath.SelfContainedSegment).start() + if (firstNode.labels().contains("PseudoIdentifier")) { + // add the pseudoidentifier to the list if it is not already in it + if (!listOfAllPseudoIdentifierWithNoCompliantDataPortabilityToExternalServiceByIdentity + .contains(firstNode.id()) + ) + listOfAllPseudoIdentifierWithNoCompliantDataPortabilityToExternalServiceByIdentity + .add(firstNode.id()) + } 
+ } + // if the code is compliant to article 20(2), the list should be empty + assertNotEquals( + 0, + listOfAllPseudoIdentifierWithNoCompliantDataPortabilityToExternalServiceByIdentity.size + ) + } + + @Test + fun checkComplianceToArticle20_paragraph_2_same_personal_data_different_location() { + val result = + executePPGAndQuery( + Path( + System.getProperty("user.dir") + + "/../ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_same_personal_data_different_location" + ), + listOf(Path(".")), + "MATCH path1=(psi:PseudoIdentifier)--()-[:DFG*]->(hr1:HttpRequest {name: \"POST\"})-[:TO]->(he1:HttpEndpoint)--()-[:DFG*]->(d1:DatabaseQuery {type: \"CREATE\"}) WHERE NOT EXISTS { MATCH path2=(psi)--()-[:DFG*]->(hr2:HttpRequest {name: \"GET\"})-[:TO]->(he2:HttpEndpoint)--()-[:DFG*]->(d2:DatabaseQuery {type:\"READ\"})-[:DFG*]->(hr3:HttpRequest {name: \"PUT\"}) WHERE NOT (hr3)-[:TO]-(:HttpEndpoint) AND (d1)-[:STORAGE]->(:DatabaseStorage)<-[:STORAGE]-(d2) } RETURN path1" + ) + // create a list for all pseudoidentifiers with no compliant data portability (to external + // service) + val listOfAllPseudoIdentifierWithNoCompliantDataPortabilityToExternalServiceByIdentity = + mutableListOf() + // iterate over all paths and add to the list + result.forEach { + var path = it.get("path1") as Array<*> + + // the first node is the pseudoidentifier because of the query + val firstNode = (path.first() as InternalPath.SelfContainedSegment).start() + if (firstNode.labels().contains("PseudoIdentifier")) { + // add the pseudoidentifier to the list if it is not already in it + if (!listOfAllPseudoIdentifierWithNoCompliantDataPortabilityToExternalServiceByIdentity + .contains(firstNode.id()) + ) + listOfAllPseudoIdentifierWithNoCompliantDataPortabilityToExternalServiceByIdentity + .add(firstNode.id()) + } + } + // if the code is compliant to article 20(2), the list should be empty + assertEquals( + 0, + 
listOfAllPseudoIdentifierWithNoCompliantDataPortabilityToExternalServiceByIdentity.size + ) + } +} diff --git a/cloudpg/src/test/java/io/clouditor/graph/GDPRExtensionPerformanceTest.kt b/cloudpg/src/test/java/io/clouditor/graph/GDPRExtensionPerformanceTest.kt new file mode 100644 index 0000000..3f94c63 --- /dev/null +++ b/cloudpg/src/test/java/io/clouditor/graph/GDPRExtensionPerformanceTest.kt @@ -0,0 +1,80 @@ +package io.clouditor.graph + +import java.util.concurrent.TimeUnit +import kotlin.io.path.* +import kotlinx.benchmark.Scope +import kotlinx.benchmark.readFile +import org.junit.Test +import org.openjdk.jmh.annotations.* +import kotlin.system.measureTimeMillis + +open class GDPRExtensionPerformanceTest { + + @Test + open fun testScalability() { + // create a list of times + val times = mutableListOf() + + // Warmup + executePPG( + Path( + System.getProperty("user.dir") + + "/../ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python" + ), + listOf(Path(".")) + ) + + // measure time needed for execution of PPG (20 times) + for (i in 1..20) { + // measure time + val timeForExecution = measureTimeMillis { + executePPG( + Path( + System.getProperty("user.dir") + + "/../ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python" + ), + listOf(Path(".")) + ) + } + println("Iteration $i: time for execution of PPG: $timeForExecution ms") + times.add(timeForExecution) + } + println("--------------------") + // calculate average time + var averageTime = 0L + for (time in times) { + averageTime += time + } + averageTime /= times.size + // print average time + println("Average time for execution of PPG: $averageTime ms") + + // calculate standard deviation + var standardDeviation = 0L + for (time in times) { + standardDeviation += (time - averageTime) * (time - averageTime) + } + standardDeviation /= times.size + standardDeviation = Math.sqrt(standardDeviation.toDouble()).toLong() + println("Standard deviation for execution of PPG: 
$standardDeviation ms") + + // calculate maximum time + var maximumTime = 0L + for (time in times) { + if (time > maximumTime) { + maximumTime = time + } + } + println("Maximum time for execution of PPG: $maximumTime ms") + + // calculate minimum time + var minimumTime = Long.MAX_VALUE + for (time in times) { + if (time < minimumTime) { + minimumTime = time + } + } + println("Minimum time for execution of PPG: $minimumTime ms") + } + +} \ No newline at end of file diff --git a/owl2java/resources/urn_webprotege_ontology_e4316a28-d966-4499-bd93-6be721055117.owx b/owl2java/resources/urn_webprotege_ontology_e4316a28-d966-4499-bd93-6be721055117.owx index 2a9e00e..b2d84f0 100644 --- a/owl2java/resources/urn_webprotege_ontology_e4316a28-d966-4499-bd93-6be721055117.owx +++ b/owl2java/resources/urn_webprotege_ontology_e4316a28-d966-4499-bd93-6be721055117.owx @@ -6,17 +6,11 @@ xmlns:xsd="http://www.w3.org/2001/XMLSchema#" xmlns:rdfs="http://www.w3.org/2000/01/rdf-schema#" ontologyIRI="urn:webprotege:ontology:e4316a28-d966-4499-bd93-6be721055117"> - - - - - - @@ -311,6 +305,9 @@ + + + @@ -324,46 +321,46 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -417,109 +414,109 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -534,145 +531,145 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -706,42 +703,42 @@ - + - + - + - + - + - + xsd:java.util.List<de.fraunhofer.aisec.cpg.graph.declarations.TranslationUnitDeclaration> @@ -756,14 +753,14 @@ - + - + @@ -793,7 +790,7 @@ - + @@ -815,14 +812,14 @@ - + - + @@ -852,28 +849,28 @@ - + - + - + xsd:java.util.Map<String, String> - + @@ -888,7 +885,7 @@ - + @@ -903,7 +900,7 @@ - + @@ -918,21 +915,21 @@ - + - + - + @@ -947,7 +944,7 @@ - + @@ -966,14 +963,14 @@ - + - + @@ -991,10 +988,17 @@ - + 
+ + + + + + + @@ -1006,7 +1010,7 @@ - + @@ -1021,7 +1025,7 @@ - + @@ -1058,7 +1062,7 @@ - + @@ -1073,42 +1077,42 @@ - + - + - + - + - + - + xsd:de.fraunhofer.aisec.cpg.graph.declarations.FunctionDeclaration @@ -1119,10 +1123,17 @@ - + + + + + + + + @@ -1133,7 +1144,7 @@ - + xsd:de.fraunhofer.aisec.cpg.graph.statements.expressions.CallExpression @@ -1144,21 +1155,21 @@ - + - + - + @@ -1169,7 +1180,7 @@ - + @@ -1180,21 +1191,21 @@ - + - + - + @@ -1205,7 +1216,7 @@ - + @@ -1237,7 +1248,7 @@ - + @@ -1248,7 +1259,7 @@ - + @@ -1271,14 +1282,14 @@ - + - + @@ -1293,21 +1304,21 @@ - + - + - + @@ -1318,28 +1329,28 @@ - + - + - + - + @@ -1350,21 +1361,21 @@ - + - + xsd:de.fraunhofer.aisec.cpg.graph.statements.expressions.CallExpression - + xsd:de.fraunhofer.aisec.cpg.graph.statements.expressions.Expression @@ -1379,14 +1390,14 @@ - + - + @@ -1404,7 +1415,7 @@ - + @@ -1415,7 +1426,7 @@ - + @@ -1429,7 +1440,7 @@ - + @@ -1462,14 +1473,14 @@ - + - + @@ -1491,28 +1502,28 @@ - + - + - + xsd:java.util.ArrayList<String> - + xsd:java.util.ArrayList<Short> @@ -1535,7 +1546,7 @@ - + @@ -1557,21 +1568,21 @@ - + - + - + xsd:de.fraunhofer.aisec.cpg.graph.Node @@ -1582,7 +1593,7 @@ - + @@ -1597,7 +1608,7 @@ - + @@ -1612,7 +1623,7 @@ - + @@ -1641,14 +1652,14 @@ - + - + @@ -1659,7 +1670,7 @@ - + @@ -1678,7 +1689,7 @@ - + @@ -1696,7 +1707,7 @@ - + @@ -1707,7 +1718,7 @@ - + @@ -1718,28 +1729,28 @@ - + - + - + - + @@ -1754,35 +1765,35 @@ - + - + - + - + - + @@ -1801,7 +1812,7 @@ - + @@ -1809,425 +1820,436 @@ + + + + + + + + + xsd:de.fraunhofer.aisec.cpg.graph.statements.expressions.CallExpression + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - - - + + + - - + + - - + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - + + - - - + + + - - + + - - + + - - + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - 
- - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - + + listNamespacedPod - - + + ips += clusterIP ips += externalIP ips += loadBalancerIP name = metadata.name - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -2947,492 +2969,492 @@ name = metadata.name - aws:Account + http://graph.clouditor.io/individuals/aws/Account AWS Account - aws:Aurora + http://graph.clouditor.io/individuals/aws/Aurora AWS Aurora - aws:CloudTrail + http://graph.clouditor.io/individuals/aws/CloudTrail AWS CloudTrail - aws:DynamoDB + http://graph.clouditor.io/individuals/aws/DynamoDB AWS DynamoDB - aws:EC2 + http://graph.clouditor.io/individuals/aws/EC2 AWS EC2 - aws:EC2Instance + http://graph.clouditor.io/individuals/aws/EC2Instance AWS EC2 Instance - aws:EC2LoadBalancer + http://graph.clouditor.io/individuals/aws/EC2LoadBalancer AWS EC2 LoadBalancer - aws:EC2NetworkInterface + http://graph.clouditor.io/individuals/aws/EC2NetworkInterface AWS EC2 Network Interface - aws:EC2Subnet + http://graph.clouditor.io/individuals/aws/EC2Subnet AWS EC2 Subnet - aws:EC2VPC + http://graph.clouditor.io/individuals/aws/EC2VPC AWS EC2 VPC - aws:IAM + http://graph.clouditor.io/individuals/aws/IAM AWS IAM - aws:IAMUser + http://graph.clouditor.io/individuals/aws/IAMUser AWS IAM User - aws:IoTCore + http://graph.clouditor.io/individuals/aws/IoTCore AWS IoT Core - aws:IoTDeviceManagement + http://graph.clouditor.io/individuals/aws/IoTDeviceManagement AWS IoT Device Management - aws:Lambda + http://graph.clouditor.io/individuals/aws/Lambda AWS Lambda - aws:RDS + http://graph.clouditor.io/individuals/aws/RDS AWS RDS - aws:S3 + http://graph.clouditor.io/individuals/aws/S3 AWS S3 - aws:S3Bucket + http://graph.clouditor.io/individuals/aws/S3Bucket AWS S3 Bucket - aws:S3Bucket + http://graph.clouditor.io/individuals/aws/S3Bucket AWS::S3::Bucket - aws:Volume + http://graph.clouditor.io/individuals/aws/Volume Volume - 
azure:ActivityLog + http://graph.clouditor.io/individuals/azure/ActivityLog Azure Activity Log - azure:Azure + http://graph.clouditor.io/individuals/azure/Azure Azure - azure:CosmosDB + http://graph.clouditor.io/individuals/azure/CosmosDB Azure CosmosDB - azure:CosmosDBAccount + http://graph.clouditor.io/individuals/azure/CosmosDBAccount Azure CosmosDB Account - azure:DeviceProvisioning + http://graph.clouditor.io/individuals/azure/DeviceProvisioning Azure Device Provisioning - azure:Disk + http://graph.clouditor.io/individuals/azure/Disk Azure Disk - azure:Functions + http://graph.clouditor.io/individuals/azure/Functions Azure Functions - azure:IoTHub + http://graph.clouditor.io/individuals/azure/IoTHub Azure IoT Hub - azure:SQLDB + http://graph.clouditor.io/individuals/azure/SQLDB Azure SQLDB - azure:SQLDBDatabase + http://graph.clouditor.io/individuals/azure/SQLDBDatabase Azure SQLDB Database - azure:StorageAccount + http://graph.clouditor.io/individuals/azure/StorageAccount Azure StorageAccount - azure:StorageAccount + http://graph.clouditor.io/individuals/azure/StorageAccount Microsoft.Storage/storageAccounts - azure:StorageAccounts + http://graph.clouditor.io/individuals/azure/StorageAccounts Azure StorageAccounts - azure:Subscription + http://graph.clouditor.io/individuals/azure/Subscription Azure Subscription - azure:VirtualMachines + http://graph.clouditor.io/individuals/azure/VirtualMachines Azure VirtualMachines - azure:VirtualMachinesVM + http://graph.clouditor.io/individuals/azure/VirtualMachinesVM Azure VirtualMachines VM - docker:Image + http://graph.clouditor.io/individuals/docker/Image Docker Image - k8s:Container + http://graph.clouditor.io/individuals/k8s/Container Kubernetes Container - k8s:Ingress + http://graph.clouditor.io/individuals/k8s/Ingress Kubernetes Ingress - k8s:Kubernetes + http://graph.clouditor.io/individuals/k8s/Kubernetes Kubernetes - k8s:Namespace + http://graph.clouditor.io/individuals/k8s/Namespace Kubernetes Namespace - 
k8s:Node + http://graph.clouditor.io/individuals/k8s/Node Kubernetes Node - k8s:Pod + http://graph.clouditor.io/individuals/k8s/Pod Kubernetes Pod - k8s:Service + http://graph.clouditor.io/individuals/k8s/Service Kubernetes Service - k8s:Volume + http://graph.clouditor.io/individuals/k8s/Volume Kubernetes Volume - library:Jersey + http://graph.clouditor.io/individuals/libraries/Jersey Jersey - library:SpringBoot + http://graph.clouditor.io/individuals/libraries/SpringBoot SpringBoot - library:SpringBootRESTController + http://graph.clouditor.io/individuals/libraries/SpringBootRESTController SpringBoot - REST Controller - library:SpringBootRequestMapping + http://graph.clouditor.io/individuals/libraries/SpringBootRequestMapping SpringBoot - Request Mapping - prop:activated + http://graph.clouditor.io/properties/activated activated - prop:algorithm + http://graph.clouditor.io/properties/algorithm algorithm - prop:apiListFunction + http://graph.clouditor.io/properties/apiListFunction apiListFunction - prop:argument + http://graph.clouditor.io/properties/argument argument - prop:backend + http://graph.clouditor.io/properties/backend backend - prop:call + http://graph.clouditor.io/properties/call call - prop:collectionOf + http://graph.clouditor.io/properties/collectionOf collectionOf - prop:deployedOn + http://graph.clouditor.io/properties/deployedOn deployedOn - prop:enabled + http://graph.clouditor.io/properties/enabled enabled - prop:enforced + http://graph.clouditor.io/properties/enforced enforced - prop:field + http://graph.clouditor.io/properties/field field - prop:from + http://graph.clouditor.io/properties/from from - prop:handler + http://graph.clouditor.io/properties/handler handler - prop:has + http://graph.clouditor.io/properties/has has - prop:hasMultiple + http://graph.clouditor.io/properties/hasMultiple hasMultiple - prop:impact + http://graph.clouditor.io/properties/impact impact - prop:implements + http://graph.clouditor.io/properties/implements 
implements - prop:inbound + http://graph.clouditor.io/properties/inbound inbound - prop:ips + http://graph.clouditor.io/properties/ips ips - prop:keyManagement + http://graph.clouditor.io/properties/keyManagement keymanagement - prop:keyManager + http://graph.clouditor.io/properties/keyManager keyManager - prop:keyUrl + http://graph.clouditor.io/properties/keyUrl keyUrl - prop:labels + http://graph.clouditor.io/properties/labels labels - prop:managementUrl + http://graph.clouditor.io/properties/managementUrl managementUrl - prop:method + http://graph.clouditor.io/properties/method method - prop:modify + http://graph.clouditor.io/properties/modify modify - prop:offers + http://graph.clouditor.io/properties/offers offers - prop:parent + http://graph.clouditor.io/properties/parent parent - prop:path + http://graph.clouditor.io/properties/path path - prop:policy + http://graph.clouditor.io/properties/policy policy - prop:ports + http://graph.clouditor.io/properties/ports ports - prop:programmingLanguage + http://graph.clouditor.io/properties/programmingLanguage programmingLanguage - prop:propertyMapping + http://graph.clouditor.io/properties/propertyMapping propertyMapping - prop:proxyTarget + http://graph.clouditor.io/properties/proxyTarget proxyTarget - prop:public + http://graph.clouditor.io/properties/public public - prop:region + http://graph.clouditor.io/properties/region region - prop:resourceOf + http://graph.clouditor.io/properties/resourceOf resourceOf - prop:restrictedPorts + http://graph.clouditor.io/properties/restrictedPorts restrictedPorts - prop:runsOn + http://graph.clouditor.io/properties/runsOn runsOn - prop:serves + http://graph.clouditor.io/properties/serves serves - prop:serviceOf + http://graph.clouditor.io/properties/serviceOf serviceOf - prop:source + http://graph.clouditor.io/properties/source source - prop:storage + http://graph.clouditor.io/properties/storage storage - prop:tlsVersion + http://graph.clouditor.io/properties/tlsVersion 
tlsVersion - prop:to + http://graph.clouditor.io/properties/to to - prop:translationUnits + http://graph.clouditor.io/properties/translationUnits translationUnits - prop:type + http://graph.clouditor.io/properties/type type - prop:url + http://graph.clouditor.io/properties/url url - prop:value + http://graph.clouditor.io/properties/value value @@ -3445,6 +3467,11 @@ name = metadata.name xsd:de.fraunhofer.aisec.cpg.graph.Node xsd:de.fraunhofer.aisec.cpg.graph.Node + + + #FileCreate + FileWrite + diff --git a/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python/README.md b/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python/README.md new file mode 100644 index 0000000..756b13e --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python/README.md @@ -0,0 +1,4 @@ +# Test Case: Article 19 - Notification Obligation +- Test case description: The client sends personal data to the server. The server processes the data, saves it in a Mongo database and communicates parts of the personal data to a third party (external advertising server). The client offers a function for the deletion and rectification of his personal data. The server peforms these requests and notifies the external advertising server about the deletion and rectification of the personal data. The client also offers a function to retrieve information about the data recipients. +- Expected outcome: + - No data flow is detected which does not fulfill the code properties of GDPR article 19. 
\ No newline at end of file diff --git a/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python/client.py b/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python/client.py new file mode 100755 index 0000000..2130a1b --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python/client.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python3 + +import requests + +def delete_data(personal_data): + url = 'test-online-notepad.com/data' + requests.delete(url, json = personal_data) + +def rectify_data(personal_data): + url = 'test-online-notepad.com/data' + requests.put(url, json = personal_data) + +def get_information_about_data_recipients(personal_data): + data_recipients_information = "receiver of your personal data: ext-ad-server.com/data (external advertising server)\nIt is used for the following purposes: advertising" + # create file containing the information + data_recipients_server = requests.get("test-online-notepad.com/data_recipients", params = {"auth_token": personal_data["auth_token"]}) + f = open("data_recipients_information.txt", "w") + f.write(data_recipients_server) + f.close() + +def send_data_to_server(personal_data): + url = 'test-online-notepad.com/data' + requests.post(url, json = personal_data) + +if __name__ == '__main__': + #@PseudoIdentifier + personal_data = { + "username": "testuser", + "notes": ["note1", "note2", "note3"], + "auth_token": "1234567890" + } + send_data_to_server(personal_data) + rectify_data(personal_data) + get_information_about_data_recipients(personal_data) + delete_data(personal_data) diff --git a/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python/config.yml b/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python/config.yml new file mode 100644 index 0000000..7180571 --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python/config.yml @@ -0,0 +1,13 @@ +services: + - type: server + name: server + host: 
test-online-notepad.com + - type: client + name: client + - type: external-advertising-server + name: external-advertising-server + host: ext-ad-server.com + - type: db + name: mongo + storages: + - userdata \ No newline at end of file diff --git a/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python/server.py b/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python/server.py new file mode 100755 index 0000000..5ab712a --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python/server.py @@ -0,0 +1,59 @@ +#!/usr/bin/env python3 + +from flask import Flask, request +from pymongo import MongoClient, database +import requests + +mongo_host = "mongo" +user_db_client = MongoClient("mongodb://mongo:27017/") +user_db = user_db_client.userdata +user_db_collection = user_db.records + +app = Flask(__name__) + +data_recipients_information = "receiver of your personal data: ext-ad-server.com/data (external advertising server)\nIt is used for the following purposes: advertising" + +@app.route("/data", methods=['DELETE']) +def parse_data(): + req = request.json + data = { + "username": req['username'], + "notes": req['notes'] + } + + user_db_collection.delete_one({"username": data['username']}) + # inform external advertising server about the deletion + requests.delete("ext1-ad-server.com/data", json = data) + return "OK", 200 + +@app.route("/data", methods=['PUT']) +def parse_data(): + req = request.json + data = { + "username": req['username'], + "notes": req['notes'] + } + url = "ext-ad-server.com/data" + user_db_collection.update_one({"username": data['username']}, {"$set": {"notes": data['notes']}}) + # inform external advertising server about the rectification + requests.put(url, json = data) + return "OK", 200 + +@app.route("/data", methods=['POST']) +def parse_data(): + req = request.json + data = { + "username": req['username'], + "notes": req['notes'] + } + url = "ext-ad-server.com/data" + # send notes to 
external advertising server + requests.post("ext-ad-server.com/data", json = data['notes']) + return "OK", 200 + +@app.route("/data_recipients", methods=['GET']) +def parse_data(): + return data_recipients_information, 200 + +if __name__ == '__main__': + app.run(host='0.0.0.0', port=8080, debug=True, threaded=True) \ No newline at end of file diff --git a/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python_same_personal_data_different_location/README.md b/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python_same_personal_data_different_location/README.md new file mode 100644 index 0000000..6fcac06 --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python_same_personal_data_different_location/README.md @@ -0,0 +1,4 @@ +# Test Case: Article 19 - Notification Obligation - Same Personal Data, Different Location +- Test case description: A user is registered in the "client_signup" page and his personal data is sent to a server. The server processes the data, saves it in a Mongo database and communicates parts of the personal data to a third party (external advertising server). On another page ("client_edit") the user can request deletion and rectification of his personal data, which was initially stored via signup. The server performs these requests and notifies the external advertising server about the deletion and rectification of the personal data. +- Expected outcome: + - No data flow is detected which does not fulfill the code properties of GDPR article 19. 
\ No newline at end of file diff --git a/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python_same_personal_data_different_location/client_edit.py b/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python_same_personal_data_different_location/client_edit.py new file mode 100755 index 0000000..0da6ebe --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python_same_personal_data_different_location/client_edit.py @@ -0,0 +1,26 @@ +#!/usr/bin/env python3 + +import requests + +def rectify_data(personal_data): + url = 'test-online-notepad.com/data' + personal_data["name"] = "new name" + requests.put(url, json = personal_data) + +def get_information_about_data_recipients(personal_data): + data_recipients_information = "receiver of your personal data: ext-ad-server.com/data (external advertising server)\nIt is used for the following purposes: advertising" + # create file containing the information + data_recipients_server = requests.get("test-online-notepad.com/data_recipients", params = {"auth_token": personal_data["auth_token"]}) + f = open("data_recipients_information.txt", "w") + f.write(data_recipients_server) + f.close() + +if __name__ == '__main__': + #@PseudoIdentifier + personal_data = { + "username": "testuser", + "notes": ["note1", "note2", "note3"], + "auth_token": "1234567890" + } + rectify_data(personal_data) + get_information_about_data_recipients(personal_data) diff --git a/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python_same_personal_data_different_location/client_signup.py b/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python_same_personal_data_different_location/client_signup.py new file mode 100755 index 0000000..98f1399 --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python_same_personal_data_different_location/client_signup.py @@ -0,0 +1,16 @@ +#!/usr/bin/env python3 + +import requests + +def 
send_data_to_server(personal_data): + url = 'test-online-notepad.com/data' + requests.post(url, json = personal_data) + +if __name__ == '__main__': + #@PseudoIdentifier + personal_data = { + "username": "testuser", + "notes": ["note1", "note2", "note3"], + "auth_token": "1234567890" + } + send_data_to_server(personal_data) diff --git a/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python_same_personal_data_different_location/config.yml b/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python_same_personal_data_different_location/config.yml new file mode 100644 index 0000000..7180571 --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python_same_personal_data_different_location/config.yml @@ -0,0 +1,13 @@ +services: + - type: server + name: server + host: test-online-notepad.com + - type: client + name: client + - type: external-advertising-server + name: external-advertising-server + host: ext-ad-server.com + - type: db + name: mongo + storages: + - userdata \ No newline at end of file diff --git a/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python_same_personal_data_different_location/server.py b/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python_same_personal_data_different_location/server.py new file mode 100755 index 0000000..5ab712a --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python_same_personal_data_different_location/server.py @@ -0,0 +1,59 @@ +#!/usr/bin/env python3 + +from flask import Flask, request +from pymongo import MongoClient, database +import requests + +mongo_host = "mongo" +user_db_client = MongoClient("mongodb://mongo:27017/") +user_db = user_db_client.userdata +user_db_collection = user_db.records + +app = Flask(__name__) + +data_recipients_information = "receiver of your personal data: ext-ad-server.com/data (external advertising server)\nIt is used for the following purposes: advertising" + 
+@app.route("/data", methods=['DELETE']) +def parse_data(): + req = request.json + data = { + "username": req['username'], + "notes": req['notes'] + } + + user_db_collection.delete_one({"username": data['username']}) + # inform external advertising server about the deletion + requests.delete("ext1-ad-server.com/data", json = data) + return "OK", 200 + +@app.route("/data", methods=['PUT']) +def parse_data(): + req = request.json + data = { + "username": req['username'], + "notes": req['notes'] + } + url = "ext-ad-server.com/data" + user_db_collection.update_one({"username": data['username']}, {"$set": {"notes": data['notes']}}) + # inform external advertising server about the rectification + requests.put(url, json = data) + return "OK", 200 + +@app.route("/data", methods=['POST']) +def parse_data(): + req = request.json + data = { + "username": req['username'], + "notes": req['notes'] + } + url = "ext-ad-server.com/data" + # send notes to external advertising server + requests.post("ext-ad-server.com/data", json = data['notes']) + return "OK", 200 + +@app.route("/data_recipients", methods=['GET']) +def parse_data(): + return data_recipients_information, 200 + +if __name__ == '__main__': + app.run(host='0.0.0.0', port=8080, debug=True, threaded=True) \ No newline at end of file diff --git a/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python_validation/README.md b/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python_validation/README.md new file mode 100644 index 0000000..a87d64e --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python_validation/README.md @@ -0,0 +1,5 @@ +# Test Case: Article 19 Validation - Notification Obligation +- Test case description: The client sends personal data to the server. The server processes the data, saves it in a Mongo database and communicates parts of the personal data to a third party (external advertising server). 
The client offers a function for the deletion and rectification of his personal data. The server peforms these requests and does not notify the external advertising server about the deletion and rectification of the personal data. The client also offers a function to retrieve information about the data recipients but does not hand out information to the user. +- Expected outcome: + - The server is not informing the external advertising server about the deletion and rectification of the personal data is detected. + - The client does not hand out information about the data recipients to the user is detected. \ No newline at end of file diff --git a/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python_validation/client.py b/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python_validation/client.py new file mode 100755 index 0000000..42d01d7 --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python_validation/client.py @@ -0,0 +1,34 @@ +#!/usr/bin/env python3 + +import requests + +def delete_data(personal_data): + url = 'test-online-notepad.com/data' + requests.delete(url, json = personal_data) + +def rectify_data(personal_data): + url = 'test-online-notepad.com/data' + requests.put(url, json = personal_data) + +def get_information_about_data_recipients(): + # VALIDATION: no correct information is given + data_recipients_information = "receiver of your personal data: test-online-notepad.com/data (external advertising server)\nIt is used for the following purposes: advertising" + # create file containing the information + f = open("data_recipients_information.txt", "w") + f.write(data_recipients_information) + f.close() + +def send_data_to_server(personal_data): + url = 'test-online-notepad.com/data' + requests.post(url, json = personal_data) + +if __name__ == '__main__': + #@PseudoIdentifier + personal_data = { + "username": "testuser", + "notes": ["note1", "note2", "note3"] + } + 
send_data_to_server(personal_data) + rectify_data(personal_data) + get_information_about_data_recipients() + delete_data(personal_data) diff --git a/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python_validation/config.yml b/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python_validation/config.yml new file mode 100644 index 0000000..7180571 --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python_validation/config.yml @@ -0,0 +1,13 @@ +services: + - type: server + name: server + host: test-online-notepad.com + - type: client + name: client + - type: external-advertising-server + name: external-advertising-server + host: ext-ad-server.com + - type: db + name: mongo + storages: + - userdata \ No newline at end of file diff --git a/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python_validation/server.py b/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python_validation/server.py new file mode 100755 index 0000000..9ba979e --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/NotificationObligation/Python_validation/server.py @@ -0,0 +1,50 @@ +#!/usr/bin/env python3 + +from flask import Flask, request +from pymongo import MongoClient, database +import requests + +mongo_host = "mongo" +user_db_client = MongoClient("mongodb://mongo:27017/") +user_db = user_db_client.userdata +user_db_collection = user_db.records + +app = Flask(__name__) + +@app.route("/data", methods=['DELETE']) +def parse_data(): + req = request.json + data = { + "username": req['username'], + "notes": req['notes'] + } + + user_db_collection.delete_one({"username": data['username']}) + # VALIDATION: no external advertising server is informed about the deletion + return "OK", 200 + +@app.route("/data", methods=['PUT']) +def parse_data(): + req = request.json + data = { + "username": req['username'], + "notes": req['notes'] + } + user_db_collection.update_one({"username": data['username']}, 
{"$set": {"notes": data['notes']}}) + # VALIDATION: no external advertising server is informed about the rectification + return "OK", 200 + +@app.route("/data", methods=['POST']) +def parse_data(): + req = request.json + data = { + "username": req['username'], + "notes": req['notes'] + } + url = "ext-ad-server.com/data" + # send notes to external advertising server + requests.post(url, json = data['notes']) + return "OK", 200 + +if __name__ == '__main__': + app.run(host='0.0.0.0', port=8080, debug=True, threaded=True) \ No newline at end of file diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python/README.md b/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python/README.md new file mode 100644 index 0000000..b152d2b --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python/README.md @@ -0,0 +1,4 @@ +# Test Case: Article 20 - Right to Data Portability +- Test case description: The client sends personal data to the server. The server processes the data, saves it in a Mongo database. The client offers a function for the retrieval of his personal data in a machine-readable format. Also a function for the transfer of the personal data to another data controller is integrated in the client. The server performs these requests and transfers the personal data to the client or to another data controller in a machine-readable format. +- Expected outcome: + - No data flow is detected which does not fulfill the code properties of GDPR article 20. 
\ No newline at end of file diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python/client.py b/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python/client.py new file mode 100755 index 0000000..81d51b8 --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python/client.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python3 + +import requests +import os + +def get_personal_data_in_machine_readable_format(personal_data): + url = 'test-online-notepad.com/data' + # get the data from the server + personal_data_received = requests.get(url, json = personal_data) + f = open("personal_data.json", "w") + f.write(personal_data_received) + f.close() + +def transfer_personal_data_to_another_service(personal_data): + url = 'test-online-notepad.com/transfer' + data = { + "receiver_url": "other-test-online-notepad.com/data", + "personal_data": personal_data + } + requests.get(url, json = data) + +def store_personal_data_on_server(personal_data): + url = 'test-online-notepad.com/store_data' + requests.post(url, json = personal_data) + +if __name__ == '__main__': + #@PseudoIdentifier + personal_data_of_client = { + "username": "testuser", + "name": "", + "notes": "" + } + store_personal_data_on_server(personal_data_of_client) + get_personal_data_in_machine_readable_format(personal_data_of_client) + transfer_personal_data_to_another_service(personal_data_of_client) diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python/config.yml b/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python/config.yml new file mode 100644 index 0000000..bdc9375 --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python/config.yml @@ -0,0 +1,15 @@ +services: + - type: server + directory: server + name: server + host: test-online-notepad.com + - type: db + directory: server + name: postgres + storages: + - userdata + - otherdata + - type: db + name: mongo + storages: 
+ - userdata \ No newline at end of file diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python/server.py b/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python/server.py new file mode 100755 index 0000000..1754c26 --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python/server.py @@ -0,0 +1,62 @@ +#!/usr/bin/env python3 + +from flask import Flask, request +from pymongo import MongoClient, database +import requests + +mongo_host = "mongo" +user_db_client = MongoClient("mongodb://mongo:27017/") +user_db = user_db_client.userdata +user_db_collection = user_db.records + +app = Flask(__name__) + +@app.route("/data", methods=['GET']) +def get_data_in_csv_format(): + req = request.json + data = { + "username": req['username'] + } + if user_db_collection.find( { "username": data['username'] } ).count() > 0: + return "Conflict", 409 + else: + # get the data from the database (mongodb) + user_data = user_db_collection.find_one({"username": data['username']}) + # send the data to the client + return user_data, 200 + +@app.route("/transfer", methods=['GET']) +def transfer_data_to_another_service(): + req = request.json + data = { + "receiver_url": req['receiver_url'], + "personal_data": req['personal_data'] + } + if user_db_collection.find( { "username": data['personal_data']['username'] } ).count() > 0: + return "Conflict", 409 + else: + # get the data from the database (mongodb) + user_data = user_db_collection.find_one({"username": data['personal_data']['username']}) + response = requests.put(data['receiver_url'], json = user_data) + if response.status_code == 201: + return "OK", 200 + else: + return "Internal Server Error", 500 + +@app.route("/store_data", methods=['POST']) +def parse_data(): + req = request.json + data = { + "username": req['username'], + "notes": req['notes'] + } + if user_db_collection.find( { "username": data['username'] } ).count() > 0: + return "Conflict", 409 + else: + 
# save data to database + user_db_collection.insert_one(data) + return "OK", 200 + + +if __name__ == '__main__': + app.run(host='0.0.0.0', port=8080, debug=True, threaded=True) \ No newline at end of file diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_no_machine_readable_format/README.md b/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_no_machine_readable_format/README.md new file mode 100644 index 0000000..ab5e7ed --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_no_machine_readable_format/README.md @@ -0,0 +1,4 @@ +# Test Case: Article 20 - Right to Data Portability - No Machine Readable Format +- Test case description: The client sends personal data to the server. The server processes the data, saves it in a Mongo database. The client offers a function for the retrieval of his personal data. A file is created containing in the personal data, but the created file is not in a machine-readable format. +- Expected outcome: + - A data flow is detected which does not fulfill the code properties of GDPR article 20. 
\ No newline at end of file diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_no_machine_readable_format/client.py b/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_no_machine_readable_format/client.py new file mode 100755 index 0000000..cc3f7aa --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_no_machine_readable_format/client.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python3 + +import requests +import os + +def get_personal_data_in_machine_readable_format(personal_data): + url = 'test-online-notepad.com/data' + # get the data from the server + personal_data_received = requests.get(url, json = personal_data) + f = open("personal_data.txt", "w") + f.write(personal_data_received) + f.close() + +def transfer_personal_data_to_another_service(personal_data): + url = 'test-online-notepad.com/transfer' + data = { + "receiver_url": "other-test-online-notepad.com/data", + "personal_data": personal_data + } + requests.get(url, json = data) + +def store_personal_data_on_server(personal_data): + url = 'test-online-notepad.com/store_data' + requests.post(url, json = personal_data) + +if __name__ == '__main__': + #@PseudoIdentifier + personal_data_of_client = { + "username": "testuser", + "name": "", + "notes": "" + } + store_personal_data_on_server(personal_data_of_client) + get_personal_data_in_machine_readable_format(personal_data_of_client) + transfer_personal_data_to_another_service(personal_data_of_client) diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_no_machine_readable_format/config.yml b/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_no_machine_readable_format/config.yml new file mode 100644 index 0000000..bdc9375 --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_no_machine_readable_format/config.yml @@ -0,0 +1,15 @@ +services: + - type: server + directory: server + name: 
server + host: test-online-notepad.com + - type: db + directory: server + name: postgres + storages: + - userdata + - otherdata + - type: db + name: mongo + storages: + - userdata \ No newline at end of file diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_no_machine_readable_format/server.py b/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_no_machine_readable_format/server.py new file mode 100755 index 0000000..1754c26 --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_no_machine_readable_format/server.py @@ -0,0 +1,62 @@ +#!/usr/bin/env python3 + +from flask import Flask, request +from pymongo import MongoClient, database +import requests + +mongo_host = "mongo" +user_db_client = MongoClient("mongodb://mongo:27017/") +user_db = user_db_client.userdata +user_db_collection = user_db.records + +app = Flask(__name__) + +@app.route("/data", methods=['GET']) +def get_data_in_csv_format(): + req = request.json + data = { + "username": req['username'] + } + if user_db_collection.find( { "username": data['username'] } ).count() > 0: + return "Conflict", 409 + else: + # get the data from the database (mongodb) + user_data = user_db_collection.find_one({"username": data['username']}) + # send the data to the client + return user_data, 200 + +@app.route("/transfer", methods=['GET']) +def transfer_data_to_another_service(): + req = request.json + data = { + "receiver_url": req['receiver_url'], + "personal_data": req['personal_data'] + } + if user_db_collection.find( { "username": data['personal_data']['username'] } ).count() > 0: + return "Conflict", 409 + else: + # get the data from the database (mongodb) + user_data = user_db_collection.find_one({"username": data['personal_data']['username']}) + response = requests.put(data['receiver_url'], json = user_data) + if response.status_code == 201: + return "OK", 200 + else: + return "Internal Server Error", 500 + 
+@app.route("/store_data", methods=['POST']) +def parse_data(): + req = request.json + data = { + "username": req['username'], + "notes": req['notes'] + } + if user_db_collection.find( { "username": data['username'] } ).count() > 0: + return "Conflict", 409 + else: + # save data to database + user_db_collection.insert_one(data) + return "OK", 200 + + +if __name__ == '__main__': + app.run(host='0.0.0.0', port=8080, debug=True, threaded=True) \ No newline at end of file diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_same_personal_data_different_location/README.md b/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_same_personal_data_different_location/README.md new file mode 100644 index 0000000..6b7d235 --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_same_personal_data_different_location/README.md @@ -0,0 +1,4 @@ +# Test Case: Article 20 - Right to Data Portability - Same Personal Data, Different Location +- Test case description: A user is registered in the "client_signup" page and his personal data is sent to a server. The server processes the data, saves it in a Mongo database. On another page ("client_edit") the user can request retrieval of his stored personal data (via signup) in a machine-readable format, as well as the transfer of his personal data to another data controller. +- Expected outcome: + - No data flow is detected which does not fulfill the code properties of GDPR article 20. 
\ No newline at end of file diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_same_personal_data_different_location/client_edit.py b/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_same_personal_data_different_location/client_edit.py new file mode 100755 index 0000000..86ac90f --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_same_personal_data_different_location/client_edit.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python3 + +import requests +import os + +def get_personal_data_in_machine_readable_format(personal_data): + url = 'test-online-notepad.com/data' + # get the data from the server + personal_data_received = requests.get(url, json = personal_data) + f = open("personal_data.json", "w") + f.write(personal_data_received) + f.close() + +def transfer_personal_data_to_another_service(personal_data): + url = 'test-online-notepad.com/transfer' + data = { + "receiver_url": "other-test-online-notepad.com/data", + "personal_data": personal_data + } + requests.get(url, json = data) + +if __name__ == '__main__': + #@PseudoIdentifier + personal_data_of_client = { + "username": "testuser", + "name": "", + "notes": "" + } + get_personal_data_in_machine_readable_format(personal_data_of_client) + transfer_personal_data_to_another_service(personal_data_of_client) diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_same_personal_data_different_location/client_signup.py b/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_same_personal_data_different_location/client_signup.py new file mode 100755 index 0000000..e89e2bc --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_same_personal_data_different_location/client_signup.py @@ -0,0 +1,17 @@ +#!/usr/bin/env python3 + +import requests +import os + +def store_personal_data_on_server(personal_data): + url = 'test-online-notepad.com/store_data' + 
requests.post(url, json = personal_data) + +if __name__ == '__main__': + #@PseudoIdentifier + personal_data_of_client = { + "username": "testuser", + "name": "", + "notes": "" + } + store_personal_data_on_server(personal_data_of_client) diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_same_personal_data_different_location/config.yml b/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_same_personal_data_different_location/config.yml new file mode 100644 index 0000000..bdc9375 --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_same_personal_data_different_location/config.yml @@ -0,0 +1,15 @@ +services: + - type: server + directory: server + name: server + host: test-online-notepad.com + - type: db + directory: server + name: postgres + storages: + - userdata + - otherdata + - type: db + name: mongo + storages: + - userdata \ No newline at end of file diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_same_personal_data_different_location/server.py b/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_same_personal_data_different_location/server.py new file mode 100755 index 0000000..1754c26 --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_same_personal_data_different_location/server.py @@ -0,0 +1,62 @@ +#!/usr/bin/env python3 + +from flask import Flask, request +from pymongo import MongoClient, database +import requests + +mongo_host = "mongo" +user_db_client = MongoClient("mongodb://mongo:27017/") +user_db = user_db_client.userdata +user_db_collection = user_db.records + +app = Flask(__name__) + +@app.route("/data", methods=['GET']) +def get_data_in_csv_format(): + req = request.json + data = { + "username": req['username'] + } + if user_db_collection.find( { "username": data['username'] } ).count() > 0: + return "Conflict", 409 + else: + # get the data from the database 
(mongodb) + user_data = user_db_collection.find_one({"username": data['username']}) + # send the data to the client + return user_data, 200 + +@app.route("/transfer", methods=['GET']) +def transfer_data_to_another_service(): + req = request.json + data = { + "receiver_url": req['receiver_url'], + "personal_data": req['personal_data'] + } + if user_db_collection.find( { "username": data['personal_data']['username'] } ).count() > 0: + return "Conflict", 409 + else: + # get the data from the database (mongodb) + user_data = user_db_collection.find_one({"username": data['personal_data']['username']}) + response = requests.put(data['receiver_url'], json = user_data) + if response.status_code == 201: + return "OK", 200 + else: + return "Internal Server Error", 500 + +@app.route("/store_data", methods=['POST']) +def parse_data(): + req = request.json + data = { + "username": req['username'], + "notes": req['notes'] + } + if user_db_collection.find( { "username": data['username'] } ).count() > 0: + return "Conflict", 409 + else: + # save data to database + user_db_collection.insert_one(data) + return "OK", 200 + + +if __name__ == '__main__': + app.run(host='0.0.0.0', port=8080, debug=True, threaded=True) \ No newline at end of file diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_validation/README.md b/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_validation/README.md new file mode 100644 index 0000000..8baafcd --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_validation/README.md @@ -0,0 +1,4 @@ +# Test Case: Article 20 Validation - Right to Data Portability +- Test case description: The client sends personal data to the server. The server processes the data, saves it in a Mongo database. The client offers a function for the retrieval of his personal data in a machine-readable format, but does not store the personal data. 
Also a function for the transfer of the personal data to another data controller is integrated in the client. The server does not transfer the personal data to the client or to another data controller in a machine-readable format. +- Expected outcome: + - The server does not transfer the personal data to the client or to another data controller in a machine-readable format is detected. \ No newline at end of file diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_validation/client.py b/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_validation/client.py new file mode 100755 index 0000000..2e12fd2 --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_validation/client.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python3 + +import requests +import os + +def get_personal_data_in_machine_readable_format(personal_data): + url = 'test-online-notepad.com/data' + # get the data from the server + personal_data_received = requests.get(url, json = personal_data) + f = open("personal_data.json", "w") + # VALIDATION: The personal data in machine-readable format is not stored on the client + f.close() + +def transfer_personal_data_to_another_service(personal_data): + url = 'test-online-notepad.com/transfer' + data = { + "receiver_url": "other-test-online-notepad.com/data", + "personal_data": personal_data + } + requests.get(url, json = data) + +def store_personal_data_on_server(personal_data): + url = 'test-online-notepad.com/store_data' + requests.post(url, json = personal_data) + +if __name__ == '__main__': + #@PseudoIdentifier + personal_data_of_client = { + "username": "testuser", + "name": "", + "notes": "" + } + store_personal_data_on_server(personal_data_of_client) + get_personal_data_in_machine_readable_format(personal_data_of_client) + transfer_personal_data_to_another_service(personal_data_of_client) diff --git 
a/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_validation/config.yml b/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_validation/config.yml new file mode 100644 index 0000000..bdc9375 --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_validation/config.yml @@ -0,0 +1,15 @@ +services: + - type: server + directory: server + name: server + host: test-online-notepad.com + - type: db + directory: server + name: postgres + storages: + - userdata + - otherdata + - type: db + name: mongo + storages: + - userdata \ No newline at end of file diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_validation/server.py b/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_validation/server.py new file mode 100755 index 0000000..492688a --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToDataPortability/Python_validation/server.py @@ -0,0 +1,59 @@ +#!/usr/bin/env python3 + +from flask import Flask, request +from pymongo import MongoClient, database +import requests + +mongo_host = "mongo" +user_db_client = MongoClient("mongodb://mongo:27017/") +user_db = user_db_client.userdata +user_db_collection = user_db.records + +app = Flask(__name__) + +@app.route("/data", methods=['GET']) +def get_data_in_csv_format(): + req = request.json + data = { + "username": req['username'] + } + if user_db_collection.find( { "username": data['username'] } ).count() > 0: + return "Conflict", 409 + else: + # get the data from the database (mongodb) + user_data = user_db_collection.find_one({"username": data['username']}) + # send the data to the client + return user_data, 200 + +@app.route("/transfer", methods=['GET']) +def transfer_data_to_another_service(): + req = request.json + data = { + "receiver_url": req['receiver_url'], + "personal_data": req['personal_data'] + } + if user_db_collection.find( { "username": data['personal_data']['username'] 
} ).count() > 0: + return "Conflict", 409 + else: + # get the data from the database (mongodb) + user_data = user_db_collection.find_one({"username": data['personal_data']['username']}) + # VALIDATION: Personal data is not transferred to a third party + return "OK", 200 + +@app.route("/store_data", methods=['POST']) +def parse_data(): + req = request.json + data = { + "username": req['username'], + "notes": req['notes'] + } + if user_db_collection.find( { "username": data['username'] } ).count() > 0: + return "Conflict", 409 + else: + # save data to database + user_db_collection.insert_one(data) + return "OK", 200 + + +if __name__ == '__main__': + app.run(host='0.0.0.0', port=8080, debug=True, threaded=True) \ No newline at end of file diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python/README.md b/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python/README.md new file mode 100644 index 0000000..eff557f --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python/README.md @@ -0,0 +1,4 @@ +# Test Case: Article 17 - Right to Erasure +- Test case description: The client sends personal data to the server. The server processes the data, saves it in a Mongo database and sends it to third parties. The client offers a function for the deletion of his personal data. The server performs this request, deletes the personal data and informs other data recipients about the deletion request. +- Expected outcome: + - No data flow is detected which does not fulfill the code properties of GDPR article 17. 
\ No newline at end of file diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python/client.py b/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python/client.py new file mode 100755 index 0000000..8f8b829 --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python/client.py @@ -0,0 +1,20 @@ +#!/usr/bin/env python3 + +import requests + +def delete_own_data(personal_data): + url = 'test-online-notepad.com/data' + requests.delete(url, json = personal_data) + +def store_personal_data_on_server(personal_data): + url = 'test-online-notepad.com/store_data' + requests.post(url, json = personal_data) + +if __name__ == '__main__': + #@PseudoIdentifier + personal_data = { + "username": "testuser", + "notes": ["note1", "note2", "note3"] + } + store_personal_data_on_server(personal_data) + delete_own_data(personal_data) diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python/config.yml b/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python/config.yml new file mode 100644 index 0000000..8ddc973 --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python/config.yml @@ -0,0 +1,11 @@ +services: + - type: server + name: server + host: test-online-notepad.com + - type: client + name: client + - type: db + name: mongo + storages: + - userdata + - otherdata \ No newline at end of file diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python/server.py b/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python/server.py new file mode 100755 index 0000000..de7e141 --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python/server.py @@ -0,0 +1,47 @@ +#!/usr/bin/env python3 + +from flask import Flask, request +from pymongo import MongoClient, database +import requests + +mongo_host = "mongo" +user_db_client = MongoClient("mongodb://mongo:27017/") +user_db = user_db_client.userdata +user_db_collection = user_db.records + +app = 
Flask(__name__) + +@app.route("/data", methods=['DELETE']) +def parse_data(): + req = request.json + data = { + "username": req['username'], + "notes": req['notes'] + } + if user_db_collection.find( { "username": data['username'] } ).count() > 0: + return "Conflict", 409 + else: + user_db_collection.delete_one({"username": data['username']}) + # inform external advertising server about the deletion + requests.delete("ext-ad-server.com/data", json = data) + return "Created", 201 + +@app.route("/store_data", methods=['POST']) +def parse_data(): + req = request.json + data = { + "username": req['username'], + "notes": req['notes'] + } + if user_db_collection.find( { "username": data['username'] } ).count() > 0: + return "Conflict", 409 + else: + # save data to database + user_db_collection.insert_one(data) + # send data to external advertising server + url = 'ext-ad-server.com/data' + requests.put(url, json = data) + return "OK", 200 + +if __name__ == '__main__': + app.run(host='0.0.0.0', port=8080, debug=True, threaded=True) \ No newline at end of file diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python_same_personal_data_different_location/README.md b/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python_same_personal_data_different_location/README.md new file mode 100644 index 0000000..ef570d3 --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python_same_personal_data_different_location/README.md @@ -0,0 +1,4 @@ +# Test Case: Article 17 - Right to Erasure - Same Personal Data, Different Location +- Test case description: A user is registered in the "client_signup" page and his personal data is sent to a server. The server processes the data, saves it in a Mongo database and sends parts of it to third parties. On another page ("client_edit") the user can request deletion of his personal data, which was initially stored via signup. 
The server performs this request, deletes the personal data and informs other data recipients about the deletion request. +- Expected outcome: + - No data flow is detected which does not fulfill the code properties of GDPR article 17. \ No newline at end of file diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python_same_personal_data_different_location/client_edit.py b/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python_same_personal_data_different_location/client_edit.py new file mode 100755 index 0000000..355d97e --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python_same_personal_data_different_location/client_edit.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python3 + +import requests + +def delete_own_data(personal_data): + url = 'test-online-notepad.com/data' + requests.delete(url, json = personal_data) + +if __name__ == '__main__': + #@PseudoIdentifier + personal_data = { + "username": "testuser", + "notes": ["note1", "note2", "note3"] + } + delete_own_data(personal_data) diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python_same_personal_data_different_location/client_signup.py b/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python_same_personal_data_different_location/client_signup.py new file mode 100755 index 0000000..d5e3bc1 --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python_same_personal_data_different_location/client_signup.py @@ -0,0 +1,16 @@ +#!/usr/bin/env python3 + +import requests + +def store_personal_data_on_server(personal_data): + url = 'test-online-notepad.com/store_data' + requests.post(url, json = personal_data) + +if __name__ == '__main__': + #@PseudoIdentifier + personal_data = { + "username": "testuser", + "name": "firstname lastname", + "notes": ["note1", "note2", "note3"] + } + store_personal_data_on_server(personal_data) diff --git 
a/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python_same_personal_data_different_location/config.yml b/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python_same_personal_data_different_location/config.yml new file mode 100644 index 0000000..8ddc973 --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python_same_personal_data_different_location/config.yml @@ -0,0 +1,11 @@ +services: + - type: server + name: server + host: test-online-notepad.com + - type: client + name: client + - type: db + name: mongo + storages: + - userdata + - otherdata \ No newline at end of file diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python_same_personal_data_different_location/server.py b/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python_same_personal_data_different_location/server.py new file mode 100755 index 0000000..de7e141 --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python_same_personal_data_different_location/server.py @@ -0,0 +1,47 @@ +#!/usr/bin/env python3 + +from flask import Flask, request +from pymongo import MongoClient, database +import requests + +mongo_host = "mongo" +user_db_client = MongoClient("mongodb://mongo:27017/") +user_db = user_db_client.userdata +user_db_collection = user_db.records + +app = Flask(__name__) + +@app.route("/data", methods=['DELETE']) +def parse_data(): + req = request.json + data = { + "username": req['username'], + "notes": req['notes'] + } + if user_db_collection.find( { "username": data['username'] } ).count() > 0: + return "Conflict", 409 + else: + user_db_collection.delete_one({"username": data['username']}) + # inform external advertising server about the deletion + requests.delete("ext-ad-server.com/data", json = data) + return "Created", 201 + +@app.route("/store_data", methods=['POST']) +def parse_data(): + req = request.json + data = { + "username": req['username'], + "notes": req['notes'] + } + if 
user_db_collection.find( { "username": data['username'] } ).count() > 0: + return "Conflict", 409 + else: + # save data to database + user_db_collection.insert_one(data) + # send data to external advertising server + url = 'ext-ad-server.com/data' + requests.put(url, json = data) + return "OK", 200 + +if __name__ == '__main__': + app.run(host='0.0.0.0', port=8080, debug=True, threaded=True) \ No newline at end of file diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python_validation/README.md b/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python_validation/README.md new file mode 100644 index 0000000..46a35c4 --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python_validation/README.md @@ -0,0 +1,4 @@ +# Test Case: Article 17 Validation - Right to Erasure +- Test case description: The client sends personal data to the server. The server processes the data, saves it in a Mongo database and communicates it to third parties. The client offers a function for the deletion of his personal data. The server does not delete the personal data and does not inform other data recipients about the deletion request. +- Expected outcome: + - The server does not delete the personal data and does not inform other data recipients about the deletion request is detected. 
\ No newline at end of file diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python_validation/client.py b/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python_validation/client.py new file mode 100755 index 0000000..8c6829a --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python_validation/client.py @@ -0,0 +1,22 @@ +#!/usr/bin/env python3 + +import requests + +def delete_own_data(personal_data): + url = 'test-online-notepad.com/data' + requests.delete(url, json = personal_data) + +def store_personal_data_on_server(personal_data): + url = 'test-online-notepad.com/store_data' + requests.post(url, json = personal_data) + + +if __name__ == '__main__': + #@PseudoIdentifier + personal_data = { + "username": "testuser", + "notes": ["note1", "note2", "note3"] + } + store_personal_data_on_server(personal_data) + delete_own_data(personal_data) + diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python_validation/config.yml b/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python_validation/config.yml new file mode 100644 index 0000000..30685fc --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python_validation/config.yml @@ -0,0 +1,10 @@ +services: + - type: server + name: server + host: test-online-notepad.com + - type: client + name: client + - type: db + name: mongo + storages: + - userdata \ No newline at end of file diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python_validation/server.py b/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python_validation/server.py new file mode 100755 index 0000000..fb7ad19 --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToErasure/Python_validation/server.py @@ -0,0 +1,45 @@ +#!/usr/bin/env python3 + +from flask import Flask, request +from pymongo import MongoClient, database +import requests + +mongo_host = "mongo" +user_db_client = MongoClient("mongodb://mongo:27017/") 
+user_db = user_db_client.userdata +user_db_collection = user_db.records + +app = Flask(__name__) + +@app.route("/data", methods=['DELETE']) +def parse_data(): + req = request.json + data = { + "username": req['username'], + "notes": req['notes'] + } + if user_db_collection.find( { "username": data['username'] } ).count() > 0: + return "Conflict", 409 + else: + # VALIDATION: no external advertising server is informed about the deletion and no deletion is performed + return "OK", 200 + +@app.route("/store_data", methods=['POST']) +def parse_data(): + req = request.json + data = { + "username": req['username'], + "notes": req['notes'] + } + if user_db_collection.find( { "username": data['username'] } ).count() > 0: + return "Conflict", 409 + else: + # save data to database + user_db_collection.insert_one(data) + # send data to external advertising server + url = 'test-online-notepad.com/data' + requests.put(url, json = data) + return "OK", 200 + +if __name__ == '__main__': + app.run(host='0.0.0.0', port=8080, debug=True, threaded=True) \ No newline at end of file diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python/README.md b/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python/README.md new file mode 100644 index 0000000..2be3513 --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python/README.md @@ -0,0 +1,4 @@ +# Test Case: Article 16 - Right to Rectification +- Test case description: The client sends personal data to the server. The server processes the data, saves it in a Mongo database. The client offers a function for the rectification of his personal data. The server performs this request and rectifies the personal data and stores the updated data in the database. +- Expected outcome: + - No data flow is detected which does not fulfill the code properties of GDPR article 16. 
diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python/client.py b/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python/client.py new file mode 100755 index 0000000..385769b --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python/client.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python3 + +import requests + +def rectify(personal_data): + url = 'test-online-notepad.com/data' + + personal_data["name"] = "new name" + requests.put(url, json = personal_data) + +def store_personal_data_on_server(personal_data): + url = 'test-online-notepad.com/store_data' + requests.post(url, json = personal_data) + +if __name__ == '__main__': + #@PseudoIdentifier + personal_data = { + "username": "testuser", + "name": "firstname lastname", + "notes": ["note1", "note2", "note3"] + } + store_personal_data_on_server(personal_data) + rectify(personal_data) diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python/config.yml b/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python/config.yml new file mode 100644 index 0000000..bdc9375 --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python/config.yml @@ -0,0 +1,15 @@ +services: + - type: server + directory: server + name: server + host: test-online-notepad.com + - type: db + directory: server + name: postgres + storages: + - userdata + - otherdata + - type: db + name: mongo + storages: + - userdata \ No newline at end of file diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python/server.py b/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python/server.py new file mode 100755 index 0000000..44b7cf0 --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python/server.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python3 + +from flask import Flask, request +from pymongo import MongoClient, database +mongo_host = "mongo" +user_db_client = 
MongoClient("mongodb://mongo:27017/") +user_db = user_db_client.userdata +user_db_collection = user_db.records + +app = Flask(__name__) + +@app.route("/data", methods=['PUT']) +def parse_data(): + req = request.json + data = { + "username": req['username'], + "name": req['name'], + "notes": req['notes'] + } + if user_db_collection.find( { "name": data['name'] } ).count() > 0: + return "Conflict", 409 + else: + user_db_collection.update_one({"name": data['name']}) + return "Created", 201 + +@app.route("/store_data", methods=['POST']) +def parse_data(): + req = request.json + data = { + "username": req['username'], + "name": req['name'], + "notes": req['notes'] + } + if user_db_collection.find( { "name": data['name'] } ).count() > 0: + return "Conflict", 409 + else: + # save data to database + user_db_collection.insert_one(data) + return "OK", 200 + +if __name__ == '__main__': + app.run(host='0.0.0.0', port=8080, debug=True, threaded=True) \ No newline at end of file diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python_same_personal_data_different_location/README.md b/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python_same_personal_data_different_location/README.md new file mode 100644 index 0000000..69fcbf2 --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python_same_personal_data_different_location/README.md @@ -0,0 +1,4 @@ +# Test Case: Article 16 - Right to Rectification - Same Personal Data, Different Location +- Test case description: A user is registered in the "client_signup" page and his personal data is sent to a server. The server processes the data, saves it in a Mongo database. On another page ("client_edit") the user can request rectification of his personal data, which was initially stored via signup. The server performs this request and rectifies the personal data and stores the updated data in the database. 
+- Expected outcome: + - No data flow is detected which does not fulfill the code properties of GDPR article 16. diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python_same_personal_data_different_location/client_edit.py b/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python_same_personal_data_different_location/client_edit.py new file mode 100755 index 0000000..9e451e5 --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python_same_personal_data_different_location/client_edit.py @@ -0,0 +1,16 @@ +#!/usr/bin/env python3 + +import requests + +def rectify(personal_data): + url = 'test-online-notepad.com/data' + personal_data["name"] = "new name" + requests.put(url, json = personal_data) + +if __name__ == '__main__': + #@PseudoIdentifier + personal_data_1 = { + "username": "testuser", + "name": "firstname lastname" + } + rectify(personal_data_1) diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python_same_personal_data_different_location/client_signup.py b/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python_same_personal_data_different_location/client_signup.py new file mode 100755 index 0000000..d5e3bc1 --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python_same_personal_data_different_location/client_signup.py @@ -0,0 +1,16 @@ +#!/usr/bin/env python3 + +import requests + +def store_personal_data_on_server(personal_data): + url = 'test-online-notepad.com/store_data' + requests.post(url, json = personal_data) + +if __name__ == '__main__': + #@PseudoIdentifier + personal_data = { + "username": "testuser", + "name": "firstname lastname", + "notes": ["note1", "note2", "note3"] + } + store_personal_data_on_server(personal_data) diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python_same_personal_data_different_location/config.yml 
b/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python_same_personal_data_different_location/config.yml new file mode 100644 index 0000000..bdc9375 --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python_same_personal_data_different_location/config.yml @@ -0,0 +1,15 @@ +services: + - type: server + directory: server + name: server + host: test-online-notepad.com + - type: db + directory: server + name: postgres + storages: + - userdata + - otherdata + - type: db + name: mongo + storages: + - userdata \ No newline at end of file diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python_same_personal_data_different_location/server.py b/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python_same_personal_data_different_location/server.py new file mode 100755 index 0000000..44b7cf0 --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python_same_personal_data_different_location/server.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python3 + +from flask import Flask, request +from pymongo import MongoClient, database +mongo_host = "mongo" +user_db_client = MongoClient("mongodb://mongo:27017/") +user_db = user_db_client.userdata +user_db_collection = user_db.records + +app = Flask(__name__) + +@app.route("/data", methods=['PUT']) +def parse_data(): + req = request.json + data = { + "username": req['username'], + "name": req['name'], + "notes": req['notes'] + } + if user_db_collection.find( { "name": data['name'] } ).count() > 0: + return "Conflict", 409 + else: + user_db_collection.update_one({"name": data['name']}) + return "Created", 201 + +@app.route("/store_data", methods=['POST']) +def parse_data(): + req = request.json + data = { + "username": req['username'], + "name": req['name'], + "notes": req['notes'] + } + if user_db_collection.find( { "name": data['name'] } ).count() > 0: + return "Conflict", 409 + else: + # save data to database + 
user_db_collection.insert_one(data) + return "OK", 200 + +if __name__ == '__main__': + app.run(host='0.0.0.0', port=8080, debug=True, threaded=True) \ No newline at end of file diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python_validation/README.md b/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python_validation/README.md new file mode 100644 index 0000000..3a34420 --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python_validation/README.md @@ -0,0 +1,4 @@ +# Test Case: Article 16 Validation - Right to Rectification +- Test case description: The client sends personal data to the server. The server processes the data, saves it in a Mongo database. The client offers a function for the rectification of his personal data. The server does not perform rectification of the personal data and therefore does not update the personal data record in the database. +- Expected outcome: + - The non-rectification of the personal data (the update call to the database of the personal data) is detected. 
diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python_validation/client.py b/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python_validation/client.py new file mode 100755 index 0000000..e6a2e09 --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python_validation/client.py @@ -0,0 +1,24 @@ +#!/usr/bin/env python3 + +import requests + +def rectify(personal_data): + url = 'test-online-notepad.com/data' + + personal_data["name"] = "new name" + requests.put(url, json = personal_data) + +def store_personal_data_on_server(personal_data): + url = 'test-online-notepad.com/store_data' + requests.post(url, json = personal_data) + +if __name__ == '__main__': + #@PseudoIdentifier + personal_data = { + "username": "testuser", + "name": "firstname lastname", + "notes": ["note1", "note2", "note3"] + } + store_personal_data_on_server(personal_data) + rectify(personal_data) + diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python_validation/config.yml b/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python_validation/config.yml new file mode 100644 index 0000000..bdc9375 --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python_validation/config.yml @@ -0,0 +1,15 @@ +services: + - type: server + directory: server + name: server + host: test-online-notepad.com + - type: db + directory: server + name: postgres + storages: + - userdata + - otherdata + - type: db + name: mongo + storages: + - userdata \ No newline at end of file diff --git a/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python_validation/server.py b/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python_validation/server.py new file mode 100755 index 0000000..8e3f7ea --- /dev/null +++ b/ppg-testing-library/GDPRComplianceChecks/RightToRectification/Python_validation/server.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python3 + +from flask import Flask, 
request +from pymongo import MongoClient, database + +mongo_host = "mongo" +user_db_client = MongoClient("mongodb://mongo:27017/") +user_db = user_db_client.userdata +user_db_collection = user_db.records + +app = Flask(__name__) + +@app.route("/data", methods=['PUT']) +def parse_data(): + req = request.json + data = { + "username": req['username'], + "name": req['name'], + "notes": req['notes'] + } + if user_db_collection.find( { "name": data['name'] } ).count() > 0: + return "Conflict", 409 + else: + # VALIDATION: no rectification is performed => No update call to the database + return "Created", 201 + +@app.route("/store_data", methods=['POST']) +def parse_data(): + req = request.json + data = { + "username": req['username'], + "name": req['name'], + "notes": req['notes'] + } + if user_db_collection.find( { "name": data['name'] } ).count() > 0: + return "Conflict", 409 + else: + # save data to database + user_db_collection.insert_one(data) + return "OK", 200 + +if __name__ == '__main__': + app.run(host='0.0.0.0', port=8080, debug=True, threaded=True) \ No newline at end of file