Skip to content

Commit

Permalink
Reimplement proofTermDefined as compactProof.
Browse files Browse the repository at this point in the history
- This approach better matches use-case optimization needs.
  • Loading branch information
dlongley committed Jan 1, 2019
1 parent 9975258 commit fb60a05
Show file tree
Hide file tree
Showing 8 changed files with 145 additions and 109 deletions.
21 changes: 9 additions & 12 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,18 +3,15 @@
## 3.0.0 - TBD

### Added
- Add `proofTermDefined` flag to enable skipping early JSON-LD expansion to
determine the term to use for `proof` (or `signature` if using a legacy
suite). Setting this flag will cause the `sign` and `verify` APIs to expect
the term `proof` (or the term `signature` for legacy suites) to be defined in
the same way that they are defined in the `https://w3id.org/security/v2`
JSON-LD `@context`. Note that setting this flag should not necessarily be
considered an optimization as JSON-LD expansion is usually required to
perform canonicalization anyway. If the flag is not set, expansion will be
performed early and skipped later during canonicalization. Setting this flag
is primarily useful for methods that perform specialized canonicalization
and that can take advantage of optimizing away the JSON-LD expansion step
to determine the `proof` term.
- Add `compactProof` flag that can be set to `false` to enable skipping
compaction of proof(s) when it is known that the input document's JSON-LD
`@context` (for `sign` or `verify`) defines all applicable proof terms using
the same definitions as the JSON-LD `@context` used internally by the library
(i.e. the JSON-LD `@context` defined by `constants.SECURITY_CONTEXT_URL`).
This flag should only be set to `false` by advanced users who have ensured
their software systems have strictly validated the input to ensure that it
is safe and cannot be misinterpreted. If these guarantees can be met, then
setting this flag to `false` may be a useful optimization consideration.

### Changed
- **BREAKING**: `sign` and `verify` APIs require suites and proof purpose
Expand Down
159 changes: 91 additions & 68 deletions lib/ProofSet.js
Original file line number Diff line number Diff line change
Expand Up @@ -42,19 +42,19 @@ module.exports = class ProofSet {
* an error when unmapped properties are detected in the input, use `false`
* to turn this off and allow unmapped properties to be dropped or use a
* custom function.
* @param [proofTermDefined] {boolean} `true` indicates that the document's
* context has defined the term `proof` (or the term `signature` for
* legacy suites) and no JSON-LD processing is necessary to discover
* the proper term (default: `false`). Note that if expansion of the
* document is required for canonicalization anyway then setting this
* flag does not improve performance.
* @param [compactProof] {boolean} `true` instructs this call to compact
* the resulting proof to the same JSON-LD `@context` as the input
* document; this is the default behavior. Setting this flag to `false` can
* be used as an optimization to prevent an unnecessary compaction when the
* caller knows that all used proof terms have the same definition in the
* document's `@context` as the `constants.SECURITY_CONTEXT_URL` `@context`.
*
* @return {Promise<object>} resolves with the signed document, with
* the signature in the top-level `proof` property.
*/
async add(document, {
suite, purpose, documentLoader, expansionMap,
proofTermDefined = false} = {}) {
compactProof = true} = {}) {
if(!suite) {
throw new TypeError('"options.suite" is required.');
}
Expand Down Expand Up @@ -86,16 +86,9 @@ module.exports = class ProofSet {

// preprocess document to prepare to remove existing proofs
let input;
if(proofTermDefined) {
// simply shallow clone the document, do not expand
input = {...document};
// excluding any existing proof(s)
if(suite.legacy) {
delete input.signature;
} else {
delete input.proof;
}
} else {
let existingProofs;
const proofProperty = suite.legacy ? 'signature' : 'proof';
if(compactProof) {
// cannot assume security context terms, so do full expansion
([input = {}] = await jsonld.expand(
document, {documentLoader, expansionMap}));
Expand All @@ -105,36 +98,50 @@ module.exports = class ProofSet {
} else {
delete input[constants.SECURITY_PROOF_URL];
}
} else {
// modify document in place to maximize optimization
input = document;
// save but exclude any existing proof(s)
existingProofs = input[proofProperty];
delete document[proofProperty];
}

// create the new proof (suites MUST output a proof using the security-v2
// `@context`)
const proof = await suite.createProof({
document: input, purpose, documentLoader,
expansionMap, proofTermDefined});
expansionMap, compactProof});

// TODO: could potentially optimize to avoid extra compact based on flag
if(compactProof) {
// compact proof to match document's context
let expandedProof;
if(suite.legacy) {
expandedProof = {
[constants.SECURITY_SIGNATURE_URL]: proof
};
} else {
expandedProof = {
[constants.SECURITY_PROOF_URL]: {'@graph': proof}
};
}
const ctx = jsonld.getValues(document, '@context');
const compactProof = await jsonld.compact(
expandedProof, ctx,
{documentLoader, expansionMap, compactToRelative: false});
delete compactProof['@context'];

// compact proof to match document's context
let expandedProof;
if(suite.legacy) {
expandedProof = {
[constants.SECURITY_SIGNATURE_URL]: proof
};
// add proof to document
const key = Object.keys(compactProof)[0];
jsonld.addValue(document, key, compactProof[key]);
} else {
expandedProof = {
[constants.SECURITY_PROOF_URL]: {'@graph': proof}
};
// in-place restore any existing proofs
if(existingProofs) {
document[proofProperty] = existingProofs;
}
// add new proof
delete proof['@context'];
jsonld.addValue(document, proofProperty, proof);
}
const ctx = jsonld.getValues(document, '@context');
const compactProof = await jsonld.compact(
expandedProof, ctx,
{documentLoader, expansionMap, compactToRelative: false});
delete compactProof['@context'];

// add proof to document
const key = Object.keys(compactProof)[0];
jsonld.addValue(document, key, compactProof[key]);

return document;
}
Expand Down Expand Up @@ -172,12 +179,14 @@ module.exports = class ProofSet {
* an error when unmapped properties are detected in the input, use `false`
* to turn this off and allow unmapped properties to be dropped or use a
* custom function.
* @param [proofTermDefined] {boolean} `true` indicates that the document's
* context has defined the term `proof` (or the term `signature` for
* legacy suites) and no JSON-LD processing is necessary to discover
* the proper term (default: `false`). Note that if expansion of the
* document is required for canonicalization anyway then setting this
* flag does not improve performance.
* @param [compactProof] {boolean} `true` indicates that this method cannot
* assume that the incoming document has defined all proof terms in the
* same way as the `constants.SECURITY_CONTEXT_URL` JSON-LD `@context`.
* This means that this method must compact any found proofs to this
* context for internal and extension processing; this is the default
* behavior. To override this behavior and optimize away this step because
* the caller knows that the input document's JSON-LD `@context` defines
* the proof terms in the same way, set this flag to `false`.
*
* @return {Promise<object>} resolves with an object with a `verified`
* boolean property that is `true` if at least one proof matching the
Expand All @@ -187,7 +196,7 @@ module.exports = class ProofSet {
*/
async verify(document, {
suite, purpose, documentLoader, expansionMap,
proofTermDefined = false} = {}) {
compactProof = true} = {}) {
if(!suite) {
throw new TypeError('"options.suite" is required.');
}
Expand Down Expand Up @@ -225,21 +234,22 @@ module.exports = class ProofSet {
// fetch document
document = await documentLoader(document);
} else {
// TODO: could potentially optimize to avoid shallow copy
// TODO: consider in-place editing to optimize when `compactProof`
// is `false`

// shallow copy document to allow for removal of `proof` set
// shallow copy to allow for removal of proof set prior to canonize
document = {...document};
}

// get proofs from document
const {proofSet, document: doc} = await _getProofs({
document, legacy, documentLoader, expansionMap, proofTermDefined});
document, legacy, documentLoader, expansionMap, compactProof});
document = doc;

// verify proofs
const results = await _verify({
document, suites, proofSet,
purpose, documentLoader, expansionMap, proofTermDefined});
purpose, documentLoader, expansionMap, compactProof});
if(results.length === 0) {
throw new Error(
'Could not verify any proofs; no proofs matched the required ' +
Expand All @@ -265,7 +275,7 @@ module.exports = class ProofSet {
};

async function _getProofs({
document, legacy, documentLoader, expansionMap, proofTermDefined}) {
document, legacy, documentLoader, expansionMap, compactProof}) {
// handle document preprocessing to find proofs
let proofProperty;
let proofPropertyUrl;
Expand All @@ -277,40 +287,53 @@ async function _getProofs({
proofPropertyUrl = constants.SECURITY_PROOF_URL;
}
let proofSet;
if(proofTermDefined) {
proofSet = jsonld.getValues(document, proofProperty);
delete document[proofProperty];
} else {
if(compactProof) {
// if we must compact the proof(s) then we must first expand the input
// document to find the proof(s)
([document = {}] = await jsonld.expand(
document, {documentLoader, expansionMap}));
proofSet = jsonld.getValues(document, proofPropertyUrl);
delete document[proofPropertyUrl];
} else {
// since proofs needn't be compacted, assume matching term definitions
proofSet = jsonld.getValues(document, proofProperty);
delete document[proofProperty];
}

if(proofSet.length === 0) {
// no possible matches
throw new Error('No matching proofs found in the given document.');
}

// compact proofs to SECURITY_CONTEXT_URL context
const expanded = {
[proofTermDefined ? proofProperty : proofPropertyUrl]: proofSet
};
const ctx = jsonld.getValues(document, '@context');
expanded['@context'] = ctx;
const compact = await jsonld.compact(
expanded, constants.SECURITY_CONTEXT_URL,
{documentLoader, expansionMap, compactToRelative: false});
proofSet = jsonld.getValues(compact, proofProperty).map(proof => {
proof['@context'] = constants.SECURITY_CONTEXT_URL;
return proof;
});
if(compactProof) {
// compact proofs to SECURITY_CONTEXT_URL context
const expanded = {
[proofPropertyUrl]: proofSet
};
const ctx = jsonld.getValues(document, '@context');
expanded['@context'] = ctx;
const compact = await jsonld.compact(
expanded, constants.SECURITY_CONTEXT_URL,
{documentLoader, expansionMap, compactToRelative: false});
proofSet = jsonld.getValues(compact, proofProperty).map(proof => {
proof['@context'] = constants.SECURITY_CONTEXT_URL;
return proof;
});
} else {
// TODO: consider in-place editing to optimize

// merely shallow copy proofs and add SECURITY_CONTEXT_URL
proofSet = proofSet.map(proof => ({
'@context': constants.SECURITY_CONTEXT_URL,
...proof
}));
}
return {proofSet, document};
}

async function _verify({
document, suites, proofSet, purpose,
documentLoader, expansionMap, proofTermDefined}) {
documentLoader, expansionMap, compactProof}) {
// filter out matching proofs
const matches = proofSet.filter(proof => purpose ?
purpose.match(proof, {document, documentLoader, expansionMap}) :
Expand All @@ -326,7 +349,7 @@ async function _verify({
if(s.matchProof({proof, document, documentLoader, expansionMap})) {
return s.verifyProof({
proof, document, purpose, documentLoader, expansionMap,
proofTermDefined}).catch(error => ({verified: false, error}));
compactProof}).catch(error => ({verified: false, error}));
}
}
}))).map((r, i) => r ? {proof: matches[i], ...r} : null).filter(r => r);
Expand Down
8 changes: 4 additions & 4 deletions lib/jsonld-signatures.js
Original file line number Diff line number Diff line change
Expand Up @@ -15,15 +15,15 @@ Object.assign(api, constants);
const ProofSet = require('./ProofSet');

api.sign = async function sign(document, {
suite, purpose, documentLoader, expansionMap, proofTermDefined} = {}) {
suite, purpose, documentLoader, expansionMap, compactProof} = {}) {
return new ProofSet().add(
document, {suite, purpose, documentLoader, expansionMap, proofTermDefined});
document, {suite, purpose, documentLoader, expansionMap, compactProof});
};

api.verify = async function verify(document, {
suite, purpose, documentLoader, expansionMap, proofTermDefined} = {}) {
suite, purpose, documentLoader, expansionMap, compactProof} = {}) {
return new ProofSet().verify(
document, {suite, purpose, documentLoader, expansionMap, proofTermDefined});
document, {suite, purpose, documentLoader, expansionMap, compactProof});
};

// expose suite classes
Expand Down
6 changes: 3 additions & 3 deletions lib/suites/GraphSignature2012.js
Original file line number Diff line number Diff line change
Expand Up @@ -32,16 +32,16 @@ module.exports = class GraphSignature2012 extends LinkedDataSignature2015 {
* @param proof {object}
* @param documentLoader {function}
* @param expansionMap {function}
* @param proofTermDefined {boolean}
* @param compactProof {boolean}
*
* @returns {Promise<{Uint8Array}>}.
*/
async createVerifyData({
document, proof, documentLoader, expansionMap, proofTermDefined}) {
document, proof, documentLoader, expansionMap, compactProof}) {
const c14n = await this.canonize(document, {
documentLoader,
expansionMap,
skipExpansion: !proofTermDefined
skipExpansion: compactProof
});

let verifyData = '';
Expand Down
4 changes: 2 additions & 2 deletions lib/suites/LinkedDataProof.js
Original file line number Diff line number Diff line change
Expand Up @@ -16,12 +16,12 @@ module.exports = class LinkedDataProof {
* @param purpose {ProofPurpose}
* @param documentLoader {function}
* @param expansionMap {function}
* @param proofTermDefined {boolean}
* @param compactProof {boolean}
*
* @returns {Promise<object>} Resolves with the created proof object.
*/
async createProof(
{document, purpose, documentLoader, expansionMap, proofTermDefined}) {
{document, purpose, documentLoader, expansionMap, compactProof}) {
throw new Error('"createProof" must be implemented in a derived class.');
}

Expand Down
Loading

0 comments on commit fb60a05

Please sign in to comment.