-
Notifications
You must be signed in to change notification settings - Fork 2
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- add [eip712](https://eips.ethereum.org/EIPS/eip-712) typed struct data hashing. - add signTypedData for DidKeyring. - vc, vp, ctype, did-document support signTypedData. - verify functions support TypedData.
- Loading branch information
1 parent
e953dfb
commit 872ed45
Showing
42 changed files
with
847 additions
and
168 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,17 @@ | ||
--- | ||
'@zcloak/did-resolver': minor | ||
'@zcloak/keyring': minor | ||
'@zcloak/message': minor | ||
'@zcloak/crypto': minor | ||
'@zcloak/verify': minor | ||
'@zcloak/ctype': minor | ||
'@zcloak/did': minor | ||
'@zcloak/vc': minor | ||
--- | ||
|
||
Data signing. | ||
|
||
- add [eip712](https://eips.ethereum.org/EIPS/eip-712) typed struct data hashing. | ||
- add signTypedData for DidKeyring. | ||
- vc, vp, ctype, did-document support signTypedData. | ||
- verify functions support TypedData. |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,109 @@ | ||
// Copyright 2021-2023 zcloak authors & contributors | ||
// SPDX-License-Identifier: Apache-2.0 | ||
|
||
import type { Keypair } from '../types'; | ||
|
||
import { u8aToHex, u8aToNumber } from '@polkadot/util'; | ||
|
||
import { ethereumEncode } from '../ethereum'; | ||
import { initCrypto } from '../initCrypto'; | ||
import { keccak256AsU8a } from '../keccak'; | ||
import { secp256k1PairFromSeed, secp256k1Sign } from '../secp256k1'; | ||
import { encodeData, encodeType, getMessage, structHash, typeHash } from './eip712'; | ||
|
||
const typedData = { | ||
types: { | ||
EIP712Domain: [ | ||
{ name: 'name', type: 'string' }, | ||
{ name: 'version', type: 'string' }, | ||
{ name: 'chainId', type: 'uint256' }, | ||
{ name: 'verifyingContract', type: 'address' } | ||
], | ||
Person: [ | ||
{ name: 'name', type: 'string' }, | ||
{ name: 'wallet', type: 'address' } | ||
], | ||
Mail: [ | ||
{ name: 'from', type: 'Person' }, | ||
{ name: 'to', type: 'Person' }, | ||
{ name: 'contents', type: 'string' } | ||
] | ||
}, | ||
primaryType: 'Mail', | ||
domain: { | ||
name: 'Ether Mail', | ||
version: '1', | ||
chainId: 1, | ||
verifyingContract: '0xCcCCccccCCCCcCCCCCCcCcCccCcCCCcCcccccccC' | ||
}, | ||
message: { | ||
from: { | ||
name: 'Cow', | ||
wallet: '0xCD2a3d9F938E13CD947Ec05AbC7FE734Df8DD826' | ||
}, | ||
to: { | ||
name: 'Bob', | ||
wallet: '0xbBbBBBBbbBBBbbbBbbBbbbbBBbBbbbbBbBbbBBbB' | ||
}, | ||
contents: 'Hello, Bob!' | ||
} | ||
}; | ||
|
||
describe('EIP-712', (): void => { | ||
let pair: Keypair; | ||
|
||
beforeAll(async (): Promise<void> => { | ||
await initCrypto(); | ||
pair = secp256k1PairFromSeed(keccak256AsU8a('cow')); | ||
}); | ||
|
||
it('eip712 encodeType', () => { | ||
expect(encodeType(typedData, 'Mail')).toBe( | ||
'Mail(Person from,Person to,string contents)Person(string name,address wallet)' | ||
); | ||
}); | ||
|
||
it('eip712 typeHash', () => { | ||
expect(u8aToHex(typeHash(typedData, 'Mail'))).toBe( | ||
'0xa0cedeb2dc280ba39b857546d74f5549c3a1d7bdc2dd96bf881f76108e23dac2' | ||
); | ||
}); | ||
|
||
it('eip712 encodeData', () => { | ||
expect(u8aToHex(encodeData(typedData, typedData.primaryType, typedData.message))).toBe( | ||
'0xa0cedeb2dc280ba39b857546d74f5549c3a1d7bdc2dd96bf881f76108e23dac2fc71e5fa27ff56c350aa531bc129ebdf613b772b6604664f5d8dbe21b85eb0c8cd54f074a4af31b4411ff6a60c9719dbd559c221c8ac3492d9d872b041d703d1b5aadf3154a261abdd9086fc627b61efca26ae5702701d05cd2305f7c52a2fc8' | ||
); | ||
}); | ||
|
||
it('eip712 structHash', () => { | ||
expect(u8aToHex(structHash(typedData, typedData.primaryType, typedData.message))).toBe( | ||
'0xc52c0ee5d84264471806290a3f2c4cecfc5490626bf912d01f240d7a274b371e' | ||
); | ||
expect(u8aToHex(structHash(typedData, 'EIP712Domain', typedData.domain))).toBe( | ||
'0xf2cee375fa42b42143804025fc449deafd50cc031ca257e0b194a650a912090f' | ||
); | ||
}); | ||
|
||
it('eip712 getMessage', () => { | ||
expect(u8aToHex(getMessage(typedData, true))).toBe( | ||
'0xbe609aee343fb3c4b28e1df9e632fca64fcfaede20f02e86244efddf30957bd2' | ||
); | ||
}); | ||
|
||
it('eip712 getMessage signature', () => { | ||
const message = getMessage(typedData, true); | ||
|
||
expect(ethereumEncode(pair.publicKey)).toBe( | ||
ethereumEncode('0xcd2a3d9f938e13cd947ec05abc7fe734df8dd826') | ||
); | ||
const signature = secp256k1Sign(message, pair); | ||
|
||
const v = signature.slice(-1); | ||
const r = signature.slice(0, 32); | ||
const s = signature.slice(32, 64); | ||
|
||
expect(u8aToNumber(v)).toBe(1); | ||
expect(u8aToHex(r)).toBe('0x4355c47d63924e8a72e509b65029052eb6c299d53a04e167c5775fd466751c9d'); | ||
expect(u8aToHex(s)).toBe('0x07299936d304c153f6443dfa05f40ff007d72911b6f72307f996231605b91562'); | ||
}); | ||
}); |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,199 @@ | ||
// Copyright 2021-2023 zcloak authors & contributors | ||
// SPDX-License-Identifier: Apache-2.0 | ||
|
||
// Reference | ||
// https://eips.ethereum.org/EIPS/eip-712 | ||
// https://eips.ethereum.org/assets/eip-712/Example.js | ||
|
||
import type { TypedData } from './types'; | ||
|
||
import { hexToU8a, isU8a, u8aConcat, u8aToBuffer, u8aToU8a } from '@polkadot/util'; | ||
import abi from 'ethereumjs-abi'; | ||
|
||
import { keccak256AsU8a } from '../keccak'; | ||
|
||
const EIP_191_PREFIX = hexToU8a('0x1901'); | ||
|
||
export const ARRAY_REGEX = /^(.*)\[([0-9]*?)]$/; | ||
|
||
function encode(types: string[], values: any[]) { | ||
return abi.rawEncode( | ||
types, | ||
// ethereumjs-abi not allowd uin8array | ||
values.map((value) => (isU8a(value) ? u8aToBuffer(value) : value)) | ||
); | ||
} | ||
|
||
/** | ||
* @name getDependencies | ||
* @description | ||
* Get the dependencies of a struct type. If a struct has the same dependency multiple times, it's only included once | ||
* in the resulting array. | ||
*/ | ||
export function getDependencies( | ||
typedData: TypedData, | ||
type: string, | ||
dependencies: string[] = [] | ||
): string[] { | ||
if (dependencies.includes(type)) { | ||
return dependencies; | ||
} | ||
|
||
if (!typedData.types[type]) { | ||
return dependencies; | ||
} | ||
|
||
return [ | ||
type, | ||
...typedData.types[type].reduce<string[]>( | ||
(previous, type) => [ | ||
...previous, | ||
...getDependencies(typedData, type.type, previous).filter( | ||
(dependency) => !previous.includes(dependency) | ||
) | ||
], | ||
[] | ||
) | ||
]; | ||
} | ||
|
||
/** | ||
* @name encodeType | ||
* @description | ||
* Encode a type to a string. All dependant types are alphabetically sorted. | ||
*/ | ||
export function encodeType(typedData: TypedData, type: string): string { | ||
const [primary, ...dependencies] = getDependencies(typedData, type); | ||
const types = [primary, ...dependencies.sort()]; | ||
|
||
return types | ||
.map((dependency) => { | ||
return `${dependency}(${typedData.types[dependency].map( | ||
(type) => `${type.type} ${type.name}` | ||
)})`; | ||
}) | ||
.join(''); | ||
} | ||
|
||
/** | ||
* @name typeHash | ||
* @description | ||
* Get a type string as hash. | ||
*/ | ||
export function typeHash(typedData: TypedData, type: string): Uint8Array { | ||
return keccak256AsU8a(encodeType(typedData, type)); | ||
} | ||
|
||
/** | ||
* @name encodeValue | ||
* @description | ||
* Encodes a single value to an ABI serialisable string, number or Buffer. Returns the data as tuple, which consists of | ||
* an array of ABI compatible types, and an array of corresponding values. | ||
*/ | ||
function encodeValue( | ||
typedData: TypedData, | ||
type: string, | ||
data: unknown | ||
): [string, string | Uint8Array | number] { | ||
const match = type.match(ARRAY_REGEX); | ||
|
||
// Checks for array types | ||
if (match) { | ||
const arrayType = match[1]; | ||
const length = Number(match[2]) || undefined; | ||
|
||
if (!Array.isArray(data)) { | ||
throw new Error('Cannot encode data: value is not of array type'); | ||
} | ||
|
||
if (length && data.length !== length) { | ||
throw new Error(`Cannot encode data: expected length of ${length}, but got ${data.length}`); | ||
} | ||
|
||
const encodedData = data.map((item) => encodeValue(typedData, arrayType, item)); | ||
const types = encodedData.map((item) => item[0]); | ||
const values = encodedData.map((item) => item[1]); | ||
|
||
return ['bytes32', keccak256AsU8a(encode(types, values))]; | ||
} | ||
|
||
if (typedData.types[type]) { | ||
return ['bytes32', structHash(typedData, type, data as Record<string, unknown>)]; | ||
} | ||
|
||
// Strings and arbitrary byte arrays are hashed to bytes32 | ||
if (type === 'string') { | ||
return ['bytes32', keccak256AsU8a(data as string)]; | ||
} | ||
|
||
if (type === 'bytes') { | ||
return ['bytes32', keccak256AsU8a(u8aToU8a(data as string))]; | ||
} | ||
|
||
return [type, data as string]; | ||
} | ||
|
||
/** | ||
* @name encodeData | ||
* @description | ||
* Encode the data to an ABI encoded Buffer. The data should be a key -> value object with all the required values. All | ||
* dependant types are automatically encoded. | ||
*/ | ||
export function encodeData( | ||
typedData: TypedData, | ||
type: string, | ||
data: Record<string, unknown> | ||
): Uint8Array { | ||
const [types, values] = typedData.types[type].reduce<[string[], unknown[]]>( | ||
([types, values], field) => { | ||
if (data[field.name] === undefined || data[field.name] === null) { | ||
throw new Error(`Cannot encode data: missing data for '${field.name}'`); | ||
} | ||
|
||
const value = data[field.name]; | ||
const [type, encodedValue] = encodeValue(typedData, field.type, value); | ||
|
||
return [ | ||
[...types, type], | ||
[...values, encodedValue] | ||
]; | ||
}, | ||
[['bytes32'], [typeHash(typedData, type)]] | ||
); | ||
|
||
return encode(types, values); | ||
} | ||
|
||
/** | ||
* @name structHash | ||
* @description | ||
* Get encoded data as a hash. The data should be a key -> value object with all the required values. All dependant | ||
* types are automatically encoded. | ||
*/ | ||
export function structHash( | ||
typedData: TypedData, | ||
type: string, | ||
data: Record<string, unknown> | ||
): Uint8Array { | ||
return keccak256AsU8a(encodeData(typedData, type, data)); | ||
} | ||
|
||
/** | ||
* @name getMessage | ||
* @description | ||
* Get the EIP-191 encoded message to sign, from the typedData object. If `hash` is enabled, the message will be hashed | ||
* with Keccak256. | ||
*/ | ||
export function getMessage(typedData: TypedData, hash?: boolean): Uint8Array { | ||
const message = u8aConcat( | ||
EIP_191_PREFIX, | ||
structHash(typedData, 'EIP712Domain', typedData.domain), | ||
structHash(typedData, typedData.primaryType, typedData.message) | ||
); | ||
|
||
if (hash) { | ||
return keccak256AsU8a(message); | ||
} | ||
|
||
return message; | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,4 @@ | ||
// Copyright 2021-2023 zcloak authors & contributors | ||
// SPDX-License-Identifier: Apache-2.0 | ||
|
||
export * as eip712 from './eip712'; |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,24 @@ | ||
// Copyright 2021-2023 zcloak authors & contributors | ||
// SPDX-License-Identifier: Apache-2.0 | ||
|
||
export interface DataTypeProperty { | ||
name: string; | ||
type: string; | ||
} | ||
|
||
export interface DataTypes { | ||
[additionalProperties: string]: DataTypeProperty[]; | ||
} | ||
|
||
export interface TypedData { | ||
types: DataTypes; | ||
primaryType: string; | ||
domain: { | ||
name?: string; | ||
version?: string; | ||
chainId?: number; | ||
verifyingContract?: string; | ||
salt?: string; | ||
}; | ||
message: Record<string, unknown>; | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,4 +1,4 @@ | ||
// Copyright 2021-2023 zcloak authors & contributors | ||
// SPDX-License-Identifier: Apache-2.0 | ||
|
||
export { keccak256AsU8a, keccak512AsU8a } from './asU8a'; | ||
export { keccak256AsHex, keccak512AsHex, keccak256AsU8a, keccak512AsU8a } from './asU8a'; |
Oops, something went wrong.