diff --git a/components/AmountDisplay.tsx b/components/AmountDisplay.tsx index f814be4..007ead6 100644 --- a/components/AmountDisplay.tsx +++ b/components/AmountDisplay.tsx @@ -3,7 +3,7 @@ import { Text, TouchableOpacity, View, StyleSheet, Modal } from 'react-native' import { formatSatoshis, formatSatoshisAsFiat, satoshisOptions } from '@/utils/amountFormatHelpers' import { ExchangeRateContext } from '@/context/ExchangeRateContext' import { useTheme } from '@/context/theme/ThemeContext' -import { useWallet } from '@/context/WalletContext' +import { useWallet } from '@/context/WalletWebViewContext' type Props = { abbreviate?: boolean diff --git a/components/Balance.tsx b/components/Balance.tsx index ac2a6e6..48165fc 100644 --- a/components/Balance.tsx +++ b/components/Balance.tsx @@ -1,7 +1,7 @@ import React, { useCallback, useEffect } from 'react' import { View, Text, StyleSheet } from 'react-native' import { useTheme } from '@/context/theme/ThemeContext' -import { useWallet } from '@/context/WalletContext' +import { useWallet } from '@/context/WalletWebViewContext' import { sdk } from '@bsv/wallet-toolbox-mobile' import AmountDisplay from './AmountDisplay' import AppLogo from './AppLogo' diff --git a/components/BasketAccessModal.tsx b/components/BasketAccessModal.tsx index e44e140..9beccc6 100644 --- a/components/BasketAccessModal.tsx +++ b/components/BasketAccessModal.tsx @@ -1,6 +1,6 @@ import React, { useContext } from 'react' import { View, Text, StyleSheet, Modal, TouchableOpacity } from 'react-native' -import { WalletContext } from '../context/WalletContext' +import { WalletContext } from '../context/WalletWebViewContext' import { UserContext } from '../context/UserContext' import { useThemeStyles } from '../context/theme/useThemeStyles' import AppChip from './AppChip' diff --git a/components/CertificateAccessModal.tsx b/components/CertificateAccessModal.tsx index 5ef8f2d..d7e0f4f 100644 --- a/components/CertificateAccessModal.tsx +++ 
b/components/CertificateAccessModal.tsx @@ -1,6 +1,6 @@ import React, { useContext } from 'react' import { View, Text, StyleSheet, Modal, TouchableOpacity, ScrollView } from 'react-native' -import { WalletContext } from '../context/WalletContext' +import { WalletContext } from '../context/WalletWebViewContext' import { UserContext } from '../context/UserContext' import { useThemeStyles } from '../context/theme/useThemeStyles' import AppChip from './AppChip' diff --git a/components/ConfigModal.tsx b/components/ConfigModal.tsx index c948141..36fc90f 100644 --- a/components/ConfigModal.tsx +++ b/components/ConfigModal.tsx @@ -15,7 +15,7 @@ import { Ionicons } from '@expo/vector-icons' import { useTranslation } from 'react-i18next' import { useTheme } from '@/context/theme/ThemeContext' import { useThemeStyles } from '@/context/theme/useThemeStyles' -import { useWallet, WABConfig } from '@/context/WalletContext' +import { useWallet, WABConfig } from '@/context/WalletWebViewContext' interface ConfigModalProps { visible: boolean diff --git a/components/PasswordHandler.tsx b/components/PasswordHandler.tsx index 416dd55..296933a 100644 --- a/components/PasswordHandler.tsx +++ b/components/PasswordHandler.tsx @@ -13,7 +13,7 @@ import { Alert } from 'react-native' import { Ionicons } from '@expo/vector-icons' -import { useWallet } from '@/context/WalletContext' +import { useWallet } from '@/context/WalletWebViewContext' import { useTheme } from '@/context/theme/ThemeContext' import { useThemeStyles } from '@/context/theme/useThemeStyles' @@ -39,14 +39,9 @@ const PasswordHandler: React.FC = () => { const [wasOriginallyFocused, setWasOriginallyFocused] = useState(false) const [open, setOpen] = useState(false) const [reason, setReason] = useState('') - const [test, setTest] = useState(() => { - return Promise.resolve(true) - }) - const [resolve, setResolve] = useState(() => {}) - const [reject, setReject] = useState(() => {}) const [password, setPassword] = useState('') const 
[showPassword, setShowPassword] = useState(false) - const { setPasswordRetriever } = useWallet() + const { webviewComingEvent, sendWebViewEvent } = useWallet() const manageFocus = useCallback(() => { focusHandler.isFocused().then(focused => { @@ -57,40 +52,30 @@ const PasswordHandler: React.FC = () => { }) }, [focusHandler]) - // Define a dummy function for initialization - const dummyPasswordHandler = useCallback((reason: string, test: (pwd: string) => boolean): Promise => { - console.warn('Password handler called before initialization') - return Promise.resolve('') - }, []) - - // Create a ref to store the handler function - const handlerRef = useRef(dummyPasswordHandler) - - // Set up the actual handler function - useEffect(() => { - handlerRef.current = (reason: string, testFn: (passwordCandidate: string) => boolean): Promise => { - return new Promise((resolvePromise: Function, rejectPromise: Function) => { - setReason(reason) - setTest(() => testFn) - setResolve(() => resolvePromise) - setReject(() => rejectPromise) - setOpen(true) - manageFocus() - }) - } - }, [manageFocus]) - - // Register the handler exactly once on mount useEffect(() => { - // Provide a stable reference that delegates to our ref - const stableHandler = (): any => { - return (reason: string, test: (passwordCandidate: string) => boolean): Promise => { - return handlerRef.current(reason, test) + // Handle incoming events from the webview + if (webviewComingEvent) { + const { name, results } = webviewComingEvent; + + switch (name) { + case 'passwordRetriever.completed': + // setPasswordRetriver webview callback + setReason(results) + setOpen(true) + manageFocus() + break; + case 'testPassword.completed': + // Check password test success + if (results) { + sendWebViewEvent('testPasswordResolve') + handleClose() + } else { + Alert.alert('Error', 'Password validation failed') + } + break; } } - - setPasswordRetriever(stableHandler) - }, []) + }, [webviewComingEvent]) const handleClose = 
useCallback(() => { setOpen(false) @@ -103,23 +88,15 @@ const PasswordHandler: React.FC = () => { }, [focusHandler, wasOriginallyFocused]) const handleCancel = useCallback(() => { - reject(new Error('User cancelled')) + // Send the password rejected event + sendWebViewEvent('testPasswordReject', 'User cancelled') handleClose() - }, [handleClose, reject]) + }, [handleClose]) const handleSubmit = useCallback(async () => { - try { - const success = await test(password) - if (success) { - resolve(password) - handleClose() - } else { - Alert.alert('Error', 'Password validation failed') - } - } catch (error) { - Alert.alert('Error', 'Password validation failed') - } - }, [handleClose, password, resolve, test]) + // Send the password to the webview + sendWebViewEvent('testPassword', password) + }, [handleClose, password]) const toggleShowPassword = () => { setShowPassword(!showPassword) diff --git a/components/ProtocolAccessModal.tsx b/components/ProtocolAccessModal.tsx index b15b332..fee8b55 100644 --- a/components/ProtocolAccessModal.tsx +++ b/components/ProtocolAccessModal.tsx @@ -1,6 +1,6 @@ import React, { useContext } from 'react' import { View, Text, StyleSheet, Modal, TouchableOpacity } from 'react-native' -import { WalletContext } from '../context/WalletContext' +import { WalletContext } from '../context/WalletWebViewContext' import { UserContext } from '../context/UserContext' import { useThemeStyles } from '../context/theme/useThemeStyles' import AppChip from './AppChip' diff --git a/components/RecommendedApps.tsx b/components/RecommendedApps.tsx index a668165..b312bce 100644 --- a/components/RecommendedApps.tsx +++ b/components/RecommendedApps.tsx @@ -14,7 +14,7 @@ import { import { Ionicons } from '@expo/vector-icons' import Fuse from 'fuse.js' import { useTheme } from '@/context/theme/ThemeContext' -import { useWallet } from '@/context/WalletContext' +import { useWallet } from '@/context/WalletWebViewContext' import { useBrowserMode } from 
'@/context/BrowserModeContext' import { useTranslation } from 'react-i18next' diff --git a/components/RecoveryKeySaver.tsx b/components/RecoveryKeySaver.tsx index 135a8a6..4c0f9b0 100644 --- a/components/RecoveryKeySaver.tsx +++ b/components/RecoveryKeySaver.tsx @@ -16,7 +16,7 @@ import { import * as Clipboard from 'expo-clipboard' import { Ionicons } from '@expo/vector-icons' import { Utils } from '@bsv/sdk' -import { useWallet } from '@/context/WalletContext' +import { useWallet } from '@/context/WalletWebViewContext' import { useTheme } from '@/context/theme/ThemeContext' import { useThemeStyles } from '@/context/theme/useThemeStyles' @@ -27,10 +27,7 @@ const RecoveryKeySaver = () => { // State management const [open, setOpen] = useState(false) - const [wasOriginallyFocused, setWasOriginallyFocused] = useState(false) const [recoveryKey, setRecoveryKey] = useState('') - const [resolve, setResolve] = useState(() => {}) - const [reject, setReject] = useState(() => {}) const [copied, setCopied] = useState(false) // Checkbox states @@ -38,33 +35,24 @@ const RecoveryKeySaver = () => { const [affirmative2, setAffirmative2] = useState(false) const [affirmative3, setAffirmative3] = useState(false) - const { managers, setRecoveryKeySaver } = useWallet() + const { webviewComingEvent, sendWebViewEvent } = useWallet() const isAllChecked = affirmative1 && affirmative2 && affirmative3 - // Define a dummy function for initialization - const dummyHandler = useCallback((key: number[]): Promise => { - console.warn('Recovery key handler called before initialization') - return Promise.resolve(true) - }, []) - useEffect(() => { - setRecoveryKeySaver((): any => { - return (key: number[]): Promise => { - return new Promise((resolve, reject) => { - const keyAsStr = Utils.toBase64(key) - setResolve(() => { - return resolve - }) - setReject(() => { - return reject - }) - setRecoveryKey(keyAsStr) + // Handle incoming events from the webview + if (webviewComingEvent) { + const { name, 
results } = webviewComingEvent; + + switch (name) { + case 'recoveryKey.completed': + // setPasswordRetriver webview callback + setRecoveryKey(results) setOpen(true) - }) + break; } - }) - }, [managers]) + } + }, [webviewComingEvent]) const handleClose = () => { setOpen(false) @@ -75,12 +63,12 @@ const RecoveryKeySaver = () => { } const onAbandon = () => { - reject(new Error('User abandoned recovery key')) + sendWebViewEvent('recoveryKeyReject', 'User abandoned recovery key') handleClose() } const onKeySaved = () => { - resolve(true) + sendWebViewEvent('recoveryKeyResolve', true) handleClose() } diff --git a/components/SpendingAuthorizationModal.tsx b/components/SpendingAuthorizationModal.tsx index eda944c..eb26ba5 100644 --- a/components/SpendingAuthorizationModal.tsx +++ b/components/SpendingAuthorizationModal.tsx @@ -1,6 +1,6 @@ import React, { useContext, useState, useEffect } from 'react' import { View, Text, StyleSheet, Modal, TouchableOpacity, ScrollView } from 'react-native' -import { WalletContext } from '../context/WalletContext' +import { WalletContext } from '../context/WalletWebViewContext' import { UserContext } from '../context/UserContext' import { useThemeStyles } from '../context/theme/useThemeStyles' import { useTheme } from '../context/theme/ThemeContext' diff --git a/context/WalletWebViewContext.tsx b/context/WalletWebViewContext.tsx index 22590aa..daaeda4 100644 --- a/context/WalletWebViewContext.tsx +++ b/context/WalletWebViewContext.tsx @@ -36,7 +36,8 @@ import { router } from 'expo-router' import { logWithTimestamp } from '@/utils/logging' import WebView from 'react-native-webview' import { TouchableOpacity, View, Text } from 'react-native' -import webviewSource from '../wallet/dist/index.html'; +// For react-native-webview, use a static asset or a URI. 
Example: +const webviewSource = require('../wallet/dist/index.html'); // ----- // Context Types diff --git a/ios/Metanet.xcodeproj/project.pbxproj b/ios/Metanet.xcodeproj/project.pbxproj index 1ffd39a..cb0b0dd 100644 --- a/ios/Metanet.xcodeproj/project.pbxproj +++ b/ios/Metanet.xcodeproj/project.pbxproj @@ -69,13 +69,13 @@ name = Metanet; sourceTree = ""; }; - 2D16E6871FA4F8E400B85C8A /* Frameworks */ = { + 2C9848246E9E57E771D06B10 /* Metanet */ = { isa = PBXGroup; children = ( ED297162215061F000B7C4FE /* JavaScriptCore.framework */, A5B9AAFEE92E6E025984C3D9 /* Pods_Metanet.framework */, ); - name = Frameworks; + name = Metanet; sourceTree = ""; }; 7B5FFCC8BE8F659788D1A67B /* Pods */ = { @@ -278,6 +278,25 @@ shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Metanet/Pods-Metanet-frameworks.sh\"\n"; showEnvVarsInLog = 0; }; + 57DD25D2B98D13906BCD7932 /* [Expo] Configure project */ = { + isa = PBXShellScriptBuildPhase; + alwaysOutOfDate = 1; + buildActionMask = 2147483647; + files = ( + ); + inputFileListPaths = ( + ); + inputPaths = ( + ); + name = "[Expo] Configure project"; + outputFileListPaths = ( + ); + outputPaths = ( + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "# This script configures Expo modules and generates the modules provider file.\nbash -l -c \"./Pods/Target\\ Support\\ Files/Pods-Metanet/expo-configure-project.sh\"\n"; + }; 800E24972A6A228C8D4807E9 /* [CP] Copy Pods Resources */ = { isa = PBXShellScriptBuildPhase; buildActionMask = 2147483647; diff --git a/ios/Metanet/Info.plist b/ios/Metanet/Info.plist index d4036ba..2853b6b 100644 --- a/ios/Metanet/Info.plist +++ b/ios/Metanet/Info.plist @@ -97,4 +97,4 @@ UIViewControllerBasedStatusBarAppearance - \ No newline at end of file + diff --git a/ios/Metanet/Metanet.entitlements b/ios/Metanet/Metanet.entitlements index e2abe8c..ed11946 100644 --- a/ios/Metanet/Metanet.entitlements +++ b/ios/Metanet/Metanet.entitlements @@ -7,4 +7,4 @@ 
com.apple.developer.web-browser - \ No newline at end of file + diff --git a/ios/MetanetNotificationService/Info.plist b/ios/MetanetNotificationService/Info.plist deleted file mode 100644 index 57421eb..0000000 --- a/ios/MetanetNotificationService/Info.plist +++ /dev/null @@ -1,13 +0,0 @@ - - - - - NSExtension - - NSExtensionPointIdentifier - com.apple.usernotifications.service - NSExtensionPrincipalClass - $(PRODUCT_MODULE_NAME).NotificationService - - - diff --git a/ios/MetanetNotificationService/MetanetNotificationService.entitlements b/ios/MetanetNotificationService/MetanetNotificationService.entitlements deleted file mode 100644 index f9d7fe2..0000000 --- a/ios/MetanetNotificationService/MetanetNotificationService.entitlements +++ /dev/null @@ -1,12 +0,0 @@ - - - - - aps-environment - development - com.apple.security.application-groups - - group.org.bsvblockchain.metanet - - - diff --git a/ios/MetanetNotificationService/NotificationService.swift b/ios/MetanetNotificationService/NotificationService.swift deleted file mode 100644 index 968d8b2..0000000 --- a/ios/MetanetNotificationService/NotificationService.swift +++ /dev/null @@ -1,1215 +0,0 @@ -// -// NotificationService.swift -// MetanetNotificationService -// -// Created by Brayden Langley on 8/8/25. -// Updated with structured logging, fetch polyfills, byte-accurate base64, atob/btoa shims, UTF-8 TextEncoder/Decoder, and abortable native fetch. 
-// -import UserNotifications -import JavaScriptCore -import os -import CryptoKit - - -// ADD: Lightweight timer center for JS setTimeout/setInterval -private final class JSTimerCenter { - private let logger: Logger - private let vm: JSVirtualMachine - private unowned let ctx: JSContext - private let lock = NSLock() - private var nextId: Int = 1 - private var timers: [Int: DispatchSourceTimer] = [:] - private var callbacks: [Int: JSManagedValue] = [:] - private var argsById: [Int: [Any]] = [:] - private var repeatsById: [Int: Bool] = [:] - - init(logger: Logger, vm: JSVirtualMachine, ctx: JSContext) { - self.logger = logger - self.vm = vm - self.ctx = ctx - } - - - @discardableResult - func set(ms: Int, repeats: Bool, callback: JSValue, args: [Any]) -> Int { - let delay = max(0, ms) - let interval = max(1, ms) - - lock.lock() - let id = nextId - nextId += 1 - let managed = JSManagedValue(value: callback) - vm.addManagedReference(managed, withOwner: self) - callbacks[id] = managed - argsById[id] = args - repeatsById[id] = repeats - lock.unlock() - - let q = DispatchQueue.global(qos: .utility) - let timer = DispatchSource.makeTimerSource(queue: q) - - let start = DispatchTime.now() + .milliseconds(delay) - if repeats { - timer.schedule(deadline: start, repeating: .milliseconds(interval)) - } else { - // schedule with any repeating value; we'll cancel on first fire - timer.schedule(deadline: start, repeating: .milliseconds(interval)) - } - - timer.setEventHandler { [weak self] in - guard let self = self else { return } - self.lock.lock() - let managed = self.callbacks[id] - let args = self.argsById[id] ?? [] - let repeatFlag = self.repeatsById[id] ?? 
false - self.lock.unlock() - - if let cb = managed?.value { - _ = cb.call(withArguments: args) - } - - if !repeatFlag { - self.clear(id: id) - } - } - - lock.lock() - timers[id] = timer - lock.unlock() - - timer.resume() - logger.debug("JSTimerCenter started id=\(id) ms=\(ms) repeats=\(repeats)") - return id - } - - func clear(id: Int) { - lock.lock() - let t = timers.removeValue(forKey: id) - let managed = callbacks.removeValue(forKey: id) - argsById.removeValue(forKey: id) - repeatsById.removeValue(forKey: id) - lock.unlock() - t?.cancel() - if let m = managed { vm.removeManagedReference(m, withOwner: self) } - logger.debug("JSTimerCenter cleared id=\(id)") - } - - func clearAll() { - lock.lock() - let allTimers = timers.values - timers.removeAll() - let allManaged = callbacks.values - callbacks.removeAll() - argsById.removeAll() - repeatsById.removeAll() - lock.unlock() - for t in allTimers { t.cancel() } - for m in allManaged { vm.removeManagedReference(m, withOwner: self) } - logger.debug("JSTimerCenter cleared all timers") - } -} - -private let APP_GROUP_ID = "group.org.bsvblockchain.metanet" -private let SNAP_KEY = "snap" - -final class NotificationService: UNNotificationServiceExtension { - enum CompletionReason: String { - case success, jsRejected, jsMissingRun, jsNoPromise, jsException, - loadJSBundleFailed, invalidRequestContent, timeout, - invalidURL, networkError, noHTTPResponse - } - - private let logger = Logger(subsystem: Bundle.main.bundleIdentifier ?? "org.bsvblockchain.metanet", - category: "NSE") - - // Track in-flight URLSessionTasks so we can cancel from JS (AbortController) - private let tasksLock = NSLock() - private var tasks: [String: URLSessionTask] = [:] - - // Timer registry (for setTimeout / setInterval) - private let timersLock = NSLock() - private var timers: [String: DispatchSourceTimer] = [:] - private var jsTimerCenter: JSTimerCenter? - - var contentHandler: ((UNNotificationContent) -> Void)? 
- var bestAttemptContent: UNMutableNotificationContent? - - override func didReceive(_ request: UNNotificationRequest, - withContentHandler contentHandler: @escaping (UNNotificationContent) -> Void) { - let start = Date() - let reqID = UUID().uuidString - - func msElapsed() -> Int { Int(Date().timeIntervalSince(start) * 1000) } - - self.contentHandler = contentHandler - let threadName = Thread.isMainThread ? "main" : "bg" - logger.info("didReceive start id=\(reqID, privacy: .public) identifier=\(request.identifier, privacy: .public) thread=\(threadName, privacy: .public)") - - guard let best = (request.content.mutableCopy() as? UNMutableNotificationContent) else { - logger.error("Mutable copy of request content failed; returning original. id=\(reqID, privacy: .public)") - contentHandler(request.content) - return - } - - // Make sure serviceExtensionTimeWillExpire() can return something - self.bestAttemptContent = best - - // A small completion gate to ensure we only finish once - let lock = NSLock() - var completed = false - func complete(_ reason: CompletionReason) { - lock.lock(); defer { lock.unlock() } - guard !completed else { return } - completed = true - logger.info("Completing id=\(reqID, privacy: .public) reason=\(reason.rawValue, privacy: .public) elapsedMs=\(msElapsed())") - if let ch = self.contentHandler { - ch(self.bestAttemptContent ?? best) - } - } - - // Ephemeral session for the NSE - let cfg = URLSessionConfiguration.ephemeral - cfg.timeoutIntervalForRequest = 20 - cfg.timeoutIntervalForResource = 25 - cfg.waitsForConnectivity = false - let session = URLSession(configuration: cfg) - let waits = cfg.waitsForConnectivity ? 
"true" : "false" - logger.debug("URLSession configured requestTimeout=\(cfg.timeoutIntervalForRequest, privacy: .public)s resourceTimeout=\(cfg.timeoutIntervalForResource, privacy: .public)s waitsForConnectivity=\(waits, privacy: .public)") - - // One JS VM + context - let vm = JSVirtualMachine() - let ctx = JSContext(virtualMachine: vm)! - - // ADD: instantiate timer center - self.jsTimerCenter = JSTimerCenter(logger: logger, vm: vm!, ctx: ctx) - - // ADD: native timer bridges - let _setTimerNative: @convention(block) (Int, JSValue, Bool, JSValue?) -> Int = { [weak self] ms, fn, repeats, argsVal in - guard let self = self, let center = self.jsTimerCenter else { return 0 } - let args = argsVal?.toArray() ?? [] - return center.set(ms: ms, repeats: repeats, callback: fn, args: args) - } - let _clearTimerNative: @convention(block) (Int) -> Void = { [weak self] id in - guard let self = self, let center = self.jsTimerCenter else { return } - center.clear(id: id) - } - ctx.setObject(_setTimerNative, forKeyedSubscript: "_setTimerNative" as NSString) - ctx.setObject(_clearTimerNative, forKeyedSubscript: "_clearTimerNative" as NSString) - - // Log JS exceptions - ctx.exceptionHandler = { [weak self] _, exc in - self?.logger.error("JS exception id=\(reqID, privacy: .public): \(String(describing: exc), privacy: .public)") - } - - // ---- Globals ---- - if let g = ctx.globalObject { - g.setValue(g, forProperty: "globalThis") - g.setValue(g, forProperty: "self") - g.setValue(g, forProperty: "window") - } - - // ---- console bridge: support log/debug/info/warn/error/trace/time/timeEnd/group/groupCollapsed/groupEnd/assert/dir ---- - let nativeConsole: @convention(block) (String, String) -> Void = { [weak self] level, message in - guard let self = self else { return } - // TEMPORARY: Use .error for all messages to ensure visibility during debugging - switch level.lowercased() { - case "error": - self.logger.error("[JS][error] \(message, privacy: .public)") - case "warn": - 
self.logger.error("[JS][warn] \(message, privacy: .public)") // Elevated to error - case "info", "log": - self.logger.error("[JS][info] \(message, privacy: .public)") // Elevated to error - case "debug": - self.logger.error("[JS][debug] \(message, privacy: .public)") // Elevated to error - case "trace": - self.logger.error("[JS][trace] \(message, privacy: .public)") // Elevated to error - case "time": - self.logger.error("[JS][time] \(message, privacy: .public)") // Elevated to error - case "group", "groupcollapsed": - self.logger.error("[JS][group] \(message, privacy: .public)") // Elevated to error - case "groupend": - self.logger.error("[JS][groupEnd] \(message, privacy: .public)") // Elevated to error - case "assert": - self.logger.error("[JS][assert] \(message, privacy: .public)") - case "dir": - self.logger.error("[JS][dir] \(message, privacy: .public)") // Elevated to error - default: - self.logger.error("[JS][\(level, privacy: .public)] \(message, privacy: .public)") // Elevated to error - } - } - ctx.setObject(nativeConsole, forKeyedSubscript: "_consoleNative" as NSString) - - ctx.evaluateScript(#""" - (function(){ - function format(arg){ - try { - if (typeof arg === 'string') return arg; - if (typeof arg === 'number' || typeof arg === 'boolean' || arg == null) return String(arg); - if (arg && (arg.stack || arg.message)) { - // Format error objects with full details - var parts = []; - if (arg.name) parts.push(arg.name + ':'); - if (arg.message) parts.push(arg.message); - if (arg.stack) parts.push('\nStack trace:\n' + arg.stack); - return parts.join(' '); - } - try { return JSON.stringify(arg, null, 2); } catch(_) { return String(arg); } - } catch(e) { return String(arg); } - } - function send(level){ - var args = Array.prototype.slice.call(arguments, 1); - var msg; - try { msg = args.map(format).join(' '); } catch(e) { msg = String(args); } - try { _consoleNative(String(level), String(msg)); } catch(e) { /* ignore */ } - } - if (!globalThis.console) 
globalThis.console = {}; - var c = globalThis.console; - c.log = function(){ send('log', ...arguments); }; - c.debug = function(){ send('debug', ...arguments); }; - c.info = function(){ send('info', ...arguments); }; - c.warn = function(){ send('warn', ...arguments); }; - c.error = function(){ send('error', ...arguments); }; - c.trace = function(){ - var parts = Array.prototype.slice.call(arguments); - try { parts.push(new Error().stack || ''); } catch(_){ } - send('trace', ...parts); - }; - const _timers = new Map(); - c.time = function(label){ label = label || 'default'; try { _timers.set(label, Date.now()); } catch(_){ } }; - c.timeEnd = function(label){ label = label || 'default'; try { var t = _timers.get(label); if (typeof t === 'number') { send('time', label + ': ' + (Date.now()-t) + 'ms'); _timers.delete(label); } } catch(_){ } }; - c.group = function(){ send('group', ...arguments); }; - c.groupCollapsed = function(){ send('groupCollapsed', ...arguments); }; - c.groupEnd = function(){ send('groupEnd'); }; - c.assert = function(cond){ if (!cond) { var args = Array.prototype.slice.call(arguments, 1); if (!args.length) args = ['Assertion failed']; send('assert', ...args); } }; - c.dir = function(obj){ try { send('dir', JSON.stringify(obj)); } catch(_) { send('dir', String(obj)); } }; - })(); - """#) - - // ---- Byte-accurate base64 helpers (available early) ---- - ctx.evaluateScript(#""" - (function(){ - if (!globalThis.__b64) { - const ABC = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'; - const LUT = (function(){ - const t = new Int16Array(128); for (let i=0;i> 2) - (b64.endsWith('==') ? 2 : b64.endsWith('=') ? 1 : 0); - const out = new Uint8Array(outLen); - let o = 0; - - for (let i = 0; i < len; i += 4) { - const c1 = LUT[b64.charCodeAt(i)]; - const c2 = LUT[b64.charCodeAt(i + 1)]; - const p3 = b64[i + 2] === '='; - const p4 = b64[i + 3] === '='; - const c3 = p3 ? 0 : LUT[b64.charCodeAt(i + 2)]; - const c4 = p4 ? 
0 : LUT[b64.charCodeAt(i + 3)]; - - // (optional) basic validation - // if (c1 < 0 || c2 < 0 || (!p3 && c3 < 0) || (!p4 && c4 < 0)) throw new Error('bad base64'); - - const x = (c1 << 18) | (c2 << 12) | (c3 << 6) | c4; - - out[o++] = (x >>> 16) & 255; - if (!p3) out[o++] = (x >>> 8) & 255; - if (!p4) out[o++] = x & 255; - } - return out; - } - - - - function fromU8(u8){ - let out = ''; - let i = 0; - for (; i + 2 < u8.length; i += 3){ - const x = (u8[i]<<16) | (u8[i+1]<<8) | u8[i+2]; - out += ABC[(x>>>18)&63] + ABC[(x>>>12)&63] + ABC[(x>>>6)&63] + ABC[x&63]; - } - if (i < u8.length){ - let x = u8[i]<<16; - out += ABC[(x>>>18)&63] + ABC[(x>>>12)&63]; - if (i + 1 < u8.length){ - x |= u8[i+1]<<8; - out += ABC[(x>>>6)&63] + '='; - } else { - out += '=='; - } - } - return out; - } - - globalThis.__b64 = { toU8: toU8, fromU8: fromU8 }; - } - })(); - """#) - - // ---- Polyfills: TextEncoder/Decoder (UTF-8 correct) + atob/btoa ---- - ctx.evaluateScript(#""" - (function(){ - if (typeof TextEncoder === 'undefined') { - class TextEncoder { - encode(str){ - var out = [], i = 0, len = str.length; - while (i < len) { - var c = str.charCodeAt(i++); - // surrogate pair - if (c >= 0xD800 && c <= 0xDBFF && i < len) { - var c2 = str.charCodeAt(i++); - var cp = ((c - 0xD800) << 10) + (c2 - 0xDC00) + 0x10000; - out.push(0xF0 | (cp >> 18), - 0x80 | ((cp >> 12) & 63), - 0x80 | ((cp >> 6) & 63), - 0x80 | (cp & 63)); - } else if (c < 0x80) { - out.push(c); - } else if (c < 0x800) { - out.push(0xC0 | (c >> 6), - 0x80 | (c & 63)); - } else { - out.push(0xE0 | (c >> 12), - 0x80 | ((c >> 6) & 63), - 0x80 | (c & 63)); - } - } - return new Uint8Array(out); - } - } - globalThis.TextEncoder = TextEncoder; - } - if (typeof TextDecoder === 'undefined') { - class TextDecoder { - decode(u8){ - var out = "", i = 0, c = 0, c2 = 0, c3 = 0, c4 = 0; - while (i < u8.length) { - c = u8[i++]; - if (c < 0x80) { out += String.fromCharCode(c); } - else if (c < 0xE0) { c2 = u8[i++]; out += String.fromCharCode(((c 
& 31) << 6) | (c2 & 63)); } - else if (c < 0xF0) { c2 = u8[i++]; c3 = u8[i++]; out += String.fromCharCode(((c & 15) << 12) | ((c2 & 63) << 6) | (c3 & 63)); } - else { c2 = u8[i++]; c3 = u8[i++]; c4 = u8[i++]; var cp = ((c & 7) << 18) | ((c2 & 63) << 12) | ((c3 & 63) << 6) | (c4 & 63); cp -= 0x10000; out += String.fromCharCode(0xD800 + (cp >> 10), 0xDC00 + (cp & 0x3FF)); } - } - return out; - } - } - globalThis.TextDecoder = TextDecoder; - } - - // Spec-correct atob/btoa for binary strings (use byte-accurate base64) - if (typeof atob !== 'function') { - globalThis.atob = function(b64){ - var u8 = __b64.toU8(String(b64)); - var s = ''; - for (var i=0;i -1 && host.indexOf(']') < idx) { hostname = host.slice(0, idx); port = host.slice(idx+1); } - this.hostname = hostname; - this.port = port; - this.host = hostname + (port ? (':' + port) : ''); - this.pathname = m[4] || ''; - this.search = m[5] ? ('?' + m[5]) : ''; - this.hash = m[6] ? ('#' + m[6]) : ''; - this.searchParams = new URLSearchParams(m[5] || ''); - this.username = ''; - this.password = ''; - this.origin = (this.protocol && this.host) ? (this.protocol + '//' + this.host) : ''; - this.href = this.toString(); - } - toString(){ - var auth = this.username || this.password ? (this.username + (this.password? ':'+this.password:'') + '@') : ''; - var base = (this.protocol || '') + '//' + auth + this.host; - var q = this.searchParams && typeof this.searchParams.toString === 'function' ? this.searchParams.toString() : (this.search && this.search[0]==='?' ? this.search.slice(1) : this.search); - var search = q ? ('?' + q) : ''; - return base + (this.pathname || '') + search + (this.hash || ''); - } - } - globalThis.URL = URL; - } - })(); - """#) - - // ADD: setTimeout/clearTimeout/setInterval/clearInterval shims - ctx.evaluateScript(#""" - (function(){ - if (typeof setTimeout === 'undefined') { - globalThis.setTimeout = function(handler, timeout){ - var fn = (typeof handler === 'function') ? 
handler : function(){ try { eval(String(handler)); } catch(e){ console.error(e); } }; - var ms = (timeout|0); - var args = Array.prototype.slice.call(arguments, 2); - try { return _setTimerNative(ms, fn, false, args); } catch (e) { console.error(e); return 0; } - }; - globalThis.clearTimeout = function(id){ - try { _clearTimerNative(id|0); } catch(_) {} - }; - globalThis.setInterval = function(handler, timeout){ - var fn = (typeof handler === 'function') ? handler : function(){ try { eval(String(handler)); } catch(e){ console.error(e); } }; - var ms = (timeout|0); - var args = Array.prototype.slice.call(arguments, 2); - try { return _setTimerNative(ms, fn, true, args); } catch (e) { console.error(e); return 0; } - }; - globalThis.clearInterval = function(id){ - try { _clearTimerNative(id|0); } catch(_) {} - }; - } - })(); - """#) - - // ---- Native fetch with abort support (URLSession) ---- - let nativeFetch2: @convention(block) (String, String, JSValue?, JSValue, JSValue) -> Void = { [weak self] opId, urlStr, initVal, resolve, reject in - guard let self = self else { return } - guard let url = URL(string: urlStr) else { - _ = reject.call(withArguments: ["Invalid URL"]) - self.logger.error("nativeFetch2 invalid URL: \(urlStr, privacy: .public) opId=\(opId, privacy: .public)") - return - } - var req = URLRequest(url: url) - req.httpMethod = (initVal?.forProperty("method").toString() ?? "GET") - req.timeoutInterval = 20 - - if let headersObj = initVal?.forProperty("headers"), let dict = headersObj.toDictionary() as? [String: Any] { - for (k,v) in dict { req.setValue(String(describing: v), forHTTPHeaderField: k) } - } - if let bodyVal = initVal?.forProperty("body"), !bodyVal.isUndefined { - if bodyVal.isString, let s = bodyVal.toString() { - if s.hasPrefix("base64:") { let b64 = String(s.dropFirst(7)); req.httpBody = Data(base64Encoded: b64) } - else { req.httpBody = s.data(using: .utf8) } - } - } - - self.logger.debug("nativeFetch2 → \(req.httpMethod ?? 
"GET", privacy: .public) \(url.absoluteString, privacy: .public) opId=\(opId, privacy: .public)") - - let task = session.dataTask(with: req) { data, resp, err in - // remove from registry - self.tasksLock.lock(); self.tasks.removeValue(forKey: opId); self.tasksLock.unlock() - - if let err = err { - _ = reject.call(withArguments: [String(describing: err)]) - self.logger.error("nativeFetch2 network error: \(String(describing: err), privacy: .public) opId=\(opId, privacy: .public)") - return - } - guard let http = resp as? HTTPURLResponse else { - _ = reject.call(withArguments: ["No response"]) - self.logger.error("nativeFetch2 missing HTTPURLResponse opId=\(opId, privacy: .public)") - return - } - let headers = http.allHeaderFields.reduce(into: [String:String]()) { acc, kv in - acc[String(describing: kv.key).lowercased()] = String(describing: kv.value) - } - self.logger.error("nativeFetch2 ← status=\(http.statusCode) bytes=\(data?.count ?? 0) opId=\(opId, privacy: .public)") - - // For JSON/text responses, pass body directly as string - var result: [String: Any] = [ - "status": http.statusCode, - "statusText": HTTPURLResponse.localizedString(forStatusCode: http.statusCode), - "ok": (200...299).contains(http.statusCode), - "url": http.url?.absoluteString ?? 
urlStr, - "headers": headers - ] - - // Always set a body field to prevent null body issues - if let data = data, !data.isEmpty { - self.logger.error("nativeFetch2 processing response data: \(data.count) bytes") - - // // Always override content-type to application/json and return as string - // result["headers"] = headers.merging(["content-type": "application/json"]) { $1 } - - if let bodyString = String(data: data, encoding: .utf8) { - self.logger.error("nativeFetch2 UTF-8 decode success: \(bodyString.count) chars, preview: \(String(bodyString.prefix(100)))") - result["body"] = bodyString - } else { - // UTF-8 decode failed - try base64 encoding as fallback - let base64String = data.base64EncodedString() - self.logger.error("nativeFetch2 UTF-8 decode failed, using base64: \(base64String.count) chars") - result["body"] = "" // Set empty string instead of missing key - result["bodyBase64"] = base64String - } - } else { - // No data or empty data - always set empty body - self.logger.error("nativeFetch2 no data, setting empty body") - result["body"] = "" - } - _ = resolve.call(withArguments: [result]) - } - - // register task and start - self.tasksLock.lock(); self.tasks[opId] = task; self.tasksLock.unlock() - task.resume() - } - ctx.setObject(nativeFetch2, forKeyedSubscript: "_nativeFetch2" as NSString) - - let nativeFetchCancel: @convention(block) (String) -> Void = { [weak self] opId in - guard let self = self else { return } - self.tasksLock.lock(); let task = self.tasks.removeValue(forKey: opId); self.tasksLock.unlock() - if let task = task { - self.logger.debug("nativeFetch cancel opId=\(opId, privacy: .public)") - task.cancel() - } - } - ctx.setObject(nativeFetchCancel, forKeyedSubscript: "_nativeFetchCancel" as NSString) - - // ---- Fetch polyfills: AbortController, Headers, Blob, Request, Response, fetch (uses __b64 + UTF-8 TD/TE) ---- - ctx.evaluateScript(#""" - (function(){ - function isArrayBuffer(v){ return (v && v.byteLength !== undefined && 
v.constructor && v.constructor.name === 'ArrayBuffer'); } - function isView(v){ return v && v.buffer && v.byteLength !== undefined && v.constructor && /Array$/.test(v.constructor.name); } - - // AbortController / AbortSignal (minimal) - if (typeof AbortSignal === 'undefined') { - class AbortSignal { - constructor(){ this.aborted = false; this._listeners = []; } - addEventListener(type, fn){ if (type === 'abort' && typeof fn === 'function') this._listeners.push(fn); } - removeEventListener(type, fn){ if (type !== 'abort') return; var i=this._listeners.indexOf(fn); if (i>=0) this._listeners.splice(i,1); } - _dispatch(){ if (this.aborted) return; this.aborted = true; var ev = { type: 'abort' }; var list = this._listeners.slice(); for (var i=0;ithis.set(k,v)); - else if (Array.isArray(init)) { for (var i=0;ilist.forEach(v=>cb.call(thisArg, v, k, this))); } - keys(){ return this._map.keys(); } - values(){ const self=this; return (function*(){ for (const [k,vs] of self._map) for (const v of vs) yield v; })(); } - entries(){ const self=this; return (function*(){ for (const [k,vs] of self._map) for (const v of vs) yield [k,v]; })(); } - [Symbol.iterator](){ return this.entries(); } - toObject(){ const obj={}; this._map.forEach((list,k)=>{ obj[k]=list.join(', '); }); return obj; } - } - globalThis.Headers = Headers; - } - - // Blob (minimal) - if (typeof Blob === 'undefined') { - class Blob { - constructor(parts, options){ - parts = parts || []; - this.type = options && options.type ? 
String(options.type) : ''; - var chunks = []; - for (var i=0;in+a.length,0); var u8 = new Uint8Array(len); var off=0; - for (var j=0;j Void = { [weak self, weak ctx] id, ms, repeats in - guard let self = self, let ctx = ctx else { return } - let msInt = max(0, Int(ms.rounded())) - let q = DispatchQueue.main - let timer = DispatchSource.makeTimerSource(queue: q) - // schedule; we cancel after first fire for one-shot - let interval = DispatchTimeInterval.milliseconds(max(1, msInt)) - timer.schedule(deadline: .now() + interval, repeating: repeats ? interval : interval) - timer.setEventHandler { [weak self, weak ctx] in - guard let self = self, let ctx = ctx else { return } - if let fire = ctx.objectForKeyedSubscript("_timerFire") { - _ = fire.call(withArguments: [id]) - } - if !repeats { - self.timersLock.lock() - let t = self.timers.removeValue(forKey: id) - self.timersLock.unlock() - t?.cancel() - } - } - self.timersLock.lock(); self.timers[id] = timer; self.timersLock.unlock() - timer.resume() - } - ctx.setObject(nativeTimerStart, forKeyedSubscript: "_nativeTimerStart" as NSString) - - let nativeTimerClear: @convention(block) (String) -> Void = { [weak self] id in - guard let self = self else { return } - self.timersLock.lock() - let t = self.timers.removeValue(forKey: id) - self.timersLock.unlock() - t?.cancel() - } - ctx.setObject(nativeTimerClear, forKeyedSubscript: "_nativeTimerClear" as NSString) - - ctx.evaluateScript(#""" - (function(){ - if (typeof globalThis.setTimeout !== 'function') { - var __timerSeq = 1; - var __timerFn = Object.create(null); - var __timerArgs = Object.create(null); - var __timerKind = Object.create(null); // 't' one-shot, 'i' interval - - globalThis._timerFire = function(id){ - var fn = __timerFn[id]; - if (!fn) return; - try { fn.apply(null, __timerArgs[id] || []); } - catch (e) { try { console.error(e && e.stack || String(e)); } catch(_){} } - if (__timerKind[id] === 't') { - delete __timerFn[id]; delete __timerArgs[id]; delete 
__timerKind[id]; - } - }; - - function _startTimer(fn, ms, argsArray, repeat){ - var id = String(__timerSeq++); - __timerFn[id] = (typeof fn === 'function') ? fn : function(){}; - __timerArgs[id] = Array.prototype.slice.call(argsArray || []); - __timerKind[id] = repeat ? 'i' : 't'; - try { _nativeTimerStart(id, Number(ms)||0, !!repeat); } catch(_){} - return id; - } - - globalThis.setTimeout = function(fn, ms){ - return +_startTimer(fn, ms, Array.prototype.slice.call(arguments, 2), false); - }; - globalThis.clearTimeout = function(id){ - id = String(id); - try { _nativeTimerClear(id); } catch(_){} - delete __timerFn[id]; delete __timerArgs[id]; delete __timerKind[id]; - }; - globalThis.setInterval = function(fn, ms){ - return +_startTimer(fn, ms, Array.prototype.slice.call(arguments, 2), true); - }; - globalThis.clearInterval = function(id){ - id = String(id); - try { _nativeTimerClear(id); } catch(_){} - delete __timerFn[id]; delete __timerArgs[id]; delete __timerKind[id]; - }; - } - })(); - """#) - - // ---- crypto.getRandomValues (secure) ---- - let nativeRandomBytes: @convention(block) (Int) -> String = { [weak self] len in - let n = max(0, len) - var bytes = [UInt8](repeating: 0, count: n) - let rc = SecRandomCopyBytes(kSecRandomDefault, n, &bytes) - if rc != errSecSuccess { self?.logger.error("SecRandomCopyBytes failed rc=\(rc)"); return "" } - return Data(bytes).base64EncodedString() - } - ctx.setObject(nativeRandomBytes, forKeyedSubscript: "_nativeRandomBytes" as NSString) - ctx.evaluateScript(#""" - (function(){ - if (!window.crypto) window.crypto = {}; - if (typeof window.crypto.getRandomValues !== 'function') { - window.crypto.getRandomValues = function(typedArray){ - var b64 = _nativeRandomBytes(typedArray.length|0); - var bin = atob(b64); - for (var i=0;i Void = { [weak self] algo, base64In, resolve, reject in - guard algo.uppercased() == "SHA-256" else { _ = reject.call(withArguments: ["Unsupported algorithm: \(algo)"]); 
self?.logger.error("nativeDigest unsupported algo=\(algo, privacy: .public)"); return } - let b64 = base64In.hasPrefix("base64:") ? String(base64In.dropFirst(7)) : base64In - guard let data = Data(base64Encoded: b64) else { _ = reject.call(withArguments: ["Bad data"]); self?.logger.error("nativeDigest bad base64 input"); return } - let digest = SHA256.hash(data: data) - let out = Data(digest).base64EncodedString() - _ = resolve.call(withArguments: ["base64:" + out]) - } - ctx.setObject(nativeDigest, forKeyedSubscript: "_nativeDigest" as NSString) - ctx.evaluateScript(#""" - (function(){ - function ab2b64(buf){ - var u8 = (buf instanceof ArrayBuffer) ? new Uint8Array(buf) - : (ArrayBuffer.isView(buf) ? new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength) - : new TextEncoder().encode(String(buf))); - return __b64.fromU8(u8); - } - if (!crypto.subtle) crypto.subtle = {}; - if (typeof crypto.subtle.digest !== 'function') { - crypto.subtle.digest = function(name, data){ - var b64 = 'base64:' + ab2b64(data); - return new Promise(function(resolve, reject){ - _nativeDigest(String(name), b64, resolve, reject); - }).then(function(outB64){ - var u8 = __b64.toU8(outB64.slice(7)); - return u8.buffer; - }); - }; - } - })(); - """#) - - // --- Pull snap: prefer App Group defaults, fallback to APNS payload --- - var snapJSON: String = "" - if let defaults = UserDefaults(suiteName: APP_GROUP_ID), let s = defaults.string(forKey: SNAP_KEY), !s.isEmpty { - snapJSON = s - logger.debug("Loaded snap from app group (len=\(s.count)) id=\(reqID, privacy: .public)") - } else if let s = request.content.userInfo["snap"] as? String { - snapJSON = s - logger.debug("Loaded snap from APNS payload (len=\(s.count)) id=\(reqID, privacy: .public)") - } else { - logger.notice("No snap found in app group or APNS payload. 
id=\(reqID, privacy: .public)") - } - - // ---- Load bundled JS (wallet-bundle.js) ---- - guard let jsURL = Bundle.main.url(forResource: "wallet-bundle", withExtension: "js"), - let script = try? String(contentsOf: jsURL) else { - logger.error("Failed to load wallet-bundle.js id=\(reqID, privacy: .public)") - complete(.loadJSBundleFailed) - return - } - ctx.evaluateScript(script) - if let exc = ctx.exception { - logger.error("Exception after evaluating bundle: \(String(describing: exc), privacy: .public)") - complete(.jsException) - return - } - - // ---- Invoke window.run(snap, messageId) ---- - let userInfo = request.content.userInfo - let messageId: String = - (userInfo["messageId"] as? String) ?? - ((userInfo["data"] as? [String: Any])?["messageId"] as? String) ?? - (userInfo["gcm.message_id"] as? String) ?? - (userInfo["fcmMessageId"] as? String) ?? - UUID().uuidString - - logger.debug("Invoking run(snap, messageId=\(messageId, privacy: .public)) id=\(reqID, privacy: .public)") - - guard let run = ctx.objectForKeyedSubscript("run"), !run.isUndefined else { - logger.error("window.run is missing/undefined id=\(reqID, privacy: .public)") - complete(.jsMissingRun) - return - } - - guard let promise = run.call(withArguments: [snapJSON, messageId]) else { - logger.error("run(...) did not return a value id=\(reqID, privacy: .public)") - complete(.jsNoPromise) - return - } - - guard promise.hasProperty("then") else { - logger.error("run(...) did not return a thenable id=\(reqID, privacy: .public)") - complete(.jsNoPromise) - return - } - - let fulfill: @convention(block) (JSValue?) -> Void = { [weak self] result in - guard let self = self else { return } - self.logger.info("JS fulfilled id=\(reqID, privacy: .public) elapsedMs=\(msElapsed())") - if let dict = result?.toDictionary() as? [String: Any] { - if let t = dict["title"] as? String { best.title = t } - if let b = dict["body"] as? String { best.body = b } - if let data = dict["data"] as? 
[String: Any] { - best.userInfo = best.userInfo.merging(["data": data]) { $1 } - } - } - self.bestAttemptContent = best - complete(.success) - } - - let reject: @convention(block) (JSValue?) -> Void = { [weak self] error in - guard let self = self else { return } - self.logger.error("JS rejected id=\(reqID, privacy: .public): \(String(describing: error), privacy: .public)") - complete(.jsRejected) - } - - let thenArg = JSValue(object: fulfill, in: ctx)! - let catchArg = JSValue(object: reject, in: ctx)! - - // Chain .then() and .catch() for robust error handling - let thenPromise = promise.invokeMethod("then", withArguments: [thenArg]) - _ = thenPromise?.invokeMethod("catch", withArguments: [catchArg]) - - // ---- Safety timeout (10s) ---- - DispatchQueue.global().asyncAfter(deadline: .now() + 10) { [weak self] in - guard let self = self else { return } - if !completed { - self.logger.error("Safety timeout fired id=\(reqID, privacy: .public) elapsedMs=\(msElapsed())") - complete(.timeout) - } - } - } - - override func serviceExtensionTimeWillExpire() { - // Cancel any in-flight fetches to free resources quickly - tasksLock.lock(); let opIds = Array(tasks.keys); let inFlight = tasks.values; tasks.removeAll(); tasksLock.unlock() - for t in inFlight { t.cancel() } - if !opIds.isEmpty { logger.debug("serviceExtensionTimeWillExpire: cancelled \(opIds.count) fetch task(s)") } - - // Cancel active timers - timersLock.lock(); let activeTimers = timers.values; timers.removeAll(); timersLock.unlock() - for t in activeTimers { t.cancel() } - - let hasBest = bestAttemptContent != nil - logger.error("serviceExtensionTimeWillExpire fired; returning bestAttemptContent? 
\(hasBest, privacy: .public)") - if let contentHandler = contentHandler, let best = bestAttemptContent { - contentHandler(best) - } - } -} diff --git a/ios/MetanetNotificationService/wallet-bundle.js b/ios/MetanetNotificationService/wallet-bundle.js deleted file mode 100644 index b54b523..0000000 --- a/ios/MetanetNotificationService/wallet-bundle.js +++ /dev/null @@ -1,20 +0,0 @@ -!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t():"function"==typeof define&&define.amd?define([],t):"object"==typeof exports?exports.WalletBundle=t():e.WalletBundle=t()}(self,()=>(()=>{"use strict";let e,t,i;var r,s,n,a,o,c={};c.d=(e,t)=>{for(var i in t)c.o(t,i)&&!c.o(e,i)&&Object.defineProperty(e,i,{enumerable:!0,get:t[i]})},c.o=(e,t)=>Object.prototype.hasOwnProperty.call(e,t),c.r=e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})};var h={};c.r(h),c.d(h,{Decoder:()=>r_,Encoder:()=>rv,PacketType:()=>o,protocol:()=>rk});class l{static zeros=["","0","00","000","0000","00000","000000","0000000","00000000","000000000","0000000000","00000000000","000000000000","0000000000000","00000000000000","000000000000000","0000000000000000","00000000000000000","000000000000000000","0000000000000000000","00000000000000000000","000000000000000000000","0000000000000000000000","00000000000000000000000","000000000000000000000000","0000000000000000000000000"];static groupSizes=[0,0,25,16,12,11,10,9,8,8,7,7,7,7,6,6,6,6,6,6,6,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5];static groupBases=[0,0,0x2000000,0x290d741,0x1000000,0x2e90edd,0x39aa400,0x267bf47,0x1000000,0x290d741,1e7,0x12959c3,0x222c000,0x3bd7765,7529536,0xadcea1,0x1000000,0x1704f61,0x206fc40,0x2cddcf9,64e6,4084101,5153632,6436343,7962624,9765625,0xb54ba0,0xdaf26b,0x1069c00,0x138f9ad,243e5,0x1b4d89f,0x2000000,0x25528a1,0x2b54a20,0x3216b93,0x39aa400];static wordSize=26;static WORD_SIZE_BIGINT=BigInt(l.wordSize);static 
WORD_MASK=(1n<0n;)e.push(Number(t&l.WORD_MASK)),t>>=l.WORD_SIZE_BIGINT;return e.length>0?e:[0]}get words(){let e=this._computedWordsArray;if(this._nominalWordLength<=e.length)return e;let t=Array(this._nominalWordLength).fill(0);for(let i=0;i0?e.length:1;for(let t=r-1;t>=0;t--){let r=void 0===e[t]?0:e[t];i=i<0?e:t}static min(e,t){return 0>e.cmp(t)?e:t}constructor(e=0,t=10,i="be"){if(this._magnitude=0n,this._sign=0,this._nominalWordLength=1,this.red=null,void 0===e&&(e=0),null===e)return void this._initializeState(0n,0);if("bigint"==typeof e){this._initializeState(e<0n?-e:e,+(e<0n)),this.normSign();return}let r=t,s=i;if(("le"===t||"be"===t)&&(s=t,r=10),"number"==typeof e)return void this.initNumber(e,s);if(Array.isArray(e))return void this.initArray(e,s);if("string"==typeof e){"hex"===r&&(r=16),this.assert("number"==typeof r&&r===(0|r)&&r>=2&&r<=36,"Base must be an integer between 2 and 36");let t=e.toString().replace(/\s+/g,""),i=0,n=0;t.startsWith("-")?(i++,n=1):t.startsWith("+")&&i++;let a=t.substring(i);if(0===a.length){this._initializeState(0n,1===n&&t.startsWith("-")?1:0),this.normSign();return}if(16===r){let e;if("le"===s){let e=[],t=a;t.length%2!=0&&(t="0"+t);for(let i=0;i36)throw Error("Base must be between 2 and 36");let i="",r=e>0n?e:-e,s=BigInt(t);for(;r>0n;)i="0123456789abcdefghijklmnopqrstuvwxyz"[Number(r%s)]+i,r/=s;return i}_parseBaseString(e,t){if(0===e.length){this._magnitude=0n,this._finishInitialization();return}this._magnitude=0n;let i=BigInt(t),r=l.groupSizes[t],s=BigInt(l.groupBases[t]);(0===r||0n===s)&&(0===(r=Math.floor(Math.log(0x3ffffff)/Math.log(t)))&&(r=1),s=i**BigInt(r));let n=0,a=e.length,o=a%r;if(0===o&&a>0&&(o=r),o>0){let i=e.substring(n,n+o);this._magnitude=BigInt(this._parseBaseWord(i,t)),n+=o}for(;n=48&&n<=57)s=n-48;else if(n>=65&&n<=90)s=n-65+10;else if(n>=97&&n<=122)s=n-97+10;else throw Error("Invalid character: "+e[r]);if(s>=t)throw Error("Invalid character");i=i*t+s}return 
i}_initializeState(e,t){this._magnitude=e,this._sign=0n===e?0:t,this._finishInitialization()}_finishInitialization(){if(0n===this._magnitude)this._nominalWordLength=1,this._sign=0;else{let e=this._magnitude.toString(2).length;this._nominalWordLength=Math.max(1,Math.ceil(e/l.wordSize))}}assert(e,t="Assertion failed"){if(!e)throw Error(t)}initNumber(e,t="be"){if(this.assert(BigInt(Math.abs(e))<=l.MAX_NUMBER_CONSTRUCTOR_MAG_BIGINT,"The number is larger than 2 ^ 53 (unsafe)"),this.assert(e%1==0,"Number must be an integer for BigNumber conversion"),this._initializeState(BigInt(Math.abs(e)),+(e<0)),"le"===t){let e=this._sign,t=this.toArray("be");this.initArray(t,"le"),this._sign=e,this.normSign()}return this}initArray(e,t){if(0===e.length)return this._initializeState(0n,0),this;let i=0n;if("be"===t)for(let t=0;t=0;t--)i=i<<8n|BigInt(255&e[t]);return this._initializeState(i,0),this}copy(e){e._magnitude=this._magnitude,e._sign=this._sign,e._nominalWordLength=this._nominalWordLength,e.red=this.red}static move(e,t){e._magnitude=t._magnitude,e._sign=t._sign,e._nominalWordLength=t._nominalWordLength,e.red=t.red}clone(){let e=new l(0n);return this.copy(e),e}expand(e){return this.assert(e>=0,"Expand size must be non-negative"),this._nominalWordLength=Math.max(this._nominalWordLength,e,1),this}strip(){return this._finishInitialization(),this.normSign()}normSign(){return 0n===this._magnitude&&(this._sign=0),this}inspect(){return(null!==this.red?""}_getMinimalHex(){return 0n===this._magnitude?"0":this._magnitude.toString(16)}toString(e=10,t=1){if(16===e||"hex"===e){let e=this._getMinimalHex();if(t>1)for("0"!==e&&e.length%2!=0&&(e="0"+e);e.length%t!=0;)e="0"+e;return(this.isNeg()?"-":"")+e}if("number"!=typeof e||e<2||e>36||e%1!=0)throw Error("Base should be an integer between 2 and 36");return this.toBaseString(e,t)}toBaseString(e,t){if(0n===this._magnitude){let e="0";if(t>1)for(;e.length0n;){let t=n%r;n/=r;let a=this._bigIntToStringInBase(t,e);if(n>0n){let 
e=i-a.length;s=e>0&&e0?"0".repeat(e)+a+s:a+s}else s=a+s}if(t>0)for(;s.lengthl.MAX_SAFE_INTEGER_BIGINT||e=0&&r=0&&r>=8n,r+=s}}toArray(e="be",t){this.strip();let i=this.byteLength(),r=t??Math.max(1,i);this.assert(i<=r,"byte array longer than desired length"),this.assert(r>0,"Requested array length <= 0");let s=Array(r).fill(0);return 0n===this._magnitude&&r>0?s:0n===this._magnitude&&0===r?[]:(this.toArrayLikeGeneric(s,"le"===e),s)}bitLength(){return 0n===this._magnitude?0:this._magnitude.toString(2).length}static toBitArray(e){let t=e.bitLength();if(0===t)return[];let i=Array(t),r=e._magnitude;for(let e=0;e>BigInt(e)&1n)!==0n);return i}toBitArray(){return l.toBitArray(this)}zeroBits(){if(0n===this._magnitude)return 0;let e=0,t=this._magnitude;for(;(1n&t)===0n&&0n!==t;)e++,t>>=1n;return e}byteLength(){return 0n===this._magnitude?0:Math.ceil(this.bitLength()/8)}_getSignedValue(){return 1===this._sign?-this._magnitude:this._magnitude}_setValueFromSigned(e){e<0n?(this._magnitude=-e,this._sign=1):(this._magnitude=e,this._sign=0),this._finishInitialization(),this.normSign()}toTwos(e){this.assert(e>=0);let t=BigInt(e),i=this._getSignedValue();1===this._sign&&0n!==this._magnitude&&(i=(1n<=0);let t=BigInt(e),i=this._magnitude;if(e>0&&(i>>t-1n&1n)!==0n&&0===this._sign){let e=new l(0n);return e._setValueFromSigned(i-(1n<e^t),s=this._nominalWordLength;return r&&(s=Math.max(this.length,e.length)),this._magnitude=i,this._finishInitialization(),r&&(this._nominalWordLength=Math.max(this._nominalWordLength,s)),this.strip()}iuor(e){return this._iuop(e,(e,t)=>e|t)}iuand(e){return this._iuop(e,(e,t)=>e&t)}iuxor(e){return this._iuop(e,(e,t)=>e^t)}_iop(e,t){return this.assert(0===this._sign&&0===e._sign),this._iuop(e,t)}ior(e){return this._iop(e,(e,t)=>e|t)}iand(e){return this._iop(e,(e,t)=>e&t)}ixor(e){return this._iop(e,(e,t)=>e^t)}_uop_new(e,t){return this.length>=e.length?this.clone()[t](e):e.clone()[t](this)}or(e){return 
this.assert(0===this._sign&&0===e._sign),this._uop_new(e,"iuor")}uor(e){return this._uop_new(e,"iuor")}and(e){return this.assert(0===this._sign&&0===e._sign),this._uop_new(e,"iuand")}uand(e){return this._uop_new(e,"iuand")}xor(e){return this.assert(0===this._sign&&0===e._sign),this._uop_new(e,"iuxor")}uxor(e){return this._uop_new(e,"iuxor")}inotn(e){this.assert("number"==typeof e&&e>=0);let t=BigInt(e);this._magnitude=~this._magnitude&(1n<=0);let i=BigInt(e);1===t||!0===t?this._magnitude|=1n<=0),0===e)?this:(this._magnitude<<=BigInt(e),this._finishInitialization(),this.strip())}ishln(e){return this.assert(0===this._sign,"ishln requires positive number"),this.iushln(e)}iushrn(e,t,i){if(this.assert("number"==typeof e&&e>=0),0===e)return null!=i&&i._initializeState(0n,0),this;if(null!=i){let t=(1n<>=BigInt(e),this._finishInitialization(),this.strip()}ishrn(e,t,i){return this.assert(0===this._sign,"ishrn requires positive number"),this.iushrn(e,t,i)}shln(e){return this.clone().ishln(e)}ushln(e){return this.clone().iushln(e)}shrn(e){return this.clone().ishrn(e)}ushrn(e){return this.clone().iushrn(e)}testn(e){return this.assert("number"==typeof e&&e>=0),(this._magnitude>>BigInt(e)&1n)!==0n}imaskn(e){this.assert("number"==typeof e&&e>=0),this.assert(0===this._sign,"imaskn works only with positive numbers");let t=BigInt(e);this._magnitude&=0n===t?0n:(1n<=(i<0n?-i:i)&&(t>0n&&i>0n||t<0n&&i<0n?r+=1n:r-=1n);let n=new l(0n);return n._setValueFromSigned(r),n}modrn(e){this.assert(0!==e,"Division by zero in modrn");let t=BigInt(Math.abs(e));if(0n===t)throw Error("Division by zero in modrn");let i=this._magnitude%t;return e<0?Number(-i):Number(i)}idivn(e){return this.assert(0!==e),this.assert(Math.abs(e)<=l.MAX_IMULN_ARG,"num is too large"),this._setValueFromSigned(this._getSignedValue()/BigInt(e)),this}divn(e){return this.clone().idivn(e)}egcd(e){this.assert(0===e._sign,"p must not be negative"),this.assert(!e.isZero(),"p must not be zero");let 
t=this._getSignedValue(),i=e._magnitude,r=1n,s=0n,n=0n,a=1n;for(;0n!==i;){let e=t/i,o=i;i=t%i,t=o,o=s,s=r-e*s,r=o,o=a,a=n-e*a,n=o}let o=new l(0n);o._setValueFromSigned(r);let c=new l(0n);c._setValueFromSigned(n);let h=new l(0n);return h._initializeState(t<0n?-t:t,0),{a:o,b:c,gcd:h}}gcd(e){let t=this._magnitude,i=e._magnitude;if(0n===t){let e=new l(0n);return e._setValueFromSigned(i),e.iabs()}if(0n===i){let e=new l(0n);return e._setValueFromSigned(t),e.iabs()}for(;0n!==i;){let e=t%i;t=i,i=e}let r=new l(0n);return r._initializeState(t,0),r}invm(e){this.assert(!e.isZero()&&0===e._sign,"Modulus for invm must be positive and non-zero");let t=this.egcd(e);if(!t.gcd.eqn(1))throw Error("Inverse does not exist (numbers are not coprime).");return t.a.umod(e)}isEven(){return this._magnitude%2n===0n}isOdd(){return this._magnitude%2n===1n}andln(e){return this.assert(e>=0),Number(this._magnitude&BigInt(e))}bincn(e){this.assert("number"==typeof e&&e>=0);let t=1n<i)}cmp(e){let t=this._getSignedValue(),i=e._getSignedValue();return ti)}ucmp(e){return this._magnitudee._magnitude)}gtn(e){return 1===this.cmpn(e)}gt(e){return 1===this.cmp(e)}gten(e){return this.cmpn(e)>=0}gte(e){return this.cmp(e)>=0}ltn(e){return -1===this.cmpn(e)}lt(e){return -1===this.cmp(e)}lten(e){return 0>=this.cmpn(e)}lte(e){return 0>=this.cmp(e)}eqn(e){return 0===this.cmpn(e)}eq(e){return 0===this.cmp(e)}toRed(e){return this.assert(null==this.red,"Already a number in reduction context"),this.assert(0===this._sign,"toRed works only with positives"),e.convertTo(this).forceRed(e)}fromRed(){return this.assert(this.red,"fromRed works only with numbers in reduction context"),this.red.convertFrom(this)}forceRed(e){return this.red=e,this}redAdd(e){return this.assert(this.red,"redAdd works only with red numbers"),this.red.add(this,e)}redIAdd(e){return this.assert(this.red,"redIAdd works only with red numbers"),this.red.iadd(this,e)}redSub(e){return this.assert(this.red,"redSub works only with red 
numbers"),this.red.sub(this,e)}redISub(e){return this.assert(this.red,"redISub works only with red numbers"),this.red.isub(this,e)}redShl(e){return this.assert(this.red,"redShl works only with red numbers"),this.red.shl(this,e)}redMul(e){return this.assert(this.red,"redMul works only with red numbers"),this.red.verify2(this,e),this.red.mul(this,e)}redIMul(e){return this.assert(this.red,"redIMul works only with red numbers"),this.red.verify2(this,e),this.red.imul(this,e)}redSqr(){return this.assert(this.red,"redSqr works only with red numbers"),this.red.verify1(this),this.red.sqr(this)}redISqr(){return this.assert(this.red,"redISqr works only with red numbers"),this.red.verify1(this),this.red.isqr(this)}redSqrt(){return this.assert(this.red,"redSqrt works only with red numbers"),this.red.verify1(this),this.red.sqrt(this)}redInvm(){return this.assert(this.red,"redInvm works only with red numbers"),this.red.verify1(this),this.red.invm(this)}redNeg(){return this.assert(this.red,"redNeg works only with red numbers"),this.red.verify1(this),this.red.neg(this)}redPow(e){return this.assert(null!=this.red&&null==e.red,"redPow(normalNum)"),this.red.verify1(this),this.red.pow(this,e)}static fromHex(e,t){let i="be";return("little"===t||"le"===t)&&(i="le"),new l(e,16,i)}toHex(e=0){if(this.isZero()&&0===e)return"";let t=this._getMinimalHex();"0"!==t&&t.length%2!=0&&(t="0"+t);let i=2*e;for(;t.length=0;i--){let t=e[i];r+=(t<16?"0":"")+t.toString(16)}}else{let t=e[0];(128&t)!=0&&(i=1,t&=127),r+=(t<16?"0":"")+t.toString(16);for(let t=1;t>>24,r=8388607&e,s=(8388608&e)!=0;if(t&&s)throw Error("negative bit set");if(0===i&&0===r){if(s&&t)throw Error("negative bit set for zero value");return new l(0n)}let n=new l(r);return i<=3?n.iushrn((3-i)*8):n.iushln((i-3)*8),s&&n.ineg(),n}toBits(){let e;if(this.strip(),this.isZero()&&!this.isNeg())return 0;let t=this.isNeg(),i=this.abs(),r=i.toArray("be"),s=0;for(;s>>=8,n++);let a=n<<24|e;return t&&(a|=8388608),a>>>0}static 
fromScriptNum(e,t=!1,i){if(void 0!==i&&e.length>i)throw Error("script number overflow");if(0===e.length)return new l(0n);if(t&&(127&e[e.length-1])==0&&(e.length<=1||(128&e[e.length-2])==0))throw Error("non-minimally encoded script number");return l.fromSm(e,"little")}toScriptNum(){return this.toSm("little")}_invmp(e){let t;this.assert(0===e._sign,"p must not be negative for _invmp"),this.assert(!e.isZero(),"p must not be zero for _invmp");let i=this.umod(e)._magnitude,r=e._magnitude,s=1n,n=0n,a=e._magnitude;for(;i>1n&&r>1n;){let e=0;for(;(i>>BigInt(e)&1n)===0n;)e++;if(e>0){i>>=BigInt(e);for(let t=0;t>=1n}let t=0;for(;(r>>BigInt(t)&1n)===0n;)t++;if(t>0){r>>=BigInt(t);for(let e=0;e>=1n}i>=r?(i-=r,s-=n):(r-=i,n-=s)}if(1n===i)t=s;else if(1n===r)t=n;else if(0n===i&&1n===r)t=n;else if(0n===r&&1n===i)t=s;else throw Error("_invmp: GCD is not 1, inverse does not exist. aVal="+i+", bVal="+r);(t%=a)<0n&&(t+=a);let o=new l(0n);return o._initializeState(t,0),o}mulTo(e,t){return t._magnitude=this._magnitude*e._magnitude,t._sign=0n===t._magnitude?0:this._sign^e._sign,t._nominalWordLength=this.length+e.length,t.red=null,t.normSign(),t}}class d{name;p;k;n;tmp;constructor(e,t){this.name=e,this.p=new l(t,16),this.n=this.p.bitLength(),this.k=new l(BigInt(1)).iushln(this.n).isub(this.p),this.tmp=this._tmp()}_tmp(){let e=new l(BigInt(0)),t=Math.ceil(this.n/l.wordSize);return e.expand(Math.max(1,t)),e}ireduce(e){let t;do this.split(e,this.tmp),this.imulK(e),e.iadd(this.tmp),t=e.bitLength();while(t>this.n);let i=t0&&e.isub(this.p),e.strip(),e}split(e,t){e.iushrn(this.n,0,t)}imulK(e){return e.imul(this.k)}}class f extends d{constructor(){super("k256","ffffffff ffffffff ffffffff ffffffff ffffffff ffffffff fffffffe fffffc2f")}split(e,t){let i=e.words,r=e.length,s=Math.min(r,9),n=Array(s+ +(r>9)).fill(0);for(let e=0;e>>22),o=t}o>>>=22,l=0&&i.isub(this.m),i.forceRed(this)}iadd(e,t){this.verify2(e,t);let i=e.iadd(t);return i.cmp(this.m)>=0&&i.isub(this.m),i}sub(e,t){this.verify2(e,t);let 
i=e.sub(t);return 0>i.cmpn(0)&&i.iadd(this.m),i.forceRed(this)}isub(e,t){this.verify2(e,t);let i=e.isub(t);return 0>i.cmpn(0)&&i.iadd(this.m),i}shl(e,t){return this.verify1(e),this.imod(e.ushln(t))}imul(e,t){return this.verify2(e,t),this.imod(e.imul(t))}mul(e,t){return this.verify2(e,t),this.imod(e.mul(t))}isqr(e){return this.imul(e,e.clone())}sqr(e){return this.mul(e,e)}sqrt(e){if(e.isZero())return e.clone();let t=this.m.andln(3);if(this.assert(t%2==1),3===t){let t=this.m.add(new l(1)).iushrn(2);return this.pow(e,t)}let i=this.m.subn(1),r=0;for(;!i.isZero()&&0===i.andln(1);)r++,i.iushrn(1);this.assert(!i.isZero());let s=new l(1).toRed(this),n=s.redNeg(),a=this.m.subn(1).iushrn(1),o=this.m.bitLength(),c=new l(2*o*o).toRed(this);for(;0!==this.pow(c,a).cmp(n);)c.redIAdd(n);let h=this.pow(c,i),d=this.pow(e,i.addn(1).iushrn(1)),f=this.pow(e,i),u=r;for(;0!==f.cmp(s);){let e=f,t=0;for(;0!==e.cmp(s);t++)e=e.redSqr();this.assert(t=0;r--){let e=t.words[r];for(let t=o-1;t>=0;t--){let o=e>>t&1;if(s!==i[0]&&(s=this.sqr(s)),0===o&&0===n){a=0;continue}n<<=1,n|=o,(4==++a||0===r&&0===t)&&(s=this.mul(s,i[n]),a=0,n=0)}o=26}return s}convertTo(e){let t=e.umod(this.m);return t===e?t.clone():t}convertFrom(e){let t=e.clone();return t.red=null,t}}class p extends u{shift;r;r2;rinv;minv;constructor(e){super(e),this.shift=this.m.bitLength(),this.shift%26!=0&&(this.shift+=26-this.shift%26),this.r=new l(1).iushln(this.shift),this.r2=this.imod(this.r.sqr()),this.rinv=this.r._invmp(this.m),this.minv=this.rinv.mul(this.r).isubn(1).div(this.m),this.minv=this.minv.umod(this.r),this.minv=this.r.sub(this.minv)}convertTo(e){return this.imod(e.ushln(this.shift))}convertFrom(e){let t=this.imod(e.mul(this.rinv));return t.red=null,t}imul(e,t){if(e.isZero()||t.isZero())return e.words[0]=0,e.length=1,e;let i=e.imul(t),r=i.maskn(this.shift).mul(this.minv).imaskn(this.shift).mul(this.m),s=i.isub(r).iushrn(this.shift),n=s;return 
s.cmp(this.m)>=0?n=s.isub(this.m):0>s.cmpn(0)&&(n=s.iadd(this.m)),n.forceRed(this)}mul(e,t){if(e.isZero()||t.isZero())return new l(0).forceRed(this);let i=e.mul(t),r=i.maskn(this.shift).mul(this.minv).imaskn(this.shift).mul(this.m),s=i.isub(r).iushrn(this.shift),n=s;return s.cmp(this.m)>=0?n=s.isub(this.m):0>s.cmpn(0)&&(n=s.iadd(this.m)),n.forceRed(this)}invm(e){return this.imod(e._invmp(this.m).mul(this.r2)).forceRed(this)}}class b{curve;type;precomputed;constructor(e){this.curve=new e5,this.type=e,this.precomputed=null}}class g extends b{x;y;z;zOne;constructor(e,t,i){super("jacobian"),null===e&&null===t&&null===i?(this.x=this.curve.one,this.y=this.curve.one,this.z=new l(0)):(l.isBN(e)||(e=new l(e,16)),this.x=e,l.isBN(t)||(t=new l(t,16)),this.y=t,l.isBN(i)||(i=new l(i,16)),this.z=i),null==this.x.red&&(this.x=this.x.toRed(this.curve.red)),null==this.y.red&&(this.y=this.y.toRed(this.curve.red)),null==this.z.red&&(this.z=this.z.toRed(this.curve.red)),this.zOne=this.z===this.curve.one}toP(){if(this.isInfinity())return new e4(null,null);let e=this.z.redInvm(),t=e.redSqr();return new e4(this.x.redMul(t),this.y.redMul(t).redMul(e))}neg(){return new g(this.x,this.y.redNeg(),this.z)}add(e){if(this.isInfinity())return e;if(e.isInfinity())return this;let t=e.z.redSqr(),i=this.z.redSqr(),r=this.x.redMul(t),s=e.x.redMul(i),n=this.y.redMul(t.redMul(e.z)),a=e.y.redMul(i.redMul(this.z)),o=r.redSub(s),c=n.redSub(a);if(0===o.cmpn(0))if(0!==c.cmpn(0))return new g(null,null,null);else return this.dbl();let h=o.redSqr(),l=h.redMul(o),d=r.redMul(h),f=c.redSqr().redIAdd(l).redISub(d).redISub(d),u=c.redMul(d.redISub(f)).redISub(n.redMul(l));return new g(f,u,this.z.redMul(e.z).redMul(o))}mixedAdd(e){if(this.isInfinity())return e.toJ();if(e.isInfinity())return this;if(null===e.x||null===e.y)throw Error("Point coordinates cannot be null");let 
t=this.z.redSqr(),i=this.x,r=e.x.redMul(t),s=this.y,n=e.y.redMul(t).redMul(this.z),a=i.redSub(r),o=s.redSub(n);if(0===a.cmpn(0))if(0!==o.cmpn(0))return new g(null,null,null);else return this.dbl();let c=a.redSqr(),h=c.redMul(a),l=i.redMul(c),d=o.redSqr().redIAdd(h).redISub(l).redISub(l),f=o.redMul(l.redISub(d)).redISub(s.redMul(h));return new g(d,f,this.z.redMul(a))}dblp(e){if(0===e||this.isInfinity())return this;if(void 0===e)return this.dbl();let t=this;for(let i=0;ir.cmp(this.curve.p)&&(r.iadd(this.curve.n),!(r.cmp(this.curve.p)>=0));)if(i.redIAdd(s),0===this.x.cmp(i))return!0;return!1}inspect(){return this.isInfinity()?"":""}isInfinity(){return 0===this.z.cmpn(0)}}let y=(e,t="Hash assertion failed")=>{if(!e)throw Error(t)};class m{pending;pendingTotal;blockSize;outSize;endian;_delta8;_delta32;padLength;hmacStrength;constructor(e,t,i,r){this.pending=null,this.pendingTotal=0,this.blockSize=e,this.outSize=t,this.hmacStrength=i,this.padLength=r/8,this.endian="big",this._delta8=this.blockSize/8,this._delta32=this.blockSize/32}_update(e,t){throw Error("Not implemented")}_digest(){throw Error("Not implemented")}_digestHex(){throw Error("Not implemented")}update(e,t){if(e=w(e,t),null==this.pending?this.pending=e:this.pending=this.pending.concat(e),this.pendingTotal+=e.length,this.pending.length>=this._delta8){let t=(e=this.pending).length%this._delta8;this.pending=e.slice(e.length-t,e.length),0===this.pending.length&&(this.pending=null),e=function(e,t,i,r){let s=i-0;y(s%4==0);let n=Array(s/4);for(let i=0,s=t;i>>0}return n}(e,0,e.length-t,this.endian);for(let t=0;t>>24&255,n[e++]=i>>>16&255,n[e++]=i>>>8&255,n[e++]=255&i}else for(t=8,n[e++]=255&i,n[e++]=i>>>8&255,n[e++]=i>>>16&255,n[e++]=i>>>24&255,n[e++]=0,n[e++]=0,n[e++]=0,n[e++]=0;t>6|192,i[t++]=63&a|128;else{var 
r,s;(r=e,s=n,(64512&r.charCodeAt(s))!=55296||s<0||s+1>=r.length?1:(64512&r.charCodeAt(s+1))!=56320)?i[t++]=a>>12|224:(a=65536+((1023&a)<<10)+(1023&e.charCodeAt(++n)),i[t++]=a>>18|240,i[t++]=a>>12&63|128),i[t++]=a>>6&63|128,i[t++]=63&a|128}}}else{(e=e.replace(/[^a-z0-9]+/gi,"")).length%2!=0&&(e="0"+e);for(let t=0;t>>24|r>>>8&65280|r<<8&0xff0000|(255&r)<<24)>>>0),i+=function(e){if(7===e.length)return"0"+e;if(6===e.length)return"00"+e;if(5===e.length)return"000"+e;if(4===e.length)return"0000"+e;if(3===e.length)return"00000"+e;else if(2===e.length)return"000000"+e;else if(1===e.length)return"0000000"+e;else return e}(n.toString(16))}return i}function k(e){let t="";for(let i of e)t+=i.toString(16).padStart(2,"0");return t}function v(e,t){let i=Array(4*e.length);for(let r=0,s=0;r>>24,i[s+1]=n>>>16&255,i[s+2]=n>>>8&255,i[s+3]=255&n):(i[s+3]=n>>>24,i[s+2]=n>>>16&255,i[s+1]=n>>>8&255,i[s]=255&n)}return i}function S(e,t){return e>>>t|e<<32-t}function _(e,t){return e<>>32-t}function E(e,t){return e+t>>>0}let x=[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,7,4,13,1,10,6,15,3,12,0,9,5,2,14,11,8,3,10,14,4,9,15,8,1,2,7,0,6,13,11,5,12,1,9,11,10,0,8,12,4,13,3,7,15,14,5,6,2,4,0,5,9,7,12,2,10,14,1,3,8,11,6,15,13],O=[5,14,7,0,9,2,11,4,13,6,15,8,1,10,3,12,6,11,3,7,0,13,5,10,14,15,8,12,4,9,1,2,15,5,1,3,7,14,6,9,11,8,12,2,10,0,4,13,8,6,4,1,3,11,15,0,5,12,2,13,9,7,10,14,12,15,10,4,1,5,8,7,6,2,13,14,0,3,9,11],P=[11,14,15,12,5,8,7,9,11,13,14,15,6,7,9,8,7,6,8,13,11,9,7,15,7,12,15,9,11,7,13,12,11,13,6,7,14,9,13,15,14,8,13,6,5,12,7,5,11,12,14,15,14,15,9,8,9,14,5,6,8,6,5,12,9,15,5,11,6,8,13,12,5,12,13,14,11,8,5,6],N=[8,9,9,11,13,15,15,5,7,7,8,11,14,14,12,6,9,13,15,7,12,8,9,11,7,7,12,7,6,15,13,11,9,7,15,11,8,6,6,14,12,13,5,14,13,13,7,5,15,5,8,11,14,14,6,14,6,9,12,9,12,5,15,8,8,5,12,9,12,5,14,6,8,13,6,5,15,13,11,11];function A(e,t,i,r){return e<=15?t^i^r:e<=31?t&i|~t&r:e<=47?(t|~i)^r:e<=63?t&r|i&~r:t^(i|~r)}class T extends 
m{h;constructor(){super(512,160,192,64),this.endian="little",this.h=[0x67452301,0xefcdab89,0x98badcfe,0x10325476,0xc3d2e1f0],this.endian="little"}_update(e,t){var i,r,s,n,a,o;let c,h=this.h[0],l=this.h[1],d=this.h[2],f=this.h[3],u=this.h[4],p=h,b=l,g=d,y=f,m=u;for(let w=0;w<80;w++){c=E(_((i=h,r=A(w,l,d,f),i+r+e[x[w]+t]+((a=w)<=15?0:a<=31?0x5a827999:a<=47?0x6ed9eba1:a<=63?0x8f1bbcdc:0xa953fd4e)>>>0),P[w]),u),h=u,u=f,f=_(d,10),d=l,l=c,c=E(_((s=p,n=A(79-w,b,g,y),s+n+e[O[w]+t]+((o=w)<=15?0x50a28be6:o<=31?0x5c4dd124:o<=47?0x6d703ef3:0x7a6d76e9*!!(o<=63))>>>0),N[w]),m),p=m,m=y,y=_(g,10),g=b,b=c}c=this.h[1]+d+y>>>0,this.h[1]=this.h[2]+f+m>>>0,this.h[2]=this.h[3]+u+p>>>0,this.h[3]=this.h[4]+h+b>>>0,this.h[4]=this.h[0]+l+g>>>0,this.h[0]=c}_digest(){return v(this.h,"little")}_digestHex(){return I(this.h,"little")}}class C{h;constructor(){this.h=new ep}update(e,t){let i=Uint8Array.from(w(e,t));return this.h.update(i),this}digest(){return Array.from(this.h.digest())}digestHex(){return k(this.h.digest())}}class R extends m{h;W;k;constructor(){super(512,160,80,64),this.k=[0x5a827999,0x6ed9eba1,0x8f1bbcdc,0xca62c1d6],this.h=[0x67452301,0xefcdab89,0x98badcfe,0x10325476,0xc3d2e1f0],this.W=Array(80)}_update(e,t){let i,r=this.W;for(void 0===t&&(t=0),i=0;i<16;i++)r[i]=e[t+i];for(;i>>0);c=o,o=a,a=_(n,30),n=s,s=t}this.h[0]=E(this.h[0],s),this.h[1]=E(this.h[1],n),this.h[2]=E(this.h[2],a),this.h[3]=E(this.h[3],o),this.h[4]=E(this.h[4],c)}_digest(){return v(this.h,"big")}_digestHex(){return I(this.h,"big")}}class B{h;blockSize=64;outSize=32;constructor(e){let t=Uint8Array.from(w(e,"hex"));this.h=new e_(eb,t)}update(e,t){return this.h.update(Uint8Array.from(w(e,t))),this}digest(){return Array.from(this.h.digest())}digestHex(){return k(this.h.digest())}}class L{h;blockSize=128;outSize=32;constructor(e){let t=Uint8Array.from(w(e,"hex"));this.h=new e_(eS,t)}update(e,t){return this.h.update(Uint8Array.from(w(e,t))),this}digest(){return Array.from(this.h.digest())}digestHex(){return 
k(this.h.digest())}}let D=(e,t)=>new C().update(e,t).digest(),M=(e,t)=>{let i=new C().update(e,t).digest();return new C().update(i).digest()},F=(e,t)=>{let i=new C().update(e,t).digest();return new T().update(i).digest()},V=(e,t,i)=>new B(e).update(t,i).digest(),H=(e,t,i)=>new L(e).update(t,i).digest();function q(e){if(!Number.isSafeInteger(e)||e<0)throw Error(`positive integer expected, got ${e}`)}function U(e,...t){if(!(e instanceof Uint8Array||ArrayBuffer.isView(e)&&"Uint8Array"===e.constructor.name))throw Error("Uint8Array expected");if(t.length>0&&!t.includes(e.length)){let i=t.join(",");throw Error(`Uint8Array expected of length ${i}, got length=${e.length}`)}}function K(e,t=!0){if(!0===e.destroyed)throw Error("Hash instance has been destroyed");if(t&&!0===e.finished)throw Error("Hash#digest() has already been called")}function $(...e){for(let t=0;te().update(z(t)).digest(),i=e();return t.outputLen=i.outputLen,t.blockLen=i.blockLen,t.create=()=>e(),t}let X=BigInt(0x100000000-1),Y=BigInt(32),J=(e,t,i)=>e>>>i,Z=(e,t,i)=>e<<32-i|t>>>i,Q=(e,t,i)=>e>>>i|t<<32-i,ee=(e,t,i)=>e<<32-i|t>>>i,et=(e,t,i)=>e<<64-i|t>>>i-32,ei=(e,t,i)=>e>>>i-32|t<<64-i;function er(e,t,i,r){let s=(t>>>0)+(r>>>0);return{h:e+i+(s/0x100000000|0)|0,l:0|s}}let es=(e,t,i)=>(e>>>0)+(t>>>0)+(i>>>0),en=(e,t,i,r)=>t+i+r+(e/0x100000000|0)|0,ea=(e,t,i,r)=>(e>>>0)+(t>>>0)+(i>>>0)+(r>>>0),eo=(e,t,i,r,s)=>t+i+r+s+(e/0x100000000|0)|0,ec=(e,t,i,r,s)=>(e>>>0)+(t>>>0)+(i>>>0)+(r>>>0)+(s>>>0),eh=(e,t,i,r,s,n)=>t+i+r+s+n+(e/0x100000000|0)|0;class el extends W{blockLen;outputLen;padOffset;isLE;buffer;view;finished=!1;length=0;pos=0;destroyed=!1;constructor(e,t,i,r){super(),this.blockLen=e,this.outputLen=t,this.padOffset=i,this.isLE=r,this.buffer=new Uint8Array(e),this.view=j(this.buffer)}update(e){K(this),U(e=z(e));let{view:t,buffer:i,blockLen:r}=this,s=e.length;for(let n=0;ns-a&&(this.process(r,0),a=0);for(let 
e=a;e>s&n),o=Number(i&n),c=4*!!r,h=4*!r;e.setUint32(t+c,a,r),e.setUint32(t+h,o,r)})(r,s-8,BigInt(8*this.length),n),this.process(r,0);let o=j(e),c=this.outputLen;if(c%4!=0)throw Error("_sha2: outputLen should be aligned to 32bit");let h=c/4,l=this.get();if(h>l.length)throw Error("_sha2: outputLen bigger than state");for(let e=0;e>>3,s=S(i,17)^S(i,19)^i>>>10;eu[e]=E(E(r,eu[e-7]),E(s,eu[e-16]))}let{A:d,B:f,C:u,D:p,E:b,F:g,G:y,H:m}=this;for(let e=0;e<64;e++){let t=(s=m,n=S(i=b,6)^S(i,11)^S(i,25),a=(r=b)&g^~r&y,s+n+a+ef[e]+eu[e]>>>0),w=E(S(o=d,2)^S(o,13)^S(o,22),(c=d)&(h=f)^c&(l=u)^h&l);m=y,y=g,g=b,b=E(p,t),p=u,u=f,f=d,d=E(t,w)}this.A=E(this.A,d),this.B=E(this.B,f),this.C=E(this.C,u),this.D=E(this.D,p),this.E=E(this.E,b),this.F=E(this.F,g),this.G=E(this.G,y),this.H=E(this.H,m)}roundClean(){$(eu)}destroy(){$(this.buffer),this.set(0,0,0,0,0,0,0,0)}}let eb=G(()=>new ep),eg=Uint32Array.from([0x6a09e667,0xf3bcc908,0xbb67ae85,0x84caa73b,0x3c6ef372,0xfe94f82b,0xa54ff53a,0x5f1d36f1,0x510e527f,0xade682d1,0x9b05688c,0x2b3e6c1f,0x1f83d9ab,0xfb41bd6b,0x5be0cd19,0x137e2179]),ey=function(e,t=!1){let i=e.length,r=new Uint32Array(i),s=new Uint32Array(i);for(let 
n=0;n>Y&X)}:{h:0|Number(e>>Y&X),l:0|Number(e&X)}}(e[n],t);r[n]=i,s[n]=a}return[r,s]}(["0x428a2f98d728ae22","0x7137449123ef65cd","0xb5c0fbcfec4d3b2f","0xe9b5dba58189dbbc","0x3956c25bf348b538","0x59f111f1b605d019","0x923f82a4af194f9b","0xab1c5ed5da6d8118","0xd807aa98a3030242","0x12835b0145706fbe","0x243185be4ee4b28c","0x550c7dc3d5ffb4e2","0x72be5d74f27b896f","0x80deb1fe3b1696b1","0x9bdc06a725c71235","0xc19bf174cf692694","0xe49b69c19ef14ad2","0xefbe4786384f25e3","0x0fc19dc68b8cd5b5","0x240ca1cc77ac9c65","0x2de92c6f592b0275","0x4a7484aa6ea6e483","0x5cb0a9dcbd41fbd4","0x76f988da831153b5","0x983e5152ee66dfab","0xa831c66d2db43210","0xb00327c898fb213f","0xbf597fc7beef0ee4","0xc6e00bf33da88fc2","0xd5a79147930aa725","0x06ca6351e003826f","0x142929670a0e6e70","0x27b70a8546d22ffc","0x2e1b21385c26c926","0x4d2c6dfc5ac42aed","0x53380d139d95b3df","0x650a73548baf63de","0x766a0abb3c77b2a8","0x81c2c92e47edaee6","0x92722c851482353b","0xa2bfe8a14cf10364","0xa81a664bbc423001","0xc24b8b70d0f89791","0xc76c51a30654be30","0xd192e819d6ef5218","0xd69906245565a910","0xf40e35855771202a","0x106aa07032bbd1b8","0x19a4c116b8d2d0c8","0x1e376c085141ab53","0x2748774cdf8eeb99","0x34b0bcb5e19b48a8","0x391c0cb3c5c95a63","0x4ed8aa4ae3418acb","0x5b9cca4f7763e373","0x682e6ff3d6b2b8a3","0x748f82ee5defb2fc","0x78a5636f43172f60","0x84c87814a1f0ab72","0x8cc702081a6439ec","0x90befffa23631e28","0xa4506cebde82bde9","0xbef9a3f7b2c67915","0xc67178f2e372532b","0xca273eceea26619c","0xd186b8c721c0c207","0xeada7dd6cde0eb1e","0xf57d4f7fee6ed178","0x06f067aa72176fba","0x0a637dc5a2c898a6","0x113f9804bef90dae","0x1b710b35131c471b","0x28db77f523047d84","0x32caab7b40c72493","0x3c9ebe0a15c9bebc","0x431d67c49c100d4c","0x4cc5d4becb3e42b6","0x597f299cfc657e2a","0x5fcb6fab3ad6faec","0x6c44198c4a475817"].map(e=>BigInt(e))),em=ey[0],ew=ey[1],eI=new Uint32Array(80),ek=new Uint32Array(80);class ev extends 
el{Ah=0|eg[0];Al=0|eg[1];Bh=0|eg[2];Bl=0|eg[3];Ch=0|eg[4];Cl=0|eg[5];Dh=0|eg[6];Dl=0|eg[7];Eh=0|eg[8];El=0|eg[9];Fh=0|eg[10];Fl=0|eg[11];Gh=0|eg[12];Gl=0|eg[13];Hh=0|eg[14];Hl=0|eg[15];constructor(e=64){super(128,e,16,!1)}get(){let{Ah:e,Al:t,Bh:i,Bl:r,Ch:s,Cl:n,Dh:a,Dl:o,Eh:c,El:h,Fh:l,Fl:d,Gh:f,Gl:u,Hh:p,Hl:b}=this;return[e,t,i,r,s,n,a,o,c,h,l,d,f,u,p,b]}set(e,t,i,r,s,n,a,o,c,h,l,d,f,u,p,b){this.Ah=0|e,this.Al=0|t,this.Bh=0|i,this.Bl=0|r,this.Ch=0|s,this.Cl=0|n,this.Dh=0|a,this.Dl=0|o,this.Eh=0|c,this.El=0|h,this.Fh=0|l,this.Fl=0|d,this.Gh=0|f,this.Gl=0|u,this.Hh=0|p,this.Hl=0|b}process(e,t){for(let i=0;i<16;i++,t+=4)eI[i]=e.getUint32(t),ek[i]=e.getUint32(t+=4);for(let e=16;e<80;e++){let t=0|eI[e-15],i=0|ek[e-15],r=Q(t,i,1)^Q(t,i,8)^J(t,i,7),s=ee(t,i,1)^ee(t,i,8)^Z(t,i,7),n=0|eI[e-2],a=0|ek[e-2],o=Q(n,a,19)^et(n,a,61)^J(n,a,6),c=ea(s,ee(n,a,19)^ei(n,a,61)^Z(n,a,6),ek[e-7],ek[e-16]),h=eo(c,r,o,eI[e-7],eI[e-16]);eI[e]=0|h,ek[e]=0|c}let{Ah:i,Al:r,Bh:s,Bl:n,Ch:a,Cl:o,Dh:c,Dl:h,Eh:l,El:d,Fh:f,Fl:u,Gh:p,Gl:b,Hh:g,Hl:y}=this;for(let e=0;e<80;e++){let t=Q(l,d,14)^Q(l,d,18)^et(l,d,41),m=ee(l,d,14)^ee(l,d,18)^ei(l,d,41),w=l&f^~l&p,I=ec(y,m,d&u^~d&b,ew[e],ek[e]),k=eh(I,g,t,w,em[e],eI[e]),v=0|I,S=Q(i,r,28)^et(i,r,34)^et(i,r,39),_=ee(i,r,28)^ei(i,r,34)^ei(i,r,39),E=i&s^i&a^s&a,x=r&n^r&o^n&o;g=0|p,y=0|b,p=0|f,b=0|u,f=0|l,u=0|d,({h:l,l:d}=er(0|c,0|h,0|k,0|v)),c=0|a,h=0|o,a=0|s,o=0|n,s=0|i,n=0|r;let O=es(_,x,v);i=en(O,S,E,k),r=0|O}({h:i,l:r}=er(i,r,this.Ah,this.Al)),({h:s,l:n}=er(s,n,this.Bh,this.Bl)),({h:a,l:o}=er(a,o,this.Ch,this.Cl)),({h:c,l:h}=er(c,h,this.Dh,this.Dl)),({h:l,l:d}=er(l,d,this.Eh,this.El)),({h:f,l:u}=er(f,u,this.Fh,this.Fl)),({h:p,l:b}=er(p,b,this.Gh,this.Gl)),({h:g,l:y}=er(g,y,this.Hh,this.Hl)),this.set(i,r,s,n,a,o,c,h,l,d,f,u,p,b,g,y)}roundClean(){$(eI,ek)}destroy(){$(this.buffer),this.set(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0)}}let eS=G(()=>new ev);class e_ extends 
W{oHash;iHash;blockLen;outputLen;finished=!1;destroyed=!1;constructor(e,t){super(),function(e){if("function"!=typeof e||"function"!=typeof e.create)throw Error("Hash should be wrapped by utils.createHasher");q(e.outputLen),q(e.blockLen)}(e);let i=z(t);if(this.iHash=e.create(),"function"!=typeof this.iHash.update)throw Error("Expected instance of class which extends utils.Hash");this.blockLen=this.iHash.blockLen,this.outputLen=this.iHash.outputLen;let r=this.blockLen,s=new Uint8Array(r);s.set(i.length>r?e.create().update(i).digest():i);for(let e=0;ee.length%2==1?"0"+e:e,ex=e=>{let t="";for(let i of e)t+=eE(i.toString(16));return t},eO=(e,t)=>{if(Array.isArray(e))return e.slice();if(void 0===e)return[];if("string"!=typeof e)return Array.from(e,e=>0|e);switch(t){case"hex":return eP(e);case"base64":return eN(e);default:return function(e){let t=[];for(let i=0;i65535?i++:s>=55296&&s<=57343&&(s=65533),s<=127?t.push(s):s<=2047?t.push(192|s>>6,128|63&s):s<=65535?t.push(224|s>>12,128|s>>6&63,128|63&s):t.push(240|s>>18,128|s>>12&63,128|s>>6&63,128|63&s)}return t}(e)}},eP=e=>{(e=e.replace(/[^a-z0-9]+/gi,"")).length%2!=0&&(e="0"+e);let t=[];for(let i=0;i{let t=[],i=0,r=0;for(let s of e.replace(/=+$/,""))i=i<<6|"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".indexOf(s),(r+=6)>=8&&(r-=8,t.push(i>>r&255),i&=(1<{let t="",i=0;for(let r=0;r0){i--;continue}if(s<=127)t+=String.fromCharCode(s);else if(s>=192&&s<=223)i=1,t+=String.fromCharCode((31&s)<<6|63&e[r+1]);else if(s>=224&&s<=239){let n=e[r+1];i=2,t+=String.fromCharCode((15&s)<<12|(63&n)<<6|63&e[r+2])}else if(s>=240&&s<=247){let n=e[r+1],a=e[r+2],o=e[r+3];i=3;let c=(7&s)<<18|(63&n)<<12|(63&a)<<6|63&o;t+=String.fromCharCode(55296+(c-65536>>10),56320+(c-65536&1023))}}return t},eT=(e,t)=>{switch(t){case"hex":return ex(e);case"utf8":return eA(e);default:return e}};function eC(e){let 
t,i="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",r="";for(t=0;t>2,c=(3&s)<<4|n>>4,h=(15&n)<<2|a>>6,l=63&a;r+=i.charAt(o)+i.charAt(c),r+=t+1{if(""===e||"string"!=typeof e)throw Error(`Expected base58 string but got “${e}”`);let t=e.match(/[IOl0]/gmu);if(null!==t)throw Error(`Invalid base58 character “${t.join("")}”`);let i=e.match(/^1+/gmu),r=null!==i?i[0].length:0,s=(e.length-r)*(Math.log(58)/Math.log(256))+1>>>0;return[...new Uint8Array([...new Uint8Array(r),...(e.match(/./gmu)??[]).map(e=>eR.indexOf(e)).reduce((e,t)=>e=e.map(e=>{let i=58*e+t;return t=i>>8,i}),new Uint8Array(s)).reverse().filter((e=>t=>e=e||t)(!1))])]},eL=e=>{let t=Array(256).fill(-1);for(let e=0;e{let i=M([...t,...e]);return eL(i=[...t,...e,...i.slice(0,4)])},eM=(e,t,i=1)=>{let r=eB(e),s=r.slice(0,i),n=r.slice(i,-4),a=[...s,...n];return a=M(a),r.slice(-4).forEach((e,t)=>{if(e!==a[t])throw Error("Invalid checksum")}),"hex"===t&&(s=ex(s),n=ex(n)),{prefix:s,data:n}};class eF{bufs;length;constructor(e){for(let t of(this.bufs=void 0!==e?e:[],this.length=0,this.bufs))this.length+=t.length}getLength(){return this.length}toArray(){let e=Array(this.length),t=0;for(let i of this.bufs)for(let r of i)e[t++]=r;return e}write(e){return this.bufs.push(e),this.length+=e.length,this}writeReverse(e){let t=Array(e.length);for(let i=0;i>8&255,255&e]),this.length+=2,this}writeInt16BE(e){return this.writeUInt16BE(65535&e)}writeUInt16LE(e){return this.bufs.push([255&e,e>>8&255]),this.length+=2,this}writeInt16LE(e){return this.writeUInt16LE(65535&e)}writeUInt32BE(e){return this.bufs.push([e>>24&255,e>>16&255,e>>8&255,255&e]),this.length+=4,this}writeInt32BE(e){return this.writeUInt32BE(e>>>0)}writeUInt32LE(e){return this.bufs.push([255&e,e>>8&255,e>>16&255,e>>24&255]),this.length+=4,this}writeInt32LE(e){return this.writeUInt32LE(e>>>0)}writeUInt64BEBn(e){let t=e.toArray("be",8);return this.write(t),this}writeUInt64LEBn(e){let t=e.toArray("be",8);return this.writeReverse(t),this}writeUInt64LE(e){let 
t=new l(e).toArray("be",8);return this.writeReverse(t),this}writeVarIntNum(e){let t=eF.varIntNum(e);return this.write(t),this}writeVarIntBn(e){let t=eF.varIntBn(e);return this.write(t),this}static varIntNum(e){let t;if(e<0)return this.varIntBn(new l(e));if(e<253)t=[e];else if(e<65536)t=[253,255&e,e>>8&255];else if(e<0x100000000)t=[254,255&e,e>>8&255,e>>16&255,e>>24&255];else{let i=0|e,r=0|Math.floor(e/0x100000000);t=[255,255&i,i>>8&255,i>>16&255,i>>24&255,255&r,r>>8&255,r>>16&255,r>>24&255]}return t}static varIntBn(e){let t;if(e.isNeg()&&(e=e.add(eU)),e.ltn(253))t=[e.toNumber()];else if(e.ltn(65536)){let i=e.toNumber();t=[253,255&i,i>>8&255]}else if(e.lt(new l(0x100000000))){let i=e.toNumber();t=[254,255&i,i>>8&255,i>>16&255,i>>24&255]}else{let i=new eF;i.writeUInt8(255),i.writeUInt64LEBn(e),t=i.toArray()}return t}}class eV{bin;pos;length;constructor(e=[],t=0){this.bin=e,this.pos=t,this.length=e.length}eof(){return this.pos>=this.length}read(e=this.length){let t=this.pos,i=this.pos+e;return this.pos=i,this.bin.slice(t,i)}readReverse(e=this.length){let t=Array(e);for(let i=0;i>>0;return this.pos+=4,e}readInt32LE(){let e=this.readUInt32LE();return(0x80000000&e)!=0?e-0x100000000:e}readUInt64BEBn(){let e=new l(this.bin.slice(this.pos,this.pos+8));return this.pos=this.pos+8,e}readUInt64LEBn(){return new l(this.readReverse(8))}readInt64LEBn(){let e=new l(this.readReverse(8));return e.gte(eq)&&(e=e.sub(eU)),e}readVarIntNum(e=!0){let t,i=this.readUInt8();switch(i){case 253:return this.readUInt16LE();case 254:return this.readUInt32LE();case 255:if((t=e?this.readInt64LEBn():this.readUInt64LEBn()).lte(new l(2).pow(new l(53))))return t.toNumber();throw Error("number too large to retain precision - use readVarIntBn");default:return i}}readVarInt(){switch(this.bin[this.pos]){case 253:return this.read(3);case 254:return this.read(5);case 255:return this.read(9);default:return this.read(1)}}readVarIntBn(){let e=this.readUInt8();switch(e){case 253:return new 
l(this.readUInt16LE());case 254:return new l(this.readUInt32LE());case 255:return this.readUInt64LEBn();default:return new l(e)}}}let eH=e=>{if(0===e.length)return e;let t=e[e.length-1];if((127&t)!=0)return e;if(1===e.length)return[];if((128&e[e.length-2])!=0)return e;for(let i=e.length-1;i>0;i--)if(0!==e[i-1])if((128&e[i-1])!=0)return e[i]=t,e.slice(0,i+1);else return e[i-1]|=t,e.slice(0,i);return[]},eq=new l(2).pow(new l(63)),eU=new l(2).pow(new l(64)),eK=(1n<<256n)-1n;function e$(e){let t=e>>256n;return t=(e=(e&eK)+(t<<32n)+977n*t)>>256n,(e=(e&eK)+(t<<32n)+977n*t)>=0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2fn&&(e-=0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2fn),e}let ej=e=>e$((e%0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2fn+0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2fn)%0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2fn),ez=(e,t)=>e>=t?e-t:0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2fn-(t-e),eW=(e,t)=>e$(e*t),eG=e=>{let t=1n,i=0n,r=ej(e),s=0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2fn;for(;r>1n;){let e=s/r;[t,i]=[i-t*e,t],[r,s]=[s-r*e,r]}return ej(t)},eX=0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2fn+1n>>2n,eY=BigInt("0x79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798"),eJ=BigInt("0x483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8"),eZ=new Map,eQ=e=>{let{X:t,Y:i,Z:r}=e;if(0n===i)return{X:0n,Y:1n,Z:0n};let s=eW(i,i),n=eW(4n,eW(t,s)),a=eW(3n,eW(t,t)),o=ez(eW(a,a),eW(2n,n)),c=ez(eW(a,ez(n,o)),eW(8n,eW(s,s)));return{X:o,Y:c,Z:eW(2n,eW(i,r))}},e0=(e,t)=>{if(0n===e.Z)return t;if(0n===t.Z)return e;let i=eW(e.Z,e.Z),r=eW(t.Z,t.Z),s=eW(e.X,r),n=eW(t.X,i),a=eW(e.Y,eW(r,t.Z)),o=eW(t.Y,eW(i,e.Z)),c=ez(n,s),h=ez(o,a);if(0n===c)return 0n===h?eQ(e):{X:0n,Y:1n,Z:0n};let 
l=eW(c,c),d=eW(c,l),f=eW(s,l),u=ez(ez(eW(h,h),d),eW(2n,f)),p=ez(eW(h,ez(f,u)),eW(a,d));return{X:u,Y:p,Z:eW(c,eW(e.Z,t.Z))}},e1=e=>0n===e.Z?e:{X:e.X,Y:0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2fn-e.Y,Z:e.Z},e2=(e,t,i=5)=>{let r,s=`${i}:${t.x.toString(16)}:${t.y.toString(16)}`,n=eZ.get(s);if(void 0===n){let e=1<>1n,h=e;for(;h>0n;)if((1n&h)===0n)a.push(0),h>>=1n;else{let e=h&o-1n;e>c&&(e-=o),a.push(Number(e)),h-=e,h>>=1n}let l={X:0n,Y:1n,Z:0n};for(let e=a.length-1;e>=0;e--){l=eQ(l);let t=a[e];if(0!==t){let e=Math.abs(t)>>1;l=e0(l,t>0?n[e]:e1(n[e]))}}return l},e3=e=>{let t=e%0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141n;return t<0n&&(t+=0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141n),t},e6=(e,t)=>e3(e*t),e8=e=>{let t=1n,i=0n,r=e3(e),s=0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141n;for(;r>1n;){let e=s/r;[t,i]=[i-t*e,t],[r,s]=[s-r*e,r]}return e3(t)};class e4 extends b{x;y;inf;static fromDER(e){if((4===e[0]||6===e[0]||7===e[0])&&e.length-1==64){if(6===e[0]){if(e[e.length-1]%2!=0)throw Error("Point string value is wrong length")}else if(7===e[0]&&e[e.length-1]%2!=1)throw Error("Point string value is wrong length");return new e4(e.slice(1,33),e.slice(33,65))}if((2===e[0]||3===e[0])&&e.length-1==32)return e4.fromX(e.slice(1,33),3===e[0]);throw Error("Unknown point format")}static fromString(e){let t=eO(e,"hex");return e4.fromDER(t)}static fromX(e,t){let i,r=l.isBN(e)?BigInt("0x"+e.toString(16)):"string"==typeof e?BigInt("0x"+e):Array.isArray(e)?BigInt("0x"+ex(e)):BigInt(e),s=(e=>{let t=((e,t)=>{let i=1n;e=ej(e);let r=t;for(;r>0n;)(1n&r)===1n&&(i=eW(i,e)),e=eW(e,e),r>>=1n;return i})(e,eX);return eW(t,t)===ej(e)?t:null})(e$(eW(eW(i=r=ej(r),i),r)+7n));if(null===s)throw Error("Invalid point");let n=s;return(1n&n)!==(t?1n:0n)&&(n=ez(0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2fn,n)),new e4(new l(r.toString(16),16),new l(n.toString(16),16))}static 
fromJSON(e,t){"string"==typeof e&&(e=JSON.parse(e));let i=new e4(e[0],e[1],t);if("object"!=typeof e[2])return i;let r=e=>new e4(e[0],e[1],t),s=e[2];return i.precomputed={beta:null,doubles:"object"==typeof s.doubles&&null!==s.doubles?{step:s.doubles.step,points:[i].concat(s.doubles.points.map(r))}:void 0,naf:"object"==typeof s.naf&&null!==s.naf?{wnd:s.naf.wnd,points:[i].concat(s.naf.points.map(r))}:void 0},i}constructor(e,t,i=!0){super("affine"),this.precomputed=null,null===e&&null===t?(this.x=null,this.y=null,this.inf=!0):(l.isBN(e)||(e=new l(e,16)),this.x=e,l.isBN(t)||(t=new l(t,16)),this.y=t,i&&(this.x.forceRed(this.curve.red),this.y.forceRed(this.curve.red)),null===this.x.red&&(this.x=this.x.toRed(this.curve.red)),null===this.y.red&&(this.y=this.y.toRed(this.curve.red)),this.inf=!1)}validate(){return this.curve.validate(this)}encode(e=!0,t){let i,r=this.curve.p.byteLength(),s=this.getX().toArray("be",r);return(i=e?[this.getY().isEven()?2:3].concat(s):[4].concat(s,this.getY().toArray("be",r)),"hex"!==t)?i:ex(i)}toString(){return this.encode(!0,"hex")}toJSON(){return null==this.precomputed?[this.x,this.y]:[this.x,this.y,"object"==typeof this.precomputed&&null!==this.precomputed?{doubles:null!=this.precomputed.doubles?{step:this.precomputed.doubles.step,points:this.precomputed.doubles.points.slice(1)}:void 0,naf:null!=this.precomputed.naf?{wnd:this.precomputed.naf.wnd,points:this.precomputed.naf.points.slice(1)}:void 0}:void 0]}inspect(){return this.isInfinity()?"":""}isInfinity(){return this.inf}add(e){if(this.inf)return e;if(e.inf)return this;if(this.eq(e))return this.dbl();if(this.neg().eq(e)||this.x?.cmp(e.x??new l(0))===0)return new e4(null,null);let t=e0({X:BigInt("0x"+this.x.fromRed().toString(16)),Y:BigInt("0x"+this.y.fromRed().toString(16)),Z:1n},{X:BigInt("0x"+e.x.fromRed().toString(16)),Y:BigInt("0x"+e.y.fromRed().toString(16)),Z:1n});if(0n===t.Z)return new e4(null,null);let i=eG(t.Z),r=eW(i,i),s=eW(t.X,r),n=eW(t.Y,eW(r,i));return new 
e4(s.toString(16),n.toString(16))}dbl(){if(this.inf)return this;if(null===this.x||null===this.y)throw Error("Point coordinates cannot be null");let e=BigInt("0x"+this.x.fromRed().toString(16)),t=BigInt("0x"+this.y.fromRed().toString(16));if(0n===t)return new e4(null,null);let i=eQ({X:e,Y:t,Z:1n}),r=eG(i.Z),s=eW(r,r),n=eW(i.X,s),a=eW(i.Y,eW(s,r));return new e4(n.toString(16),a.toString(16))}getX(){return(this.x??new l(0)).fromRed()}getY(){return(this.y??new l(0)).fromRed()}mul(e){let t,i;if(l.isBN(e)||(e=new l(e,16)),this.inf)return this;let r=BigInt("0x"+e.toString(16)),s=r<0n;if(s&&(r=-r),0n===(r=ej(r)))return new e4(null,null);if(null===this.x||null===this.y)throw Error("Point coordinates cannot be null");this===this.curve.g?(t=eY,i=eJ):(t=BigInt("0x"+this.x.fromRed().toString(16)),i=BigInt("0x"+this.y.fromRed().toString(16)));let n=e2(r,{x:t,y:i});if(0n===n.Z)return new e4(null,null);let a=eG(n.Z),o=eW(a,a),c=eW(n.X,o),h=eW(n.Y,eW(o,a)),d=new e4(new l(c.toString(16),16),new l(h.toString(16),16));return s?d.neg():d}mulAdd(e,t,i){return this._endoWnafMulAdd([this,t],[e,i])}jmulAdd(e,t,i){return this._endoWnafMulAdd([this,t],[e,i],!0)}eq(e){return this===e||this.inf===e.inf&&(this.inf||0===(this.x??new l(0)).cmp(e.x??new l(0))&&0===(this.y??new l(0)).cmp(e.y??new l(0)))}neg(e){if(this.inf)return this;let t=new e4(this.x,(this.y??new l(0)).redNeg());if(!0===e&&null!=this.precomputed){let e=this.precomputed;t.precomputed={naf:null!=e.naf?{wnd:e.naf.wnd,points:e.naf.points.map(e=>e.neg())}:void 0,doubles:null!=e.doubles?{step:e.doubles.step,points:e.doubles.points.map(e=>e.neg())}:void 0,beta:void 0}}return t}dblp(e){let t=this;for(let i=0;i{if(null===e.x)throw Error("p.x is null");if(void 0===i.endo||null===i.endo)throw Error("curve.endo is undefined");return new e4(e.x.redMul(i.endo.beta),e.y)};e.beta=t,t.precomputed={beta:null,naf:null!=e.naf?{wnd:e.naf.wnd,points:e.naf.points.map(r)}:void 
0,doubles:null!=e.doubles?{step:e.doubles.step,points:e.doubles.points.map(r)}:void 0}}return t}_fixedNafMul(e){if("object"!=typeof this.precomputed||null===this.precomputed)throw Error("_fixedNafMul requires precomputed values for the point");let t=this._getDoubles(),i=this.curve.getNAF(e,1,this.curve._bitLength),r=(1<=e;s--)r=(r<<1)+i[s];s.push(r)}let n=new g(null,null,null),a=new g(null,null,null);for(let e=r;e>0;e--){for(let i=0;ie.toNumber()),a=this.curve._wnafT2.map(()=>[]),o=this.curve._wnafT3.map(()=>[]),c=0;for(let i=0;i=1;e-=2){let r=e-1,s=e;if(1!==n[r]||1!==n[s]){o[r]=this.curve.getNAF(i[r],n[r],this.curve._bitLength),o[s]=this.curve.getNAF(i[s],n[s],this.curve._bitLength),c=Math.max(o[r].length,c),c=Math.max(o[s].length,c);continue}let h=[t[r],null,null,t[s]];0===(t[r].y??new l(0)).cmp(t[s].y??new l(0))?(h[1]=t[r].add(t[s]),h[2]=t[r].toJ().mixedAdd(t[s].neg())):0===(t[r].y??new l(0)).cmp((t[s].y??new l(0)).redNeg())?(h[1]=t[r].toJ().mixedAdd(t[s]),h[2]=t[r].add(t[s].neg())):(h[1]=t[r].toJ().mixedAdd(t[s]),h[2]=t[r].toJ().mixedAdd(t[s].neg()));let d=[-3,-1,-5,-7,0,7,5,1,3],f=this.curve.getJSF(i[r],i[s]);c=Math.max(f[0].length,c),o[r]=Array(c),o[s]=Array(c);for(let e=0;e=0;e--){let t=0;for(;e>=0;){let i=!0;for(let t=0;t=0&&t++,h=h.dblp(t),e<0)break;let i=new l(1),s=new l(2);for(let e=0;e=Math.ceil((e.bitLength()+1)/t.step)}_getDoubles(e,t){if("object"==typeof this.precomputed&&null!==this.precomputed&&"object"==typeof this.precomputed.doubles&&null!==this.precomputed.doubles)return this.precomputed.doubles;let i=[this],r=this;for(let s=0;s<(t??0);s+=e??1){for(let t=0;t<(e??1);t++)r=r.dbl();i.push(r)}return{step:e??1,points:i}}_getNAFPoints(e){if("object"==typeof this.precomputed&&null!==this.precomputed&&"object"==typeof this.precomputed.naf&&null!==this.precomputed.naf)return this.precomputed.naf;let t=[this],i=(1<(s>>1)-1?(s>>1)-i:i,n.isubn(t)):t=0,r[e]=t,n.iushrn(1)}return r}getJSF(e,t){let i=[[],[]];e=e.clone(),t=t.clone();let 
r=0,s=0;for(;e.cmpn(-r)>0||t.cmpn(-s)>0;){let n,a,o=e.andln(3)+r&3,c=t.andln(3)+s&3;if(3===o&&(o=-1),3===c&&(c=-1),(1&o)==0)n=0;else{let t=e.andln(7)+r&7;n=(3===t||5===t)&&2===c?-o:o}if(i[0].push(n),(1&c)==0)a=0;else{let e=t.andln(7)+s&7;a=(3===e||5===e)&&2===o?-c:c}i[1].push(a),2*r===n+1&&(r=1-r),2*s===a+1&&(s=1-s),e.iushrn(1),t.iushrn(1)}return i}static cachedProperty(e,t,i){let r="_"+t;e.prototype[t]=function(){return void 0!==this[r]?this[r]:this[r]=i.call(this)}}static parseBytes(e){return"string"==typeof e?eO(e,"hex"):e}static intFromLE(e){return new l(e,"hex","le")}constructor(){if(void 0!==e)return e;e=this;let t={prime:"k256",p:"ffffffff ffffffff ffffffff ffffffff ffffffff ffffffff fffffffe fffffc2f",a:"0",b:"7",n:"ffffffff ffffffff ffffffff fffffffe baaedce6 af48a03b bfd25e8c d0364141",h:"1",beta:"7ae96a2b657c07106e64479eac3434e99cf0497512f58995c1396c28719501ee",lambda:"5363ad4cc05c30e0a5261c028812645a122e22ea20816678df02967c1b23bd72",basis:[{a:"3086d221a7d46bcde86c90e49284eb15",b:"-e4437ed6010e88286f547fa90abfe4c3"},{a:"114ca50f7a8e2f3f657c1108d9d44cfd8",b:"3086d221a7d46bcde86c90e49284eb15"}],gRed:!1,g:["79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798","483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8",{doubles:{step:4,points:[["e60fce93b59e9ec53011aabc21c23e97b2a31369b87a5ae9c44ee89e2a6dec0a","f7e3507399e595929db99f34f57937101296891e44d23f0be1f32cce69616821"],["8282263212c609d9ea2a6e3e172de238d8c39cabd5ac1ca10646e23fd5f51508","11f8a8098557dfe45e8256e830b60ace62d613ac2f7b17bed31b6eaff6e26caf"],["175e159f728b865a72f99cc6c6fc846de0b93833fd2222ed73fce5b551e5b739","d3506e0d9e3c79eba4ef97a51ff71f5eacb5955add24345c6efa6ffee9fed695"],["363d90d447b00c9c99ceac05b6262ee053441c7e55552ffe526bad8f83ff4640","4e273adfc732221953b445397f3363145b9a89008199ecb62003c7f3bee9de9"],["8b4b5f165df3c2be8c6244b5b745638843e4a781a15bcd1b69f79a55dffdf80c","4aad0a6f68d308b4b3fbd7813ab0da04f9e336546162ee56b3eff0c65fd4fd36"],["723cbaa6e5db996d6bf771c00bd5
48c7b700dbffa6c0e77bcb6115925232fcda","96e867b5595cc498a921137488824d6e2660a0653779494801dc069d9eb39f5f"],["eebfa4d493bebf98ba5feec812c2d3b50947961237a919839a533eca0e7dd7fa","5d9a8ca3970ef0f269ee7edaf178089d9ae4cdc3a711f712ddfd4fdae1de8999"],["100f44da696e71672791d0a09b7bde459f1215a29b3c03bfefd7835b39a48db0","cdd9e13192a00b772ec8f3300c090666b7ff4a18ff5195ac0fbd5cd62bc65a09"],["e1031be262c7ed1b1dc9227a4a04c017a77f8d4464f3b3852c8acde6e534fd2d","9d7061928940405e6bb6a4176597535af292dd419e1ced79a44f18f29456a00d"],["feea6cae46d55b530ac2839f143bd7ec5cf8b266a41d6af52d5e688d9094696d","e57c6b6c97dce1bab06e4e12bf3ecd5c981c8957cc41442d3155debf18090088"],["da67a91d91049cdcb367be4be6ffca3cfeed657d808583de33fa978bc1ec6cb1","9bacaa35481642bc41f463f7ec9780e5dec7adc508f740a17e9ea8e27a68be1d"],["53904faa0b334cdda6e000935ef22151ec08d0f7bb11069f57545ccc1a37b7c0","5bc087d0bc80106d88c9eccac20d3c1c13999981e14434699dcb096b022771c8"],["8e7bcd0bd35983a7719cca7764ca906779b53a043a9b8bcaeff959f43ad86047","10b7770b2a3da4b3940310420ca9514579e88e2e47fd68b3ea10047e8460372a"],["385eed34c1cdff21e6d0818689b81bde71a7f4f18397e6690a841e1599c43862","283bebc3e8ea23f56701de19e9ebf4576b304eec2086dc8cc0458fe5542e5453"],["6f9d9b803ecf191637c73a4413dfa180fddf84a5947fbc9c606ed86c3fac3a7","7c80c68e603059ba69b8e2a30e45c4d47ea4dd2f5c281002d86890603a842160"],["3322d401243c4e2582a2147c104d6ecbf774d163db0f5e5313b7e0e742d0e6bd","56e70797e9664ef5bfb019bc4ddaf9b72805f63ea2873af624f3a2e96c28b2a0"],["85672c7d2de0b7da2bd1770d89665868741b3f9af7643397721d74d28134ab83","7c481b9b5b43b2eb6374049bfa62c2e5e77f17fcc5298f44c8e3094f790313a6"],["948bf809b1988a46b06c9f1919413b10f9226c60f668832ffd959af60c82a0a","53a562856dcb6646dc6b74c5d1c3418c6d4dff08c97cd2bed4cb7f88d8c8e589"],["6260ce7f461801c34f067ce0f02873a8f1b0e44dfc69752accecd819f38fd8e8","bc2da82b6fa5b571a7f09049776a1ef7ecd292238051c198c1a84e95b2b4ae17"],["e5037de0afc1d8d43d8348414bbf4103043ec8f575bfdc432953cc8d2037fa2d","4571534baa94d3b5f9f98d09fb990bddbd5f5b03ec481f10e0e5dc841d7
55bda"],["e06372b0f4a207adf5ea905e8f1771b4e7e8dbd1c6a6c5b725866a0ae4fce725","7a908974bce18cfe12a27bb2ad5a488cd7484a7787104870b27034f94eee31dd"],["213c7a715cd5d45358d0bbf9dc0ce02204b10bdde2a3f58540ad6908d0559754","4b6dad0b5ae462507013ad06245ba190bb4850f5f36a7eeddff2c27534b458f2"],["4e7c272a7af4b34e8dbb9352a5419a87e2838c70adc62cddf0cc3a3b08fbd53c","17749c766c9d0b18e16fd09f6def681b530b9614bff7dd33e0b3941817dcaae6"],["fea74e3dbe778b1b10f238ad61686aa5c76e3db2be43057632427e2840fb27b6","6e0568db9b0b13297cf674deccb6af93126b596b973f7b77701d3db7f23cb96f"],["76e64113f677cf0e10a2570d599968d31544e179b760432952c02a4417bdde39","c90ddf8dee4e95cf577066d70681f0d35e2a33d2b56d2032b4b1752d1901ac01"],["c738c56b03b2abe1e8281baa743f8f9a8f7cc643df26cbee3ab150242bcbb891","893fb578951ad2537f718f2eacbfbbbb82314eef7880cfe917e735d9699a84c3"],["d895626548b65b81e264c7637c972877d1d72e5f3a925014372e9f6588f6c14b","febfaa38f2bc7eae728ec60818c340eb03428d632bb067e179363ed75d7d991f"],["b8da94032a957518eb0f6433571e8761ceffc73693e84edd49150a564f676e03","2804dfa44805a1e4d7c99cc9762808b092cc584d95ff3b511488e4e74efdf6e7"],["e80fea14441fb33a7d8adab9475d7fab2019effb5156a792f1a11778e3c0df5d","eed1de7f638e00771e89768ca3ca94472d155e80af322ea9fcb4291b6ac9ec78"],["a301697bdfcd704313ba48e51d567543f2a182031efd6915ddc07bbcc4e16070","7370f91cfb67e4f5081809fa25d40f9b1735dbf7c0a11a130c0d1a041e177ea1"],["90ad85b389d6b936463f9d0512678de208cc330b11307fffab7ac63e3fb04ed4","e507a3620a38261affdcbd9427222b839aefabe1582894d991d4d48cb6ef150"],["8f68b9d2f63b5f339239c1ad981f162ee88c5678723ea3351b7b444c9ec4c0da","662a9f2dba063986de1d90c2b6be215dbbea2cfe95510bfdf23cbf79501fff82"],["e4f3fb0176af85d65ff99ff9198c36091f48e86503681e3e6686fd5053231e11","1e63633ad0ef4f1c1661a6d0ea02b7286cc7e74ec951d1c9822c38576feb73bc"],["8c00fa9b18ebf331eb961537a45a4266c7034f2f0d4e1d0716fb6eae20eae29e","efa47267fea521a1a9dc343a3736c974c2fadafa81e36c54e7d2a4c66702414b"],["e7a26ce69dd4829f3e10cec0a9e98ed3143d084f308b92c0997fddfc60cb3e41","2a758e300fa7984b471b
006a1aafbb18d0a6b2c0420e83e20e8a9421cf2cfd51"],["b6459e0ee3662ec8d23540c223bcbdc571cbcb967d79424f3cf29eb3de6b80ef","67c876d06f3e06de1dadf16e5661db3c4b3ae6d48e35b2ff30bf0b61a71ba45"],["d68a80c8280bb840793234aa118f06231d6f1fc67e73c5a5deda0f5b496943e8","db8ba9fff4b586d00c4b1f9177b0e28b5b0e7b8f7845295a294c84266b133120"],["324aed7df65c804252dc0270907a30b09612aeb973449cea4095980fc28d3d5d","648a365774b61f2ff130c0c35aec1f4f19213b0c7e332843967224af96ab7c84"],["4df9c14919cde61f6d51dfdbe5fee5dceec4143ba8d1ca888e8bd373fd054c96","35ec51092d8728050974c23a1d85d4b5d506cdc288490192ebac06cad10d5d"],["9c3919a84a474870faed8a9c1cc66021523489054d7f0308cbfc99c8ac1f98cd","ddb84f0f4a4ddd57584f044bf260e641905326f76c64c8e6be7e5e03d4fc599d"],["6057170b1dd12fdf8de05f281d8e06bb91e1493a8b91d4cc5a21382120a959e5","9a1af0b26a6a4807add9a2daf71df262465152bc3ee24c65e899be932385a2a8"],["a576df8e23a08411421439a4518da31880cef0fba7d4df12b1a6973eecb94266","40a6bf20e76640b2c92b97afe58cd82c432e10a7f514d9f3ee8be11ae1b28ec8"],["7778a78c28dec3e30a05fe9629de8c38bb30d1f5cf9a3a208f763889be58ad71","34626d9ab5a5b22ff7098e12f2ff580087b38411ff24ac563b513fc1fd9f43ac"],["928955ee637a84463729fd30e7afd2ed5f96274e5ad7e5cb09eda9c06d903ac","c25621003d3f42a827b78a13093a95eeac3d26efa8a8d83fc5180e935bcd091f"],["85d0fef3ec6db109399064f3a0e3b2855645b4a907ad354527aae75163d82751","1f03648413a38c0be29d496e582cf5663e8751e96877331582c237a24eb1f962"],["ff2b0dce97eece97c1c9b6041798b85dfdfb6d8882da20308f5404824526087e","493d13fef524ba188af4c4dc54d07936c7b7ed6fb90e2ceb2c951e01f0c29907"],["827fbbe4b1e880ea9ed2b2e6301b212b57f1ee148cd6dd28780e5e2cf856e241","c60f9c923c727b0b71bef2c67d1d12687ff7a63186903166d605b68baec293ec"],["eaa649f21f51bdbae7be4ae34ce6e5217a58fdce7f47f9aa7f3b58fa2120e2b3","be3279ed5bbbb03ac69a80f89879aa5a01a6b965f13f7e59d47a5305ba5ad93d"],["e4a42d43c5cf169d9391df6decf42ee541b6d8f0c9a137401e23632dda34d24f","4d9f92e716d1c73526fc99ccfb8ad34ce886eedfa8d8e4f13a7f7131deba9414"],["1ec80fef360cbdd954160fadab352b6b92b53576a88fea49471
73b9d4300bf19","aeefe93756b5340d2f3a4958a7abbf5e0146e77f6295a07b671cdc1cc107cefd"],["146a778c04670c2f91b00af4680dfa8bce3490717d58ba889ddb5928366642be","b318e0ec3354028add669827f9d4b2870aaa971d2f7e5ed1d0b297483d83efd0"],["fa50c0f61d22e5f07e3acebb1aa07b128d0012209a28b9776d76a8793180eef9","6b84c6922397eba9b72cd2872281a68a5e683293a57a213b38cd8d7d3f4f2811"],["da1d61d0ca721a11b1a5bf6b7d88e8421a288ab5d5bba5220e53d32b5f067ec2","8157f55a7c99306c79c0766161c91e2966a73899d279b48a655fba0f1ad836f1"],["a8e282ff0c9706907215ff98e8fd416615311de0446f1e062a73b0610d064e13","7f97355b8db81c09abfb7f3c5b2515888b679a3e50dd6bd6cef7c73111f4cc0c"],["174a53b9c9a285872d39e56e6913cab15d59b1fa512508c022f382de8319497c","ccc9dc37abfc9c1657b4155f2c47f9e6646b3a1d8cb9854383da13ac079afa73"],["959396981943785c3d3e57edf5018cdbe039e730e4918b3d884fdff09475b7ba","2e7e552888c331dd8ba0386a4b9cd6849c653f64c8709385e9b8abf87524f2fd"],["d2a63a50ae401e56d645a1153b109a8fcca0a43d561fba2dbb51340c9d82b151","e82d86fb6443fcb7565aee58b2948220a70f750af484ca52d4142174dcf89405"],["64587e2335471eb890ee7896d7cfdc866bacbdbd3839317b3436f9b45617e073","d99fcdd5bf6902e2ae96dd6447c299a185b90a39133aeab358299e5e9faf6589"],["8481bde0e4e4d885b3a546d3e549de042f0aa6cea250e7fd358d6c86dd45e458","38ee7b8cba5404dd84a25bf39cecb2ca900a79c42b262e556d64b1b59779057e"],["13464a57a78102aa62b6979ae817f4637ffcfed3c4b1ce30bcd6303f6caf666b","69be159004614580ef7e433453ccb0ca48f300a81d0942e13f495a907f6ecc27"],["bc4a9df5b713fe2e9aef430bcc1dc97a0cd9ccede2f28588cada3a0d2d83f366","d3a81ca6e785c06383937adf4b798caa6e8a9fbfa547b16d758d666581f33c1"],["8c28a97bf8298bc0d23d8c749452a32e694b65e30a9472a3954ab30fe5324caa","40a30463a3305193378fedf31f7cc0eb7ae784f0451cb9459e71dc73cbef9482"],["8ea9666139527a8c1dd94ce4f071fd23c8b350c5a4bb33748c4ba111faccae0","620efabbc8ee2782e24e7c0cfb95c5d735b783be9cf0f8e955af34a30e62b945"],["dd3625faef5ba06074669716bbd3788d89bdde815959968092f76cc4eb9a9787","7a188fa3520e30d461da2501045731ca941461982883395937f68d00c644a573"],["f710d79d9eb96
2297e4f6232b40e8f7feb2bc63814614d692c12de752408221e","ea98e67232d3b3295d3b535532115ccac8612c721851617526ae47a9c77bfc82"]]},naf:{wnd:7,points:[["f9308a019258c31049344f85f89d5229b531c845836f99b08601f113bce036f9","388f7b0f632de8140fe337e62a37f3566500a99934c2231b6cb9fd7584b8e672"],["2f8bde4d1a07209355b4a7250a5c5128e88b84bddc619ab7cba8d569b240efe4","d8ac222636e5e3d6d4dba9dda6c9c426f788271bab0d6840dca87d3aa6ac62d6"],["5cbdf0646e5db4eaa398f365f2ea7a0e3d419b7e0330e39ce92bddedcac4f9bc","6aebca40ba255960a3178d6d861a54dba813d0b813fde7b5a5082628087264da"],["acd484e2f0c7f65309ad178a9f559abde09796974c57e714c35f110dfc27ccbe","cc338921b0a7d9fd64380971763b61e9add888a4375f8e0f05cc262ac64f9c37"],["774ae7f858a9411e5ef4246b70c65aac5649980be5c17891bbec17895da008cb","d984a032eb6b5e190243dd56d7b7b365372db1e2dff9d6a8301d74c9c953c61b"],["f28773c2d975288bc7d1d205c3748651b075fbc6610e58cddeeddf8f19405aa8","ab0902e8d880a89758212eb65cdaf473a1a06da521fa91f29b5cb52db03ed81"],["d7924d4f7d43ea965a465ae3095ff41131e5946f3c85f79e44adbcf8e27e080e","581e2872a86c72a683842ec228cc6defea40af2bd896d3a5c504dc9ff6a26b58"],["defdea4cdb677750a420fee807eacf21eb9898ae79b9768766e4faa04a2d4a34","4211ab0694635168e997b0ead2a93daeced1f4a04a95c0f6cfb199f69e56eb77"],["2b4ea0a797a443d293ef5cff444f4979f06acfebd7e86d277475656138385b6c","85e89bc037945d93b343083b5a1c86131a01f60c50269763b570c854e5c09b7a"],["352bbf4a4cdd12564f93fa332ce333301d9ad40271f8107181340aef25be59d5","321eb4075348f534d59c18259dda3e1f4a1b3b2e71b1039c67bd3d8bcf81998c"],["2fa2104d6b38d11b0230010559879124e42ab8dfeff5ff29dc9cdadd4ecacc3f","2de1068295dd865b64569335bd5dd80181d70ecfc882648423ba76b532b7d67"],["9248279b09b4d68dab21a9b066edda83263c3d84e09572e269ca0cd7f5453714","73016f7bf234aade5d1aa71bdea2b1ff3fc0de2a887912ffe54a32ce97cb3402"],["daed4f2be3a8bf278e70132fb0beb7522f570e144bf615c07e996d443dee8729","a69dce4a7d6c98e8d4a1aca87ef8d7003f83c230f3afa726ab40e52290be1c55"],["c44d12c7065d812e8acf28d7cbb19f9011ecd9e9fdf281b0e6a3b5e87d22e7db","2119a460ce326cdc76c4592
6c982fdac0e106e861edf61c5a039063f0e0e6482"],["6a245bf6dc698504c89a20cfded60853152b695336c28063b61c65cbd269e6b4","e022cf42c2bd4a708b3f5126f16a24ad8b33ba48d0423b6efd5e6348100d8a82"],["1697ffa6fd9de627c077e3d2fe541084ce13300b0bec1146f95ae57f0d0bd6a5","b9c398f186806f5d27561506e4557433a2cf15009e498ae7adee9d63d01b2396"],["605bdb019981718b986d0f07e834cb0d9deb8360ffb7f61df982345ef27a7479","2972d2de4f8d20681a78d93ec96fe23c26bfae84fb14db43b01e1e9056b8c49"],["62d14dab4150bf497402fdc45a215e10dcb01c354959b10cfe31c7e9d87ff33d","80fc06bd8cc5b01098088a1950eed0db01aa132967ab472235f5642483b25eaf"],["80c60ad0040f27dade5b4b06c408e56b2c50e9f56b9b8b425e555c2f86308b6f","1c38303f1cc5c30f26e66bad7fe72f70a65eed4cbe7024eb1aa01f56430bd57a"],["7a9375ad6167ad54aa74c6348cc54d344cc5dc9487d847049d5eabb0fa03c8fb","d0e3fa9eca8726909559e0d79269046bdc59ea10c70ce2b02d499ec224dc7f7"],["d528ecd9b696b54c907a9ed045447a79bb408ec39b68df504bb51f459bc3ffc9","eecf41253136e5f99966f21881fd656ebc4345405c520dbc063465b521409933"],["49370a4b5f43412ea25f514e8ecdad05266115e4a7ecb1387231808f8b45963","758f3f41afd6ed428b3081b0512fd62a54c3f3afbb5b6764b653052a12949c9a"],["77f230936ee88cbbd73df930d64702ef881d811e0e1498e2f1c13eb1fc345d74","958ef42a7886b6400a08266e9ba1b37896c95330d97077cbbe8eb3c7671c60d6"],["f2dac991cc4ce4b9ea44887e5c7c0bce58c80074ab9d4dbaeb28531b7739f530","e0dedc9b3b2f8dad4da1f32dec2531df9eb5fbeb0598e4fd1a117dba703a3c37"],["463b3d9f662621fb1b4be8fbbe2520125a216cdfc9dae3debcba4850c690d45b","5ed430d78c296c3543114306dd8622d7c622e27c970a1de31cb377b01af7307e"],["f16f804244e46e2a09232d4aff3b59976b98fac14328a2d1a32496b49998f247","cedabd9b82203f7e13d206fcdf4e33d92a6c53c26e5cce26d6579962c4e31df6"],["caf754272dc84563b0352b7a14311af55d245315ace27c65369e15f7151d41d1","cb474660ef35f5f2a41b643fa5e460575f4fa9b7962232a5c32f908318a04476"],["2600ca4b282cb986f85d0f1709979d8b44a09c07cb86d7c124497bc86f082120","4119b88753c15bd6a693b03fcddbb45d5ac6be74ab5f0ef44b0be9475a7e4b40"],["7635ca72d7e8432c338ec53cd12220bc01c48685e24f7dc8c602a
7746998e435","91b649609489d613d1d5e590f78e6d74ecfc061d57048bad9e76f302c5b9c61"],["754e3239f325570cdbbf4a87deee8a66b7f2b33479d468fbc1a50743bf56cc18","673fb86e5bda30fb3cd0ed304ea49a023ee33d0197a695d0c5d98093c536683"],["e3e6bd1071a1e96aff57859c82d570f0330800661d1c952f9fe2694691d9b9e8","59c9e0bba394e76f40c0aa58379a3cb6a5a2283993e90c4167002af4920e37f5"],["186b483d056a033826ae73d88f732985c4ccb1f32ba35f4b4cc47fdcf04aa6eb","3b952d32c67cf77e2e17446e204180ab21fb8090895138b4a4a797f86e80888b"],["df9d70a6b9876ce544c98561f4be4f725442e6d2b737d9c91a8321724ce0963f","55eb2dafd84d6ccd5f862b785dc39d4ab157222720ef9da217b8c45cf2ba2417"],["5edd5cc23c51e87a497ca815d5dce0f8ab52554f849ed8995de64c5f34ce7143","efae9c8dbc14130661e8cec030c89ad0c13c66c0d17a2905cdc706ab7399a868"],["290798c2b6476830da12fe02287e9e777aa3fba1c355b17a722d362f84614fba","e38da76dcd440621988d00bcf79af25d5b29c094db2a23146d003afd41943e7a"],["af3c423a95d9f5b3054754efa150ac39cd29552fe360257362dfdecef4053b45","f98a3fd831eb2b749a93b0e6f35cfb40c8cd5aa667a15581bc2feded498fd9c6"],["766dbb24d134e745cccaa28c99bf274906bb66b26dcf98df8d2fed50d884249a","744b1152eacbe5e38dcc887980da38b897584a65fa06cedd2c924f97cbac5996"],["59dbf46f8c94759ba21277c33784f41645f7b44f6c596a58ce92e666191abe3e","c534ad44175fbc300f4ea6ce648309a042ce739a7919798cd85e216c4a307f6e"],["f13ada95103c4537305e691e74e9a4a8dd647e711a95e73cb62dc6018cfd87b8","e13817b44ee14de663bf4bc808341f326949e21a6a75c2570778419bdaf5733d"],["7754b4fa0e8aced06d4167a2c59cca4cda1869c06ebadfb6488550015a88522c","30e93e864e669d82224b967c3020b8fa8d1e4e350b6cbcc537a48b57841163a2"],["948dcadf5990e048aa3874d46abef9d701858f95de8041d2a6828c99e2262519","e491a42537f6e597d5d28a3224b1bc25df9154efbd2ef1d2cbba2cae5347d57e"],["7962414450c76c1689c7b48f8202ec37fb224cf5ac0bfa1570328a8a3d7c77ab","100b610ec4ffb4760d5c1fc133ef6f6b12507a051f04ac5760afa5b29db83437"],["3514087834964b54b15b160644d915485a16977225b8847bb0dd085137ec47ca","ef0afbb2056205448e1652c48e8127fc6039e77c15c2378b7e7d15a0de293311"],["d3cc30ad6b483e4
bc79ce2c9dd8bc54993e947eb8df787b442943d3f7b527eaf","8b378a22d827278d89c5e9be8f9508ae3c2ad46290358630afb34db04eede0a4"],["1624d84780732860ce1c78fcbfefe08b2b29823db913f6493975ba0ff4847610","68651cf9b6da903e0914448c6cd9d4ca896878f5282be4c8cc06e2a404078575"],["733ce80da955a8a26902c95633e62a985192474b5af207da6df7b4fd5fc61cd4","f5435a2bd2badf7d485a4d8b8db9fcce3e1ef8e0201e4578c54673bc1dc5ea1d"],["15d9441254945064cf1a1c33bbd3b49f8966c5092171e699ef258dfab81c045c","d56eb30b69463e7234f5137b73b84177434800bacebfc685fc37bbe9efe4070d"],["a1d0fcf2ec9de675b612136e5ce70d271c21417c9d2b8aaaac138599d0717940","edd77f50bcb5a3cab2e90737309667f2641462a54070f3d519212d39c197a629"],["e22fbe15c0af8ccc5780c0735f84dbe9a790badee8245c06c7ca37331cb36980","a855babad5cd60c88b430a69f53a1a7a38289154964799be43d06d77d31da06"],["311091dd9860e8e20ee13473c1155f5f69635e394704eaa74009452246cfa9b3","66db656f87d1f04fffd1f04788c06830871ec5a64feee685bd80f0b1286d8374"],["34c1fd04d301be89b31c0442d3e6ac24883928b45a9340781867d4232ec2dbdf","9414685e97b1b5954bd46f730174136d57f1ceeb487443dc5321857ba73abee"],["f219ea5d6b54701c1c14de5b557eb42a8d13f3abbcd08affcc2a5e6b049b8d63","4cb95957e83d40b0f73af4544cccf6b1f4b08d3c07b27fb8d8c2962a400766d1"],["d7b8740f74a8fbaab1f683db8f45de26543a5490bca627087236912469a0b448","fa77968128d9c92ee1010f337ad4717eff15db5ed3c049b3411e0315eaa4593b"],["32d31c222f8f6f0ef86f7c98d3a3335ead5bcd32abdd94289fe4d3091aa824bf","5f3032f5892156e39ccd3d7915b9e1da2e6dac9e6f26e961118d14b8462e1661"],["7461f371914ab32671045a155d9831ea8793d77cd59592c4340f86cbc18347b5","8ec0ba238b96bec0cbdddcae0aa442542eee1ff50c986ea6b39847b3cc092ff6"],["ee079adb1df1860074356a25aa38206a6d716b2c3e67453d287698bad7b2b2d6","8dc2412aafe3be5c4c5f37e0ecc5f9f6a446989af04c4e25ebaac479ec1c8c1e"],["16ec93e447ec83f0467b18302ee620f7e65de331874c9dc72bfd8616ba9da6b5","5e4631150e62fb40d0e8c2a7ca5804a39d58186a50e497139626778e25b0674d"],["eaa5f980c245f6f038978290afa70b6bd8855897f98b6aa485b96065d537bd99","f65f5d3e292c2e0819a528391c994624d784869d7e6ea6
7fb18041024edc07dc"],["78c9407544ac132692ee1910a02439958ae04877151342ea96c4b6b35a49f51","f3e0319169eb9b85d5404795539a5e68fa1fbd583c064d2462b675f194a3ddb4"],["494f4be219a1a77016dcd838431aea0001cdc8ae7a6fc688726578d9702857a5","42242a969283a5f339ba7f075e36ba2af925ce30d767ed6e55f4b031880d562c"],["a598a8030da6d86c6bc7f2f5144ea549d28211ea58faa70ebf4c1e665c1fe9b5","204b5d6f84822c307e4b4a7140737aec23fc63b65b35f86a10026dbd2d864e6b"],["c41916365abb2b5d09192f5f2dbeafec208f020f12570a184dbadc3e58595997","4f14351d0087efa49d245b328984989d5caf9450f34bfc0ed16e96b58fa9913"],["841d6063a586fa475a724604da03bc5b92a2e0d2e0a36acfe4c73a5514742881","73867f59c0659e81904f9a1c7543698e62562d6744c169ce7a36de01a8d6154"],["5e95bb399a6971d376026947f89bde2f282b33810928be4ded112ac4d70e20d5","39f23f366809085beebfc71181313775a99c9aed7d8ba38b161384c746012865"],["36e4641a53948fd476c39f8a99fd974e5ec07564b5315d8bf99471bca0ef2f66","d2424b1b1abe4eb8164227b085c9aa9456ea13493fd563e06fd51cf5694c78fc"],["336581ea7bfbbb290c191a2f507a41cf5643842170e914faeab27c2c579f726","ead12168595fe1be99252129b6e56b3391f7ab1410cd1e0ef3dcdcabd2fda224"],["8ab89816dadfd6b6a1f2634fcf00ec8403781025ed6890c4849742706bd43ede","6fdcef09f2f6d0a044e654aef624136f503d459c3e89845858a47a9129cdd24e"],["1e33f1a746c9c5778133344d9299fcaa20b0938e8acff2544bb40284b8c5fb94","60660257dd11b3aa9c8ed618d24edff2306d320f1d03010e33a7d2057f3b3b6"],["85b7c1dcb3cec1b7ee7f30ded79dd20a0ed1f4cc18cbcfcfa410361fd8f08f31","3d98a9cdd026dd43f39048f25a8847f4fcafad1895d7a633c6fed3c35e999511"],["29df9fbd8d9e46509275f4b125d6d45d7fbe9a3b878a7af872a2800661ac5f51","b4c4fe99c775a606e2d8862179139ffda61dc861c019e55cd2876eb2a27d84b"],["a0b1cae06b0a847a3fea6e671aaf8adfdfe58ca2f768105c8082b2e449fce252","ae434102edde0958ec4b19d917a6a28e6b72da1834aff0e650f049503a296cf2"],["4e8ceafb9b3e9a136dc7ff67e840295b499dfb3b2133e4ba113f2e4c0e121e5","cf2174118c8b6d7a4b48f6d534ce5c79422c086a63460502b827ce62a326683c"],["d24a44e047e19b6f5afb81c7ca2f69080a5076689a010919f42725c2b789a33b","6fb8d5591b466
f8fc63db50f1c0f1c69013f996887b8244d2cdec417afea8fa3"],["ea01606a7a6c9cdd249fdfcfacb99584001edd28abbab77b5104e98e8e3b35d4","322af4908c7312b0cfbfe369f7a7b3cdb7d4494bc2823700cfd652188a3ea98d"],["af8addbf2b661c8a6c6328655eb96651252007d8c5ea31be4ad196de8ce2131f","6749e67c029b85f52a034eafd096836b2520818680e26ac8f3dfbcdb71749700"],["e3ae1974566ca06cc516d47e0fb165a674a3dabcfca15e722f0e3450f45889","2aeabe7e4531510116217f07bf4d07300de97e4874f81f533420a72eeb0bd6a4"],["591ee355313d99721cf6993ffed1e3e301993ff3ed258802075ea8ced397e246","b0ea558a113c30bea60fc4775460c7901ff0b053d25ca2bdeee98f1a4be5d196"],["11396d55fda54c49f19aa97318d8da61fa8584e47b084945077cf03255b52984","998c74a8cd45ac01289d5833a7beb4744ff536b01b257be4c5767bea93ea57a4"],["3c5d2a1ba39c5a1790000738c9e0c40b8dcdfd5468754b6405540157e017aa7a","b2284279995a34e2f9d4de7396fc18b80f9b8b9fdd270f6661f79ca4c81bd257"],["cc8704b8a60a0defa3a99a7299f2e9c3fbc395afb04ac078425ef8a1793cc030","bdd46039feed17881d1e0862db347f8cf395b74fc4bcdc4e940b74e3ac1f1b13"],["c533e4f7ea8555aacd9777ac5cad29b97dd4defccc53ee7ea204119b2889b197","6f0a256bc5efdf429a2fb6242f1a43a2d9b925bb4a4b3a26bb8e0f45eb596096"],["c14f8f2ccb27d6f109f6d08d03cc96a69ba8c34eec07bbcf566d48e33da6593","c359d6923bb398f7fd4473e16fe1c28475b740dd098075e6c0e8649113dc3a38"],["a6cbc3046bc6a450bac24789fa17115a4c9739ed75f8f21ce441f72e0b90e6ef","21ae7f4680e889bb130619e2c0f95a360ceb573c70603139862afd617fa9b9f"],["347d6d9a02c48927ebfb86c1359b1caf130a3c0267d11ce6344b39f99d43cc38","60ea7f61a353524d1c987f6ecec92f086d565ab687870cb12689ff1e31c74448"],["da6545d2181db8d983f7dcb375ef5866d47c67b1bf31c8cf855ef7437b72656a","49b96715ab6878a79e78f07ce5680c5d6673051b4935bd897fea824b77dc208a"],["c40747cc9d012cb1a13b8148309c6de7ec25d6945d657146b9d5994b8feb1111","5ca560753be2a12fc6de6caf2cb489565db936156b9514e1bb5e83037e0fa2d4"],["4e42c8ec82c99798ccf3a610be870e78338c7f713348bd34c8203ef4037f3502","7571d74ee5e0fb92a7a8b33a07783341a5492144cc54bcc40a94473693606437"],["3775ab7089bc6af823aba2e1af70b236d251cadb0c86
743287522a1b3b0dedea","be52d107bcfa09d8bcb9736a828cfa7fac8db17bf7a76a2c42ad961409018cf7"],["cee31cbf7e34ec379d94fb814d3d775ad954595d1314ba8846959e3e82f74e26","8fd64a14c06b589c26b947ae2bcf6bfa0149ef0be14ed4d80f448a01c43b1c6d"],["b4f9eaea09b6917619f6ea6a4eb5464efddb58fd45b1ebefcdc1a01d08b47986","39e5c9925b5a54b07433a4f18c61726f8bb131c012ca542eb24a8ac07200682a"],["d4263dfc3d2df923a0179a48966d30ce84e2515afc3dccc1b77907792ebcc60e","62dfaf07a0f78feb30e30d6295853ce189e127760ad6cf7fae164e122a208d54"],["48457524820fa65a4f8d35eb6930857c0032acc0a4a2de422233eeda897612c4","25a748ab367979d98733c38a1fa1c2e7dc6cc07db2d60a9ae7a76aaa49bd0f77"],["dfeeef1881101f2cb11644f3a2afdfc2045e19919152923f367a1767c11cceda","ecfb7056cf1de042f9420bab396793c0c390bde74b4bbdff16a83ae09a9a7517"],["6d7ef6b17543f8373c573f44e1f389835d89bcbc6062ced36c82df83b8fae859","cd450ec335438986dfefa10c57fea9bcc521a0959b2d80bbf74b190dca712d10"],["e75605d59102a5a2684500d3b991f2e3f3c88b93225547035af25af66e04541f","f5c54754a8f71ee540b9b48728473e314f729ac5308b06938360990e2bfad125"],["eb98660f4c4dfaa06a2be453d5020bc99a0c2e60abe388457dd43fefb1ed620c","6cb9a8876d9cb8520609af3add26cd20a0a7cd8a9411131ce85f44100099223e"],["13e87b027d8514d35939f2e6892b19922154596941888336dc3563e3b8dba942","fef5a3c68059a6dec5d624114bf1e91aac2b9da568d6abeb2570d55646b8adf1"],["ee163026e9fd6fe017c38f06a5be6fc125424b371ce2708e7bf4491691e5764a","1acb250f255dd61c43d94ccc670d0f58f49ae3fa15b96623e5430da0ad6c62b2"],["b268f5ef9ad51e4d78de3a750c2dc89b1e626d43505867999932e5db33af3d80","5f310d4b3c99b9ebb19f77d41c1dee018cf0d34fd4191614003e945a1216e423"],["ff07f3118a9df035e9fad85eb6c7bfe42b02f01ca99ceea3bf7ffdba93c4750d","438136d603e858a3a5c440c38eccbaddc1d2942114e2eddd4740d098ced1f0d8"],["8d8b9855c7c052a34146fd20ffb658bea4b9f69e0d825ebec16e8c3ce2b526a1","cdb559eedc2d79f926baf44fb84ea4d44bcf50fee51d7ceb30e2e7f463036758"],["52db0b5384dfbf05bfa9d472d7ae26dfe4b851ceca91b1eba54263180da32b63","c3b997d050ee5d423ebaf66a6db9f57b3180c902875679de924b69d84a7b375"],["e62f9
490d3d51da6395efd24e80919cc7d0f29c3f3fa48c6fff543becbd43352","6d89ad7ba4876b0b22c2ca280c682862f342c8591f1daf5170e07bfd9ccafa7d"],["7f30ea2476b399b4957509c88f77d0191afa2ff5cb7b14fd6d8e7d65aaab1193","ca5ef7d4b231c94c3b15389a5f6311e9daff7bb67b103e9880ef4bff637acaec"],["5098ff1e1d9f14fb46a210fada6c903fef0fb7b4a1dd1d9ac60a0361800b7a00","9731141d81fc8f8084d37c6e7542006b3ee1b40d60dfe5362a5b132fd17ddc0"],["32b78c7de9ee512a72895be6b9cbefa6e2f3c4ccce445c96b9f2c81e2778ad58","ee1849f513df71e32efc3896ee28260c73bb80547ae2275ba497237794c8753c"],["e2cb74fddc8e9fbcd076eef2a7c72b0ce37d50f08269dfc074b581550547a4f7","d3aa2ed71c9dd2247a62df062736eb0baddea9e36122d2be8641abcb005cc4a4"],["8438447566d4d7bedadc299496ab357426009a35f235cb141be0d99cd10ae3a8","c4e1020916980a4da5d01ac5e6ad330734ef0d7906631c4f2390426b2edd791f"],["4162d488b89402039b584c6fc6c308870587d9c46f660b878ab65c82c711d67e","67163e903236289f776f22c25fb8a3afc1732f2b84b4e95dbda47ae5a0852649"],["3fad3fa84caf0f34f0f89bfd2dcf54fc175d767aec3e50684f3ba4a4bf5f683d","cd1bc7cb6cc407bb2f0ca647c718a730cf71872e7d0d2a53fa20efcdfe61826"],["674f2600a3007a00568c1a7ce05d0816c1fb84bf1370798f1c69532faeb1a86b","299d21f9413f33b3edf43b257004580b70db57da0b182259e09eecc69e0d38a5"],["d32f4da54ade74abb81b815ad1fb3b263d82d6c692714bcff87d29bd5ee9f08f","f9429e738b8e53b968e99016c059707782e14f4535359d582fc416910b3eea87"],["30e4e670435385556e593657135845d36fbb6931f72b08cb1ed954f1e3ce3ff6","462f9bce619898638499350113bbc9b10a878d35da70740dc695a559eb88db7b"],["be2062003c51cc3004682904330e4dee7f3dcd10b01e580bf1971b04d4cad297","62188bc49d61e5428573d48a74e1c655b1c61090905682a0d5558ed72dccb9bc"],["93144423ace3451ed29e0fb9ac2af211cb6e84a601df5993c419859fff5df04a","7c10dfb164c3425f5c71a3f9d7992038f1065224f72bb9d1d902a6d13037b47c"],["b015f8044f5fcbdcf21ca26d6c34fb8197829205c7b7d2a7cb66418c157b112c","ab8c1e086d04e813744a655b2df8d5f83b3cdc6faa3088c1d3aea1454e3a1d5f"],["d5e9e1da649d97d89e4868117a465a3a4f8a18de57a140d36b3f2af341a21b52","4cb04437f391ed73111a13cc1d4dd0db1693
465c2240480d8955e8592f27447a"],["d3ae41047dd7ca065dbf8ed77b992439983005cd72e16d6f996a5316d36966bb","bd1aeb21ad22ebb22a10f0303417c6d964f8cdd7df0aca614b10dc14d125ac46"],["463e2763d885f958fc66cdd22800f0a487197d0a82e377b49f80af87c897b065","bfefacdb0e5d0fd7df3a311a94de062b26b80c61fbc97508b79992671ef7ca7f"],["7985fdfd127c0567c6f53ec1bb63ec3158e597c40bfe747c83cddfc910641917","603c12daf3d9862ef2b25fe1de289aed24ed291e0ec6708703a5bd567f32ed03"],["74a1ad6b5f76e39db2dd249410eac7f99e74c59cb83d2d0ed5ff1543da7703e9","cc6157ef18c9c63cd6193d83631bbea0093e0968942e8c33d5737fd790e0db08"],["30682a50703375f602d416664ba19b7fc9bab42c72747463a71d0896b22f6da3","553e04f6b018b4fa6c8f39e7f311d3176290d0e0f19ca73f17714d9977a22ff8"],["9e2158f0d7c0d5f26c3791efefa79597654e7a2b2464f52b1ee6c1347769ef57","712fcdd1b9053f09003a3481fa7762e9ffd7c8ef35a38509e2fbf2629008373"],["176e26989a43c9cfeba4029c202538c28172e566e3c4fce7322857f3be327d66","ed8cc9d04b29eb877d270b4878dc43c19aefd31f4eee09ee7b47834c1fa4b1c3"],["75d46efea3771e6e68abb89a13ad747ecf1892393dfc4f1b7004788c50374da8","9852390a99507679fd0b86fd2b39a868d7efc22151346e1a3ca4726586a6bed8"],["809a20c67d64900ffb698c4c825f6d5f2310fb0451c869345b7319f645605721","9e994980d9917e22b76b061927fa04143d096ccc54963e6a5ebfa5f3f8e286c1"],["1b38903a43f7f114ed4500b4eac7083fdefece1cf29c63528d563446f972c180","4036edc931a60ae889353f77fd53de4a2708b26b6f5da72ad3394119daf408f9"]]}}]};this.p=new l(t.p,16),this.red=new u(t.prime),this.zero=new l(0).toRed(this.red),this.one=new l(1).toRed(this.red),this.two=new l(2).toRed(this.red),this.n=new l(t.n,16),this.g=e4.fromJSON(t.g,t.gRed),this._wnafT1=[,,,,],this._wnafT2=[,,,,],this._wnafT3=[,,,,],this._wnafT4=[,,,,],this._bitLength=this.n.bitLength(),this.redN=this.n.toRed(this.red),this.a=new l(t.a,16).toRed(this.red),this.b=new 
l(t.b,16).toRed(this.red),this.tinv=this.two.redInvm(),this.zeroA=0===this.a.fromRed().cmpn(0),this.threeA=0===this.a.fromRed().sub(this.p).cmpn(-3),this.endo=this._getEndomorphism(t),this._endoWnafT1=[,,,,],this._endoWnafT2=[,,,,]}_getEndomorphism(e){let t,i,r;if(this.zeroA&&1===this.p.modrn(3)){if(void 0!==e.beta)t=new l(e.beta,16).toRed(this.red);else{let e=this._getEndoRoots(this.p);if(null===e)throw Error("Failed to get endomorphism roots for beta.");t=(t=0>e[0].cmp(e[1])?e[0]:e[1]).toRed(this.red)}if(void 0!==e.lambda)i=new l(e.lambda,16);else{let e=this._getEndoRoots(this.n);if(null===e)throw Error("Failed to get endomorphism roots for lambda.");if(null==this.g)throw Error("Curve generator point (g) is not defined.");let r=this.g.mul(e[0])?.x,s=null!=this.g.x?this.g.x.redMul(t):void 0;if(null!=r&&null!=s&&0===r.cmp(s))i=e[0];else{if(i=e[1],null==this.g)throw Error("Curve generator point (g) is not defined.");let r=this.g.mul(i)?.x,s=null!=this.g.x?this.g.x.redMul(t):void 0;if(null==r||null==s)throw Error("Lambda computation failed: g.mul(lambda).x or g.x.redMul(beta) is undefined.");e5.assert(0===r.cmp(s),"Lambda selection does not match computed beta.")}}return r="object"==typeof e.basis&&null!==e.basis?e.basis.map(function(e){return{a:new l(e.a,16),b:new l(e.b,16)}}):this._getEndoBasis(i),{beta:t,lambda:i,basis:r}}}_getEndoRoots(e){let t=e===this.p?this.red:new p(e),i=new l(2).toRed(t).redInvm(),r=i.redNeg(),s=new l(3).toRed(t).redNeg().redSqrt().redMul(i);return[r.redAdd(s).fromRed(),r.redSub(s).fromRed()]}_getEndoBasis(e){let t,i,r,s,n,a,o=this.n.ushrn(Math.floor(this.n.bitLength()/2)),c=e,h=this.n.clone(),d=new l(1),f=new l(0),u=new l(0),p=new l(1),b=new l(0),g=0,y=new l(0),m=new l(0);for(;0!==c.cmpn(0);){let e=h.div(c);y=h.sub(e.mul(c)),m=u.sub(e.mul(d));let n=p.sub(e.mul(f));if(void 0===r&&0>y.cmp(o))t=b.neg(),i=d,r=y.neg(),s=m;else if(void 0!==r&&2==++g)break;b=y,h=c,c=y,u=d,d=m,p=f,f=n}if(void 0===t||void 0===i||void 0===r||void 0===s)throw 
Error("Failed to compute Endo Basis values");n=y.neg(),a=m;let w=r.sqr().add(s.sqr());return n.sqr().add(a.sqr()).cmp(w)>=0&&(n=t,a=i),0!==r.negative&&(r=r.neg(),s=s.neg()),0!==n.negative&&(n=n.neg(),a=a.neg()),[{a:r,b:s},{a:n,b:a}]}_endoSplit(e){if(null==this.endo)throw Error("Endomorphism is not defined.");let t=this.endo.basis,i=t[0],r=t[1],s=r.b.mul(e).divRound(this.n),n=i.b.neg().mul(e).divRound(this.n),a=s.mul(i.a),o=n.mul(r.a),c=s.mul(i.b),h=n.mul(r.b);return{k1:e.sub(a).sub(o),k2:c.add(h).neg()}}validate(e){if(e.inf)return!0;let t=e.x,i=e.y;if(null===t||null===i)throw Error("Point coordinates cannot be null");let r=this.a.redMul(t),s=t.redSqr().redMul(t).redIAdd(r).redIAdd(this.b);return 0===i.redSqr().redISub(s).cmpn(0)}}class e9{r;s;static fromDER(e,t){let i=(e,t)=>{let i=e[t.place++];if((128&i)==0)return i;throw Error("Invalid DER entity length")};e=eO(e,t);let r=new class{place;constructor(){this.place=0}};if(48!==e[r.place++])throw Error("Signature DER must start with 0x30");if(i(e,r)+r.place!==e.length||2!==e[r.place++])throw Error("Signature DER invalid");let s=i(e,r),n=e.slice(r.place,s+r.place);if(r.place+=s,2!==e[r.place++])throw Error("Signature DER invalid");let a=i(e,r);if(e.length!==a+r.place)throw Error("Invalid R-length in signature DER");let o=e.slice(r.place,a+r.place);if(0===n[0])if((128&n[1])!=0)n=n.slice(1);else throw Error("Invalid R-value in signature DER");if(0===o[0])if((128&o[1])!=0)o=o.slice(1);else throw Error("Invalid S-value in signature DER");return new e9(new l(n),new l(o))}static fromCompact(e,t){if(65!==(e=eO(e,t)).length)throw Error("Invalid Compact Signature");let i=e[0];if(i<27||i>=35)throw Error("Invalid Compact Byte");return new e9(new l(e.slice(1,33)),new l(e.slice(33,65)))}constructor(e,t){this.r=e,this.s=t}verify(e,t,i){return ta(new l(D(e,i),16),this,t)}toString(e){return this.toDER(e)}toDER(e){let t=(e,t)=>{if(t<128)e.push(t);else throw Error("len must be < 0x80")},i=e=>{let 
t=0,i=e.length-1;for(;0===e[t]&&(128&e[t+1])==0&&t3)throw Error("Invalid recovery param");if("boolean"!=typeof t)throw Error("Invalid compressed param");let r=27+e;t&&(r+=4);let s=[r];return(s=(s=s.concat(this.r.toArray("be",32))).concat(this.s.toArray("be",32)),"hex"===i)?ex(s):"base64"===i?eC(s):s}RecoverPublicKey(e,t){let i=this.r,r=this.s,s=new e5,n=s.n,a=s.g,o=0!=e>>1?i.add(n):i,c=e4.fromX(o,(1&e)!=0);if(!c.mul(n).isInfinity())throw Error("nR is not at infinity");let h=t.neg().umod(n),l=i.invm(n),d=l.mul(r).umod(n),f=l.mul(h).umod(n),u=new to(a.mul(f).add(c.mul(d)));return u.validate(),u}CalculateRecoveryFactor(e,t){for(let i=0;i<4;i++){let r;try{r=this.RecoverPublicKey(i,t)}catch{continue}if(e.eq(r))return i}throw Error("Unable to find valid recovery factor")}}class e7{K;V;constructor(e,t){if(e=eO(e,"hex"),t=eO(t,"hex"),e.length<32)throw Error("Not enough entropy. Minimum is 256 bits");let i=e.concat(t);this.K=Array(32),this.V=Array(32);for(let e=0;e<32;e++)this.K[e]=0,this.V[e]=1;this.update(i)}hmac(){return new B(this.K)}update(e){let t=this.hmac().update(this.V).update([0]);void 0!==e&&(t=t.update(e)),this.K=t.digest(),this.V=this.hmac().update(this.V).digest(),void 0!==e&&(this.K=this.hmac().update(this.V).update([1]).update(e).digest(),this.V=this.hmac().update(this.V).digest())}generate(e){let t=[];for(;t.length0&&e.iushrn(r),null===t&&e.cmp(i.n)>=0)?e.sub(i.n):e}let tt=new e5,ti=tt.n.byteLength(),tr=tt.n.subn(1),ts=0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141n>>1n,tn=(e,t,i=!1,r)=>{let s=BigInt("0x"+(e=te(e)).toString(16)),n=BigInt("0x"+t.toString(16)),a=new e7(t.toArray("be",ti),e.toArray("be",ti));for(let e=0;;e++){let t="function"==typeof r?r(e):l.isBN(r)?r:new l(a.generate(ti),16);if(null==t)throw Error("k is undefined");if(0>=(t=te(t,!0)).cmpn(1)||t.cmp(tr)>=0){if(l.isBN(r))throw Error("Invalid fixed custom K value (must be >1 and ts&&(b=0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141n-b),new e9(new 
l(f.toString(16),16),new l(b.toString(16),16))}},ta=(e,t,i)=>{let r=BigInt("0x"+e.toString(16));if(null==i.x||null==i.y)throw Error("Invalid public key: missing coordinates.");let s={x:BigInt("0x"+i.x.toString(16)),y:BigInt("0x"+i.y.toString(16))},{r:n,s:a}={r:BigInt("0x"+t.r.toString(16)),s:BigInt("0x"+t.s.toString(16))};if(n<=0n||n>=0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141n||a<=0n||a>=0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141n)return!1;let o=e8(a);if(0n===o)return!1;let c=e6(r,o),h=e6(n,o),l=e0(e2(c,{x:eY,y:eJ}),e2(h,s));if(0n===l.Z)return!1;let d=eG(l.Z),f=eW(d,d);return e3(eW(l.X,f))===n};class to extends e4{static fromPrivateKey(e){let t=new e5().g.mul(e);return new to(t.x,t.y)}static fromString(e){let t=e4.fromString(e);return new to(t.x,t.y)}static fromDER(e){let t=e4.fromDER(e);return new to(t.x,t.y)}constructor(e,t=null,i=!0){if(e instanceof e4)super(e.getX(),e.getY());else{if(null===t&&i&&"string"==typeof e&&(66===e.length||130===e.length))throw Error('You are using the "new PublicKey()" constructor with a DER hex string. 
You need to use "PublicKey.fromString()" instead.');super(e,t,i)}}deriveSharedSecret(e){if(!this.validate())throw Error("Public key not valid for ECDH secret derivation");return this.mul(e)}verify(e,t,i){return ta(new l(D(e,i),16),t,this)}toDER(e){return"hex"===e?this.encode(!0,e):this.encode(!0)}toHash(e){let t=F(this.encode(!0));return"hex"===e?ex(t):t}toAddress(e=[0]){if("string"==typeof e)if("testnet"===e||"test"===e)e=[111];else if("mainnet"===e||"main"===e)e=[0];else throw Error(`Invalid prefix ${e}`);return eD(this.toHash(),e)}deriveChild(e,t,i,r){let s;if("function"==typeof r){let t=r(e,this);void 0!==t?s=t:(s=this.deriveSharedSecret(e),"function"==typeof i&&i(e,this,s))}else s=this.deriveSharedSecret(e);let n=eO(t,"utf8"),a=V(s.encode(!0),n),o=new e5().g.mul(new l(a)),c=this.add(o);return new to(c.x,c.y)}static fromMsgHashAndCompactSignature(e,t,i){let r=eO(t,i);if(65!==r.length)throw Error("Invalid Compact Signature");let s=r[0];if(s<27||s>=35)throw Error("Invalid Compact Byte");let n=r[0]-27;return n>3&&(n-=4),new e9(new l(r.slice(1,33)),new l(r.slice(33,65))).RecoverPublicKey(n,e)}}class tc{_rand;constructor(){let e=()=>{throw Error("No secure random number generator is available in this environment.")};if(this._rand=e,"object"==typeof self)self.crypto?.getRandomValues?this._rand=e=>{let t=new Uint8Array(e);return self.crypto.getRandomValues(t),[...t]}:this._rand=e;else try{let e=require("crypto");"function"==typeof e.randomBytes&&(this._rand=t=>[...e.randomBytes(t)])}catch{this._rand=e}}generate(e){return this._rand(e)}}let th=null,tl=e=>(null==th&&(th=new tc),th.generate(e));class td{x;y;constructor(e,t){let i=new e5().p;this.x=e.umod(i),this.y=t.umod(i)}toString(){return eL(this.x.toArray())+"."+eL(this.y.toArray())}static fromString(e){let[t,i]=e.split(".");return new td(new l(eB(t)),new l(eB(i)))}}class tf{points;threshold;constructor(e,t){this.points=e,this.threshold=t??e.length}static fromPrivateKey(e,t){let i=new e5().p,r=[new td(new l(0),new 
l(e.toArray()))];for(let e=1;e{let s=e.split(".");if(4!==s.length)throw Error("Invalid share format in share "+r.toString()+'. Expected format: "x.y.t.i" - received '+e);let[n,a,o,c]=s;if(void 0===o)throw Error("Threshold not found in share "+r.toString());if(void 0===c)throw Error("Integrity not found in share "+r.toString());let h=parseInt(o);if(0!==r&&t!==h)throw Error("Threshold mismatch in share "+r.toString());if(0!==r&&i!==c)throw Error("Integrity mismatch in share "+r.toString());return t=h,i=c,td.fromString([n,a].join("."))}),t,i)}toBackupFormat(){return this.points.map(e=>e.toString()+"."+this.threshold.toString()+"."+this.integrity)}}class tp extends l{static fromRandom(){return new tp(tl(32))}static fromString(e,t="hex"){return new tp(super.fromString(e,t).toArray())}static fromHex(e){return new tp(super.fromHex(e,"big"))}static fromWif(e,t=1){let i=eM(e,void 0,t);if(33!==i.data.length)throw Error("Invalid WIF length");if(1!==i.data[32])throw Error("Invalid WIF padding");return new tp(i.data.slice(0,32))}constructor(e=0,t=10,i="be",r="apply"){if(e instanceof l?(super(),e.copy(this)):super(e,t,i),"nocheck"!==r){let e=this.checkInField();if(!e.inField){if("error"===r)throw Error("Input is out of field");l.move(this,e.modN)}}}checkInField(){let e=new e5,t=this.mod(e.n);return{inField:0===this.cmp(t),modN:t}}isValid(){return this.checkInField().inField}sign(e,t,i=!0,r){return tn(new l(D(e,t),16),this,i,r)}verify(e,t,i){return ta(new l(D(e,i),16),t,this.toPublicKey())}toPublicKey(){let e=new e5().g.mul(this);return new to(e.x,e.y)}toWif(e=[128]){if(!this.isValid())throw Error("Value is out of field");return eD([...this.toArray("be",32),1],e)}toAddress(e=[0]){return this.toPublicKey().toAddress(e)}toHex(){return super.toHex(32)}toString(e="hex",t=64){return super.toString(e,t)}deriveSharedSecret(e){if(!e.validate())throw Error("Public key not valid for ECDH secret derivation");return e.mul(this)}deriveChild(e,t,i,r){let s;if("function"==typeof r){let 
t=r(this,e);void 0!==t?s=t:(s=this.deriveSharedSecret(e),"function"==typeof i&&i(this,e,s))}else s=this.deriveSharedSecret(e);let n=eO(t,"utf8"),a=V(s.encode(!0),n),o=new e5;return new tp(this.add(new l(a)).mod(o.n).toArray())}toKeyShares(e,t){if("number"!=typeof e||"number"!=typeof t)throw Error("threshold and totalShares must be numbers");if(e<2)throw Error("threshold must be at least 2");if(t<2)throw Error("totalShares must be at least 2");if(e>t)throw Error("threshold should be less than or equal to totalShares");let i=tf.fromPrivateKey(this,e),r=[],s=new Set,n=new e5,a=tl(64);for(let e=0;e5)throw Error("Failed to generate unique x coordinate after 5 attempts");while(t.isZero()||s.has(t.toString()));s.add(t.toString());let c=i.valueAt(t);r.push(new td(t,c))}return new tu(r,e,this.toPublicKey().toHash("hex").slice(0,8))}toBackupShares(e,t){return this.toKeyShares(e,t).toBackupFormat()}static fromBackupShares(e){return tp.fromKeyShares(tu.fromBackupFormat(e))}static fromKeyShares(e){let{points:t,threshold:i,integrity:r}=e;if(i<2)throw Error("threshold must be at least 2");if(t.lengthnew Uint8Array(e)),ty=new Uint8Array(256),tm=new Uint8Array(256);for(let e=0;e<256;e++){let t=(e<<1^27*((128&e)!=0))&255;ty[e]=t,tm[e]=t^e}function tw(e,t,i){for(let r=0;r<4;r++){let s=t[i+r];for(let t=0;t<4;t++)e[t][r]^=s[t]}}function tI(e){for(let t=0;t<4;t++)e[t]=tb[e[t]]}function tk(e,t){let i,r,s,n,a=[[],[],[],[]],o=[],c=Array.from(t);if(c.length<=16){for(;c.length<16;)c.unshift(0);n=11}else if(c.length<=24){for(;c.length<24;)c.unshift(0);n=13}else if(t.length<=32){for(;c.length<32;)c.unshift(0);n=15}else throw Error("Illegal key length: "+String(t.length));let h=function(e,t){let i=t.length/4,r=[];for(let e=0;e>2].push(t[e]);for(let t=i;t<4*e;t++){r[t]=[];let e=r[t-1].slice();if(t%i==0){let r=e[0];e[0]=e[1],e[1]=e[2],e[2]=e[3],e[3]=r,tI(e);let s=tg[t/i];for(let t=0;t<4;t++)e[t]^=s[t]}else i>6&&t%i==4&&tI(e);for(let s=0;s<4;s++)r[t][s]=r[t-i][s]^e[s]}return r}(n,c);for(let 
t=0;t<4;t++)a[0][t]=e[4*t],a[1][t]=e[4*t+1],a[2][t]=e[4*t+2],a[3][t]=e[4*t+3];for(tw(a,h,0),s=1;s>>24,(0xff0000&e)>>16,(65280&e)>>8,255&e]},tS=function(e){return Array(e).fill(0)},t_=[225].concat(tS(15)),tE=function(e,t){for(let i=0;i>1,0!==r&&(e[t]=128|e[t]);return e},tO=function(e,t){let i=t.slice(),r=tS(16);for(let t=0;t<16;t++)for(let s=7;s>=0;s--)(e[t]&1<{let e=new eF;for(let t of i){if(void 0===t.sourceTXID){if(null==t.sourceTransaction)throw Error("Missing sourceTransaction for input");e.write(t.sourceTransaction.hash())}else e.writeReverse(eO(t.sourceTXID,"hex"));e.writeUInt32LE(t.sourceOutputIndex)}return M(e.toArray())})()),(e.scope&tC.SIGHASH_ANYONECANPAY)==0&&(31&e.scope)!==tC.SIGHASH_SINGLE&&(31&e.scope)!==tC.SIGHASH_NONE&&(n=(()=>{let e=new eF;for(let t of i){let i=t.sequence??0xffffffff;e.writeUInt32LE(i)}return M(e.toArray())})()),(31&e.scope)!==tC.SIGHASH_SINGLE&&(31&e.scope)!==tC.SIGHASH_NONE?a=r():(31&e.scope)===tC.SIGHASH_SINGLE&&e.inputIndex>>0),o.toArray()}static fromChecksigFormat(e){if(0===e.length){let e=new l(1);return new tC(e,new l(1),1)}let t=e[e.length-1],i=e.slice(0,e.length-1),r=e9.fromDER(i);return new tC(r.r,r.s,t)}constructor(e,t,i){super(e,t),this.scope=i}hasLowS(){return!(this.s.ltn(1)||this.s.gt(new l("7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF5D576E7357A4501DDFE92F46681B20A0","hex")))}toChecksigFormat(){return[...this.toDER(),this.scope]}}class tR{curve;constructor(){this.curve=new e5}generateProof(e,t,i,r){let s=tp.fromRandom(),n=s.toPublicKey(),a=i.mul(s),o=this.computeChallenge(t,i,r,a,n);return{R:n,SPrime:a,z:s.add(o.mul(e)).umod(this.curve.n)}}verifyProof(e,t,i,r){let{R:s,SPrime:n,z:a}=r,o=this.computeChallenge(e,t,i,n,s),c=this.curve.g.mul(a),h=s.add(e.mul(o));if(!c.eq(h))return!1;let l=t.mul(a),d=n.add(i.mul(o));return!!l.eq(d)}computeChallenge(e,t,i,r,s){return new l(D([...e.encode(!0),...t.encode(!0),...i.encode(!0),...r.encode(!0),...s.encode(!0)])).umod(this.curve.n)}}let 
tB={OP_FALSE:0,OP_0:0,OP_PUSHDATA1:76,OP_PUSHDATA2:77,OP_PUSHDATA4:78,OP_1NEGATE:79,OP_RESERVED:80,OP_TRUE:81,OP_1:81,OP_2:82,OP_3:83,OP_4:84,OP_5:85,OP_6:86,OP_7:87,OP_8:88,OP_9:89,OP_10:90,OP_11:91,OP_12:92,OP_13:93,OP_14:94,OP_15:95,OP_16:96,OP_NOP:97,OP_VER:98,OP_IF:99,OP_NOTIF:100,OP_VERIF:101,OP_VERNOTIF:102,OP_ELSE:103,OP_ENDIF:104,OP_VERIFY:105,OP_RETURN:106,OP_TOALTSTACK:107,OP_FROMALTSTACK:108,OP_2DROP:109,OP_2DUP:110,OP_3DUP:111,OP_2OVER:112,OP_2ROT:113,OP_2SWAP:114,OP_IFDUP:115,OP_DEPTH:116,OP_DROP:117,OP_DUP:118,OP_NIP:119,OP_OVER:120,OP_PICK:121,OP_ROLL:122,OP_ROT:123,OP_SWAP:124,OP_TUCK:125,OP_CAT:126,OP_SUBSTR:127,OP_SPLIT:127,OP_LEFT:128,OP_NUM2BIN:128,OP_RIGHT:129,OP_BIN2NUM:129,OP_SIZE:130,OP_INVERT:131,OP_AND:132,OP_OR:133,OP_XOR:134,OP_EQUAL:135,OP_EQUALVERIFY:136,OP_RESERVED1:137,OP_RESERVED2:138,OP_1ADD:139,OP_1SUB:140,OP_2MUL:141,OP_2DIV:142,OP_NEGATE:143,OP_ABS:144,OP_NOT:145,OP_0NOTEQUAL:146,OP_ADD:147,OP_SUB:148,OP_MUL:149,OP_DIV:150,OP_MOD:151,OP_LSHIFT:152,OP_RSHIFT:153,OP_BOOLAND:154,OP_BOOLOR:155,OP_NUMEQUAL:156,OP_NUMEQUALVERIFY:157,OP_NUMNOTEQUAL:158,OP_LESSTHAN:159,OP_GREATERTHAN:160,OP_LESSTHANOREQUAL:161,OP_GREATERTHANOREQUAL:162,OP_MIN:163,OP_MAX:164,OP_WITHIN:165,OP_RIPEMD160:166,OP_SHA1:167,OP_SHA256:168,OP_HASH160:169,OP_HASH256:170,OP_CODESEPARATOR:171,OP_CHECKSIG:172,OP_CHECKSIGVERIFY:173,OP_CHECKMULTISIG:174,OP_CHECKMULTISIGVERIFY:175,OP_NOP1:176,OP_NOP2:177,OP_NOP3:178,OP_NOP4:179,OP_NOP5:180,OP_NOP6:181,OP_NOP7:182,OP_NOP8:183,OP_NOP9:184,OP_NOP10:185,OP_NOP11:186,OP_NOP12:187,OP_NOP13:188,OP_NOP14:189,OP_NOP15:190,OP_NOP16:191,OP_NOP17:192,OP_NOP18:193,OP_NOP19:194,OP_NOP20:195,OP_NOP21:196,OP_NOP22:197,OP_NOP23:198,OP_NOP24:199,OP_NOP25:200,OP_NOP26:201,OP_NOP27:202,OP_NOP28:203,OP_NOP29:204,OP_NOP30:205,OP_NOP31:206,OP_NOP32:207,OP_NOP33:208,OP_NOP34:209,OP_NOP35:210,OP_NOP36:211,OP_NOP37:212,OP_NOP38:213,OP_NOP39:214,OP_NOP40:215,OP_NOP41:216,OP_NOP42:217,OP_NOP43:218,OP_NOP44:219,OP_NOP45:220,OP_NOP46:221,OP_NOP47:22
2,OP_NOP48:223,OP_NOP49:224,OP_NOP50:225,OP_NOP51:226,OP_NOP52:227,OP_NOP53:228,OP_NOP54:229,OP_NOP55:230,OP_NOP56:231,OP_NOP57:232,OP_NOP58:233,OP_NOP59:234,OP_NOP60:235,OP_NOP61:236,OP_NOP62:237,OP_NOP63:238,OP_NOP64:239,OP_NOP65:240,OP_NOP66:241,OP_NOP67:242,OP_NOP68:243,OP_NOP69:244,OP_NOP70:245,OP_NOP71:246,OP_NOP72:247,OP_NOP73:248,OP_NOP77:252,OP_SMALLDATA:249,OP_SMALLINTEGER:250,OP_PUBKEYS:251,OP_PUBKEYHASH:253,OP_PUBKEY:254,OP_INVALIDOPCODE:255};for(let e in tB)tB[tB[e]]=e;class tL{chunks;static fromASM(e){let t=[],i=e.split(" "),r=0;for(;r=0&&a0&&e=0&&0>=e.cmpn(16))this.chunks.push({op:e.toNumber()+tB.OP_1-1});else{let t=e.toSm("little");this.writeBin(t)}return this}writeBin(e){let t;if(e.length>0&&e.lengthtB.OP_16)return!1;return!0}isLockingScript(){throw Error("Not implemented")}isUnlockingScript(){throw Error("Not implemented")}_chunkToString(e){let t=e.op,i="";if(void 0===e.data){let e=tB[t];i=`${i} ${e}`}else i=`${i} ${ex(e.data)}`;return i}}class tD extends tL{isLockingScript(){return!0}isUnlockingScript(){return!1}}class tM extends tL{isLockingScript(){return!1}isUnlockingScript(){return!0}}class tF extends Error{txid;outputIndex;context;programCounter;stackState;altStackState;ifStackState;stackMem;altStackMem;constructor(e){let t=e.stackState.map(e=>null!=e&&void 0!==e.length?ex(e):null==e?"null/undef":"INVALID_STACK_ITEM").join(", "),i=e.altStackState.map(e=>null!=e&&void 0!==e.length?ex(e):null==e?"null/undef":"INVALID_STACK_ITEM").join(", "),r=`Context: ${e.context}, PC: ${e.programCounter}`,s=`Stack: [${t}] (len: ${e.stackState.length}, mem: ${e.stackMem})`,n=`AltStack: [${i}] (len: ${e.altStackState.length}, mem: ${e.altStackMem})`;super(`Script evaluation error: ${e.message} -TXID: ${e.txid}, OutputIdx: ${e.outputIndex} -${r} -${s} -${n} -IfStack: [${e.ifStackState.join(", 
")}]`),this.name=this.constructor.name,this.txid=e.txid,this.outputIndex=e.outputIndex,this.context=e.context,this.programCounter=e.programCounter,this.stackState=e.stackState.map(e=>e.slice()),this.altStackState=e.altStackState.map(e=>e.slice()),this.ifStackState=e.ifStackState.slice(),this.stackMem=e.stackMem,this.altStackMem=e.altStackMem}}let tV=Object.freeze(new l(-1).toScriptNum()),tH=Object.freeze(Array.from({length:17},(e,t)=>Object.freeze(new l(t).toScriptNum())));class tq{sourceTXID;sourceOutputIndex;sourceSatoshis;lockingScript;transactionVersion;otherInputs;outputs;inputIndex;unlockingScript;inputSequence;lockTime;context;programCounter;lastCodeSeparator;stack;altStack;ifStack;memoryLimit;stackMem;altStackMem;constructor(e){this.sourceTXID=e.sourceTXID,this.sourceOutputIndex=e.sourceOutputIndex,this.sourceSatoshis=e.sourceSatoshis,this.lockingScript=e.lockingScript,this.transactionVersion=e.transactionVersion,this.otherInputs=e.otherInputs,this.outputs=e.outputs,this.inputIndex=e.inputIndex,this.unlockingScript=e.unlockingScript,this.inputSequence=e.inputSequence,this.lockTime=e.lockTime,this.memoryLimit=e.memoryLimit??32e6,this.stack=[],this.altStack=[],this.ifStack=[],this.stackMem=0,this.altStackMem=0,this.reset()}reset(){this.context="UnlockingScript",this.programCounter=0,this.lastCodeSeparator=null,this.stack=[],this.altStack=[],this.ifStack=[],this.stackMem=0,this.altStackMem=0}ensureStackMem(e){this.stackMem+e>this.memoryLimit&&this.scriptEvaluationError("Stack memory usage has exceeded "+String(this.memoryLimit)+" bytes")}ensureAltStackMem(e){this.altStackMem+e>this.memoryLimit&&this.scriptEvaluationError("Alt stack memory usage has exceeded "+String(this.memoryLimit)+" bytes")}pushStack(e){this.ensureStackMem(e.length),this.stack.push(e),this.stackMem+=e.length}pushStackCopy(e){this.ensureStackMem(e.length);let t=e.slice();this.stack.push(t),this.stackMem+=t.length}popStack(){0===this.stack.length&&this.scriptEvaluationError("Attempted to pop 
from an empty stack.");let e=this.stack.pop();return this.stackMem-=e.length,e}stackTop(e=-1){return(0===this.stack.length||this.stack.length=0&&e>=this.stack.length)&&this.scriptEvaluationError(`Stack underflow accessing element at index ${e}. Stack length is ${this.stack.length}.`),this.stack[this.stack.length+e]}pushAltStack(e){this.ensureAltStackMem(e.length),this.altStack.push(e),this.altStackMem+=e.length}popAltStack(){0===this.altStack.length&&this.scriptEvaluationError("Attempted to pop from an empty alt stack.");let e=this.altStack.pop();return this.altStackMem-=e.length,e}checkSignatureEncoding(e){if(0===e.length)return!0;if(!function(e){if(e.length<9||e.length>73||48!==e[0]||e[1]!==e.length-3)return!1;let t=e[2],i=e[3];if(2!==t||0===i||5+i>=e.length)return!1;let r=4+i,s=e[r],n=e[r+1];if(2!==s||0===n||(128&e[4])!=0||i>1&&0===e[4]&&(128&e[5])==0)return!1;let a=r+2;return(128&e[a])==0&&(!(n>1)||0!==e[a]||(128&e[a+1])!=0)&&i+n+7===e.length}(e))return this.scriptEvaluationError("The signature format is invalid."),!1;try{let t=tC.fromChecksigFormat(e);if(!t.hasLowS())return this.scriptEvaluationError("The signature must have a low S value."),!1;if((t.scope&tC.SIGHASH_FORKID)==0)return this.scriptEvaluationError("The signature must use SIGHASH_FORKID."),!1}catch(e){return this.scriptEvaluationError("The signature format is invalid."),!1}return!0}checkPublicKeyEncoding(e){if(0===e.length)return this.scriptEvaluationError("Public key is empty."),!1;if(e.length<33)return this.scriptEvaluationError("The public key is too short, it must be at least 33 bytes."),!1;if(4===e[0]){if(65!==e.length)return this.scriptEvaluationError("The non-compressed public key must be 65 bytes."),!1}else if(2!==e[0]&&3!==e[0])return this.scriptEvaluationError("The public key is in an unknown format."),!1;else if(33!==e.length)return this.scriptEvaluationError("The compressed public key must be 33 bytes."),!1;try{to.fromDER(e)}catch(e){return this.scriptEvaluationError("The public key is 
in an unknown format."),!1}return!0}verifySignature(e,t,i){return ta(new l(M(tC.format({sourceTXID:this.sourceTXID,sourceOutputIndex:this.sourceOutputIndex,sourceSatoshis:this.sourceSatoshis,transactionVersion:this.transactionVersion,otherInputs:this.otherInputs,outputs:this.outputs,inputIndex:this.inputIndex,subscript:i,inputSequence:this.inputSequence,lockTime:this.lockTime,scope:e.scope}))),e,t)}step(){if(this.stackMem>this.memoryLimit)return this.scriptEvaluationError("Stack memory usage has exceeded "+String(this.memoryLimit)+" bytes"),!1;if(this.altStackMem>this.memoryLimit)return this.scriptEvaluationError("Alt stack memory usage has exceeded "+String(this.memoryLimit)+" bytes"),!1;"UnlockingScript"===this.context&&this.programCounter>=this.unlockingScript.chunks.length&&(this.context="LockingScript",this.programCounter=0);let e="UnlockingScript"===this.context?this.unlockingScript:this.lockingScript;if(this.programCounter>=e.chunks.length)return!1;let t=e.chunks[this.programCounter],i=t.op;void 0===i&&this.scriptEvaluationError(`Missing opcode in ${this.context} at pc=${this.programCounter}.`),Array.isArray(t.data)&&t.data.length>0x40000000&&this.scriptEvaluationError(`Data push > 1073741824 bytes (pc=${this.programCounter}).`);let r=!this.ifStack.includes(!1);if(r&&(i===tB.OP_2MUL||i===tB.OP_2DIV||i===tB.OP_VERIF||i===tB.OP_VERNOTIF||i===tB.OP_VER)&&this.scriptEvaluationError(`This opcode is currently disabled. (Opcode: ${tB[i]}, PC: ${this.programCounter})`),r&&i>=0&&i<=tB.OP_PUSHDATA4)!function(e){let t=e.data,i=e.op;return!Array.isArray(t)||(0===t.length?i===tB.OP_0:1===t.length&&t[0]>=1&&t[0]<=16?i===tB.OP_1+(t[0]-1):1===t.length&&129===t[0]?i===tB.OP_1NEGATE:t.length<=75?i===t.length:t.length<=255?i===tB.OP_PUSHDATA1:!(t.length<=65535)||i===tB.OP_PUSHDATA2)}(t)&&this.scriptEvaluationError(`This data is not minimally-encoded. 
(PC: ${this.programCounter})`),this.pushStack(Array.isArray(t.data)?t.data:[]);else if(r||i>=tB.OP_IF&&i<=tB.OP_ENDIF){let e,t,s,n,a,o,c,h,d,f,u,p,b,g,y,m,w,I,k,v,S,_,E,x,O,P;switch(i){case tB.OP_1NEGATE:this.pushStackCopy(tV);break;case tB.OP_0:this.pushStackCopy(tH[0]);break;case tB.OP_1:case tB.OP_2:case tB.OP_3:case tB.OP_4:case tB.OP_5:case tB.OP_6:case tB.OP_7:case tB.OP_8:case tB.OP_9:case tB.OP_10:case tB.OP_11:case tB.OP_12:case tB.OP_13:case tB.OP_14:case tB.OP_15:case tB.OP_16:p=i-(tB.OP_1-1),this.pushStackCopy(tH[p]);break;case tB.OP_NOP:case tB.OP_NOP2:case tB.OP_NOP3:case tB.OP_NOP1:case tB.OP_NOP4:case tB.OP_NOP5:case tB.OP_NOP6:case tB.OP_NOP7:case tB.OP_NOP8:case tB.OP_NOP9:case tB.OP_NOP10:case tB.OP_NOP11:case tB.OP_NOP12:case tB.OP_NOP13:case tB.OP_NOP14:case tB.OP_NOP15:case tB.OP_NOP16:case tB.OP_NOP17:case tB.OP_NOP18:case tB.OP_NOP19:case tB.OP_NOP20:case tB.OP_NOP21:case tB.OP_NOP22:case tB.OP_NOP23:case tB.OP_NOP24:case tB.OP_NOP25:case tB.OP_NOP26:case tB.OP_NOP27:case tB.OP_NOP28:case tB.OP_NOP29:case tB.OP_NOP30:case tB.OP_NOP31:case tB.OP_NOP32:case tB.OP_NOP33:case tB.OP_NOP34:case tB.OP_NOP35:case tB.OP_NOP36:case tB.OP_NOP37:case tB.OP_NOP38:case tB.OP_NOP39:case tB.OP_NOP40:case tB.OP_NOP41:case tB.OP_NOP42:case tB.OP_NOP43:case tB.OP_NOP44:case tB.OP_NOP45:case tB.OP_NOP46:case tB.OP_NOP47:case tB.OP_NOP48:case tB.OP_NOP49:case tB.OP_NOP50:case tB.OP_NOP51:case tB.OP_NOP52:case tB.OP_NOP53:case tB.OP_NOP54:case tB.OP_NOP55:case tB.OP_NOP56:case tB.OP_NOP57:case tB.OP_NOP58:case tB.OP_NOP59:case tB.OP_NOP60:case tB.OP_NOP61:case tB.OP_NOP62:case tB.OP_NOP63:case tB.OP_NOP64:case tB.OP_NOP65:case tB.OP_NOP66:case tB.OP_NOP67:case tB.OP_NOP68:case tB.OP_NOP69:case tB.OP_NOP70:case tB.OP_NOP71:case tB.OP_NOP72:case tB.OP_NOP73:case tB.OP_NOP77:break;case tB.OP_IF:case tB.OP_NOTIF:g=!1,r&&(this.stack.length<1&&this.scriptEvaluationError("OP_IF and OP_NOTIF require at least one item on the stack when they are 
used!"),e=this.popStack(),g=this.castToBool(e),i===tB.OP_NOTIF&&(g=!g)),this.ifStack.push(g);break;case tB.OP_ELSE:0===this.ifStack.length&&this.scriptEvaluationError("OP_ELSE requires a preceeding OP_IF."),this.ifStack[this.ifStack.length-1]=!this.ifStack[this.ifStack.length-1];break;case tB.OP_ENDIF:0===this.ifStack.length&&this.scriptEvaluationError("OP_ENDIF requires a preceeding OP_IF."),this.ifStack.pop();break;case tB.OP_VERIFY:this.stack.length<1&&this.scriptEvaluationError("OP_VERIFY requires at least one item to be on the stack."),t=this.stackTop(),(g=this.castToBool(t))||this.scriptEvaluationError("OP_VERIFY requires the top stack value to be truthy."),this.popStack();break;case tB.OP_RETURN:"UnlockingScript"===this.context?this.programCounter=this.unlockingScript.chunks.length:this.programCounter=this.lockingScript.chunks.length,this.ifStack=[],this.programCounter--;break;case tB.OP_TOALTSTACK:this.stack.length<1&&this.scriptEvaluationError("OP_TOALTSTACK requires at oeast one item to be on the stack."),this.pushAltStack(this.popStack());break;case tB.OP_FROMALTSTACK:this.altStack.length<1&&this.scriptEvaluationError("OP_FROMALTSTACK requires at least one item to be on the stack."),this.pushStack(this.popAltStack());break;case tB.OP_2DROP:this.stack.length<2&&this.scriptEvaluationError("OP_2DROP requires at least two items to be on the stack."),this.popStack(),this.popStack();break;case tB.OP_2DUP:this.stack.length<2&&this.scriptEvaluationError("OP_2DUP requires at least two items to be on the stack."),t=this.stackTop(-2),s=this.stackTop(-1),this.pushStackCopy(t),this.pushStackCopy(s);break;case tB.OP_3DUP:this.stack.length<3&&this.scriptEvaluationError("OP_3DUP requires at least three items to be on the stack."),t=this.stackTop(-3),s=this.stackTop(-2),n=this.stackTop(-1),this.pushStackCopy(t),this.pushStackCopy(s),this.pushStackCopy(n);break;case tB.OP_2OVER:this.stack.length<4&&this.scriptEvaluationError("OP_2OVER requires at least four items to be on 
the stack."),t=this.stackTop(-4),s=this.stackTop(-3),this.pushStackCopy(t),this.pushStackCopy(s);break;case tB.OP_2ROT:{this.stack.length<6&&this.scriptEvaluationError("OP_2ROT requires at least six items to be on the stack.");let e=this.popStack(),t=this.popStack(),i=this.popStack(),r=this.popStack(),s=this.popStack(),n=this.popStack();this.pushStack(r),this.pushStack(i),this.pushStack(t),this.pushStack(e),this.pushStack(n),this.pushStack(s);break}case tB.OP_2SWAP:{this.stack.length<4&&this.scriptEvaluationError("OP_2SWAP requires at least four items to be on the stack.");let e=this.popStack(),t=this.popStack(),i=this.popStack(),r=this.popStack();this.pushStack(t),this.pushStack(e),this.pushStack(r),this.pushStack(i);break}case tB.OP_IFDUP:this.stack.length<1&&this.scriptEvaluationError("OP_IFDUP requires at least one item to be on the stack."),t=this.stackTop(),this.castToBool(t)&&this.pushStackCopy(t);break;case tB.OP_DEPTH:this.pushStack(new l(this.stack.length).toScriptNum());break;case tB.OP_DROP:this.stack.length<1&&this.scriptEvaluationError("OP_DROP requires at least one item to be on the stack."),this.popStack();break;case tB.OP_DUP:this.stack.length<1&&this.scriptEvaluationError("OP_DUP requires at least one item to be on the stack."),this.pushStackCopy(this.stackTop());break;case tB.OP_NIP:this.stack.length<2&&this.scriptEvaluationError("OP_NIP requires at least two items to be on the stack."),s=this.popStack(),this.popStack(),this.pushStack(s);break;case tB.OP_OVER:this.stack.length<2&&this.scriptEvaluationError("OP_OVER requires at least two items to be on the stack."),this.pushStackCopy(this.stackTop(-2));break;case tB.OP_PICK:case tB.OP_ROLL:{this.stack.length<2&&this.scriptEvaluationError(`${tB[i]} requires at least two items to be on the stack.`),((p=(h=l.fromScriptNum(this.popStack(),!0)).toNumber())<0||p>=this.stack.length)&&this.scriptEvaluationError(`${tB[i]} requires the top stack element to be 0 or a positive number less than the current 
size of the stack.`);let e=this.stack[this.stack.length-1-p];i===tB.OP_ROLL?(this.stack.splice(this.stack.length-1-p,1),this.stackMem-=e.length,this.pushStack(e)):this.pushStackCopy(e);break}case tB.OP_ROT:this.stack.length<3&&this.scriptEvaluationError("OP_ROT requires at least three items to be on the stack."),c=this.popStack(),o=this.popStack(),a=this.popStack(),this.pushStack(o),this.pushStack(c),this.pushStack(a);break;case tB.OP_SWAP:this.stack.length<2&&this.scriptEvaluationError("OP_SWAP requires at least two items to be on the stack."),o=this.popStack(),a=this.popStack(),this.pushStack(o),this.pushStack(a);break;case tB.OP_TUCK:this.stack.length<2&&this.scriptEvaluationError("OP_TUCK requires at least two items to be on the stack."),t=this.stackTop(-1),this.ensureStackMem(t.length),this.stack.splice(this.stack.length-2,0,t.slice()),this.stackMem+=t.length;break;case tB.OP_SIZE:this.stack.length<1&&this.scriptEvaluationError("OP_SIZE requires at least one item to be on the stack."),this.pushStack(new l(this.stackTop().length).toScriptNum());break;case tB.OP_AND:case tB.OP_OR:case tB.OP_XOR:{this.stack.length<2&&this.scriptEvaluationError(`${tB[i]} requires at least two items on the stack.`),s=this.popStack(),(t=this.popStack()).length!==s.length&&this.scriptEvaluationError(`${tB[i]} requires the top two stack items to be the same size.`);let e=Array(t.length);for(let r=0;rd.cmp(f)));break;case tB.OP_GREATERTHAN:r=new l(+(d.cmp(f)>0));break;case tB.OP_LESSTHANOREQUAL:r=new l(+(0>=d.cmp(f)));break;case tB.OP_GREATERTHANOREQUAL:r=new l(+(d.cmp(f)>=0));break;case tB.OP_MIN:r=0>d.cmp(f)?d:f;break;case tB.OP_MAX:r=d.cmp(f)>0?d:f}this.pushStack(r.toScriptNum()),i===tB.OP_NUMEQUALVERIFY&&(this.castToBool(this.stackTop())||this.scriptEvaluationError("OP_NUMEQUALVERIFY requires the top stack item to be truthy."),this.popStack());break}case tB.OP_WITHIN:this.stack.length<3&&this.scriptEvaluationError("OP_WITHIN requires at least three items to be on the 
stack."),u=l.fromScriptNum(this.popStack(),!0),f=l.fromScriptNum(this.popStack(),!0),g=(d=l.fromScriptNum(this.popStack(),!0)).cmp(f)>=0&&0>d.cmp(u),this.pushStack(g?[1]:[]);break;case tB.OP_RIPEMD160:case tB.OP_SHA1:case tB.OP_SHA256:case tB.OP_HASH160:case tB.OP_HASH256:{this.stack.length<1&&this.scriptEvaluationError(`${tB[i]} requires at least one item to be on the stack.`),e=this.popStack();let t=[];if(i===tB.OP_RIPEMD160){let i,r;i=e,t=new T().update(i,r).digest()}else if(i===tB.OP_SHA1){let i,r;i=e,t=new R().update(i,r).digest()}else i===tB.OP_SHA256?t=D(e):i===tB.OP_HASH160?t=F(e):i===tB.OP_HASH256&&(t=M(e));this.pushStack(t);break}case tB.OP_CODESEPARATOR:this.lastCodeSeparator=this.programCounter;break;case tB.OP_CHECKSIG:case tB.OP_CHECKSIGVERIFY:if(this.stack.length<2&&this.scriptEvaluationError(`${tB[i]} requires at least two items to be on the stack.`),I=this.popStack(),w=this.popStack(),this.checkSignatureEncoding(w)&&this.checkPublicKeyEncoding(I)||this.scriptEvaluationError(`${tB[i]} requires correct encoding for the public key and signature.`),(m=new tL(("UnlockingScript"===this.context?this.unlockingScript:this.lockingScript).chunks.slice(null===this.lastCodeSeparator?0:this.lastCodeSeparator+1))).findAndDelete(new tL().writeBin(w)),y=!1,w.length>0)try{k=tC.fromChecksigFormat(w),v=to.fromDER(I),y=this.verifySignature(k,v,m)}catch(e){y=!1}this.pushStack(y?[1]:[]),i===tB.OP_CHECKSIGVERIFY&&(y||this.scriptEvaluationError("OP_CHECKSIGVERIFY requires that a valid signature is provided."),this.popStack());break;case tB.OP_CHECKMULTISIG:case tB.OP_CHECKMULTISIGVERIFY:{S=1,this.stack.length0x7fffffff)&&this.scriptEvaluationError(`${tB[i]} requires a key count between 0 and 2147483647.`),_=++S,S+=x,this.stack.lengthx)&&this.scriptEvaluationError(`${tB[i]} requires the number of signatures to be no greater than the number of 
keys.`),E=++S,S+=O,this.stack.length0;){if(0===x){y=!1;break}if(w=this.stackTop(-E),I=this.stackTop(-_),this.checkSignatureEncoding(w)&&this.checkPublicKeyEncoding(I)||this.scriptEvaluationError(`${tB[i]} requires correct encoding for the public key and signature.`),P=!1,w.length>0)try{k=tC.fromChecksigFormat(w),v=to.fromDER(I),P=this.verifySignature(k,v,m)}catch(e){P=!1}P&&(E++,O--),_++,O>--x&&(y=!1)}let e=1+l.fromScriptNum(this.stackTop(-1),!1).toNumber()+1+l.fromScriptNum(this.stackTop(-(1+l.fromScriptNum(this.stackTop(-1),!1).toNumber()+1)),!1).toNumber()+1-1;for(;e>0;)this.popStack(),e--;this.stack.length<1&&this.scriptEvaluationError(`${tB[i]} requires an extra item (dummy) to be on the stack.`),this.popStack().length>0&&this.scriptEvaluationError(`${tB[i]} requires the extra stack item (dummy) to be empty.`),this.pushStack(y?[1]:[]),i===tB.OP_CHECKMULTISIGVERIFY&&(y||this.scriptEvaluationError("OP_CHECKMULTISIGVERIFY requires that a sufficient number of valid signatures are provided."),this.popStack());break}case tB.OP_CAT:{this.stack.length<2&&this.scriptEvaluationError("OP_CAT requires at least two items to be on the stack."),s=this.popStack();let e=(t=this.popStack()).concat(s);e.length>0x40000000&&this.scriptEvaluationError("It's not currently possible to push data larger than 1073741824 bytes."),this.pushStack(e);break}case tB.OP_SPLIT:{this.stack.length<2&&this.scriptEvaluationError("OP_SPLIT requires at least two items to be on the stack.");let e=this.popStack(),t=this.popStack();((p=l.fromScriptNum(e,!0).toNumber())<0||p>t.length)&&this.scriptEvaluationError("OP_SPLIT requires the first stack item to be a non-negative number less than or equal to the size of the second-from-top stack item."),this.pushStack(t.slice(0,p)),this.pushStack(t.slice(p));break}case tB.OP_NUM2BIN:{this.stack.length<2&&this.scriptEvaluationError("OP_NUM2BIN requires at least two items to be on the 
stack."),((b=l.fromScriptNum(this.popStack(),!0).toNumber())>0x40000000||b<0)&&this.scriptEvaluationError("It's not currently possible to push data larger than 1073741824 bytes or negative size.");let e=this.popStack();if((e=eH(e)).length>b&&this.scriptEvaluationError("OP_NUM2BIN requires that the size expressed in the top stack item is large enough to hold the value expressed in the second-from-top stack item."),e.length===b){this.pushStack(e);break}let t=Array(b).fill(0),i=0;e.length>0&&(i=128&e[e.length-1],e[e.length-1]&=127);for(let i=0;it)&&(!(e.length>0)||(127&e[e.length-1])!=0||!(e.length<=1)&&(128&e[e.length-2])!=0)}(e)&&this.scriptEvaluationError("OP_BIN2NUM requires that the resulting number is valid."),this.pushStack(e);break}default:this.scriptEvaluationError(`Invalid opcode ${i} (pc=${this.programCounter}).`)}}return this.programCounter++,!0}validate(){for(this.unlockingScript.isPushOnly()||this.scriptEvaluationError("Unlocking scripts can only contain push operations, and no other opcodes.");this.step()&&("LockingScript"!==this.context||!(this.programCounter>=this.lockingScript.chunks.length)););return this.ifStack.length>0&&this.scriptEvaluationError("Every OP_IF, OP_NOTIF, or OP_ELSE must be terminated with OP_ENDIF prior to the end of the script."),1!==this.stack.length&&this.scriptEvaluationError(`The clean stack rule requires exactly one item to be on the stack after script execution, found ${this.stack.length}.`),0===this.stack.length?this.scriptEvaluationError("The top stack element must be truthy after script evaluation (stack is empty)."):this.castToBool(this.stackTop())||this.scriptEvaluationError("The top stack element must be truthy after script evaluation."),!0}castToBool(e){if(0===e.length)return!1;for(let t=0;t{let o=tC.SIGHASH_FORKID;"all"===t&&(o|=tC.SIGHASH_ALL),"none"===t&&(o|=tC.SIGHASH_NONE),"single"===t&&(o|=tC.SIGHASH_SINGLE),i&&(o|=tC.SIGHASH_ANYONECANPAY);let 
c=n.inputs[a],h=n.inputs.filter((e,t)=>t!==a),l=c.sourceTXID??c.sourceTransaction?.id("hex");if(null==l||void 0===l||""===l)throw Error("The input sourceTXID or sourceTransaction is required for transaction signing.");if(null==(r||=c.sourceTransaction?.outputs[c.sourceOutputIndex].satoshis)||void 0===r)throw Error("The sourceSatoshis or input sourceTransaction is required for transaction signing.");if(null==(s||=c.sourceTransaction?.outputs[c.sourceOutputIndex].lockingScript))throw Error("The lockingScript or input sourceTransaction is required for transaction signing.");let d=tC.format({sourceTXID:l,sourceOutputIndex:tU(c.sourceOutputIndex),sourceSatoshis:r,transactionVersion:n.version,otherInputs:h,inputIndex:a,outputs:n.outputs,inputSequence:tU(c.sequence),subscript:s,lockTime:n.lockTime,scope:o}),f=e.sign(D(d)),u=new tC(f.r,f.s,o).toChecksigFormat(),p=e.toPublicKey().encode(!0);return new tM([{op:u.length,data:u},{op:p.length,data:p}])},estimateLength:async()=>108}}}function t$(e){if(null==e)throw Error("must have value");return e}let tj=e=>0===e.length||1===e.length&&0===e[0]?{op:0}:1===e.length&&e[0]>0&&e[0]<=16?{op:80+e[0]}:1===e.length&&129===e[0]?{op:79}:e.length<=75?{op:e.length,data:e}:e.length<=255?{op:76,data:e}:e.length<=65535?{op:77,data:e}:{op:78,data:e};class tz{wallet;originator;static decode(e){let t=to.fromString(ex(t$(e.chunks[0].data))),i=[];for(let t=2;t=80&&e.chunks[t].op<=95?s=[e.chunks[t].op-80]:0===e.chunks[t].op?s=[0]:79===e.chunks[t].op&&(s=[129])),i.push(s),r===tB.OP_DROP||r===tB.OP_2DROP)break}return{fields:i,lockingPublicKey:t}}constructor(e,t){this.wallet=e,this.originator=t}async lock(e,t,i,r,s=!1,n=!0,a="before"){let{publicKey:o}=await this.wallet.getPublicKey({protocolID:t,keyID:i,counterparty:r,forSelf:s},this.originator),c=[],h=[];if(c.push({op:o.length/2,data:eO(o,"hex")}),c.push({op:tB.OP_CHECKSIG}),n){let s=e.reduce((e,t)=>[...e,...t],[]),{signature:n}=await 
this.wallet.createSignature({data:s,protocolID:t,keyID:i,counterparty:r},this.originator);e.push(n)}for(let t of e)h.push(tj(t));let l=e.length;for(;l>1;)h.push({op:tB.OP_2DROP}),l-=2;return new tD((0!==l&&h.push({op:tB.OP_DROP}),"before"===a)?[...c,...h]:[...h,...c])}unlock(e,t,i,r="all",s=!1,n,a){return{sign:async(o,c)=>{let h=tC.SIGHASH_FORKID;"all"===r&&(h|=tC.SIGHASH_ALL),"none"===r&&(h|=tC.SIGHASH_NONE),"single"===r&&(h|=tC.SIGHASH_SINGLE),s&&(h|=tC.SIGHASH_ANYONECANPAY);let l=o.inputs[c],d=o.inputs.filter((e,t)=>t!==c),f=l.sourceTXID??l.sourceTransaction?.id("hex");if(null==f||void 0===f)throw Error("The input sourceTXID or sourceTransaction is required for transaction signing.");if(null==(n||=l.sourceTransaction?.outputs[l.sourceOutputIndex].satoshis)||void 0===n)throw Error("The sourceSatoshis or input sourceTransaction is required for transaction signing.");if(null==(a||=l.sourceTransaction?.outputs[l.sourceOutputIndex].lockingScript))throw Error("The lockingScript or input sourceTransaction is required for transaction signing.");let u=D(tC.format({sourceTXID:f,sourceOutputIndex:t$(l.sourceOutputIndex),sourceSatoshis:n,transactionVersion:o.version,otherInputs:d,inputIndex:c,outputs:o.outputs,inputSequence:l.sequence??0xffffffff,subscript:a,lockTime:o.lockTime,scope:h})),{signature:p}=await this.wallet.createSignature({data:u,protocolID:e,keyID:t,counterparty:i},this.originator),b=e9.fromDER([...p]),g=new tC(b.r,b.s,h).toChecksigFormat();return new tM([{op:g.length,data:g}])},estimateLength:async()=>73}}}class tW{value;constructor(e){this.value=e}async computeFee(e){let t,i=e=>e>0x100000000?9:e>65536?5:e>253?3:1;t=4+i(e.inputs.length);for(let r=0;r{let s=this.https.request(e,t,e=>{let t="";e.on("data",e=>{t+=e}),e.on("end",()=>{let r=e.statusCode>=200&&e.statusCode<=299,s=e.headers["content-type"],n=""!==t&&"string"==typeof 
s&&s.startsWith("application/json")?JSON.parse(t):t;i({status:e.statusCode,statusText:e.statusMessage,ok:r,data:n})})});s.on("error",e=>{r(e)}),null!==t.data&&void 0!==t.data&&s.write(JSON.stringify(t.data)),s.end()})}}class tX{fetch;constructor(e){this.fetch=e}async request(e,t){let i={method:t.method,headers:t.headers,body:JSON.stringify(t.data)},r=await this.fetch(e,i),s=r.headers.get("Content-Type"),n=s?.startsWith("application/json")?await r.json():await r.text();return{ok:r.ok,status:r.status,statusText:r.statusText,data:n}}}function tY(){let e={async request(){throw Error("No method available to perform HTTP request")}};if("undefined"!=typeof window&&"function"==typeof window.fetch)return new tX(window.fetch.bind(window));if("undefined"==typeof require)return e;try{let e=require("https");return new tG(e)}catch(t){return e}}function tJ(){return`ts-sdk-${ex(tl(16))}`}class tZ{URL;apiKey;deploymentId;callbackUrl;callbackToken;headers;httpClient;constructor(e,t){if(this.URL=e,"string"==typeof t)this.apiKey=t,this.httpClient=tY(),this.deploymentId=tJ(),this.callbackToken=void 0,this.callbackUrl=void 0;else{let{apiKey:e,deploymentId:i,httpClient:r,callbackToken:s,callbackUrl:n,headers:a}=t??{};this.apiKey=e,this.httpClient=r??tY(),this.deploymentId=i??tJ(),this.callbackToken=s,this.callbackUrl=n,this.headers=a}}requestHeaders(){let e={"Content-Type":"application/json","XDeployment-ID":this.deploymentId};if(null!=this.apiKey&&""!==this.apiKey&&(e.Authorization=`Bearer ${this.apiKey}`),null!=this.callbackUrl&&""!==this.callbackUrl&&(e["X-CallbackUrl"]=this.callbackUrl),null!=this.callbackToken&&""!==this.callbackToken&&(e["X-CallbackToken"]=this.callbackToken),null!=this.headers)for(let t in this.headers)e[t]=this.headers[t];return e}async broadcast(e){let t;try{t=e.toHexEF()}catch(i){if("All inputs must have source transactions when serializing to EF format"===i.message)t=e.toHex();else throw i}let 
i={method:"POST",headers:this.requestHeaders(),data:{rawTx:t}};try{let e=await this.httpClient.request(`${this.URL}/v1/tx`,i);if(e.ok){let{txid:t,extraInfo:i,txStatus:r,competingTxs:s}=e.data,n={status:"success",txid:t,message:`${r} ${i}`};return null!=s&&(n.competingTxs=s),n}{let t=typeof e.status,i={status:"error",code:"number"===t||"string"===t?e.status.toString():"ERR_UNKNOWN",description:"Unknown error"},r=e.data;if("string"==typeof r)try{r=JSON.parse(e.data)}catch{}return"object"==typeof r&&(null!==r&&(i.more=r),null!=r&&"string"==typeof r.txid&&(i.txid=r.txid),null!=r&&"detail"in r&&"string"==typeof r.detail&&(i.description=r.detail)),i}}catch(e){return{status:"error",code:"500",description:"string"==typeof e.message?e.message:"Internal Server Error"}}}async broadcastMany(e){let t=e.map(e=>{try{return{rawTx:e.toHexEF()}}catch(t){if("All inputs must have source transactions when serializing to EF format"===t.message)return{rawTx:e.toHex()};throw t}}),i={method:"POST",headers:this.requestHeaders(),data:t};try{return(await this.httpClient.request(`${this.URL}/v1/txs`,i)).data}catch(i){let t={status:"error",code:"500",description:"string"==typeof i.message?i.message:"Internal Server Error"};return e.map(()=>t)}}}function tQ(e=!1,t={}){return new tZ(e?"https://testnet.arc.gorillapool.io":"https://arc.gorillapool.io",t)}class t0{network;apiKey;URL;httpClient;constructor(e="main",t={}){let{apiKey:i,httpClient:r}=t;this.network=e,this.URL=`https://api.whatsonchain.com/v1/bsv/${e}`,this.httpClient=r??tY(),this.apiKey=i??""}async isValidRootForHeight(e,t){let i={method:"GET",headers:this.getHttpHeaders()},r=await this.httpClient.request(`${this.URL}/block/${t}/header`,i);if(r.ok){let{merkleroot:t}=r.data;return t===e}if(404===r.status)return!1;throw Error(`Failed to verify merkleroot for height ${t} because of an error: ${JSON.stringify(r.data)} `)}async currentHeight(){try{let e={method:"GET",headers:this.getHttpHeaders()},t=await 
this.httpClient.request(`${this.URL}/block/headers`,e);if(t.ok)return t.data[0].height;throw Error(`Failed to get current height because of an error: ${JSON.stringify(t.data)} `)}catch(e){throw Error(`Failed to get current height because of an error: ${e instanceof Error?e.message:String(e)}`)}}getHttpHeaders(){let e={Accept:"application/json"};return"string"==typeof this.apiKey&&""!==this.apiKey.trim()&&(e.Authorization=this.apiKey),e}}class t1{blockHeight;path;static fromHex(e){return t1.fromBinary(eO(e,"hex"))}static fromReader(e,t=!0){let i,r,s,n=e.readVarIntNum(),a=e.readUInt8(),o=Array(a).fill(null).map(()=>[]);for(let t=0;t0;){r=e.readVarIntNum(),i=e.readUInt8();let n={offset:r};(1&i)!=0?n.duplicate=!0:((2&i)!=0&&(n.txid=!0),n.hash=ex(e.read(32).reverse())),Array.isArray(o[t])&&0!==o[t].length||(o[t]=[]),o[t].push(n),s--}o[t].sort((e,t)=>e.offset-t.offset)}return new t1(n,o,t)}static fromBinary(e){let t=new eV(e);return t1.fromReader(t)}static fromCoinbaseTxidAndHeight(e,t){return new t1(t,[[{offset:0,hash:e,txid:!0}]])}constructor(e,t,i=!0){let r;this.blockHeight=e,this.path=t;let s=Array(this.path.length).fill(0).map(()=>new Set);this.path.forEach((e,t)=>{if(0===e.length&&0===t)throw Error(`Empty level at height: ${t}`);let r=new Set;e.forEach(e=>{if(r.has(e.offset))throw Error(`Duplicate offset: ${e.offset}, at height: ${t}`);if(r.add(e.offset),0===t){if(!0!==e.duplicate)for(let t=1;t>t^1)}else if(i&&!s[t].has(e.offset))throw Error(`Invalid offset: ${e.offset}, at height: ${t}, with legal offsets: ${Array.from(s[t]).join(", ")}`)})}),this.path[0].forEach((e,t)=>{if(0===t&&(r=this.computeRoot(e.hash)),r!==this.computeRoot(e.hash))throw Error("Mismatched roots")})}toBinary(){let e=new eF;e.writeVarIntNum(this.blockHeight);let t=this.path.length;e.writeUInt8(t);for(let i=0;it.hash===e);if(null==t)throw Error(`Transaction ID ${e} not found in the Merkle Path`);return t.offset}computeRoot(e){if("string"!=typeof e){let 
t=this.path[0].find(e=>!!e?.hash);if(null==t)throw Error("No valid leaf found in the Merkle Path");e=t.hash}if("string"!=typeof e)throw Error("Transaction ID is undefined");let t=this.indexOf(e);if("number"!=typeof t)throw Error(`This proof does not contain the txid: ${e??"undefined"}`);let i=e=>ex(M(eO(e,"hex").reverse()).reverse()),r=e;if(1===this.path.length&&1===this.path[0].length)return r;for(let e=0;e>e^1,n=this.findOrComputeLeaf(e,s);if("object"!=typeof n)throw Error(`Missing hash for index ${t} at height ${e}`);r=i(!0===n.duplicate?(r??"")+(r??""):s%2!=0?(n.hash??"")+(r??""):(r??"")+(n.hash??""))}return r}findOrComputeLeaf(e,t){let i=e=>ex(M(eO(e,"hex").reverse()).reverse()),r=this.path[e].find(e=>e.offset===t);if(null!=r)return r;if(0===e)return;let s=e-1,n=t<<1,a=this.findOrComputeLeaf(s,n);if(null==a||null==a.hash||""===a.hash)return;let o=this.findOrComputeLeaf(s,n+1);if(null!=o)return{offset:t,hash:i(!0===o.duplicate?a.hash+a.hash:(o.hash??"")+(a.hash??""))}}async verify(e,t){let i=this.computeRoot(e);if(0===this.indexOf(e)){let e=await t.currentHeight();if(this.blockHeight+100t.offset===e.path[i][r].offset))t[i].push(e.path[i][r]);else if(e.path[i][r]?.txid!==void 0&&e.path[i][r]?.txid!==null){let s=t[i].find(t=>t.offset===e.path[i][r].offset);null!=s&&(s.txid=!0)}}this.path=t,this.trim()}trim(){let e=(e,t)=>{(0===t.length||t.slice(-1)[0]!==e)&&t.push(e)},t=(e,t)=>{for(let i=e.length;i>=0;i--){let r=this.path[t].findIndex(t=>t.offset===e[i]);r>=0&&this.path[t].splice(r,1)}},i=t=>{let i=[];for(let r of t)e(r>>1,i);return i},r=[],s=[];for(let e=0;ee.offset-t.offset);for(let t=0;t>1,r);else{let r=i.offset%2==1,n=this.path[0][t+(r?-1:1)];void 0!==n.txid&&null!==n.txid&&n.txid||e(n.offset,s)}}t(s,0);for(let e=1;e{e.writeUInt8(t)},s=()=>{if(null==this._txid)throw Error("Transaction ID (_txid) is undefined");e.writeReverse(eO(this._txid,"hex"))},n=()=>{if(null!=this._rawTx)e.write(this._rawTx);else if(null!=this._tx)e.write(this._tx.toBinary());else throw 
Error("a valid serialized Transaction is expected")},a=()=>{void 0===this.bumpIndex?i(r.RAWTX):(i(r.RAWTX_AND_BUMP_INDEX),e.writeVarIntNum(this.bumpIndex))};t===t6?this.isTxidOnly?(i(r.TXID_ONLY),s()):(void 0!==this.bumpIndex?(i(r.RAWTX_AND_BUMP_INDEX),e.writeVarIntNum(this.bumpIndex)):i(r.RAWTX),n()):(n(),a())}static fromReader(e,t){let i,s,n;if(t===t6){let t=e.readUInt8();t===r.TXID_ONLY?n=t2.fromTxid(ex(e.readReverse(32))):(t===r.RAWTX_AND_BUMP_INDEX&&(s=e.readVarIntNum()),i=t4.fromReader(e),n=t2.fromTx(i,s))}else i=t4.fromReader(e),s=0!==e.readUInt8()?e.readVarIntNum():void 0,n=t2.fromTx(i,s);return n}}function t3(e){if(null==e)throw Error("Expected a valid value, but got undefined.");return e}let t6=0xefbe0002;!function(e){e[e.RAWTX=0]="RAWTX",e[e.RAWTX_AND_BUMP_INDEX=1]="RAWTX_AND_BUMP_INDEX",e[e.TXID_ONLY=2]="TXID_ONLY"}(r||(r={}));class t8{bumps=[];txs=[];version=t6;atomicTxid=void 0;constructor(e=t6){this.version=e}findTxid(e){return this.txs.find(t=>t.txid===e)}makeTxidOnly(e){let t=this.txs.findIndex(t=>t.txid===e);if(-1===t)return;let i=this.txs[t];return i.isTxidOnly?i:(this.txs.splice(t,1),i=this.mergeTxidOnly(e))}findBump(e){return this.bumps.find(t=>t.path[0].some(t=>t.hash===e))}findTransactionForSigning(e){let t=this.findTxid(e);if(null!=t&&null!=t.tx){for(let e of t.tx.inputs)if(null==e.sourceTransaction){let t=this.findTxid(t3(e.sourceTXID));null!=t&&(e.sourceTransaction=t.tx)}return t.tx}}findAtomicTransaction(e){let t=this.findTxid(e);if(null==t||null==t.tx)return;let i=(e,t)=>{let r=e.findBump(t.id("hex"));if(null!=r)t.merklePath=r;else for(let r of t.inputs){if(null==r.sourceTransaction){let t=e.findTxid(t3(r.sourceTXID));null!=t&&(r.sourceTransaction=t.tx)}if(null!=r.sourceTransaction){let t=e.findBump(r.sourceTransaction.id("hex"));null!=t?r.sourceTransaction.merklePath=t:i(e,r.sourceTransaction)}}};return i(this,t.tx),t.tx}mergeBump(e){let t;for(let i=0;it.txid===e);t>=0&&this.txs.splice(t,1)}mergeTxidOnly(e){let 
t=this.txs.find(t=>t.txid===e);return null==t&&(t=new t2(e),this.txs.push(t),this.tryToValidateBumpIndex(t)),t}mergeBeefTx(e){let t=this.findTxid(e.txid);if(e.isTxidOnly&&null==t?t=this.mergeTxidOnly(e.txid):null!=e._tx&&(null==t||t.isTxidOnly)?t=this.mergeTransaction(e._tx):null!=e._rawTx&&(null==t||t.isTxidOnly)&&(t=this.mergeRawTx(e._rawTx)),null==t)throw Error(`Failed to merge BeefTx for txid: ${e.txid}`);return t}mergeBeef(e){let t=Array.isArray(e)?t8.fromBinary(e):e;for(let e of t.bumps)this.mergeBump(e);for(let e of t.txs)this.mergeBeefTx(e)}isValid(e){return this.verifyValid(e).valid}async verify(e,t){let i=this.verifyValid(t);if(!i.valid)return!1;for(let t of Object.keys(i.roots))if(!await e.isValidRootForHeight(i.roots[t],Number(t)))return!1;return!0}verifyValid(e){let t={valid:!1,roots:{}},i=this.sortTxs();if(i.missingInputs.length>0||i.notValid.length>0||i.txidOnly.length>0&&!0!==e||i.withMissingInputs.length>0)return t;let r={};for(let i of this.txs)if(i.isTxidOnly){if(!0!==e)return t;r[i.txid]=!0}let s=(e,i)=>{let r=e.computeRoot(i);return(void 0===t.roots[e.blockHeight]||""===t.roots[e.blockHeight])&&(t.roots[e.blockHeight]=r),t.roots[e.blockHeight]===r};for(let e of this.bumps)for(let i of e.path[0])if(!0===i.txid&&"string"==typeof i.hash&&i.hash.length>0&&(r[i.hash]=!0,!s(e,i.hash)))return t;for(let e of this.txs)if(void 0!==e.bumpIndex&&null==this.bumps[e.bumpIndex].path[0].find(t=>t.hash===e.txid))return t;for(let e of this.txs){for(let i of e.inputTxids)if(!r[i])return t;r[e.txid]=!0}return t.valid=!0,t}toWriter(e){for(let t of(e.writeUInt32LE(this.version),e.writeVarIntNum(this.bumps.length),this.bumps))e.write(t.toBinary());for(let t of(e.writeVarIntNum(this.txs.length),this.txs))t.toWriter(e,this.version)}toBinary(){this.sortTxs();let e=new eF;return this.toWriter(e),e.toArray()}toBinaryAtomic(e){this.sortTxs();let t=this.findTxid(e);if(null==t)throw Error(`${e} does not exist in this Beef`);let 
i=this.txs[this.txs.length-1]===t?this:this.clone();if(i!==this){let t=this.txs.findIndex(t=>t.txid===e);i.txs.splice(t+1)}let r=new eF;return r.writeUInt32LE(0x1010101),r.writeReverse(eO(e,"hex")),i.toWriter(r),r.toArray()}toHex(){return ex(this.toBinary())}static fromReader(e){let t,i=e.readUInt32LE();if(0x1010101===i&&(t=ex(e.readReverse(32)),i=e.readUInt32LE()),0xefbe0001!==i&&i!==t6)throw Error(`Serialized BEEF must start with 4022206465 or ${t6} but starts with ${i}`);let r=new t8(i),s=e.readVarIntNum();for(let t=0;te.hash===t);if(r>=0)return e.bumpIndex=i,this.bumps[i].path[0][r].txid=!0,!0}return!1}sortTxs(){let e={},t={},i=[],r=[],s=[];for(let n of this.txs)t[n.txid]=n,n.isValid=n.hasProof,n.isValid?(e[n.txid]=!0,r.push(n)):n.isTxidOnly&&0===n.inputTxids.length?(e[n.txid]=!0,s.push(n)):i.push(n);let n={},a=[],o=i;for(let e of(i=[],o)){let r=!1;for(let i of e.inputTxids)void 0===t[i]&&(n[i]=!0,r=!0);r?a.push(e):i.push(e)}for(;i.length>0;){let t=i;for(let s of(i=[],t))s.inputTxids.every(t=>e[t])?(e[s.txid]=!0,r.push(s)):i.push(s);if(t.length===i.length)break}let c=i;return this.txs=a.concat(c).concat(s).concat(r),{missingInputs:Object.keys(n),notValid:c.map(e=>e.txid),valid:Object.keys(e),withMissingInputs:a.map(e=>e.txid),txidOnly:s.map(e=>e.txid)}}clone(){let e=new t8;return e.version=this.version,e.bumps=Array.from(this.bumps),e.txs=Array.from(this.txs),e}trimKnownTxids(e){for(let t=0;t!0===e.txid).map(e=>` '${e.hash??""}'`).join(",\n")} - ] -`;for(let i of(t=-1,this.txs))t++,e+=` TX ${t} - txid: ${i.txid} -`,void 0!==i.bumpIndex&&(e+=` bumpIndex: ${i.bumpIndex} -`),i.isTxidOnly?e+=" txidOnly\n":e+=` rawTx length=${i.rawTx?.length??0} -`,i.inputTxids.length>0&&(e+=` inputs: [ -${i.inputTxids.map(e=>` '${e}'`).join(",\n")} - ] -`);return e}addComputedLeaves(){let e=e=>ex(M(eO(e,"hex").reverse()).reverse());for(let t of this.bumps)for(let i=1;ie.offset===r.offset+1),n=r.offset>>1;void 0!==s&&"string"==typeof 
s.hash&&t.path[i].every(e=>e.offset!==n)&&t.path[i].push({offset:n,hash:e(s.hash+r.hash)})}}}class t4{version;inputs;outputs;lockTime;metadata;merklePath;cachedHash;static addPathOrInputs(e,t,i){if("number"==typeof e.pathIndex){let t=i[e.pathIndex];if("object"!=typeof t)throw Error("Invalid merkle path index found in BEEF!");e.tx.merklePath=t}else for(let r of e.tx.inputs){if(void 0===r.sourceTXID)throw Error("Input sourceTXID is undefined");let e=t[r.sourceTXID];if("object"!=typeof e)throw Error(`Reference to unknown TXID in BEEF: ${r.sourceTXID??"undefined"}`);r.sourceTransaction=e.tx,this.addPathOrInputs(e,t,i)}}static fromBEEF(e,t){let{tx:i}=t4.fromAnyBeef(e,t);return i}static fromAtomicBEEF(e){let{tx:t,txid:i,beef:r}=t4.fromAnyBeef(e);if(i!==r.atomicTxid)if(null!=r.atomicTxid)throw Error(`Transaction with TXID ${r.atomicTxid} not found in BEEF data.`);else throw Error("beef must conform to BRC-95 and must contain the subject txid.");return t}static fromAnyBeef(e,t){let i=t8.fromBinary(e);if(i.txs.length<1)throw Error("beef must include at least one transaction.");let r=t??i.atomicTxid??i.txs.slice(-1)[0].txid,s=i.findAtomicTransaction(r);if(null==s)if(null!=t)throw Error(`Transaction with TXID ${r} not found in BEEF data.`);else throw Error("beef does not contain transaction for atomic txid.");return{tx:s,beef:i,txid:r}}static fromEF(e){let t=new eV(e),i=t.readUInt32LE();if("0000000000ef"!==ex(t.read(6)))throw Error("Invalid EF marker");let r=t.readVarIntNum(),s=[];for(let e=0;et}}let i=await e.computeFee(this),r=this.calculateChange(i);if(r<=0){this.outputs=this.outputs.filter(e=>!0!==e.change);return}this.distributeChange(r,t)}calculateChange(e){let t=0;for(let e of this.inputs){if("object"!=typeof e.sourceTransaction)throw Error("Source transactions are required for all inputs during fee computation");t+=e.sourceTransaction.outputs[e.sourceOutputIndex].satoshis??0}for(let i of(t-=e,this.outputs))!0!==i.change&&void 0!==i.satoshis&&(t-=i.satoshis);return 
t}distributeChange(e,t){let i=0,r=this.outputs.filter(e=>e.change);if("random"===t?i=this.distributeRandomChange(e,r):"equal"===t&&(i=this.distributeEqualChange(e,r)),i"object"==typeof this.inputs[t].unlockingScriptTemplate?await this.inputs[t]?.unlockingScriptTemplate?.sign(this,t):await Promise.resolve(void 0)));for(let t=0,i=this.inputs.length;t0;){let n=s.shift(),a=n?.id("hex")??"";if(null!=a&&""!==a&&r.has(a))continue;if("object"==typeof n?.merklePath){if("scripts only"===e){null!=a&&r.add(a);continue}else if(await n.merklePath.verify(a,e)){r.add(a);continue}}if(void 0!==t){if(void 0===n)throw Error("Transaction is undefined");let e=t4.fromEF(n.toEF());if(delete e.outputs[0].satoshis,e.outputs[0].change=!0,await e.fee(t),n.getFee()i!==e);if(void 0===t.sourceTXID&&(t.sourceTXID=h),!new tq({sourceTXID:t.sourceTXID,sourceOutputIndex:t.sourceOutputIndex,lockingScript:c.lockingScript,sourceSatoshis:c.satoshis??0,transactionVersion:n.version,otherInputs:l,unlockingScript:t.unlockingScript,inputSequence:t.sequence??0,inputIndex:e,outputs:n.outputs,lockTime:n.lockTime,memoryLimit:i}).validate())return!1}let c=0;for(let e of n.outputs){if("number"!=typeof e.satoshis)throw Error("Every output must have a defined amount during transaction verification.");c+=e.satoshis}if(c>o)return!1;r.add(a)}return!0}toBEEF(e){let t=new eF;t.writeUInt32LE(0xefbe0001);let i=[],r=[],s=t=>{let n={tx:t},a="object"==typeof t.merklePath;if(a){let e=!1;for(let r=0;re.tx.id("hex")===t.id("hex"))||r.unshift(n),!a)for(let i=0;i2)throw Error("Protocol security level must be 0, 1, or 2");let r=e[1].toLowerCase().trim();if(t.length>800)throw Error("Key IDs must be 800 characters or less");if(t.length<1)throw Error("Key IDs must be 1 character or more");if(r.length>400)if(r.startsWith("specific linkage revelation ")){if(r.length>430)throw Error("Specific linkage revelation protocol names must be 430 characters or less")}else throw Error("Protocol names must be 400 characters or 
less");if(r.length<5)throw Error("Protocol names must be 5 characters or more");if(r.includes(" "))throw Error('Protocol names cannot contain multiple consecutive spaces (" ")');if(!/^[a-z0-9 ]+$/g.test(r))throw Error("Protocol names can only contain letters, numbers and spaces");if(r.endsWith(" protocol"))throw Error('No need to end your protocol name with " protocol"');return`${i}-${r}-${t}`}}class t9{keyDeriver;cache;maxCacheSize;rootKey;identityKey;constructor(e,t){"anyone"===e?this.rootKey=new tp(1):this.rootKey=e,this.keyDeriver=new t5(this.rootKey,(e,t,i)=>{this.cacheSet(`${e.toString()}-${t.toString()}`,i)},(e,t)=>this.cacheGet(`${e.toString()}-${t.toString()}`)),this.identityKey=this.rootKey.toPublicKey().toString(),this.cache=new Map;let i=t?.maxCacheSize;this.maxCacheSize=null!=i&&!isNaN(i)&&i>0?i:1e3}derivePublicKey(e,t,i,r=!1){let s=this.generateCacheKey("derivePublicKey",e,t,i,r);if(this.cache.has(s)){let e=this.cacheGet(s);if(void 0===e)throw Error("Cached value is undefined");return e}{let n=this.keyDeriver.derivePublicKey(e,t,i,r);return this.cacheSet(s,n),n}}derivePrivateKey(e,t,i){let r=this.generateCacheKey("derivePrivateKey",e,t,i);if(this.cache.has(r)){let e=this.cacheGet(r);if(void 0===e)throw Error("Cached value is undefined");return e}{let s=this.keyDeriver.derivePrivateKey(e,t,i);return this.cacheSet(r,s),s}}deriveSymmetricKey(e,t,i){let r=this.generateCacheKey("deriveSymmetricKey",e,t,i);if(this.cache.has(r)){let e=this.cacheGet(r);if(void 0===e)throw Error("Cached value is undefined");return e}{let s=this.keyDeriver.deriveSymmetricKey(e,t,i);return this.cacheSet(r,s),s}}revealCounterpartySecret(e){let t=this.generateCacheKey("revealCounterpartySecret",e);if(this.cache.has(t)){let e=this.cacheGet(t);if(void 0===e)throw Error("Cached value is undefined");return e}{let i=this.keyDeriver.revealCounterpartySecret(e);return this.cacheSet(t,i),i}}revealSpecificSecret(e,t,i){let 
r=this.generateCacheKey("revealSpecificSecret",e,t,i);if(this.cache.has(r)){let e=this.cacheGet(r);if(void 0===e)throw Error("Cached value is undefined");return e}{let s=this.keyDeriver.revealSpecificSecret(e,t,i);return this.cacheSet(r,s),s}}generateCacheKey(e,...t){let i=t.map(e=>this.serializeArgument(e)).join("|");return`${e}|${i}`}serializeArgument(e){return e instanceof to||e instanceof tp?e.toString():Array.isArray(e)?e.map(e=>this.serializeArgument(e)).join(","):"object"==typeof e&&null!==e?JSON.stringify(e):String(e)}cacheGet(e){let t=this.cache.get(e);return this.cache.delete(e),void 0!==t&&this.cache.set(e,t),t}cacheSet(e,t){if(this.cache.size>=this.maxCacheSize){let e=this.cache.keys().next().value;this.cache.delete(e)}this.cache.set(e,t)}}let t7=class{keyDeriver;constructor(e){"string"!=typeof e.identityKey&&(e=new t9(e)),this.keyDeriver=e}async getPublicKey(e){if(e.identityKey){if(null==this.keyDeriver)throw Error("keyDeriver is undefined");return{publicKey:this.keyDeriver.rootKey.toPublicKey().toString()}}if(null==e.protocolID||null==e.keyID||""===e.keyID)throw Error("protocolID and keyID are required if identityKey is false or undefined.");return{publicKey:(this.keyDeriver??(()=>{throw Error("keyDeriver is undefined")})()).derivePublicKey(e.protocolID,e.keyID,e.counterparty??"self",e.forSelf).toString()}}async revealCounterpartyKeyLinkage(e){let{publicKey:t}=await this.getPublicKey({identityKey:!0});if(null==this.keyDeriver)throw Error("keyDeriver is undefined");let i=this.keyDeriver.revealCounterpartySecret(e.counterparty),r=new tR().generateProof(this.keyDeriver.rootKey,this.keyDeriver.rootKey.toPublicKey(),to.fromString(e.counterparty),e4.fromDER(i)),s=[...r.R.encode(!0),...r.SPrime.encode(!0),...r.z.toArray()],n=new Date().toISOString(),{ciphertext:a}=await this.encrypt({plaintext:i,protocolID:[2,"counterparty linkage revelation"],keyID:n,counterparty:e.verifier}),{ciphertext:o}=await this.encrypt({plaintext:s,protocolID:[2,"counterparty linkage 
revelation"],keyID:n,counterparty:e.verifier});return{prover:t,verifier:e.verifier,counterparty:e.counterparty,revelationTime:n,encryptedLinkage:a,encryptedLinkageProof:o}}async revealSpecificKeyLinkage(e){let{publicKey:t}=await this.getPublicKey({identityKey:!0});if(null==this.keyDeriver)throw Error("keyDeriver is undefined");let i=this.keyDeriver.revealSpecificSecret(e.counterparty,e.protocolID,e.keyID),{ciphertext:r}=await this.encrypt({plaintext:i,protocolID:[2,`specific linkage revelation ${e.protocolID[0]} ${e.protocolID[1]}`],keyID:e.keyID,counterparty:e.verifier}),{ciphertext:s}=await this.encrypt({plaintext:[0],protocolID:[2,`specific linkage revelation ${e.protocolID[0]} ${e.protocolID[1]}`],keyID:e.keyID,counterparty:e.verifier});return{prover:t,verifier:e.verifier,counterparty:e.counterparty,protocolID:e.protocolID,keyID:e.keyID,encryptedLinkage:r,encryptedLinkageProof:s,proofType:0}}async encrypt(e){if(null==this.keyDeriver)throw Error("keyDeriver is undefined");return{ciphertext:this.keyDeriver.deriveSymmetricKey(e.protocolID,e.keyID,e.counterparty??"self").encrypt(e.plaintext)}}async decrypt(e){if(null==this.keyDeriver)throw Error("keyDeriver is undefined");return{plaintext:this.keyDeriver.deriveSymmetricKey(e.protocolID,e.keyID,e.counterparty??"self").decrypt(e.ciphertext)}}async createHmac(e){if(null==this.keyDeriver)throw Error("keyDeriver is undefined");return{hmac:V(this.keyDeriver.deriveSymmetricKey(e.protocolID,e.keyID,e.counterparty??"self").toArray(),e.data)}}async verifyHmac(e){if(null==this.keyDeriver)throw Error("keyDeriver is undefined");let t=V(this.keyDeriver.deriveSymmetricKey(e.protocolID,e.keyID,e.counterparty??"self").toArray(),e.data).toString()===e.hmac.toString();if(!t){let e=Error("HMAC is not valid");throw e.code="ERR_INVALID_HMAC",e}return{valid:t}}async createSignature(e){if(null==e.hashToDirectlySign&&null==e.data)throw Error("args.data or args.hashToDirectlySign must be valid");let 
t=e.hashToDirectlySign??D(e.data??[]),i=(this.keyDeriver??(()=>{throw Error("keyDeriver is undefined")})()).derivePrivateKey(e.protocolID,e.keyID,e.counterparty??"anyone");return{signature:tn(new l(t),i,!0).toDER()}}async verifySignature(e){if(null==e.hashToDirectlyVerify&&null==e.data)throw Error("args.data or args.hashToDirectlyVerify must be valid");let t=e.hashToDirectlyVerify??D(e.data??[]),i=(this.keyDeriver??(()=>{throw Error("keyDeriver is undefined")})()).derivePublicKey(e.protocolID,e.keyID,e.counterparty??"self",e.forSelf),r=ta(new l(t),e9.fromDER(e.signature),i);if(!r){let e=Error("Signature is not valid");throw e.code="ERR_INVALID_SIGNATURE",e}return{valid:r}}};class ie{CWI;constructor(){if("object"!=typeof window)throw Error("The window.CWI substrate requires a global window object.");if("object"!=typeof window.CWI)throw Error("The window.CWI interface does not appear to be bound to the window object.");this.CWI=window.CWI}async createAction(e,t){return await this.CWI.createAction(e,t)}async signAction(e,t){return await this.CWI.signAction(e,t)}async abortAction(e,t){return await this.CWI.abortAction(e,t)}async listActions(e,t){return await this.CWI.listActions(e,t)}async internalizeAction(e,t){return await this.CWI.internalizeAction(e,t)}async listOutputs(e,t){return await this.CWI.listOutputs(e,t)}async relinquishOutput(e,t){return await this.CWI.relinquishOutput(e,t)}async getPublicKey(e,t){return await this.CWI.getPublicKey(e,t)}async revealCounterpartyKeyLinkage(e,t){return await this.CWI.revealCounterpartyKeyLinkage(e,t)}async revealSpecificKeyLinkage(e,t){return await this.CWI.revealSpecificKeyLinkage(e,t)}async encrypt(e,t){return await this.CWI.encrypt(e,t)}async decrypt(e,t){return await this.CWI.decrypt(e,t)}async createHmac(e,t){return await this.CWI.createHmac(e,t)}async verifyHmac(e,t){return await this.CWI.verifyHmac(e,t)}async createSignature(e,t){return await this.CWI.createSignature(e,t)}async verifySignature(e,t){return await 
this.CWI.verifySignature(e,t)}async acquireCertificate(e,t){return await this.CWI.acquireCertificate(e,t)}async listCertificates(e,t){return await this.CWI.listCertificates(e,t)}async proveCertificate(e,t){return await this.CWI.proveCertificate(e,t)}async relinquishCertificate(e,t){return await this.CWI.relinquishCertificate(e,t)}async discoverByIdentityKey(e,t){return await this.CWI.discoverByIdentityKey(e,t)}async discoverByAttributes(e,t){return await this.CWI.discoverByAttributes(e,t)}async isAuthenticated(e,t){return await this.CWI.isAuthenticated(e,t)}async waitForAuthentication(e,t){return await this.CWI.waitForAuthentication(e,t)}async getHeight(e,t){return await this.CWI.getHeight(e,t)}async getHeaderForHeight(e,t){return await this.CWI.getHeaderForHeight(e,t)}async getNetwork(e,t){return await this.CWI.getNetwork(e,t)}async getVersion(e,t){return await this.CWI.getVersion(e,t)}}class it extends Error{code;isError=!0;constructor(e,t=1,i){super(e),this.code=t,this.name=this.constructor.name,null!=i&&""!==i?this.stack=i:Error.captureStackTrace(this,this.constructor)}}!function(e){e[e.unknownError=1]="unknownError",e[e.unsupportedAction=2]="unsupportedAction",e[e.invalidHmac=3]="invalidHmac",e[e.invalidSignature=4]="invalidSignature",e[e.reviewActions=5]="reviewActions"}(n||(n={}));class ii{domain;constructor(e="*"){if("object"!=typeof window)throw Error("The XDM substrate requires a global window object.");if("function"!=typeof window.postMessage)throw Error("The window object does not seem to support postMessage calls.");this.domain=e}async invoke(e,t){return await new Promise((i,r)=>{let s=eC(tl(12)),n=e=>{"CWI"===e.data.type&&e.isTrusted&&e.data.id===s&&!0!==e.data.isInvocation&&("function"==typeof window.removeEventListener&&window.removeEventListener("message",n),"error"===e.data.status?r(new 
it(e.data.description,e.data.code)):i(e.data.result))};window.addEventListener("message",n),window.parent.postMessage({type:"CWI",isInvocation:!0,id:s,call:e,args:t},this.domain)})}async createAction(e){return await this.invoke("createAction",e)}async signAction(e){return await this.invoke("signAction",e)}async abortAction(e){return await this.invoke("abortAction",e)}async listActions(e){return await this.invoke("listActions",e)}async internalizeAction(e){return await this.invoke("internalizeAction",e)}async listOutputs(e){return await this.invoke("listOutputs",e)}async relinquishOutput(e){return await this.invoke("relinquishOutput",e)}async getPublicKey(e){return await this.invoke("getPublicKey",e)}async revealCounterpartyKeyLinkage(e){return await this.invoke("revealCounterpartyKeyLinkage",e)}async revealSpecificKeyLinkage(e){return await this.invoke("revealSpecificKeyLinkage",e)}async encrypt(e){return await this.invoke("encrypt",e)}async decrypt(e){return await this.invoke("decrypt",e)}async createHmac(e){return await this.invoke("createHmac",e)}async verifyHmac(e){return await this.invoke("verifyHmac",e)}async createSignature(e){return await this.invoke("createSignature",e)}async verifySignature(e){return await this.invoke("verifySignature",e)}async acquireCertificate(e){return await this.invoke("acquireCertificate",e)}async listCertificates(e){return await this.invoke("listCertificates",e)}async proveCertificate(e){return await this.invoke("proveCertificate",e)}async relinquishCertificate(e){return await this.invoke("relinquishCertificate",e)}async discoverByIdentityKey(e){return await this.invoke("discoverByIdentityKey",e)}async discoverByAttributes(e){return await this.invoke("discoverByAttributes",e)}async isAuthenticated(e){return await this.invoke("isAuthenticated",e)}async waitForAuthentication(e){return await this.invoke("waitForAuthentication",e)}async getHeight(e){return await this.invoke("getHeight",e)}async getHeaderForHeight(e){return await 
this.invoke("getHeaderForHeight",e)}async getNetwork(e){return await this.invoke("getNetwork",e)}async getVersion(e){return await this.invoke("getVersion",e)}}class ir{type;serialNumber;subject;certifier;revocationOutpoint;fields;signature;constructor(e,t,i,r,s,n,a){this.type=e,this.serialNumber=t,this.subject=i,this.certifier=r,this.revocationOutpoint=s,this.fields=n,this.signature=a}toBinary(e=!0){let t=new eF,i=eO(this.type,"base64");t.write(i);let r=eO(this.serialNumber,"base64");t.write(r);let s=eO(this.subject,"hex");t.write(s);let n=eO(this.certifier,"hex");t.write(n);let[a,o]=this.revocationOutpoint.split("."),c=eO(a,"hex");t.write(c),t.writeVarIntNum(Number(o));let h=Object.keys(this.fields).sort();for(let e of(t.writeVarIntNum(h.length),h)){let i=this.fields[e],r=eO(e,"utf8");t.writeVarIntNum(r.length),t.write(r);let s=eO(i,"utf8");t.writeVarIntNum(s.length),t.write(s)}if(e&&(this.signature??"").length>0){let e=eO(this.signature,"hex");t.write(e)}return t.toArray()}static fromBinary(e){let t,i=new eV(e),r=eC(i.read(32)),s=eC(i.read(32)),n=ex(i.read(33)),a=ex(i.read(33)),o=ex(i.read(32)),c=i.readVarIntNum(),h=`${o}.${c}`,l=i.readVarIntNum(),d={};for(let e=0;e0)throw Error(`Certificate has already been signed! 
Signature present: ${this.signature}`);this.certifier=(await e.getPublicKey({identityKey:!0})).publicKey;let t=this.toBinary(!1),{signature:i}=await e.createSignature({data:t,protocolID:[2,"certificate signature"],keyID:`${this.type} ${this.serialNumber}`});this.signature=ex(i)}static getCertificateFieldEncryptionDetails(e,t){return{protocolID:[2,"certificate field encryption"],keyID:t?`${t} ${e}`:e}}}!function(e){e[e.createAction=1]="createAction",e[e.signAction=2]="signAction",e[e.abortAction=3]="abortAction",e[e.listActions=4]="listActions",e[e.internalizeAction=5]="internalizeAction",e[e.listOutputs=6]="listOutputs",e[e.relinquishOutput=7]="relinquishOutput",e[e.getPublicKey=8]="getPublicKey",e[e.revealCounterpartyKeyLinkage=9]="revealCounterpartyKeyLinkage",e[e.revealSpecificKeyLinkage=10]="revealSpecificKeyLinkage",e[e.encrypt=11]="encrypt",e[e.decrypt=12]="decrypt",e[e.createHmac=13]="createHmac",e[e.verifyHmac=14]="verifyHmac",e[e.createSignature=15]="createSignature",e[e.verifySignature=16]="verifySignature",e[e.acquireCertificate=17]="acquireCertificate",e[e.listCertificates=18]="listCertificates",e[e.proveCertificate=19]="proveCertificate",e[e.relinquishCertificate=20]="relinquishCertificate",e[e.discoverByIdentityKey=21]="discoverByIdentityKey",e[e.discoverByAttributes=22]="discoverByAttributes",e[e.isAuthenticated=23]="isAuthenticated",e[e.waitForAuthentication=24]="waitForAuthentication",e[e.getHeight=25]="getHeight",e[e.getHeaderForHeight=26]="getHeaderForHeight",e[e.getNetwork=27]="getNetwork",e[e.getVersion=28]="getVersion"}(a||(a={}));let is=a;class ia{wire;constructor(e){this.wire=e}async transmit(e,t="",i=[]){let r=new eF;r.writeUInt8(is[e]);let s=eO(t,"utf8");r.writeUInt8(s.length),r.write(s),i.length>0&&r.write(i);let n=r.toArray(),a=new eV(await this.wire.transmitToWallet(n)),o=a.readUInt8();if(0===o)return a.read();{let e=a.readVarIntNum(),t=eA(a.read(e)),i=a.readVarIntNum();throw new it(t,o,eA(a.read(i)))}}async createAction(e,t){let i=new 
eF,r=eO(e.description,"utf8");if(i.writeVarIntNum(r.length),i.write(r),null!=e.inputBEEF?(i.writeVarIntNum(e.inputBEEF.length),i.write(e.inputBEEF)):i.writeVarIntNum(-1),null!=e.inputs)for(let t of(i.writeVarIntNum(e.inputs.length),e.inputs)){if(i.write(this.encodeOutpoint(t.outpoint)),null!=t.unlockingScript&&""!==t.unlockingScript){let e=eO(t.unlockingScript,"hex");i.writeVarIntNum(e.length),i.write(e)}else i.writeVarIntNum(-1),i.writeVarIntNum(t.unlockingScriptLength??0);let e=eO(t.inputDescription,"utf8");i.writeVarIntNum(e.length),i.write(e),"number"==typeof t.sequenceNumber?i.writeVarIntNum(t.sequenceNumber):i.writeVarIntNum(-1)}else i.writeVarIntNum(-1);if(null!=e.outputs)for(let t of(i.writeVarIntNum(e.outputs.length),e.outputs)){let e=eO(t.lockingScript,"hex");i.writeVarIntNum(e.length),i.write(e),i.writeVarIntNum(t.satoshis);let r=eO(t.outputDescription,"utf8");if(i.writeVarIntNum(r.length),i.write(r),null!=t.basket&&""!==t.basket){let e=eO(t.basket,"utf8");i.writeVarIntNum(e.length),i.write(e)}else i.writeVarIntNum(-1);if(null!=t.customInstructions&&""!==t.customInstructions){let e=eO(t.customInstructions,"utf8");i.writeVarIntNum(e.length),i.write(e)}else i.writeVarIntNum(-1);if(null!=t.tags)for(let e of(i.writeVarIntNum(t.tags.length),t.tags)){let t=eO(e,"utf8");i.writeVarIntNum(t.length),i.write(t)}else i.writeVarIntNum(-1)}else i.writeVarIntNum(-1);if("number"==typeof e.lockTime?i.writeVarIntNum(e.lockTime):i.writeVarIntNum(-1),"number"==typeof e.version?i.writeVarIntNum(e.version):i.writeVarIntNum(-1),null!=e.labels)for(let t of(i.writeVarIntNum(e.labels.length),e.labels)){let e=eO(t,"utf8");i.writeVarIntNum(e.length),i.write(e)}else i.writeVarIntNum(-1);if(null!=e.options){if(i.writeInt8(1),"boolean"==typeof e.options.signAndProcess?i.writeInt8(+!!e.options.signAndProcess):i.writeInt8(-1),"boolean"==typeof 
e.options.acceptDelayedBroadcast?i.writeInt8(+!!e.options.acceptDelayedBroadcast):i.writeInt8(-1),"known"===e.options.trustSelf?i.writeInt8(1):i.writeInt8(-1),null!=e.options.knownTxids)for(let t of(i.writeVarIntNum(e.options.knownTxids.length),e.options.knownTxids)){let e=eO(t,"hex");i.write(e)}else i.writeVarIntNum(-1);if("boolean"==typeof e.options.returnTXIDOnly?i.writeInt8(+!!e.options.returnTXIDOnly):i.writeInt8(-1),"boolean"==typeof e.options.noSend?i.writeInt8(+!!e.options.noSend):i.writeInt8(-1),null!=e.options.noSendChange)for(let t of(i.writeVarIntNum(e.options.noSendChange.length),e.options.noSendChange))i.write(this.encodeOutpoint(t));else i.writeVarIntNum(-1);if(null!=e.options.sendWith)for(let t of(i.writeVarIntNum(e.options.sendWith.length),e.options.sendWith)){let e=eO(t,"hex");i.write(e)}else i.writeVarIntNum(-1);"boolean"==typeof e.options.randomizeOutputs?i.writeInt8(+!!e.options.randomizeOutputs):i.writeInt8(-1)}else i.writeInt8(0);let s=new eV(await this.transmit("createAction",t,i.toArray())),n={};if(1===s.readInt8()&&(n.txid=ex(s.read(32))),1===s.readInt8()){let e=s.readVarIntNum();n.tx=s.read(e)}let a=s.readVarIntNum();if(a>=0){n.noSendChange=[];for(let e=0;e=0){n.sendWithResults=[];for(let e=0;e=0){a.sendWithResults=[];for(let e=0;e=0){c.labels=[];for(let e=0;e=0){c.inputs=[];for(let e=0;e=0&&(e=ex(r.read(n)));let a=r.readVarIntNum();a>=0&&(t=ex(r.read(a)));let o=r.readVarIntNum(),h=eA(r.read(o)),l=r.readVarIntNum();c.inputs.push({sourceOutpoint:i,sourceSatoshis:s,sourceLockingScript:e,unlockingScript:t,inputDescription:h,sequenceNumber:l})}}let d=r.readVarIntNum();if(d>=0){c.outputs=[];for(let e=0;e=0&&(e=ex(r.read(a)));let o=1===r.readInt8(),h=r.readVarIntNum(),l=eA(r.read(h)),d=r.readVarIntNum();d>=0&&(t=eA(r.read(d)));let f=r.readVarIntNum(),u=[];if(f>=0)for(let 
e=0;e=0&&(i=eA(r.read(p))),c.outputs.push({outputIndex:s,satoshis:n,lockingScript:e,spendable:o,outputDescription:l,basket:t,tags:u,customInstructions:i})}}n.push(c)}return{totalActions:s,actions:n}}async internalizeAction(e,t){let i=new eF;for(let t of(i.writeVarIntNum(e.tx.length),i.write(e.tx),i.writeVarIntNum(e.outputs.length),e.outputs))if(i.writeVarIntNum(t.outputIndex),"wallet payment"===t.protocol){if(null==t.paymentRemittance)throw Error("Payment remittance is required for wallet payment");i.writeUInt8(1),i.write(eO(t.paymentRemittance.senderIdentityKey,"hex"));let e=eO(t.paymentRemittance.derivationPrefix,"base64");i.writeVarIntNum(e.length),i.write(e);let r=eO(t.paymentRemittance.derivationSuffix,"base64");i.writeVarIntNum(r.length),i.write(r)}else{i.writeUInt8(2);let e=eO(t.insertionRemittance?.basket,"utf8");if(i.writeVarIntNum(e.length),i.write(e),"string"==typeof t.insertionRemittance?.customInstructions&&""!==t.insertionRemittance.customInstructions){let e=eO(t.insertionRemittance.customInstructions,"utf8");i.writeVarIntNum(e.length),i.write(e)}else i.writeVarIntNum(-1);if("object"==typeof t.insertionRemittance?.tags)for(let e of(i.writeVarIntNum(t.insertionRemittance.tags.length),t.insertionRemittance.tags)){let t=eO(e,"utf8");i.writeVarIntNum(t.length),i.write(t)}else i.writeVarIntNum(0)}if("object"==typeof e.labels)for(let t of(i.writeVarIntNum(e.labels.length),e.labels)){let e=eO(t,"utf8");i.writeVarIntNum(e.length),i.write(e)}else i.writeVarIntNum(-1);let r=eO(e.description);return i.writeVarIntNum(r.length),i.write(r),i.writeInt8("boolean"==typeof e.seekPermission?+!!e.seekPermission:-1),await this.transmit("internalizeAction",t,i.toArray()),{accepted:!0}}async listOutputs(e,t){let i,r=new eF,s=eO(e.basket,"utf8");if(r.writeVarIntNum(s.length),r.write(s),"object"==typeof e.tags)for(let t of(r.writeVarIntNum(e.tags.length),e.tags)){let e=eO(t,"utf8");r.writeVarIntNum(e.length),r.write(e)}else 
r.writeVarIntNum(0);"all"===e.tagQueryMode?r.writeInt8(1):"any"===e.tagQueryMode?r.writeInt8(2):r.writeInt8(-1),"locking scripts"===e.include?r.writeInt8(1):"entire transactions"===e.include?r.writeInt8(2):r.writeInt8(-1),"boolean"==typeof e.includeCustomInstructions?r.writeInt8(+!!e.includeCustomInstructions):r.writeInt8(-1),"boolean"==typeof e.includeTags?r.writeInt8(+!!e.includeTags):r.writeInt8(-1),"boolean"==typeof e.includeLabels?r.writeInt8(+!!e.includeLabels):r.writeInt8(-1),"number"==typeof e.limit?r.writeVarIntNum(e.limit):r.writeVarIntNum(-1),"number"==typeof e.offset?r.writeVarIntNum(e.offset):r.writeVarIntNum(-1),r.writeInt8("boolean"==typeof e.seekPermission?+!!e.seekPermission:-1);let n=new eV(await this.transmit("listOutputs",t,r.toArray())),a=n.readVarIntNum(),o=n.readVarIntNum();o>=0&&(i=n.read(o));let c=[];for(let e=0;e=0&&(e.lockingScript=ex(n.read(t)));let i=n.readVarIntNum();i>=0&&(e.customInstructions=eA(n.read(i)));let r=n.readVarIntNum();if(-1!==r){let t=[];for(let e=0;e0&&(t=eA(i.read(n)));let a=i.read(),o=await fetch(`${this.baseUrl}/${s}`,{method:"POST",headers:{"Content-Type":"application/octet-stream",Origin:t??""},body:new Uint8Array(a)});return Array.from(new Uint8Array(await o.arrayBuffer()))}}class ic extends Error{reviewActionResults;sendWithResults;txid;tx;noSendChange;code;isError=!0;constructor(e,t,i,r,s){super("Undelayed createAction or signAction results require review."),this.reviewActionResults=e,this.sendWithResults=t,this.txid=i,this.tx=r,this.noSendChange=s,this.code=5,this.name=this.constructor.name}}class ih{baseUrl;httpClient;originator;api;constructor(e,t="http://localhost:3321",i=fetch){this.baseUrl=t,this.originator=e,this.httpClient=i;let r="undefined"!=typeof window&&"undefined"!=typeof document&&window?.origin!=="file://";this.api=async(e,t)=>{let s=!r&&this.originator?function(e,t="http"){if(/^[a-z][a-z0-9+.-]*:\/\//i.test(e))try{return new URL(e).origin}catch{}try{return new 
URL(`${t}://${e}`).origin}catch{throw Error(`Invalid originator value: ${e}`)}}(this.originator,"http"):void 0;r||void 0!==s||console.error("Originator is required in Node.js environments");let n=await await i(`${this.baseUrl}/${e}`,{method:"POST",headers:{Accept:"application/json","Content-Type":"application/json",...s?{Origin:s}:{},...s?{Originator:s}:{}},body:JSON.stringify(t)}),a=await n.json();if(!n.ok)if(400===n.status&&a.isError&&5===a.code)throw new ic(a.reviewActionResults,a.sendWithResults,a.txid,a.tx,a.noSendChange);else throw Error(JSON.stringify({call:e,args:t,message:a.message??`HTTP Client error ${n.status}`}));return a}}async createAction(e){return await this.api("createAction",e)}async signAction(e){return await this.api("signAction",e)}async abortAction(e){return await this.api("abortAction",e)}async listActions(e){return await this.api("listActions",e)}async internalizeAction(e){return await this.api("internalizeAction",e)}async listOutputs(e){return await this.api("listOutputs",e)}async relinquishOutput(e){return await this.api("relinquishOutput",e)}async getPublicKey(e){return await this.api("getPublicKey",e)}async revealCounterpartyKeyLinkage(e){return await this.api("revealCounterpartyKeyLinkage",e)}async revealSpecificKeyLinkage(e){return await this.api("revealSpecificKeyLinkage",e)}async encrypt(e){return await this.api("encrypt",e)}async decrypt(e){return await this.api("decrypt",e)}async createHmac(e){return await this.api("createHmac",e)}async verifyHmac(e){return await this.api("verifyHmac",e)}async createSignature(e){return await this.api("createSignature",e)}async verifySignature(e){return await this.api("verifySignature",e)}async acquireCertificate(e){return await this.api("acquireCertificate",e)}async listCertificates(e){return await this.api("listCertificates",e)}async proveCertificate(e){return await this.api("proveCertificate",e)}async relinquishCertificate(e){return await this.api("relinquishCertificate",e)}async 
discoverByIdentityKey(e){return await this.api("discoverByIdentityKey",e)}async discoverByAttributes(e){return await this.api("discoverByAttributes",e)}async isAuthenticated(e){return await this.api("isAuthenticated",e)}async waitForAuthentication(e){return await this.api("waitForAuthentication",e)}async getHeight(e){return await this.api("getHeight",e)}async getHeaderForHeight(e){return await this.api("getHeaderForHeight",e)}async getNetwork(e){return await this.api("getNetwork",e)}async getVersion(e){return await this.api("getVersion",e)}}class il{domain;constructor(e="*"){if("object"!=typeof window)throw Error("The XDM substrate requires a global window object.");if(!window.hasOwnProperty("ReactNativeWebView"))throw Error("The window object does not have a ReactNativeWebView property.");if("function"!=typeof window.ReactNativeWebView.postMessage)throw Error("The window.ReactNativeWebView property does not seem to support postMessage calls.");this.domain=e}async invoke(e,t){return await new Promise((i,r)=>{let s=eC(tl(12)),n=e=>{let t=JSON.parse(e.data);"CWI"===t.type&&t.id===s&&!0!==t.isInvocation&&("function"==typeof window.removeEventListener&&window.removeEventListener("message",n),"error"===t.status?r(new it(t.description,t.code)):i(t.result))};window.addEventListener("message",n),window.ReactNativeWebView.postMessage(JSON.stringify({type:"CWI",isInvocation:!0,id:s,call:e,args:t}))})}async createAction(e){return await this.invoke("createAction",e)}async signAction(e){return await this.invoke("signAction",e)}async abortAction(e){return await this.invoke("abortAction",e)}async listActions(e){return await this.invoke("listActions",e)}async internalizeAction(e){return await this.invoke("internalizeAction",e)}async listOutputs(e){return await this.invoke("listOutputs",e)}async relinquishOutput(e){return await this.invoke("relinquishOutput",e)}async getPublicKey(e){return await this.invoke("getPublicKey",e)}async revealCounterpartyKeyLinkage(e){return await 
this.invoke("revealCounterpartyKeyLinkage",e)}async revealSpecificKeyLinkage(e){return await this.invoke("revealSpecificKeyLinkage",e)}async encrypt(e){return await this.invoke("encrypt",e)}async decrypt(e){return await this.invoke("decrypt",e)}async createHmac(e){return await this.invoke("createHmac",e)}async verifyHmac(e){return await this.invoke("verifyHmac",e)}async createSignature(e){return await this.invoke("createSignature",e)}async verifySignature(e){return await this.invoke("verifySignature",e)}async acquireCertificate(e){return await this.invoke("acquireCertificate",e)}async listCertificates(e){return await this.invoke("listCertificates",e)}async proveCertificate(e){return await this.invoke("proveCertificate",e)}async relinquishCertificate(e){return await this.invoke("relinquishCertificate",e)}async discoverByIdentityKey(e){return await this.invoke("discoverByIdentityKey",e)}async discoverByAttributes(e){return await this.invoke("discoverByAttributes",e)}async isAuthenticated(e){return await this.invoke("isAuthenticated",e)}async waitForAuthentication(e){return await this.invoke("waitForAuthentication",e)}async getHeight(e){return await this.invoke("getHeight",e)}async getHeaderForHeight(e){return await this.invoke("getHeaderForHeight",e)}async getNetwork(e){return await this.invoke("getNetwork",e)}async getVersion(e){return await this.invoke("getVersion",e)}}class id{substrate;originator;constructor(e="auto",t){"Cicada"===e&&(e=new ia(new io(t))),"window.CWI"===e&&(e=new ie),"XDM"===e&&(e=new ii),"json-api"===e&&(e=new ih(t)),"react-native"===e&&(e=new il(t)),this.substrate=e,this.originator=t}async connectToSubstrate(){let e;if("object"==typeof this.substrate)return;let t=async t=>{let i;if("object"!=typeof(i="number"==typeof t?await Promise.race([e.getVersion({}),new Promise((e,i)=>setTimeout(()=>i(Error("Timed out.")),t))]):await e.getVersion({}))||"string"!=typeof i.version)throw Error("Failed to use substrate.")};try{e=new ie,await 
t(),this.substrate=e}catch(i){try{e=new ii,await t(200),this.substrate=e}catch(i){try{e=new ia(new io(this.originator)),await t(),this.substrate=e}catch(i){try{e=new ih(this.originator),await t(),this.substrate=e}catch(i){try{e=new il(this.originator),await t(),this.substrate=e}catch(e){throw Error("No wallet available over any communication substrate. Install a BSV wallet today!")}}}}}}async createAction(e){return await this.connectToSubstrate(),await this.substrate.createAction(e,this.originator)}async signAction(e){return await this.connectToSubstrate(),await this.substrate.signAction(e,this.originator)}async abortAction(e){return await this.connectToSubstrate(),await this.substrate.abortAction(e,this.originator)}async listActions(e){return await this.connectToSubstrate(),await this.substrate.listActions(e,this.originator)}async internalizeAction(e){return await this.connectToSubstrate(),await this.substrate.internalizeAction(e,this.originator)}async listOutputs(e){return await this.connectToSubstrate(),await this.substrate.listOutputs(e,this.originator)}async relinquishOutput(e){return await this.connectToSubstrate(),await this.substrate.relinquishOutput(e,this.originator)}async getPublicKey(e){return await this.connectToSubstrate(),await this.substrate.getPublicKey(e,this.originator)}async revealCounterpartyKeyLinkage(e){return await this.connectToSubstrate(),await this.substrate.revealCounterpartyKeyLinkage(e,this.originator)}async revealSpecificKeyLinkage(e){return await this.connectToSubstrate(),await this.substrate.revealSpecificKeyLinkage(e,this.originator)}async encrypt(e){return await this.connectToSubstrate(),await this.substrate.encrypt(e,this.originator)}async decrypt(e){return await this.connectToSubstrate(),await this.substrate.decrypt(e,this.originator)}async createHmac(e){return await this.connectToSubstrate(),await this.substrate.createHmac(e,this.originator)}async verifyHmac(e){return await this.connectToSubstrate(),await 
this.substrate.verifyHmac(e,this.originator)}async createSignature(e){return await this.connectToSubstrate(),await this.substrate.createSignature(e,this.originator)}async verifySignature(e){return await this.connectToSubstrate(),await this.substrate.verifySignature(e,this.originator)}async acquireCertificate(e){return await this.connectToSubstrate(),await this.substrate.acquireCertificate(e,this.originator)}async listCertificates(e){return await this.connectToSubstrate(),await this.substrate.listCertificates(e,this.originator)}async proveCertificate(e){return await this.connectToSubstrate(),await this.substrate.proveCertificate(e,this.originator)}async relinquishCertificate(e){return await this.connectToSubstrate(),await this.substrate.relinquishCertificate(e,this.originator)}async discoverByIdentityKey(e){return await this.connectToSubstrate(),await this.substrate.discoverByIdentityKey(e,this.originator)}async discoverByAttributes(e){return await this.connectToSubstrate(),await this.substrate.discoverByAttributes(e,this.originator)}async isAuthenticated(e={}){return await this.connectToSubstrate(),await this.substrate.isAuthenticated(e,this.originator)}async waitForAuthentication(e={}){return await this.connectToSubstrate(),await this.substrate.waitForAuthentication(e,this.originator)}async getHeight(e={}){return await this.connectToSubstrate(),await this.substrate.getHeight(e,this.originator)}async getHeaderForHeight(e){return await this.connectToSubstrate(),await this.substrate.getHeaderForHeight(e,this.originator)}async getNetwork(e={}){return await this.connectToSubstrate(),await this.substrate.getNetwork(e,this.originator)}async getVersion(e={}){return await this.connectToSubstrate(),await this.substrate.getVersion(e,this.originator)}}class iu extends ir{keyring;decryptedFields;constructor(e,t,i,r,s,n,a,o,c){super(e,t,i,r,s,n,o),this.keyring=a,this.decryptedFields=c}static fromCertificate(e,t){return new 
iu(e.type,e.serialNumber,e.subject,e.certifier,e.revocationOutpoint,e.fields,t,e.signature)}async decryptFields(e,t,i){if(null==this.keyring||0===Object.keys(this.keyring).length)throw Error("A keyring is required to decrypt certificate fields for the verifier.");try{let r={};for(let s in this.keyring){let{plaintext:n}=await e.decrypt({ciphertext:eO(this.keyring[s],"base64"),...ir.getCertificateFieldEncryptionDetails(s,this.serialNumber),counterparty:this.subject,privileged:t,privilegedReason:i}),a=new tT(n).decrypt(eO(this.fields[s],"base64"));r[s]=eA(a)}return r}catch(e){throw Error(`Failed to decrypt selectively revealed certificate fields using keyring: ${String(e instanceof Error?e.message:e)}`)}}}class ip extends t7{keyDeriver;constructor(e){if(super(e),e instanceof t5)this.keyDeriver=e;else if("string"==typeof e||e instanceof tp)this.keyDeriver=new t9(e);else throw Error("Invalid key deriver provided")}async isAuthenticated(){throw Error("not implemented")}async waitForAuthentication(){throw Error("not implemented")}async getNetwork(){throw Error("not implemented")}async getVersion(){throw Error("not implemented")}async getPublicKey(e){if(!0===e.privileged)throw Error("no privilege support");if(!0===e.identityKey){if(null===this.keyDeriver||void 0===this.keyDeriver)throw Error("keyDeriver is not initialized");return{publicKey:this.keyDeriver.rootKey.toPublicKey().toString()}}if(null==e.protocolID||"string"!=typeof e.keyID||""===e.keyID.trim())throw Error("protocolID and keyID are required if identityKey is false or undefined.");if(null===this.keyDeriver||void 0===this.keyDeriver)throw Error("keyDeriver is not initialized");return{publicKey:this.keyDeriver.derivePublicKey(e.protocolID,e.keyID,"string"==typeof e.counterparty&&""!==e.counterparty.trim()?e.counterparty:"self",!!e.forSelf).toString()}}async createAction(){throw Error("not implemented")}async signAction(){throw Error("not implemented")}async abortAction(){throw Error("not implemented")}async 
listActions(){throw Error("not implemented")}async internalizeAction(){throw Error("not implemented")}async listOutputs(){throw Error("not implemented")}async relinquishOutput(){throw Error("not implemented")}async acquireCertificate(){throw Error("not implemented")}async listCertificates(){throw Error("not implemented")}async proveCertificate(){throw Error("not implemented")}async relinquishCertificate(){throw Error("not implemented")}async discoverByIdentityKey(){throw Error("not implemented")}async discoverByAttributes(){throw Error("not implemented")}async getHeight(){throw Error("not implemented")}async getHeaderForHeight(){throw Error("not implemented")}}class ib{sessionNonceToSession;identityKeyToNonces;constructor(){this.sessionNonceToSession=new Map,this.identityKeyToNonces=new Map}addSession(e){if("string"!=typeof e.sessionNonce)throw Error("Invalid session: sessionNonce is required to add a session.");if(this.sessionNonceToSession.set(e.sessionNonce,e),"string"==typeof e.peerIdentityKey){let t=this.identityKeyToNonces.get(e.peerIdentityKey);null==t&&(t=new Set,this.identityKeyToNonces.set(e.peerIdentityKey,t)),t.add(e.sessionNonce)}}updateSession(e){this.removeSession(e),this.addSession(e)}getSession(e){let t,i=this.sessionNonceToSession.get(e);if(null!=i)return i;let r=this.identityKeyToNonces.get(e);if(null!=r&&0!==r.size){for(let e of r){let i=this.sessionNonceToSession.get(e);null!=i&&(null==t?t=i:(i.lastUpdate??0)>(t.lastUpdate??0)&&(t=i))}return t}}removeSession(e){if("string"==typeof e.sessionNonce&&this.sessionNonceToSession.delete(e.sessionNonce),"string"==typeof e.peerIdentityKey){let t=this.identityKeyToNonces.get(e.peerIdentityKey);null!=t&&(t.delete(e.sessionNonce??""),0===t.size&&this.identityKeyToNonces.delete(e.peerIdentityKey))}}hasSession(e){if(this.sessionNonceToSession.has(e))return!0;let t=this.identityKeyToNonces.get(e);return null!=t&&t.size>0}}async function ig(e,t,i="self"){let 
r=eO(e,"base64"),s=r.slice(0,16),n=r.slice(16),{valid:a}=await t.verifyHmac({data:s,hmac:n,protocolID:[2,"server hmac"],keyID:eA(s),counterparty:i});return a}async function iy(e,t="self"){let i=tl(16),{hmac:r}=await e.createHmac({protocolID:[2,"server hmac"],keyID:eA(i),data:i,counterparty:t});return eC([...i,...r])}let im=async(e,t,i)=>{let r=await e.listCertificates({certifiers:t.certifiers,types:Object.keys(t.types)});return await Promise.all(r.certificates.map(async r=>{let{keyringForVerifier:s}=await e.proveCertificate({certificate:r,fieldsToReveal:t.types[r.type],verifier:i});return new iu(r.type,r.serialNumber,r.subject,r.certifier,r.revocationOutpoint,r.fields,s,r.signature)}))},iw=async(e,t,i)=>{if(null==t.certificates||0===t.certificates.length)throw Error("No certificates were provided in the AuthMessage.");await Promise.all(t.certificates.map(async r=>{if(r.subject!==t.identityKey)throw Error(`The subject of one of your certificates ("${r.subject}") is not the same as the request sender ("${t.identityKey}").`);let s=new iu(r.type,r.serialNumber,r.subject,r.certifier,r.revocationOutpoint,r.fields,r.keyring,r.signature);if(!await s.verify())throw Error(`The signature for the certificate with serial number ${s.serialNumber} is invalid!`);if(null!=i){let{certifiers:e,types:t}=i;if(!e.includes(s.certifier))throw Error(`Certificate with serial number ${s.serialNumber} has an unrequested certifier: ${s.certifier}`);if(null==t[s.type])throw Error(`Certificate with type ${s.type} was not requested`)}await s.decryptFields(e)}))};class iI{sessionManager;transport;wallet;certificatesToRequest;onGeneralMessageReceivedCallbacks=new Map;onCertificatesReceivedCallbacks=new Map;onCertificateRequestReceivedCallbacks=new Map;onInitialResponseReceivedCallbacks=new 
Map;callbackIdCounter=0;autoPersistLastSession=!0;lastInteractedWithPeer;constructor(e,t,i,r,s){this.wallet=e,this.transport=t,this.certificatesToRequest=i??{certifiers:[],types:{}},this.transport.onData(this.handleIncomingMessage.bind(this)).catch(e=>{throw e}),this.sessionManager=null!=r?r:new ib,!1===s?this.autoPersistLastSession=!1:this.autoPersistLastSession=!0}async toPeer(e,t,i){this.autoPersistLastSession&&"string"==typeof this.lastInteractedWithPeer&&"string"!=typeof t&&(t=this.lastInteractedWithPeer);let r=await this.getAuthenticatedSession(t,i),s=eC(tl(32)),{signature:n}=await this.wallet.createSignature({data:e,protocolID:[2,"auth message signature"],keyID:`${s} ${r.peerNonce??""}`,counterparty:r.peerIdentityKey}),a={version:"0.1",messageType:"general",identityKey:(await this.wallet.getPublicKey({identityKey:!0})).publicKey,nonce:s,yourNonce:r.peerNonce,payload:e,signature:n};r.lastUpdate=Date.now(),this.sessionManager.updateSession(r);try{await this.transport.send(a)}catch(t){let e=Error(`Failed to send message to peer ${r.peerIdentityKey??"unknown"}: ${String(t.message)}`);throw e.stack=t.stack,e}}async requestCertificates(e,t,i=1e4){this.autoPersistLastSession&&"string"==typeof this.lastInteractedWithPeer&&"string"!=typeof t&&(t=this.lastInteractedWithPeer);let r=await this.getAuthenticatedSession(t,i),s=eC(tl(32)),{signature:n}=await this.wallet.createSignature({data:eO(JSON.stringify(e),"utf8"),protocolID:[2,"auth message signature"],keyID:`${s} ${r.peerNonce??""}`,counterparty:r.peerIdentityKey}),a={version:"0.1",messageType:"certificateRequest",identityKey:(await this.wallet.getPublicKey({identityKey:!0})).publicKey,nonce:s,initialNonce:r.sessionNonce,yourNonce:r.peerNonce,requestedCertificates:e,signature:n};r.lastUpdate=Date.now(),this.sessionManager.updateSession(r);try{await this.transport.send(a)}catch(e){throw Error(`Failed to send certificate request message to peer ${r.peerIdentityKey??"unknown"}: ${String(e.message)}`)}}async 
getAuthenticatedSession(e,t){let i;if(void 0===this.transport)throw Error("Peer transport is not connected!");if("string"==typeof e&&(i=this.sessionManager.getSession(e)),null==i||!i.isAuthenticated){let r=await this.initiateHandshake(e,t);if(null==(i=this.sessionManager.getSession(r))||!i.isAuthenticated)throw Error("Unable to establish mutual authentication with peer!")}return i}listenForGeneralMessages(e){let t=this.callbackIdCounter++;return this.onGeneralMessageReceivedCallbacks.set(t,e),t}stopListeningForGeneralMessages(e){this.onGeneralMessageReceivedCallbacks.delete(e)}listenForCertificatesReceived(e){let t=this.callbackIdCounter++;return this.onCertificatesReceivedCallbacks.set(t,e),t}stopListeningForCertificatesReceived(e){this.onCertificatesReceivedCallbacks.delete(e)}listenForCertificatesRequested(e){let t=this.callbackIdCounter++;return this.onCertificateRequestReceivedCallbacks.set(t,e),t}stopListeningForCertificatesRequested(e){this.onCertificateRequestReceivedCallbacks.delete(e)}async initiateHandshake(e,t=1e4){let i=await iy(this.wallet),r=Date.now();this.sessionManager.addSession({isAuthenticated:!1,sessionNonce:i,peerIdentityKey:e,lastUpdate:r});let s={version:"0.1",messageType:"initialRequest",identityKey:(await this.wallet.getPublicKey({identityKey:!0})).publicKey,initialNonce:i,requestedCertificates:this.certificatesToRequest};return await this.transport.send(s),await this.waitForInitialResponse(i,t)}async waitForInitialResponse(e,t=1e4){return await new Promise((i,r)=>{let s=this.listenForInitialResponse(e,e=>{clearTimeout(n),this.stopListeningForInitialResponses(s),i(e)}),n=setTimeout(()=>{this.stopListeningForInitialResponses(s),r(Error("Initial response timed out."))},t)})}listenForInitialResponse(e,t){let i=this.callbackIdCounter++;return this.onInitialResponseReceivedCallbacks.set(i,{callback:t,sessionNonce:e}),i}stopListeningForInitialResponses(e){this.onInitialResponseReceivedCallbacks.delete(e)}async 
handleIncomingMessage(e){if("string"!=typeof e.version||"0.1"!==e.version)throw Error(`Invalid or unsupported message auth version! Received: ${e.version}, expected: 0.1`);switch(e.messageType){case"initialRequest":await this.processInitialRequest(e);break;case"initialResponse":await this.processInitialResponse(e);break;case"certificateRequest":await this.processCertificateRequest(e);break;case"certificateResponse":await this.processCertificateResponse(e);break;case"general":await this.processGeneralMessage(e);break;default:throw Error(`Unknown message type of ${String(e.messageType)} from ${String(e.identityKey)}`)}}async processInitialRequest(e){let t;if("string"!=typeof e.identityKey||"string"!=typeof e.initialNonce||""===e.initialNonce)throw Error("Missing required fields in initialRequest message.");let i=await iy(this.wallet),r=Date.now();this.sessionManager.addSession({isAuthenticated:!0,sessionNonce:i,peerNonce:e.initialNonce,peerIdentityKey:e.identityKey,lastUpdate:r}),null!=e.requestedCertificates&&Array.isArray(e.requestedCertificates.certifiers)&&e.requestedCertificates.certifiers.length>0&&(this.onCertificateRequestReceivedCallbacks.size>0?this.onCertificateRequestReceivedCallbacks.forEach(t=>{t(e.identityKey,e.requestedCertificates)}):t=await im(this.wallet,e.requestedCertificates,e.identityKey));let{signature:s}=await this.wallet.createSignature({data:eO(e.initialNonce+i,"base64"),protocolID:[2,"auth message signature"],keyID:`${e.initialNonce} ${i}`,counterparty:e.identityKey}),n={version:"0.1",messageType:"initialResponse",identityKey:(await this.wallet.getPublicKey({identityKey:!0})).publicKey,initialNonce:i,yourNonce:e.initialNonce,certificates:t,requestedCertificates:this.certificatesToRequest,signature:s};void 0===this.lastInteractedWithPeer&&(this.lastInteractedWithPeer=e.identityKey),await this.transport.send(n)}async processInitialResponse(e){if(!await ig(e.yourNonce,this.wallet))throw Error(`Initial response nonce verification failed from 
peer: ${e.identityKey}`);let t=this.sessionManager.getSession(e.yourNonce);if(null==t)throw Error(`Peer session not found for peer: ${e.identityKey}`);let i=eO((t.sessionNonce??"")+(e.initialNonce??""),"base64"),{valid:r}=await this.wallet.verifySignature({data:i,signature:e.signature,protocolID:[2,"auth message signature"],keyID:`${t.sessionNonce??""} ${e.initialNonce??""}`,counterparty:e.identityKey});if(!r)throw Error(`Unable to verify initial response signature for peer: ${e.identityKey}`);if(t.peerNonce=e.initialNonce,t.peerIdentityKey=e.identityKey,t.isAuthenticated=!0,t.lastUpdate=Date.now(),this.sessionManager.updateSession(t),this.certificatesToRequest?.certifiers?.length>0&&e.certificates?.length>0&&(await iw(this.wallet,e,this.certificatesToRequest),this.onCertificatesReceivedCallbacks.forEach(t=>t(e.identityKey,e.certificates))),this.lastInteractedWithPeer=e.identityKey,this.onInitialResponseReceivedCallbacks.forEach(e=>{e.sessionNonce===t.sessionNonce&&e.callback(t.sessionNonce)}),null!=e.requestedCertificates&&Array.isArray(e.requestedCertificates.certifiers)&&e.requestedCertificates.certifiers.length>0)if(this.onCertificateRequestReceivedCallbacks.size>0)this.onCertificateRequestReceivedCallbacks.forEach(t=>{t(e.identityKey,e.requestedCertificates)});else{let t=await im(this.wallet,e.requestedCertificates,e.identityKey);await this.sendCertificateResponse(e.identityKey,t)}}async processCertificateRequest(e){if(!await ig(e.yourNonce,this.wallet))throw Error(`Unable to verify nonce for certificate request message from: ${e.identityKey}`);let t=this.sessionManager.getSession(e.yourNonce);if(null==t)throw Error(`Session not found for nonce: ${e.yourNonce}`);let{valid:i}=await this.wallet.verifySignature({data:eO(JSON.stringify(e.requestedCertificates),"utf8"),signature:e.signature,protocolID:[2,"auth message signature"],keyID:`${e.nonce??""} ${t.sessionNonce??""}`,counterparty:t.peerIdentityKey});if(!i)throw Error(`Invalid signature in certificate request 
message from ${t.peerIdentityKey}`);if(t.lastUpdate=Date.now(),this.sessionManager.updateSession(t),null!=e.requestedCertificates&&Array.isArray(e.requestedCertificates.certifiers)&&e.requestedCertificates.certifiers.length>0)if(this.onCertificateRequestReceivedCallbacks.size>0)this.onCertificateRequestReceivedCallbacks.forEach(t=>{t(e.identityKey,e.requestedCertificates)});else{let t=await im(this.wallet,e.requestedCertificates,e.identityKey);await this.sendCertificateResponse(e.identityKey,t)}}async sendCertificateResponse(e,t){let i=await this.getAuthenticatedSession(e),r=eC(tl(32)),{signature:s}=await this.wallet.createSignature({data:eO(JSON.stringify(t),"utf8"),protocolID:[2,"auth message signature"],keyID:`${r} ${i.peerNonce??""}`,counterparty:i.peerIdentityKey}),n={version:"0.1",messageType:"certificateResponse",identityKey:(await this.wallet.getPublicKey({identityKey:!0})).publicKey,nonce:r,initialNonce:i.sessionNonce,yourNonce:i.peerNonce,certificates:t,signature:s};i.lastUpdate=Date.now(),this.sessionManager.updateSession(i);try{await this.transport.send(n)}catch(t){let e=t instanceof Error?t.message:String(t);throw Error(`Failed to send certificate response message to peer ${i.peerIdentityKey??"unknown"}: ${e}`)}}async processCertificateResponse(e){if(!await ig(e.yourNonce,this.wallet))throw Error(`Unable to verify nonce for certificate response from: ${e.identityKey}`);let t=this.sessionManager.getSession(e.yourNonce);if(null==t)throw Error(`Session not found for nonce: ${e.yourNonce}`);let{valid:i}=await this.wallet.verifySignature({data:eO(JSON.stringify(e.certificates),"utf8"),signature:e.signature,protocolID:[2,"auth message signature"],keyID:`${e.nonce??""} ${t.sessionNonce??""}`,counterparty:e.identityKey});if(!i)throw Error(`Unable to verify certificate response signature for peer: ${e.identityKey}`);await 
iw(this.wallet,e,e.requestedCertificates),this.onCertificatesReceivedCallbacks.forEach(t=>{t(e.identityKey,e.certificates??[])}),t.lastUpdate=Date.now(),this.sessionManager.updateSession(t)}async processGeneralMessage(e){if(!await ig(e.yourNonce,this.wallet))throw Error(`Unable to verify nonce for general message from: ${e.identityKey}`);let t=this.sessionManager.getSession(e.yourNonce);if(null==t)throw Error(`Session not found for nonce: ${e.yourNonce}`);let{valid:i}=await this.wallet.verifySignature({data:e.payload,signature:e.signature,protocolID:[2,"auth message signature"],keyID:`${e.nonce??""} ${t.sessionNonce??""}`,counterparty:t.peerIdentityKey});if(!i)throw Error(`Invalid signature in generalMessage from ${t.peerIdentityKey}`);t.lastUpdate=Date.now(),this.sessionManager.updateSession(t),this.lastInteractedWithPeer=e.identityKey,this.onGeneralMessageReceivedCallbacks.forEach(t=>{t(e.identityKey,e.payload??[])})}}let ik="undefined"!=typeof window?fetch.bind(window):fetch;class iv{onDataCallback;fetchClient;baseUrl;constructor(e,t=ik){this.fetchClient=t,this.baseUrl=e}async send(e){if(null==this.onDataCallback)throw Error("Listen before you start speaking. 
God gave you two ears and one mouth for a reason.");if("general"!==e.messageType)return await new Promise((t,i)=>{(async()=>{try{let i=this.fetchClient(`${this.baseUrl}/.well-known/auth`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(e)});"initialRequest"!==e.messageType&&t();let r=await i;if(r.ok&&null!=this.onDataCallback){let e=await r.json();this.onDataCallback(e)}else throw Error("HTTP server failed to authenticate");"initialRequest"===e.messageType&&t()}catch(e){i(e)}})()});{let t=this.deserializeRequestPayload(e.payload),i=`${this.baseUrl}${t.urlPostfix}`;if("object"!=typeof t.headers&&(t.headers={}),t.headers["x-bsv-auth-version"]=e.version,t.headers["x-bsv-auth-identity-key"]=e.identityKey,t.headers["x-bsv-auth-nonce"]=e.nonce,t.headers["x-bsv-auth-your-nonce"]=e.yourNonce,t.headers["x-bsv-auth-signature"]=ex(e.signature),t.headers["x-bsv-auth-request-id"]=t.requestId,null!=t.body){let e=t.headers;if(null==e["content-type"])throw Error("Content-Type header is required for requests with a body.");let i=String(e["content-type"]??"");i.includes("application/json")||i.includes("application/x-www-form-urlencoded")||i.includes("text/plain")?t.body=eA(t.body):t.body=new Uint8Array(t.body)}let r=await this.fetchClient(i,{method:t.method,headers:t.headers,body:t.body});if(500===r.status&&null==r.headers.get("x-bsv-auth-request-id")&&null==r.headers.get("x-bsv-auth-requested-certificates")){let e=await r.json();throw Error(`HTTP ${r.status} - ${JSON.stringify(e)}`)}let s=await r.arrayBuffer(),n=new eF;null!=r.headers.get("x-bsv-auth-request-id")&&n.write(eO(r.headers.get("x-bsv-auth-request-id"),"base64")),n.writeVarIntNum(r.status);let a=[];r.headers.forEach((e,t)=>{let i=t.toLowerCase();(i.startsWith("x-bsv-")||"authorization"===i)&&!i.startsWith("x-bsv-auth")&&a.push([i,e])}),a.sort(([e],[t])=>e.localeCompare(t)),n.writeVarIntNum(a.length);for(let e=0;e{e(t)}}deserializeRequestPayload(e){let t,i=new 
eV(e),r=eC(i.read(32)),s=i.readVarIntNum(),n="GET";s>0&&(n=eA(i.read(s)));let a=i.readVarIntNum(),o="";a>0&&(o=eA(i.read(a)));let c=i.readVarIntNum(),h="";c>0&&(h=eA(i.read(c)));let l={},d=i.readVarIntNum();if(d>0)for(let e=0;e0&&(t=i.read(f)),{urlPostfix:o+h,method:n,headers:l,body:t,requestId:r}}}class iS{sessionManager;wallet;callbacks={};certificatesReceived=[];requestedCertificates;peers={};constructor(e,t,i){this.wallet=e,this.requestedCertificates=t,this.sessionManager=i||new ib}async fetch(e,t={}){if("number"==typeof t.retryCounter){if(t.retryCounter<=0)throw Error("Request failed after maximum number of retries.");t.retryCounter--}let i=await new Promise(async(i,r)=>{try{let s,{method:n="GET",headers:a={},body:o}=t,c=new URL(e),h=c.origin;if(void 0===this.peers[h]){let e=new iv(h);s={peer:new iI(this.wallet,e,this.requestedCertificates,this.sessionManager),pendingCertificateRequests:[]},this.peers[h]=s,this.peers[h].peer.listenForCertificatesReceived((e,t)=>{this.certificatesReceived.push(...t)}),this.peers[h].peer.listenForCertificatesRequested(async(e,t)=>{try{this.peers[h].pendingCertificateRequests.push(!0);let i=await im(this.wallet,t,e);await this.peers[h].peer.sendCertificateResponse(e,i)}finally{await new Promise(e=>setTimeout(e,500)),this.peers[h].pendingCertificateRequests.shift()}})}else{if(!1===this.peers[h].supportsMutualAuth){try{let r=await this.handleFetchAndValidate(e,t,this.peers[h]);i(r)}catch(e){r(e)}return}s=this.peers[h]}let l=tl(32),d=eC(l),f=await this.serializeRequest(n,a,o,c,l);this.callbacks[d]={resolve:i,reject:r};let u=s.peer.listenForGeneralMessages((e,t)=>{let i,r=new eV(t);if(eC(r.read(32))!==d)return;s.peer.stopListeningForGeneralMessages(u),this.peers[h].identityKey=e,this.peers[h].supportsMutualAuth=!0;let n=r.readVarIntNum(),a={},o=r.readVarIntNum();if(o>0)for(let e=0;e0&&(i=r.read(c));let l=new Response(i?new Uint8Array(i):null,{status:n,statusText:`${n}`,headers:new Headers(a)});this.callbacks[d].resolve(l),delete 
this.callbacks[d]});s.pendingCertificateRequests.length>0&&await new Promise(e=>{setInterval(()=>{0===s.pendingCertificateRequests.length&&e()},100)}),await s.peer.toPeer(f.toArray(),s.identityKey).catch(async n=>{if(n.message.includes("Session not found for nonce")){delete this.peers[h],t.retryCounter??=3;let r=await this.fetch(e,t);i(r);return}if(n.message.includes("HTTP server failed to authenticate"))try{let r=await this.handleFetchAndValidate(e,t,s);i(r);return}catch(e){r(e)}else r(n)})}catch(e){r(e)}});return 402===i.status?await this.handlePaymentAndRetry(e,t,i):i}async sendCertificateRequest(e,t){let i,r=new URL(e).origin;if(void 0!==this.peers[r])i={peer:this.peers[r].peer};else{let e=new iv(r);i={peer:new iI(this.wallet,e,this.requestedCertificates,this.sessionManager)},this.peers[r]=i}return await new Promise(async(e,r)=>{let s=i.peer.listenForCertificatesReceived((t,r)=>{i.peer.stopListeningForCertificatesReceived(s),this.certificatesReceived.push(...r),e(r)});try{await i.peer.requestCertificates(t,i.identityKey)}catch(e){i.peer.stopListeningForCertificatesReceived(s),r(e)}})}consumeReceivedCertificates(){return this.certificatesReceived.splice(0)}async serializeRequest(e,t,i,r,s){let n=new eF;if(n.write(s),n.writeVarIntNum(e.length),n.write(eO(e)),r.pathname.length>0){let e=eO(r.pathname);n.writeVarIntNum(e.length),n.write(e)}else n.writeVarIntNum(-1);if(r.search.length>0){let e=eO(r.search);n.writeVarIntNum(e.length),n.write(e)}else n.writeVarIntNum(-1);let a=[];for(let[e,i]of Object.entries(t))if((e=e.toLowerCase()).startsWith("x-bsv-")||"authorization"===e){if(e.startsWith("x-bsv-auth"))throw Error("No BSV auth headers allowed here!");a.push([e,i])}else if(e.startsWith("content-type"))i=i.split(";")[0].trim(),a.push([e,i]);else throw Error("Unsupported header in the simplified fetch implementation. 
Only content-type, authorization, and x-bsv-* headers are supported.");a.sort(([e],[t])=>e.localeCompare(t)),n.writeVarIntNum(a.length);for(let e=0;e"content-type"===e);i=e&&e[1].includes("application/json")?"{}":""}if(i){let e=await this.normalizeBodyToNumberArray(i);n.writeVarIntNum(e.length),n.write(e)}else n.writeVarIntNum(-1);return n}async handleFetchAndValidate(e,t,i){let r=await fetch(e,t);if(r.headers.forEach(e=>{if(e.toLocaleLowerCase().startsWith("x-bsv"))throw Error("The server is trying to claim it has been authenticated when it has not!")}),r.ok)return i.supportsMutualAuth=!1,r;throw Error(`Request failed with status: ${r.status}`)}async handlePaymentAndRetry(e,t={},i){let r=i.headers.get("x-bsv-payment-version");if(!r||"1.0"!==r)throw Error(`Unsupported x-bsv-payment-version response header. Client version: 1.0, Server version: ${r}`);let s=i.headers.get("x-bsv-payment-satoshis-required");if(!s)throw Error("Missing x-bsv-payment-satoshis-required response header.");let n=parseInt(s);if(isNaN(n)||n<=0)throw Error("Invalid x-bsv-payment-satoshis-required response header value.");let a=i.headers.get("x-bsv-auth-identity-key");if(!a)throw Error("Missing x-bsv-auth-identity-key response header.");let o=i.headers.get("x-bsv-payment-derivation-prefix");if("string"!=typeof o||o.length<1)throw Error("Missing x-bsv-payment-derivation-prefix response header.");let c=await iy(this.wallet),{publicKey:h}=await this.wallet.getPublicKey({protocolID:[2,"3241645161d8"],keyID:`${o} ${c}`,counterparty:a}),l=new tK().lock(to.fromString(h).toAddress()).toHex(),{tx:d}=await this.wallet.createAction({description:`Payment for request to ${new URL(e).origin}`,outputs:[{satoshis:n,lockingScript:l,customInstructions:JSON.stringify({derivationPrefix:o,derivationSuffix:c,payee:a}),outputDescription:"HTTP request payment"}],options:{randomizeOutputs:!1}});return 
t.headers=t.headers||{},t.headers["x-bsv-payment"]=JSON.stringify({derivationPrefix:o,derivationSuffix:c,transaction:eC(d)}),t.retryCounter??=3,this.fetch(e,t)}async normalizeBodyToNumberArray(e){if(null==e)return[];if("object"==typeof e)return eO(JSON.stringify(e,"utf8"));if(Array.isArray(e)&&e.every(e=>"number"==typeof e))return e;if("string"==typeof e)return eO(e,"utf8");if(e instanceof ArrayBuffer||ArrayBuffer.isView(e))return Array.from(new Uint8Array(e instanceof ArrayBuffer?e:e.buffer));if(e instanceof Blob)return Array.from(new Uint8Array(await e.arrayBuffer()));if(e instanceof FormData){let t=[];return e.forEach((e,i)=>{t.push([i,e.toString()])}),eO(new URLSearchParams(t).toString(),"utf8")}if(e instanceof URLSearchParams)return eO(e.toString(),"utf8");if(e instanceof ReadableStream)throw Error("ReadableStream cannot be directly converted to number[].");throw Error("Unsupported body type in this SimplifiedFetch implementation.")}}class i_{pushDrop;static decode(e){let t=tz.decode(e);if(t.fields.length<4)throw Error("Invalid SHIP/SLAP advertisement!");let i=eA(t.fields[0]);if("SHIP"!==i&&"SLAP"!==i)throw Error("Invalid protocol type!");let r=ex(t.fields[1]);return{protocol:i,identityKey:r,domain:eA(t.fields[2]),topicOrService:eA(t.fields[3])}}constructor(e){this.pushDrop=new tz(e)}async lock(e,t,i){let{publicKey:r}=await this.pushDrop.wallet.getPublicKey({identityKey:!0});return await this.pushDrop.lock([eO(e,"utf8"),eO(r,"hex"),eO(t,"utf8"),eO(i,"utf8")],[2,"SHIP"===e?"Service Host Interconnect":"Service Lookup Availability"],"1","self")}unlock(e){return this.pushDrop.unlock([2,"SHIP"===e?"Service Host Interconnect":"Service Lookup Availability"],"1","self")}}let iE=["https://overlay-us-1.bsvb.tech","https://overlay-eu-1.bsvb.tech","https://overlay-ap-1.bsvb.tech","https://users.bapp.dev"],ix=["https://testnet-users.bapp.dev"];class iO{fetchClient;allowHTTP;constructor(e=fetch,t=!1){this.fetchClient=e,this.allowHTTP=t}async 
lookup(e,t,i=5e3){if(!e.startsWith("https:")&&!this.allowHTTP)throw Error('HTTPS facilitator can only use URLs that start with "https:"');let r=new Promise((e,t)=>setTimeout(()=>t(Error("Request timed out")),i)),s=fetch(`${e}/lookup`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({service:t.service,query:t.query})}),n=await Promise.race([s,r]);if(n.ok)return await n.json();throw Error("Failed to facilitate lookup")}}class iP{facilitator;slapTrackers;hostOverrides;additionalHosts;networkPreset;constructor(e={}){this.networkPreset=e.networkPreset??"mainnet",this.facilitator=e.facilitator??new iO(void 0,"local"===this.networkPreset),this.slapTrackers=e.slapTrackers??("mainnet"===this.networkPreset?iE:ix),this.hostOverrides=e.hostOverrides??{},this.additionalHosts=e.additionalHosts??{}}async query(e,t){let i=[];if(i="ls_slap"===e.service?"local"===this.networkPreset?["http://localhost:8080"]:this.slapTrackers:null!=this.hostOverrides[e.service]?this.hostOverrides[e.service]:"local"===this.networkPreset?["http://localhost:8080"]:await this.findCompetentHosts(e.service),this.additionalHosts[e.service]?.length>0&&(i=[...i,...this.additionalHosts[e.service]]),i.length<1)throw Error(`No competent ${this.networkPreset} hosts found by the SLAP trackers for lookup service: ${e.service}`);let r=(await Promise.allSettled(i.map(async i=>await this.facilitator.lookup(i,e,t)))).filter(e=>"fulfilled"===e.status).map(e=>e.value);if(0===r.length)throw Error("No successful responses from any hosts");let s=new Map;for(let e of r)if("output-list"===e.type)try{for(let t of e.outputs)try{let e=t4.fromBEEF(t.beef).id("hex"),i=`${e}.${t.outputIndex}`;s.set(i,t)}catch{continue}}catch(e){}return{type:"output-list",outputs:Array.from(s.values())}}async findCompetentHosts(e){let t={service:"ls_slap",query:{service:e}},i=await Promise.allSettled(this.slapTrackers.map(async e=>await this.facilitator.lookup(e,t,5e3))),r=new Set;for(let t of 
i)if("fulfilled"!==t.status)continue;else{let i=t.value;if("output-list"!==i.type)continue;for(let t of i.outputs)try{let i=t4.fromBEEF(t.beef).outputs[t.outputIndex].lockingScript,s=i_.decode(i);if(s.topicOrService!==e||"SLAP"!==s.protocol)continue;r.add(s.domain)}catch{continue}}return[...r]}}class iN{httpClient;allowHTTP;constructor(e=fetch,t=!1){this.httpClient=e,this.allowHTTP=t}async send(e,t){let i;if(!e.startsWith("https:")&&!this.allowHTTP)throw Error('HTTPS facilitator can only use URLs that start with "https:"');let r={"Content-Type":"application/octet-stream","X-Topics":JSON.stringify(t.topics)};if(Array.isArray(t.offChainValues)){r["x-includes-off-chain-values"]="true";let e=new eF;e.writeVarIntNum(t.beef.length),e.write(t.beef),e.write(t.offChainValues),i=new Uint8Array(e.toArray())}else i=new Uint8Array(t.beef);let s=await fetch(`${e}/submit`,{method:"POST",headers:r,body:i});if(s.ok)return await s.json();throw Error("Failed to facilitate broadcast")}}class iA{topics;facilitator;resolver;requireAcknowledgmentFromAllHostsForTopics;requireAcknowledgmentFromAnyHostForTopics;requireAcknowledgmentFromSpecificHostsForTopics;networkPreset;constructor(e,t={}){if(0===e.length)throw Error("At least one topic is required for broadcast.");if(e.some(e=>!e.startsWith("tm_")))throw Error('Every topic must start with "tm_".');this.topics=e,this.networkPreset=t.networkPreset??"mainnet",this.facilitator=t.facilitator??new iN(void 0,"local"===this.networkPreset),this.resolver=t.resolver??new iP({networkPreset:this.networkPreset}),this.requireAcknowledgmentFromAllHostsForTopics=t.requireAcknowledgmentFromAllHostsForTopics??[],this.requireAcknowledgmentFromAnyHostForTopics=t.requireAcknowledgmentFromAnyHostForTopics??"all",this.requireAcknowledgmentFromSpecificHostsForTopics=t.requireAcknowledgmentFromSpecificHostsForTopics??{}}async broadcast(e){let t,i,r,s,n,a=e.metadata.get("OffChainValues");try{t=e.toBEEF()}catch(e){throw Error("Transactions sent via SHIP to Overlay 
Services must be serializable to BEEF format.")}let o=await this.findInterestedHosts();if(0===Object.keys(o).length)return{status:"error",code:"ERR_NO_HOSTS_INTERESTED",description:`No ${this.networkPreset} hosts are interested in receiving this transaction.`};let c=Object.entries(o).map(async([e,i])=>{try{let r=await this.facilitator.send(e,{beef:t,offChainValues:a,topics:[...i]});if(null==r||0===Object.keys(r).length)throw Error("Steak has no topics.");return{host:e,success:!0,steak:r}}catch(t){return console.error(t),{host:e,success:!1,error:t}}}),h=(await Promise.all(c)).filter(e=>e.success);if(0===h.length)return{status:"error",code:"ERR_ALL_HOSTS_REJECTED",description:`All ${this.networkPreset} topical hosts have rejected the transaction.`};let l={};for(let e of h){let t=e.host,i=e.steak,r=new Set;for(let[e,t]of Object.entries(i)){let i=t.outputsToAdmit,s=t.coinsToRetain,n=t.coinsRemoved;(i?.length>0||s?.length>0||n?.length>0)&&r.add(e)}l[t]=r}return("all"===this.requireAcknowledgmentFromAllHostsForTopics?(i=this.topics,r="all"):"any"===this.requireAcknowledgmentFromAllHostsForTopics?(i=this.topics,r="any"):(i=Array.isArray(this.requireAcknowledgmentFromAllHostsForTopics)?this.requireAcknowledgmentFromAllHostsForTopics:this.topics,r="all"),i.length>0&&!this.checkAcknowledgmentFromAllHosts(l,i,r))?{status:"error",code:"ERR_REQUIRE_ACK_FROM_ALL_HOSTS_FAILED",description:"Not all hosts acknowledged the required topics."}:("all"===this.requireAcknowledgmentFromAnyHostForTopics?(s=this.topics,n="all"):"any"===this.requireAcknowledgmentFromAnyHostForTopics?(s=this.topics,n="any"):(s=Array.isArray(this.requireAcknowledgmentFromAnyHostForTopics)?this.requireAcknowledgmentFromAnyHostForTopics:[],n="all"),s.length>0&&!this.checkAcknowledgmentFromAnyHost(l,s,n))?{status:"error",code:"ERR_REQUIRE_ACK_FROM_ANY_HOST_FAILED",description:"No host acknowledged the required 
topics."}:Object.keys(this.requireAcknowledgmentFromSpecificHostsForTopics).length>0&&!this.checkAcknowledgmentFromSpecificHosts(l,this.requireAcknowledgmentFromSpecificHostsForTopics)?{status:"error",code:"ERR_REQUIRE_ACK_FROM_SPECIFIC_HOSTS_FAILED",description:"Specific hosts did not acknowledge the required topics."}:{status:"success",txid:e.id("hex"),message:`Sent to ${h.length} Overlay Services ${1===h.length?"host":"hosts"}.`}}checkAcknowledgmentFromAllHosts(e,t,i){for(let r of Object.values(e))if("all"===i){for(let e of t)if(!r.has(e))return!1}else if("any"===i){let e=!1;for(let i of t)if(r.has(i)){e=!0;break}if(!e)return!1}return!0}checkAcknowledgmentFromAnyHost(e,t,i){if("all"===i){for(let i of Object.values(e)){let e=!0;for(let r of t)if(!i.has(r)){e=!1;break}if(e)return!0}return!1}for(let i of Object.values(e))for(let e of t)if(i.has(e))return!0;return!1}checkAcknowledgmentFromSpecificHosts(e,t){for(let[i,r]of Object.entries(t)){let t,s,n=e[i];if(null==n)return!1;if("all"===r||"any"===r)s=r,t=this.topics;else{if(!Array.isArray(r))continue;t=r,s="all"}if("all"===s){for(let e of t)if(!n.has(e))return!1}else if("any"===s){let e=!1;for(let i of t)if(n.has(i)){e=!0;break}if(!e)return!1}}return!0}async findInterestedHosts(){if("local"===this.networkPreset){let e=new Set;for(let t=0;t{iC[iT[e]]=e});let iR={type:"error",data:"parser error"},iB="function"==typeof Blob||"undefined"!=typeof Blob&&"[object BlobConstructor]"===Object.prototype.toString.call(Blob),iL="function"==typeof ArrayBuffer,iD=e=>"function"==typeof ArrayBuffer.isView?ArrayBuffer.isView(e):e&&e.buffer instanceof ArrayBuffer,iM=({type:e,data:t},i,r)=>{if(iB&&t instanceof Blob)if(i)return r(t);else return iF(t,r);if(iL&&(t instanceof ArrayBuffer||iD(t)))if(i)return r(t);else return iF(new Blob([t]),r);return r(iT[e]+(t||""))},iF=(e,t)=>{let i=new FileReader;return i.onload=function(){t("b"+(i.result.split(",")[1]||""))},i.readAsDataURL(e)};function iV(e){return e instanceof Uint8Array?e:e 
instanceof ArrayBuffer?new Uint8Array(e):new Uint8Array(e.buffer,e.byteOffset,e.byteLength)}let iH="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",iq="undefined"==typeof Uint8Array?[]:new Uint8Array(256);for(let e=0;e{if("string"!=typeof e)return{type:"message",data:ij(e,t)};let i=e.charAt(0);return"b"===i?{type:"message",data:i$(e.substring(1),t)}:iC[i]?e.length>1?{type:iC[i],data:e.substring(1)}:{type:iC[i]}:iR},i$=(e,t)=>iU?ij((e=>{let t=.75*e.length,i=e.length,r,s=0,n,a,o,c;"="===e[e.length-1]&&(t--,"="===e[e.length-2]&&t--);let h=new ArrayBuffer(t),l=new Uint8Array(h);for(r=0;r>4,l[s++]=(15&a)<<4|o>>2,l[s++]=(3&o)<<6|63&c;return h})(e),t):{base64:!0,data:e},ij=(e,t)=>"blob"===t?e instanceof Blob?e:new Blob([e]):e instanceof ArrayBuffer?e:e.buffer;function iz(e){return e.reduce((e,t)=>e+t.length,0)}function iW(e,t){if(e[0].length===t)return e.shift();let i=new Uint8Array(t),r=0;for(let s=0;sPromise.resolve().then(e):(e,t)=>t(e,0),iY="undefined"!=typeof self?self:"undefined"!=typeof window?window:Function("return this")();function iJ(e,...t){return t.reduce((t,i)=>(e.hasOwnProperty(i)&&(t[i]=e[i]),t),{})}let iZ=iY.setTimeout,iQ=iY.clearTimeout;function i0(e,t){t.useNativeTimers?(e.setTimeoutFn=iZ.bind(iY),e.clearTimeoutFn=iQ.bind(iY)):(e.setTimeoutFn=iY.setTimeout.bind(iY),e.clearTimeoutFn=iY.clearTimeout.bind(iY))}function i1(){return Date.now().toString(36).substring(3)+Math.random().toString(36).substring(2,5)}class i2 extends Error{constructor(e,t,i){super(e),this.description=t,this.context=i,this.type="TransportError"}}class i3 extends iG{constructor(e){super(),this.writable=!1,i0(this,e),this.opts=e,this.query=e.query,this.socket=e.socket,this.supportsBinary=!e.forceBase64}onError(e,t,i){return super.emitReserved("error",new i2(e,t,i)),this}open(){return 
this.readyState="opening",this.doOpen(),this}close(){return("opening"===this.readyState||"open"===this.readyState)&&(this.doClose(),this.onClose()),this}send(e){"open"===this.readyState&&this.write(e)}onOpen(){this.readyState="open",this.writable=!0,super.emitReserved("open")}onData(e){let t=iK(e,this.socket.binaryType);this.onPacket(t)}onPacket(e){super.emitReserved("packet",e)}onClose(e){this.readyState="closed",super.emitReserved("close",e)}pause(e){}createUri(e,t={}){return e+"://"+this._hostname()+this._port()+this.opts.path+this._query(t)}_hostname(){let e=this.opts.hostname;return -1===e.indexOf(":")?e:"["+e+"]"}_port(){return this.opts.port&&(this.opts.secure&&Number(443!==this.opts.port)||!this.opts.secure&&80!==Number(this.opts.port))?":"+this.opts.port:""}_query(e){let t=function(e){let t="";for(let i in e)e.hasOwnProperty(i)&&(t.length&&(t+="&"),t+=encodeURIComponent(i)+"="+encodeURIComponent(e[i]));return t}(e);return t.length?"?"+t:""}}class i6 extends i3{constructor(){super(...arguments),this._polling=!1}get name(){return"polling"}doOpen(){this._poll()}pause(e){this.readyState="pausing";let t=()=>{this.readyState="paused",e()};if(this._polling||!this.writable){let e=0;this._polling&&(e++,this.once("pollComplete",function(){--e||t()})),this.writable||(e++,this.once("drain",function(){--e||t()}))}else t()}_poll(){this._polling=!0,this.doPoll(),this.emitReserved("poll")}onData(e){let t=e=>{if("opening"===this.readyState&&"open"===e.type&&this.onOpen(),"close"===e.type)return this.onClose({description:"transport closed by the server"}),!1;this.onPacket(e)};((e,t)=>{let i=e.split("\x1e"),r=[];for(let e=0;e{this.write([{type:"close"}])};"open"===this.readyState?e():this.once("open",e)}write(e){this.writable=!1,((e,t)=>{let i=e.length,r=Array(i),s=0;e.forEach((e,n)=>{iM(e,!1,e=>{r[n]=e,++s===i&&t(r.join("\x1e"))})})})(e,e=>{this.doWrite(e,()=>{this.writable=!0,this.emitReserved("drain")})})}uri(){let 
e=this.opts.secure?"https":"http",t=this.query||{};return!1!==this.opts.timestampRequests&&(t[this.opts.timestampParam]=i1()),this.supportsBinary||t.sid||(t.b64=1),this.createUri(e,t)}}let i8=!1;try{i8="undefined"!=typeof XMLHttpRequest&&"withCredentials"in new XMLHttpRequest}catch(e){}let i4=i8;function i5(){}class i9 extends i6{constructor(e){if(super(e),"undefined"!=typeof location){let t="https:"===location.protocol,i=location.port;i||(i=t?"443":"80"),this.xd="undefined"!=typeof location&&e.hostname!==location.hostname||i!==e.port}}doWrite(e,t){let i=this.request({method:"POST",data:e});i.on("success",t),i.on("error",(e,t)=>{this.onError("xhr post error",e,t)})}doPoll(){let e=this.request();e.on("data",this.onData.bind(this)),e.on("error",(e,t)=>{this.onError("xhr poll error",e,t)}),this.pollXhr=e}}class i7 extends iG{constructor(e,t,i){super(),this.createRequest=e,i0(this,i),this._opts=i,this._method=i.method||"GET",this._uri=t,this._data=void 0!==i.data?i.data:null,this._create()}_create(){var e;let t=iJ(this._opts,"agent","pfx","key","passphrase","cert","ca","ciphers","rejectUnauthorized","autoUnref");t.xdomain=!!this._opts.xd;let i=this._xhr=this.createRequest(t);try{i.open(this._method,this._uri,!0);try{if(this._opts.extraHeaders)for(let e in i.setDisableHeaderCheck&&i.setDisableHeaderCheck(!0),this._opts.extraHeaders)this._opts.extraHeaders.hasOwnProperty(e)&&i.setRequestHeader(e,this._opts.extraHeaders[e])}catch(e){}if("POST"===this._method)try{i.setRequestHeader("Content-type","text/plain;charset=UTF-8")}catch(e){}try{i.setRequestHeader("Accept","*/*")}catch(e){}null==(e=this._opts.cookieJar)||e.addCookies(i),"withCredentials"in i&&(i.withCredentials=this._opts.withCredentials),this._opts.requestTimeout&&(i.timeout=this._opts.requestTimeout),i.onreadystatechange=()=>{var 
e;3===i.readyState&&(null==(e=this._opts.cookieJar)||e.parseCookies(i.getResponseHeader("set-cookie"))),4===i.readyState&&(200===i.status||1223===i.status?this._onLoad():this.setTimeoutFn(()=>{this._onError("number"==typeof i.status?i.status:0)},0))},i.send(this._data)}catch(e){this.setTimeoutFn(()=>{this._onError(e)},0);return}"undefined"!=typeof document&&(this._index=i7.requestsCount++,i7.requests[this._index]=this)}_onError(e){this.emitReserved("error",e,this._xhr),this._cleanup(!0)}_cleanup(e){if(void 0!==this._xhr&&null!==this._xhr){if(this._xhr.onreadystatechange=i5,e)try{this._xhr.abort()}catch(e){}"undefined"!=typeof document&&delete i7.requests[this._index],this._xhr=null}}_onLoad(){let e=this._xhr.responseText;null!==e&&(this.emitReserved("data",e),this.emitReserved("success"),this._cleanup())}abort(){this._cleanup()}}function re(){for(let e in i7.requests)i7.requests.hasOwnProperty(e)&&i7.requests[e].abort()}i7.requestsCount=0,i7.requests={},"undefined"!=typeof document&&("function"==typeof attachEvent?attachEvent("onunload",re):"function"==typeof addEventListener&&addEventListener("onpagehide"in iY?"pagehide":"unload",re,!1));let rt=function(){let e=ri({xdomain:!1});return e&&null!==e.responseType}();function ri(e){let t=e.xdomain;try{if("undefined"!=typeof XMLHttpRequest&&(!t||i4))return new XMLHttpRequest}catch(e){}if(!t)try{return new iY[["Active"].concat("Object").join("X")]("Microsoft.XMLHTTP")}catch(e){}}let rr="undefined"!=typeof navigator&&"string"==typeof navigator.product&&"reactnative"===navigator.product.toLowerCase();class rs extends i3{get name(){return"websocket"}doOpen(){let e=this.uri(),t=this.opts.protocols,i=rr?{}:iJ(this.opts,"agent","perMessageDeflate","pfx","key","passphrase","cert","ca","ciphers","rejectUnauthorized","localAddress","protocolVersion","origin","maxPayload","family","checkServerIdentity");this.opts.extraHeaders&&(i.headers=this.opts.extraHeaders);try{this.ws=this.createSocket(e,t,i)}catch(e){return 
this.emitReserved("error",e)}this.ws.binaryType=this.socket.binaryType,this.addEventListeners()}addEventListeners(){this.ws.onopen=()=>{this.opts.autoUnref&&this.ws._socket.unref(),this.onOpen()},this.ws.onclose=e=>this.onClose({description:"websocket connection closed",context:e}),this.ws.onmessage=e=>this.onData(e.data),this.ws.onerror=e=>this.onError("websocket error",e)}write(e){this.writable=!1;for(let t=0;t{try{this.doWrite(i,e)}catch(e){}r&&iX(()=>{this.writable=!0,this.emitReserved("drain")},this.setTimeoutFn)})}}doClose(){void 0!==this.ws&&(this.ws.onerror=()=>{},this.ws.close(),this.ws=null)}uri(){let e=this.opts.secure?"wss":"ws",t=this.query||{};return this.opts.timestampRequests&&(t[this.opts.timestampParam]=i1()),this.supportsBinary||(t.b64=1),this.createUri(e,t)}}let rn=iY.WebSocket||iY.MozWebSocket,ra={websocket:class extends rs{createSocket(e,t,i){return rr?new rn(e,t,i):t?new rn(e,t):new rn(e)}doWrite(e,t){this.ws.send(t)}},webtransport:class extends i3{get name(){return"webtransport"}doOpen(){try{this._transport=new WebTransport(this.createUri("https"),this.opts.transportOptions[this.name])}catch(e){return this.emitReserved("error",e)}this._transport.closed.then(()=>{this.onClose()}).catch(e=>{this.onError("webtransport error",e)}),this._transport.ready.then(()=>{this._transport.createBidirectionalStream().then(e=>{let r=function(e,t){i||(i=new TextDecoder);let r=[],s=0,n=-1,a=!1;return new TransformStream({transform(o,c){for(r.push(o);;){if(0===s){if(1>iz(r))break;let e=iW(r,1);a=(128&e[0])==128,s=(n=127&e[0])<126?3:126===n?1:2}else if(1===s){if(2>iz(r))break;let e=iW(r,2);n=new DataView(e.buffer,e.byteOffset,e.length).getUint16(0),s=3}else if(2===s){if(8>iz(r))break;let e=iW(r,8),t=new DataView(e.buffer,e.byteOffset,e.length),i=t.getUint32(0);if(i>2097151){c.enqueue(iR);break}n=0x100000000*i+t.getUint32(4),s=3}else{if(iz(r)e){c.enqueue(iR);break}}}})}(Number.MAX_SAFE_INTEGER,this.socket.binaryType),s=e.readable.pipeThrough(r).getReader(),n=new 
TransformStream({transform(e,i){var r;r=t=>{let r,s=t.length;if(s<126)new DataView((r=new Uint8Array(1)).buffer).setUint8(0,s);else if(s<65536){let e=new DataView((r=new Uint8Array(3)).buffer);e.setUint8(0,126),e.setUint16(1,s)}else{let e=new DataView((r=new Uint8Array(9)).buffer);e.setUint8(0,127),e.setBigUint64(1,BigInt(s))}e.data&&"string"!=typeof e.data&&(r[0]|=128),i.enqueue(r),i.enqueue(t)},iB&&e.data instanceof Blob?e.data.arrayBuffer().then(iV).then(r):iL&&(e.data instanceof ArrayBuffer||iD(e.data))?r(iV(e.data)):iM(e,!1,e=>{t||(t=new TextEncoder),r(t.encode(e))})}});n.readable.pipeTo(e.writable),this._writer=n.writable.getWriter();let a=()=>{s.read().then(({done:e,value:t})=>{e||(this.onPacket(t),a())}).catch(e=>{})};a();let o={type:"open"};this.query.sid&&(o.data=`{"sid":"${this.query.sid}"}`),this._writer.write(o).then(()=>this.onOpen())})})}write(e){this.writable=!1;for(let t=0;t{r&&iX(()=>{this.writable=!0,this.emitReserved("drain")},this.setTimeoutFn)})}}doClose(){var e;null==(e=this._transport)||e.close()}},polling:class extends i9{constructor(e){super(e);let t=e&&e.forceBase64;this.supportsBinary=rt&&!t}request(e={}){return Object.assign(e,{xd:this.xd},this.opts),new i7(ri,this.uri(),e)}}},ro=/^(?:(?![^:@\/?#]+:[^:@\/]*@)(http|https|ws|wss):\/\/)?((?:(([^:@\/?#]*)(?::([^:@\/?#]*))?)?@)?((?:[a-f0-9]{0,4}:){2,7}[a-f0-9]{0,4}|[^:\/?#]*)(?::(\d*))?)(((\/(?:[^?#](?![^?#\/]*\.[^?#\/.]+(?:[?#]|$)))*\/?)?([^?#\/]*))(?:\?([^#]*))?(?:#(.*))?)/,rc=["source","protocol","authority","userInfo","user","password","host","port","relative","path","directory","file","query","anchor"];function rh(e){if(e.length>8e3)throw"URI too long";let t=e,i=e.indexOf("["),r=e.indexOf("]");-1!=i&&-1!=r&&(e=e.substring(0,i)+e.substring(i,r).replace(/:/g,";")+e.substring(r,e.length));let s=ro.exec(e||""),n={},a=14;for(;a--;)n[rc[a]]=s[a]||"";return 
-1!=i&&-1!=r&&(n.source=t,n.host=n.host.substring(1,n.host.length-1).replace(/;/g,":"),n.authority=n.authority.replace("[","").replace("]","").replace(/;/g,":"),n.ipv6uri=!0),n.pathNames=function(e,t){let i=t.replace(/\/{2,9}/g,"/").split("/");return("/"==t.slice(0,1)||0===t.length)&&i.splice(0,1),"/"==t.slice(-1)&&i.splice(i.length-1,1),i}(0,n.path),n.queryKey=function(e,t){let i={};return t.replace(/(?:^|&)([^&=]*)=?([^&]*)/g,function(e,t,r){t&&(i[t]=r)}),i}(0,n.query),n}let rl="function"==typeof addEventListener&&"function"==typeof removeEventListener,rd=[];rl&&addEventListener("offline",()=>{rd.forEach(e=>e())},!1);class rf extends iG{constructor(e,t){if(super(),this.binaryType="arraybuffer",this.writeBuffer=[],this._prevBufferLen=0,this._pingInterval=-1,this._pingTimeout=-1,this._maxPayload=-1,this._pingTimeoutTime=1/0,e&&"object"==typeof e&&(t=e,e=null),e){let i=rh(e);t.hostname=i.host,t.secure="https"===i.protocol||"wss"===i.protocol,t.port=i.port,i.query&&(t.query=i.query)}else t.host&&(t.hostname=rh(t.host).host);i0(this,t),this.secure=null!=t.secure?t.secure:"undefined"!=typeof location&&"https:"===location.protocol,t.hostname&&!t.port&&(t.port=this.secure?"443":"80"),this.hostname=t.hostname||("undefined"!=typeof location?location.hostname:"localhost"),this.port=t.port||("undefined"!=typeof location&&location.port?location.port:this.secure?"443":"80"),this.transports=[],this._transportsByName={},t.transports.forEach(e=>{let t=e.prototype.name;this.transports.push(t),this._transportsByName[t]=e}),this.opts=Object.assign({path:"/engine.io",agent:!1,withCredentials:!1,upgrade:!0,timestampParam:"t",rememberUpgrade:!1,addTrailingSlash:!0,rejectUnauthorized:!0,perMessageDeflate:{threshold:1024},transportOptions:{},closeOnBeforeunload:!1},t),this.opts.path=this.opts.path.replace(/\/$/,"")+(this.opts.addTrailingSlash?"/":""),"string"==typeof this.opts.query&&(this.opts.query=function(e){let t={},i=e.split("&");for(let 
e=0,r=i.length;e{this.transport&&(this.transport.removeAllListeners(),this.transport.close())},addEventListener("beforeunload",this._beforeunloadEventListener,!1)),"localhost"!==this.hostname&&(this._offlineEventListener=()=>{this._onClose("transport close",{description:"network connection lost"})},rd.push(this._offlineEventListener))),this.opts.withCredentials&&(this._cookieJar=void 0),this._open()}createTransport(e){let t=Object.assign({},this.opts.query);t.EIO=4,t.transport=e,this.id&&(t.sid=this.id);let i=Object.assign({},this.opts,{query:t,socket:this,hostname:this.hostname,secure:this.secure,port:this.port},this.opts.transportOptions[e]);return new this._transportsByName[e](i)}_open(){if(0===this.transports.length)return void this.setTimeoutFn(()=>{this.emitReserved("error","No transports available")},0);let e=this.opts.rememberUpgrade&&rf.priorWebsocketSuccess&&-1!==this.transports.indexOf("websocket")?"websocket":this.transports[0];this.readyState="opening";let t=this.createTransport(e);t.open(),this.setTransport(t)}setTransport(e){this.transport&&this.transport.removeAllListeners(),this.transport=e,e.on("drain",this._onDrain.bind(this)).on("packet",this._onPacket.bind(this)).on("error",this._onError.bind(this)).on("close",e=>this._onClose("transport close",e))}onOpen(){this.readyState="open",rf.priorWebsocketSuccess="websocket"===this.transport.name,this.emitReserved("open"),this.flush()}_onPacket(e){if("opening"===this.readyState||"open"===this.readyState||"closing"===this.readyState)switch(this.emitReserved("packet",e),this.emitReserved("heartbeat"),e.type){case"open":this.onHandshake(JSON.parse(e.data));break;case"ping":this._sendPacket("pong"),this.emitReserved("ping"),this.emitReserved("pong"),this._resetPingTimeout();break;case"error":let t=Error("server 
error");t.code=e.data,this._onError(t);break;case"message":this.emitReserved("data",e.data),this.emitReserved("message",e.data)}}onHandshake(e){this.emitReserved("handshake",e),this.id=e.sid,this.transport.query.sid=e.sid,this._pingInterval=e.pingInterval,this._pingTimeout=e.pingTimeout,this._maxPayload=e.maxPayload,this.onOpen(),"closed"!==this.readyState&&this._resetPingTimeout()}_resetPingTimeout(){this.clearTimeoutFn(this._pingTimeoutTimer);let e=this._pingInterval+this._pingTimeout;this._pingTimeoutTime=Date.now()+e,this._pingTimeoutTimer=this.setTimeoutFn(()=>{this._onClose("ping timeout")},e),this.opts.autoUnref&&this._pingTimeoutTimer.unref()}_onDrain(){this.writeBuffer.splice(0,this._prevBufferLen),this._prevBufferLen=0,0===this.writeBuffer.length?this.emitReserved("drain"):this.flush()}flush(){if("closed"!==this.readyState&&this.transport.writable&&!this.upgrading&&this.writeBuffer.length){let e=this._getWritablePackets();this.transport.send(e),this._prevBufferLen=e.length,this.emitReserved("flush")}}_getWritablePackets(){if(!(this._maxPayload&&"polling"===this.transport.name&&this.writeBuffer.length>1))return this.writeBuffer;let e=1;for(let t=0;t=57344?i+=3:(r++,i+=4);return i}(i):Math.ceil(1.33*(i.byteLength||i.size))),t>0&&e>this._maxPayload)return this.writeBuffer.slice(0,t);e+=2}return this.writeBuffer}_hasPingExpired(){if(!this._pingTimeoutTime)return!0;let e=Date.now()>this._pingTimeoutTime;return e&&(this._pingTimeoutTime=0,iX(()=>{this._onClose("ping timeout")},this.setTimeoutFn)),e}write(e,t,i){return this._sendPacket("message",e,t,i),this}send(e,t,i){return this._sendPacket("message",e,t,i),this}_sendPacket(e,t,i,r){if("function"==typeof t&&(r=t,t=void 0),"function"==typeof i&&(r=i,i=null),"closing"===this.readyState||"closed"===this.readyState)return;(i=i||{}).compress=!1!==i.compress;let s={type:e,data:t,options:i};this.emitReserved("packetCreate",s),this.writeBuffer.push(s),r&&this.once("flush",r),this.flush()}close(){let 
e=()=>{this._onClose("forced close"),this.transport.close()},t=()=>{this.off("upgrade",t),this.off("upgradeError",t),e()},i=()=>{this.once("upgrade",t),this.once("upgradeError",t)};return("opening"===this.readyState||"open"===this.readyState)&&(this.readyState="closing",this.writeBuffer.length?this.once("drain",()=>{this.upgrading?i():e()}):this.upgrading?i():e()),this}_onError(e){if(rf.priorWebsocketSuccess=!1,this.opts.tryAllTransports&&this.transports.length>1&&"opening"===this.readyState)return this.transports.shift(),this._open();this.emitReserved("error",e),this._onClose("transport error",e)}_onClose(e,t){if("opening"===this.readyState||"open"===this.readyState||"closing"===this.readyState){if(this.clearTimeoutFn(this._pingTimeoutTimer),this.transport.removeAllListeners("close"),this.transport.close(),this.transport.removeAllListeners(),rl&&(this._beforeunloadEventListener&&removeEventListener("beforeunload",this._beforeunloadEventListener,!1),this._offlineEventListener)){let e=rd.indexOf(this._offlineEventListener);-1!==e&&rd.splice(e,1)}this.readyState="closed",this.id=null,this.emitReserved("close",e,t),this.writeBuffer=[],this._prevBufferLen=0}}}rf.protocol=4;class ru extends rf{constructor(){super(...arguments),this._upgrades=[]}onOpen(){if(super.onOpen(),"open"===this.readyState&&this.opts.upgrade)for(let e=0;e{i||(t.send([{type:"ping",data:"probe"}]),t.once("packet",e=>{if(!i)if("pong"===e.type&&"probe"===e.data){if(this.upgrading=!0,this.emitReserved("upgrading",t),!t)return;rf.priorWebsocketSuccess="websocket"===t.name,this.transport.pause(()=>{i||"closed"!==this.readyState&&(h(),this.setTransport(t),t.send([{type:"upgrade"}]),this.emitReserved("upgrade",t),t=null,this.upgrading=!1,this.flush())})}else{let e=Error("probe error");e.transport=t.name,this.emitReserved("upgradeError",e)}}))};function s(){i||(i=!0,h(),t.close(),t=null)}let n=e=>{let i=Error("probe error: "+e);i.transport=t.name,s(),this.emitReserved("upgradeError",i)};function 
a(){n("transport closed")}function o(){n("socket closed")}function c(e){t&&e.name!==t.name&&s()}let h=()=>{t.removeListener("open",r),t.removeListener("error",n),t.removeListener("close",a),this.off("close",o),this.off("upgrading",c)};t.once("open",r),t.once("error",n),t.once("close",a),this.once("close",o),this.once("upgrading",c),-1!==this._upgrades.indexOf("webtransport")&&"webtransport"!==e?this.setTimeoutFn(()=>{i||t.open()},200):t.open()}onHandshake(e){this._upgrades=this._filterUpgrades(e.upgrades),super.onHandshake(e)}_filterUpgrades(e){let t=[];for(let i=0;ira[e]).filter(e=>!!e)),super(e,i)}}rp.protocol;let rb="function"==typeof ArrayBuffer,rg=Object.prototype.toString,ry="function"==typeof Blob||"undefined"!=typeof Blob&&"[object BlobConstructor]"===rg.call(Blob),rm="function"==typeof File||"undefined"!=typeof File&&"[object FileConstructor]"===rg.call(File);function rw(e){return rb&&(e instanceof ArrayBuffer||("function"==typeof ArrayBuffer.isView?ArrayBuffer.isView(e):e.buffer instanceof ArrayBuffer))||ry&&e instanceof Blob||rm&&e instanceof File}let rI=["connect","connect_error","disconnect","disconnecting","newListener","removeListener"],rk=5;!function(e){e[e.CONNECT=0]="CONNECT",e[e.DISCONNECT=1]="DISCONNECT",e[e.EVENT=2]="EVENT",e[e.ACK=3]="ACK",e[e.CONNECT_ERROR=4]="CONNECT_ERROR",e[e.BINARY_EVENT=5]="BINARY_EVENT",e[e.BINARY_ACK=6]="BINARY_ACK"}(o||(o={}));class rv{constructor(e){this.replacer=e}encode(e){return(e.type===o.EVENT||e.type===o.ACK)&&function e(t,i){if(!t||"object"!=typeof t)return!1;if(Array.isArray(t)){for(let i=0,r=t.length;i=0&&t.num{delete this.acks[e];for(let t=0;t{this.io.clearTimeoutFn(s),t.apply(this,e)};n.withError=!0,this.acks[e]=n}emitWithAck(e,...t){return new Promise((i,r)=>{let s=(e,t)=>e?r(e):i(t);s.withError=!0,t.push(s),this.emit(e,...t)})}_addToQueue(e){let t;"function"==typeof e[e.length-1]&&(t=e.pop());let 
i={id:this._queueSeq++,tryCount:0,pending:!1,args:e,flags:Object.assign({fromQueue:!0},this.flags)};e.push((e,...r)=>{if(i===this._queue[0])return null!==e?i.tryCount>this._opts.retries&&(this._queue.shift(),t&&t(e)):(this._queue.shift(),t&&t(null,...r)),i.pending=!1,this._drainQueue()}),this._queue.push(i),this._drainQueue()}_drainQueue(e=!1){if(!this.connected||0===this._queue.length)return;let t=this._queue[0];(!t.pending||e)&&(t.pending=!0,t.tryCount++,this.flags=t.flags,this.emit.apply(this,t.args))}packet(e){e.nsp=this.nsp,this.io._packet(e)}onopen(){"function"==typeof this.auth?this.auth(e=>{this._sendConnectPacket(e)}):this._sendConnectPacket(this.auth)}_sendConnectPacket(e){this.packet({type:o.CONNECT,data:this._pid?Object.assign({pid:this._pid,offset:this._lastOffset},e):e})}onerror(e){this.connected||this.emitReserved("connect_error",e)}onclose(e,t){this.connected=!1,delete this.id,this.emitReserved("disconnect",e,t),this._clearAcks()}_clearAcks(){Object.keys(this.acks).forEach(e=>{if(!this.sendBuffer.some(t=>String(t.id)===e)){let t=this.acks[e];delete this.acks[e],t.withError&&t.call(this,Error("socket has been disconnected"))}})}onpacket(e){if(e.nsp===this.nsp)switch(e.type){case o.CONNECT:e.data&&e.data.sid?this.onconnect(e.data.sid,e.data.pid):this.emitReserved("connect_error",Error("It seems you are trying to reach a Socket.IO server in v2.x with a v3.x client, but they are not compatible (more information here: https://socket.io/docs/v3/migrating-from-2-x-to-3-0/)"));break;case o.EVENT:case o.BINARY_EVENT:this.onevent(e);break;case o.ACK:case o.BINARY_ACK:this.onack(e);break;case o.DISCONNECT:this.ondisconnect();break;case o.CONNECT_ERROR:this.destroy();let t=Error(e.data.message);t.data=e.data.data,this.emitReserved("connect_error",t)}}onevent(e){let t=e.data||[];null!=e.id&&t.push(this.ack(e.id)),this.connected?this.emitEvent(t):this.receiveBuffer.push(Object.freeze(t))}emitEvent(e){if(this._anyListeners&&this._anyListeners.length)for(let t of 
this._anyListeners.slice())t.apply(this,e);super.emit.apply(this,e),this._pid&&e.length&&"string"==typeof e[e.length-1]&&(this._lastOffset=e[e.length-1])}ack(e){let t=this,i=!1;return function(...r){i||(i=!0,t.packet({type:o.ACK,id:e,data:r}))}}onack(e){let t=this.acks[e.id];"function"==typeof t&&(delete this.acks[e.id],t.withError&&e.data.unshift(null),t.apply(this,e.data))}onconnect(e,t){this.id=e,this.recovered=t&&this._pid===t,this._pid=t,this.connected=!0,this.emitBuffered(),this.emitReserved("connect"),this._drainQueue(!0)}emitBuffered(){this.receiveBuffer.forEach(e=>this.emitEvent(e)),this.receiveBuffer=[],this.sendBuffer.forEach(e=>{this.notifyOutgoingListeners(e),this.packet(e)}),this.sendBuffer=[]}ondisconnect(){this.destroy(),this.onclose("io server disconnect")}destroy(){this.subs&&(this.subs.forEach(e=>e()),this.subs=void 0),this.io._destroy(this)}disconnect(){return this.connected&&this.packet({type:o.DISCONNECT}),this.destroy(),this.connected&&this.onclose("io client disconnect"),this}close(){return this.disconnect()}compress(e){return this.flags.compress=e,this}get volatile(){return this.flags.volatile=!0,this}timeout(e){return this.flags.timeout=e,this}onAny(e){return this._anyListeners=this._anyListeners||[],this._anyListeners.push(e),this}prependAny(e){return this._anyListeners=this._anyListeners||[],this._anyListeners.unshift(e),this}offAny(e){if(!this._anyListeners)return this;if(e){let t=this._anyListeners;for(let i=0;i0&&e.jitter<=1?e.jitter:0,this.attempts=0}rN.prototype.duration=function(){var e=this.ms*Math.pow(this.factor,this.attempts++);if(this.jitter){var t=Math.random(),i=Math.floor(t*this.jitter*e);e=(1&Math.floor(10*t))==0?e-i:e+i}return 0|Math.min(e,this.max)},rN.prototype.reset=function(){this.attempts=0},rN.prototype.setMin=function(e){this.ms=e},rN.prototype.setMax=function(e){this.max=e},rN.prototype.setJitter=function(e){this.jitter=e};class rA extends iG{constructor(e,t){var 
i;super(),this.nsps={},this.subs=[],e&&"object"==typeof e&&(t=e,e=void 0),(t=t||{}).path=t.path||"/socket.io",this.opts=t,i0(this,t),this.reconnection(!1!==t.reconnection),this.reconnectionAttempts(t.reconnectionAttempts||1/0),this.reconnectionDelay(t.reconnectionDelay||1e3),this.reconnectionDelayMax(t.reconnectionDelayMax||5e3),this.randomizationFactor(null!=(i=t.randomizationFactor)?i:.5),this.backoff=new rN({min:this.reconnectionDelay(),max:this.reconnectionDelayMax(),jitter:this.randomizationFactor()}),this.timeout(null==t.timeout?2e4:t.timeout),this._readyState="closed",this.uri=e;let r=t.parser||h;this.encoder=new r.Encoder,this.decoder=new r.Decoder,this._autoConnect=!1!==t.autoConnect,this._autoConnect&&this.open()}reconnection(e){return arguments.length?(this._reconnection=!!e,e||(this.skipReconnect=!0),this):this._reconnection}reconnectionAttempts(e){return void 0===e?this._reconnectionAttempts:(this._reconnectionAttempts=e,this)}reconnectionDelay(e){var t;return void 0===e?this._reconnectionDelay:(this._reconnectionDelay=e,null==(t=this.backoff)||t.setMin(e),this)}randomizationFactor(e){var t;return void 0===e?this._randomizationFactor:(this._randomizationFactor=e,null==(t=this.backoff)||t.setJitter(e),this)}reconnectionDelayMax(e){var t;return void 0===e?this._reconnectionDelayMax:(this._reconnectionDelayMax=e,null==(t=this.backoff)||t.setMax(e),this)}timeout(e){return arguments.length?(this._timeout=e,this):this._timeout}maybeReconnectOnOpen(){!this._reconnecting&&this._reconnection&&0===this.backoff.attempts&&this.reconnect()}open(e){if(~this._readyState.indexOf("open"))return this;this.engine=new rp(this.uri,this.opts);let t=this.engine,i=this;this._readyState="opening",this.skipReconnect=!1;let r=rx(t,"open",function(){i.onopen(),e&&e()}),s=t=>{this.cleanup(),this._readyState="closed",this.emitReserved("error",t),e?e(t):this.maybeReconnectOnOpen()},n=rx(t,"error",s);if(!1!==this._timeout){let 
e=this._timeout,i=this.setTimeoutFn(()=>{r(),s(Error("timeout")),t.close()},e);this.opts.autoUnref&&i.unref(),this.subs.push(()=>{this.clearTimeoutFn(i)})}return this.subs.push(r),this.subs.push(n),this}connect(e){return this.open(e)}onopen(){this.cleanup(),this._readyState="open",this.emitReserved("open");let e=this.engine;this.subs.push(rx(e,"ping",this.onping.bind(this)),rx(e,"data",this.ondata.bind(this)),rx(e,"error",this.onerror.bind(this)),rx(e,"close",this.onclose.bind(this)),rx(this.decoder,"decoded",this.ondecoded.bind(this)))}onping(){this.emitReserved("ping")}ondata(e){try{this.decoder.add(e)}catch(e){this.onclose("parse error",e)}}ondecoded(e){iX(()=>{this.emitReserved("packet",e)},this.setTimeoutFn)}onerror(e){this.emitReserved("error",e)}socket(e,t){let i=this.nsps[e];return i?this._autoConnect&&!i.active&&i.connect():(i=new rP(this,e,t),this.nsps[e]=i),i}_destroy(e){for(let e of Object.keys(this.nsps))if(this.nsps[e].active)return;this._close()}_packet(e){let t=this.encoder.encode(e);for(let i=0;ie()),this.subs.length=0,this.decoder.destroy()}_close(){this.skipReconnect=!0,this._reconnecting=!1,this.onclose("forced close")}disconnect(){return this._close()}onclose(e,t){var i;this.cleanup(),null==(i=this.engine)||i.close(),this.backoff.reset(),this._readyState="closed",this.emitReserved("close",e,t),this._reconnection&&!this.skipReconnect&&this.reconnect()}reconnect(){if(this._reconnecting||this.skipReconnect)return this;let e=this;if(this.backoff.attempts>=this._reconnectionAttempts)this.backoff.reset(),this.emitReserved("reconnect_failed"),this._reconnecting=!1;else{let t=this.backoff.duration();this._reconnecting=!0;let i=this.setTimeoutFn(()=>{!e.skipReconnect&&(this.emitReserved("reconnect_attempt",e.backoff.attempts),e.skipReconnect||e.open(t=>{t?(e._reconnecting=!1,e.reconnect(),this.emitReserved("reconnect_error",t)):e.onreconnect()}))},t);this.opts.autoUnref&&i.unref(),this.subs.push(()=>{this.clearTimeoutFn(i)})}}onreconnect(){let 
e=this.backoff.attempts;this._reconnecting=!1,this.backoff.reset(),this.emitReserved("reconnect",e)}}let rT={};function rC(e,t){let i;"object"==typeof e&&(t=e,e=void 0);let r=function(e,t="",i){let r=e;i=i||"undefined"!=typeof location&&location,null==e&&(e=i.protocol+"//"+i.host),"string"==typeof e&&("/"===e.charAt(0)&&(e="/"===e.charAt(1)?i.protocol+e:i.host+e),/^(https?|wss?):\/\//.test(e)||(e=void 0!==i?i.protocol+"//"+e:"https://"+e),r=rh(e)),!r.port&&(/^(http|ws)$/.test(r.protocol)?r.port="80":/^(http|ws)s$/.test(r.protocol)&&(r.port="443")),r.path=r.path||"/";let s=-1!==r.host.indexOf(":")?"["+r.host+"]":r.host;return r.id=r.protocol+"://"+s+":"+r.port+t,r.href=r.protocol+"://"+s+(i&&i.port===r.port?"":":"+r.port),r}(e,(t=t||{}).path||"/socket.io"),s=r.source,n=r.id,a=r.path,o=rT[n]&&a in rT[n].nsps;return t.forceNew||t["force new connection"]||!1===t.multiplex||o?i=new rA(s,t):(rT[n]||(rT[n]=new rA(s,t)),i=rT[n]),r.query&&!t.query&&(t.query=r.queryKey),i.socket(r.path,t)}Object.assign(rC,{Manager:rA,Socket:rP,io:rC,connect:rC});class rR{socket;onDataCallback;constructor(e){this.socket=e,this.socket.on("authMessage",async e=>{this.onDataCallback&&await this.onDataCallback(e)})}async send(e){this.socket.emit("authMessage",e)}async onData(e){this.onDataCallback=e}}class rB{ioSocket;peer;connected=!1;id="";serverIdentityKey;eventCallbacks=new Map;constructor(e,t){this.ioSocket=e,this.peer=t,this.ioSocket.on("connect",()=>{this.connected=!0,this.id=this.ioSocket.id||"",this.fireEventCallbacks("connect")}),this.ioSocket.on("disconnect",e=>{this.connected=!1,this.fireEventCallbacks("disconnect",e)}),this.peer.listenForGeneralMessages((e,t)=>{this.serverIdentityKey=e;let{eventName:i,data:r}=this.decodeEventPayload(t);this.fireEventCallbacks(i,r)})}on(e,t){let i=this.eventCallbacks.get(e);return i||(i=[],this.eventCallbacks.set(e,i)),i.push(t),this}emit(e,t){let i=this.encodeEventPayload(e,t);return 
this.peer.toPeer(i,this.serverIdentityKey).catch(t=>{console.error(`BRC103IoClientSocket emit error for event "${e}":`,t)}),this}disconnect(){this.serverIdentityKey=void 0,this.ioSocket.disconnect()}fireEventCallbacks(e,t){let i=this.eventCallbacks.get(e);if(i)for(let e of i)e(t)}encodeEventPayload(e,t){return eO(JSON.stringify({eventName:e,data:t}),"utf8")}decodeEventPayload(e){try{let t=eA(e);return JSON.parse(t)}catch{return{eventName:"_unknown",data:void 0}}}}let rL=!1;function rD(...e){rL&&console.log(...e)}function rM(...e){rL&&console.warn(...e)}function rF(...e){console.error(...e)}class rV{host;authFetch;walletClient;socket;myIdentityKey;joinedRooms=new Set;lookupResolver;networkPreset;initialized=!1;constructor(e={}){let{host:t,walletClient:i,enableLogging:r=!1,networkPreset:s="mainnet",originator:n}=e,a="testnet"===this.networkPreset?"https://staging-messagebox.babbage.systems":"https://messagebox.babbage.systems";this.host=t?.trim()??a,this.walletClient=i??new id("auto",n),this.authFetch=new iS(this.walletClient),this.networkPreset=s,this.lookupResolver=new iP({networkPreset:s}),r&&(rL=!0)}async init(e=this.host,t){let i=e?.trim();if(""===i)throw Error("Cannot anoint host: No valid host provided");if(i!==this.host&&(this.initialized=!1,this.host=i),this.initialized)return;let r=await this.getIdentityKey(t),[s]=await this.queryAdvertisements(r,i,t);if(null==s||s?.host?.trim()===""||s?.host!==i){rD("[MB CLIENT] Anointing host:",i);let{txid:e}=await this.anointHost(i,t);if(null==e||""===e.trim())throw Error("Failed to anoint host: No transaction ID returned")}this.initialized=!0}async assertInitialized(){this.initialized&&null!=this.host&&""!==this.host.trim()||await this.init()}getJoinedRooms(){return this.joinedRooms}async getIdentityKey(e){if(null!=this.myIdentityKey&&""!==this.myIdentityKey.trim())return this.myIdentityKey;rD("[MB CLIENT] Fetching identity key...");try{let t=await this.walletClient.getPublicKey({identityKey:!0},e);return 
this.myIdentityKey=t.publicKey,rD(`[MB CLIENT] Identity key fetched: ${this.myIdentityKey}`),this.myIdentityKey}catch(e){throw rF("[MB CLIENT ERROR] Failed to fetch identity key:",e),Error("Identity key retrieval failed")}}get testSocket(){return this.socket}async initializeConnection(e){if(await this.assertInitialized(),rD("[MB CLIENT] initializeConnection() STARTED"),(null==this.myIdentityKey||""===this.myIdentityKey.trim())&&await this.getIdentityKey(e),null==this.myIdentityKey||""===this.myIdentityKey.trim())throw rF("[MB CLIENT ERROR] Identity key is still missing after retrieval!"),Error("Identity key is missing");if(rD("[MB CLIENT] Setting up WebSocket connection..."),null==this.socket){if("string"!=typeof this.host||""===this.host.trim())throw Error("Cannot initialize WebSocket: Host is not set");this.socket=function(e,t){let i=rC(e,t.managerOptions),r=new rR(i);return new rB(i,new iI(t.wallet,r,t.requestedCertificates,t.sessionManager))}(this.host,{wallet:this.walletClient});let e=!1,t=!1;this.socket.on("connect",()=>{rD("[MB CLIENT] Connected to WebSocket."),e||(rD("[MB CLIENT] Sending authentication data:",this.myIdentityKey),null==this.myIdentityKey||""===this.myIdentityKey.trim()?rF("[MB CLIENT ERROR] Cannot send authentication: Identity key is missing!"):(this.socket?.emit("authenticated",{identityKey:this.myIdentityKey}),e=!0))}),this.socket.on("authenticationSuccess",e=>{rD(`[MB CLIENT] WebSocket authentication successful: ${JSON.stringify(e)}`),t=!0}),this.socket.on("authenticationFailed",e=>{rF(`[MB CLIENT ERROR] WebSocket authentication failed: ${JSON.stringify(e)}`),t=!1}),this.socket.on("disconnect",()=>{rD("[MB CLIENT] Disconnected from MessageBox server"),this.socket=void 0,e=!1,t=!1}),this.socket.on("error",e=>{rF("[MB CLIENT ERROR] WebSocket error:",e)}),await new Promise((e,i)=>{setTimeout(()=>{t?(rD("[MB CLIENT] WebSocket fully authenticated and ready!"),e()):i(Error("[MB CLIENT ERROR] WebSocket authentication timed out!"))},5e3)})}}async 
resolveHostForRecipient(e,t){let i=await this.queryAdvertisements(e,void 0,t);return 0===i.length?(rM(`[MB CLIENT] No advertisements for ${e}, using default host ${this.host}`),this.host):i[0].host}async queryAdvertisements(e,t,i){let r=[];try{let s={identityKey:e??await this.getIdentityKey(i)};null!=t&&""!==t.trim()&&(s.host=t);let n=await this.lookupResolver.query({service:"ls_messagebox",query:s});if("output-list"!==n.type)throw Error(`Unexpected result type: ${String(n.type)}`);for(let e of n.outputs)try{let t=t4.fromBEEF(e.beef),i=t.outputs[e.outputIndex].lockingScript,[,s]=tz.decode(i).fields;if(null==s||0===s.length)throw Error("Empty host field");r.push({host:eA(s),txid:t.id("hex"),outputIndex:e.outputIndex,lockingScript:i,beef:e.beef})}catch{}}catch(e){rF("[MB CLIENT ERROR] _queryAdvertisements failed:",e)}return r}async joinRoom(e){if(await this.assertInitialized(),rD(`[MB CLIENT] Attempting to join WebSocket room: ${e}`),null==this.socket&&(rD("[MB CLIENT] No WebSocket connection. Initializing..."),await this.initializeConnection()),null==this.myIdentityKey||""===this.myIdentityKey.trim())throw Error("[MB CLIENT ERROR] Identity key is not defined");let t=`${this.myIdentityKey??""}-${e}`;if(this.joinedRooms.has(t))return void rD(`[MB CLIENT] Already joined WebSocket room: ${t}`);try{rD(`[MB CLIENT] Joining WebSocket room: ${t}`),await this.socket?.emit("joinRoom",t),this.joinedRooms.add(t),rD(`[MB CLIENT] Successfully joined room: ${t}`)}catch(e){rF(`[MB CLIENT ERROR] Failed to join WebSocket room: ${t}`,e)}}async listenForLiveMessages({onMessage:e,messageBox:t,originator:i}){if(await this.assertInitialized(),rD(`[MB CLIENT] Setting up listener for WebSocket room: ${t}`),await this.joinRoom(t),null==this.myIdentityKey||""===this.myIdentityKey.trim())throw Error("[MB CLIENT ERROR] Identity key is missing. 
Cannot construct room ID.");let r=`${this.myIdentityKey}-${t}`;rD(`[MB CLIENT] Listening for messages in room: ${r}`),this.socket?.on(`sendMessage-${r}`,t=>{(async()=>{rD(`[MB CLIENT] Received message in room ${r}:`,t);try{let e=t.body;if("string"==typeof e)try{e=JSON.parse(e)}catch{}if(null!=e&&"object"==typeof e&&"string"==typeof e.encryptedMessage){rD(`[MB CLIENT] Decrypting message from ${String(t.sender)}...`);let r=await this.walletClient.decrypt({protocolID:[1,"messagebox"],keyID:"1",counterparty:t.sender,ciphertext:eO(e.encryptedMessage,"base64")},i);t.body=eA(r.plaintext)}else rD("[MB CLIENT] Message is not encrypted."),t.body="string"==typeof e?e:(()=>{try{return JSON.stringify(e)}catch{return"[Error: Unstringifiable message]"}})()}catch(e){rF("[MB CLIENT ERROR] Failed to parse or decrypt live message:",e),t.body="[Error: Failed to decrypt or parse message]"}e(t)})()})}async sendLiveMessage({recipient:e,messageBox:t,body:i,messageId:r,skipEncryption:s,checkPermissions:n,originator:a}){let o,c;if(await this.assertInitialized(),null==e||""===e.trim())throw Error("[MB CLIENT ERROR] Recipient identity key is required");if(null==t||""===t.trim())throw Error("[MB CLIENT ERROR] MessageBox is required");if(null==i||"string"==typeof i&&""===i.trim())throw Error("[MB CLIENT ERROR] Message body cannot be empty");if(await this.joinRoom(t),null==this.socket||!this.socket.connected){rM("[MB CLIENT WARNING] WebSocket not connected, falling back to HTTP");let r=await this.resolveHostForRecipient(e);return await this.sendMessage({recipient:e,messageBox:t,body:i},r)}try{let t=await this.walletClient.createHmac({data:Array.from(new TextEncoder().encode(JSON.stringify(i))),protocolID:[1,"messagebox"],keyID:"1",counterparty:e},a);o=r??Array.from(t.hmac).map(e=>e.toString(16).padStart(2,"0")).join("")}catch(e){throw rF("[MB CLIENT ERROR] Failed to generate HMAC:",e),Error("Failed to generate message identifier.")}let h=`${e}-${t}`;return rD(`[MB CLIENT] Sending WebSocket 
message to room: ${h}`),c=!0===s?"string"==typeof i?i:JSON.stringify(i):JSON.stringify({encryptedMessage:eC((await this.walletClient.encrypt({protocolID:[1,"messagebox"],keyID:"1",counterparty:e,plaintext:eO("string"==typeof i?i:JSON.stringify(i),"utf8")},a)).ciphertext)}),await new Promise((r,a)=>{let l=`sendMessageAck-${h}`,d=!1,f=c=>{if(d)return;d=!0;let h=this.socket;if("function"==typeof h?.off&&h.off(l,f),rD("[MB CLIENT] Received WebSocket acknowledgment:",c),null==c||"success"!==c.status){rM("[MB CLIENT] WebSocket message failed or returned unexpected response. Falling back to HTTP.");let c={recipient:e,messageBox:t,body:i,messageId:o,skipEncryption:s,checkPermissions:n};this.resolveHostForRecipient(e).then(async e=>await this.sendMessage(c,e)).then(r).catch(a)}else rD("[MB CLIENT] Message sent successfully via WebSocket:",c),r(c)};this.socket?.on(l,f),this.socket?.emit("sendMessage",{roomId:h,message:{messageId:o,recipient:e,body:c}}),setTimeout(()=>{if(!d){d=!0;let c=this.socket;"function"==typeof c?.off&&c.off(l,f),rM("[CLIENT] WebSocket acknowledgment timed out, falling back to HTTP");let h={recipient:e,messageBox:t,body:i,messageId:o,skipEncryption:s,checkPermissions:n};this.resolveHostForRecipient(e).then(async e=>await this.sendMessage(h,e)).then(r).catch(a)}},1e4)})}async leaveRoom(e){if(await this.assertInitialized(),null==this.socket)return void rM("[MB CLIENT] Attempted to leave a room but WebSocket is not connected.");if(null==this.myIdentityKey||""===this.myIdentityKey.trim())throw Error("[MB CLIENT ERROR] Identity key is not defined");let t=`${this.myIdentityKey}-${e}`;rD(`[MB CLIENT] Leaving WebSocket room: ${t}`),this.socket.emit("leaveRoom",t),this.joinedRooms.delete(t)}async disconnectWebSocket(){await this.assertInitialized(),null!=this.socket?(rD("[MB CLIENT] Closing WebSocket connection..."),this.socket.disconnect(),this.socket=void 0):rD("[MB CLIENT] No active WebSocket connection to close.")}async sendMessage(e,t,i){let r,s,n;if(await 
this.assertInitialized(),null==e.recipient||""===e.recipient.trim())throw Error("You must provide a message recipient!");if(null==e.messageBox||""===e.messageBox.trim())throw Error("You must provide a messageBox to send this message into!");if(null==e.body||"string"==typeof e.body&&0===e.body.trim().length)throw Error("Every message must have a body!");if(!0===e.checkPermissions)try{rD("[MB CLIENT] Checking permissions and fees for message...");let i=await this.getMessageBoxQuote({recipient:e.recipient,messageBox:e.messageBox},t);if(-1===i.recipientFee)throw Error("You have been blocked from sending messages to this recipient.");if(i.recipientFee>0||i.deliveryFee>0){let s=i.recipientFee+i.deliveryFee;s>0&&(rD(`[MB CLIENT] Creating payment of ${s} sats for message...`),r=await this.createMessagePayment(e.recipient,i,t),rD("[MB CLIENT] Payment data prepared:",r))}}catch(e){throw Error(`Permission check failed: ${e instanceof Error?e.message:"Unknown error"}`)}try{let t=await this.walletClient.createHmac({data:Array.from(new TextEncoder().encode(JSON.stringify(e.body))),protocolID:[1,"messagebox"],keyID:"1",counterparty:e.recipient},i);s=e.messageId??Array.from(t.hmac).map(e=>e.toString(16).padStart(2,"0")).join("")}catch(e){throw rF("[MB CLIENT ERROR] Failed to generate HMAC:",e),Error("Failed to generate message identifier.")}n=!0===e.skipEncryption?"string"==typeof e.body?e.body:JSON.stringify(e.body):JSON.stringify({encryptedMessage:eC((await this.walletClient.encrypt({protocolID:[1,"messagebox"],keyID:"1",counterparty:e.recipient,plaintext:eO("string"==typeof e.body?e.body:JSON.stringify(e.body),"utf8")},i)).ciphertext)});let a={message:{...e,messageId:s,body:n},...null!=r&&{payment:r}};try{let r=t??await this.resolveHostForRecipient(e.recipient);if(rD("[MB CLIENT] Sending HTTP request to:",`${r}/sendMessage`),rD("[MB CLIENT] Request Body:",JSON.stringify(a,null,2)),null==this.myIdentityKey||""===this.myIdentityKey)try{let e=await 
this.walletClient.getPublicKey({identityKey:!0},i);this.myIdentityKey=e.publicKey,rD(`[MB CLIENT] Fetched identity key before sending request: ${this.myIdentityKey}`)}catch(e){throw rF("[MB CLIENT ERROR] Failed to fetch identity key:",e),Error("Identity key retrieval failed")}let n=await this.authFetch.fetch(`${r}/sendMessage`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(a)});if(n.bodyUsed)throw Error("[MB CLIENT ERROR] Response body has already been used!");let o=await n.json();if(rD("[MB CLIENT] Raw Response Body:",o),!n.ok)throw rF(`[MB CLIENT ERROR] Failed to send message. HTTP ${n.status}: ${n.statusText}`),Error(`Message sending failed: HTTP ${n.status} - ${n.statusText}`);if("success"!==o.status)throw rF(`[MB CLIENT ERROR] Server returned an error: ${String(o.description)}`),Error(o.description??"Unknown error from server.");return rD("[MB CLIENT] Message successfully sent."),{...o,messageId:s}}catch(t){rF("[MB CLIENT ERROR] Network or timeout error:",t);let e=t instanceof Error?t.message:"Unknown error";throw Error(`Failed to send message: ${e}`)}}async anointHost(e,t){rD("[MB CLIENT] Starting anointHost...");try{if(!e.startsWith("http"))throw Error("Invalid host URL");let i=await this.getIdentityKey(t);rD("[MB CLIENT] Fields - Identity:",i,"Host:",e);let r=[eO(i,"hex"),eO(e,"utf8")],s=new tz(this.walletClient,t);rD("Fields:",r.map(e=>ex(e))),rD("ProtocolID:",[1,"messagebox advertisement"]),rD("KeyID:","1"),rD("SignAs:","self"),rD("anyoneCanSpend:",!1),rD("forSelf:",!0);let n=await s.lock(r,[1,"messagebox advertisement"],"1","anyone",!0);rD("[MB CLIENT] PushDrop script:",n.toASM());let{tx:a,txid:o}=await this.walletClient.createAction({description:"Anoint host for overlay routing",outputs:[{basket:"overlay advertisements",lockingScript:n.toHex(),satoshis:1,outputDescription:"Overlay advertisement output"}],options:{randomizeOutputs:!1,acceptDelayedBroadcast:!1}},t);if(rD("[MB CLIENT] Transaction created:",o),void 0!==a){let 
e=new iA(["tm_messagebox"],{networkPreset:this.networkPreset}),t=await e.broadcast(t4.fromAtomicBEEF(a));if(rD("[MB CLIENT] Advertisement broadcast succeeded. TXID:",t.txid),"string"!=typeof t.txid)throw Error("Anoint failed: broadcast did not return a txid");return{txid:t.txid}}throw Error("Anoint failed: failed to create action!")}catch(e){throw rF("[MB CLIENT ERROR] anointHost threw:",e),e}}async revokeHostAdvertisement(e,t){rD("[MB CLIENT] Starting revokeHost...");let i=`${e.txid}.${e.outputIndex}`;try{let{signableTransaction:r}=await this.walletClient.createAction({description:"Revoke MessageBox host advertisement",inputBEEF:e.beef,inputs:[{outpoint:i,unlockingScriptLength:73,inputDescription:"Revoking host advertisement token"}]},t);if(void 0===r)throw Error("Failed to create signable transaction.");let s=t4.fromBEEF(r.tx),n=new tz(this.walletClient,t),a=await n.unlock([1,"messagebox advertisement"],"1","anyone","all",!1,e.outputIndex,e.lockingScript),o=await a.sign(s,e.outputIndex),{tx:c}=await this.walletClient.signAction({reference:r.reference,spends:{[e.outputIndex]:{unlockingScript:o.toHex()}},options:{acceptDelayedBroadcast:!1}},t);if(void 0===c)throw Error("Failed to finalize the transaction signature.");let h=new iA(["tm_messagebox"],{networkPreset:this.networkPreset}),l=await h.broadcast(t4.fromAtomicBEEF(c));if(rD("[MB CLIENT] Revocation broadcast succeeded. 
TXID:",l.txid),"string"!=typeof l.txid)throw Error("Revoke failed: broadcast did not return a txid");return{txid:l.txid}}catch(e){throw rF("[MB CLIENT ERROR] revokeHost threw:",e),e}}async listMessages({messageBox:e,host:t,originator:i,acceptPayments:r}){if("boolean"!=typeof r&&(r=!0),"string"!=typeof t&&await this.assertInitialized(),""===e.trim())throw Error("MessageBox cannot be empty");let s=null!=t?[t]:[];if(0===s.length){let e=await this.queryAdvertisements(await this.getIdentityKey(i),i);s=Array.from(new Set([this.host,...e.map(e=>e.host)]))}let n=async t=>{try{rD(`[MB CLIENT] Listing messages from ${t}…`);let i=await this.authFetch.fetch(`${t}/listMessages`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({messageBox:e})});if(!i.ok)throw Error(`HTTP ${i.status} ${i.statusText}`);let r=await i.json();if("error"===r.status)throw Error(r.description??"Unknown server error");return r.messages}catch(e){throw rD(`[MB CLIENT DEBUG] listMessages failed for ${t}:`,e),e}},a=await Promise.allSettled(s.map(n)),o=[],c=[];for(let e of a)"fulfilled"===e.status?o.push(e.value):c.push(e.reason);if(0===o.length)throw Error("Failed to retrieve messages from any host");let h=new Map;for(let e of o)for(let t of e)h.has(t.messageId)||h.set(t.messageId,t);if(0===h.size)return[];let l=e=>{try{return JSON.parse(e)}catch{return e}},d=Array.from(h.values());for(let e of d)try{let t,s="string"==typeof e.body?l(e.body):e.body,n=s;if(null!=s&&"object"==typeof s&&"message"in s){let e=s.message;n="string"==typeof e?l(e):e,t=s.payment}if(r&&t?.tx!=null&&null!=t.outputs)try{rD(`[MB CLIENT] Processing recipient payment in message from ${String(e.sender)}…`);let r=t.outputs.filter(e=>"wallet payment"===e.protocol);r.length>0?(rD(`[MB CLIENT] Internalizing ${r.length} recipient payment output(s)…`),(await this.walletClient.internalizeAction({tx:t.tx,outputs:r,description:t.description??"MessageBox recipient payment"},i)).accepted?rD("[MB CLIENT] Successfully 
internalized recipient payment"):rM("[MB CLIENT] Recipient payment internalization was not accepted")):rD("[MB CLIENT] No wallet payment outputs found in payment data")}catch(e){rF("[MB CLIENT ERROR] Failed to internalize recipient payment:",e)}if(null!=n&&"object"==typeof n&&"string"==typeof n.encryptedMessage){rD(`[MB CLIENT] Decrypting message from ${String(e.sender)}…`);let t=await this.walletClient.decrypt({protocolID:[1,"messagebox"],keyID:"1",counterparty:e.sender,ciphertext:eO(n.encryptedMessage,"base64")},i),r=eA(t.plaintext);e.body=l(r)}else e.body=n??s}catch(t){rF("[MB CLIENT ERROR] Failed to parse or decrypt message in list:",t),e.body="[Error: Failed to decrypt or parse message]"}return d.sort((e,t)=>Number(t.timestamp??0)-Number(e.timestamp??0)),d}async listMessagesLite({messageBox:e,host:t}){let i=await this.authFetch.fetch(`${t}/listMessages`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({messageBox:e})}),r=await i.json();if("error"===r.status)throw Error(r.description??"Unknown server error");let s=r.messages,n=e=>{try{return JSON.parse(e)}catch{return e}};for(let e of s)try{let t="string"==typeof e.body?n(e.body):e.body,i=t;if(null!=t&&"object"==typeof t&&"message"in t){let e=t.message;i="string"==typeof e?n(e):e}if(null!=i&&"object"==typeof i&&"string"==typeof i.encryptedMessage){let t=await this.walletClient.decrypt({protocolID:[1,"messagebox"],keyID:"1",counterparty:e.sender,ciphertext:eO(i.encryptedMessage,"base64")}),r=eA(t.plaintext);e.body=n(r)}else e.body=i??t}catch(t){rF("[MB CLIENT ERROR] Failed to parse or decrypt message in list:",t),e.body="[Error: Failed to decrypt or parse message]"}return s}async acknowledgeMessage({messageIds:e,host:t,originator:i}){if(await this.assertInitialized(),!Array.isArray(e)||0===e.length)throw Error("Message IDs array cannot be empty");rD(`[MB CLIENT] Acknowledging messages ${JSON.stringify(e)}…`);let r=null!=t?[t]:[];if(0===r.length){let e=await 
this.getIdentityKey(i),t=await this.queryAdvertisements(e,void 0,i);r=Array.from(new Set([this.host,...t.map(e=>e.host)]))}let s=async t=>{try{let i=await this.authFetch.fetch(`${t}/acknowledgeMessage`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({messageIds:e})});if(!i.ok)throw Error(`HTTP ${i.status}`);let r=await i.json();if("error"===r.status)throw Error(r.description);return rD(`[MB CLIENT] Acknowledged on ${t}`),r.status}catch(e){return rM(`[MB CLIENT WARN] acknowledgeMessage failed for ${t}:`,e),null}},n=await Promise.allSettled(r.map(s)),a=n.filter(e=>"fulfilled"===e.status),o=a.find(e=>null!=e.value)?.value;if(null!=o)return o;let c=[];for(let e of n)"rejected"===e.status&&c.push(e.reason);throw Error(`Failed to acknowledge messages on all hosts: ${c.map(e=>String(e)).join("; ")}`)}async setMessageBoxPermission(e,t){await this.assertInitialized();let i=t??this.host;rD("[MB CLIENT] Setting messageBox permission...");let r=await this.authFetch.fetch(`${i}/permissions/set`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({messageBox:e.messageBox,recipientFee:e.recipientFee,...null!=e.sender&&{sender:e.sender}})});if(!r.ok){let e=await r.json().catch(()=>({}));throw Error(`Failed to set permission: HTTP ${r.status} - ${""!==String(e.description)?String(e.description):r.statusText}`)}let{status:s,description:n}=await r.json();if("error"===s)throw Error(n??"Failed to set permission")}async getMessageBoxPermission(e,t){await this.assertInitialized();let i=t??await this.resolveHostForRecipient(e.recipient),r=new URLSearchParams({recipient:e.recipient,messageBox:e.messageBox,...null!=e.sender&&{sender:e.sender}});rD("[MB CLIENT] Getting messageBox permission...");let s=await this.authFetch.fetch(`${i}/permissions/get?${r.toString()}`,{method:"GET"});if(!s.ok){let e=await s.json().catch(()=>({}));throw Error(`Failed to get permission: HTTP ${s.status} - 
${""!==String(e.description)?String(e.description):s.statusText}`)}let n=await s.json();if("error"===n.status)throw Error(n.description??"Failed to get permission");return n.permission}async getMessageBoxQuote(e,t){await this.assertInitialized();let i=t??await this.resolveHostForRecipient(e.recipient),r=new URLSearchParams({recipient:e.recipient,messageBox:e.messageBox});rD("[MB CLIENT] Getting messageBox quote...");let s=await this.authFetch.fetch(`${i}/permissions/quote?${r.toString()}`,{method:"GET"});if(!s.ok){let e=await s.json().catch(()=>({}));throw Error(`Failed to get quote: HTTP ${s.status} - ${String(e.description)??s.statusText}`)}let{status:n,description:a,quote:o}=await s.json();if("error"===n)throw Error(a??"Failed to get quote");let c=s.headers.get("x-bsv-auth-identity-key");if(null==c)throw Error("Failed to get quote: Delivery agent did not provide their identity key");return{recipientFee:o.recipientFee,deliveryFee:o.deliveryFee,deliveryAgentIdentityKey:c}}async listMessageBoxPermissions(e,t){await this.assertInitialized();let i=t??this.host,r=new URLSearchParams;e?.messageBox!=null&&r.set("message_box",e.messageBox),e?.limit!==void 0&&r.set("limit",e.limit.toString()),e?.offset!==void 0&&r.set("offset",e.offset.toString()),rD("[MB CLIENT] Listing messageBox permissions with params:",r.toString());let s=await this.authFetch.fetch(`${i}/permissions/list?${r.toString()}`,{method:"GET"});if(!s.ok){let e=await s.json().catch(()=>({}));throw Error(`Failed to list permissions: HTTP ${s.status} - ${""!==String(e.description)?String(e.description):s.statusText}`)}let n=await s.json();if("error"===n.status)throw Error(n.description??"Failed to list permissions");return n.permissions.map(e=>({sender:e.sender,messageBox:e.message_box,recipientFee:e.recipient_fee,status:rV.getStatusFromFee(e.recipient_fee),createdAt:e.created_at,updatedAt:e.updated_at}))}async allowNotificationsFromPeer(e,t=0,i){await 
this.setMessageBoxPermission({messageBox:"notifications",sender:e,recipientFee:t},i)}async denyNotificationsFromPeer(e,t){await this.setMessageBoxPermission({messageBox:"notifications",sender:e,recipientFee:-1},t)}async checkPeerNotificationStatus(e,t){let i=await this.getIdentityKey();return await this.getMessageBoxPermission({recipient:i,messageBox:"notifications",sender:e},t)}async listPeerNotifications(e){return await this.listMessageBoxPermissions({messageBox:"notifications"},e)}async sendNotification(e,t,i){return await this.assertInitialized(),await this.sendMessage({recipient:e,messageBox:"notifications",body:t,checkPermissions:!0},i)}async registerDevice(e,t){if(await this.assertInitialized(),null==e.fcmToken||""===e.fcmToken.trim())throw Error("fcmToken is required and must be a non-empty string");if(null!=e.platform&&!["ios","android","web"].includes(e.platform))throw Error("platform must be one of: ios, android, web");let i=t??this.host;rD("[MB CLIENT] Registering device for FCM notifications...");let r=await this.authFetch.fetch(`${i}/registerDevice`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({fcmToken:e.fcmToken.trim(),deviceId:e.deviceId?.trim()??void 0,platform:e.platform??void 0})});if(!r.ok){let e=String((await r.json().catch(()=>({}))).description)??r.statusText;throw Error(`Failed to register device: HTTP ${r.status} - ${e}`)}let s=await r.json();if("error"===s.status)throw Error(s.description??"Failed to register device");return rD("[MB CLIENT] Device registered successfully"),{status:s.status,message:s.message,deviceId:s.deviceId}}async listRegisteredDevices(e){await this.assertInitialized();let t=e??this.host;rD("[MB CLIENT] Listing registered devices...");let i=await this.authFetch.fetch(`${t}/devices`,{method:"GET"});if(!i.ok){let e=String((await i.json().catch(()=>({}))).description)??i.statusText;throw Error(`Failed to list devices: HTTP ${i.status} - ${e}`)}let r=await 
i.json();if("error"===r.status)throw Error(r.description??"Failed to list devices");return rD(`[MB CLIENT] Found ${r.devices.length} registered devices`),r.devices}static getStatusFromFee(e){return -1===e?"blocked":0===e?"always_allow":"payment_required"}async createMessagePayment(e,t,i="MessageBox delivery payment",r){if(t.recipientFee<=0&&t.deliveryFee<=0)throw Error("No payment required");rD(`[MB CLIENT] Creating payment transaction for ${t.recipientFee} sats (delivery: ${t.deliveryFee}, recipient: ${t.recipientFee})`);let s=[],n=[],a=await this.getIdentityKey(),o=0;if(t.deliveryFee>0){let e=eC(tl(32)),i=eC(tl(32)),{publicKey:c}=await this.walletClient.getPublicKey({protocolID:[2,"3241645161d8"],keyID:`${e} ${i}`,counterparty:t.deliveryAgentIdentityKey},r),h=new tK().lock(to.fromString(c).toAddress()).toHex();n.push({satoshis:t.deliveryFee,lockingScript:h,outputDescription:"MessageBox server delivery fee",customInstructions:JSON.stringify({derivationPrefix:e,derivationSuffix:i,recipientIdentityKey:t.deliveryAgentIdentityKey})}),s.push({outputIndex:o++,protocol:"wallet payment",paymentRemittance:{derivationPrefix:e,derivationSuffix:i,senderIdentityKey:a}})}if(t.recipientFee>0){let i=eC(tl(32)),r=eC(tl(32)),a=new t7("anyone"),{publicKey:c}=await a.getPublicKey({protocolID:[2,"3241645161d8"],keyID:`${i} ${r}`,counterparty:e});if(null==c||""===c.trim())throw Error("Failed to derive recipient's public key");let h=new tK().lock(to.fromString(c).toAddress()).toHex();n.push({satoshis:t.recipientFee,lockingScript:h,outputDescription:"Recipient message fee",customInstructions:JSON.stringify({derivationPrefix:i,derivationSuffix:r,recipientIdentityKey:e})}),s.push({outputIndex:o++,protocol:"wallet payment",paymentRemittance:{derivationPrefix:i,derivationSuffix:r,senderIdentityKey:(await a.getPublicKey({identityKey:!0})).publicKey}})}let{tx:c}=await 
this.walletClient.createAction({description:i,outputs:n,options:{randomizeOutputs:!1,acceptDelayedBroadcast:!1}},r);if(null==c)throw Error("Failed to create payment transaction");return{tx:c,outputs:s,description:i}}}return globalThis.run=async(e,t)=>{let i;try{i=function(e){let t,i,r=new eV(JSON.parse(e)),s=r.readUInt8();if(1===s)t=r.read(32),i=r.read();else if(2===s)t=r.read(32),r.read(16),i=r.read();else throw Error(`Unsupported snapshot version: ${s}`);return ex(new eV(new tT(t).decrypt(i)).read(32))}(e)}catch(e){return console.error("error getting key from snap",e),{title:"New Message",body:`Error getting key from snap: ${e}`,origin:"FOO",timestamp:Date.now(),data:{messageId:"FOO",sender:"FOO",fcmMessageId:t,from:"fcm"}}}console.error("got key out of snap"),console.error("key",i);let r=new rV({enableLogging:!0,walletClient:new ip(new tp(i,"hex")),host:"https://messagebox.babbage.systems"});console.error("[headless] \uD83D\uDD0D Listing messages from notifications box...");let s=(await r.listMessagesLite({messageBox:"notifications",host:"https://messagebox.babbage.systems"})).find(e=>e.messageId===t);return s?{title:s.sender,body:"string"==typeof s.body?s.body:JSON.stringify(s.body),origin:s.sender||"unknown",timestamp:Date.now(),data:{messageId:s.messageId,sender:s.sender,fcmMessageId:t,from:"fcm"}}:(console.warn(`[headless] \u{26A0}\u{FE0F} Message ${t} not found in notifications box`),null)},{}})()); -//# sourceMappingURL=wallet-bundle.js.map \ No newline at end of file diff --git a/package-lock.json b/package-lock.json index 6d344e7..5f10868 100644 --- a/package-lock.json +++ b/package-lock.json @@ -19683,7 +19683,7 @@ "@babel/runtime": "^7.20.0", "@expo/code-signing-certificates": "^0.0.5", "@expo/config": "~11.0.13", - "@expo/config-plugins": "~10.1.1", + "@expo/config-plugins": "~10.1.2", "@expo/devcert": "^1.1.2", "@expo/env": "~1.0.7", "@expo/image-utils": "^0.7.6", @@ -19692,7 +19692,7 @@ "@expo/osascript": "^2.2.5", "@expo/package-manager": "^1.8.6", 
"@expo/plist": "^0.3.5", - "@expo/prebuild-config": "~9.0.0", + "@expo/prebuild-config": "^9.0.11", "@expo/spawn-async": "^1.7.2", "@expo/ws-tunnel": "^1.0.1", "@expo/xcpretty": "^4.3.0", @@ -19977,7 +19977,7 @@ "integrity": "sha512-TnGb4u/zUZetpav9sx/3fWK71oCPaOjZHoVED9NaEncktAd0Eonhq5NUghiJmkUGt3gGSjRAEBXiBbbY9/B1LA==", "requires": { "@babel/code-frame": "~7.10.4", - "@expo/config-plugins": "~10.1.1", + "@expo/config-plugins": "~10.1.2", "@expo/config-types": "^53.0.5", "@expo/json-file": "^9.1.5", "deepmerge": "^4.3.1", @@ -20721,7 +20721,7 @@ "integrity": "sha512-0DsxhhixRbCCvmYskBTq8czsU0YOBsntYURhWPNpkl0IPVpeP9haE5W4OwtHGzXEbmHdzaoDwNmVcWjS/mqbDw==", "requires": { "@expo/config": "~11.0.13", - "@expo/config-plugins": "~10.1.1", + "@expo/config-plugins": "~10.1.2", "@expo/config-types": "^53.0.5", "@expo/image-utils": "^0.7.6", "@expo/json-file": "^9.1.5", @@ -25023,7 +25023,7 @@ "@babel/runtime": "^7.20.0", "@expo/cli": "0.24.20", "@expo/config": "~11.0.13", - "@expo/config-plugins": "~10.1.1", + "@expo/config-plugins": "~10.1.2", "@expo/fingerprint": "0.13.4", "@expo/metro-config": "0.20.17", "@expo/vector-icons": "^14.0.0", @@ -25298,7 +25298,7 @@ "resolved": "https://registry.npmjs.org/expo-splash-screen/-/expo-splash-screen-0.30.10.tgz", "integrity": "sha512-Tt9va/sLENQDQYeOQ6cdLdGvTZ644KR3YG9aRlnpcs2/beYjOX1LHT510EGzVN9ljUTg+1ebEo5GGt2arYtPjw==", "requires": { - "@expo/prebuild-config": "~9.0.0" + "@expo/prebuild-config": "^9.0.10" } }, "expo-status-bar": { diff --git a/utils/logging.config.ts b/utils/logging.config.ts index 8f9a56e..b66d346 100644 --- a/utils/logging.config.ts +++ b/utils/logging.config.ts @@ -4,7 +4,7 @@ const defaultLogging = false // Specific file logging overrides const loggingConfig: { [file: string]: boolean } = { default: defaultLogging, - 'context/WalletContext': true + 'context/WalletWebViewContext': true } export default loggingConfig diff --git a/wallet/dist/index.html b/wallet/dist/index.html index 326ccad..92f36c9 100644 
--- a/wallet/dist/index.html +++ b/wallet/dist/index.html @@ -26,6 +26,17 @@ return /******/ (() => { // webpackBootstrap /******/ var __webpack_modules__ = ({ +/***/ "../node_modules/idb/build/index.js": +/*!******************************************!*\ + !*** ../node_modules/idb/build/index.js ***! + \******************************************/ +/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => { + +"use strict"; +eval("{__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ deleteDB: () => (/* binding */ deleteDB),\n/* harmony export */ openDB: () => (/* binding */ openDB),\n/* harmony export */ unwrap: () => (/* binding */ unwrap),\n/* harmony export */ wrap: () => (/* binding */ wrap)\n/* harmony export */ });\nconst instanceOfAny = (object, constructors) => constructors.some((c) => object instanceof c);\n\nlet idbProxyableTypes;\nlet cursorAdvanceMethods;\n// This is a function to prevent it throwing up in node environments.\nfunction getIdbProxyableTypes() {\n return (idbProxyableTypes ||\n (idbProxyableTypes = [\n IDBDatabase,\n IDBObjectStore,\n IDBIndex,\n IDBCursor,\n IDBTransaction,\n ]));\n}\n// This is a function to prevent it throwing up in node environments.\nfunction getCursorAdvanceMethods() {\n return (cursorAdvanceMethods ||\n (cursorAdvanceMethods = [\n IDBCursor.prototype.advance,\n IDBCursor.prototype.continue,\n IDBCursor.prototype.continuePrimaryKey,\n ]));\n}\nconst transactionDoneMap = new WeakMap();\nconst transformCache = new WeakMap();\nconst reverseTransformCache = new WeakMap();\nfunction promisifyRequest(request) {\n const promise = new Promise((resolve, reject) => {\n const unlisten = () => {\n request.removeEventListener('success', success);\n request.removeEventListener('error', error);\n };\n const success = () => {\n resolve(wrap(request.result));\n unlisten();\n };\n const error = () => {\n 
reject(request.error);\n unlisten();\n };\n request.addEventListener('success', success);\n request.addEventListener('error', error);\n });\n // This mapping exists in reverseTransformCache but doesn't exist in transformCache. This\n // is because we create many promises from a single IDBRequest.\n reverseTransformCache.set(promise, request);\n return promise;\n}\nfunction cacheDonePromiseForTransaction(tx) {\n // Early bail if we've already created a done promise for this transaction.\n if (transactionDoneMap.has(tx))\n return;\n const done = new Promise((resolve, reject) => {\n const unlisten = () => {\n tx.removeEventListener('complete', complete);\n tx.removeEventListener('error', error);\n tx.removeEventListener('abort', error);\n };\n const complete = () => {\n resolve();\n unlisten();\n };\n const error = () => {\n reject(tx.error || new DOMException('AbortError', 'AbortError'));\n unlisten();\n };\n tx.addEventListener('complete', complete);\n tx.addEventListener('error', error);\n tx.addEventListener('abort', error);\n });\n // Cache it for later retrieval.\n transactionDoneMap.set(tx, done);\n}\nlet idbProxyTraps = {\n get(target, prop, receiver) {\n if (target instanceof IDBTransaction) {\n // Special handling for transaction.done.\n if (prop === 'done')\n return transactionDoneMap.get(target);\n // Make tx.store return the only store in the transaction, or undefined if there are many.\n if (prop === 'store') {\n return receiver.objectStoreNames[1]\n ? 
undefined\n : receiver.objectStore(receiver.objectStoreNames[0]);\n }\n }\n // Else transform whatever we get back.\n return wrap(target[prop]);\n },\n set(target, prop, value) {\n target[prop] = value;\n return true;\n },\n has(target, prop) {\n if (target instanceof IDBTransaction &&\n (prop === 'done' || prop === 'store')) {\n return true;\n }\n return prop in target;\n },\n};\nfunction replaceTraps(callback) {\n idbProxyTraps = callback(idbProxyTraps);\n}\nfunction wrapFunction(func) {\n // Due to expected object equality (which is enforced by the caching in `wrap`), we\n // only create one new func per func.\n // Cursor methods are special, as the behaviour is a little more different to standard IDB. In\n // IDB, you advance the cursor and wait for a new 'success' on the IDBRequest that gave you the\n // cursor. It's kinda like a promise that can resolve with many values. That doesn't make sense\n // with real promises, so each advance methods returns a new promise for the cursor object, or\n // undefined if the end of the cursor has been reached.\n if (getCursorAdvanceMethods().includes(func)) {\n return function (...args) {\n // Calling the original function with the proxy as 'this' causes ILLEGAL INVOCATION, so we use\n // the original object.\n func.apply(unwrap(this), args);\n return wrap(this.request);\n };\n }\n return function (...args) {\n // Calling the original function with the proxy as 'this' causes ILLEGAL INVOCATION, so we use\n // the original object.\n return wrap(func.apply(unwrap(this), args));\n };\n}\nfunction transformCachableValue(value) {\n if (typeof value === 'function')\n return wrapFunction(value);\n // This doesn't return, it just creates a 'done' promise for the transaction,\n // which is later returned for transaction.done (see idbObjectHandler).\n if (value instanceof IDBTransaction)\n cacheDonePromiseForTransaction(value);\n if (instanceOfAny(value, getIdbProxyableTypes()))\n return new Proxy(value, idbProxyTraps);\n // Return 
the same value back if we're not going to transform it.\n return value;\n}\nfunction wrap(value) {\n // We sometimes generate multiple promises from a single IDBRequest (eg when cursoring), because\n // IDB is weird and a single IDBRequest can yield many responses, so these can't be cached.\n if (value instanceof IDBRequest)\n return promisifyRequest(value);\n // If we've already transformed this value before, reuse the transformed value.\n // This is faster, but it also provides object equality.\n if (transformCache.has(value))\n return transformCache.get(value);\n const newValue = transformCachableValue(value);\n // Not all types are transformed.\n // These may be primitive types, so they can't be WeakMap keys.\n if (newValue !== value) {\n transformCache.set(value, newValue);\n reverseTransformCache.set(newValue, value);\n }\n return newValue;\n}\nconst unwrap = (value) => reverseTransformCache.get(value);\n\n/**\n * Open a database.\n *\n * @param name Name of the database.\n * @param version Schema version.\n * @param callbacks Additional callbacks.\n */\nfunction openDB(name, version, { blocked, upgrade, blocking, terminated } = {}) {\n const request = indexedDB.open(name, version);\n const openPromise = wrap(request);\n if (upgrade) {\n request.addEventListener('upgradeneeded', (event) => {\n upgrade(wrap(request.result), event.oldVersion, event.newVersion, wrap(request.transaction), event);\n });\n }\n if (blocked) {\n request.addEventListener('blocked', (event) => blocked(\n // Casting due to https://github.com/microsoft/TypeScript-DOM-lib-generator/pull/1405\n event.oldVersion, event.newVersion, event));\n }\n openPromise\n .then((db) => {\n if (terminated)\n db.addEventListener('close', () => terminated());\n if (blocking) {\n db.addEventListener('versionchange', (event) => blocking(event.oldVersion, event.newVersion, event));\n }\n })\n .catch(() => { });\n return openPromise;\n}\n/**\n * Delete a database.\n *\n * @param name Name of the database.\n 
*/\nfunction deleteDB(name, { blocked } = {}) {\n const request = indexedDB.deleteDatabase(name);\n if (blocked) {\n request.addEventListener('blocked', (event) => blocked(\n // Casting due to https://github.com/microsoft/TypeScript-DOM-lib-generator/pull/1405\n event.oldVersion, event));\n }\n return wrap(request).then(() => undefined);\n}\n\nconst readMethods = ['get', 'getKey', 'getAll', 'getAllKeys', 'count'];\nconst writeMethods = ['put', 'add', 'delete', 'clear'];\nconst cachedMethods = new Map();\nfunction getMethod(target, prop) {\n if (!(target instanceof IDBDatabase &&\n !(prop in target) &&\n typeof prop === 'string')) {\n return;\n }\n if (cachedMethods.get(prop))\n return cachedMethods.get(prop);\n const targetFuncName = prop.replace(/FromIndex$/, '');\n const useIndex = prop !== targetFuncName;\n const isWrite = writeMethods.includes(targetFuncName);\n if (\n // Bail if the target doesn't exist on the target. Eg, getAll isn't in Edge.\n !(targetFuncName in (useIndex ? IDBIndex : IDBObjectStore).prototype) ||\n !(isWrite || readMethods.includes(targetFuncName))) {\n return;\n }\n const method = async function (storeName, ...args) {\n // isWrite ? 'readwrite' : undefined gzipps better, but fails in Edge :(\n const tx = this.transaction(storeName, isWrite ? 
'readwrite' : 'readonly');\n let target = tx.store;\n if (useIndex)\n target = target.index(args.shift());\n // Must reject if op rejects.\n // If it's a write operation, must reject if tx.done rejects.\n // Must reject with op rejection first.\n // Must resolve with op value.\n // Must handle both promises (no unhandled rejections)\n return (await Promise.all([\n target[targetFuncName](...args),\n isWrite && tx.done,\n ]))[0];\n };\n cachedMethods.set(prop, method);\n return method;\n}\nreplaceTraps((oldTraps) => ({\n ...oldTraps,\n get: (target, prop, receiver) => getMethod(target, prop) || oldTraps.get(target, prop, receiver),\n has: (target, prop) => !!getMethod(target, prop) || oldTraps.has(target, prop),\n}));\n\nconst advanceMethodProps = ['continue', 'continuePrimaryKey', 'advance'];\nconst methodMap = {};\nconst advanceResults = new WeakMap();\nconst ittrProxiedCursorToOriginalProxy = new WeakMap();\nconst cursorIteratorTraps = {\n get(target, prop) {\n if (!advanceMethodProps.includes(prop))\n return target[prop];\n let cachedFunc = methodMap[prop];\n if (!cachedFunc) {\n cachedFunc = methodMap[prop] = function (...args) {\n advanceResults.set(this, ittrProxiedCursorToOriginalProxy.get(this)[prop](...args));\n };\n }\n return cachedFunc;\n },\n};\nasync function* iterate(...args) {\n // tslint:disable-next-line:no-this-assignment\n let cursor = this;\n if (!(cursor instanceof IDBCursor)) {\n cursor = await cursor.openCursor(...args);\n }\n if (!cursor)\n return;\n cursor = cursor;\n const proxiedCursor = new Proxy(cursor, cursorIteratorTraps);\n ittrProxiedCursorToOriginalProxy.set(proxiedCursor, cursor);\n // Map this double-proxy back to the original, so other cursor methods work.\n reverseTransformCache.set(proxiedCursor, unwrap(cursor));\n while (cursor) {\n yield proxiedCursor;\n // If one of the advancing methods was not called, call continue().\n cursor = await (advanceResults.get(proxiedCursor) || cursor.continue());\n 
advanceResults.delete(proxiedCursor);\n }\n}\nfunction isIteratorProp(target, prop) {\n return ((prop === Symbol.asyncIterator &&\n instanceOfAny(target, [IDBIndex, IDBObjectStore, IDBCursor])) ||\n (prop === 'iterate' && instanceOfAny(target, [IDBIndex, IDBObjectStore])));\n}\nreplaceTraps((oldTraps) => ({\n ...oldTraps,\n get(target, prop, receiver) {\n if (isIteratorProp(target, prop))\n return iterate;\n return oldTraps.get(target, prop, receiver);\n },\n has(target, prop) {\n return isIteratorProp(target, prop) || oldTraps.has(target, prop);\n },\n}));\n\n\n\n\n//# sourceURL=webpack://$/../node_modules/idb/build/index.js?\n}"); + +/***/ }), + /***/ "./node_modules/@bsv/sdk/dist/cjs/mod.js": /*!***********************************************!*\ !*** ./node_modules/@bsv/sdk/dist/cjs/mod.js ***! @@ -594,7 +605,7 @@ /***/ (function(__unused_webpack_module, exports, __webpack_require__) { "use strict"; -eval("{\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.KeyShares = void 0;\nconst BigNumber_js_1 = __importDefault(__webpack_require__(/*! ./BigNumber.js */ \"./node_modules/@bsv/sdk/dist/cjs/src/primitives/BigNumber.js\"));\nconst PublicKey_js_1 = __importDefault(__webpack_require__(/*! ./PublicKey.js */ \"./node_modules/@bsv/sdk/dist/cjs/src/primitives/PublicKey.js\"));\nconst Curve_js_1 = __importDefault(__webpack_require__(/*! ./Curve.js */ \"./node_modules/@bsv/sdk/dist/cjs/src/primitives/Curve.js\"));\nconst ECDSA_js_1 = __webpack_require__(/*! ./ECDSA.js */ \"./node_modules/@bsv/sdk/dist/cjs/src/primitives/ECDSA.js\");\nconst Hash_js_1 = __webpack_require__(/*! ./Hash.js */ \"./node_modules/@bsv/sdk/dist/cjs/src/primitives/Hash.js\");\nconst Random_js_1 = __importDefault(__webpack_require__(/*! ./Random.js */ \"./node_modules/@bsv/sdk/dist/cjs/src/primitives/Random.js\"));\nconst utils_js_1 = __webpack_require__(/*! ./utils.js */ \"./node_modules/@bsv/sdk/dist/cjs/src/primitives/utils.js\");\nconst Polynomial_js_1 = __importStar(__webpack_require__(/*! 
./Polynomial.js */ \"./node_modules/@bsv/sdk/dist/cjs/src/primitives/Polynomial.js\"));\n/**\n * @class KeyShares\n *\n * This class is used to store the shares of a private key.\n *\n * @param shares - An array of shares\n * @param threshold - The number of shares required to recombine the private key\n *\n * @returns KeyShares\n *\n * @example\n * const key = PrivateKey.fromShares(shares)\n *\n */\nclass KeyShares {\n constructor(points, threshold, integrity) {\n this.points = points;\n this.threshold = threshold;\n this.integrity = integrity;\n }\n static fromBackupFormat(shares) {\n let threshold = 0;\n let integrity = '';\n const points = shares.map((share, idx) => {\n const shareParts = share.split('.');\n if (shareParts.length !== 4) {\n throw new Error('Invalid share format in share ' +\n idx.toString() +\n '. Expected format: \"x.y.t.i\" - received ' +\n share);\n }\n const [x, y, t, i] = shareParts;\n if (t === undefined)\n throw new Error('Threshold not found in share ' + idx.toString());\n if (i === undefined)\n throw new Error('Integrity not found in share ' + idx.toString());\n const tInt = parseInt(t);\n if (idx !== 0 && threshold !== tInt) {\n throw new Error('Threshold mismatch in share ' + idx.toString());\n }\n if (idx !== 0 && integrity !== i) {\n throw new Error('Integrity mismatch in share ' + idx.toString());\n }\n threshold = tInt;\n integrity = i;\n return Polynomial_js_1.PointInFiniteField.fromString([x, y].join('.'));\n });\n return new KeyShares(points, threshold, integrity);\n }\n toBackupFormat() {\n return this.points.map((share) => share.toString() + '.' + this.threshold.toString() + '.' + this.integrity);\n }\n}\nexports.KeyShares = KeyShares;\n/**\n * Represents a Private Key, which is a secret that can be used to generate signatures in a cryptographic system.\n *\n * The `PrivateKey` class extends from the `BigNumber` class. 
It offers methods to create signatures, verify them,\n * create a corresponding public key and derive a shared secret from a public key.\n *\n * @extends {BigNumber}\n * @see {@link BigNumber} for more information on BigNumber.\n */\nclass PrivateKey extends BigNumber_js_1.default {\n /**\n * Generates a private key randomly.\n *\n * @method fromRandom\n * @static\n * @returns The newly generated Private Key.\n *\n * @example\n * const privateKey = PrivateKey.fromRandom();\n */\n static fromRandom() {\n return new PrivateKey((0, Random_js_1.default)(32));\n }\n /**\n * Generates a private key from a string.\n *\n * @method fromString\n * @static\n * @param str - The string to generate the private key from.\n * @param base - The base of the string.\n * @returns The generated Private Key.\n * @throws Will throw an error if the string is not valid.\n **/\n static fromString(str, base = 'hex') {\n return new PrivateKey(super.fromString(str, base).toArray());\n }\n /**\n * Generates a private key from a hexadecimal string.\n *\n * @method fromHex\n * @static\n * @param {string} str - The hexadecimal string representing the private key. 
The string must represent a valid private key in big-endian format.\n * @returns {PrivateKey} The generated Private Key instance.\n * @throws {Error} If the string is not a valid hexadecimal or represents an invalid private key.\n **/\n static fromHex(str) {\n return new PrivateKey(super.fromHex(str, 'big'));\n }\n /**\n * Generates a private key from a WIF (Wallet Import Format) string.\n *\n * @method fromWif\n * @static\n * @param wif - The WIF string to generate the private key from.\n * @param base - The base of the string.\n * @returns The generated Private Key.\n * @throws Will throw an error if the string is not a valid WIF.\n **/\n static fromWif(wif, prefixLength = 1) {\n const decoded = (0, utils_js_1.fromBase58Check)(wif, undefined, prefixLength);\n if (decoded.data.length !== 33) {\n throw new Error('Invalid WIF length');\n }\n if (decoded.data[32] !== 1) {\n throw new Error('Invalid WIF padding');\n }\n return new PrivateKey(decoded.data.slice(0, 32));\n }\n /**\n * @constructor\n *\n * @param number - The number (various types accepted) to construct a BigNumber from. Default is 0.\n *\n * @param base - The base of number provided. By default is 10. Ignored if number is BigNumber.\n *\n * @param endian - The endianness provided. By default is 'big endian'. Ignored if number is BigNumber.\n *\n * @param modN - Optional. Default 'apply. If 'apply', apply modN to input to guarantee a valid PrivateKey. If 'error', if input is out of field throw new Error('Input is out of field'). 
If 'nocheck', assumes input is in field.\n *\n * @example\n * import PrivateKey from './PrivateKey';\n * import BigNumber from './BigNumber';\n * const privKey = new PrivateKey(new BigNumber('123456', 10, 'be'));\n */\n constructor(number = 0, base = 10, endian = 'be', modN = 'apply') {\n if (number instanceof BigNumber_js_1.default) {\n super();\n number.copy(this);\n }\n else {\n super(number, base, endian);\n }\n if (modN !== 'nocheck') {\n const check = this.checkInField();\n if (!check.inField) {\n if (modN === 'error') {\n throw new Error('Input is out of field');\n }\n // Force the PrivateKey BigNumber value to lie in the field limited by curve.n\n BigNumber_js_1.default.move(this, check.modN);\n }\n }\n }\n /**\n * A utility function to check that the value of this PrivateKey lies in the field limited by curve.n\n * @returns { inField, modN } where modN is this PrivateKey's current BigNumber value mod curve.n, and inField is true only if modN equals current BigNumber value.\n */\n checkInField() {\n const curve = new Curve_js_1.default();\n const modN = this.mod(curve.n);\n const inField = this.cmp(modN) === 0;\n return { inField, modN };\n }\n /**\n * @returns true if the PrivateKey's current BigNumber value lies in the field limited by curve.n\n */\n isValid() {\n return this.checkInField().inField;\n }\n /**\n * Signs a message using the private key.\n *\n * @method sign\n * @param msg - The message (array of numbers or string) to be signed.\n * @param enc - If 'hex' the string will be treated as hex, utf8 otherwise.\n * @param forceLowS - If true (the default), the signature will be forced to have a low S value.\n * @param customK — If provided, uses a custom K-value for the signature. 
Provie a function that returns a BigNumber, or the BigNumber itself.\n * @returns A digital signature generated from the hash of the message and the private key.\n *\n * @example\n * const privateKey = PrivateKey.fromRandom();\n * const signature = privateKey.sign('Hello, World!');\n */\n sign(msg, enc, forceLowS = true, customK) {\n const msgHash = new BigNumber_js_1.default((0, Hash_js_1.sha256)(msg, enc), 16);\n return (0, ECDSA_js_1.sign)(msgHash, this, forceLowS, customK);\n }\n /**\n * Verifies a message's signature using the public key associated with this private key.\n *\n * @method verify\n * @param msg - The original message which has been signed.\n * @param sig - The signature to be verified.\n * @param enc - The data encoding method.\n * @returns Whether or not the signature is valid.\n *\n * @example\n * const privateKey = PrivateKey.fromRandom();\n * const signature = privateKey.sign('Hello, World!');\n * const isSignatureValid = privateKey.verify('Hello, World!', signature);\n */\n verify(msg, sig, enc) {\n const msgHash = new BigNumber_js_1.default((0, Hash_js_1.sha256)(msg, enc), 16);\n return (0, ECDSA_js_1.verify)(msgHash, sig, this.toPublicKey());\n }\n /**\n * Converts the private key to its corresponding public key.\n *\n * The public key is generated by multiplying the base point G of the curve and the private key.\n *\n * @method toPublicKey\n * @returns The generated PublicKey.\n *\n * @example\n * const privateKey = PrivateKey.fromRandom();\n * const publicKey = privateKey.toPublicKey();\n */\n toPublicKey() {\n const c = new Curve_js_1.default();\n const p = c.g.mul(this);\n return new PublicKey_js_1.default(p.x, p.y);\n }\n /**\n * Converts the private key to a Wallet Import Format (WIF) string.\n *\n * Base58Check encoding is used for encoding the private key.\n * The prefix\n *\n * @method toWif\n * @returns The WIF string.\n *\n * @param prefix defaults to [0x80] for mainnet, set it to [0xef] for testnet.\n *\n * @throws Error('Value 
is out of field') if current BigNumber value is out of field limited by curve.n\n *\n * @example\n * const privateKey = PrivateKey.fromRandom();\n * const wif = privateKey.toWif();\n * const testnetWif = privateKey.toWif([0xef]);\n */\n toWif(prefix = [0x80]) {\n if (!this.isValid()) {\n throw new Error('Value is out of field');\n }\n return (0, utils_js_1.toBase58Check)([...this.toArray('be', 32), 1], prefix);\n }\n /**\n * Base58Check encodes the hash of the public key associated with this private key with a prefix to indicate locking script type.\n * Defaults to P2PKH for mainnet, otherwise known as a \"Bitcoin Address\".\n *\n * @param prefix defaults to [0x00] for mainnet, set to [0x6f] for testnet or use the strings 'testnet' or 'mainnet'\n *\n * @returns Returns the address encoding associated with the hash of the public key associated with this private key.\n *\n * @example\n * const address = privkey.toAddress()\n * const address = privkey.toAddress('mainnet')\n * const testnetAddress = privkey.toAddress([0x6f])\n * const testnetAddress = privkey.toAddress('testnet')\n */\n toAddress(prefix = [0x00]) {\n return this.toPublicKey().toAddress(prefix);\n }\n /**\n * Converts this PrivateKey to a hexadecimal string.\n *\n * @method toHex\n * @param length - The minimum length of the hex string\n * @returns Returns a string representing the hexadecimal value of this BigNumber.\n *\n * @example\n * const bigNumber = new BigNumber(255);\n * const hex = bigNumber.toHex();\n */\n toHex() {\n return super.toHex(32);\n }\n /**\n * Converts this PrivateKey to a string representation.\n *\n * @method toString\n * @param {number | 'hex'} [base='hex'] - The base for representing the number. Default is hexadecimal ('hex').\n * @param {number} [padding=64] - The minimum number of digits for the output string. 
Default is 64, ensuring a 256-bit representation in hexadecimal.\n * @returns {string} A string representation of the PrivateKey in the specified base, padded to the specified length.\n *\n **/\n toString(base = 'hex', padding = 64) {\n return super.toString(base, padding);\n }\n /**\n * Derives a shared secret from the public key.\n *\n * @method deriveSharedSecret\n * @param key - The public key to derive the shared secret from.\n * @returns The derived shared secret (a point on the curve).\n * @throws Will throw an error if the public key is not valid.\n *\n * @example\n * const privateKey = PrivateKey.fromRandom();\n * const publicKey = privateKey.toPublicKey();\n * const sharedSecret = privateKey.deriveSharedSecret(publicKey);\n */\n deriveSharedSecret(key) {\n if (!key.validate()) {\n throw new Error('Public key not valid for ECDH secret derivation');\n }\n return key.mul(this);\n }\n /**\n * Derives a child key with BRC-42.\n * @param publicKey The public key of the other party\n * @param invoiceNumber The invoice number used to derive the child key\n * @param cacheSharedSecret Optional function to cache shared secrets\n * @param retrieveCachedSharedSecret Optional function to retrieve shared secrets from the cache\n * @returns The derived child key.\n */\n deriveChild(publicKey, invoiceNumber, cacheSharedSecret, retrieveCachedSharedSecret) {\n let sharedSecret;\n if (typeof retrieveCachedSharedSecret === 'function') {\n const retrieved = retrieveCachedSharedSecret(this, publicKey);\n if (typeof retrieved !== 'undefined') {\n sharedSecret = retrieved;\n }\n else {\n sharedSecret = this.deriveSharedSecret(publicKey);\n if (typeof cacheSharedSecret === 'function') {\n cacheSharedSecret(this, publicKey, sharedSecret);\n }\n }\n }\n else {\n sharedSecret = this.deriveSharedSecret(publicKey);\n }\n const invoiceNumberBin = (0, utils_js_1.toArray)(invoiceNumber, 'utf8');\n const hmac = (0, Hash_js_1.sha256hmac)(sharedSecret.encode(true), invoiceNumberBin);\n const 
curve = new Curve_js_1.default();\n return new PrivateKey(this.add(new BigNumber_js_1.default(hmac)).mod(curve.n).toArray());\n }\n /**\n * Splits the private key into shares using Shamir's Secret Sharing Scheme.\n *\n * @param threshold The minimum number of shares required to reconstruct the private key.\n * @param totalShares The total number of shares to generate.\n * @param prime The prime number to be used in Shamir's Secret Sharing Scheme.\n * @returns An array of shares.\n *\n * @example\n * const key = PrivateKey.fromRandom()\n * const shares = key.toKeyShares(2, 5)\n */\n toKeyShares(threshold, totalShares) {\n if (typeof threshold !== 'number' || typeof totalShares !== 'number') {\n throw new Error('threshold and totalShares must be numbers');\n }\n if (threshold < 2)\n throw new Error('threshold must be at least 2');\n if (totalShares < 2)\n throw new Error('totalShares must be at least 2');\n if (threshold > totalShares) {\n throw new Error('threshold should be less than or equal to totalShares');\n }\n const poly = Polynomial_js_1.default.fromPrivateKey(this, threshold);\n const points = [];\n const usedXCoordinates = new Set();\n const curve = new Curve_js_1.default();\n /**\n * Cryptographically secure x-coordinate generation for Shamir's Secret Sharing (toKeyShares)\n *\n * - Each x-coordinate is derived using a master seed (Random(64)) as the HMAC key and a per-attempt counter array as the message.\n * - The counter array includes the share index, the attempt number (to handle rare collisions), and 32 bytes of fresh randomness for each attempt.\n * - This ensures:\n * 1. **Non-determinism**: Each split is unique, even for the same key and parameters, due to the per-attempt randomness.\n * 2. **Uniqueness**: x-coordinates are checked for zero and duplication; retry logic ensures no repeats or invalid values.\n * 3. 
**Cryptographic strength**: HMAC-SHA-512 is robust, and combining deterministic and random values protects against RNG compromise or bias.\n * 4. **Defensive programming**: Attempts are capped (5 per share) to prevent infinite loops in pathological cases.\n *\n * This approach is robust against all practical attacks and is suitable for high-security environments where deterministic splits are not desired.\n */\n const seed = (0, Random_js_1.default)(64);\n for (let i = 0; i < totalShares; i++) {\n let x;\n let attempts = 0;\n do {\n // To ensure no two points are ever the same, even if the system RNG is compromised,\n // we'll use a different counter value for each point and use SHA-512 HMAC.\n const counter = [i, attempts, ...(0, Random_js_1.default)(32)];\n const h = (0, Hash_js_1.sha512hmac)(seed, counter);\n x = new BigNumber_js_1.default(h).umod(curve.p);\n // repeat generation if x is zero or has already been used (insanely unlikely)\n attempts++;\n if (attempts > 5) {\n throw new Error('Failed to generate unique x coordinate after 5 attempts');\n }\n } while (x.isZero() || usedXCoordinates.has(x.toString()));\n usedXCoordinates.add(x.toString());\n const y = poly.valueAt(x);\n points.push(new Polynomial_js_1.PointInFiniteField(x, y));\n }\n const integrity = this.toPublicKey().toHash('hex').slice(0, 8);\n return new KeyShares(points, threshold, integrity);\n }\n /**\n * @method toBackupShares\n *\n * Creates a backup of the private key by splitting it into shares.\n *\n *\n * @param threshold The number of shares which will be required to reconstruct the private key.\n * @param totalShares The number of shares to generate for distribution.\n * @returns\n */\n toBackupShares(threshold, totalShares) {\n return this.toKeyShares(threshold, totalShares).toBackupFormat();\n }\n /**\n *\n * @method fromBackupShares\n *\n * Creates a private key from backup shares.\n *\n * @param shares\n * @returns PrivateKey\n *\n * @example\n *\n * const share1 = 
'3znuzt7DZp8HzZTfTh5MF9YQKNX3oSxTbSYmSRGrH2ev.2Nm17qoocmoAhBTCs8TEBxNXCskV9N41rB2PckcgYeqV.2.35449bb9'\n * const share2 = 'Cm5fuUc39X5xgdedao8Pr1kvCSm8Gk7Cfenc7xUKcfLX.2juyK9BxCWn2DiY5JUAgj9NsQ77cc9bWksFyW45haXZm.2.35449bb9'\n *\n * const recoveredKey = PrivateKey.fromBackupShares([share1, share2])\n */\n static fromBackupShares(shares) {\n return PrivateKey.fromKeyShares(KeyShares.fromBackupFormat(shares));\n }\n /**\n * Combines shares to reconstruct the private key.\n *\n * @param shares An array of points (shares) to be used to reconstruct the private key.\n * @param threshold The minimum number of shares required to reconstruct the private key.\n *\n * @returns The reconstructed private key.\n *\n **/\n static fromKeyShares(keyShares) {\n const { points, threshold, integrity } = keyShares;\n if (threshold < 2)\n throw new Error('threshold must be at least 2');\n if (points.length < threshold) {\n throw new Error(`At least ${threshold} shares are required to reconstruct the private key`);\n }\n // check to see if two points have the same x value\n for (let i = 0; i < threshold; i++) {\n for (let j = i + 1; j < threshold; j++) {\n if (points[i].x.eq(points[j].x)) {\n throw new Error('Duplicate share detected, each must be unique.');\n }\n }\n }\n const poly = new Polynomial_js_1.default(points, threshold);\n const privateKey = new PrivateKey(poly.valueAt(new BigNumber_js_1.default(0)).toArray());\n const integrityHash = privateKey.toPublicKey().toHash('hex').slice(0, 8);\n if (integrityHash !== integrity) {\n throw new Error('Integrity hash mismatch');\n }\n return privateKey;\n }\n}\nexports[\"default\"] = PrivateKey;\n//# sourceMappingURL=PrivateKey.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/sdk/dist/cjs/src/primitives/PrivateKey.js?\n}"); +eval("{\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.KeyShares = void 0;\nconst BigNumber_js_1 = __importDefault(__webpack_require__(/*! ./BigNumber.js */ \"./node_modules/@bsv/sdk/dist/cjs/src/primitives/BigNumber.js\"));\nconst PublicKey_js_1 = __importDefault(__webpack_require__(/*! ./PublicKey.js */ \"./node_modules/@bsv/sdk/dist/cjs/src/primitives/PublicKey.js\"));\nconst Curve_js_1 = __importDefault(__webpack_require__(/*! ./Curve.js */ \"./node_modules/@bsv/sdk/dist/cjs/src/primitives/Curve.js\"));\nconst ECDSA_js_1 = __webpack_require__(/*! ./ECDSA.js */ \"./node_modules/@bsv/sdk/dist/cjs/src/primitives/ECDSA.js\");\nconst Hash_js_1 = __webpack_require__(/*! ./Hash.js */ \"./node_modules/@bsv/sdk/dist/cjs/src/primitives/Hash.js\");\nconst Random_js_1 = __importDefault(__webpack_require__(/*! 
./Random.js */ \"./node_modules/@bsv/sdk/dist/cjs/src/primitives/Random.js\"));\nconst utils_js_1 = __webpack_require__(/*! ./utils.js */ \"./node_modules/@bsv/sdk/dist/cjs/src/primitives/utils.js\");\nconst Polynomial_js_1 = __importStar(__webpack_require__(/*! ./Polynomial.js */ \"./node_modules/@bsv/sdk/dist/cjs/src/primitives/Polynomial.js\"));\n/**\n * @class KeyShares\n *\n * This class is used to store the shares of a private key.\n *\n * @param shares - An array of shares\n * @param threshold - The number of shares required to recombine the private key\n *\n * @returns KeyShares\n *\n * @example\n * const key = PrivateKey.fromShares(shares)\n *\n */\nclass KeyShares {\n constructor(points, threshold, integrity) {\n this.points = points;\n this.threshold = threshold;\n this.integrity = integrity;\n }\n static fromBackupFormat(shares) {\n let threshold = 0;\n let integrity = '';\n const points = shares.map((share, idx) => {\n const shareParts = share.split('.');\n if (shareParts.length !== 4) {\n throw new Error('Invalid share format in share ' +\n idx.toString() +\n '. Expected format: \"x.y.t.i\" - received ' +\n share);\n }\n const [x, y, t, i] = shareParts;\n if (t === undefined)\n throw new Error('Threshold not found in share ' + idx.toString());\n if (i === undefined)\n throw new Error('Integrity not found in share ' + idx.toString());\n const tInt = parseInt(t);\n if (idx !== 0 && threshold !== tInt) {\n throw new Error('Threshold mismatch in share ' + idx.toString());\n }\n if (idx !== 0 && integrity !== i) {\n throw new Error('Integrity mismatch in share ' + idx.toString());\n }\n threshold = tInt;\n integrity = i;\n return Polynomial_js_1.PointInFiniteField.fromString([x, y].join('.'));\n });\n return new KeyShares(points, threshold, integrity);\n }\n toBackupFormat() {\n return this.points.map((share) => share.toString() + '.' + this.threshold.toString() + '.' 
+ this.integrity);\n }\n}\nexports.KeyShares = KeyShares;\n/**\n * Represents a Private Key, which is a secret that can be used to generate signatures in a cryptographic system.\n *\n * The `PrivateKey` class extends from the `BigNumber` class. It offers methods to create signatures, verify them,\n * create a corresponding public key and derive a shared secret from a public key.\n *\n * @extends {BigNumber}\n * @see {@link BigNumber} for more information on BigNumber.\n */\nclass PrivateKey extends BigNumber_js_1.default {\n /**\n * Generates a private key randomly.\n *\n * @method fromRandom\n * @static\n * @returns The newly generated Private Key.\n *\n * @example\n * const privateKey = PrivateKey.fromRandom();\n */\n static fromRandom() {\n return new PrivateKey((0, Random_js_1.default)(32));\n }\n /**\n * Generates a private key from a string.\n *\n * @method fromString\n * @static\n * @param str - The string to generate the private key from.\n * @param base - The base of the string.\n * @returns The generated Private Key.\n * @throws Will throw an error if the string is not valid.\n **/\n static fromString(str, base = 'hex') {\n return new PrivateKey(super.fromString(str, base).toArray());\n }\n /**\n * Generates a private key from a hexadecimal string.\n *\n * @method fromHex\n * @static\n * @param {string} str - The hexadecimal string representing the private key. 
The string must represent a valid private key in big-endian format.\n * @returns {PrivateKey} The generated Private Key instance.\n * @throws {Error} If the string is not a valid hexadecimal or represents an invalid private key.\n **/\n static fromHex(str) {\n return new PrivateKey(super.fromHex(str, 'big'));\n }\n /**\n * Generates a private key from a WIF (Wallet Import Format) string.\n *\n * @method fromWif\n * @static\n * @param wif - The WIF string to generate the private key from.\n * @param base - The base of the string.\n * @returns The generated Private Key.\n * @throws Will throw an error if the string is not a valid WIF.\n **/\n static fromWif(wif, prefixLength = 1) {\n const decoded = (0, utils_js_1.fromBase58Check)(wif, undefined, prefixLength);\n if (decoded.data.length !== 33) {\n throw new Error('Invalid WIF length');\n }\n if (decoded.data[32] !== 1) {\n throw new Error('Invalid WIF padding');\n }\n return new PrivateKey(decoded.data.slice(0, 32));\n }\n /**\n * @constructor\n *\n * @param number - The number (various types accepted) to construct a BigNumber from. Default is 0.\n *\n * @param base - The base of number provided. By default is 10. Ignored if number is BigNumber.\n *\n * @param endian - The endianness provided. By default is 'big endian'. Ignored if number is BigNumber.\n *\n * @param modN - Optional. Default 'apply. If 'apply', apply modN to input to guarantee a valid PrivateKey. If 'error', if input is out of field throw new Error('Input is out of field'). 
If 'nocheck', assumes input is in field.\n *\n * @example\n * import PrivateKey from './PrivateKey';\n * import BigNumber from './BigNumber';\n * const privKey = new PrivateKey(new BigNumber('123456', 10, 'be'));\n */\n constructor(number = 0, base = 10, endian = 'be', modN = 'apply') {\n if (number instanceof BigNumber_js_1.default) {\n super();\n number.copy(this);\n }\n else {\n super(number, base, endian);\n }\n if (modN !== 'nocheck') {\n const check = this.checkInField();\n if (!check.inField) {\n if (modN === 'error') {\n throw new Error('Input is out of field');\n }\n // Force the PrivateKey BigNumber value to lie in the field limited by curve.n\n BigNumber_js_1.default.move(this, check.modN);\n }\n }\n }\n /**\n * A utility function to check that the value of this PrivateKey lies in the field limited by curve.n\n * @returns { inField, modN } where modN is this PrivateKey's current BigNumber value mod curve.n, and inField is true only if modN equals current BigNumber value.\n */\n checkInField() {\n const curve = new Curve_js_1.default();\n const modN = this.mod(curve.n);\n const inField = this.cmp(modN) === 0;\n return { inField, modN };\n }\n /**\n * @returns true if the PrivateKey's current BigNumber value lies in the field limited by curve.n\n */\n isValid() {\n return this.checkInField().inField;\n }\n /**\n * Signs a message using the private key.\n *\n * @method sign\n * @param msg - The message (array of numbers or string) to be signed.\n * @param enc - If 'hex' the string will be treated as hex, utf8 otherwise.\n * @param forceLowS - If true (the default), the signature will be forced to have a low S value.\n * @param customK — If provided, uses a custom K-value for the signature. 
Provie a function that returns a BigNumber, or the BigNumber itself.\n * @returns A digital signature generated from the hash of the message and the private key.\n *\n * @example\n * const privateKey = PrivateKey.fromRandom();\n * const signature = privateKey.sign('Hello, World!');\n */\n sign(msg, enc, forceLowS = true, customK) {\n const msgHash = new BigNumber_js_1.default((0, Hash_js_1.sha256)(msg, enc), 16);\n return (0, ECDSA_js_1.sign)(msgHash, this, forceLowS, customK);\n }\n /**\n * Verifies a message's signature using the public key associated with this private key.\n *\n * @method verify\n * @param msg - The original message which has been signed.\n * @param sig - The signature to be verified.\n * @param enc - The data encoding method.\n * @returns Whether or not the signature is valid.\n *\n * @example\n * const privateKey = PrivateKey.fromRandom();\n * const signature = privateKey.sign('Hello, World!');\n * const isSignatureValid = privateKey.verify('Hello, World!', signature);\n */\n verify(msg, sig, enc) {\n const msgHash = new BigNumber_js_1.default((0, Hash_js_1.sha256)(msg, enc), 16);\n return (0, ECDSA_js_1.verify)(msgHash, sig, this.toPublicKey());\n }\n /**\n * Converts the private key to its corresponding public key.\n *\n * The public key is generated by multiplying the base point G of the curve and the private key.\n *\n * @method toPublicKey\n * @returns The generated PublicKey.\n *\n * @example\n * const privateKey = PrivateKey.fromRandom();\n * const publicKey = privateKey.toPublicKey();\n */\n toPublicKey() {\n const c = new Curve_js_1.default();\n const p = c.g.mul(this);\n return new PublicKey_js_1.default(p.x, p.y);\n }\n /**\n * Converts the private key to a Wallet Import Format (WIF) string.\n *\n * Base58Check encoding is used for encoding the private key.\n * The prefix\n *\n * @method toWif\n * @returns The WIF string.\n *\n * @param prefix defaults to [0x80] for mainnet, set it to [0xef] for testnet.\n *\n * @throws Error('Value 
is out of field') if current BigNumber value is out of field limited by curve.n\n *\n * @example\n * const privateKey = PrivateKey.fromRandom();\n * const wif = privateKey.toWif();\n * const testnetWif = privateKey.toWif([0xef]);\n */\n toWif(prefix = [0x80]) {\n if (!this.isValid()) {\n throw new Error('Value is out of field');\n }\n return (0, utils_js_1.toBase58Check)([...this.toArray('be', 32), 1], prefix);\n }\n /**\n * Base58Check encodes the hash of the public key associated with this private key with a prefix to indicate locking script type.\n * Defaults to P2PKH for mainnet, otherwise known as a \"Bitcoin Address\".\n *\n * @param prefix defaults to [0x00] for mainnet, set to [0x6f] for testnet or use the strings 'testnet' or 'mainnet'\n *\n * @returns Returns the address encoding associated with the hash of the public key associated with this private key.\n *\n * @example\n * const address = privkey.toAddress()\n * const address = privkey.toAddress('mainnet')\n * const testnetAddress = privkey.toAddress([0x6f])\n * const testnetAddress = privkey.toAddress('testnet')\n */\n toAddress(prefix = [0x00]) {\n return this.toPublicKey().toAddress(prefix);\n }\n /**\n * Converts this PrivateKey to a hexadecimal string.\n *\n * @method toHex\n * @param length - The minimum length of the hex string\n * @returns Returns a string representing the hexadecimal value of this BigNumber.\n *\n * @example\n * const bigNumber = new BigNumber(255);\n * const hex = bigNumber.toHex();\n */\n toHex() {\n return super.toHex(32);\n }\n /**\n * Converts this PrivateKey to a string representation.\n *\n * @method toString\n * @param {number | 'hex'} [base='hex'] - The base for representing the number. Default is hexadecimal ('hex').\n * @param {number} [padding=64] - The minimum number of digits for the output string. 
Default is 64, ensuring a 256-bit representation in hexadecimal.\n * @returns {string} A string representation of the PrivateKey in the specified base, padded to the specified length.\n *\n **/\n toString(base = 'hex', padding = 64) {\n return super.toString(base, padding);\n }\n /**\n * Derives a shared secret from the public key.\n *\n * @method deriveSharedSecret\n * @param key - The public key to derive the shared secret from.\n * @returns The derived shared secret (a point on the curve).\n * @throws Will throw an error if the public key is not valid.\n *\n * @example\n * const privateKey = PrivateKey.fromRandom();\n * const publicKey = privateKey.toPublicKey();\n * const sharedSecret = privateKey.deriveSharedSecret(publicKey);\n */\n deriveSharedSecret(key) {\n if (!key.validate()) {\n throw new Error('Public key not valid for ECDH secret derivation');\n }\n return key.mul(this);\n }\n /**\n * Derives a child key with BRC-42.\n * @param publicKey The public key of the other party\n * @param invoiceNumber The invoice number used to derive the child key\n * @param cacheSharedSecret Optional function to cache shared secrets\n * @param retrieveCachedSharedSecret Optional function to retrieve shared secrets from the cache\n * @returns The derived child key.\n */\n deriveChild(publicKey, invoiceNumber, cacheSharedSecret, retrieveCachedSharedSecret) {\n let sharedSecret;\n if (typeof retrieveCachedSharedSecret === 'function') {\n const retrieved = retrieveCachedSharedSecret(this, publicKey);\n if (typeof retrieved !== 'undefined') {\n sharedSecret = retrieved;\n }\n else {\n sharedSecret = this.deriveSharedSecret(publicKey);\n if (typeof cacheSharedSecret === 'function') {\n cacheSharedSecret(this, publicKey, sharedSecret);\n }\n }\n }\n else {\n sharedSecret = this.deriveSharedSecret(publicKey);\n }\n const invoiceNumberBin = (0, utils_js_1.toArray)(invoiceNumber, 'utf8');\n const hmac = (0, Hash_js_1.sha256hmac)(sharedSecret.encode(true), invoiceNumberBin);\n const 
curve = new Curve_js_1.default();\n return new PrivateKey(this.add(new BigNumber_js_1.default(hmac)).mod(curve.n).toArray());\n }\n /**\n * Splits the private key into shares using Shamir's Secret Sharing Scheme.\n *\n * @param threshold The minimum number of shares required to reconstruct the private key.\n * @param totalShares The total number of shares to generate.\n * @param prime The prime number to be used in Shamir's Secret Sharing Scheme.\n * @returns An array of shares.\n *\n * @example\n * const key = PrivateKey.fromRandom()\n * const shares = key.toKeyShares(2, 5)\n */\n toKeyShares(threshold, totalShares) {\n if (typeof threshold !== 'number' || typeof totalShares !== 'number') {\n throw new Error('threshold and totalShares must be numbers');\n }\n if (threshold < 2)\n throw new Error('threshold must be at least 2');\n if (totalShares < 2)\n throw new Error('totalShares must be at least 2');\n if (threshold > totalShares) {\n throw new Error('threshold should be less than or equal to totalShares');\n }\n const poly = Polynomial_js_1.default.fromPrivateKey(this, threshold);\n const points = [];\n for (let i = 0; i < totalShares; i++) {\n const x = new BigNumber_js_1.default(PrivateKey.fromRandom().toArray());\n const y = poly.valueAt(x);\n points.push(new Polynomial_js_1.PointInFiniteField(x, y));\n }\n const integrity = this.toPublicKey().toHash('hex').slice(0, 8);\n return new KeyShares(points, threshold, integrity);\n }\n /**\n * @method toBackupShares\n *\n * Creates a backup of the private key by splitting it into shares.\n *\n *\n * @param threshold The number of shares which will be required to reconstruct the private key.\n * @param totalShares The number of shares to generate for distribution.\n * @returns\n */\n toBackupShares(threshold, totalShares) {\n return this.toKeyShares(threshold, totalShares).toBackupFormat();\n }\n /**\n *\n * @method fromBackupShares\n *\n * Creates a private key from backup shares.\n *\n * @param shares\n * @returns 
PrivateKey\n *\n * @example\n *\n * const share1 = '3znuzt7DZp8HzZTfTh5MF9YQKNX3oSxTbSYmSRGrH2ev.2Nm17qoocmoAhBTCs8TEBxNXCskV9N41rB2PckcgYeqV.2.35449bb9'\n * const share2 = 'Cm5fuUc39X5xgdedao8Pr1kvCSm8Gk7Cfenc7xUKcfLX.2juyK9BxCWn2DiY5JUAgj9NsQ77cc9bWksFyW45haXZm.2.35449bb9'\n *\n * const recoveredKey = PrivateKey.fromBackupShares([share1, share2])\n */\n static fromBackupShares(shares) {\n return PrivateKey.fromKeyShares(KeyShares.fromBackupFormat(shares));\n }\n /**\n * Combines shares to reconstruct the private key.\n *\n * @param shares An array of points (shares) to be used to reconstruct the private key.\n * @param threshold The minimum number of shares required to reconstruct the private key.\n *\n * @returns The reconstructed private key.\n *\n **/\n static fromKeyShares(keyShares) {\n const { points, threshold, integrity } = keyShares;\n if (threshold < 2)\n throw new Error('threshold must be at least 2');\n if (points.length < threshold) {\n throw new Error(`At least ${threshold} shares are required to reconstruct the private key`);\n }\n // check to see if two points have the same x value\n for (let i = 0; i < threshold; i++) {\n for (let j = i + 1; j < threshold; j++) {\n if (points[i].x.eq(points[j].x)) {\n throw new Error('Duplicate share detected, each must be unique.');\n }\n }\n }\n const poly = new Polynomial_js_1.default(points, threshold);\n const privateKey = new PrivateKey(poly.valueAt(new BigNumber_js_1.default(0)).toArray());\n const integrityHash = privateKey.toPublicKey().toHash('hex').slice(0, 8);\n if (integrityHash !== integrity) {\n throw new Error('Integrity hash mismatch');\n }\n return privateKey;\n }\n}\nexports[\"default\"] = PrivateKey;\n//# sourceMappingURL=PrivateKey.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/sdk/dist/cjs/src/primitives/PrivateKey.js?\n}"); /***/ }), @@ -759,7 +770,7 @@ /***/ (function(__unused_webpack_module, exports, __webpack_require__) { "use strict"; -eval("{\nvar __importDefault = (this && 
this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nconst OP_js_1 = __importDefault(__webpack_require__(/*! ./OP.js */ \"./node_modules/@bsv/sdk/dist/cjs/src/script/OP.js\"));\nconst utils_js_1 = __webpack_require__(/*! ../primitives/utils.js */ \"./node_modules/@bsv/sdk/dist/cjs/src/primitives/utils.js\");\nconst BigNumber_js_1 = __importDefault(__webpack_require__(/*! ../primitives/BigNumber.js */ \"./node_modules/@bsv/sdk/dist/cjs/src/primitives/BigNumber.js\"));\n/**\n * The Script class represents a script in a Bitcoin SV transaction,\n * encapsulating the functionality to construct, parse, and serialize\n * scripts used in both locking (output) and unlocking (input) scripts.\n *\n * @property {ScriptChunk[]} chunks - An array of script chunks that make up the script.\n */\nclass Script {\n /**\n * @method fromASM\n * Static method to construct a Script instance from an ASM (Assembly) formatted string.\n * @param asm - The script in ASM string format.\n * @returns A new Script instance.\n * @example\n * const script = Script.fromASM(\"OP_DUP OP_HASH160 abcd... OP_EQUALVERIFY OP_CHECKSIG\")\n */\n static fromASM(asm) {\n const chunks = [];\n const tokens = asm.split(' ');\n let i = 0;\n while (i < tokens.length) {\n const token = tokens[i];\n let opCode;\n let opCodeNum = 0;\n if (token.startsWith('OP_') && typeof OP_js_1.default[token] !== 'undefined') {\n opCode = token;\n opCodeNum = OP_js_1.default[token];\n }\n // we start with two special cases, 0 and -1, which are handled specially in\n // toASM. 
see _chunkToString.\n if (token === '0') {\n opCodeNum = 0;\n chunks.push({\n op: opCodeNum\n });\n i = i + 1;\n }\n else if (token === '-1') {\n opCodeNum = OP_js_1.default.OP_1NEGATE;\n chunks.push({\n op: opCodeNum\n });\n i = i + 1;\n }\n else if (opCode === undefined) {\n let hex = tokens[i];\n if (hex.length % 2 !== 0) {\n hex = '0' + hex;\n }\n const arr = (0, utils_js_1.toArray)(hex, 'hex');\n if ((0, utils_js_1.encode)(arr, 'hex') !== hex) {\n throw new Error('invalid hex string in script');\n }\n const len = arr.length;\n if (len >= 0 && len < OP_js_1.default.OP_PUSHDATA1) {\n opCodeNum = len;\n }\n else if (len < Math.pow(2, 8)) {\n opCodeNum = OP_js_1.default.OP_PUSHDATA1;\n }\n else if (len < Math.pow(2, 16)) {\n opCodeNum = OP_js_1.default.OP_PUSHDATA2;\n }\n else if (len < Math.pow(2, 32)) {\n opCodeNum = OP_js_1.default.OP_PUSHDATA4;\n }\n chunks.push({\n data: arr,\n op: opCodeNum\n });\n i = i + 1;\n }\n else if (opCodeNum === OP_js_1.default.OP_PUSHDATA1 ||\n opCodeNum === OP_js_1.default.OP_PUSHDATA2 ||\n opCodeNum === OP_js_1.default.OP_PUSHDATA4) {\n chunks.push({\n data: (0, utils_js_1.toArray)(tokens[i + 2], 'hex'),\n op: opCodeNum\n });\n i = i + 3;\n }\n else {\n chunks.push({\n op: opCodeNum\n });\n i = i + 1;\n }\n }\n return new Script(chunks);\n }\n /**\n * @method fromHex\n * Static method to construct a Script instance from a hexadecimal string.\n * @param hex - The script in hexadecimal format.\n * @returns A new Script instance.\n * @example\n * const script = Script.fromHex(\"76a9...\");\n */\n static fromHex(hex) {\n if (hex.length === 0)\n return Script.fromBinary([]);\n if (hex.length % 2 !== 0) {\n throw new Error('There is an uneven number of characters in the string which suggests it is not hex encoded.');\n }\n if (!/^[0-9a-fA-F]+$/.test(hex)) {\n throw new Error('Some elements in this string are not hex encoded.');\n }\n return Script.fromBinary((0, utils_js_1.toArray)(hex, 'hex'));\n }\n /**\n * @method fromBinary\n * 
Static method to construct a Script instance from a binary array.\n * @param bin - The script in binary array format.\n * @returns A new Script instance.\n * @example\n * const script = Script.fromBinary([0x76, 0xa9, ...])\n */\n static fromBinary(bin) {\n bin = [...bin];\n const chunks = [];\n let inConditionalBlock = 0;\n const br = new utils_js_1.Reader(bin);\n while (!br.eof()) {\n const op = br.readUInt8();\n // if OP_RETURN and not in a conditional block, do not parse the rest of the data,\n // rather just return the last chunk as data without prefixing with data length.\n if (op === OP_js_1.default.OP_RETURN && inConditionalBlock === 0) {\n chunks.push({\n op,\n data: br.read()\n });\n break;\n }\n if (op === OP_js_1.default.OP_IF || op === OP_js_1.default.OP_NOTIF || op === OP_js_1.default.OP_VERIF || op === OP_js_1.default.OP_VERNOTIF) {\n inConditionalBlock++;\n }\n else if (op === OP_js_1.default.OP_ENDIF) {\n inConditionalBlock--;\n }\n let len = 0;\n // eslint-disable-next-line @typescript-eslint/no-shadow\n let data = [];\n if (op > 0 && op < OP_js_1.default.OP_PUSHDATA1) {\n len = op;\n chunks.push({\n data: br.read(len),\n op\n });\n }\n else if (op === OP_js_1.default.OP_PUSHDATA1) {\n try {\n len = br.readUInt8();\n data = br.read(len);\n }\n catch {\n br.read();\n }\n chunks.push({\n data,\n op\n });\n }\n else if (op === OP_js_1.default.OP_PUSHDATA2) {\n try {\n len = br.readUInt16LE();\n data = br.read(len);\n }\n catch {\n br.read();\n }\n chunks.push({\n data,\n op\n });\n }\n else if (op === OP_js_1.default.OP_PUSHDATA4) {\n try {\n len = br.readUInt32LE();\n data = br.read(len);\n }\n catch {\n br.read();\n }\n chunks.push({\n data,\n op\n });\n }\n else {\n chunks.push({\n op\n });\n }\n }\n return new Script(chunks);\n }\n /**\n * @constructor\n * Constructs a new Script object.\n * @param chunks=[] - An array of script chunks to directly initialize the script.\n */\n constructor(chunks = []) {\n this.chunks = chunks;\n }\n /**\n * 
@method toASM\n * Serializes the script to an ASM formatted string.\n * @returns The script in ASM string format.\n */\n toASM() {\n let str = '';\n for (let i = 0; i < this.chunks.length; i++) {\n const chunk = this.chunks[i];\n str += this._chunkToString(chunk);\n }\n return str.slice(1);\n }\n /**\n * @method toHex\n * Serializes the script to a hexadecimal string.\n * @returns The script in hexadecimal format.\n */\n toHex() {\n return (0, utils_js_1.encode)(this.toBinary(), 'hex');\n }\n /**\n * @method toBinary\n * Serializes the script to a binary array.\n * @returns The script in binary array format.\n */\n toBinary() {\n const writer = new utils_js_1.Writer();\n for (let i = 0; i < this.chunks.length; i++) {\n const chunk = this.chunks[i];\n const op = chunk.op;\n writer.writeUInt8(op);\n if (op === OP_js_1.default.OP_RETURN && chunk.data != null) { // special case for unformatted data\n writer.write(chunk.data);\n break;\n }\n else if (chunk.data != null) {\n if (op < OP_js_1.default.OP_PUSHDATA1) {\n writer.write(chunk.data);\n }\n else if (op === OP_js_1.default.OP_PUSHDATA1) {\n writer.writeUInt8(chunk.data.length);\n writer.write(chunk.data);\n }\n else if (op === OP_js_1.default.OP_PUSHDATA2) {\n writer.writeUInt16LE(chunk.data.length);\n writer.write(chunk.data);\n }\n else if (op === OP_js_1.default.OP_PUSHDATA4) {\n writer.writeUInt32LE(chunk.data.length);\n writer.write(chunk.data);\n }\n }\n }\n return writer.toArray();\n }\n /**\n * @method writeScript\n * Appends another script to this script.\n * @param script - The script to append.\n * @returns This script instance for chaining.\n */\n writeScript(script) {\n this.chunks = this.chunks.concat(script.chunks);\n return this;\n }\n /**\n * @method writeOpCode\n * Appends an opcode to the script.\n * @param op - The opcode to append.\n * @returns This script instance for chaining.\n */\n writeOpCode(op) {\n this.chunks.push({ op });\n return this;\n }\n /**\n * @method setChunkOpCode\n * Sets 
the opcode of a specific chunk in the script.\n * @param i - The index of the chunk.\n * @param op - The opcode to set.\n * @returns This script instance for chaining.\n */\n setChunkOpCode(i, op) {\n this.chunks[i] = { op };\n return this;\n }\n /**\n * @method writeBn\n * Appends a BigNumber to the script as an opcode.\n * @param bn - The BigNumber to append.\n * @returns This script instance for chaining.\n */\n writeBn(bn) {\n if (bn.cmpn(0) === OP_js_1.default.OP_0) {\n this.chunks.push({\n op: OP_js_1.default.OP_0\n });\n }\n else if (bn.cmpn(-1) === 0) {\n this.chunks.push({\n op: OP_js_1.default.OP_1NEGATE\n });\n }\n else if (bn.cmpn(1) >= 0 && bn.cmpn(16) <= 0) {\n // see OP_1 - OP_16\n this.chunks.push({\n op: bn.toNumber() + OP_js_1.default.OP_1 - 1\n });\n }\n else {\n const buf = bn.toSm('little');\n this.writeBin(buf);\n }\n return this;\n }\n /**\n * @method writeBin\n * Appends binary data to the script, determining the appropriate opcode based on length.\n * @param bin - The binary data to append.\n * @returns This script instance for chaining.\n * @throws {Error} Throws an error if the data is too large to be pushed.\n */\n writeBin(bin) {\n let op;\n const data = bin.length > 0 ? 
bin : undefined;\n if (bin.length > 0 && bin.length < OP_js_1.default.OP_PUSHDATA1) {\n op = bin.length;\n }\n else if (bin.length === 0) {\n op = OP_js_1.default.OP_0;\n }\n else if (bin.length < Math.pow(2, 8)) {\n op = OP_js_1.default.OP_PUSHDATA1;\n }\n else if (bin.length < Math.pow(2, 16)) {\n op = OP_js_1.default.OP_PUSHDATA2;\n }\n else if (bin.length < Math.pow(2, 32)) {\n op = OP_js_1.default.OP_PUSHDATA4;\n }\n else {\n throw new Error(\"You can't push that much data\");\n }\n this.chunks.push({\n data,\n op\n });\n return this;\n }\n /**\n * @method writeNumber\n * Appends a number to the script.\n * @param num - The number to append.\n * @returns This script instance for chaining.\n */\n writeNumber(num) {\n this.writeBn(new BigNumber_js_1.default(num));\n return this;\n }\n /**\n * @method removeCodeseparators\n * Removes all OP_CODESEPARATOR opcodes from the script.\n * @returns This script instance for chaining.\n */\n removeCodeseparators() {\n const chunks = [];\n for (let i = 0; i < this.chunks.length; i++) {\n if (this.chunks[i].op !== OP_js_1.default.OP_CODESEPARATOR) {\n chunks.push(this.chunks[i]);\n }\n }\n this.chunks = chunks;\n return this;\n }\n /**\n * Deletes the given item wherever it appears in the current script.\n *\n * @param script - The script containing the item to delete from the current script.\n *\n * @returns This script instance for chaining.\n */\n findAndDelete(script) {\n const buf = script.toHex();\n for (let i = 0; i < this.chunks.length; i++) {\n const script2 = new Script([this.chunks[i]]);\n const buf2 = script2.toHex();\n if (buf === buf2) {\n this.chunks.splice(i, 1);\n }\n }\n return this;\n }\n /**\n * @method isPushOnly\n * Checks if the script contains only push data operations.\n * @returns True if the script is push-only, otherwise false.\n */\n isPushOnly() {\n for (let i = 0; i < this.chunks.length; i++) {\n const chunk = this.chunks[i];\n const opCodeNum = chunk.op;\n if (opCodeNum > 
OP_js_1.default.OP_16) {\n return false;\n }\n }\n return true;\n }\n /**\n * @method isLockingScript\n * Determines if the script is a locking script.\n * @returns True if the script is a locking script, otherwise false.\n */\n isLockingScript() {\n throw new Error('Not implemented');\n }\n /**\n * @method isUnlockingScript\n * Determines if the script is an unlocking script.\n * @returns True if the script is an unlocking script, otherwise false.\n */\n isUnlockingScript() {\n throw new Error('Not implemented');\n }\n /**\n * @private\n * @method _chunkToString\n * Converts a script chunk to its string representation.\n * @param chunk - The script chunk.\n * @returns The string representation of the chunk.\n */\n _chunkToString(chunk) {\n const op = chunk.op;\n let str = '';\n if (typeof chunk.data === 'undefined') {\n const val = OP_js_1.default[op];\n str = `${str} ${val}`;\n }\n else {\n str = `${str} ${(0, utils_js_1.toHex)(chunk.data)}`;\n }\n return str;\n }\n}\nexports[\"default\"] = Script;\n//# sourceMappingURL=Script.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/sdk/dist/cjs/src/script/Script.js?\n}"); +eval("{\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nconst OP_js_1 = __importDefault(__webpack_require__(/*! ./OP.js */ \"./node_modules/@bsv/sdk/dist/cjs/src/script/OP.js\"));\nconst utils_js_1 = __webpack_require__(/*! ../primitives/utils.js */ \"./node_modules/@bsv/sdk/dist/cjs/src/primitives/utils.js\");\nconst BigNumber_js_1 = __importDefault(__webpack_require__(/*! 
../primitives/BigNumber.js */ \"./node_modules/@bsv/sdk/dist/cjs/src/primitives/BigNumber.js\"));\n/**\n * The Script class represents a script in a Bitcoin SV transaction,\n * encapsulating the functionality to construct, parse, and serialize\n * scripts used in both locking (output) and unlocking (input) scripts.\n *\n * @property {ScriptChunk[]} chunks - An array of script chunks that make up the script.\n */\nclass Script {\n /**\n * @method fromASM\n * Static method to construct a Script instance from an ASM (Assembly) formatted string.\n * @param asm - The script in ASM string format.\n * @returns A new Script instance.\n * @example\n * const script = Script.fromASM(\"OP_DUP OP_HASH160 abcd... OP_EQUALVERIFY OP_CHECKSIG\")\n */\n static fromASM(asm) {\n const chunks = [];\n const tokens = asm.split(' ');\n let i = 0;\n while (i < tokens.length) {\n const token = tokens[i];\n let opCode;\n let opCodeNum = 0;\n if (token.startsWith('OP_') && typeof OP_js_1.default[token] !== 'undefined') {\n opCode = token;\n opCodeNum = OP_js_1.default[token];\n }\n // we start with two special cases, 0 and -1, which are handled specially in\n // toASM. 
see _chunkToString.\n if (token === '0') {\n opCodeNum = 0;\n chunks.push({\n op: opCodeNum\n });\n i = i + 1;\n }\n else if (token === '-1') {\n opCodeNum = OP_js_1.default.OP_1NEGATE;\n chunks.push({\n op: opCodeNum\n });\n i = i + 1;\n }\n else if (opCode === undefined) {\n let hex = tokens[i];\n if (hex.length % 2 !== 0) {\n hex = '0' + hex;\n }\n const arr = (0, utils_js_1.toArray)(hex, 'hex');\n if ((0, utils_js_1.encode)(arr, 'hex') !== hex) {\n throw new Error('invalid hex string in script');\n }\n const len = arr.length;\n if (len >= 0 && len < OP_js_1.default.OP_PUSHDATA1) {\n opCodeNum = len;\n }\n else if (len < Math.pow(2, 8)) {\n opCodeNum = OP_js_1.default.OP_PUSHDATA1;\n }\n else if (len < Math.pow(2, 16)) {\n opCodeNum = OP_js_1.default.OP_PUSHDATA2;\n }\n else if (len < Math.pow(2, 32)) {\n opCodeNum = OP_js_1.default.OP_PUSHDATA4;\n }\n chunks.push({\n data: arr,\n op: opCodeNum\n });\n i = i + 1;\n }\n else if (opCodeNum === OP_js_1.default.OP_PUSHDATA1 ||\n opCodeNum === OP_js_1.default.OP_PUSHDATA2 ||\n opCodeNum === OP_js_1.default.OP_PUSHDATA4) {\n chunks.push({\n data: (0, utils_js_1.toArray)(tokens[i + 2], 'hex'),\n op: opCodeNum\n });\n i = i + 3;\n }\n else {\n chunks.push({\n op: opCodeNum\n });\n i = i + 1;\n }\n }\n return new Script(chunks);\n }\n /**\n * @method fromHex\n * Static method to construct a Script instance from a hexadecimal string.\n * @param hex - The script in hexadecimal format.\n * @returns A new Script instance.\n * @example\n * const script = Script.fromHex(\"76a9...\");\n */\n static fromHex(hex) {\n if (hex.length === 0)\n return Script.fromBinary([]);\n if (hex.length % 2 !== 0) {\n throw new Error('There is an uneven number of characters in the string which suggests it is not hex encoded.');\n }\n if (!/^[0-9a-fA-F]+$/.test(hex)) {\n throw new Error('Some elements in this string are not hex encoded.');\n }\n return Script.fromBinary((0, utils_js_1.toArray)(hex, 'hex'));\n }\n /**\n * @method fromBinary\n * 
Static method to construct a Script instance from a binary array.\n * @param bin - The script in binary array format.\n * @returns A new Script instance.\n * @example\n * const script = Script.fromBinary([0x76, 0xa9, ...])\n */\n static fromBinary(bin) {\n bin = [...bin];\n const chunks = [];\n let inConditionalBlock = 0;\n const br = new utils_js_1.Reader(bin);\n while (!br.eof()) {\n const op = br.readUInt8();\n // if OP_RETURN and not in a conditional block, do not parse the rest of the data,\n // rather just return the last chunk as data without prefixing with data length.\n if (op === OP_js_1.default.OP_RETURN && inConditionalBlock === 0) {\n chunks.push({\n op,\n data: br.read()\n });\n break;\n }\n if (op === OP_js_1.default.OP_IF || op === OP_js_1.default.OP_NOTIF || op === OP_js_1.default.OP_VERIF || op === OP_js_1.default.OP_VERNOTIF) {\n inConditionalBlock++;\n }\n else if (op === OP_js_1.default.OP_ENDIF) {\n inConditionalBlock--;\n }\n let len = 0;\n // eslint-disable-next-line @typescript-eslint/no-shadow\n let data = [];\n if (op > 0 && op < OP_js_1.default.OP_PUSHDATA1) {\n len = op;\n chunks.push({\n data: br.read(len),\n op\n });\n }\n else if (op === OP_js_1.default.OP_PUSHDATA1) {\n try {\n len = br.readUInt8();\n data = br.read(len);\n }\n catch {\n br.read();\n }\n chunks.push({\n data,\n op\n });\n }\n else if (op === OP_js_1.default.OP_PUSHDATA2) {\n try {\n len = br.readUInt16LE();\n data = br.read(len);\n }\n catch {\n br.read();\n }\n chunks.push({\n data,\n op\n });\n }\n else if (op === OP_js_1.default.OP_PUSHDATA4) {\n try {\n len = br.readUInt32LE();\n data = br.read(len);\n }\n catch {\n br.read();\n }\n chunks.push({\n data,\n op\n });\n }\n else {\n chunks.push({\n op\n });\n }\n }\n return new Script(chunks);\n }\n /**\n * @constructor\n * Constructs a new Script object.\n * @param chunks=[] - An array of script chunks to directly initialize the script.\n */\n constructor(chunks = []) {\n this.chunks = chunks;\n }\n /**\n * 
@method toASM\n * Serializes the script to an ASM formatted string.\n * @returns The script in ASM string format.\n */\n toASM() {\n let str = '';\n for (let i = 0; i < this.chunks.length; i++) {\n const chunk = this.chunks[i];\n str += this._chunkToString(chunk);\n }\n return str.slice(1);\n }\n /**\n * @method toHex\n * Serializes the script to a hexadecimal string.\n * @returns The script in hexadecimal format.\n */\n toHex() {\n return (0, utils_js_1.encode)(this.toBinary(), 'hex');\n }\n /**\n * @method toBinary\n * Serializes the script to a binary array.\n * @returns The script in binary array format.\n */\n toBinary() {\n const writer = new utils_js_1.Writer();\n for (let i = 0; i < this.chunks.length; i++) {\n const chunk = this.chunks[i];\n const op = chunk.op;\n writer.writeUInt8(op);\n if (op === OP_js_1.default.OP_RETURN && chunk.data != null) { // special case for unformatted data\n writer.write(chunk.data);\n break;\n }\n else if (chunk.data != null) {\n if (op < OP_js_1.default.OP_PUSHDATA1) {\n writer.write(chunk.data);\n }\n else if (op === OP_js_1.default.OP_PUSHDATA1) {\n writer.writeUInt8(chunk.data.length);\n writer.write(chunk.data);\n }\n else if (op === OP_js_1.default.OP_PUSHDATA2) {\n writer.writeUInt16LE(chunk.data.length);\n writer.write(chunk.data);\n }\n else if (op === OP_js_1.default.OP_PUSHDATA4) {\n writer.writeUInt32LE(chunk.data.length);\n writer.write(chunk.data);\n }\n }\n }\n return writer.toArray();\n }\n /**\n * @method writeScript\n * Appends another script to this script.\n * @param script - The script to append.\n * @returns This script instance for chaining.\n */\n writeScript(script) {\n this.chunks = this.chunks.concat(script.chunks);\n return this;\n }\n /**\n * @method writeOpCode\n * Appends an opcode to the script.\n * @param op - The opcode to append.\n * @returns This script instance for chaining.\n */\n writeOpCode(op) {\n this.chunks.push({ op });\n return this;\n }\n /**\n * @method setChunkOpCode\n * Sets 
the opcode of a specific chunk in the script.\n * @param i - The index of the chunk.\n * @param op - The opcode to set.\n * @returns This script instance for chaining.\n */\n setChunkOpCode(i, op) {\n this.chunks[i] = { op };\n return this;\n }\n /**\n * @method writeBn\n * Appends a BigNumber to the script as an opcode.\n * @param bn - The BigNumber to append.\n * @returns This script instance for chaining.\n */\n writeBn(bn) {\n if (bn.cmpn(0) === OP_js_1.default.OP_0) {\n this.chunks.push({\n op: OP_js_1.default.OP_0\n });\n }\n else if (bn.cmpn(-1) === 0) {\n this.chunks.push({\n op: OP_js_1.default.OP_1NEGATE\n });\n }\n else if (bn.cmpn(1) >= 0 && bn.cmpn(16) <= 0) {\n // see OP_1 - OP_16\n this.chunks.push({\n op: bn.toNumber() + OP_js_1.default.OP_1 - 1\n });\n }\n else {\n const buf = bn.toSm('little');\n this.writeBin(buf);\n }\n return this;\n }\n /**\n * @method writeBin\n * Appends binary data to the script, determining the appropriate opcode based on length.\n * @param bin - The binary data to append.\n * @returns This script instance for chaining.\n * @throws {Error} Throws an error if the data is too large to be pushed.\n */\n writeBin(bin) {\n let op;\n if (bin.length > 0 && bin.length < OP_js_1.default.OP_PUSHDATA1) {\n op = bin.length;\n }\n else if (bin.length === 0) {\n op = OP_js_1.default.OP_0;\n }\n else if (bin.length < Math.pow(2, 8)) {\n op = OP_js_1.default.OP_PUSHDATA1;\n }\n else if (bin.length < Math.pow(2, 16)) {\n op = OP_js_1.default.OP_PUSHDATA2;\n }\n else if (bin.length < Math.pow(2, 32)) {\n op = OP_js_1.default.OP_PUSHDATA4;\n }\n else {\n throw new Error(\"You can't push that much data\");\n }\n this.chunks.push({\n data: bin,\n op\n });\n return this;\n }\n /**\n * @method writeNumber\n * Appends a number to the script.\n * @param num - The number to append.\n * @returns This script instance for chaining.\n */\n writeNumber(num) {\n this.writeBn(new BigNumber_js_1.default(num));\n return this;\n }\n /**\n * @method 
removeCodeseparators\n * Removes all OP_CODESEPARATOR opcodes from the script.\n * @returns This script instance for chaining.\n */\n removeCodeseparators() {\n const chunks = [];\n for (let i = 0; i < this.chunks.length; i++) {\n if (this.chunks[i].op !== OP_js_1.default.OP_CODESEPARATOR) {\n chunks.push(this.chunks[i]);\n }\n }\n this.chunks = chunks;\n return this;\n }\n /**\n * Deletes the given item wherever it appears in the current script.\n *\n * @param script - The script containing the item to delete from the current script.\n *\n * @returns This script instance for chaining.\n */\n findAndDelete(script) {\n const buf = script.toHex();\n for (let i = 0; i < this.chunks.length; i++) {\n const script2 = new Script([this.chunks[i]]);\n const buf2 = script2.toHex();\n if (buf === buf2) {\n this.chunks.splice(i, 1);\n }\n }\n return this;\n }\n /**\n * @method isPushOnly\n * Checks if the script contains only push data operations.\n * @returns True if the script is push-only, otherwise false.\n */\n isPushOnly() {\n for (let i = 0; i < this.chunks.length; i++) {\n const chunk = this.chunks[i];\n const opCodeNum = chunk.op;\n if (opCodeNum > OP_js_1.default.OP_16) {\n return false;\n }\n }\n return true;\n }\n /**\n * @method isLockingScript\n * Determines if the script is a locking script.\n * @returns True if the script is a locking script, otherwise false.\n */\n isLockingScript() {\n throw new Error('Not implemented');\n }\n /**\n * @method isUnlockingScript\n * Determines if the script is an unlocking script.\n * @returns True if the script is an unlocking script, otherwise false.\n */\n isUnlockingScript() {\n throw new Error('Not implemented');\n }\n /**\n * @private\n * @method _chunkToString\n * Converts a script chunk to its string representation.\n * @param chunk - The script chunk.\n * @returns The string representation of the chunk.\n */\n _chunkToString(chunk) {\n const op = chunk.op;\n let str = '';\n if (typeof chunk.data === 'undefined') {\n 
const val = OP_js_1.default[op];\n str = `${str} ${val}`;\n }\n else {\n str = `${str} ${(0, utils_js_1.toHex)(chunk.data)}`;\n }\n return str;\n }\n}\nexports[\"default\"] = Script;\n//# sourceMappingURL=Script.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/sdk/dist/cjs/src/script/Script.js?\n}"); /***/ }), @@ -1947,7 +1958,7 @@ /***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => { "use strict"; -eval("{__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ KeyShares: () => (/* binding */ KeyShares),\n/* harmony export */ \"default\": () => (/* binding */ PrivateKey)\n/* harmony export */ });\n/* harmony import */ var _BigNumber_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./BigNumber.js */ \"./node_modules/@bsv/sdk/dist/esm/src/primitives/BigNumber.js\");\n/* harmony import */ var _PublicKey_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./PublicKey.js */ \"./node_modules/@bsv/sdk/dist/esm/src/primitives/PublicKey.js\");\n/* harmony import */ var _Curve_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./Curve.js */ \"./node_modules/@bsv/sdk/dist/esm/src/primitives/Curve.js\");\n/* harmony import */ var _ECDSA_js__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./ECDSA.js */ \"./node_modules/@bsv/sdk/dist/esm/src/primitives/ECDSA.js\");\n/* harmony import */ var _Hash_js__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./Hash.js */ \"./node_modules/@bsv/sdk/dist/esm/src/primitives/Hash.js\");\n/* harmony import */ var _Random_js__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./Random.js */ \"./node_modules/@bsv/sdk/dist/esm/src/primitives/Random.js\");\n/* harmony import */ var _utils_js__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! 
./utils.js */ \"./node_modules/@bsv/sdk/dist/esm/src/primitives/utils.js\");\n/* harmony import */ var _Polynomial_js__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ./Polynomial.js */ \"./node_modules/@bsv/sdk/dist/esm/src/primitives/Polynomial.js\");\n\n\n\n\n\n\n\n\n/**\n * @class KeyShares\n *\n * This class is used to store the shares of a private key.\n *\n * @param shares - An array of shares\n * @param threshold - The number of shares required to recombine the private key\n *\n * @returns KeyShares\n *\n * @example\n * const key = PrivateKey.fromShares(shares)\n *\n */\nclass KeyShares {\n points;\n threshold;\n integrity;\n constructor(points, threshold, integrity) {\n this.points = points;\n this.threshold = threshold;\n this.integrity = integrity;\n }\n static fromBackupFormat(shares) {\n let threshold = 0;\n let integrity = '';\n const points = shares.map((share, idx) => {\n const shareParts = share.split('.');\n if (shareParts.length !== 4) {\n throw new Error('Invalid share format in share ' +\n idx.toString() +\n '. Expected format: \"x.y.t.i\" - received ' +\n share);\n }\n const [x, y, t, i] = shareParts;\n if (t === undefined)\n throw new Error('Threshold not found in share ' + idx.toString());\n if (i === undefined)\n throw new Error('Integrity not found in share ' + idx.toString());\n const tInt = parseInt(t);\n if (idx !== 0 && threshold !== tInt) {\n throw new Error('Threshold mismatch in share ' + idx.toString());\n }\n if (idx !== 0 && integrity !== i) {\n throw new Error('Integrity mismatch in share ' + idx.toString());\n }\n threshold = tInt;\n integrity = i;\n return _Polynomial_js__WEBPACK_IMPORTED_MODULE_7__.PointInFiniteField.fromString([x, y].join('.'));\n });\n return new KeyShares(points, threshold, integrity);\n }\n toBackupFormat() {\n return this.points.map((share) => share.toString() + '.' + this.threshold.toString() + '.' 
+ this.integrity);\n }\n}\n/**\n * Represents a Private Key, which is a secret that can be used to generate signatures in a cryptographic system.\n *\n * The `PrivateKey` class extends from the `BigNumber` class. It offers methods to create signatures, verify them,\n * create a corresponding public key and derive a shared secret from a public key.\n *\n * @extends {BigNumber}\n * @see {@link BigNumber} for more information on BigNumber.\n */\nclass PrivateKey extends _BigNumber_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"] {\n /**\n * Generates a private key randomly.\n *\n * @method fromRandom\n * @static\n * @returns The newly generated Private Key.\n *\n * @example\n * const privateKey = PrivateKey.fromRandom();\n */\n static fromRandom() {\n return new PrivateKey((0,_Random_js__WEBPACK_IMPORTED_MODULE_5__[\"default\"])(32));\n }\n /**\n * Generates a private key from a string.\n *\n * @method fromString\n * @static\n * @param str - The string to generate the private key from.\n * @param base - The base of the string.\n * @returns The generated Private Key.\n * @throws Will throw an error if the string is not valid.\n **/\n static fromString(str, base = 'hex') {\n return new PrivateKey(super.fromString(str, base).toArray());\n }\n /**\n * Generates a private key from a hexadecimal string.\n *\n * @method fromHex\n * @static\n * @param {string} str - The hexadecimal string representing the private key. 
The string must represent a valid private key in big-endian format.\n * @returns {PrivateKey} The generated Private Key instance.\n * @throws {Error} If the string is not a valid hexadecimal or represents an invalid private key.\n **/\n static fromHex(str) {\n return new PrivateKey(super.fromHex(str, 'big'));\n }\n /**\n * Generates a private key from a WIF (Wallet Import Format) string.\n *\n * @method fromWif\n * @static\n * @param wif - The WIF string to generate the private key from.\n * @param base - The base of the string.\n * @returns The generated Private Key.\n * @throws Will throw an error if the string is not a valid WIF.\n **/\n static fromWif(wif, prefixLength = 1) {\n const decoded = (0,_utils_js__WEBPACK_IMPORTED_MODULE_6__.fromBase58Check)(wif, undefined, prefixLength);\n if (decoded.data.length !== 33) {\n throw new Error('Invalid WIF length');\n }\n if (decoded.data[32] !== 1) {\n throw new Error('Invalid WIF padding');\n }\n return new PrivateKey(decoded.data.slice(0, 32));\n }\n /**\n * @constructor\n *\n * @param number - The number (various types accepted) to construct a BigNumber from. Default is 0.\n *\n * @param base - The base of number provided. By default is 10. Ignored if number is BigNumber.\n *\n * @param endian - The endianness provided. By default is 'big endian'. Ignored if number is BigNumber.\n *\n * @param modN - Optional. Default 'apply. If 'apply', apply modN to input to guarantee a valid PrivateKey. If 'error', if input is out of field throw new Error('Input is out of field'). 
If 'nocheck', assumes input is in field.\n *\n * @example\n * import PrivateKey from './PrivateKey';\n * import BigNumber from './BigNumber';\n * const privKey = new PrivateKey(new BigNumber('123456', 10, 'be'));\n */\n constructor(number = 0, base = 10, endian = 'be', modN = 'apply') {\n if (number instanceof _BigNumber_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"]) {\n super();\n number.copy(this);\n }\n else {\n super(number, base, endian);\n }\n if (modN !== 'nocheck') {\n const check = this.checkInField();\n if (!check.inField) {\n if (modN === 'error') {\n throw new Error('Input is out of field');\n }\n // Force the PrivateKey BigNumber value to lie in the field limited by curve.n\n _BigNumber_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].move(this, check.modN);\n }\n }\n }\n /**\n * A utility function to check that the value of this PrivateKey lies in the field limited by curve.n\n * @returns { inField, modN } where modN is this PrivateKey's current BigNumber value mod curve.n, and inField is true only if modN equals current BigNumber value.\n */\n checkInField() {\n const curve = new _Curve_js__WEBPACK_IMPORTED_MODULE_2__[\"default\"]();\n const modN = this.mod(curve.n);\n const inField = this.cmp(modN) === 0;\n return { inField, modN };\n }\n /**\n * @returns true if the PrivateKey's current BigNumber value lies in the field limited by curve.n\n */\n isValid() {\n return this.checkInField().inField;\n }\n /**\n * Signs a message using the private key.\n *\n * @method sign\n * @param msg - The message (array of numbers or string) to be signed.\n * @param enc - If 'hex' the string will be treated as hex, utf8 otherwise.\n * @param forceLowS - If true (the default), the signature will be forced to have a low S value.\n * @param customK — If provided, uses a custom K-value for the signature. 
Provie a function that returns a BigNumber, or the BigNumber itself.\n * @returns A digital signature generated from the hash of the message and the private key.\n *\n * @example\n * const privateKey = PrivateKey.fromRandom();\n * const signature = privateKey.sign('Hello, World!');\n */\n sign(msg, enc, forceLowS = true, customK) {\n const msgHash = new _BigNumber_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"]((0,_Hash_js__WEBPACK_IMPORTED_MODULE_4__.sha256)(msg, enc), 16);\n return (0,_ECDSA_js__WEBPACK_IMPORTED_MODULE_3__.sign)(msgHash, this, forceLowS, customK);\n }\n /**\n * Verifies a message's signature using the public key associated with this private key.\n *\n * @method verify\n * @param msg - The original message which has been signed.\n * @param sig - The signature to be verified.\n * @param enc - The data encoding method.\n * @returns Whether or not the signature is valid.\n *\n * @example\n * const privateKey = PrivateKey.fromRandom();\n * const signature = privateKey.sign('Hello, World!');\n * const isSignatureValid = privateKey.verify('Hello, World!', signature);\n */\n verify(msg, sig, enc) {\n const msgHash = new _BigNumber_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"]((0,_Hash_js__WEBPACK_IMPORTED_MODULE_4__.sha256)(msg, enc), 16);\n return (0,_ECDSA_js__WEBPACK_IMPORTED_MODULE_3__.verify)(msgHash, sig, this.toPublicKey());\n }\n /**\n * Converts the private key to its corresponding public key.\n *\n * The public key is generated by multiplying the base point G of the curve and the private key.\n *\n * @method toPublicKey\n * @returns The generated PublicKey.\n *\n * @example\n * const privateKey = PrivateKey.fromRandom();\n * const publicKey = privateKey.toPublicKey();\n */\n toPublicKey() {\n const c = new _Curve_js__WEBPACK_IMPORTED_MODULE_2__[\"default\"]();\n const p = c.g.mul(this);\n return new _PublicKey_js__WEBPACK_IMPORTED_MODULE_1__[\"default\"](p.x, p.y);\n }\n /**\n * Converts the private key to a Wallet Import Format (WIF) string.\n *\n 
* Base58Check encoding is used for encoding the private key.\n * The prefix\n *\n * @method toWif\n * @returns The WIF string.\n *\n * @param prefix defaults to [0x80] for mainnet, set it to [0xef] for testnet.\n *\n * @throws Error('Value is out of field') if current BigNumber value is out of field limited by curve.n\n *\n * @example\n * const privateKey = PrivateKey.fromRandom();\n * const wif = privateKey.toWif();\n * const testnetWif = privateKey.toWif([0xef]);\n */\n toWif(prefix = [0x80]) {\n if (!this.isValid()) {\n throw new Error('Value is out of field');\n }\n return (0,_utils_js__WEBPACK_IMPORTED_MODULE_6__.toBase58Check)([...this.toArray('be', 32), 1], prefix);\n }\n /**\n * Base58Check encodes the hash of the public key associated with this private key with a prefix to indicate locking script type.\n * Defaults to P2PKH for mainnet, otherwise known as a \"Bitcoin Address\".\n *\n * @param prefix defaults to [0x00] for mainnet, set to [0x6f] for testnet or use the strings 'testnet' or 'mainnet'\n *\n * @returns Returns the address encoding associated with the hash of the public key associated with this private key.\n *\n * @example\n * const address = privkey.toAddress()\n * const address = privkey.toAddress('mainnet')\n * const testnetAddress = privkey.toAddress([0x6f])\n * const testnetAddress = privkey.toAddress('testnet')\n */\n toAddress(prefix = [0x00]) {\n return this.toPublicKey().toAddress(prefix);\n }\n /**\n * Converts this PrivateKey to a hexadecimal string.\n *\n * @method toHex\n * @param length - The minimum length of the hex string\n * @returns Returns a string representing the hexadecimal value of this BigNumber.\n *\n * @example\n * const bigNumber = new BigNumber(255);\n * const hex = bigNumber.toHex();\n */\n toHex() {\n return super.toHex(32);\n }\n /**\n * Converts this PrivateKey to a string representation.\n *\n * @method toString\n * @param {number | 'hex'} [base='hex'] - The base for representing the number. 
Default is hexadecimal ('hex').\n * @param {number} [padding=64] - The minimum number of digits for the output string. Default is 64, ensuring a 256-bit representation in hexadecimal.\n * @returns {string} A string representation of the PrivateKey in the specified base, padded to the specified length.\n *\n **/\n toString(base = 'hex', padding = 64) {\n return super.toString(base, padding);\n }\n /**\n * Derives a shared secret from the public key.\n *\n * @method deriveSharedSecret\n * @param key - The public key to derive the shared secret from.\n * @returns The derived shared secret (a point on the curve).\n * @throws Will throw an error if the public key is not valid.\n *\n * @example\n * const privateKey = PrivateKey.fromRandom();\n * const publicKey = privateKey.toPublicKey();\n * const sharedSecret = privateKey.deriveSharedSecret(publicKey);\n */\n deriveSharedSecret(key) {\n if (!key.validate()) {\n throw new Error('Public key not valid for ECDH secret derivation');\n }\n return key.mul(this);\n }\n /**\n * Derives a child key with BRC-42.\n * @param publicKey The public key of the other party\n * @param invoiceNumber The invoice number used to derive the child key\n * @param cacheSharedSecret Optional function to cache shared secrets\n * @param retrieveCachedSharedSecret Optional function to retrieve shared secrets from the cache\n * @returns The derived child key.\n */\n deriveChild(publicKey, invoiceNumber, cacheSharedSecret, retrieveCachedSharedSecret) {\n let sharedSecret;\n if (typeof retrieveCachedSharedSecret === 'function') {\n const retrieved = retrieveCachedSharedSecret(this, publicKey);\n if (typeof retrieved !== 'undefined') {\n sharedSecret = retrieved;\n }\n else {\n sharedSecret = this.deriveSharedSecret(publicKey);\n if (typeof cacheSharedSecret === 'function') {\n cacheSharedSecret(this, publicKey, sharedSecret);\n }\n }\n }\n else {\n sharedSecret = this.deriveSharedSecret(publicKey);\n }\n const invoiceNumberBin = 
(0,_utils_js__WEBPACK_IMPORTED_MODULE_6__.toArray)(invoiceNumber, 'utf8');\n const hmac = (0,_Hash_js__WEBPACK_IMPORTED_MODULE_4__.sha256hmac)(sharedSecret.encode(true), invoiceNumberBin);\n const curve = new _Curve_js__WEBPACK_IMPORTED_MODULE_2__[\"default\"]();\n return new PrivateKey(this.add(new _BigNumber_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"](hmac)).mod(curve.n).toArray());\n }\n /**\n * Splits the private key into shares using Shamir's Secret Sharing Scheme.\n *\n * @param threshold The minimum number of shares required to reconstruct the private key.\n * @param totalShares The total number of shares to generate.\n * @param prime The prime number to be used in Shamir's Secret Sharing Scheme.\n * @returns An array of shares.\n *\n * @example\n * const key = PrivateKey.fromRandom()\n * const shares = key.toKeyShares(2, 5)\n */\n toKeyShares(threshold, totalShares) {\n if (typeof threshold !== 'number' || typeof totalShares !== 'number') {\n throw new Error('threshold and totalShares must be numbers');\n }\n if (threshold < 2)\n throw new Error('threshold must be at least 2');\n if (totalShares < 2)\n throw new Error('totalShares must be at least 2');\n if (threshold > totalShares) {\n throw new Error('threshold should be less than or equal to totalShares');\n }\n const poly = _Polynomial_js__WEBPACK_IMPORTED_MODULE_7__[\"default\"].fromPrivateKey(this, threshold);\n const points = [];\n const usedXCoordinates = new Set();\n const curve = new _Curve_js__WEBPACK_IMPORTED_MODULE_2__[\"default\"]();\n /**\n * Cryptographically secure x-coordinate generation for Shamir's Secret Sharing (toKeyShares)\n *\n * - Each x-coordinate is derived using a master seed (Random(64)) as the HMAC key and a per-attempt counter array as the message.\n * - The counter array includes the share index, the attempt number (to handle rare collisions), and 32 bytes of fresh randomness for each attempt.\n * - This ensures:\n * 1. 
**Non-determinism**: Each split is unique, even for the same key and parameters, due to the per-attempt randomness.\n * 2. **Uniqueness**: x-coordinates are checked for zero and duplication; retry logic ensures no repeats or invalid values.\n * 3. **Cryptographic strength**: HMAC-SHA-512 is robust, and combining deterministic and random values protects against RNG compromise or bias.\n * 4. **Defensive programming**: Attempts are capped (5 per share) to prevent infinite loops in pathological cases.\n *\n * This approach is robust against all practical attacks and is suitable for high-security environments where deterministic splits are not desired.\n */\n const seed = (0,_Random_js__WEBPACK_IMPORTED_MODULE_5__[\"default\"])(64);\n for (let i = 0; i < totalShares; i++) {\n let x;\n let attempts = 0;\n do {\n // To ensure no two points are ever the same, even if the system RNG is compromised,\n // we'll use a different counter value for each point and use SHA-512 HMAC.\n const counter = [i, attempts, ...(0,_Random_js__WEBPACK_IMPORTED_MODULE_5__[\"default\"])(32)];\n const h = (0,_Hash_js__WEBPACK_IMPORTED_MODULE_4__.sha512hmac)(seed, counter);\n x = new _BigNumber_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"](h).umod(curve.p);\n // repeat generation if x is zero or has already been used (insanely unlikely)\n attempts++;\n if (attempts > 5) {\n throw new Error('Failed to generate unique x coordinate after 5 attempts');\n }\n } while (x.isZero() || usedXCoordinates.has(x.toString()));\n usedXCoordinates.add(x.toString());\n const y = poly.valueAt(x);\n points.push(new _Polynomial_js__WEBPACK_IMPORTED_MODULE_7__.PointInFiniteField(x, y));\n }\n const integrity = this.toPublicKey().toHash('hex').slice(0, 8);\n return new KeyShares(points, threshold, integrity);\n }\n /**\n * @method toBackupShares\n *\n * Creates a backup of the private key by splitting it into shares.\n *\n *\n * @param threshold The number of shares which will be required to reconstruct the private 
key.\n * @param totalShares The number of shares to generate for distribution.\n * @returns\n */\n toBackupShares(threshold, totalShares) {\n return this.toKeyShares(threshold, totalShares).toBackupFormat();\n }\n /**\n *\n * @method fromBackupShares\n *\n * Creates a private key from backup shares.\n *\n * @param shares\n * @returns PrivateKey\n *\n * @example\n *\n * const share1 = '3znuzt7DZp8HzZTfTh5MF9YQKNX3oSxTbSYmSRGrH2ev.2Nm17qoocmoAhBTCs8TEBxNXCskV9N41rB2PckcgYeqV.2.35449bb9'\n * const share2 = 'Cm5fuUc39X5xgdedao8Pr1kvCSm8Gk7Cfenc7xUKcfLX.2juyK9BxCWn2DiY5JUAgj9NsQ77cc9bWksFyW45haXZm.2.35449bb9'\n *\n * const recoveredKey = PrivateKey.fromBackupShares([share1, share2])\n */\n static fromBackupShares(shares) {\n return PrivateKey.fromKeyShares(KeyShares.fromBackupFormat(shares));\n }\n /**\n * Combines shares to reconstruct the private key.\n *\n * @param shares An array of points (shares) to be used to reconstruct the private key.\n * @param threshold The minimum number of shares required to reconstruct the private key.\n *\n * @returns The reconstructed private key.\n *\n **/\n static fromKeyShares(keyShares) {\n const { points, threshold, integrity } = keyShares;\n if (threshold < 2)\n throw new Error('threshold must be at least 2');\n if (points.length < threshold) {\n throw new Error(`At least ${threshold} shares are required to reconstruct the private key`);\n }\n // check to see if two points have the same x value\n for (let i = 0; i < threshold; i++) {\n for (let j = i + 1; j < threshold; j++) {\n if (points[i].x.eq(points[j].x)) {\n throw new Error('Duplicate share detected, each must be unique.');\n }\n }\n }\n const poly = new _Polynomial_js__WEBPACK_IMPORTED_MODULE_7__[\"default\"](points, threshold);\n const privateKey = new PrivateKey(poly.valueAt(new _BigNumber_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"](0)).toArray());\n const integrityHash = privateKey.toPublicKey().toHash('hex').slice(0, 8);\n if (integrityHash !== integrity) {\n throw 
new Error('Integrity hash mismatch');\n }\n return privateKey;\n }\n}\n//# sourceMappingURL=PrivateKey.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/sdk/dist/esm/src/primitives/PrivateKey.js?\n}"); +eval("{__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ KeyShares: () => (/* binding */ KeyShares),\n/* harmony export */ \"default\": () => (/* binding */ PrivateKey)\n/* harmony export */ });\n/* harmony import */ var _BigNumber_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./BigNumber.js */ \"./node_modules/@bsv/sdk/dist/esm/src/primitives/BigNumber.js\");\n/* harmony import */ var _PublicKey_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./PublicKey.js */ \"./node_modules/@bsv/sdk/dist/esm/src/primitives/PublicKey.js\");\n/* harmony import */ var _Curve_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./Curve.js */ \"./node_modules/@bsv/sdk/dist/esm/src/primitives/Curve.js\");\n/* harmony import */ var _ECDSA_js__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./ECDSA.js */ \"./node_modules/@bsv/sdk/dist/esm/src/primitives/ECDSA.js\");\n/* harmony import */ var _Hash_js__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./Hash.js */ \"./node_modules/@bsv/sdk/dist/esm/src/primitives/Hash.js\");\n/* harmony import */ var _Random_js__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./Random.js */ \"./node_modules/@bsv/sdk/dist/esm/src/primitives/Random.js\");\n/* harmony import */ var _utils_js__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ./utils.js */ \"./node_modules/@bsv/sdk/dist/esm/src/primitives/utils.js\");\n/* harmony import */ var _Polynomial_js__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! 
./Polynomial.js */ \"./node_modules/@bsv/sdk/dist/esm/src/primitives/Polynomial.js\");\n\n\n\n\n\n\n\n\n/**\n * @class KeyShares\n *\n * This class is used to store the shares of a private key.\n *\n * @param shares - An array of shares\n * @param threshold - The number of shares required to recombine the private key\n *\n * @returns KeyShares\n *\n * @example\n * const key = PrivateKey.fromShares(shares)\n *\n */\nclass KeyShares {\n points;\n threshold;\n integrity;\n constructor(points, threshold, integrity) {\n this.points = points;\n this.threshold = threshold;\n this.integrity = integrity;\n }\n static fromBackupFormat(shares) {\n let threshold = 0;\n let integrity = '';\n const points = shares.map((share, idx) => {\n const shareParts = share.split('.');\n if (shareParts.length !== 4) {\n throw new Error('Invalid share format in share ' +\n idx.toString() +\n '. Expected format: \"x.y.t.i\" - received ' +\n share);\n }\n const [x, y, t, i] = shareParts;\n if (t === undefined)\n throw new Error('Threshold not found in share ' + idx.toString());\n if (i === undefined)\n throw new Error('Integrity not found in share ' + idx.toString());\n const tInt = parseInt(t);\n if (idx !== 0 && threshold !== tInt) {\n throw new Error('Threshold mismatch in share ' + idx.toString());\n }\n if (idx !== 0 && integrity !== i) {\n throw new Error('Integrity mismatch in share ' + idx.toString());\n }\n threshold = tInt;\n integrity = i;\n return _Polynomial_js__WEBPACK_IMPORTED_MODULE_7__.PointInFiniteField.fromString([x, y].join('.'));\n });\n return new KeyShares(points, threshold, integrity);\n }\n toBackupFormat() {\n return this.points.map((share) => share.toString() + '.' + this.threshold.toString() + '.' + this.integrity);\n }\n}\n/**\n * Represents a Private Key, which is a secret that can be used to generate signatures in a cryptographic system.\n *\n * The `PrivateKey` class extends from the `BigNumber` class. 
It offers methods to create signatures, verify them,\n * create a corresponding public key and derive a shared secret from a public key.\n *\n * @extends {BigNumber}\n * @see {@link BigNumber} for more information on BigNumber.\n */\nclass PrivateKey extends _BigNumber_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"] {\n /**\n * Generates a private key randomly.\n *\n * @method fromRandom\n * @static\n * @returns The newly generated Private Key.\n *\n * @example\n * const privateKey = PrivateKey.fromRandom();\n */\n static fromRandom() {\n return new PrivateKey((0,_Random_js__WEBPACK_IMPORTED_MODULE_5__[\"default\"])(32));\n }\n /**\n * Generates a private key from a string.\n *\n * @method fromString\n * @static\n * @param str - The string to generate the private key from.\n * @param base - The base of the string.\n * @returns The generated Private Key.\n * @throws Will throw an error if the string is not valid.\n **/\n static fromString(str, base = 'hex') {\n return new PrivateKey(super.fromString(str, base).toArray());\n }\n /**\n * Generates a private key from a hexadecimal string.\n *\n * @method fromHex\n * @static\n * @param {string} str - The hexadecimal string representing the private key. 
The string must represent a valid private key in big-endian format.\n * @returns {PrivateKey} The generated Private Key instance.\n * @throws {Error} If the string is not a valid hexadecimal or represents an invalid private key.\n **/\n static fromHex(str) {\n return new PrivateKey(super.fromHex(str, 'big'));\n }\n /**\n * Generates a private key from a WIF (Wallet Import Format) string.\n *\n * @method fromWif\n * @static\n * @param wif - The WIF string to generate the private key from.\n * @param base - The base of the string.\n * @returns The generated Private Key.\n * @throws Will throw an error if the string is not a valid WIF.\n **/\n static fromWif(wif, prefixLength = 1) {\n const decoded = (0,_utils_js__WEBPACK_IMPORTED_MODULE_6__.fromBase58Check)(wif, undefined, prefixLength);\n if (decoded.data.length !== 33) {\n throw new Error('Invalid WIF length');\n }\n if (decoded.data[32] !== 1) {\n throw new Error('Invalid WIF padding');\n }\n return new PrivateKey(decoded.data.slice(0, 32));\n }\n /**\n * @constructor\n *\n * @param number - The number (various types accepted) to construct a BigNumber from. Default is 0.\n *\n * @param base - The base of number provided. By default is 10. Ignored if number is BigNumber.\n *\n * @param endian - The endianness provided. By default is 'big endian'. Ignored if number is BigNumber.\n *\n * @param modN - Optional. Default 'apply. If 'apply', apply modN to input to guarantee a valid PrivateKey. If 'error', if input is out of field throw new Error('Input is out of field'). 
If 'nocheck', assumes input is in field.\n *\n * @example\n * import PrivateKey from './PrivateKey';\n * import BigNumber from './BigNumber';\n * const privKey = new PrivateKey(new BigNumber('123456', 10, 'be'));\n */\n constructor(number = 0, base = 10, endian = 'be', modN = 'apply') {\n if (number instanceof _BigNumber_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"]) {\n super();\n number.copy(this);\n }\n else {\n super(number, base, endian);\n }\n if (modN !== 'nocheck') {\n const check = this.checkInField();\n if (!check.inField) {\n if (modN === 'error') {\n throw new Error('Input is out of field');\n }\n // Force the PrivateKey BigNumber value to lie in the field limited by curve.n\n _BigNumber_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].move(this, check.modN);\n }\n }\n }\n /**\n * A utility function to check that the value of this PrivateKey lies in the field limited by curve.n\n * @returns { inField, modN } where modN is this PrivateKey's current BigNumber value mod curve.n, and inField is true only if modN equals current BigNumber value.\n */\n checkInField() {\n const curve = new _Curve_js__WEBPACK_IMPORTED_MODULE_2__[\"default\"]();\n const modN = this.mod(curve.n);\n const inField = this.cmp(modN) === 0;\n return { inField, modN };\n }\n /**\n * @returns true if the PrivateKey's current BigNumber value lies in the field limited by curve.n\n */\n isValid() {\n return this.checkInField().inField;\n }\n /**\n * Signs a message using the private key.\n *\n * @method sign\n * @param msg - The message (array of numbers or string) to be signed.\n * @param enc - If 'hex' the string will be treated as hex, utf8 otherwise.\n * @param forceLowS - If true (the default), the signature will be forced to have a low S value.\n * @param customK — If provided, uses a custom K-value for the signature. 
Provie a function that returns a BigNumber, or the BigNumber itself.\n * @returns A digital signature generated from the hash of the message and the private key.\n *\n * @example\n * const privateKey = PrivateKey.fromRandom();\n * const signature = privateKey.sign('Hello, World!');\n */\n sign(msg, enc, forceLowS = true, customK) {\n const msgHash = new _BigNumber_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"]((0,_Hash_js__WEBPACK_IMPORTED_MODULE_4__.sha256)(msg, enc), 16);\n return (0,_ECDSA_js__WEBPACK_IMPORTED_MODULE_3__.sign)(msgHash, this, forceLowS, customK);\n }\n /**\n * Verifies a message's signature using the public key associated with this private key.\n *\n * @method verify\n * @param msg - The original message which has been signed.\n * @param sig - The signature to be verified.\n * @param enc - The data encoding method.\n * @returns Whether or not the signature is valid.\n *\n * @example\n * const privateKey = PrivateKey.fromRandom();\n * const signature = privateKey.sign('Hello, World!');\n * const isSignatureValid = privateKey.verify('Hello, World!', signature);\n */\n verify(msg, sig, enc) {\n const msgHash = new _BigNumber_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"]((0,_Hash_js__WEBPACK_IMPORTED_MODULE_4__.sha256)(msg, enc), 16);\n return (0,_ECDSA_js__WEBPACK_IMPORTED_MODULE_3__.verify)(msgHash, sig, this.toPublicKey());\n }\n /**\n * Converts the private key to its corresponding public key.\n *\n * The public key is generated by multiplying the base point G of the curve and the private key.\n *\n * @method toPublicKey\n * @returns The generated PublicKey.\n *\n * @example\n * const privateKey = PrivateKey.fromRandom();\n * const publicKey = privateKey.toPublicKey();\n */\n toPublicKey() {\n const c = new _Curve_js__WEBPACK_IMPORTED_MODULE_2__[\"default\"]();\n const p = c.g.mul(this);\n return new _PublicKey_js__WEBPACK_IMPORTED_MODULE_1__[\"default\"](p.x, p.y);\n }\n /**\n * Converts the private key to a Wallet Import Format (WIF) string.\n *\n 
* Base58Check encoding is used for encoding the private key.\n * The prefix\n *\n * @method toWif\n * @returns The WIF string.\n *\n * @param prefix defaults to [0x80] for mainnet, set it to [0xef] for testnet.\n *\n * @throws Error('Value is out of field') if current BigNumber value is out of field limited by curve.n\n *\n * @example\n * const privateKey = PrivateKey.fromRandom();\n * const wif = privateKey.toWif();\n * const testnetWif = privateKey.toWif([0xef]);\n */\n toWif(prefix = [0x80]) {\n if (!this.isValid()) {\n throw new Error('Value is out of field');\n }\n return (0,_utils_js__WEBPACK_IMPORTED_MODULE_6__.toBase58Check)([...this.toArray('be', 32), 1], prefix);\n }\n /**\n * Base58Check encodes the hash of the public key associated with this private key with a prefix to indicate locking script type.\n * Defaults to P2PKH for mainnet, otherwise known as a \"Bitcoin Address\".\n *\n * @param prefix defaults to [0x00] for mainnet, set to [0x6f] for testnet or use the strings 'testnet' or 'mainnet'\n *\n * @returns Returns the address encoding associated with the hash of the public key associated with this private key.\n *\n * @example\n * const address = privkey.toAddress()\n * const address = privkey.toAddress('mainnet')\n * const testnetAddress = privkey.toAddress([0x6f])\n * const testnetAddress = privkey.toAddress('testnet')\n */\n toAddress(prefix = [0x00]) {\n return this.toPublicKey().toAddress(prefix);\n }\n /**\n * Converts this PrivateKey to a hexadecimal string.\n *\n * @method toHex\n * @param length - The minimum length of the hex string\n * @returns Returns a string representing the hexadecimal value of this BigNumber.\n *\n * @example\n * const bigNumber = new BigNumber(255);\n * const hex = bigNumber.toHex();\n */\n toHex() {\n return super.toHex(32);\n }\n /**\n * Converts this PrivateKey to a string representation.\n *\n * @method toString\n * @param {number | 'hex'} [base='hex'] - The base for representing the number. 
Default is hexadecimal ('hex').\n * @param {number} [padding=64] - The minimum number of digits for the output string. Default is 64, ensuring a 256-bit representation in hexadecimal.\n * @returns {string} A string representation of the PrivateKey in the specified base, padded to the specified length.\n *\n **/\n toString(base = 'hex', padding = 64) {\n return super.toString(base, padding);\n }\n /**\n * Derives a shared secret from the public key.\n *\n * @method deriveSharedSecret\n * @param key - The public key to derive the shared secret from.\n * @returns The derived shared secret (a point on the curve).\n * @throws Will throw an error if the public key is not valid.\n *\n * @example\n * const privateKey = PrivateKey.fromRandom();\n * const publicKey = privateKey.toPublicKey();\n * const sharedSecret = privateKey.deriveSharedSecret(publicKey);\n */\n deriveSharedSecret(key) {\n if (!key.validate()) {\n throw new Error('Public key not valid for ECDH secret derivation');\n }\n return key.mul(this);\n }\n /**\n * Derives a child key with BRC-42.\n * @param publicKey The public key of the other party\n * @param invoiceNumber The invoice number used to derive the child key\n * @param cacheSharedSecret Optional function to cache shared secrets\n * @param retrieveCachedSharedSecret Optional function to retrieve shared secrets from the cache\n * @returns The derived child key.\n */\n deriveChild(publicKey, invoiceNumber, cacheSharedSecret, retrieveCachedSharedSecret) {\n let sharedSecret;\n if (typeof retrieveCachedSharedSecret === 'function') {\n const retrieved = retrieveCachedSharedSecret(this, publicKey);\n if (typeof retrieved !== 'undefined') {\n sharedSecret = retrieved;\n }\n else {\n sharedSecret = this.deriveSharedSecret(publicKey);\n if (typeof cacheSharedSecret === 'function') {\n cacheSharedSecret(this, publicKey, sharedSecret);\n }\n }\n }\n else {\n sharedSecret = this.deriveSharedSecret(publicKey);\n }\n const invoiceNumberBin = 
(0,_utils_js__WEBPACK_IMPORTED_MODULE_6__.toArray)(invoiceNumber, 'utf8');\n const hmac = (0,_Hash_js__WEBPACK_IMPORTED_MODULE_4__.sha256hmac)(sharedSecret.encode(true), invoiceNumberBin);\n const curve = new _Curve_js__WEBPACK_IMPORTED_MODULE_2__[\"default\"]();\n return new PrivateKey(this.add(new _BigNumber_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"](hmac)).mod(curve.n).toArray());\n }\n /**\n * Splits the private key into shares using Shamir's Secret Sharing Scheme.\n *\n * @param threshold The minimum number of shares required to reconstruct the private key.\n * @param totalShares The total number of shares to generate.\n * @param prime The prime number to be used in Shamir's Secret Sharing Scheme.\n * @returns An array of shares.\n *\n * @example\n * const key = PrivateKey.fromRandom()\n * const shares = key.toKeyShares(2, 5)\n */\n toKeyShares(threshold, totalShares) {\n if (typeof threshold !== 'number' || typeof totalShares !== 'number') {\n throw new Error('threshold and totalShares must be numbers');\n }\n if (threshold < 2)\n throw new Error('threshold must be at least 2');\n if (totalShares < 2)\n throw new Error('totalShares must be at least 2');\n if (threshold > totalShares) {\n throw new Error('threshold should be less than or equal to totalShares');\n }\n const poly = _Polynomial_js__WEBPACK_IMPORTED_MODULE_7__[\"default\"].fromPrivateKey(this, threshold);\n const points = [];\n for (let i = 0; i < totalShares; i++) {\n const x = new _BigNumber_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"](PrivateKey.fromRandom().toArray());\n const y = poly.valueAt(x);\n points.push(new _Polynomial_js__WEBPACK_IMPORTED_MODULE_7__.PointInFiniteField(x, y));\n }\n const integrity = this.toPublicKey().toHash('hex').slice(0, 8);\n return new KeyShares(points, threshold, integrity);\n }\n /**\n * @method toBackupShares\n *\n * Creates a backup of the private key by splitting it into shares.\n *\n *\n * @param threshold The number of shares which will be required to 
reconstruct the private key.\n * @param totalShares The number of shares to generate for distribution.\n * @returns\n */\n toBackupShares(threshold, totalShares) {\n return this.toKeyShares(threshold, totalShares).toBackupFormat();\n }\n /**\n *\n * @method fromBackupShares\n *\n * Creates a private key from backup shares.\n *\n * @param shares\n * @returns PrivateKey\n *\n * @example\n *\n * const share1 = '3znuzt7DZp8HzZTfTh5MF9YQKNX3oSxTbSYmSRGrH2ev.2Nm17qoocmoAhBTCs8TEBxNXCskV9N41rB2PckcgYeqV.2.35449bb9'\n * const share2 = 'Cm5fuUc39X5xgdedao8Pr1kvCSm8Gk7Cfenc7xUKcfLX.2juyK9BxCWn2DiY5JUAgj9NsQ77cc9bWksFyW45haXZm.2.35449bb9'\n *\n * const recoveredKey = PrivateKey.fromBackupShares([share1, share2])\n */\n static fromBackupShares(shares) {\n return PrivateKey.fromKeyShares(KeyShares.fromBackupFormat(shares));\n }\n /**\n * Combines shares to reconstruct the private key.\n *\n * @param shares An array of points (shares) to be used to reconstruct the private key.\n * @param threshold The minimum number of shares required to reconstruct the private key.\n *\n * @returns The reconstructed private key.\n *\n **/\n static fromKeyShares(keyShares) {\n const { points, threshold, integrity } = keyShares;\n if (threshold < 2)\n throw new Error('threshold must be at least 2');\n if (points.length < threshold) {\n throw new Error(`At least ${threshold} shares are required to reconstruct the private key`);\n }\n // check to see if two points have the same x value\n for (let i = 0; i < threshold; i++) {\n for (let j = i + 1; j < threshold; j++) {\n if (points[i].x.eq(points[j].x)) {\n throw new Error('Duplicate share detected, each must be unique.');\n }\n }\n }\n const poly = new _Polynomial_js__WEBPACK_IMPORTED_MODULE_7__[\"default\"](points, threshold);\n const privateKey = new PrivateKey(poly.valueAt(new _BigNumber_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"](0)).toArray());\n const integrityHash = privateKey.toPublicKey().toHash('hex').slice(0, 8);\n if (integrityHash !== 
integrity) {\n throw new Error('Integrity hash mismatch');\n }\n return privateKey;\n }\n}\n//# sourceMappingURL=PrivateKey.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/sdk/dist/esm/src/primitives/PrivateKey.js?\n}"); /***/ }), @@ -2112,7 +2123,7 @@ /***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => { "use strict"; -eval("{__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ \"default\": () => (/* binding */ Script)\n/* harmony export */ });\n/* harmony import */ var _OP_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./OP.js */ \"./node_modules/@bsv/sdk/dist/esm/src/script/OP.js\");\n/* harmony import */ var _primitives_utils_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../primitives/utils.js */ \"./node_modules/@bsv/sdk/dist/esm/src/primitives/utils.js\");\n/* harmony import */ var _primitives_BigNumber_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../primitives/BigNumber.js */ \"./node_modules/@bsv/sdk/dist/esm/src/primitives/BigNumber.js\");\n\n\n\n/**\n * The Script class represents a script in a Bitcoin SV transaction,\n * encapsulating the functionality to construct, parse, and serialize\n * scripts used in both locking (output) and unlocking (input) scripts.\n *\n * @property {ScriptChunk[]} chunks - An array of script chunks that make up the script.\n */\nclass Script {\n chunks;\n /**\n * @method fromASM\n * Static method to construct a Script instance from an ASM (Assembly) formatted string.\n * @param asm - The script in ASM string format.\n * @returns A new Script instance.\n * @example\n * const script = Script.fromASM(\"OP_DUP OP_HASH160 abcd... 
OP_EQUALVERIFY OP_CHECKSIG\")\n */\n static fromASM(asm) {\n const chunks = [];\n const tokens = asm.split(' ');\n let i = 0;\n while (i < tokens.length) {\n const token = tokens[i];\n let opCode;\n let opCodeNum = 0;\n if (token.startsWith('OP_') && typeof _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"][token] !== 'undefined') {\n opCode = token;\n opCodeNum = _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"][token];\n }\n // we start with two special cases, 0 and -1, which are handled specially in\n // toASM. see _chunkToString.\n if (token === '0') {\n opCodeNum = 0;\n chunks.push({\n op: opCodeNum\n });\n i = i + 1;\n }\n else if (token === '-1') {\n opCodeNum = _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_1NEGATE;\n chunks.push({\n op: opCodeNum\n });\n i = i + 1;\n }\n else if (opCode === undefined) {\n let hex = tokens[i];\n if (hex.length % 2 !== 0) {\n hex = '0' + hex;\n }\n const arr = (0,_primitives_utils_js__WEBPACK_IMPORTED_MODULE_1__.toArray)(hex, 'hex');\n if ((0,_primitives_utils_js__WEBPACK_IMPORTED_MODULE_1__.encode)(arr, 'hex') !== hex) {\n throw new Error('invalid hex string in script');\n }\n const len = arr.length;\n if (len >= 0 && len < _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA1) {\n opCodeNum = len;\n }\n else if (len < Math.pow(2, 8)) {\n opCodeNum = _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA1;\n }\n else if (len < Math.pow(2, 16)) {\n opCodeNum = _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA2;\n }\n else if (len < Math.pow(2, 32)) {\n opCodeNum = _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA4;\n }\n chunks.push({\n data: arr,\n op: opCodeNum\n });\n i = i + 1;\n }\n else if (opCodeNum === _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA1 ||\n opCodeNum === _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA2 ||\n opCodeNum === _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA4) {\n chunks.push({\n data: 
(0,_primitives_utils_js__WEBPACK_IMPORTED_MODULE_1__.toArray)(tokens[i + 2], 'hex'),\n op: opCodeNum\n });\n i = i + 3;\n }\n else {\n chunks.push({\n op: opCodeNum\n });\n i = i + 1;\n }\n }\n return new Script(chunks);\n }\n /**\n * @method fromHex\n * Static method to construct a Script instance from a hexadecimal string.\n * @param hex - The script in hexadecimal format.\n * @returns A new Script instance.\n * @example\n * const script = Script.fromHex(\"76a9...\");\n */\n static fromHex(hex) {\n if (hex.length === 0)\n return Script.fromBinary([]);\n if (hex.length % 2 !== 0) {\n throw new Error('There is an uneven number of characters in the string which suggests it is not hex encoded.');\n }\n if (!/^[0-9a-fA-F]+$/.test(hex)) {\n throw new Error('Some elements in this string are not hex encoded.');\n }\n return Script.fromBinary((0,_primitives_utils_js__WEBPACK_IMPORTED_MODULE_1__.toArray)(hex, 'hex'));\n }\n /**\n * @method fromBinary\n * Static method to construct a Script instance from a binary array.\n * @param bin - The script in binary array format.\n * @returns A new Script instance.\n * @example\n * const script = Script.fromBinary([0x76, 0xa9, ...])\n */\n static fromBinary(bin) {\n bin = [...bin];\n const chunks = [];\n let inConditionalBlock = 0;\n const br = new _primitives_utils_js__WEBPACK_IMPORTED_MODULE_1__.Reader(bin);\n while (!br.eof()) {\n const op = br.readUInt8();\n // if OP_RETURN and not in a conditional block, do not parse the rest of the data,\n // rather just return the last chunk as data without prefixing with data length.\n if (op === _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_RETURN && inConditionalBlock === 0) {\n chunks.push({\n op,\n data: br.read()\n });\n break;\n }\n if (op === _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_IF || op === _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_NOTIF || op === _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_VERIF || op === 
_OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_VERNOTIF) {\n inConditionalBlock++;\n }\n else if (op === _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_ENDIF) {\n inConditionalBlock--;\n }\n let len = 0;\n // eslint-disable-next-line @typescript-eslint/no-shadow\n let data = [];\n if (op > 0 && op < _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA1) {\n len = op;\n chunks.push({\n data: br.read(len),\n op\n });\n }\n else if (op === _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA1) {\n try {\n len = br.readUInt8();\n data = br.read(len);\n }\n catch {\n br.read();\n }\n chunks.push({\n data,\n op\n });\n }\n else if (op === _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA2) {\n try {\n len = br.readUInt16LE();\n data = br.read(len);\n }\n catch {\n br.read();\n }\n chunks.push({\n data,\n op\n });\n }\n else if (op === _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA4) {\n try {\n len = br.readUInt32LE();\n data = br.read(len);\n }\n catch {\n br.read();\n }\n chunks.push({\n data,\n op\n });\n }\n else {\n chunks.push({\n op\n });\n }\n }\n return new Script(chunks);\n }\n /**\n * @constructor\n * Constructs a new Script object.\n * @param chunks=[] - An array of script chunks to directly initialize the script.\n */\n constructor(chunks = []) {\n this.chunks = chunks;\n }\n /**\n * @method toASM\n * Serializes the script to an ASM formatted string.\n * @returns The script in ASM string format.\n */\n toASM() {\n let str = '';\n for (let i = 0; i < this.chunks.length; i++) {\n const chunk = this.chunks[i];\n str += this._chunkToString(chunk);\n }\n return str.slice(1);\n }\n /**\n * @method toHex\n * Serializes the script to a hexadecimal string.\n * @returns The script in hexadecimal format.\n */\n toHex() {\n return (0,_primitives_utils_js__WEBPACK_IMPORTED_MODULE_1__.encode)(this.toBinary(), 'hex');\n }\n /**\n * @method toBinary\n * Serializes the script to a binary array.\n * @returns The script in 
binary array format.\n */\n toBinary() {\n const writer = new _primitives_utils_js__WEBPACK_IMPORTED_MODULE_1__.Writer();\n for (let i = 0; i < this.chunks.length; i++) {\n const chunk = this.chunks[i];\n const op = chunk.op;\n writer.writeUInt8(op);\n if (op === _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_RETURN && chunk.data != null) { // special case for unformatted data\n writer.write(chunk.data);\n break;\n }\n else if (chunk.data != null) {\n if (op < _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA1) {\n writer.write(chunk.data);\n }\n else if (op === _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA1) {\n writer.writeUInt8(chunk.data.length);\n writer.write(chunk.data);\n }\n else if (op === _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA2) {\n writer.writeUInt16LE(chunk.data.length);\n writer.write(chunk.data);\n }\n else if (op === _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA4) {\n writer.writeUInt32LE(chunk.data.length);\n writer.write(chunk.data);\n }\n }\n }\n return writer.toArray();\n }\n /**\n * @method writeScript\n * Appends another script to this script.\n * @param script - The script to append.\n * @returns This script instance for chaining.\n */\n writeScript(script) {\n this.chunks = this.chunks.concat(script.chunks);\n return this;\n }\n /**\n * @method writeOpCode\n * Appends an opcode to the script.\n * @param op - The opcode to append.\n * @returns This script instance for chaining.\n */\n writeOpCode(op) {\n this.chunks.push({ op });\n return this;\n }\n /**\n * @method setChunkOpCode\n * Sets the opcode of a specific chunk in the script.\n * @param i - The index of the chunk.\n * @param op - The opcode to set.\n * @returns This script instance for chaining.\n */\n setChunkOpCode(i, op) {\n this.chunks[i] = { op };\n return this;\n }\n /**\n * @method writeBn\n * Appends a BigNumber to the script as an opcode.\n * @param bn - The BigNumber to append.\n * @returns This script 
instance for chaining.\n */\n writeBn(bn) {\n if (bn.cmpn(0) === _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_0) {\n this.chunks.push({\n op: _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_0\n });\n }\n else if (bn.cmpn(-1) === 0) {\n this.chunks.push({\n op: _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_1NEGATE\n });\n }\n else if (bn.cmpn(1) >= 0 && bn.cmpn(16) <= 0) {\n // see OP_1 - OP_16\n this.chunks.push({\n op: bn.toNumber() + _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_1 - 1\n });\n }\n else {\n const buf = bn.toSm('little');\n this.writeBin(buf);\n }\n return this;\n }\n /**\n * @method writeBin\n * Appends binary data to the script, determining the appropriate opcode based on length.\n * @param bin - The binary data to append.\n * @returns This script instance for chaining.\n * @throws {Error} Throws an error if the data is too large to be pushed.\n */\n writeBin(bin) {\n let op;\n const data = bin.length > 0 ? bin : undefined;\n if (bin.length > 0 && bin.length < _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA1) {\n op = bin.length;\n }\n else if (bin.length === 0) {\n op = _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_0;\n }\n else if (bin.length < Math.pow(2, 8)) {\n op = _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA1;\n }\n else if (bin.length < Math.pow(2, 16)) {\n op = _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA2;\n }\n else if (bin.length < Math.pow(2, 32)) {\n op = _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA4;\n }\n else {\n throw new Error(\"You can't push that much data\");\n }\n this.chunks.push({\n data,\n op\n });\n return this;\n }\n /**\n * @method writeNumber\n * Appends a number to the script.\n * @param num - The number to append.\n * @returns This script instance for chaining.\n */\n writeNumber(num) {\n this.writeBn(new _primitives_BigNumber_js__WEBPACK_IMPORTED_MODULE_2__[\"default\"](num));\n return this;\n }\n /**\n * @method 
removeCodeseparators\n * Removes all OP_CODESEPARATOR opcodes from the script.\n * @returns This script instance for chaining.\n */\n removeCodeseparators() {\n const chunks = [];\n for (let i = 0; i < this.chunks.length; i++) {\n if (this.chunks[i].op !== _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_CODESEPARATOR) {\n chunks.push(this.chunks[i]);\n }\n }\n this.chunks = chunks;\n return this;\n }\n /**\n * Deletes the given item wherever it appears in the current script.\n *\n * @param script - The script containing the item to delete from the current script.\n *\n * @returns This script instance for chaining.\n */\n findAndDelete(script) {\n const buf = script.toHex();\n for (let i = 0; i < this.chunks.length; i++) {\n const script2 = new Script([this.chunks[i]]);\n const buf2 = script2.toHex();\n if (buf === buf2) {\n this.chunks.splice(i, 1);\n }\n }\n return this;\n }\n /**\n * @method isPushOnly\n * Checks if the script contains only push data operations.\n * @returns True if the script is push-only, otherwise false.\n */\n isPushOnly() {\n for (let i = 0; i < this.chunks.length; i++) {\n const chunk = this.chunks[i];\n const opCodeNum = chunk.op;\n if (opCodeNum > _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_16) {\n return false;\n }\n }\n return true;\n }\n /**\n * @method isLockingScript\n * Determines if the script is a locking script.\n * @returns True if the script is a locking script, otherwise false.\n */\n isLockingScript() {\n throw new Error('Not implemented');\n }\n /**\n * @method isUnlockingScript\n * Determines if the script is an unlocking script.\n * @returns True if the script is an unlocking script, otherwise false.\n */\n isUnlockingScript() {\n throw new Error('Not implemented');\n }\n /**\n * @private\n * @method _chunkToString\n * Converts a script chunk to its string representation.\n * @param chunk - The script chunk.\n * @returns The string representation of the chunk.\n */\n _chunkToString(chunk) {\n const op = 
chunk.op;\n let str = '';\n if (typeof chunk.data === 'undefined') {\n const val = _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"][op];\n str = `${str} ${val}`;\n }\n else {\n str = `${str} ${(0,_primitives_utils_js__WEBPACK_IMPORTED_MODULE_1__.toHex)(chunk.data)}`;\n }\n return str;\n }\n}\n//# sourceMappingURL=Script.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/sdk/dist/esm/src/script/Script.js?\n}"); +eval("{__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ \"default\": () => (/* binding */ Script)\n/* harmony export */ });\n/* harmony import */ var _OP_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./OP.js */ \"./node_modules/@bsv/sdk/dist/esm/src/script/OP.js\");\n/* harmony import */ var _primitives_utils_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../primitives/utils.js */ \"./node_modules/@bsv/sdk/dist/esm/src/primitives/utils.js\");\n/* harmony import */ var _primitives_BigNumber_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../primitives/BigNumber.js */ \"./node_modules/@bsv/sdk/dist/esm/src/primitives/BigNumber.js\");\n\n\n\n/**\n * The Script class represents a script in a Bitcoin SV transaction,\n * encapsulating the functionality to construct, parse, and serialize\n * scripts used in both locking (output) and unlocking (input) scripts.\n *\n * @property {ScriptChunk[]} chunks - An array of script chunks that make up the script.\n */\nclass Script {\n chunks;\n /**\n * @method fromASM\n * Static method to construct a Script instance from an ASM (Assembly) formatted string.\n * @param asm - The script in ASM string format.\n * @returns A new Script instance.\n * @example\n * const script = Script.fromASM(\"OP_DUP OP_HASH160 abcd... 
OP_EQUALVERIFY OP_CHECKSIG\")\n */\n static fromASM(asm) {\n const chunks = [];\n const tokens = asm.split(' ');\n let i = 0;\n while (i < tokens.length) {\n const token = tokens[i];\n let opCode;\n let opCodeNum = 0;\n if (token.startsWith('OP_') && typeof _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"][token] !== 'undefined') {\n opCode = token;\n opCodeNum = _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"][token];\n }\n // we start with two special cases, 0 and -1, which are handled specially in\n // toASM. see _chunkToString.\n if (token === '0') {\n opCodeNum = 0;\n chunks.push({\n op: opCodeNum\n });\n i = i + 1;\n }\n else if (token === '-1') {\n opCodeNum = _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_1NEGATE;\n chunks.push({\n op: opCodeNum\n });\n i = i + 1;\n }\n else if (opCode === undefined) {\n let hex = tokens[i];\n if (hex.length % 2 !== 0) {\n hex = '0' + hex;\n }\n const arr = (0,_primitives_utils_js__WEBPACK_IMPORTED_MODULE_1__.toArray)(hex, 'hex');\n if ((0,_primitives_utils_js__WEBPACK_IMPORTED_MODULE_1__.encode)(arr, 'hex') !== hex) {\n throw new Error('invalid hex string in script');\n }\n const len = arr.length;\n if (len >= 0 && len < _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA1) {\n opCodeNum = len;\n }\n else if (len < Math.pow(2, 8)) {\n opCodeNum = _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA1;\n }\n else if (len < Math.pow(2, 16)) {\n opCodeNum = _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA2;\n }\n else if (len < Math.pow(2, 32)) {\n opCodeNum = _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA4;\n }\n chunks.push({\n data: arr,\n op: opCodeNum\n });\n i = i + 1;\n }\n else if (opCodeNum === _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA1 ||\n opCodeNum === _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA2 ||\n opCodeNum === _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA4) {\n chunks.push({\n data: 
(0,_primitives_utils_js__WEBPACK_IMPORTED_MODULE_1__.toArray)(tokens[i + 2], 'hex'),\n op: opCodeNum\n });\n i = i + 3;\n }\n else {\n chunks.push({\n op: opCodeNum\n });\n i = i + 1;\n }\n }\n return new Script(chunks);\n }\n /**\n * @method fromHex\n * Static method to construct a Script instance from a hexadecimal string.\n * @param hex - The script in hexadecimal format.\n * @returns A new Script instance.\n * @example\n * const script = Script.fromHex(\"76a9...\");\n */\n static fromHex(hex) {\n if (hex.length === 0)\n return Script.fromBinary([]);\n if (hex.length % 2 !== 0) {\n throw new Error('There is an uneven number of characters in the string which suggests it is not hex encoded.');\n }\n if (!/^[0-9a-fA-F]+$/.test(hex)) {\n throw new Error('Some elements in this string are not hex encoded.');\n }\n return Script.fromBinary((0,_primitives_utils_js__WEBPACK_IMPORTED_MODULE_1__.toArray)(hex, 'hex'));\n }\n /**\n * @method fromBinary\n * Static method to construct a Script instance from a binary array.\n * @param bin - The script in binary array format.\n * @returns A new Script instance.\n * @example\n * const script = Script.fromBinary([0x76, 0xa9, ...])\n */\n static fromBinary(bin) {\n bin = [...bin];\n const chunks = [];\n let inConditionalBlock = 0;\n const br = new _primitives_utils_js__WEBPACK_IMPORTED_MODULE_1__.Reader(bin);\n while (!br.eof()) {\n const op = br.readUInt8();\n // if OP_RETURN and not in a conditional block, do not parse the rest of the data,\n // rather just return the last chunk as data without prefixing with data length.\n if (op === _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_RETURN && inConditionalBlock === 0) {\n chunks.push({\n op,\n data: br.read()\n });\n break;\n }\n if (op === _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_IF || op === _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_NOTIF || op === _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_VERIF || op === 
_OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_VERNOTIF) {\n inConditionalBlock++;\n }\n else if (op === _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_ENDIF) {\n inConditionalBlock--;\n }\n let len = 0;\n // eslint-disable-next-line @typescript-eslint/no-shadow\n let data = [];\n if (op > 0 && op < _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA1) {\n len = op;\n chunks.push({\n data: br.read(len),\n op\n });\n }\n else if (op === _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA1) {\n try {\n len = br.readUInt8();\n data = br.read(len);\n }\n catch {\n br.read();\n }\n chunks.push({\n data,\n op\n });\n }\n else if (op === _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA2) {\n try {\n len = br.readUInt16LE();\n data = br.read(len);\n }\n catch {\n br.read();\n }\n chunks.push({\n data,\n op\n });\n }\n else if (op === _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA4) {\n try {\n len = br.readUInt32LE();\n data = br.read(len);\n }\n catch {\n br.read();\n }\n chunks.push({\n data,\n op\n });\n }\n else {\n chunks.push({\n op\n });\n }\n }\n return new Script(chunks);\n }\n /**\n * @constructor\n * Constructs a new Script object.\n * @param chunks=[] - An array of script chunks to directly initialize the script.\n */\n constructor(chunks = []) {\n this.chunks = chunks;\n }\n /**\n * @method toASM\n * Serializes the script to an ASM formatted string.\n * @returns The script in ASM string format.\n */\n toASM() {\n let str = '';\n for (let i = 0; i < this.chunks.length; i++) {\n const chunk = this.chunks[i];\n str += this._chunkToString(chunk);\n }\n return str.slice(1);\n }\n /**\n * @method toHex\n * Serializes the script to a hexadecimal string.\n * @returns The script in hexadecimal format.\n */\n toHex() {\n return (0,_primitives_utils_js__WEBPACK_IMPORTED_MODULE_1__.encode)(this.toBinary(), 'hex');\n }\n /**\n * @method toBinary\n * Serializes the script to a binary array.\n * @returns The script in 
binary array format.\n */\n toBinary() {\n const writer = new _primitives_utils_js__WEBPACK_IMPORTED_MODULE_1__.Writer();\n for (let i = 0; i < this.chunks.length; i++) {\n const chunk = this.chunks[i];\n const op = chunk.op;\n writer.writeUInt8(op);\n if (op === _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_RETURN && chunk.data != null) { // special case for unformatted data\n writer.write(chunk.data);\n break;\n }\n else if (chunk.data != null) {\n if (op < _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA1) {\n writer.write(chunk.data);\n }\n else if (op === _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA1) {\n writer.writeUInt8(chunk.data.length);\n writer.write(chunk.data);\n }\n else if (op === _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA2) {\n writer.writeUInt16LE(chunk.data.length);\n writer.write(chunk.data);\n }\n else if (op === _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA4) {\n writer.writeUInt32LE(chunk.data.length);\n writer.write(chunk.data);\n }\n }\n }\n return writer.toArray();\n }\n /**\n * @method writeScript\n * Appends another script to this script.\n * @param script - The script to append.\n * @returns This script instance for chaining.\n */\n writeScript(script) {\n this.chunks = this.chunks.concat(script.chunks);\n return this;\n }\n /**\n * @method writeOpCode\n * Appends an opcode to the script.\n * @param op - The opcode to append.\n * @returns This script instance for chaining.\n */\n writeOpCode(op) {\n this.chunks.push({ op });\n return this;\n }\n /**\n * @method setChunkOpCode\n * Sets the opcode of a specific chunk in the script.\n * @param i - The index of the chunk.\n * @param op - The opcode to set.\n * @returns This script instance for chaining.\n */\n setChunkOpCode(i, op) {\n this.chunks[i] = { op };\n return this;\n }\n /**\n * @method writeBn\n * Appends a BigNumber to the script as an opcode.\n * @param bn - The BigNumber to append.\n * @returns This script 
instance for chaining.\n */\n writeBn(bn) {\n if (bn.cmpn(0) === _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_0) {\n this.chunks.push({\n op: _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_0\n });\n }\n else if (bn.cmpn(-1) === 0) {\n this.chunks.push({\n op: _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_1NEGATE\n });\n }\n else if (bn.cmpn(1) >= 0 && bn.cmpn(16) <= 0) {\n // see OP_1 - OP_16\n this.chunks.push({\n op: bn.toNumber() + _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_1 - 1\n });\n }\n else {\n const buf = bn.toSm('little');\n this.writeBin(buf);\n }\n return this;\n }\n /**\n * @method writeBin\n * Appends binary data to the script, determining the appropriate opcode based on length.\n * @param bin - The binary data to append.\n * @returns This script instance for chaining.\n * @throws {Error} Throws an error if the data is too large to be pushed.\n */\n writeBin(bin) {\n let op;\n if (bin.length > 0 && bin.length < _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA1) {\n op = bin.length;\n }\n else if (bin.length === 0) {\n op = _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_0;\n }\n else if (bin.length < Math.pow(2, 8)) {\n op = _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA1;\n }\n else if (bin.length < Math.pow(2, 16)) {\n op = _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA2;\n }\n else if (bin.length < Math.pow(2, 32)) {\n op = _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_PUSHDATA4;\n }\n else {\n throw new Error(\"You can't push that much data\");\n }\n this.chunks.push({\n data: bin,\n op\n });\n return this;\n }\n /**\n * @method writeNumber\n * Appends a number to the script.\n * @param num - The number to append.\n * @returns This script instance for chaining.\n */\n writeNumber(num) {\n this.writeBn(new _primitives_BigNumber_js__WEBPACK_IMPORTED_MODULE_2__[\"default\"](num));\n return this;\n }\n /**\n * @method removeCodeseparators\n * Removes all OP_CODESEPARATOR 
opcodes from the script.\n * @returns This script instance for chaining.\n */\n removeCodeseparators() {\n const chunks = [];\n for (let i = 0; i < this.chunks.length; i++) {\n if (this.chunks[i].op !== _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_CODESEPARATOR) {\n chunks.push(this.chunks[i]);\n }\n }\n this.chunks = chunks;\n return this;\n }\n /**\n * Deletes the given item wherever it appears in the current script.\n *\n * @param script - The script containing the item to delete from the current script.\n *\n * @returns This script instance for chaining.\n */\n findAndDelete(script) {\n const buf = script.toHex();\n for (let i = 0; i < this.chunks.length; i++) {\n const script2 = new Script([this.chunks[i]]);\n const buf2 = script2.toHex();\n if (buf === buf2) {\n this.chunks.splice(i, 1);\n }\n }\n return this;\n }\n /**\n * @method isPushOnly\n * Checks if the script contains only push data operations.\n * @returns True if the script is push-only, otherwise false.\n */\n isPushOnly() {\n for (let i = 0; i < this.chunks.length; i++) {\n const chunk = this.chunks[i];\n const opCodeNum = chunk.op;\n if (opCodeNum > _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"].OP_16) {\n return false;\n }\n }\n return true;\n }\n /**\n * @method isLockingScript\n * Determines if the script is a locking script.\n * @returns True if the script is a locking script, otherwise false.\n */\n isLockingScript() {\n throw new Error('Not implemented');\n }\n /**\n * @method isUnlockingScript\n * Determines if the script is an unlocking script.\n * @returns True if the script is an unlocking script, otherwise false.\n */\n isUnlockingScript() {\n throw new Error('Not implemented');\n }\n /**\n * @private\n * @method _chunkToString\n * Converts a script chunk to its string representation.\n * @param chunk - The script chunk.\n * @returns The string representation of the chunk.\n */\n _chunkToString(chunk) {\n const op = chunk.op;\n let str = '';\n if (typeof chunk.data === 
'undefined') {\n const val = _OP_js__WEBPACK_IMPORTED_MODULE_0__[\"default\"][op];\n str = `${str} ${val}`;\n }\n else {\n str = `${str} ${(0,_primitives_utils_js__WEBPACK_IMPORTED_MODULE_1__.toHex)(chunk.data)}`;\n }\n return str;\n }\n}\n//# sourceMappingURL=Script.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/sdk/dist/esm/src/script/Script.js?\n}"); /***/ }), @@ -2739,7 +2750,29 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.CWIStyleWalletManager = exports.OverlayUMPTokenInteractor = exports.DEFAULT_PROFILE_ID = exports.PBKDF2_NUM_ROUNDS = void 0;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst PrivilegedKeyManager_1 = __webpack_require__(/*! ./sdk/PrivilegedKeyManager */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/PrivilegedKeyManager.js\");\n/**\n * Number of rounds used in PBKDF2 for deriving password keys.\n */\nexports.PBKDF2_NUM_ROUNDS = 7777;\n/**\n * PBKDF-2 that prefers the browser / Node 20+ WebCrypto implementation and\n * silently falls back to the existing JS code.\n *\n * @param passwordBytes Raw password bytes.\n * @param salt Salt bytes.\n * @param iterations Number of rounds.\n * @param keyLen Desired key length in bytes.\n * @param hash Digest algorithm (default \"sha512\").\n * @returns Derived key bytes.\n */\nasync function pbkdf2NativeOrJs(passwordBytes, salt, iterations, keyLen, hash = 'sha512') {\n var _a;\n // ----- fast-path: WebCrypto (both browser & recent Node expose globalThis.crypto.subtle)\n const subtle = (_a = globalThis === null || globalThis === void 0 ? void 0 : globalThis.crypto) === null || _a === void 0 ? 
void 0 : _a.subtle;\n if (subtle) {\n try {\n const baseKey = await subtle.importKey('raw', new Uint8Array(passwordBytes), { name: 'PBKDF2' }, \n /*extractable*/ false, ['deriveBits']);\n const bits = await subtle.deriveBits({\n name: 'PBKDF2',\n salt: new Uint8Array(salt),\n iterations,\n hash: hash.toUpperCase()\n }, baseKey, keyLen * 8);\n return Array.from(new Uint8Array(bits));\n }\n catch (err) {\n //console.warn('[pbkdf2] WebCrypto path failed → falling back to JS implementation', err)\n /* fall through */\n }\n }\n // ----- slow-path: old JavaScript implementation\n return sdk_1.Hash.pbkdf2(passwordBytes, salt, iterations, keyLen, hash);\n}\n/**\n * Unique Identifier for the default profile (16 zero bytes).\n */\nexports.DEFAULT_PROFILE_ID = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];\n/**\n * @class OverlayUMPTokenInteractor\n *\n * A concrete implementation of the UMPTokenInteractor interface that interacts\n * with Overlay Services and the UMP (User Management Protocol) topic. 
This class\n * is responsible for:\n *\n * 1) Locating UMP tokens via overlay lookups (ls_users).\n * 2) Creating and publishing new or updated UMP token outputs on-chain under\n * the \"tm_users\" topic.\n * 3) Consuming (spending) an old token if provided.\n */\nclass OverlayUMPTokenInteractor {\n /**\n * Construct a new OverlayUMPTokenInteractor.\n *\n * @param resolver A LookupResolver instance for performing overlay queries (ls_users).\n * @param broadcaster A SHIPBroadcaster instance for sharing new or updated tokens across the `tm_users` overlay.\n */\n constructor(resolver = new sdk_1.LookupResolver(), broadcaster = new sdk_1.SHIPBroadcaster(['tm_users'])) {\n this.resolver = resolver;\n this.broadcaster = broadcaster;\n }\n /**\n * Finds a UMP token on-chain by the given presentation key hash, if it exists.\n * Uses the ls_users overlay service to perform the lookup.\n *\n * @param hash The 32-byte SHA-256 hash of the presentation key.\n * @returns A UMPToken object (including currentOutpoint) if found, otherwise undefined.\n */\n async findByPresentationKeyHash(hash) {\n // Query ls_users for the given presentationHash\n const question = {\n service: 'ls_users',\n query: { presentationHash: sdk_1.Utils.toHex(hash) }\n };\n const answer = await this.resolver.query(question);\n return this.parseLookupAnswer(answer);\n }\n /**\n * Finds a UMP token on-chain by the given recovery key hash, if it exists.\n * Uses the ls_users overlay service to perform the lookup.\n *\n * @param hash The 32-byte SHA-256 hash of the recovery key.\n * @returns A UMPToken object (including currentOutpoint) if found, otherwise undefined.\n */\n async findByRecoveryKeyHash(hash) {\n const question = {\n service: 'ls_users',\n query: { recoveryHash: sdk_1.Utils.toHex(hash) }\n };\n const answer = await this.resolver.query(question);\n return this.parseLookupAnswer(answer);\n }\n /**\n * Creates or updates (replaces) a UMP token on-chain. 
If `oldTokenToConsume` is provided,\n * it is spent in the same transaction that creates the new token output. The new token is\n * then broadcast and published under the `tm_users` topic using a SHIP broadcast, ensuring\n * overlay participants see the updated token.\n *\n * @param wallet The wallet used to build and sign the transaction (MUST be operating under the DEFAULT profile).\n * @param adminOriginator The domain/FQDN of the administrative originator (wallet operator).\n * @param token The new UMPToken to create on-chain.\n * @param oldTokenToConsume Optionally, an existing token to consume/spend in the same transaction.\n * @returns The outpoint of the newly created UMP token (e.g. \"abcd1234...ef.0\").\n */\n async buildAndSend(wallet, // This wallet MUST be the one built for the default profile\n adminOriginator, token, oldTokenToConsume) {\n // 1) Construct the data fields for the new UMP token.\n const fields = [];\n fields[0] = token.passwordSalt;\n fields[1] = token.passwordPresentationPrimary;\n fields[2] = token.passwordRecoveryPrimary;\n fields[3] = token.presentationRecoveryPrimary;\n fields[4] = token.passwordPrimaryPrivileged;\n fields[5] = token.presentationRecoveryPrivileged;\n fields[6] = token.presentationHash;\n fields[7] = token.recoveryHash;\n fields[8] = token.presentationKeyEncrypted;\n fields[9] = token.passwordKeyEncrypted;\n fields[10] = token.recoveryKeyEncrypted;\n // Optional field (11) for encrypted profiles\n if (token.profilesEncrypted) {\n fields[11] = token.profilesEncrypted;\n }\n // 2) Create a PushDrop script referencing these fields, locked with the admin key.\n const script = await new sdk_1.PushDrop(wallet, adminOriginator).lock(fields, [2, 'admin user management token'], // protocolID\n '1', // keyID\n 'self', // counterparty\n /*forSelf=*/ true, \n /*includeSignature=*/ true);\n // 3) Prepare the createAction call. 
If oldTokenToConsume is provided, gather the outpoint.\n const inputs = [];\n let inputToken;\n if (oldTokenToConsume === null || oldTokenToConsume === void 0 ? void 0 : oldTokenToConsume.currentOutpoint) {\n inputToken = await this.findByOutpoint(oldTokenToConsume.currentOutpoint);\n // If there is no token on the overlay, we can't consume it. Just start over with a new token.\n if (!inputToken) {\n oldTokenToConsume = undefined;\n // Otherwise, add the input\n }\n else {\n inputs.push({\n outpoint: oldTokenToConsume.currentOutpoint,\n unlockingScriptLength: 73, // typical signature length\n inputDescription: 'Consume old UMP token'\n });\n }\n }\n const outputs = [\n {\n lockingScript: script.toHex(),\n satoshis: 1,\n outputDescription: 'New UMP token output'\n }\n ];\n // 4) Build the partial transaction via createAction.\n let createResult;\n try {\n createResult = await wallet.createAction({\n description: oldTokenToConsume ? 'Renew UMP token (consume old, create new)' : 'Create new UMP token',\n inputs,\n outputs,\n inputBEEF: inputToken === null || inputToken === void 0 ? void 0 : inputToken.beef,\n options: {\n randomizeOutputs: false,\n acceptDelayedBroadcast: false\n }\n }, adminOriginator);\n }\n catch (e) {\n console.error('Error with UMP token update. Attempting a last-ditch effort to get a new one', e);\n createResult = await wallet.createAction({\n description: 'Recover UMP token',\n outputs,\n options: {\n randomizeOutputs: false,\n acceptDelayedBroadcast: false\n }\n }, adminOriginator);\n }\n // If the transaction is fully processed by the wallet\n if (!createResult.signableTransaction) {\n const finalTxid = createResult.txid || (createResult.tx ? 
sdk_1.Transaction.fromAtomicBEEF(createResult.tx).id('hex') : undefined);\n if (!finalTxid) {\n throw new Error('No signableTransaction and no final TX found.');\n }\n // Now broadcast to `tm_users` using SHIP\n const broadcastTx = sdk_1.Transaction.fromAtomicBEEF(createResult.tx);\n const result = await this.broadcaster.broadcast(broadcastTx);\n console.log('BROADCAST RESULT', result);\n return `${finalTxid}.0`;\n }\n // 5) If oldTokenToConsume is present, we must sign the input referencing it.\n // (If there's no old token, there's nothing to sign for the input.)\n let finalTxid = '';\n const reference = createResult.signableTransaction.reference;\n const partialTx = sdk_1.Transaction.fromBEEF(createResult.signableTransaction.tx);\n if (oldTokenToConsume === null || oldTokenToConsume === void 0 ? void 0 : oldTokenToConsume.currentOutpoint) {\n // Unlock the old token with a matching PushDrop unlocker\n const unlocker = new sdk_1.PushDrop(wallet, adminOriginator).unlock([2, 'admin user management token'], '1', 'self');\n const unlockingScript = await unlocker.sign(partialTx, 0);\n // Provide it to the wallet\n const signResult = await wallet.signAction({\n reference,\n spends: {\n 0: {\n unlockingScript: unlockingScript.toHex()\n }\n }\n }, adminOriginator);\n finalTxid = signResult.txid || (signResult.tx ? 
sdk_1.Transaction.fromAtomicBEEF(signResult.tx).id('hex') : '');\n if (!finalTxid) {\n throw new Error('Could not finalize transaction for renewed UMP token.');\n }\n // 6) Broadcast to `tm_users`\n const finalAtomicTx = signResult.tx;\n if (!finalAtomicTx) {\n throw new Error('Final transaction data missing after signing renewed UMP token.');\n }\n const broadcastTx = sdk_1.Transaction.fromAtomicBEEF(finalAtomicTx);\n const result = await this.broadcaster.broadcast(broadcastTx);\n console.log('BROADCAST RESULT', result);\n return `${finalTxid}.0`;\n }\n else {\n // Fallback for creating a new token (no input spending)\n const signResult = await wallet.signAction({ reference, spends: {} }, adminOriginator);\n finalTxid = signResult.txid || (signResult.tx ? sdk_1.Transaction.fromAtomicBEEF(signResult.tx).id('hex') : '');\n if (!finalTxid) {\n throw new Error('Failed to finalize new UMP token transaction.');\n }\n const finalAtomicTx = signResult.tx;\n if (!finalAtomicTx) {\n throw new Error('Final transaction data missing after signing new UMP token.');\n }\n const broadcastTx = sdk_1.Transaction.fromAtomicBEEF(finalAtomicTx);\n const result = await this.broadcaster.broadcast(broadcastTx);\n console.log('BROADCAST RESULT', result);\n return `${finalTxid}.0`;\n }\n }\n /**\n * Attempts to parse a LookupAnswer from the UMP lookup service. 
If successful,
     * extracts the token fields from the resulting transaction and constructs
     * a UMPToken object.
     *
     * @param answer The LookupAnswer returned by a query to ls_users.
     * @returns The parsed UMPToken or `undefined` if none found/decodable.
     */
    parseLookupAnswer(answer) {
        // _a is a TypeScript-compiler temp used by the optional-chaining emit below (generated code).
        var _a;
        if (answer.type !== 'output-list') {
            return undefined;
        }
        if (!answer.outputs || answer.outputs.length === 0) {
            return undefined;
        }
        // Only the first output is considered; NOTE(review): presumably the lookup service
        // returns at most one live token per query — confirm against ls_users semantics.
        const { beef, outputIndex } = answer.outputs[0];
        try {
            const tx = sdk_1.Transaction.fromBEEF(beef);
            // Outpoint strings are "<txid>.<outputIndex>" throughout this file.
            const outpoint = `${tx.id('hex')}.${outputIndex}`;
            const decoded = sdk_1.PushDrop.decode(tx.outputs[outputIndex].lockingScript);
            // Expecting 11 or more fields for UMP
            if (!decoded.fields || decoded.fields.length < 11) {
                console.warn(`Unexpected number of fields in UMP token: ${(_a = decoded.fields) === null || _a === void 0 ? void 0 : _a.length}`);
                return undefined;
            }
            // Build the UMP token from these fields, preserving outpoint
            const t = {
                // Order matches buildAndSend and serialize/deserialize
                passwordSalt: decoded.fields[0],
                passwordPresentationPrimary: decoded.fields[1],
                passwordRecoveryPrimary: decoded.fields[2],
                presentationRecoveryPrimary: decoded.fields[3],
                passwordPrimaryPrivileged: decoded.fields[4],
                presentationRecoveryPrivileged: decoded.fields[5],
                presentationHash: decoded.fields[6],
                recoveryHash: decoded.fields[7],
                presentationKeyEncrypted: decoded.fields[8],
                passwordKeyEncrypted: decoded.fields[9],
                recoveryKeyEncrypted: decoded.fields[10],
                // Field 11 only holds the optional encrypted profiles blob when a 13th field
                // (index 12, presumably a trailing signature) is present — TODO confirm against
                // the PushDrop locking code that builds these tokens.
                profilesEncrypted: decoded.fields[12] ? decoded.fields[11] : undefined, // If there's a signature in field 12, use field 11
                currentOutpoint: outpoint
            };
            return t;
        }
        catch (e) {
            console.error('Failed to parse or decode UMP token:', e);
            return undefined;
        }
    }
    /**
     * Finds by outpoint for unlocking / spending previous tokens.
     * @param outpoint The outpoint we are searching by
     * @returns The result so that we can use it to unlock the transaction
     */
    async findByOutpoint(outpoint) {
        const results = await this.resolver.query({
            service: 'ls_users',
            query: {
                outpoint
            }
        });
        // Any non-output-list or empty answer is treated as "not found" rather than an error.
        if (results.type !== 'output-list') {
            return undefined;
        }
        if (!results.outputs || !results.outputs.length) {
            return undefined;
        }
        return results.outputs[0];
    }
}
exports.OverlayUMPTokenInteractor = OverlayUMPTokenInteractor;
/**
 * Manages a "CWI-style" wallet that uses a UMP token and a
 * multi-key authentication scheme (password, presentation key, and recovery key),
 * supporting multiple user profiles under a single account.
 */
class CWIStyleWalletManager {
    /**
     * Constructs a new CWIStyleWalletManager.
     *
     * @param adminOriginator The domain name of the administrative originator.
     * @param walletBuilder A function that can build an underlying wallet instance for a profile.
     * @param interactor An instance of UMPTokenInteractor.
     * @param recoveryKeySaver A function to persist a new recovery key.
     * @param passwordRetriever A function to request the user's password.
     * @param newWalletFunder Optional function to fund a new wallet.
     * @param stateSnapshot Optional previously saved state snapshot.
     */
    constructor(adminOriginator, walletBuilder, interactor = new OverlayUMPTokenInteractor(), recoveryKeySaver, passwordRetriever, newWalletFunder, stateSnapshot) {
        /**
         * Current mode of authentication.
         */
        this.authenticationMode = 'presentation-key-and-password';
        /**
         * Indicates new user or existing user flow.
         */
        this.authenticationFlow = 'new-user';
        /**
         * The currently active profile ID (null or DEFAULT_PROFILE_ID means default profile).
         */
        this.activeProfileId = exports.DEFAULT_PROFILE_ID;
        /**
         * List of loaded non-default profiles.
         */
        this.profiles = [];
        this.adminOriginator = adminOriginator;
        this.walletBuilder = walletBuilder;
        this.UMPTokenInteractor = interactor;
        this.recoveryKeySaver = recoveryKeySaver;
        this.passwordRetriever = passwordRetriever;
        this.authenticated = false;
        this.newWalletFunder = newWalletFunder;
        // If a saved snapshot is provided, attempt to load it.
        // Note: loadSnapshot now returns a promise. We don't await it here,
        // as the constructor must be synchronous. The caller should check
        // `this.authenticated` after construction if a snapshot was provided.
        if (stateSnapshot) {
            this.loadSnapshot(stateSnapshot).catch(err => {
                console.error('Failed to load snapshot during construction:', err);
                // Clear potentially partially loaded state
                this.destroy();
            });
        }
    }
    // --- Authentication Methods ---
    /**
     * Provides the presentation key.
     * Looks the key's hash up on the overlay to decide new-user vs existing-user flow.
     */
    async providePresentationKey(key) {
        if (this.authenticated) {
            throw new Error('User is already authenticated');
        }
        if (this.authenticationMode === 'recovery-key-and-password') {
            throw new Error('Presentation key is not needed in this mode');
        }
        const hash = sdk_1.Hash.sha256(key);
        const token = await this.UMPTokenInteractor.findByPresentationKeyHash(hash);
        if (!token) {
            // No token found -> New user
            this.authenticationFlow = 'new-user';
            this.presentationKey = key;
        }
        else {
            // Found token -> existing user
            this.authenticationFlow = 'existing-user';
            this.presentationKey = key;
            this.currentUMPToken = token;
        }
    }
    /**
     * Provides the password.
     * Existing-user flow: derives the password key (PBKDF2) and XORs it with the companion
     * factor to unwrap the root keys from the UMP token.
     * New-user flow: generates all root material, wraps it, and publishes a new UMP token.
     */
    async providePassword(password) {
        if (this.authenticated) {
            throw new Error('User is already authenticated');
        }
        if (this.authenticationMode === 'presentation-key-and-recovery-key') {
            throw new Error('Password is not needed in this mode');
        }
        if (this.authenticationFlow === 'existing-user') {
            // Existing user flow
            if (!this.currentUMPToken) {
                throw new Error('Provide presentation or recovery key first.');
            }
            const derivedPasswordKey = await pbkdf2NativeOrJs(sdk_1.Utils.toArray(password, 'utf8'), this.currentUMPToken.passwordSalt, exports.PBKDF2_NUM_ROUNDS, 32, 'sha512');
            let rootPrimaryKey;
            let rootPrivilegedKey; // Only needed for recovery mode
            if (this.authenticationMode === 'presentation-key-and-password') {
                if (!this.presentationKey)
                    throw new Error('No presentation key found!');
                // presentationKey XOR passwordKey unwraps the root primary key
                const xorKey = this.XOR(this.presentationKey, derivedPasswordKey);
                rootPrimaryKey = new sdk_1.SymmetricKey(xorKey).decrypt(this.currentUMPToken.passwordPresentationPrimary);
            }
            else {
                // 'recovery-key-and-password'
                if (!this.recoveryKey)
                    throw new Error('No recovery key found!');
                const primaryDecryptionKey = this.XOR(this.recoveryKey, derivedPasswordKey);
                rootPrimaryKey = new sdk_1.SymmetricKey(primaryDecryptionKey).decrypt(this.currentUMPToken.passwordRecoveryPrimary);
                // Recovery mode also unwraps the privileged key (primaryKey XOR passwordKey)
                const privilegedDecryptionKey = this.XOR(rootPrimaryKey, derivedPasswordKey);
                rootPrivilegedKey = new sdk_1.SymmetricKey(privilegedDecryptionKey).decrypt(this.currentUMPToken.passwordPrimaryPrivileged);
            }
            // Build root infrastructure, load profiles, and switch to default profile initially
            await this.setupRootInfrastructure(rootPrimaryKey, rootPrivilegedKey);
            await this.switchProfile(this.activeProfileId);
        }
        else {
            // New user flow (only 'presentation-key-and-password')
            if (this.authenticationMode !== 'presentation-key-and-password') {
                throw new Error('New-user flow requires presentation key and password mode.');
            }
            if (!this.presentationKey) {
                throw new Error('No presentation key provided for new-user flow.');
            }
            // Generate new keys/salt
            const recoveryKey = (0, sdk_1.Random)(32);
            await this.recoveryKeySaver(recoveryKey);
            const passwordSalt = (0, sdk_1.Random)(32);
            const passwordKey = await pbkdf2NativeOrJs(sdk_1.Utils.toArray(password, 'utf8'), passwordSalt, exports.PBKDF2_NUM_ROUNDS, 32, 'sha512');
            const rootPrimaryKey = (0, sdk_1.Random)(32);
            const rootPrivilegedKey = (0, sdk_1.Random)(32);
            // Build XOR keys — each wrapper below lets a different PAIR of factors
            // (presentation/password, presentation/recovery, recovery/password) recover the root keys.
            const presentationPassword = new sdk_1.SymmetricKey(this.XOR(this.presentationKey, passwordKey));
            const presentationRecovery = new sdk_1.SymmetricKey(this.XOR(this.presentationKey, recoveryKey));
            const recoveryPassword = new sdk_1.SymmetricKey(this.XOR(recoveryKey, passwordKey));
            const primaryPassword = new sdk_1.SymmetricKey(this.XOR(rootPrimaryKey, passwordKey));
            // Temp manager for encryption
            const tempPrivilegedKeyManager = new PrivilegedKeyManager_1.PrivilegedKeyManager(async () => new sdk_1.PrivateKey(rootPrivilegedKey));
            // Build new UMP token (no profiles initially)
            const newToken = {
                passwordSalt,
                passwordPresentationPrimary: presentationPassword.encrypt(rootPrimaryKey),
                passwordRecoveryPrimary: recoveryPassword.encrypt(rootPrimaryKey),
                presentationRecoveryPrimary: presentationRecovery.encrypt(rootPrimaryKey),
                passwordPrimaryPrivileged: primaryPassword.encrypt(rootPrivilegedKey),
                presentationRecoveryPrivileged: presentationRecovery.encrypt(rootPrivilegedKey),
                presentationHash: sdk_1.Hash.sha256(this.presentationKey),
                recoveryHash: sdk_1.Hash.sha256(recoveryKey),
                presentationKeyEncrypted: (await tempPrivilegedKeyManager.encrypt({
                    plaintext: this.presentationKey,
                    protocolID: [2, 'admin key wrapping'],
                    keyID: '1'
                })).ciphertext,
                passwordKeyEncrypted: (await tempPrivilegedKeyManager.encrypt({
                    plaintext: passwordKey,
                    protocolID: [2, 'admin key wrapping'],
                    keyID: '1'
                })).ciphertext,
                recoveryKeyEncrypted: (await tempPrivilegedKeyManager.encrypt({
                    plaintext: recoveryKey,
                    protocolID: [2, 'admin key wrapping'],
                    keyID: '1'
                })).ciphertext,
                profilesEncrypted: undefined // No profiles yet
            };
            this.currentUMPToken = newToken;
            // Setup root infrastructure and switch to default profile
            await this.setupRootInfrastructure(rootPrimaryKey);
            await this.switchProfile(exports.DEFAULT_PROFILE_ID);
            // Fund the *default* wallet if funder provided
            if (this.newWalletFunder && this.underlying) {
                try {
                    await this.newWalletFunder(this.presentationKey, this.underlying, this.adminOriginator);
                }
                catch (e) {
                    console.error('Error funding new wallet:', e);
                    // Decide if this should halt the process or just log
                }
            }
            // Publish the new UMP token *after* potentially funding
            // We need the default profile wallet to sign the UMP creation TX
            if (!this.underlying) {
                throw new Error('Default profile wallet not built before attempting to publish UMP token.');
            }
            this.currentUMPToken.currentOutpoint = await this.UMPTokenInteractor.buildAndSend(this.underlying, // Use the default profile wallet
            this.adminOriginator, newToken);
        }
    }
    /**
     * Provides the recovery key.
     * Behavior depends on the mode: in 'recovery-key-and-password' it locates the user's
     * token by recovery-key hash; in 'presentation-key-and-recovery-key' it unwraps the
     * root keys directly (presentationKey XOR recoveryKey).
     */
    async provideRecoveryKey(recoveryKey) {
        if (this.authenticated) {
            throw new Error('Already authenticated');
        }
        if (this.authenticationFlow === 'new-user') {
            throw new Error('Do not submit recovery key in new-user flow');
        }
        if (this.authenticationMode === 'presentation-key-and-password') {
            throw new Error('No recovery key required in this mode');
        }
        else if (this.authenticationMode === 'recovery-key-and-password') {
            // Wait for password
            const hash = sdk_1.Hash.sha256(recoveryKey);
            const token = await this.UMPTokenInteractor.findByRecoveryKeyHash(hash);
            if (!token)
                throw new Error('No user found with this recovery key');
            this.recoveryKey = recoveryKey;
            this.currentUMPToken = token;
        }
        else {
            // 'presentation-key-and-recovery-key'
            if (!this.presentationKey)
                throw new Error('Provide the presentation key first');
            if (!this.currentUMPToken)
                throw new Error('Current UMP token not found');
            const xorKey = this.XOR(this.presentationKey, recoveryKey);
            const rootPrimaryKey = new sdk_1.SymmetricKey(xorKey).decrypt(this.currentUMPToken.presentationRecoveryPrimary);
            const rootPrivilegedKey = new sdk_1.SymmetricKey(xorKey).decrypt(this.currentUMPToken.presentationRecoveryPrivileged);
            // Build root infrastructure, load profiles, switch to default
            await this.setupRootInfrastructure(rootPrimaryKey, rootPrivilegedKey);
            await this.switchProfile(this.activeProfileId);
        }
    }
    // --- State Management Methods ---
    /**
     * Saves the current wallet state (root key, UMP token, active profile) into an encrypted snapshot.
     * Version 2 format: [1 byte version=2] + [32 byte snapshot key] + [16 byte activeProfileId] + [encrypted payload]
     * Encrypted Payload: [32 byte rootPrimaryKey] + [varint token length + serialized UMP token]
     *
     * @returns Encrypted snapshot bytes.
     */
    saveSnapshot() {
        if (!this.rootPrimaryKey || !this.currentUMPToken) {
            throw new Error('No root primary key or current UMP token set');
        }
        // The snapshot key travels in cleartext inside the snapshot itself;
        // NOTE(review): the snapshot therefore only obscures, not protects, the payload —
        // confirm callers store snapshots in secure storage.
        const snapshotKey = (0, sdk_1.Random)(32);
        const snapshotPreimageWriter = new sdk_1.Utils.Writer();
        // Write root primary key
        snapshotPreimageWriter.write(this.rootPrimaryKey);
        // Write serialized UMP token (must have outpoint)
        if (!this.currentUMPToken.currentOutpoint) {
            throw new Error('UMP token cannot be saved without a current outpoint.');
        }
        const serializedToken = this.serializeUMPToken(this.currentUMPToken);
        snapshotPreimageWriter.writeVarIntNum(serializedToken.length);
        snapshotPreimageWriter.write(serializedToken);
        // Encrypt the payload
        const snapshotPreimage = snapshotPreimageWriter.toArray();
        const snapshotPayload = new sdk_1.SymmetricKey(snapshotKey).encrypt(snapshotPreimage);
        // Build final snapshot (Version 2)
        const snapshotWriter = new sdk_1.Utils.Writer();
        snapshotWriter.writeUInt8(2); // Version
        snapshotWriter.write(snapshotKey);
        snapshotWriter.write(this.activeProfileId); // Active profile ID
        snapshotWriter.write(snapshotPayload); // Encrypted data
        return snapshotWriter.toArray();
    }
    /**
     * Loads a previously saved state snapshot. Restores root key, UMP token, profiles, and active profile.
     * Handles Version 1 (legacy) and Version 2 formats.
     *
     * @param snapshot Encrypted snapshot bytes.
     * @throws If the version is unsupported or decryption/parsing fails (state is destroyed first).
     */
    async loadSnapshot(snapshot) {
        try {
            const reader = new sdk_1.Utils.Reader(snapshot);
            const version = reader.readUInt8();
            let snapshotKey;
            let encryptedPayload;
            let activeProfileId = exports.DEFAULT_PROFILE_ID; // Default for V1
            if (version === 1) {
                snapshotKey = reader.read(32);
                encryptedPayload = reader.read();
            }
            else if (version === 2) {
                snapshotKey = reader.read(32);
                activeProfileId = reader.read(16); // Read active profile ID
                encryptedPayload = reader.read();
            }
            else {
                throw new Error(`Unsupported snapshot version: ${version}`);
            }
            // Decrypt payload
            const decryptedPayload = new sdk_1.SymmetricKey(snapshotKey).decrypt(encryptedPayload);
            const payloadReader = new sdk_1.Utils.Reader(decryptedPayload);
            // Read root primary key
            const rootPrimaryKey = payloadReader.read(32);
            // Read serialized UMP token
            const tokenLen = payloadReader.readVarIntNum();
            const tokenBytes = payloadReader.read(tokenLen);
            const token = this.deserializeUMPToken(tokenBytes);
            // Assign loaded data
            this.currentUMPToken = token;
            // Setup root infrastructure, load profiles, and switch to the loaded active profile
            await this.setupRootInfrastructure(rootPrimaryKey); // Will automatically load profiles
            await this.switchProfile(activeProfileId); // Switch to the profile saved in the snapshot
            this.authenticationFlow = 'existing-user'; // Loading implies existing user
        }
        catch (error) {
            this.destroy(); // Clear state on error
            throw new Error(`Failed to load snapshot: ${error.message}`);
        }
    }
    /**
     * Destroys the wallet state, clearing keys, tokens, and profiles.
     * Resets authentication mode/flow back to their constructor defaults.
     */
    destroy() {
        this.underlying = undefined;
        this.rootPrivilegedKeyManager = undefined;
        this.authenticated =
false;
        this.rootPrimaryKey = undefined;
        this.currentUMPToken = undefined;
        this.presentationKey = undefined;
        this.recoveryKey = undefined;
        this.profiles = [];
        this.activeProfileId = exports.DEFAULT_PROFILE_ID;
        this.authenticationMode = 'presentation-key-and-password';
        this.authenticationFlow = 'new-user';
    }
    // --- Profile Management Methods ---
    /**
     * Lists all available profiles, including the default profile.
     * The default profile ID is the all-zero 16-byte array (hence the every(x => x === 0) checks).
     * @returns Array of profile info objects, including an 'active' flag.
     */
    listProfiles() {
        if (!this.authenticated) {
            throw new Error('Not authenticated.');
        }
        const profileList = [
            // Default profile
            {
                id: exports.DEFAULT_PROFILE_ID,
                name: 'default',
                createdAt: null, // Default profile doesn't have a creation timestamp in the same way
                active: this.activeProfileId.every(x => x === 0)
            },
            // Other profiles
            ...this.profiles.map(p => ({
                id: p.id,
                name: p.name,
                createdAt: p.createdAt,
                active: this.activeProfileId.every((x, i) => x === p.id[i])
            }))
        ];
        return profileList;
    }
    /**
     * Adds a new profile with the given name.
     * Generates necessary pads and updates the UMP token.
     * Does not switch to the new profile automatically.
     *
     * @param name The desired name for the new profile.
     * @returns The ID of the newly created profile.
     */
    async addProfile(name) {
        if (!this.authenticated || !this.rootPrimaryKey || !this.currentUMPToken || !this.rootPrivilegedKeyManager) {
            throw new Error('Wallet not fully initialized or authenticated.');
        }
        // Ensure name is unique (including 'default'); comparison is case-insensitive
        if (name === 'default' || this.profiles.some(p => p.name.toLowerCase() === name.toLowerCase())) {
            throw new Error(`Profile name "${name}" is already in use.`);
        }
        // Pads are random 32-byte masks XORed onto the root keys to derive per-profile keys
        const newProfile = {
            name,
            id: (0, sdk_1.Random)(16),
            primaryPad: (0, sdk_1.Random)(32),
            privilegedPad: (0, sdk_1.Random)(32),
            createdAt: Math.floor(Date.now() / 1000)
        };
        this.profiles.push(newProfile);
        // Update the UMP token with the new profile list
        await this.updateAuthFactors(this.currentUMPToken.passwordSalt, 
        // Need to re-derive/decrypt factors needed for re-encryption
        await this.getFactor('passwordKey'), await this.getFactor('presentationKey'), await this.getFactor('recoveryKey'), this.rootPrimaryKey, await this.getFactor('privilegedKey'), // Get ROOT privileged key
        this.profiles // Pass the updated profile list
        );
        return newProfile.id;
    }
    /**
     * Deletes a profile by its ID.
     * Cannot delete the default profile. If the active profile is deleted,
     * it switches back to the default profile.
     *
     * @param profileId The 16-byte ID of the profile to delete.
     */
    async deleteProfile(profileId) {
        if (!this.authenticated || !this.rootPrimaryKey || !this.currentUMPToken || !this.rootPrivilegedKeyManager) {
            throw new Error('Wallet not fully initialized or authenticated.');
        }
        // All-zero ID is the default profile (same convention as listProfiles/switchProfile)
        if (profileId.every(x => x === 0)) {
            throw new Error('Cannot delete the default profile.');
        }
        const profileIndex = this.profiles.findIndex(p => p.id.every((x, i) => x === profileId[i]));
        if (profileIndex === -1) {
            throw new Error('Profile not found.');
        }
        // Remove the profile
        this.profiles.splice(profileIndex, 1);
        // If the deleted profile was active, switch to default
        if (this.activeProfileId.every((x, i) => x === profileId[i])) {
            await this.switchProfile(exports.DEFAULT_PROFILE_ID); // This rebuilds the wallet
        }
        // Update the UMP token
        await this.updateAuthFactors(this.currentUMPToken.passwordSalt, await this.getFactor('passwordKey'), await this.getFactor('presentationKey'), await this.getFactor('recoveryKey'), this.rootPrimaryKey, await this.getFactor('privilegedKey'), // Get ROOT privileged key
        this.profiles // Pass updated list
        );
    }
    /**
     * Switches the active profile. This re-derives keys and rebuilds the underlying wallet.
     * Non-default profile keys = root keys XOR the per-profile pads generated in addProfile.
     *
     * @param profileId The 16-byte ID of the profile to switch to (use DEFAULT_PROFILE_ID for default).
     */
    async switchProfile(profileId) {
        if (!this.authenticated || !this.rootPrimaryKey || !this.rootPrivilegedKeyManager) {
            throw new Error('Cannot switch profile: Wallet not authenticated or root keys missing.');
        }
        let profilePrimaryKey;
        let profilePrivilegedPad; // Pad for the target profile
        if (profileId.every(x => x === 0)) {
            // Switching to default profile
            profilePrimaryKey = this.rootPrimaryKey;
            profilePrivilegedPad = undefined; // No pad for default
            this.activeProfileId = exports.DEFAULT_PROFILE_ID;
        }
        else {
            // Switching to a non-default profile
            const profile = this.profiles.find(p => p.id.every((x, i) => x === profileId[i]));
            if (!profile) {
                throw new Error('Profile not found.');
            }
            profilePrimaryKey = this.XOR(this.rootPrimaryKey, profile.primaryPad);
            profilePrivilegedPad = profile.privilegedPad;
            this.activeProfileId = profileId;
        }
        // Create a *profile-specific* PrivilegedKeyManager.
        // It uses the ROOT manager internally but applies the profile's pad.
        const profilePrivilegedKeyManager = new PrivilegedKeyManager_1.PrivilegedKeyManager(async (reason) => {
            // Request the ROOT privileged key using the root manager
            const rootPrivileged = await this.rootPrivilegedKeyManager.getPrivilegedKey(reason);
            const rootPrivilegedBytes = rootPrivileged.toArray();
            // Apply the profile's pad if applicable
            const profilePrivilegedBytes = profilePrivilegedPad
                ?
                this.XOR(rootPrivilegedBytes, profilePrivilegedPad)
                : rootPrivilegedBytes;
            return new sdk_1.PrivateKey(profilePrivilegedBytes);
        });
        // Build the underlying wallet for the specific profile
        this.underlying = await this.walletBuilder(profilePrimaryKey, profilePrivilegedKeyManager, // Pass the profile-specific manager
        this.activeProfileId // Pass the ID of the profile being activated
        );
    }
    // --- Key Management Methods ---
    /**
     * Changes the user's password. Re-wraps keys and updates the UMP token.
     * A fresh salt is generated, so the stored passwordSalt changes too.
     */
    async changePassword(newPassword) {
        if (!this.authenticated || !this.currentUMPToken || !this.rootPrimaryKey || !this.rootPrivilegedKeyManager) {
            throw new Error('Not authenticated or missing required data.');
        }
        const passwordSalt = (0, sdk_1.Random)(32);
        const newPasswordKey = await pbkdf2NativeOrJs(sdk_1.Utils.toArray(newPassword, 'utf8'), passwordSalt, exports.PBKDF2_NUM_ROUNDS, 32, 'sha512');
        // Decrypt existing factors needed for re-encryption, using the *root* privileged key manager
        const recoveryKey = await this.getFactor('recoveryKey');
        const presentationKey = await this.getFactor('presentationKey');
        const rootPrivilegedKey = await this.getFactor('privilegedKey'); // Get ROOT privileged key
        await this.updateAuthFactors(passwordSalt, newPasswordKey, presentationKey, recoveryKey, this.rootPrimaryKey, rootPrivilegedKey, // Pass the explicitly fetched root key
        this.profiles // Preserve existing profiles
        );
    }
    /**
     * Retrieves the current recovery key. Requires privileged access.
     */
    async getRecoveryKey() {
        if (!this.authenticated || !this.currentUMPToken || !this.rootPrivilegedKeyManager) {
            throw new Error('Not authenticated or missing required data.');
        }
        return this.getFactor('recoveryKey');
    }
    /**
     * Changes the user's recovery key. Prompts user to save the new key.
     * The save callback runs BEFORE the token update, so the user has the key
     * even if publishing subsequently fails.
     */
    async changeRecoveryKey() {
        if (!this.authenticated || !this.currentUMPToken || !this.rootPrimaryKey || !this.rootPrivilegedKeyManager) {
            throw new Error('Not authenticated or missing required data.');
        }
        // Decrypt existing factors needed
        const passwordKey = await this.getFactor('passwordKey');
        const presentationKey = await this.getFactor('presentationKey');
        const rootPrivilegedKey = await this.getFactor('privilegedKey'); // Get ROOT privileged key
        // Generate and save new recovery key
        const newRecoveryKey = (0, sdk_1.Random)(32);
        await this.recoveryKeySaver(newRecoveryKey);
        await this.updateAuthFactors(this.currentUMPToken.passwordSalt, passwordKey, presentationKey, newRecoveryKey, // Use the new key
        this.rootPrimaryKey, rootPrivilegedKey, this.profiles // Preserve profiles
        );
    }
    /**
     * Changes the user's presentation key.
     * The 32-byte requirement matches the XOR pairing with the other 32-byte factors.
     */
    async changePresentationKey(newPresentationKey) {
        if (!this.authenticated || !this.currentUMPToken || !this.rootPrimaryKey || !this.rootPrivilegedKeyManager) {
            throw new Error('Not authenticated or missing required data.');
        }
        if (newPresentationKey.length !== 32) {
            throw new Error('Presentation key must be 32 bytes.');
        }
        // Decrypt existing factors
        const recoveryKey = await this.getFactor('recoveryKey');
        const passwordKey = await this.getFactor('passwordKey');
        const rootPrivilegedKey = await this.getFactor('privilegedKey'); // Get ROOT privileged key
        await this.updateAuthFactors(this.currentUMPToken.passwordSalt, passwordKey, newPresentationKey, // Use the new key
        recoveryKey, this.rootPrimaryKey, rootPrivilegedKey, this.profiles // Preserve profiles
        );
        // Update the temporarily stored key if it was set
        if (this.presentationKey) {
            this.presentationKey = newPresentationKey;
        }
    }
    // --- Internal Helper Methods ---
    /**
     * Performs XOR operation on two byte arrays.
     * @throws If the arrays differ in length.
     */
    XOR(n1, n2) {
        if (n1.length !== n2.length) {
            // Provide more context in error
            throw new Error(`XOR length mismatch: ${n1.length} vs ${n2.length}`);
        }
        const r = new Array(n1.length);
        for (let i = 0; i < n1.length; i++) {
            r[i] = n1[i] ^ n2[i];
        }
        return r;
    }
    /**
     * Helper to decrypt a specific factor (key) stored encrypted in the UMP token.
     * Requires the root privileged key manager.
     * The protocolID/keyID here must match the ones used when encrypting the factors
     * ([2, 'admin key wrapping'], '1' — see the token-construction sites).
     * @param factorName Name of the factor to decrypt ('passwordKey', 'presentationKey', 'recoveryKey', 'privilegedKey').
     * @param getRoot If true and factorName is 'privilegedKey', returns the root privileged key bytes directly.
     * @returns The decrypted key bytes.
     */
    async getFactor(factorName) {
        if (!this.authenticated || !this.currentUMPToken || !this.rootPrivilegedKeyManager) {
            throw new Error(`Cannot get factor "${factorName}": Wallet not ready.`);
        }
        const protocolID = [2, 'admin key wrapping']; // Protocol used for encrypting factors
        const keyID = '1'; // Key ID used
        try {
            switch (factorName) {
                case 'passwordKey':
                    return (await this.rootPrivilegedKeyManager.decrypt({
                        ciphertext: this.currentUMPToken.passwordKeyEncrypted,
                        protocolID,
                        keyID
                    })).plaintext;
                case 'presentationKey':
                    return (await this.rootPrivilegedKeyManager.decrypt({
                        ciphertext: this.currentUMPToken.presentationKeyEncrypted,
                        protocolID,
                        keyID
                    })).plaintext;
                case 'recoveryKey':
                    return (await this.rootPrivilegedKeyManager.decrypt({
                        ciphertext: this.currentUMPToken.recoveryKeyEncrypted,
                        protocolID,
                        keyID
                    })).plaintext;
                case 'privilegedKey': {
                    // This needs careful handling based on whether the ROOT or PROFILE key is needed.
                    // This helper is mostly used for UMP updates, which need the ROOT key.
                    // We retrieve the PrivateKey object first.
                    const pk = await this.rootPrivilegedKeyManager.getPrivilegedKey('UMP token update', true); // Force retrieval of root key
                    return pk.toArray(); // Return bytes
                }
                default:
                    throw new Error(`Unknown factor name: ${factorName}`);
            }
        }
        catch (error)
{
            console.error(`Error decrypting factor ${factorName}:`, error);
            throw new Error(`Failed to decrypt factor "${factorName}": ${error.message}`);
        }
    }
    /**
     * Recomputes UMP token fields with updated factors and profiles, then publishes the update.
     * This operation requires the *root* privileged key and the *default* profile wallet.
     *
     * @param passwordSalt Salt for PBKDF2 password derivation (new or preserved).
     * @param passwordKey Derived password key bytes.
     * @param presentationKey Presentation key bytes.
     * @param recoveryKey Recovery key bytes.
     * @param rootPrimaryKey ROOT primary key bytes (not a profile-padded key).
     * @param rootPrivilegedKey ROOT privileged key bytes, fetched explicitly by callers.
     * @param profiles Current/new profiles list to embed (encrypted) in the token.
     */
    async updateAuthFactors(passwordSalt, passwordKey, presentationKey, recoveryKey, rootPrimaryKey, rootPrivilegedKey, // Explicitly pass the root key bytes
    profiles // Pass current/new profiles list
    ) {
        if (!this.authenticated || !this.rootPrimaryKey || !this.currentUMPToken) {
            throw new Error('Wallet is not properly authenticated or missing data for update.');
        }
        // Ensure we have the OLD token to consume (shallow copy taken before we mutate state)
        const oldTokenToConsume = { ...this.currentUMPToken };
        if (!oldTokenToConsume.currentOutpoint) {
            throw new Error('Cannot update UMP token: Old token has no outpoint.');
        }
        // Derive symmetrical encryption keys using XOR for the *root* keys
        const presentationPassword = new sdk_1.SymmetricKey(this.XOR(presentationKey, passwordKey));
        const presentationRecovery = new sdk_1.SymmetricKey(this.XOR(presentationKey, recoveryKey));
        const recoveryPassword = new sdk_1.SymmetricKey(this.XOR(recoveryKey, passwordKey));
        const primaryPassword = new sdk_1.SymmetricKey(this.XOR(rootPrimaryKey, passwordKey)); // Use rootPrimaryKey
        // Build a temporary privileged key manager using the explicit ROOT privileged key
        const tempRootPrivilegedKeyManager = new PrivilegedKeyManager_1.PrivilegedKeyManager(async () => new sdk_1.PrivateKey(rootPrivilegedKey));
        // Encrypt profiles if provided (JSON-encoded, encrypted under the root primary key)
        let profilesEncrypted;
        if (profiles && profiles.length > 0) {
            const profilesJson = JSON.stringify(profiles);
            const profilesBytes = sdk_1.Utils.toArray(profilesJson, 'utf8');
            profilesEncrypted = new sdk_1.SymmetricKey(rootPrimaryKey).encrypt(profilesBytes);
        }
        // Construct the new UMP token data
        const newTokenData = {
            passwordSalt,
            passwordPresentationPrimary: presentationPassword.encrypt(rootPrimaryKey),
            passwordRecoveryPrimary: recoveryPassword.encrypt(rootPrimaryKey),
            presentationRecoveryPrimary: presentationRecovery.encrypt(rootPrimaryKey),
            passwordPrimaryPrivileged: primaryPassword.encrypt(rootPrivilegedKey),
            presentationRecoveryPrivileged: presentationRecovery.encrypt(rootPrivilegedKey),
            presentationHash: sdk_1.Hash.sha256(presentationKey),
            recoveryHash: sdk_1.Hash.sha256(recoveryKey),
            presentationKeyEncrypted: (await tempRootPrivilegedKeyManager.encrypt({
                plaintext: presentationKey,
                protocolID: [2, 'admin key wrapping'],
                keyID: '1'
            })).ciphertext,
            passwordKeyEncrypted: (await tempRootPrivilegedKeyManager.encrypt({
                plaintext: passwordKey,
                protocolID: [2, 'admin key wrapping'],
                keyID: '1'
            })).ciphertext,
            recoveryKeyEncrypted: (await tempRootPrivilegedKeyManager.encrypt({
                plaintext: recoveryKey,
                protocolID: [2, 'admin key wrapping'],
                keyID: '1'
            })).ciphertext,
            profilesEncrypted // Add encrypted profiles
            // currentOutpoint will be set after publishing
        };
        // We need the wallet built for the DEFAULT profile to publish the UMP token.
        // If the current active profile is not default, temporarily switch, publish, then switch back.
        const currentActiveId = this.activeProfileId;
        let walletToUse = this.underlying;
        if (!currentActiveId.every(x => x === 0)) {
            console.log('Temporarily switching to default profile to update UMP token...');
            await this.switchProfile(exports.DEFAULT_PROFILE_ID); // This rebuilds this.underlying
            walletToUse = this.underlying;
        }
        if (!walletToUse) {
            throw new Error('Default profile wallet could not be activated for UMP token update.');
        }
        // Publish the new token on-chain, consuming the old one
        try {
            newTokenData.currentOutpoint = await this.UMPTokenInteractor.buildAndSend(walletToUse, this.adminOriginator, newTokenData, oldTokenToConsume // Consume the previous token
            );
            // Update the manager's state
            this.currentUMPToken = newTokenData;
            // Profiles are already updated in this.profiles if they were passed in
        }
        finally {
            // Switch back if we temporarily switched; finally guarantees the original
            // profile is restored even if buildAndSend throws.
            if (!currentActiveId.every(x => x === 0)) {
                console.log('Switching back to original profile...');
                await this.switchProfile(currentActiveId);
            }
        }
    }
    /**
     * Serializes a UMP token to binary format (Version 2 with optional profiles).
     * Layout: [1 byte version=2] + [11 * (varint len + bytes) for standard fields] + [1 byte profile_flag] + [IF flag=1 THEN varint len + profile bytes] + [varint len + outpoint bytes]
     */
    serializeUMPToken(token) {
        if (!token.currentOutpoint) {
            throw new Error('Token must have outpoint for serialization');
        }
        const writer = new sdk_1.Utils.Writer();
        writer.writeUInt8(2); // Version 2
        const writeArray = (arr) => {
            writer.writeVarIntNum(arr.length);
            writer.write(arr);
        };
        // Write standard fields in specific order (must match deserializeUMPToken)
        writeArray(token.passwordSalt); // 0
        writeArray(token.passwordPresentationPrimary); // 1
        writeArray(token.passwordRecoveryPrimary); // 2
        writeArray(token.presentationRecoveryPrimary); // 3
        writeArray(token.passwordPrimaryPrivileged); // 4
        writeArray(token.presentationRecoveryPrivileged); // 5
        writeArray(token.presentationHash); // 6
        writeArray(token.recoveryHash); // 7
        writeArray(token.presentationKeyEncrypted); // 8
        writeArray(token.passwordKeyEncrypted); // 9 - Swapped order vs original doc comment
        writeArray(token.recoveryKeyEncrypted); // 10
        // Write optional profiles field
        if (token.profilesEncrypted && token.profilesEncrypted.length > 0) {
            writer.writeUInt8(1); // Flag indicating profiles present
            writeArray(token.profilesEncrypted);
        }
        else {
            writer.writeUInt8(0); // Flag indicating no profiles
        }
        // Write outpoint string
        const outpointBytes = sdk_1.Utils.toArray(token.currentOutpoint, 'utf8');
        writer.writeVarIntNum(outpointBytes.length);
writer.write(outpointBytes);\n return writer.toArray();\n }\n /**\n * Deserializes a UMP token from binary format (Handles Version 1 and 2).\n */\n deserializeUMPToken(bin) {\n const reader = new sdk_1.Utils.Reader(bin);\n const version = reader.readUInt8();\n if (version !== 1 && version !== 2) {\n throw new Error(`Unsupported UMP token serialization version: ${version}`);\n }\n const readArray = () => {\n const length = reader.readVarIntNum();\n return reader.read(length);\n };\n // Read standard fields (order matches serialization V2)\n const passwordSalt = readArray(); // 0\n const passwordPresentationPrimary = readArray(); // 1\n const passwordRecoveryPrimary = readArray(); // 2\n const presentationRecoveryPrimary = readArray(); // 3\n const passwordPrimaryPrivileged = readArray(); // 4\n const presentationRecoveryPrivileged = readArray(); // 5\n const presentationHash = readArray(); // 6\n const recoveryHash = readArray(); // 7\n const presentationKeyEncrypted = readArray(); // 8\n const passwordKeyEncrypted = readArray(); // 9\n const recoveryKeyEncrypted = readArray(); // 10\n // Read optional profiles (only in V2)\n let profilesEncrypted;\n if (version === 2) {\n const profilesFlag = reader.readUInt8();\n if (profilesFlag === 1) {\n profilesEncrypted = readArray();\n }\n }\n // Read outpoint string\n const outpointLen = reader.readVarIntNum();\n const outpointBytes = reader.read(outpointLen);\n const currentOutpoint = sdk_1.Utils.toUTF8(outpointBytes);\n const token = {\n passwordSalt,\n passwordPresentationPrimary,\n passwordRecoveryPrimary,\n presentationRecoveryPrimary,\n passwordPrimaryPrivileged,\n presentationRecoveryPrivileged,\n presentationHash,\n recoveryHash,\n presentationKeyEncrypted,\n passwordKeyEncrypted, // Corrected order\n recoveryKeyEncrypted,\n profilesEncrypted, // May be undefined\n currentOutpoint\n };\n return token;\n }\n /**\n * Sets up the root key infrastructure after authentication or loading from snapshot.\n * Initializes the 
root primary key, root privileged key manager, loads profiles,\n * and sets the authenticated flag. Does NOT switch profile initially.\n *\n * @param rootPrimaryKey The user's root primary key (32 bytes).\n * @param ephemeralRootPrivilegedKey Optional root privileged key (e.g., during recovery flows).\n */\n async setupRootInfrastructure(rootPrimaryKey, ephemeralRootPrivilegedKey) {\n if (!this.currentUMPToken) {\n throw new Error('A UMP token must exist before setting up root infrastructure!');\n }\n this.rootPrimaryKey = rootPrimaryKey;\n // Store ephemeral key if provided, for one-time use by the manager\n let oneTimePrivilegedKey = ephemeralRootPrivilegedKey\n ? new sdk_1.PrivateKey(ephemeralRootPrivilegedKey)\n : undefined;\n // Create the ROOT PrivilegedKeyManager\n this.rootPrivilegedKeyManager = new PrivilegedKeyManager_1.PrivilegedKeyManager(async (reason) => {\n // 1. Use one-time key if available (for recovery)\n if (oneTimePrivilegedKey) {\n const tempKey = oneTimePrivilegedKey;\n oneTimePrivilegedKey = undefined; // Consume it\n return tempKey;\n }\n // 2. 
Otherwise, derive from password\n const password = await this.passwordRetriever(reason, (passwordCandidate) => {\n try {\n const derivedPasswordKey = sdk_1.Hash.pbkdf2(sdk_1.Utils.toArray(passwordCandidate, 'utf8'), this.currentUMPToken.passwordSalt, exports.PBKDF2_NUM_ROUNDS, 32, 'sha512');\n const privilegedDecryptor = this.XOR(this.rootPrimaryKey, derivedPasswordKey);\n const decryptedPrivileged = new sdk_1.SymmetricKey(privilegedDecryptor).decrypt(this.currentUMPToken.passwordPrimaryPrivileged);\n return !!decryptedPrivileged; // Test passes if decryption works\n }\n catch (e) {\n return false;\n }\n });\n // Decrypt the root privileged key using the confirmed password\n const derivedPasswordKey = await pbkdf2NativeOrJs(sdk_1.Utils.toArray(password, 'utf8'), this.currentUMPToken.passwordSalt, exports.PBKDF2_NUM_ROUNDS, 32, 'sha512');\n const privilegedDecryptor = this.XOR(this.rootPrimaryKey, derivedPasswordKey);\n const rootPrivilegedBytes = new sdk_1.SymmetricKey(privilegedDecryptor).decrypt(this.currentUMPToken.passwordPrimaryPrivileged);\n return new sdk_1.PrivateKey(rootPrivilegedBytes); // Return the ROOT key object\n });\n // Decrypt and load profiles if present in the token\n this.profiles = []; // Clear existing profiles before loading\n if (this.currentUMPToken.profilesEncrypted && this.currentUMPToken.profilesEncrypted.length > 0) {\n try {\n const decryptedProfileBytes = new sdk_1.SymmetricKey(rootPrimaryKey).decrypt(this.currentUMPToken.profilesEncrypted);\n const profilesJson = sdk_1.Utils.toUTF8(decryptedProfileBytes);\n this.profiles = JSON.parse(profilesJson);\n }\n catch (error) {\n console.error('Failed to decrypt or parse profiles:', error);\n // Decide if this should be fatal or just log and continue without profiles\n this.profiles = []; // Ensure profiles are empty on error\n // Optionally re-throw or handle more gracefully\n throw new Error(`Failed to load profiles: ${error.message}`);\n }\n }\n this.authenticated = true;\n // Note: We 
don't call switchProfile here anymore.\n // It's called by the auth methods (providePassword/provideRecoveryKey) or loadSnapshot after this.\n }\n /*\n * ---------------------------------------------------------------------------------------\n * Standard WalletInterface methods proxying to the *active* underlying wallet.\n * Includes authentication checks and admin originator protection.\n * ---------------------------------------------------------------------------------------\n */\n checkAuthAndUnderlying(originator) {\n if (!this.authenticated) {\n throw new Error('User is not authenticated.');\n }\n if (!this.underlying) {\n // This might happen if authentication succeeded but profile switching failed\n throw new Error('Underlying wallet for the active profile is not initialized.');\n }\n if (originator === this.adminOriginator) {\n throw new Error('External applications are not allowed to use the admin originator.');\n }\n }\n // Example proxy method (repeat pattern for all others)\n async getPublicKey(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.getPublicKey(args, originator);\n }\n async revealCounterpartyKeyLinkage(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.revealCounterpartyKeyLinkage(args, originator);\n }\n async revealSpecificKeyLinkage(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.revealSpecificKeyLinkage(args, originator);\n }\n async encrypt(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.encrypt(args, originator);\n }\n async decrypt(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.decrypt(args, originator);\n }\n async createHmac(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.createHmac(args, originator);\n }\n async verifyHmac(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return 
this.underlying.verifyHmac(args, originator);\n }\n async createSignature(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.createSignature(args, originator);\n }\n async verifySignature(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.verifySignature(args, originator);\n }\n async createAction(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.createAction(args, originator);\n }\n async signAction(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.signAction(args, originator);\n }\n async abortAction(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.abortAction(args, originator);\n }\n async listActions(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.listActions(args, originator);\n }\n async internalizeAction(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.internalizeAction(args, originator);\n }\n async listOutputs(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.listOutputs(args, originator);\n }\n async relinquishOutput(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.relinquishOutput(args, originator);\n }\n async acquireCertificate(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.acquireCertificate(args, originator);\n }\n async listCertificates(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.listCertificates(args, originator);\n }\n async proveCertificate(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.proveCertificate(args, originator);\n }\n async relinquishCertificate(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.relinquishCertificate(args, originator);\n 
}\n async discoverByIdentityKey(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.discoverByIdentityKey(args, originator);\n }\n async discoverByAttributes(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.discoverByAttributes(args, originator);\n }\n async isAuthenticated(_, originator) {\n if (!this.authenticated) {\n throw new Error('User is not authenticated.');\n }\n if (originator === this.adminOriginator) {\n throw new Error('External applications are not allowed to use the admin originator.');\n }\n return { authenticated: true };\n }\n async waitForAuthentication(_, originator) {\n if (originator === this.adminOriginator) {\n throw new Error('External applications are not allowed to use the admin originator.');\n }\n while (!this.authenticated || !this.underlying) {\n await new Promise(resolve => setTimeout(resolve, 100));\n }\n return await this.underlying.waitForAuthentication({}, originator);\n }\n async getHeight(_, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.getHeight({}, originator);\n }\n async getHeaderForHeight(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.getHeaderForHeight(args, originator);\n }\n async getNetwork(_, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.getNetwork({}, originator);\n }\n async getVersion(_, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.getVersion({}, originator);\n }\n}\nexports.CWIStyleWalletManager = CWIStyleWalletManager;\n//# sourceMappingURL=CWIStyleWalletManager.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/CWIStyleWalletManager.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.CWIStyleWalletManager = exports.OverlayUMPTokenInteractor = exports.DEFAULT_PROFILE_ID = exports.PBKDF2_NUM_ROUNDS = void 0;\nconst 
sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst PrivilegedKeyManager_1 = __webpack_require__(/*! ./sdk/PrivilegedKeyManager */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/PrivilegedKeyManager.js\");\n/**\n * Number of rounds used in PBKDF2 for deriving password keys.\n */\nexports.PBKDF2_NUM_ROUNDS = 7777;\n/**\n * PBKDF-2 that prefers the browser / Node 20+ WebCrypto implementation and\n * silently falls back to the existing JS code.\n *\n * @param passwordBytes Raw password bytes.\n * @param salt Salt bytes.\n * @param iterations Number of rounds.\n * @param keyLen Desired key length in bytes.\n * @param hash Digest algorithm (default \"sha512\").\n * @returns Derived key bytes.\n */\nasync function pbkdf2NativeOrJs(passwordBytes, salt, iterations, keyLen, hash = 'sha512') {\n var _a;\n // ----- fast-path: WebCrypto (both browser & recent Node expose globalThis.crypto.subtle)\n const subtle = (_a = globalThis === null || globalThis === void 0 ? void 0 : globalThis.crypto) === null || _a === void 0 ? 
void 0 : _a.subtle;\n if (subtle) {\n try {\n const baseKey = await subtle.importKey('raw', new Uint8Array(passwordBytes), { name: 'PBKDF2' }, \n /*extractable*/ false, ['deriveBits']);\n const bits = await subtle.deriveBits({\n name: 'PBKDF2',\n salt: new Uint8Array(salt),\n iterations,\n hash: hash.toUpperCase()\n }, baseKey, keyLen * 8);\n return Array.from(new Uint8Array(bits));\n }\n catch (err) {\n console.warn('[pbkdf2] WebCrypto path failed → falling back to JS implementation', err);\n /* fall through */\n }\n }\n // ----- slow-path: old JavaScript implementation\n return sdk_1.Hash.pbkdf2(passwordBytes, salt, iterations, keyLen, hash);\n}\n/**\n * Unique Identifier for the default profile (16 zero bytes).\n */\nexports.DEFAULT_PROFILE_ID = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];\n/**\n * @class OverlayUMPTokenInteractor\n *\n * A concrete implementation of the UMPTokenInteractor interface that interacts\n * with Overlay Services and the UMP (User Management Protocol) topic. 
This class\n * is responsible for:\n *\n * 1) Locating UMP tokens via overlay lookups (ls_users).\n * 2) Creating and publishing new or updated UMP token outputs on-chain under\n * the \"tm_users\" topic.\n * 3) Consuming (spending) an old token if provided.\n */\nclass OverlayUMPTokenInteractor {\n /**\n * Construct a new OverlayUMPTokenInteractor.\n *\n * @param resolver A LookupResolver instance for performing overlay queries (ls_users).\n * @param broadcaster A SHIPBroadcaster instance for sharing new or updated tokens across the `tm_users` overlay.\n */\n constructor(resolver = new sdk_1.LookupResolver(), broadcaster = new sdk_1.SHIPBroadcaster(['tm_users'])) {\n this.resolver = resolver;\n this.broadcaster = broadcaster;\n }\n /**\n * Finds a UMP token on-chain by the given presentation key hash, if it exists.\n * Uses the ls_users overlay service to perform the lookup.\n *\n * @param hash The 32-byte SHA-256 hash of the presentation key.\n * @returns A UMPToken object (including currentOutpoint) if found, otherwise undefined.\n */\n async findByPresentationKeyHash(hash) {\n // Query ls_users for the given presentationHash\n const question = {\n service: 'ls_users',\n query: { presentationHash: sdk_1.Utils.toHex(hash) }\n };\n const answer = await this.resolver.query(question);\n return this.parseLookupAnswer(answer);\n }\n /**\n * Finds a UMP token on-chain by the given recovery key hash, if it exists.\n * Uses the ls_users overlay service to perform the lookup.\n *\n * @param hash The 32-byte SHA-256 hash of the recovery key.\n * @returns A UMPToken object (including currentOutpoint) if found, otherwise undefined.\n */\n async findByRecoveryKeyHash(hash) {\n const question = {\n service: 'ls_users',\n query: { recoveryHash: sdk_1.Utils.toHex(hash) }\n };\n const answer = await this.resolver.query(question);\n return this.parseLookupAnswer(answer);\n }\n /**\n * Creates or updates (replaces) a UMP token on-chain. 
If `oldTokenToConsume` is provided,\n * it is spent in the same transaction that creates the new token output. The new token is\n * then broadcast and published under the `tm_users` topic using a SHIP broadcast, ensuring\n * overlay participants see the updated token.\n *\n * @param wallet The wallet used to build and sign the transaction (MUST be operating under the DEFAULT profile).\n * @param adminOriginator The domain/FQDN of the administrative originator (wallet operator).\n * @param token The new UMPToken to create on-chain.\n * @param oldTokenToConsume Optionally, an existing token to consume/spend in the same transaction.\n * @returns The outpoint of the newly created UMP token (e.g. \"abcd1234...ef.0\").\n */\n async buildAndSend(wallet, // This wallet MUST be the one built for the default profile\n adminOriginator, token, oldTokenToConsume) {\n // 1) Construct the data fields for the new UMP token.\n const fields = [];\n fields[0] = token.passwordSalt;\n fields[1] = token.passwordPresentationPrimary;\n fields[2] = token.passwordRecoveryPrimary;\n fields[3] = token.presentationRecoveryPrimary;\n fields[4] = token.passwordPrimaryPrivileged;\n fields[5] = token.presentationRecoveryPrivileged;\n fields[6] = token.presentationHash;\n fields[7] = token.recoveryHash;\n fields[8] = token.presentationKeyEncrypted;\n fields[9] = token.passwordKeyEncrypted;\n fields[10] = token.recoveryKeyEncrypted;\n // Optional field (11) for encrypted profiles\n if (token.profilesEncrypted) {\n fields[11] = token.profilesEncrypted;\n }\n // 2) Create a PushDrop script referencing these fields, locked with the admin key.\n const script = await new sdk_1.PushDrop(wallet, adminOriginator).lock(fields, [2, 'admin user management token'], // protocolID\n '1', // keyID\n 'self', // counterparty\n /*forSelf=*/ true, \n /*includeSignature=*/ true);\n // 3) Prepare the createAction call. 
If oldTokenToConsume is provided, gather the outpoint.\n const inputs = [];\n let inputToken;\n if (oldTokenToConsume === null || oldTokenToConsume === void 0 ? void 0 : oldTokenToConsume.currentOutpoint) {\n inputToken = await this.findByOutpoint(oldTokenToConsume.currentOutpoint);\n // If there is no token on the overlay, we can't consume it. Just start over with a new token.\n if (!inputToken) {\n oldTokenToConsume = undefined;\n // Otherwise, add the input\n }\n else {\n inputs.push({\n outpoint: oldTokenToConsume.currentOutpoint,\n unlockingScriptLength: 73, // typical signature length\n inputDescription: 'Consume old UMP token'\n });\n }\n }\n const outputs = [\n {\n lockingScript: script.toHex(),\n satoshis: 1,\n outputDescription: 'New UMP token output'\n }\n ];\n // 4) Build the partial transaction via createAction.\n let createResult;\n try {\n createResult = await wallet.createAction({\n description: oldTokenToConsume ? 'Renew UMP token (consume old, create new)' : 'Create new UMP token',\n inputs,\n outputs,\n inputBEEF: inputToken === null || inputToken === void 0 ? void 0 : inputToken.beef,\n options: {\n randomizeOutputs: false,\n acceptDelayedBroadcast: false\n }\n }, adminOriginator);\n }\n catch (e) {\n console.error('Error with UMP token update. Attempting a last-ditch effort to get a new one', e);\n createResult = await wallet.createAction({\n description: 'Recover UMP token',\n outputs,\n options: {\n randomizeOutputs: false,\n acceptDelayedBroadcast: false\n }\n }, adminOriginator);\n }\n // If the transaction is fully processed by the wallet\n if (!createResult.signableTransaction) {\n const finalTxid = createResult.txid || (createResult.tx ? 
sdk_1.Transaction.fromAtomicBEEF(createResult.tx).id('hex') : undefined);\n if (!finalTxid) {\n throw new Error('No signableTransaction and no final TX found.');\n }\n // Now broadcast to `tm_users` using SHIP\n const broadcastTx = sdk_1.Transaction.fromAtomicBEEF(createResult.tx);\n const result = await this.broadcaster.broadcast(broadcastTx);\n console.log('BROADCAST RESULT', result);\n return `${finalTxid}.0`;\n }\n // 5) If oldTokenToConsume is present, we must sign the input referencing it.\n // (If there's no old token, there's nothing to sign for the input.)\n let finalTxid = '';\n const reference = createResult.signableTransaction.reference;\n const partialTx = sdk_1.Transaction.fromBEEF(createResult.signableTransaction.tx);\n if (oldTokenToConsume === null || oldTokenToConsume === void 0 ? void 0 : oldTokenToConsume.currentOutpoint) {\n // Unlock the old token with a matching PushDrop unlocker\n const unlocker = new sdk_1.PushDrop(wallet, adminOriginator).unlock([2, 'admin user management token'], '1', 'self');\n const unlockingScript = await unlocker.sign(partialTx, 0);\n // Provide it to the wallet\n const signResult = await wallet.signAction({\n reference,\n spends: {\n 0: {\n unlockingScript: unlockingScript.toHex()\n }\n }\n }, adminOriginator);\n finalTxid = signResult.txid || (signResult.tx ? 
sdk_1.Transaction.fromAtomicBEEF(signResult.tx).id('hex') : '');\n if (!finalTxid) {\n throw new Error('Could not finalize transaction for renewed UMP token.');\n }\n // 6) Broadcast to `tm_users`\n const finalAtomicTx = signResult.tx;\n if (!finalAtomicTx) {\n throw new Error('Final transaction data missing after signing renewed UMP token.');\n }\n const broadcastTx = sdk_1.Transaction.fromAtomicBEEF(finalAtomicTx);\n const result = await this.broadcaster.broadcast(broadcastTx);\n console.log('BROADCAST RESULT', result);\n return `${finalTxid}.0`;\n }\n else {\n // Fallback for creating a new token (no input spending)\n const signResult = await wallet.signAction({ reference, spends: {} }, adminOriginator);\n finalTxid = signResult.txid || (signResult.tx ? sdk_1.Transaction.fromAtomicBEEF(signResult.tx).id('hex') : '');\n if (!finalTxid) {\n throw new Error('Failed to finalize new UMP token transaction.');\n }\n const finalAtomicTx = signResult.tx;\n if (!finalAtomicTx) {\n throw new Error('Final transaction data missing after signing new UMP token.');\n }\n const broadcastTx = sdk_1.Transaction.fromAtomicBEEF(finalAtomicTx);\n const result = await this.broadcaster.broadcast(broadcastTx);\n console.log('BROADCAST RESULT', result);\n return `${finalTxid}.0`;\n }\n }\n /**\n * Attempts to parse a LookupAnswer from the UMP lookup service. 
If successful,\n * extracts the token fields from the resulting transaction and constructs\n * a UMPToken object.\n *\n * @param answer The LookupAnswer returned by a query to ls_users.\n * @returns The parsed UMPToken or `undefined` if none found/decodable.\n */\n parseLookupAnswer(answer) {\n var _a;\n if (answer.type !== 'output-list') {\n return undefined;\n }\n if (!answer.outputs || answer.outputs.length === 0) {\n return undefined;\n }\n const { beef, outputIndex } = answer.outputs[0];\n try {\n const tx = sdk_1.Transaction.fromBEEF(beef);\n const outpoint = `${tx.id('hex')}.${outputIndex}`;\n const decoded = sdk_1.PushDrop.decode(tx.outputs[outputIndex].lockingScript);\n // Expecting 11 or more fields for UMP\n if (!decoded.fields || decoded.fields.length < 11) {\n console.warn(`Unexpected number of fields in UMP token: ${(_a = decoded.fields) === null || _a === void 0 ? void 0 : _a.length}`);\n return undefined;\n }\n // Build the UMP token from these fields, preserving outpoint\n const t = {\n // Order matches buildAndSend and serialize/deserialize\n passwordSalt: decoded.fields[0],\n passwordPresentationPrimary: decoded.fields[1],\n passwordRecoveryPrimary: decoded.fields[2],\n presentationRecoveryPrimary: decoded.fields[3],\n passwordPrimaryPrivileged: decoded.fields[4],\n presentationRecoveryPrivileged: decoded.fields[5],\n presentationHash: decoded.fields[6],\n recoveryHash: decoded.fields[7],\n presentationKeyEncrypted: decoded.fields[8],\n passwordKeyEncrypted: decoded.fields[9],\n recoveryKeyEncrypted: decoded.fields[10],\n profilesEncrypted: decoded.fields[12] ? 
decoded.fields[11] : undefined, // If there's a signature in field 12, use field 11\n currentOutpoint: outpoint\n };\n return t;\n }\n catch (e) {\n console.error('Failed to parse or decode UMP token:', e);\n return undefined;\n }\n }\n /**\n * Finds by outpoint for unlocking / spending previous tokens.\n * @param outpoint The outpoint we are searching by\n * @returns The result so that we can use it to unlock the transaction\n */\n async findByOutpoint(outpoint) {\n const results = await this.resolver.query({\n service: 'ls_users',\n query: {\n outpoint\n }\n });\n if (results.type !== 'output-list') {\n return undefined;\n }\n if (!results.outputs || !results.outputs.length) {\n return undefined;\n }\n return results.outputs[0];\n }\n}\nexports.OverlayUMPTokenInteractor = OverlayUMPTokenInteractor;\n/**\n * Manages a \"CWI-style\" wallet that uses a UMP token and a\n * multi-key authentication scheme (password, presentation key, and recovery key),\n * supporting multiple user profiles under a single account.\n */\nclass CWIStyleWalletManager {\n /**\n * Constructs a new CWIStyleWalletManager.\n *\n * @param adminOriginator The domain name of the administrative originator.\n * @param walletBuilder A function that can build an underlying wallet instance for a profile.\n * @param interactor An instance of UMPTokenInteractor.\n * @param recoveryKeySaver A function to persist a new recovery key.\n * @param passwordRetriever A function to request the user's password.\n * @param newWalletFunder Optional function to fund a new wallet.\n * @param stateSnapshot Optional previously saved state snapshot.\n */\n constructor(adminOriginator, walletBuilder, interactor = new OverlayUMPTokenInteractor(), recoveryKeySaver, passwordRetriever, newWalletFunder, stateSnapshot) {\n /**\n * Current mode of authentication.\n */\n this.authenticationMode = 'presentation-key-and-password';\n /**\n * Indicates new user or existing user flow.\n */\n this.authenticationFlow = 'new-user';\n 
/**\n * The currently active profile ID (null or DEFAULT_PROFILE_ID means default profile).\n */\n this.activeProfileId = exports.DEFAULT_PROFILE_ID;\n /**\n * List of loaded non-default profiles.\n */\n this.profiles = [];\n this.adminOriginator = adminOriginator;\n this.walletBuilder = walletBuilder;\n this.UMPTokenInteractor = interactor;\n this.recoveryKeySaver = recoveryKeySaver;\n this.passwordRetriever = passwordRetriever;\n this.authenticated = false;\n this.newWalletFunder = newWalletFunder;\n // If a saved snapshot is provided, attempt to load it.\n // Note: loadSnapshot now returns a promise. We don't await it here,\n // as the constructor must be synchronous. The caller should check\n // `this.authenticated` after construction if a snapshot was provided.\n if (stateSnapshot) {\n this.loadSnapshot(stateSnapshot).catch(err => {\n console.error('Failed to load snapshot during construction:', err);\n // Clear potentially partially loaded state\n this.destroy();\n });\n }\n }\n // --- Authentication Methods ---\n /**\n * Provides the presentation key.\n */\n async providePresentationKey(key) {\n if (this.authenticated) {\n throw new Error('User is already authenticated');\n }\n if (this.authenticationMode === 'recovery-key-and-password') {\n throw new Error('Presentation key is not needed in this mode');\n }\n const hash = sdk_1.Hash.sha256(key);\n const token = await this.UMPTokenInteractor.findByPresentationKeyHash(hash);\n if (!token) {\n // No token found -> New user\n this.authenticationFlow = 'new-user';\n this.presentationKey = key;\n }\n else {\n // Found token -> existing user\n this.authenticationFlow = 'existing-user';\n this.presentationKey = key;\n this.currentUMPToken = token;\n }\n }\n /**\n * Provides the password.\n */\n async providePassword(password) {\n if (this.authenticated) {\n throw new Error('User is already authenticated');\n }\n if (this.authenticationMode === 'presentation-key-and-recovery-key') {\n throw new Error('Password is 
not needed in this mode');\n }\n if (this.authenticationFlow === 'existing-user') {\n // Existing user flow\n if (!this.currentUMPToken) {\n throw new Error('Provide presentation or recovery key first.');\n }\n const derivedPasswordKey = await pbkdf2NativeOrJs(sdk_1.Utils.toArray(password, 'utf8'), this.currentUMPToken.passwordSalt, exports.PBKDF2_NUM_ROUNDS, 32, 'sha512');\n let rootPrimaryKey;\n let rootPrivilegedKey; // Only needed for recovery mode\n if (this.authenticationMode === 'presentation-key-and-password') {\n if (!this.presentationKey)\n throw new Error('No presentation key found!');\n const xorKey = this.XOR(this.presentationKey, derivedPasswordKey);\n rootPrimaryKey = new sdk_1.SymmetricKey(xorKey).decrypt(this.currentUMPToken.passwordPresentationPrimary);\n }\n else {\n // 'recovery-key-and-password'\n if (!this.recoveryKey)\n throw new Error('No recovery key found!');\n const primaryDecryptionKey = this.XOR(this.recoveryKey, derivedPasswordKey);\n rootPrimaryKey = new sdk_1.SymmetricKey(primaryDecryptionKey).decrypt(this.currentUMPToken.passwordRecoveryPrimary);\n const privilegedDecryptionKey = this.XOR(rootPrimaryKey, derivedPasswordKey);\n rootPrivilegedKey = new sdk_1.SymmetricKey(privilegedDecryptionKey).decrypt(this.currentUMPToken.passwordPrimaryPrivileged);\n }\n // Build root infrastructure, load profiles, and switch to default profile initially\n await this.setupRootInfrastructure(rootPrimaryKey, rootPrivilegedKey);\n await this.switchProfile(this.activeProfileId);\n }\n else {\n // New user flow (only 'presentation-key-and-password')\n if (this.authenticationMode !== 'presentation-key-and-password') {\n throw new Error('New-user flow requires presentation key and password mode.');\n }\n if (!this.presentationKey) {\n throw new Error('No presentation key provided for new-user flow.');\n }\n // Generate new keys/salt\n const recoveryKey = (0, sdk_1.Random)(32);\n await this.recoveryKeySaver(recoveryKey);\n const passwordSalt = (0, 
sdk_1.Random)(32);\n const passwordKey = await pbkdf2NativeOrJs(sdk_1.Utils.toArray(password, 'utf8'), passwordSalt, exports.PBKDF2_NUM_ROUNDS, 32, 'sha512');\n const rootPrimaryKey = (0, sdk_1.Random)(32);\n const rootPrivilegedKey = (0, sdk_1.Random)(32);\n // Build XOR keys\n const presentationPassword = new sdk_1.SymmetricKey(this.XOR(this.presentationKey, passwordKey));\n const presentationRecovery = new sdk_1.SymmetricKey(this.XOR(this.presentationKey, recoveryKey));\n const recoveryPassword = new sdk_1.SymmetricKey(this.XOR(recoveryKey, passwordKey));\n const primaryPassword = new sdk_1.SymmetricKey(this.XOR(rootPrimaryKey, passwordKey));\n // Temp manager for encryption\n const tempPrivilegedKeyManager = new PrivilegedKeyManager_1.PrivilegedKeyManager(async () => new sdk_1.PrivateKey(rootPrivilegedKey));\n // Build new UMP token (no profiles initially)\n const newToken = {\n passwordSalt,\n passwordPresentationPrimary: presentationPassword.encrypt(rootPrimaryKey),\n passwordRecoveryPrimary: recoveryPassword.encrypt(rootPrimaryKey),\n presentationRecoveryPrimary: presentationRecovery.encrypt(rootPrimaryKey),\n passwordPrimaryPrivileged: primaryPassword.encrypt(rootPrivilegedKey),\n presentationRecoveryPrivileged: presentationRecovery.encrypt(rootPrivilegedKey),\n presentationHash: sdk_1.Hash.sha256(this.presentationKey),\n recoveryHash: sdk_1.Hash.sha256(recoveryKey),\n presentationKeyEncrypted: (await tempPrivilegedKeyManager.encrypt({\n plaintext: this.presentationKey,\n protocolID: [2, 'admin key wrapping'],\n keyID: '1'\n })).ciphertext,\n passwordKeyEncrypted: (await tempPrivilegedKeyManager.encrypt({\n plaintext: passwordKey,\n protocolID: [2, 'admin key wrapping'],\n keyID: '1'\n })).ciphertext,\n recoveryKeyEncrypted: (await tempPrivilegedKeyManager.encrypt({\n plaintext: recoveryKey,\n protocolID: [2, 'admin key wrapping'],\n keyID: '1'\n })).ciphertext,\n profilesEncrypted: undefined // No profiles yet\n };\n this.currentUMPToken = newToken;\n // 
Setup root infrastructure and switch to default profile\n await this.setupRootInfrastructure(rootPrimaryKey);\n await this.switchProfile(exports.DEFAULT_PROFILE_ID);\n // Fund the *default* wallet if funder provided\n if (this.newWalletFunder && this.underlying) {\n try {\n await this.newWalletFunder(this.presentationKey, this.underlying, this.adminOriginator);\n }\n catch (e) {\n console.error('Error funding new wallet:', e);\n // Decide if this should halt the process or just log\n }\n }\n // Publish the new UMP token *after* potentially funding\n // We need the default profile wallet to sign the UMP creation TX\n if (!this.underlying) {\n throw new Error('Default profile wallet not built before attempting to publish UMP token.');\n }\n this.currentUMPToken.currentOutpoint = await this.UMPTokenInteractor.buildAndSend(this.underlying, // Use the default profile wallet\n this.adminOriginator, newToken);\n }\n }\n /**\n * Provides the recovery key.\n */\n async provideRecoveryKey(recoveryKey) {\n if (this.authenticated) {\n throw new Error('Already authenticated');\n }\n if (this.authenticationFlow === 'new-user') {\n throw new Error('Do not submit recovery key in new-user flow');\n }\n if (this.authenticationMode === 'presentation-key-and-password') {\n throw new Error('No recovery key required in this mode');\n }\n else if (this.authenticationMode === 'recovery-key-and-password') {\n // Wait for password\n const hash = sdk_1.Hash.sha256(recoveryKey);\n const token = await this.UMPTokenInteractor.findByRecoveryKeyHash(hash);\n if (!token)\n throw new Error('No user found with this recovery key');\n this.recoveryKey = recoveryKey;\n this.currentUMPToken = token;\n }\n else {\n // 'presentation-key-and-recovery-key'\n if (!this.presentationKey)\n throw new Error('Provide the presentation key first');\n if (!this.currentUMPToken)\n throw new Error('Current UMP token not found');\n const xorKey = this.XOR(this.presentationKey, recoveryKey);\n const rootPrimaryKey = new 
sdk_1.SymmetricKey(xorKey).decrypt(this.currentUMPToken.presentationRecoveryPrimary);\n const rootPrivilegedKey = new sdk_1.SymmetricKey(xorKey).decrypt(this.currentUMPToken.presentationRecoveryPrivileged);\n // Build root infrastructure, load profiles, switch to default\n await this.setupRootInfrastructure(rootPrimaryKey, rootPrivilegedKey);\n await this.switchProfile(this.activeProfileId);\n }\n }\n // --- State Management Methods ---\n /**\n * Saves the current wallet state (root key, UMP token, active profile) into an encrypted snapshot.\n * Version 2 format: [1 byte version=2] + [32 byte snapshot key] + [16 byte activeProfileId] + [encrypted payload]\n * Encrypted Payload: [32 byte rootPrimaryKey] + [varint token length + serialized UMP token]\n *\n * @returns Encrypted snapshot bytes.\n */\n saveSnapshot() {\n if (!this.rootPrimaryKey || !this.currentUMPToken) {\n throw new Error('No root primary key or current UMP token set');\n }\n const snapshotKey = (0, sdk_1.Random)(32);\n const snapshotPreimageWriter = new sdk_1.Utils.Writer();\n // Write root primary key\n snapshotPreimageWriter.write(this.rootPrimaryKey);\n // Write serialized UMP token (must have outpoint)\n if (!this.currentUMPToken.currentOutpoint) {\n throw new Error('UMP token cannot be saved without a current outpoint.');\n }\n const serializedToken = this.serializeUMPToken(this.currentUMPToken);\n snapshotPreimageWriter.writeVarIntNum(serializedToken.length);\n snapshotPreimageWriter.write(serializedToken);\n // Encrypt the payload\n const snapshotPreimage = snapshotPreimageWriter.toArray();\n const snapshotPayload = new sdk_1.SymmetricKey(snapshotKey).encrypt(snapshotPreimage);\n // Build final snapshot (Version 2)\n const snapshotWriter = new sdk_1.Utils.Writer();\n snapshotWriter.writeUInt8(2); // Version\n snapshotWriter.write(snapshotKey);\n snapshotWriter.write(this.activeProfileId); // Active profile ID\n snapshotWriter.write(snapshotPayload); // Encrypted data\n return 
snapshotWriter.toArray();\n }\n /**\n * Loads a previously saved state snapshot. Restores root key, UMP token, profiles, and active profile.\n * Handles Version 1 (legacy) and Version 2 formats.\n *\n * @param snapshot Encrypted snapshot bytes.\n */\n async loadSnapshot(snapshot) {\n try {\n const reader = new sdk_1.Utils.Reader(snapshot);\n const version = reader.readUInt8();\n let snapshotKey;\n let encryptedPayload;\n let activeProfileId = exports.DEFAULT_PROFILE_ID; // Default for V1\n if (version === 1) {\n snapshotKey = reader.read(32);\n encryptedPayload = reader.read();\n }\n else if (version === 2) {\n snapshotKey = reader.read(32);\n activeProfileId = reader.read(16); // Read active profile ID\n encryptedPayload = reader.read();\n }\n else {\n throw new Error(`Unsupported snapshot version: ${version}`);\n }\n // Decrypt payload\n const decryptedPayload = new sdk_1.SymmetricKey(snapshotKey).decrypt(encryptedPayload);\n const payloadReader = new sdk_1.Utils.Reader(decryptedPayload);\n // Read root primary key\n const rootPrimaryKey = payloadReader.read(32);\n // Read serialized UMP token\n const tokenLen = payloadReader.readVarIntNum();\n const tokenBytes = payloadReader.read(tokenLen);\n const token = this.deserializeUMPToken(tokenBytes);\n // Assign loaded data\n this.currentUMPToken = token;\n // Setup root infrastructure, load profiles, and switch to the loaded active profile\n await this.setupRootInfrastructure(rootPrimaryKey); // Will automatically load profiles\n await this.switchProfile(activeProfileId); // Switch to the profile saved in the snapshot\n this.authenticationFlow = 'existing-user'; // Loading implies existing user\n }\n catch (error) {\n this.destroy(); // Clear state on error\n throw new Error(`Failed to load snapshot: ${error.message}`);\n }\n }\n /**\n * Destroys the wallet state, clearing keys, tokens, and profiles.\n */\n destroy() {\n this.underlying = undefined;\n this.rootPrivilegedKeyManager = undefined;\n this.authenticated = 
false;\n this.rootPrimaryKey = undefined;\n this.currentUMPToken = undefined;\n this.presentationKey = undefined;\n this.recoveryKey = undefined;\n this.profiles = [];\n this.activeProfileId = exports.DEFAULT_PROFILE_ID;\n this.authenticationMode = 'presentation-key-and-password';\n this.authenticationFlow = 'new-user';\n }\n // --- Profile Management Methods ---\n /**\n * Lists all available profiles, including the default profile.\n * @returns Array of profile info objects, including an 'active' flag.\n */\n listProfiles() {\n if (!this.authenticated) {\n throw new Error('Not authenticated.');\n }\n const profileList = [\n // Default profile\n {\n id: exports.DEFAULT_PROFILE_ID,\n name: 'default',\n createdAt: null, // Default profile doesn't have a creation timestamp in the same way\n active: this.activeProfileId.every(x => x === 0)\n },\n // Other profiles\n ...this.profiles.map(p => ({\n id: p.id,\n name: p.name,\n createdAt: p.createdAt,\n active: this.activeProfileId.every((x, i) => x === p.id[i])\n }))\n ];\n return profileList;\n }\n /**\n * Adds a new profile with the given name.\n * Generates necessary pads and updates the UMP token.\n * Does not switch to the new profile automatically.\n *\n * @param name The desired name for the new profile.\n * @returns The ID of the newly created profile.\n */\n async addProfile(name) {\n if (!this.authenticated || !this.rootPrimaryKey || !this.currentUMPToken || !this.rootPrivilegedKeyManager) {\n throw new Error('Wallet not fully initialized or authenticated.');\n }\n // Ensure name is unique (including 'default')\n if (name === 'default' || this.profiles.some(p => p.name.toLowerCase() === name.toLowerCase())) {\n throw new Error(`Profile name \"${name}\" is already in use.`);\n }\n const newProfile = {\n name,\n id: (0, sdk_1.Random)(16),\n primaryPad: (0, sdk_1.Random)(32),\n privilegedPad: (0, sdk_1.Random)(32),\n createdAt: Math.floor(Date.now() / 1000)\n };\n this.profiles.push(newProfile);\n // Update the UMP 
token with the new profile list\n await this.updateAuthFactors(this.currentUMPToken.passwordSalt, \n // Need to re-derive/decrypt factors needed for re-encryption\n await this.getFactor('passwordKey'), await this.getFactor('presentationKey'), await this.getFactor('recoveryKey'), this.rootPrimaryKey, await this.getFactor('privilegedKey'), // Get ROOT privileged key\n this.profiles // Pass the updated profile list\n );\n return newProfile.id;\n }\n /**\n * Deletes a profile by its ID.\n * Cannot delete the default profile. If the active profile is deleted,\n * it switches back to the default profile.\n *\n * @param profileId The 16-byte ID of the profile to delete.\n */\n async deleteProfile(profileId) {\n if (!this.authenticated || !this.rootPrimaryKey || !this.currentUMPToken || !this.rootPrivilegedKeyManager) {\n throw new Error('Wallet not fully initialized or authenticated.');\n }\n if (profileId.every(x => x === 0)) {\n throw new Error('Cannot delete the default profile.');\n }\n const profileIndex = this.profiles.findIndex(p => p.id.every((x, i) => x === profileId[i]));\n if (profileIndex === -1) {\n throw new Error('Profile not found.');\n }\n // Remove the profile\n this.profiles.splice(profileIndex, 1);\n // If the deleted profile was active, switch to default\n if (this.activeProfileId.every((x, i) => x === profileId[i])) {\n await this.switchProfile(exports.DEFAULT_PROFILE_ID); // This rebuilds the wallet\n }\n // Update the UMP token\n await this.updateAuthFactors(this.currentUMPToken.passwordSalt, await this.getFactor('passwordKey'), await this.getFactor('presentationKey'), await this.getFactor('recoveryKey'), this.rootPrimaryKey, await this.getFactor('privilegedKey'), // Get ROOT privileged key\n this.profiles // Pass updated list\n );\n }\n /**\n * Switches the active profile. 
This re-derives keys and rebuilds the underlying wallet.\n *\n * @param profileId The 16-byte ID of the profile to switch to (use DEFAULT_PROFILE_ID for default).\n */\n async switchProfile(profileId) {\n if (!this.authenticated || !this.rootPrimaryKey || !this.rootPrivilegedKeyManager) {\n throw new Error('Cannot switch profile: Wallet not authenticated or root keys missing.');\n }\n let profilePrimaryKey;\n let profilePrivilegedPad; // Pad for the target profile\n if (profileId.every(x => x === 0)) {\n // Switching to default profile\n profilePrimaryKey = this.rootPrimaryKey;\n profilePrivilegedPad = undefined; // No pad for default\n this.activeProfileId = exports.DEFAULT_PROFILE_ID;\n }\n else {\n // Switching to a non-default profile\n const profile = this.profiles.find(p => p.id.every((x, i) => x === profileId[i]));\n if (!profile) {\n throw new Error('Profile not found.');\n }\n profilePrimaryKey = this.XOR(this.rootPrimaryKey, profile.primaryPad);\n profilePrivilegedPad = profile.privilegedPad;\n this.activeProfileId = profileId;\n }\n // Create a *profile-specific* PrivilegedKeyManager.\n // It uses the ROOT manager internally but applies the profile's pad.\n const profilePrivilegedKeyManager = new PrivilegedKeyManager_1.PrivilegedKeyManager(async (reason) => {\n // Request the ROOT privileged key using the root manager\n const rootPrivileged = await this.rootPrivilegedKeyManager.getPrivilegedKey(reason);\n const rootPrivilegedBytes = rootPrivileged.toArray();\n // Apply the profile's pad if applicable\n const profilePrivilegedBytes = profilePrivilegedPad\n ? 
this.XOR(rootPrivilegedBytes, profilePrivilegedPad)\n : rootPrivilegedBytes;\n return new sdk_1.PrivateKey(profilePrivilegedBytes);\n });\n // Build the underlying wallet for the specific profile\n this.underlying = await this.walletBuilder(profilePrimaryKey, profilePrivilegedKeyManager, // Pass the profile-specific manager\n this.activeProfileId // Pass the ID of the profile being activated\n );\n }\n // --- Key Management Methods ---\n /**\n * Changes the user's password. Re-wraps keys and updates the UMP token.\n */\n async changePassword(newPassword) {\n if (!this.authenticated || !this.currentUMPToken || !this.rootPrimaryKey || !this.rootPrivilegedKeyManager) {\n throw new Error('Not authenticated or missing required data.');\n }\n const passwordSalt = (0, sdk_1.Random)(32);\n const newPasswordKey = await pbkdf2NativeOrJs(sdk_1.Utils.toArray(newPassword, 'utf8'), passwordSalt, exports.PBKDF2_NUM_ROUNDS, 32, 'sha512');\n // Decrypt existing factors needed for re-encryption, using the *root* privileged key manager\n const recoveryKey = await this.getFactor('recoveryKey');\n const presentationKey = await this.getFactor('presentationKey');\n const rootPrivilegedKey = await this.getFactor('privilegedKey'); // Get ROOT privileged key\n await this.updateAuthFactors(passwordSalt, newPasswordKey, presentationKey, recoveryKey, this.rootPrimaryKey, rootPrivilegedKey, // Pass the explicitly fetched root key\n this.profiles // Preserve existing profiles\n );\n }\n /**\n * Retrieves the current recovery key. Requires privileged access.\n */\n async getRecoveryKey() {\n if (!this.authenticated || !this.currentUMPToken || !this.rootPrivilegedKeyManager) {\n throw new Error('Not authenticated or missing required data.');\n }\n return this.getFactor('recoveryKey');\n }\n /**\n * Changes the user's recovery key. 
Prompts user to save the new key.\n */\n async changeRecoveryKey() {\n if (!this.authenticated || !this.currentUMPToken || !this.rootPrimaryKey || !this.rootPrivilegedKeyManager) {\n throw new Error('Not authenticated or missing required data.');\n }\n // Decrypt existing factors needed\n const passwordKey = await this.getFactor('passwordKey');\n const presentationKey = await this.getFactor('presentationKey');\n const rootPrivilegedKey = await this.getFactor('privilegedKey'); // Get ROOT privileged key\n // Generate and save new recovery key\n const newRecoveryKey = (0, sdk_1.Random)(32);\n await this.recoveryKeySaver(newRecoveryKey);\n await this.updateAuthFactors(this.currentUMPToken.passwordSalt, passwordKey, presentationKey, newRecoveryKey, // Use the new key\n this.rootPrimaryKey, rootPrivilegedKey, this.profiles // Preserve profiles\n );\n }\n /**\n * Changes the user's presentation key.\n */\n async changePresentationKey(newPresentationKey) {\n if (!this.authenticated || !this.currentUMPToken || !this.rootPrimaryKey || !this.rootPrivilegedKeyManager) {\n throw new Error('Not authenticated or missing required data.');\n }\n if (newPresentationKey.length !== 32) {\n throw new Error('Presentation key must be 32 bytes.');\n }\n // Decrypt existing factors\n const recoveryKey = await this.getFactor('recoveryKey');\n const passwordKey = await this.getFactor('passwordKey');\n const rootPrivilegedKey = await this.getFactor('privilegedKey'); // Get ROOT privileged key\n await this.updateAuthFactors(this.currentUMPToken.passwordSalt, passwordKey, newPresentationKey, // Use the new key\n recoveryKey, this.rootPrimaryKey, rootPrivilegedKey, this.profiles // Preserve profiles\n );\n // Update the temporarily stored key if it was set\n if (this.presentationKey) {\n this.presentationKey = newPresentationKey;\n }\n }\n // --- Internal Helper Methods ---\n /**\n * Performs XOR operation on two byte arrays.\n */\n XOR(n1, n2) {\n if (n1.length !== n2.length) {\n // Provide 
more context in error\n throw new Error(`XOR length mismatch: ${n1.length} vs ${n2.length}`);\n }\n const r = new Array(n1.length);\n for (let i = 0; i < n1.length; i++) {\n r[i] = n1[i] ^ n2[i];\n }\n return r;\n }\n /**\n * Helper to decrypt a specific factor (key) stored encrypted in the UMP token.\n * Requires the root privileged key manager.\n * @param factorName Name of the factor to decrypt ('passwordKey', 'presentationKey', 'recoveryKey', 'privilegedKey').\n * @param getRoot If true and factorName is 'privilegedKey', returns the root privileged key bytes directly.\n * @returns The decrypted key bytes.\n */\n async getFactor(factorName) {\n if (!this.authenticated || !this.currentUMPToken || !this.rootPrivilegedKeyManager) {\n throw new Error(`Cannot get factor \"${factorName}\": Wallet not ready.`);\n }\n const protocolID = [2, 'admin key wrapping']; // Protocol used for encrypting factors\n const keyID = '1'; // Key ID used\n try {\n switch (factorName) {\n case 'passwordKey':\n return (await this.rootPrivilegedKeyManager.decrypt({\n ciphertext: this.currentUMPToken.passwordKeyEncrypted,\n protocolID,\n keyID\n })).plaintext;\n case 'presentationKey':\n return (await this.rootPrivilegedKeyManager.decrypt({\n ciphertext: this.currentUMPToken.presentationKeyEncrypted,\n protocolID,\n keyID\n })).plaintext;\n case 'recoveryKey':\n return (await this.rootPrivilegedKeyManager.decrypt({\n ciphertext: this.currentUMPToken.recoveryKeyEncrypted,\n protocolID,\n keyID\n })).plaintext;\n case 'privilegedKey': {\n // This needs careful handling based on whether the ROOT or PROFILE key is needed.\n // This helper is mostly used for UMP updates, which need the ROOT key.\n // We retrieve the PrivateKey object first.\n const pk = await this.rootPrivilegedKeyManager.getPrivilegedKey('UMP token update', true); // Force retrieval of root key\n return pk.toArray(); // Return bytes\n }\n default:\n throw new Error(`Unknown factor name: ${factorName}`);\n }\n }\n catch (error) 
{\n console.error(`Error decrypting factor ${factorName}:`, error);\n throw new Error(`Failed to decrypt factor \"${factorName}\": ${error.message}`);\n }\n }\n /**\n * Recomputes UMP token fields with updated factors and profiles, then publishes the update.\n * This operation requires the *root* privileged key and the *default* profile wallet.\n */\n async updateAuthFactors(passwordSalt, passwordKey, presentationKey, recoveryKey, rootPrimaryKey, rootPrivilegedKey, // Explicitly pass the root key bytes\n profiles // Pass current/new profiles list\n ) {\n if (!this.authenticated || !this.rootPrimaryKey || !this.currentUMPToken) {\n throw new Error('Wallet is not properly authenticated or missing data for update.');\n }\n // Ensure we have the OLD token to consume\n const oldTokenToConsume = { ...this.currentUMPToken };\n if (!oldTokenToConsume.currentOutpoint) {\n throw new Error('Cannot update UMP token: Old token has no outpoint.');\n }\n // Derive symmetrical encryption keys using XOR for the *root* keys\n const presentationPassword = new sdk_1.SymmetricKey(this.XOR(presentationKey, passwordKey));\n const presentationRecovery = new sdk_1.SymmetricKey(this.XOR(presentationKey, recoveryKey));\n const recoveryPassword = new sdk_1.SymmetricKey(this.XOR(recoveryKey, passwordKey));\n const primaryPassword = new sdk_1.SymmetricKey(this.XOR(rootPrimaryKey, passwordKey)); // Use rootPrimaryKey\n // Build a temporary privileged key manager using the explicit ROOT privileged key\n const tempRootPrivilegedKeyManager = new PrivilegedKeyManager_1.PrivilegedKeyManager(async () => new sdk_1.PrivateKey(rootPrivilegedKey));\n // Encrypt profiles if provided\n let profilesEncrypted;\n if (profiles && profiles.length > 0) {\n const profilesJson = JSON.stringify(profiles);\n const profilesBytes = sdk_1.Utils.toArray(profilesJson, 'utf8');\n profilesEncrypted = new sdk_1.SymmetricKey(rootPrimaryKey).encrypt(profilesBytes);\n }\n // Construct the new UMP token data\n const newTokenData 
= {\n passwordSalt,\n passwordPresentationPrimary: presentationPassword.encrypt(rootPrimaryKey),\n passwordRecoveryPrimary: recoveryPassword.encrypt(rootPrimaryKey),\n presentationRecoveryPrimary: presentationRecovery.encrypt(rootPrimaryKey),\n passwordPrimaryPrivileged: primaryPassword.encrypt(rootPrivilegedKey),\n presentationRecoveryPrivileged: presentationRecovery.encrypt(rootPrivilegedKey),\n presentationHash: sdk_1.Hash.sha256(presentationKey),\n recoveryHash: sdk_1.Hash.sha256(recoveryKey),\n presentationKeyEncrypted: (await tempRootPrivilegedKeyManager.encrypt({\n plaintext: presentationKey,\n protocolID: [2, 'admin key wrapping'],\n keyID: '1'\n })).ciphertext,\n passwordKeyEncrypted: (await tempRootPrivilegedKeyManager.encrypt({\n plaintext: passwordKey,\n protocolID: [2, 'admin key wrapping'],\n keyID: '1'\n })).ciphertext,\n recoveryKeyEncrypted: (await tempRootPrivilegedKeyManager.encrypt({\n plaintext: recoveryKey,\n protocolID: [2, 'admin key wrapping'],\n keyID: '1'\n })).ciphertext,\n profilesEncrypted // Add encrypted profiles\n // currentOutpoint will be set after publishing\n };\n // We need the wallet built for the DEFAULT profile to publish the UMP token.\n // If the current active profile is not default, temporarily switch, publish, then switch back.\n const currentActiveId = this.activeProfileId;\n let walletToUse = this.underlying;\n if (!currentActiveId.every(x => x === 0)) {\n console.log('Temporarily switching to default profile to update UMP token...');\n await this.switchProfile(exports.DEFAULT_PROFILE_ID); // This rebuilds this.underlying\n walletToUse = this.underlying;\n }\n if (!walletToUse) {\n throw new Error('Default profile wallet could not be activated for UMP token update.');\n }\n // Publish the new token on-chain, consuming the old one\n try {\n newTokenData.currentOutpoint = await this.UMPTokenInteractor.buildAndSend(walletToUse, this.adminOriginator, newTokenData, oldTokenToConsume // Consume the previous token\n );\n // 
Update the manager's state\n this.currentUMPToken = newTokenData;\n // Profiles are already updated in this.profiles if they were passed in\n }\n finally {\n // Switch back if we temporarily switched\n if (!currentActiveId.every(x => x === 0)) {\n console.log('Switching back to original profile...');\n await this.switchProfile(currentActiveId);\n }\n }\n }\n /**\n * Serializes a UMP token to binary format (Version 2 with optional profiles).\n * Layout: [1 byte version=2] + [11 * (varint len + bytes) for standard fields] + [1 byte profile_flag] + [IF flag=1 THEN varint len + profile bytes] + [varint len + outpoint bytes]\n */\n serializeUMPToken(token) {\n if (!token.currentOutpoint) {\n throw new Error('Token must have outpoint for serialization');\n }\n const writer = new sdk_1.Utils.Writer();\n writer.writeUInt8(2); // Version 2\n const writeArray = (arr) => {\n writer.writeVarIntNum(arr.length);\n writer.write(arr);\n };\n // Write standard fields in specific order\n writeArray(token.passwordSalt); // 0\n writeArray(token.passwordPresentationPrimary); // 1\n writeArray(token.passwordRecoveryPrimary); // 2\n writeArray(token.presentationRecoveryPrimary); // 3\n writeArray(token.passwordPrimaryPrivileged); // 4\n writeArray(token.presentationRecoveryPrivileged); // 5\n writeArray(token.presentationHash); // 6\n writeArray(token.recoveryHash); // 7\n writeArray(token.presentationKeyEncrypted); // 8\n writeArray(token.passwordKeyEncrypted); // 9 - Swapped order vs original doc comment\n writeArray(token.recoveryKeyEncrypted); // 10\n // Write optional profiles field\n if (token.profilesEncrypted && token.profilesEncrypted.length > 0) {\n writer.writeUInt8(1); // Flag indicating profiles present\n writeArray(token.profilesEncrypted);\n }\n else {\n writer.writeUInt8(0); // Flag indicating no profiles\n }\n // Write outpoint string\n const outpointBytes = sdk_1.Utils.toArray(token.currentOutpoint, 'utf8');\n writer.writeVarIntNum(outpointBytes.length);\n 
writer.write(outpointBytes);\n return writer.toArray();\n }\n /**\n * Deserializes a UMP token from binary format (Handles Version 1 and 2).\n */\n deserializeUMPToken(bin) {\n const reader = new sdk_1.Utils.Reader(bin);\n const version = reader.readUInt8();\n if (version !== 1 && version !== 2) {\n throw new Error(`Unsupported UMP token serialization version: ${version}`);\n }\n const readArray = () => {\n const length = reader.readVarIntNum();\n return reader.read(length);\n };\n // Read standard fields (order matches serialization V2)\n const passwordSalt = readArray(); // 0\n const passwordPresentationPrimary = readArray(); // 1\n const passwordRecoveryPrimary = readArray(); // 2\n const presentationRecoveryPrimary = readArray(); // 3\n const passwordPrimaryPrivileged = readArray(); // 4\n const presentationRecoveryPrivileged = readArray(); // 5\n const presentationHash = readArray(); // 6\n const recoveryHash = readArray(); // 7\n const presentationKeyEncrypted = readArray(); // 8\n const passwordKeyEncrypted = readArray(); // 9\n const recoveryKeyEncrypted = readArray(); // 10\n // Read optional profiles (only in V2)\n let profilesEncrypted;\n if (version === 2) {\n const profilesFlag = reader.readUInt8();\n if (profilesFlag === 1) {\n profilesEncrypted = readArray();\n }\n }\n // Read outpoint string\n const outpointLen = reader.readVarIntNum();\n const outpointBytes = reader.read(outpointLen);\n const currentOutpoint = sdk_1.Utils.toUTF8(outpointBytes);\n const token = {\n passwordSalt,\n passwordPresentationPrimary,\n passwordRecoveryPrimary,\n presentationRecoveryPrimary,\n passwordPrimaryPrivileged,\n presentationRecoveryPrivileged,\n presentationHash,\n recoveryHash,\n presentationKeyEncrypted,\n passwordKeyEncrypted, // Corrected order\n recoveryKeyEncrypted,\n profilesEncrypted, // May be undefined\n currentOutpoint\n };\n return token;\n }\n /**\n * Sets up the root key infrastructure after authentication or loading from snapshot.\n * Initializes the 
root primary key, root privileged key manager, loads profiles,\n * and sets the authenticated flag. Does NOT switch profile initially.\n *\n * @param rootPrimaryKey The user's root primary key (32 bytes).\n * @param ephemeralRootPrivilegedKey Optional root privileged key (e.g., during recovery flows).\n */\n async setupRootInfrastructure(rootPrimaryKey, ephemeralRootPrivilegedKey) {\n if (!this.currentUMPToken) {\n throw new Error('A UMP token must exist before setting up root infrastructure!');\n }\n this.rootPrimaryKey = rootPrimaryKey;\n // Store ephemeral key if provided, for one-time use by the manager\n let oneTimePrivilegedKey = ephemeralRootPrivilegedKey\n ? new sdk_1.PrivateKey(ephemeralRootPrivilegedKey)\n : undefined;\n // Create the ROOT PrivilegedKeyManager\n this.rootPrivilegedKeyManager = new PrivilegedKeyManager_1.PrivilegedKeyManager(async (reason) => {\n // 1. Use one-time key if available (for recovery)\n if (oneTimePrivilegedKey) {\n const tempKey = oneTimePrivilegedKey;\n oneTimePrivilegedKey = undefined; // Consume it\n return tempKey;\n }\n // 2. 
Otherwise, derive from password\n const password = await this.passwordRetriever(reason, (passwordCandidate) => {\n try {\n const derivedPasswordKey = sdk_1.Hash.pbkdf2(sdk_1.Utils.toArray(passwordCandidate, 'utf8'), this.currentUMPToken.passwordSalt, exports.PBKDF2_NUM_ROUNDS, 32, 'sha512');\n const privilegedDecryptor = this.XOR(this.rootPrimaryKey, derivedPasswordKey);\n const decryptedPrivileged = new sdk_1.SymmetricKey(privilegedDecryptor).decrypt(this.currentUMPToken.passwordPrimaryPrivileged);\n return !!decryptedPrivileged; // Test passes if decryption works\n }\n catch (e) {\n return false;\n }\n });\n // Decrypt the root privileged key using the confirmed password\n const derivedPasswordKey = await pbkdf2NativeOrJs(sdk_1.Utils.toArray(password, 'utf8'), this.currentUMPToken.passwordSalt, exports.PBKDF2_NUM_ROUNDS, 32, 'sha512');\n const privilegedDecryptor = this.XOR(this.rootPrimaryKey, derivedPasswordKey);\n const rootPrivilegedBytes = new sdk_1.SymmetricKey(privilegedDecryptor).decrypt(this.currentUMPToken.passwordPrimaryPrivileged);\n return new sdk_1.PrivateKey(rootPrivilegedBytes); // Return the ROOT key object\n });\n // Decrypt and load profiles if present in the token\n this.profiles = []; // Clear existing profiles before loading\n if (this.currentUMPToken.profilesEncrypted && this.currentUMPToken.profilesEncrypted.length > 0) {\n try {\n const decryptedProfileBytes = new sdk_1.SymmetricKey(rootPrimaryKey).decrypt(this.currentUMPToken.profilesEncrypted);\n const profilesJson = sdk_1.Utils.toUTF8(decryptedProfileBytes);\n this.profiles = JSON.parse(profilesJson);\n }\n catch (error) {\n console.error('Failed to decrypt or parse profiles:', error);\n // Decide if this should be fatal or just log and continue without profiles\n this.profiles = []; // Ensure profiles are empty on error\n // Optionally re-throw or handle more gracefully\n throw new Error(`Failed to load profiles: ${error.message}`);\n }\n }\n this.authenticated = true;\n // Note: We 
don't call switchProfile here anymore.\n // It's called by the auth methods (providePassword/provideRecoveryKey) or loadSnapshot after this.\n }\n /*\n * ---------------------------------------------------------------------------------------\n * Standard WalletInterface methods proxying to the *active* underlying wallet.\n * Includes authentication checks and admin originator protection.\n * ---------------------------------------------------------------------------------------\n */\n checkAuthAndUnderlying(originator) {\n if (!this.authenticated) {\n throw new Error('User is not authenticated.');\n }\n if (!this.underlying) {\n // This might happen if authentication succeeded but profile switching failed\n throw new Error('Underlying wallet for the active profile is not initialized.');\n }\n if (originator === this.adminOriginator) {\n throw new Error('External applications are not allowed to use the admin originator.');\n }\n }\n // Example proxy method (repeat pattern for all others)\n async getPublicKey(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.getPublicKey(args, originator);\n }\n async revealCounterpartyKeyLinkage(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.revealCounterpartyKeyLinkage(args, originator);\n }\n async revealSpecificKeyLinkage(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.revealSpecificKeyLinkage(args, originator);\n }\n async encrypt(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.encrypt(args, originator);\n }\n async decrypt(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.decrypt(args, originator);\n }\n async createHmac(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.createHmac(args, originator);\n }\n async verifyHmac(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return 
this.underlying.verifyHmac(args, originator);\n }\n async createSignature(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.createSignature(args, originator);\n }\n async verifySignature(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.verifySignature(args, originator);\n }\n async createAction(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.createAction(args, originator);\n }\n async signAction(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.signAction(args, originator);\n }\n async abortAction(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.abortAction(args, originator);\n }\n async listActions(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.listActions(args, originator);\n }\n async internalizeAction(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.internalizeAction(args, originator);\n }\n async listOutputs(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.listOutputs(args, originator);\n }\n async relinquishOutput(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.relinquishOutput(args, originator);\n }\n async acquireCertificate(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.acquireCertificate(args, originator);\n }\n async listCertificates(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.listCertificates(args, originator);\n }\n async proveCertificate(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.proveCertificate(args, originator);\n }\n async relinquishCertificate(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.relinquishCertificate(args, originator);\n 
}\n async discoverByIdentityKey(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.discoverByIdentityKey(args, originator);\n }\n async discoverByAttributes(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.discoverByAttributes(args, originator);\n }\n async isAuthenticated(_, originator) {\n if (!this.authenticated) {\n throw new Error('User is not authenticated.');\n }\n if (originator === this.adminOriginator) {\n throw new Error('External applications are not allowed to use the admin originator.');\n }\n return { authenticated: true };\n }\n async waitForAuthentication(_, originator) {\n if (originator === this.adminOriginator) {\n throw new Error('External applications are not allowed to use the admin originator.');\n }\n while (!this.authenticated || !this.underlying) {\n await new Promise(resolve => setTimeout(resolve, 100));\n }\n return await this.underlying.waitForAuthentication({}, originator);\n }\n async getHeight(_, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.getHeight({}, originator);\n }\n async getHeaderForHeight(args, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.getHeaderForHeight(args, originator);\n }\n async getNetwork(_, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.getNetwork({}, originator);\n }\n async getVersion(_, originator) {\n this.checkAuthAndUnderlying(originator);\n return this.underlying.getVersion({}, originator);\n }\n}\nexports.CWIStyleWalletManager = CWIStyleWalletManager;\n//# sourceMappingURL=CWIStyleWalletManager.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/CWIStyleWalletManager.js?\n}"); + +/***/ }), + +/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/SetupClient.js": +/*!************************************************************************!*\ + !*** 
./node_modules/@bsv/wallet-toolbox-mobile/out/src/SetupClient.js ***! + \************************************************************************/ +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.SetupClient = void 0;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst index_client_1 = __webpack_require__(/*! ./index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nconst StorageIdb_1 = __webpack_require__(/*! ./storage/StorageIdb */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/StorageIdb.js\");\n/**\n * The 'Setup` class provides static setup functions to construct BRC-100 compatible\n * wallets in a variety of configurations.\n *\n * It serves as a starting point for experimentation and customization.\n */\nclass SetupClient {\n /**\n * Create a `Wallet`. Storage can optionally be provided or configured later.\n *\n * The following components are configured: KeyDeriver, WalletStorageManager, WalletService, WalletStorage.\n * Optionally, PrivilegedKeyManager is also configured.\n *\n * @publicbody\n */\n static async createWallet(args) {\n const chain = args.chain;\n const rootKey = sdk_1.PrivateKey.fromHex(args.rootKeyHex);\n const identityKey = rootKey.toPublicKey().toString();\n const keyDeriver = new sdk_1.CachedKeyDeriver(rootKey);\n const storage = new index_client_1.WalletStorageManager(identityKey, args.active, args.backups);\n if (storage.canMakeAvailable())\n await storage.makeAvailable();\n const serviceOptions = index_client_1.Services.createDefaultOptions(chain);\n serviceOptions.taalApiKey = args.taalApiKey;\n const services = new index_client_1.Services(serviceOptions);\n const monopts = index_client_1.Monitor.createDefaultWalletMonitorOptions(chain, storage, services);\n const monitor = new index_client_1.Monitor(monopts);\n 
monitor.addDefaultTasks();\n const privilegedKeyManager = args.privilegedKeyGetter\n ? new index_client_1.sdk.PrivilegedKeyManager(args.privilegedKeyGetter)\n : undefined;\n const wallet = new index_client_1.Wallet({\n chain,\n keyDeriver,\n storage,\n services,\n monitor,\n privilegedKeyManager\n });\n const r = {\n rootKey,\n identityKey,\n keyDeriver,\n chain,\n storage,\n services,\n monitor,\n wallet\n };\n return r;\n }\n /**\n * Setup a new `Wallet` without requiring a .env file.\n *\n * @param args.chain - 'main' or 'test'\n * @param args.rootKeyHex - Root private key for wallet's key deriver.\n * @param args.storageUrl - Optional. `StorageClient` and `chain` compatible endpoint URL.\n * @param args.privilegedKeyGetter - Optional. Method that will return the privileged `PrivateKey`, on demand.\n */\n static async createWalletClientNoEnv(args) {\n const chain = args.chain;\n const endpointUrl = args.storageUrl || `https://${args.chain !== 'main' ? 'staging-' : ''}storage.babbage.systems`;\n const rootKey = sdk_1.PrivateKey.fromHex(args.rootKeyHex);\n const keyDeriver = new sdk_1.CachedKeyDeriver(rootKey);\n const storage = new index_client_1.WalletStorageManager(keyDeriver.identityKey);\n const services = new index_client_1.Services(chain);\n const privilegedKeyManager = args.privilegedKeyGetter\n ? new index_client_1.sdk.PrivilegedKeyManager(args.privilegedKeyGetter)\n : undefined;\n const wallet = new index_client_1.Wallet({\n chain,\n keyDeriver,\n storage,\n services,\n privilegedKeyManager\n });\n const client = new index_client_1.StorageClient(wallet, endpointUrl);\n await storage.addWalletStorageProvider(client);\n await storage.makeAvailable();\n return wallet;\n }\n /**\n * @publicbody\n */\n static async createWalletClient(args) {\n const wo = await SetupClient.createWallet(args);\n const endpointUrl = args.endpointUrl || `https://${args.chain !== 'main' ? 
'staging-' : ''}storage.babbage.systems`;\n const client = new index_client_1.StorageClient(wo.wallet, endpointUrl);\n await wo.storage.addWalletStorageProvider(client);\n await wo.storage.makeAvailable();\n return {\n ...wo,\n endpointUrl\n };\n }\n /**\n * @publicbody\n */\n static getKeyPair(priv) {\n if (priv === undefined)\n priv = sdk_1.PrivateKey.fromRandom();\n else if (typeof priv === 'string')\n priv = new sdk_1.PrivateKey(priv, 'hex');\n const pub = sdk_1.PublicKey.fromPrivateKey(priv);\n const address = pub.toAddress();\n return { privateKey: priv, publicKey: pub, address };\n }\n /**\n * @publicbody\n */\n static getLockP2PKH(address) {\n const p2pkh = new sdk_1.P2PKH();\n const lock = p2pkh.lock(address);\n return lock;\n }\n /**\n * @publicbody\n */\n static getUnlockP2PKH(priv, satoshis) {\n const p2pkh = new sdk_1.P2PKH();\n const lock = SetupClient.getLockP2PKH(SetupClient.getKeyPair(priv).address);\n // Prepare to pay with SIGHASH_ALL and without ANYONE_CAN_PAY.\n // In otherwords:\n // - all outputs must remain in the current order, amount and locking scripts.\n // - all inputs must remain from the current outpoints and sequence numbers.\n // (unlock scripts are never signed)\n const unlock = p2pkh.unlock(priv, 'all', false, satoshis, lock);\n return unlock;\n }\n /**\n * @publicbody\n */\n static createP2PKHOutputs(outputs) {\n const os = [];\n const count = outputs.length;\n for (let i = 0; i < count; i++) {\n const o = outputs[i];\n os.push({\n basket: o.basket,\n tags: o.tags,\n satoshis: o.satoshis,\n lockingScript: SetupClient.getLockP2PKH(o.address).toHex(),\n outputDescription: o.outputDescription || `p2pkh ${i}`\n });\n }\n return os;\n }\n /**\n * @publicbody\n */\n static async createP2PKHOutputsAction(wallet, outputs, options) {\n const os = SetupClient.createP2PKHOutputs(outputs);\n const createArgs = {\n description: `createP2PKHOutputs`,\n outputs: os,\n options: {\n ...options,\n // Don't randomize so we can simplify outpoint 
creation\n randomizeOutputs: false\n }\n };\n const cr = await wallet.createAction(createArgs);\n let outpoints;\n if (cr.txid) {\n outpoints = os.map((o, i) => `${cr.txid}.${i}`);\n }\n return { cr, outpoints };\n }\n /**\n * @publicbody\n */\n static async fundWalletFromP2PKHOutpoints(wallet, outpoints, p2pkhKey, inputBEEF) {\n // TODO\n }\n /**\n * Adds `indexedDB` based storage to a `Wallet` configured by `SetupClient.createWalletOnly`\n *\n * @param args.databaseName Name for this storage. For MySQL, the schema name within the MySQL instance.\n * @param args.chain Which chain this wallet is on: 'main' or 'test'. Defaults to 'test'.\n * @param args.rootKeyHex\n *\n * @publicbody\n */\n static async createWalletIdb(args) {\n const wo = await SetupClient.createWallet(args);\n const activeStorage = await SetupClient.createStorageIdb(args);\n await wo.storage.addWalletStorageProvider(activeStorage);\n const { user, isNew } = await activeStorage.findOrInsertUser(wo.identityKey);\n const userId = user.userId;\n const r = {\n ...wo,\n activeStorage,\n userId\n };\n return r;\n }\n /**\n * @returns {StorageIdb} - `Knex` based storage provider for a wallet. 
May be used for either active storage or backup storage.\n */\n static async createStorageIdb(args) {\n const storage = new StorageIdb_1.StorageIdb({\n chain: args.chain,\n commissionSatoshis: 0,\n commissionPubKeyHex: undefined,\n feeModel: { model: 'sat/kb', value: 1 }\n });\n await storage.migrate(args.databaseName, (0, index_client_1.randomBytesHex)(33));\n await storage.makeAvailable();\n return storage;\n }\n}\nexports.SetupClient = SetupClient;\n//# sourceMappingURL=SetupClient.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/SetupClient.js?\n}"); + +/***/ }), + +/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/SetupWallet.js": +/*!************************************************************************!*\ + !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/SetupWallet.js ***! + \************************************************************************/ +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\n//# sourceMappingURL=SetupWallet.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/SetupWallet.js?\n}"); /***/ }), @@ -2758,10 +2791,10 @@ /*!*******************************************************************!*\ !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/Wallet.js ***! \*******************************************************************/ -/***/ ((__unused_webpack_module, exports, __webpack_require__) => { +/***/ (function(__unused_webpack_module, exports, __webpack_require__) { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.Wallet = void 0;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst acquireDirectCertificate_1 = __webpack_require__(/*! 
./signer/methods/acquireDirectCertificate */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/signer/methods/acquireDirectCertificate.js\");\nconst proveCertificate_1 = __webpack_require__(/*! ./signer/methods/proveCertificate */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/signer/methods/proveCertificate.js\");\nconst createAction_1 = __webpack_require__(/*! ./signer/methods/createAction */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/signer/methods/createAction.js\");\nconst signAction_1 = __webpack_require__(/*! ./signer/methods/signAction */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/signer/methods/signAction.js\");\nconst internalizeAction_1 = __webpack_require__(/*! ./signer/methods/internalizeAction */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/signer/methods/internalizeAction.js\");\nconst WalletSettingsManager_1 = __webpack_require__(/*! ./WalletSettingsManager */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/WalletSettingsManager.js\");\nconst identityUtils_1 = __webpack_require__(/*! ./utility/identityUtils */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/identityUtils.js\");\nconst generateChange_1 = __webpack_require__(/*! ./storage/methods/generateChange */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/generateChange.js\");\nconst utilityHelpers_1 = __webpack_require__(/*! ./utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst ScriptTemplateBRC29_1 = __webpack_require__(/*! ./utility/ScriptTemplateBRC29 */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/ScriptTemplateBRC29.js\");\nconst types_1 = __webpack_require__(/*! ./sdk/types */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/types.js\");\nconst WERR_errors_1 = __webpack_require__(/*! ./sdk/WERR_errors */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WERR_errors.js\");\nconst validationHelpers_1 = __webpack_require__(/*! 
./sdk/validationHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/validationHelpers.js\");\nconst WalletError_1 = __webpack_require__(/*! ./sdk/WalletError */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WalletError.js\");\nfunction isWalletSigner(args) {\n return args['isWalletSigner'];\n}\nclass Wallet {\n constructor(argsOrSigner, services, monitor, privilegedKeyManager) {\n /**\n * If true, signableTransactions will include sourceTransaction for each input,\n * including those that do not require signature and those that were also contained\n * in the inputBEEF.\n */\n this.includeAllSourceTransactions = true;\n /**\n * If true, txids that are known to the wallet's party beef do not need to be returned from storage.\n */\n this.autoKnownTxids = false;\n /**\n * If true, beefs returned to the user may contain txidOnly transactions.\n */\n this.returnTxidOnly = false;\n /**\n * For repeatability testing, set to an array of random numbers from [0..1).\n */\n this.randomVals = undefined;\n const args = !isWalletSigner(argsOrSigner)\n ? 
argsOrSigner\n : {\n chain: argsOrSigner.chain,\n keyDeriver: argsOrSigner.keyDeriver,\n storage: argsOrSigner.storage,\n services,\n monitor,\n privilegedKeyManager\n };\n if (args.storage._authId.identityKey != args.keyDeriver.identityKey)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('storage', `authenticated as the same identityKey (${args.storage._authId.identityKey}) as the keyDeriver (${args.keyDeriver.identityKey}).`);\n this.settingsManager = args.settingsManager || new WalletSettingsManager_1.WalletSettingsManager(this);\n this.chain = args.chain;\n this.lookupResolver =\n args.lookupResolver ||\n new sdk_1.LookupResolver({\n networkPreset: (0, utilityHelpers_1.toWalletNetwork)(this.chain)\n });\n this.keyDeriver = args.keyDeriver;\n this.storage = args.storage;\n this.proto = new sdk_1.ProtoWallet(args.keyDeriver);\n this.services = args.services;\n this.monitor = args.monitor;\n this.privilegedKeyManager = args.privilegedKeyManager;\n this.identityKey = this.keyDeriver.identityKey;\n this.pendingSignActions = {};\n this.userParty = `user ${this.getClientChangeKeyPair().publicKey}`;\n this.beef = new sdk_1.BeefParty([this.userParty]);\n this.trustSelf = 'known';\n if (this.services) {\n this.storage.setServices(this.services);\n }\n }\n async destroy() {\n await this.storage.destroy();\n if (this.privilegedKeyManager)\n await this.privilegedKeyManager.destroyKey();\n }\n getClientChangeKeyPair() {\n const kp = {\n privateKey: this.keyDeriver.rootKey.toString(),\n publicKey: this.keyDeriver.rootKey.toPublicKey().toString()\n };\n return kp;\n }\n async getIdentityKey() {\n return (await this.getPublicKey({ identityKey: true })).publicKey;\n }\n getPublicKey(args, originator) {\n if (args.privileged) {\n if (!this.privilegedKeyManager) {\n throw new Error('Privileged operations require the Wallet to be configured with a privileged key manager.');\n }\n return this.privilegedKeyManager.getPublicKey(args);\n }\n return this.proto.getPublicKey(args);\n }\n 
revealCounterpartyKeyLinkage(args, originator) {\n if (args.privileged) {\n if (!this.privilegedKeyManager) {\n throw new Error('Privileged operations require the Wallet to be configured with a privileged key manager.');\n }\n return this.privilegedKeyManager.revealCounterpartyKeyLinkage(args);\n }\n return this.proto.revealCounterpartyKeyLinkage(args);\n }\n revealSpecificKeyLinkage(args, originator) {\n if (args.privileged) {\n if (!this.privilegedKeyManager) {\n throw new Error('Privileged operations require the Wallet to be configured with a privileged key manager.');\n }\n return this.privilegedKeyManager.revealSpecificKeyLinkage(args);\n }\n return this.proto.revealSpecificKeyLinkage(args);\n }\n encrypt(args, originator) {\n if (args.privileged) {\n if (!this.privilegedKeyManager) {\n throw new Error('Privileged operations require the Wallet to be configured with a privileged key manager.');\n }\n return this.privilegedKeyManager.encrypt(args);\n }\n return this.proto.encrypt(args);\n }\n decrypt(args, originator) {\n if (args.privileged) {\n if (!this.privilegedKeyManager) {\n throw new Error('Privileged operations require the Wallet to be configured with a privileged key manager.');\n }\n return this.privilegedKeyManager.decrypt(args);\n }\n return this.proto.decrypt(args);\n }\n createHmac(args, originator) {\n if (args.privileged) {\n if (!this.privilegedKeyManager) {\n throw new Error('Privileged operations require the Wallet to be configured with a privileged key manager.');\n }\n return this.privilegedKeyManager.createHmac(args);\n }\n return this.proto.createHmac(args);\n }\n verifyHmac(args, originator) {\n if (args.privileged) {\n if (!this.privilegedKeyManager) {\n throw new Error('Privileged operations require the Wallet to be configured with a privileged key manager.');\n }\n return this.privilegedKeyManager.verifyHmac(args);\n }\n return this.proto.verifyHmac(args);\n }\n createSignature(args, originator) {\n if (args.privileged) {\n if 
(!this.privilegedKeyManager) {\n throw new Error('Privileged operations require the Wallet to be configured with a privileged key manager.');\n }\n return this.privilegedKeyManager.createSignature(args);\n }\n return this.proto.createSignature(args);\n }\n verifySignature(args, originator) {\n if (args.privileged) {\n if (!this.privilegedKeyManager) {\n throw new Error('Privileged operations require the Wallet to be configured with a privileged key manager.');\n }\n return this.privilegedKeyManager.verifySignature(args);\n }\n return this.proto.verifySignature(args);\n }\n getServices() {\n if (!this.services)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('services', 'valid in constructor arguments to be retreived here.');\n return this.services;\n }\n /**\n * @returns the full list of txids whose validity this wallet claims to know.\n *\n * @param newKnownTxids Optional. Additional new txids known to be valid by the caller to be merged.\n */\n getKnownTxids(newKnownTxids) {\n if (newKnownTxids) {\n for (const txid of newKnownTxids)\n this.beef.mergeTxidOnly(txid);\n }\n const r = this.beef.sortTxs();\n const knownTxids = r.valid;\n return knownTxids;\n }\n getStorageIdentity() {\n const s = this.storage.getSettings();\n return {\n storageIdentityKey: s.storageIdentityKey,\n storageName: s.storageName\n };\n }\n validateAuthAndArgs(args, validate) {\n const vargs = validate(args);\n const auth = { identityKey: this.identityKey };\n return { vargs, auth };\n }\n //////////////////\n // List Methods\n //////////////////\n async listActions(args, originator) {\n (0, validationHelpers_1.validateOriginator)(originator);\n const { vargs } = this.validateAuthAndArgs(args, validationHelpers_1.validateListActionsArgs);\n const r = await this.storage.listActions(vargs);\n return r;\n }\n get storageParty() {\n return `storage ${this.getStorageIdentity().storageIdentityKey}`;\n }\n async listOutputs(args, originator) {\n (0, 
validationHelpers_1.validateOriginator)(originator);\n const { vargs } = this.validateAuthAndArgs(args, validationHelpers_1.validateListOutputsArgs);\n if (this.autoKnownTxids && !vargs.knownTxids) {\n vargs.knownTxids = this.getKnownTxids();\n }\n const r = await this.storage.listOutputs(vargs);\n if (r.BEEF) {\n this.beef.mergeBeefFromParty(this.storageParty, r.BEEF);\n r.BEEF = this.verifyReturnedTxidOnlyBEEF(r.BEEF);\n }\n return r;\n }\n async listCertificates(args, originator) {\n (0, validationHelpers_1.validateOriginator)(originator);\n const { vargs } = this.validateAuthAndArgs(args, validationHelpers_1.validateListCertificatesArgs);\n const r = await this.storage.listCertificates(vargs);\n return r;\n }\n //////////////////\n // Certificates\n //////////////////\n async acquireCertificate(args, originator) {\n (0, validationHelpers_1.validateOriginator)(originator);\n if (args.acquisitionProtocol === 'direct') {\n const { auth, vargs } = this.validateAuthAndArgs(args, validationHelpers_1.validateAcquireDirectCertificateArgs);\n vargs.subject = (await this.getPublicKey({\n identityKey: true,\n privileged: args.privileged,\n privilegedReason: args.privilegedReason\n })).publicKey;\n try {\n // Confirm that the information received adds up to a usable certificate...\n // TODO: Clean up MasterCertificate to support decrypt on instance\n const cert = new sdk_1.MasterCertificate(vargs.type, vargs.serialNumber, vargs.subject, vargs.certifier, vargs.revocationOutpoint, vargs.fields, vargs.keyringForSubject, vargs.signature);\n await cert.verify();\n // Verify certificate details\n await sdk_1.MasterCertificate.decryptFields(this, vargs.keyringForSubject, vargs.fields, vargs.certifier, vargs.privileged, vargs.privilegedReason);\n }\n catch (eu) {\n const e = WalletError_1.WalletError.fromUnknown(eu);\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('args', `valid encrypted and signed certificate and keyring from revealer. 
${e.name}: ${e.message}`);\n }\n const r = await (0, acquireDirectCertificate_1.acquireDirectCertificate)(this, auth, vargs);\n return r;\n }\n if (args.acquisitionProtocol === 'issuance') {\n const { auth, vargs } = this.validateAuthAndArgs(args, validationHelpers_1.validateAcquireIssuanceCertificateArgs);\n // Create a random nonce that the server can verify\n const clientNonce = await (0, sdk_1.createNonce)(this, vargs.certifier);\n // TODO: Consider adding support to request certificates from a certifier before acquiring a certificate.\n const authClient = new sdk_1.AuthFetch(this);\n // Create a certificate master keyring\n // The certifier is able to decrypt these fields as they are the counterparty\n const { certificateFields, masterKeyring } = await sdk_1.MasterCertificate.createCertificateFields(this, vargs.certifier, vargs.fields);\n // Make a Certificate Signing Request (CSR) to the certifier\n const response = await authClient.fetch(`${vargs.certifierUrl}/signCertificate`, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json'\n },\n body: JSON.stringify({\n clientNonce,\n type: vargs.type,\n fields: certificateFields,\n masterKeyring\n })\n });\n if (response.headers.get('x-bsv-auth-identity-key') !== vargs.certifier) {\n throw new Error(`Invalid certifier! 
Expected: ${vargs.certifier}, Received: ${response.headers.get('x-bsv-auth-identity-key')}`);\n }\n const { certificate, serverNonce } = await response.json();\n // Validate the server response\n if (!certificate) {\n throw new Error('No certificate received from certifier!');\n }\n if (!serverNonce) {\n throw new Error('No serverNonce received from certifier!');\n }\n const signedCertificate = new sdk_1.Certificate(certificate.type, certificate.serialNumber, certificate.subject, certificate.certifier, certificate.revocationOutpoint, certificate.fields, certificate.signature);\n // Validate server nonce\n await (0, sdk_1.verifyNonce)(serverNonce, this, vargs.certifier);\n // Verify the server included our nonce\n const { valid } = await this.verifyHmac({\n hmac: sdk_1.Utils.toArray(signedCertificate.serialNumber, 'base64'),\n data: sdk_1.Utils.toArray(clientNonce + serverNonce, 'base64'),\n protocolID: [2, 'certificate issuance'],\n keyID: serverNonce + clientNonce,\n counterparty: vargs.certifier\n });\n if (!valid)\n throw new Error('Invalid serialNumber');\n // Validate the certificate received\n if (signedCertificate.type !== vargs.type) {\n throw new Error(`Invalid certificate type! Expected: ${vargs.type}, Received: ${signedCertificate.type}`);\n }\n if (signedCertificate.subject !== this.identityKey) {\n throw new Error(`Invalid certificate subject! Expected: ${this.identityKey}, Received: ${signedCertificate.subject}`);\n }\n if (signedCertificate.certifier !== vargs.certifier) {\n throw new Error(`Invalid certifier! Expected: ${vargs.certifier}, Received: ${signedCertificate.certifier}`);\n }\n if (!signedCertificate.revocationOutpoint) {\n throw new Error(`Invalid revocationOutpoint!`);\n }\n if (Object.keys(signedCertificate.fields).length !== Object.keys(certificateFields).length) {\n throw new Error(`Fields mismatch! 
Objects have different numbers of keys.`);\n }\n for (const field of Object.keys(certificateFields)) {\n if (!(field in signedCertificate.fields)) {\n throw new Error(`Missing field: ${field} in certificate.fields`);\n }\n if (signedCertificate.fields[field] !== certificateFields[field]) {\n throw new Error(`Invalid field! Expected: ${certificateFields[field]}, Received: ${signedCertificate.fields[field]}`);\n }\n }\n await signedCertificate.verify();\n // Test decryption works\n await sdk_1.MasterCertificate.decryptFields(this, masterKeyring, certificate.fields, vargs.certifier);\n // Store the newly issued certificate\n return await (0, acquireDirectCertificate_1.acquireDirectCertificate)(this, auth, {\n ...certificate,\n keyringRevealer: 'certifier',\n keyringForSubject: masterKeyring,\n privileged: vargs.privileged\n });\n }\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('acquisitionProtocol', `valid.${args.acquisitionProtocol} is unrecognized.`);\n }\n async relinquishCertificate(args, originator) {\n (0, validationHelpers_1.validateOriginator)(originator);\n this.validateAuthAndArgs(args, validationHelpers_1.validateRelinquishCertificateArgs);\n const r = await this.storage.relinquishCertificate(args);\n return { relinquished: true };\n }\n async proveCertificate(args, originator) {\n originator = (0, validationHelpers_1.validateOriginator)(originator);\n const { auth, vargs } = this.validateAuthAndArgs(args, validationHelpers_1.validateProveCertificateArgs);\n const r = await (0, proveCertificate_1.proveCertificate)(this, auth, vargs);\n return r;\n }\n async discoverByIdentityKey(args, originator) {\n (0, validationHelpers_1.validateOriginator)(originator);\n this.validateAuthAndArgs(args, validationHelpers_1.validateDiscoverByIdentityKeyArgs);\n const trustSettings = (await this.settingsManager.get()).trustSettings;\n const results = await (0, identityUtils_1.queryOverlay)({\n identityKey: args.identityKey,\n certifiers: 
trustSettings.trustedCertifiers.map(certifier => certifier.identityKey)\n }, this.lookupResolver);\n if (!results) {\n return {\n totalCertificates: 0,\n certificates: []\n };\n }\n return (0, identityUtils_1.transformVerifiableCertificatesWithTrust)(trustSettings, results);\n }\n async discoverByAttributes(args, originator) {\n (0, validationHelpers_1.validateOriginator)(originator);\n this.validateAuthAndArgs(args, validationHelpers_1.validateDiscoverByAttributesArgs);\n const trustSettings = (await this.settingsManager.get()).trustSettings;\n const results = await (0, identityUtils_1.queryOverlay)({\n attributes: args.attributes,\n certifiers: trustSettings.trustedCertifiers.map(certifier => certifier.identityKey)\n }, this.lookupResolver);\n if (!results) {\n return {\n totalCertificates: 0,\n certificates: []\n };\n }\n return (0, identityUtils_1.transformVerifiableCertificatesWithTrust)(trustSettings, results);\n }\n verifyReturnedTxidOnly(beef, knownTxids) {\n if (this.returnTxidOnly)\n return beef;\n const onlyTxids = beef.txs.filter(btx => btx.isTxidOnly).map(btx => btx.txid);\n for (const txid of onlyTxids) {\n if (knownTxids && knownTxids.indexOf(txid) >= 0)\n continue;\n const btx = beef.findTxid(txid);\n const tx = this.beef.findAtomicTransaction(txid);\n if (!tx)\n throw new WERR_errors_1.WERR_INTERNAL(`unable to merge txid ${txid} into beef`);\n beef.mergeTransaction(tx);\n }\n for (const btx of beef.txs) {\n if (knownTxids && knownTxids.indexOf(btx.txid) >= 0)\n continue;\n if (btx.isTxidOnly)\n throw new WERR_errors_1.WERR_INTERNAL(`remaining txidOnly ${btx.txid} is not known`);\n }\n return beef;\n }\n verifyReturnedTxidOnlyAtomicBEEF(beef, knownTxids) {\n if (this.returnTxidOnly)\n return beef;\n const b = sdk_1.Beef.fromBinary(beef);\n if (!b.atomicTxid)\n throw new WERR_errors_1.WERR_INTERNAL();\n return this.verifyReturnedTxidOnly(b, knownTxids).toBinaryAtomic(b.atomicTxid);\n }\n verifyReturnedTxidOnlyBEEF(beef) {\n if (this.returnTxidOnly)\n 
return beef;\n const b = sdk_1.Beef.fromBinary(beef);\n return this.verifyReturnedTxidOnly(b).toBinary();\n }\n //////////////////\n // Actions\n //////////////////\n async createAction(args, originator) {\n var _a;\n var _b;\n (0, validationHelpers_1.validateOriginator)(originator);\n if (!args.options)\n args.options = {};\n (_b = args.options).trustSelf || (_b.trustSelf = this.trustSelf);\n if (this.autoKnownTxids && !args.options.knownTxids) {\n args.options.knownTxids = this.getKnownTxids(args.options.knownTxids);\n }\n const { auth, vargs } = this.validateAuthAndArgs(args, validationHelpers_1.validateCreateActionArgs);\n if (vargs.labels.indexOf(types_1.specOpThrowReviewActions) >= 0)\n throwDummyReviewActions();\n vargs.includeAllSourceTransactions = this.includeAllSourceTransactions;\n if (this.randomVals && this.randomVals.length > 1) {\n vargs.randomVals = [...this.randomVals];\n }\n const r = await (0, createAction_1.createAction)(this, auth, vargs);\n if (r.tx) {\n this.beef.mergeBeefFromParty(this.storageParty, r.tx);\n }\n if (r.tx)\n r.tx = this.verifyReturnedTxidOnlyAtomicBEEF(r.tx, (_a = args.options) === null || _a === void 0 ? void 0 : _a.knownTxids);\n if (!vargs.isDelayed)\n throwIfAnyUnsuccessfulCreateActions(r);\n return r;\n }\n async signAction(args, originator) {\n var _a;\n (0, validationHelpers_1.validateOriginator)(originator);\n const { auth, vargs } = this.validateAuthAndArgs(args, validationHelpers_1.validateSignActionArgs);\n // createAction options are merged with undefined signAction options before validation...\n const r = await (0, signAction_1.signAction)(this, auth, args);\n if (!vargs.isDelayed)\n throwIfAnyUnsuccessfulSignActions(r);\n const prior = this.pendingSignActions[args.reference];\n if (r.tx)\n r.tx = this.verifyReturnedTxidOnlyAtomicBEEF(r.tx, (_a = prior.args.options) === null || _a === void 0 ? 
void 0 : _a.knownTxids);\n return r;\n }\n async internalizeAction(args, originator) {\n (0, validationHelpers_1.validateOriginator)(originator);\n const { auth, vargs } = this.validateAuthAndArgs(args, validationHelpers_1.validateInternalizeActionArgs);\n if (vargs.labels.indexOf(types_1.specOpThrowReviewActions) >= 0)\n throwDummyReviewActions();\n const r = await (0, internalizeAction_1.internalizeAction)(this, auth, args);\n throwIfUnsuccessfulInternalizeAction(r);\n return r;\n }\n async abortAction(args, originator) {\n (0, validationHelpers_1.validateOriginator)(originator);\n const { auth } = this.validateAuthAndArgs(args, validationHelpers_1.validateAbortActionArgs);\n const r = await this.storage.abortAction(args);\n return r;\n }\n async relinquishOutput(args, originator) {\n (0, validationHelpers_1.validateOriginator)(originator);\n const { vargs } = this.validateAuthAndArgs(args, validationHelpers_1.validateRelinquishOutputArgs);\n const r = await this.storage.relinquishOutput(args);\n return { relinquished: true };\n }\n async isAuthenticated(args, originator) {\n (0, validationHelpers_1.validateOriginator)(originator);\n const r = {\n authenticated: true\n };\n return r;\n }\n async waitForAuthentication(args, originator) {\n (0, validationHelpers_1.validateOriginator)(originator);\n return { authenticated: true };\n }\n async getHeight(args, originator) {\n (0, validationHelpers_1.validateOriginator)(originator);\n const height = await this.getServices().getHeight();\n return { height };\n }\n async getHeaderForHeight(args, originator) {\n (0, validationHelpers_1.validateOriginator)(originator);\n const serializedHeader = await this.getServices().getHeaderForHeight(args.height);\n return { header: sdk_1.Utils.toHex(serializedHeader) };\n }\n async getNetwork(args, originator) {\n (0, validationHelpers_1.validateOriginator)(originator);\n return { network: (0, utilityHelpers_1.toWalletNetwork)(this.chain) };\n }\n async getVersion(args, originator) 
{\n (0, validationHelpers_1.validateOriginator)(originator);\n return { version: 'wallet-brc100-1.0.0' };\n }\n /**\n * Transfer all possible satoshis held by this wallet to `toWallet`.\n *\n * @param toWallet wallet which will receive this wallet's satoshis.\n */\n async sweepTo(toWallet) {\n const derivationPrefix = (0, utilityHelpers_1.randomBytesBase64)(8);\n const derivationSuffix = (0, utilityHelpers_1.randomBytesBase64)(8);\n const keyDeriver = this.keyDeriver;\n const t = new ScriptTemplateBRC29_1.ScriptTemplateBRC29({\n derivationPrefix,\n derivationSuffix,\n keyDeriver\n });\n const label = 'sweep';\n const satoshis = generateChange_1.maxPossibleSatoshis;\n const car = await this.createAction({\n outputs: [\n {\n lockingScript: t.lock(keyDeriver.rootKey.toString(), toWallet.identityKey).toHex(),\n satoshis,\n outputDescription: label,\n tags: ['relinquish'],\n customInstructions: JSON.stringify({\n derivationPrefix,\n derivationSuffix,\n type: 'BRC29'\n })\n }\n ],\n options: {\n randomizeOutputs: false,\n acceptDelayedBroadcast: false\n },\n labels: [label],\n description: label\n });\n const iar = await toWallet.internalizeAction({\n tx: car.tx,\n outputs: [\n {\n outputIndex: 0,\n protocol: 'wallet payment',\n paymentRemittance: {\n derivationPrefix,\n derivationSuffix,\n senderIdentityKey: this.identityKey\n }\n }\n ],\n description: label,\n labels: [label]\n });\n }\n /**\n * Uses `listOutputs` to iterate over chunks of up to 1000 outputs to\n * compute the sum of output satoshis.\n *\n * @param {string} basket - Optional. 
Defaults to 'default', the wallet change basket.\n * @returns {WalletBalance} total sum of output satoshis and utxo details (satoshis and outpoints)\n */\n async balanceAndUtxos(basket = 'default') {\n const r = { total: 0, utxos: [] };\n let offset = 0;\n for (;;) {\n const change = await this.listOutputs({\n basket,\n limit: 1000,\n offset\n });\n if (change.totalOutputs === 0)\n break;\n for (const o of change.outputs) {\n r.total += o.satoshis;\n r.utxos.push({ satoshis: o.satoshis, outpoint: o.outpoint });\n }\n offset += change.outputs.length;\n }\n return r;\n }\n /**\n * Uses `listOutputs` special operation to compute the total value (of satoshis) for\n * all spendable outputs in the 'default' basket.\n *\n * @returns {number} sum of output satoshis\n */\n async balance() {\n const args = {\n basket: types_1.specOpWalletBalance\n };\n const r = await this.listOutputs(args);\n return r.totalOutputs;\n }\n /**\n * Uses `listOutputs` special operation to review the spendability via `Services` of\n * outputs currently considered spendable. Returns the outputs that fail to verify.\n *\n * Ignores the `limit` and `offset` properties.\n *\n * @param all Defaults to false. If false, only change outputs ('default' basket) are reviewed. If true, all spendable outputs are reviewed.\n * @param release Defaults to false. If true, sets outputs that fail to verify to un-spendable (spendable: false)\n * @param optionalArgs Optional. 
Additional tags will constrain the outputs processed.\n * @returns outputs which are/where considered spendable but currently fail to verify as spendable.\n */\n async reviewSpendableOutputs(all = false, release = false, optionalArgs) {\n const args = {\n ...(optionalArgs || {}),\n basket: types_1.specOpInvalidChange\n };\n args.tags || (args.tags = []);\n if (all)\n args.tags.push('all');\n if (release)\n args.tags.push('release');\n const r = await this.listOutputs(args);\n return r;\n }\n /**\n * Uses `listOutputs` special operation to update the 'default' basket's automatic\n * change generation parameters.\n *\n * @param count target number of change UTXOs to maintain.\n * @param satoshis target value for new change outputs.\n */\n async setWalletChangeParams(count, satoshis) {\n const args = {\n basket: types_1.specOpSetWalletChangeParams,\n tags: [count.toString(), satoshis.toString()]\n };\n await this.listOutputs(args);\n }\n /**\n * Uses `listActions` special operation to return only actions with status 'nosend'.\n *\n * @param abort Defaults to false. If true, runs `abortAction` on each 'nosend' action.\n * @returns {ListActionsResult} start `listActions` result restricted to 'nosend' (or 'failed' if aborted) actions.\n */\n async listNoSendActions(args, abort = false) {\n const { vargs } = this.validateAuthAndArgs(args, validationHelpers_1.validateListActionsArgs);\n vargs.labels.push(types_1.specOpNoSendActions);\n if (abort)\n vargs.labels.push('abort');\n const r = await this.storage.listActions(vargs);\n return r;\n }\n /**\n * Uses `listActions` special operation to return only actions with status 'failed'.\n *\n * @param unfail Defaults to false. 
If true, queues the action for attempted recovery.\n * @returns {ListActionsResult} start `listActions` result restricted to 'failed' status actions.\n */\n async listFailedActions(args, unfail = false) {\n const { vargs } = this.validateAuthAndArgs(args, validationHelpers_1.validateListActionsArgs);\n vargs.labels.push(types_1.specOpFailedActions);\n if (unfail)\n vargs.labels.push('unfail');\n const r = await this.storage.listActions(vargs);\n return r;\n }\n}\nexports.Wallet = Wallet;\nfunction throwIfAnyUnsuccessfulCreateActions(r) {\n const ndrs = r.notDelayedResults;\n const swrs = r.sendWithResults;\n if (!ndrs || !swrs || swrs.every(r => r.status === 'unproven'))\n return;\n throw new WERR_errors_1.WERR_REVIEW_ACTIONS(ndrs, swrs, r.txid, r.tx, r.noSendChange);\n}\nfunction throwIfAnyUnsuccessfulSignActions(r) {\n const ndrs = r.notDelayedResults;\n const swrs = r.sendWithResults;\n if (!ndrs || !swrs || swrs.every(r => r.status === 'unproven'))\n return;\n throw new WERR_errors_1.WERR_REVIEW_ACTIONS(ndrs, swrs, r.txid, r.tx);\n}\nfunction throwIfUnsuccessfulInternalizeAction(r) {\n const ndrs = r.notDelayedResults;\n const swrs = r.sendWithResults;\n if (!ndrs || !swrs || swrs.every(r => r.status === 'unproven'))\n return;\n throw new WERR_errors_1.WERR_REVIEW_ACTIONS(ndrs, swrs, r.txid);\n}\n/**\n * Throws a WERR_REVIEW_ACTIONS with a full set of properties to test data formats and propagation.\n */\nfunction throwDummyReviewActions() {\n const b58Beef = 
'gno9MC7VXii1KoCkc2nsVyYJpqzN3dhBzYATETJcys62emMKfpBof4R7GozwYEaSapUtnNvqQ57aaYYjm3U2dv9eUJ1sV46boHkQgppYmAz9YH8FdZduV8aJayPViaKcyPmbDhEw6UW8TM5iFZLXNs7HBnJHUKCeTdNK4FUEL7vAugxAV9WUUZ43BZjJk2SmSeps9TCXjt1Ci9fKWp3d9QSoYvTpxwzyUFHjRKtbUgwq55ZfkBp5bV2Bpz9qSuKywKewW7Hh4S1nCUScwwzpKDozb3zic1V9p2k8rQxoPsRxjUJ8bjhNDdsN8d7KukFuc3n47fXzdWttvnxwsujLJRGnQbgJuknQqx3KLf5kJXHzwjG6TzigZk2t24qeB6d3hbYiaDr2fFkUJBL3tukTHhfNkQYRXuz3kucVDzvejHyqJaF51mXG8BjMN5aQj91ZJXCaPVqkMWCzmvyaqmXMdRiJdSAynhXbQK91xf6RwdNhz1tg5f9B6oJJMhsi9UYSVymmax8VLKD9AKzBCBDcfyD83m3jyS1VgKGZn3SkQmr6bsoWq88L3GsMnnmYUGogvdAYarTqg3pzkjCMxHzmJBMN6ofnUk8c1sRTXQue7BbyUaN5uZu3KW6CmFsEfpuqVvnqFW93TU1jrPP2S8yz8AexAnARPCKE8Yz7RfVaT6RCavwQKL3u5iookwRWEZXW1QWmM37yJWHD87SjVynyg327a1CLwcBxmE2CB48QeNVGyQki4CTQMqw2o8TMhDPJej1g68oniAjBcxBLSCs7KGvK3k7AfrHbCMULX9CTibYhCjdFjbsbBoocqJpxxcvkMo1fEEiAzZuiBVZQDYktDdTVbhKHvYkW25HcYX75NJrpNAhm7AjFeKLzEVxqAQkMfvTufpESNRZF4kQqg2Rg8h2ajcKTd5cpEPwXCrZLHm4EaZEmZVbg3QNfGhn7BJu1bHMtLqPD4y8eJxm2uGrW6saf6qKYmmu64F8A667NbD4yskPRQ1S863VzwGpxxmgLc1Ta3R46jEqsAoRDoZVUaCgBBZG3Yg1CTgi1EVBMXU7qvY4n3h8o2FLCEMWY4KadnV3iD4FbcdCmg4yxBosNAZgbPjhgGjCimjh4YsLd9zymGLmivmz2ZBg5m3xaiXT9NN81X9C1JUujd';\n const beef = sdk_1.Beef.fromBinary(sdk_1.Utils.fromBase58(b58Beef));\n const btx = beef.txs.slice(-1)[0];\n const txid = btx.txid;\n debugger;\n throw new WERR_errors_1.WERR_REVIEW_ACTIONS([\n {\n txid, // only care that it is syntactically a txid\n status: 'doubleSpend',\n competingTxs: [txid], // a txid in the beef\n competingBeef: beef.toBinary()\n }\n ], [\n {\n txid,\n status: 'failed'\n }\n ], txid, beef.toBinaryAtomic(txid), [`${txid}.0`]);\n}\n//# sourceMappingURL=Wallet.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/Wallet.js?\n}"); +eval("{\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || (function () {\n var ownKeys = function(o) {\n ownKeys = Object.getOwnPropertyNames || function (o) {\n var ar = [];\n for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;\n return ar;\n };\n return ownKeys(o);\n };\n return function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== \"default\") __createBinding(result, mod, k[i]);\n __setModuleDefault(result, mod);\n return result;\n };\n})();\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.Wallet = void 0;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst sdk = __importStar(__webpack_require__(/*! ./sdk */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/index.js\"));\nconst acquireDirectCertificate_1 = __webpack_require__(/*! ./signer/methods/acquireDirectCertificate */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/signer/methods/acquireDirectCertificate.js\");\nconst proveCertificate_1 = __webpack_require__(/*! ./signer/methods/proveCertificate */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/signer/methods/proveCertificate.js\");\nconst createAction_1 = __webpack_require__(/*! ./signer/methods/createAction */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/signer/methods/createAction.js\");\nconst signAction_1 = __webpack_require__(/*! 
./signer/methods/signAction */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/signer/methods/signAction.js\");\nconst internalizeAction_1 = __webpack_require__(/*! ./signer/methods/internalizeAction */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/signer/methods/internalizeAction.js\");\nconst WalletSettingsManager_1 = __webpack_require__(/*! ./WalletSettingsManager */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/WalletSettingsManager.js\");\nconst identityUtils_1 = __webpack_require__(/*! ./utility/identityUtils */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/identityUtils.js\");\nconst generateChange_1 = __webpack_require__(/*! ./storage/methods/generateChange */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/generateChange.js\");\nconst sdk_2 = __webpack_require__(/*! ./sdk */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/index.js\");\nconst utilityHelpers_1 = __webpack_require__(/*! ./utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst ScriptTemplateBRC29_1 = __webpack_require__(/*! 
./utility/ScriptTemplateBRC29 */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/ScriptTemplateBRC29.js\");\nfunction isWalletSigner(args) {\n return args['isWalletSigner'];\n}\nclass Wallet {\n constructor(argsOrSigner, services, monitor, privilegedKeyManager) {\n /**\n * If true, signableTransactions will include sourceTransaction for each input,\n * including those that do not require signature and those that were also contained\n * in the inputBEEF.\n */\n this.includeAllSourceTransactions = true;\n /**\n * If true, txids that are known to the wallet's party beef do not need to be returned from storage.\n */\n this.autoKnownTxids = false;\n /**\n * If true, beefs returned to the user may contain txidOnly transactions.\n */\n this.returnTxidOnly = false;\n /**\n * For repeatability testing, set to an array of random numbers from [0..1).\n */\n this.randomVals = undefined;\n const args = !isWalletSigner(argsOrSigner)\n ? argsOrSigner\n : {\n chain: argsOrSigner.chain,\n keyDeriver: argsOrSigner.keyDeriver,\n storage: argsOrSigner.storage,\n services,\n monitor,\n privilegedKeyManager\n };\n if (args.storage._authId.identityKey != args.keyDeriver.identityKey)\n throw new sdk.WERR_INVALID_PARAMETER('storage', `authenticated as the same identityKey (${args.storage._authId.identityKey}) as the keyDeriver (${args.keyDeriver.identityKey}).`);\n this.settingsManager = args.settingsManager || new WalletSettingsManager_1.WalletSettingsManager(this);\n this.chain = args.chain;\n this.lookupResolver =\n args.lookupResolver ||\n new sdk_1.LookupResolver({\n networkPreset: (0, utilityHelpers_1.toWalletNetwork)(this.chain)\n });\n this.keyDeriver = args.keyDeriver;\n this.storage = args.storage;\n this.proto = new sdk_1.ProtoWallet(args.keyDeriver);\n this.services = args.services;\n this.monitor = args.monitor;\n this.privilegedKeyManager = args.privilegedKeyManager;\n this.identityKey = this.keyDeriver.identityKey;\n this.pendingSignActions = {};\n this.userParty 
= `user ${this.getClientChangeKeyPair().publicKey}`;\n this.beef = new sdk_1.BeefParty([this.userParty]);\n this.trustSelf = 'known';\n if (this.services) {\n this.storage.setServices(this.services);\n }\n }\n async destroy() {\n await this.storage.destroy();\n if (this.privilegedKeyManager)\n await this.privilegedKeyManager.destroyKey();\n }\n getClientChangeKeyPair() {\n const kp = {\n privateKey: this.keyDeriver.rootKey.toString(),\n publicKey: this.keyDeriver.rootKey.toPublicKey().toString()\n };\n return kp;\n }\n async getIdentityKey() {\n return (await this.getPublicKey({ identityKey: true })).publicKey;\n }\n getPublicKey(args, originator) {\n if (args.privileged) {\n if (!this.privilegedKeyManager) {\n throw new Error('Privileged operations require the Wallet to be configured with a privileged key manager.');\n }\n return this.privilegedKeyManager.getPublicKey(args);\n }\n return this.proto.getPublicKey(args);\n }\n revealCounterpartyKeyLinkage(args, originator) {\n if (args.privileged) {\n if (!this.privilegedKeyManager) {\n throw new Error('Privileged operations require the Wallet to be configured with a privileged key manager.');\n }\n return this.privilegedKeyManager.revealCounterpartyKeyLinkage(args);\n }\n return this.proto.revealCounterpartyKeyLinkage(args);\n }\n revealSpecificKeyLinkage(args, originator) {\n if (args.privileged) {\n if (!this.privilegedKeyManager) {\n throw new Error('Privileged operations require the Wallet to be configured with a privileged key manager.');\n }\n return this.privilegedKeyManager.revealSpecificKeyLinkage(args);\n }\n return this.proto.revealSpecificKeyLinkage(args);\n }\n encrypt(args, originator) {\n if (args.privileged) {\n if (!this.privilegedKeyManager) {\n throw new Error('Privileged operations require the Wallet to be configured with a privileged key manager.');\n }\n return this.privilegedKeyManager.encrypt(args);\n }\n return this.proto.encrypt(args);\n }\n decrypt(args, originator) {\n if 
(args.privileged) {\n if (!this.privilegedKeyManager) {\n throw new Error('Privileged operations require the Wallet to be configured with a privileged key manager.');\n }\n return this.privilegedKeyManager.decrypt(args);\n }\n return this.proto.decrypt(args);\n }\n createHmac(args, originator) {\n if (args.privileged) {\n if (!this.privilegedKeyManager) {\n throw new Error('Privileged operations require the Wallet to be configured with a privileged key manager.');\n }\n return this.privilegedKeyManager.createHmac(args);\n }\n return this.proto.createHmac(args);\n }\n verifyHmac(args, originator) {\n if (args.privileged) {\n if (!this.privilegedKeyManager) {\n throw new Error('Privileged operations require the Wallet to be configured with a privileged key manager.');\n }\n return this.privilegedKeyManager.verifyHmac(args);\n }\n return this.proto.verifyHmac(args);\n }\n createSignature(args, originator) {\n if (args.privileged) {\n if (!this.privilegedKeyManager) {\n throw new Error('Privileged operations require the Wallet to be configured with a privileged key manager.');\n }\n return this.privilegedKeyManager.createSignature(args);\n }\n return this.proto.createSignature(args);\n }\n verifySignature(args, originator) {\n if (args.privileged) {\n if (!this.privilegedKeyManager) {\n throw new Error('Privileged operations require the Wallet to be configured with a privileged key manager.');\n }\n return this.privilegedKeyManager.verifySignature(args);\n }\n return this.proto.verifySignature(args);\n }\n getServices() {\n if (!this.services)\n throw new sdk.WERR_INVALID_PARAMETER('services', 'valid in constructor arguments to be retreived here.');\n return this.services;\n }\n /**\n * @returns the full list of txids whose validity this wallet claims to know.\n *\n * @param newKnownTxids Optional. 
Additional new txids known to be valid by the caller to be merged.\n */\n getKnownTxids(newKnownTxids) {\n if (newKnownTxids) {\n for (const txid of newKnownTxids)\n this.beef.mergeTxidOnly(txid);\n }\n const r = this.beef.sortTxs();\n const knownTxids = r.valid;\n return knownTxids;\n }\n getStorageIdentity() {\n const s = this.storage.getSettings();\n return {\n storageIdentityKey: s.storageIdentityKey,\n storageName: s.storageName\n };\n }\n validateAuthAndArgs(args, validate) {\n const vargs = validate(args);\n const auth = { identityKey: this.identityKey };\n return { vargs, auth };\n }\n //////////////////\n // List Methods\n //////////////////\n async listActions(args, originator) {\n sdk.validateOriginator(originator);\n const { vargs } = this.validateAuthAndArgs(args, sdk.validateListActionsArgs);\n const r = await this.storage.listActions(vargs);\n return r;\n }\n get storageParty() {\n return `storage ${this.getStorageIdentity().storageIdentityKey}`;\n }\n async listOutputs(args, originator) {\n sdk.validateOriginator(originator);\n const { vargs } = this.validateAuthAndArgs(args, sdk.validateListOutputsArgs);\n if (this.autoKnownTxids && !vargs.knownTxids) {\n vargs.knownTxids = this.getKnownTxids();\n }\n const r = await this.storage.listOutputs(vargs);\n if (r.BEEF) {\n this.beef.mergeBeefFromParty(this.storageParty, r.BEEF);\n r.BEEF = this.verifyReturnedTxidOnlyBEEF(r.BEEF);\n }\n return r;\n }\n async listCertificates(args, originator) {\n sdk.validateOriginator(originator);\n const { vargs } = this.validateAuthAndArgs(args, sdk.validateListCertificatesArgs);\n const r = await this.storage.listCertificates(vargs);\n return r;\n }\n //////////////////\n // Certificates\n //////////////////\n async acquireCertificate(args, originator) {\n sdk.validateOriginator(originator);\n if (args.acquisitionProtocol === 'direct') {\n const { auth, vargs } = this.validateAuthAndArgs(args, sdk.validateAcquireDirectCertificateArgs);\n vargs.subject = (await 
this.getPublicKey({\n identityKey: true,\n privileged: args.privileged,\n privilegedReason: args.privilegedReason\n })).publicKey;\n try {\n // Confirm that the information received adds up to a usable certificate...\n // TODO: Clean up MasterCertificate to support decrypt on instance\n const cert = new sdk_1.MasterCertificate(vargs.type, vargs.serialNumber, vargs.subject, vargs.certifier, vargs.revocationOutpoint, vargs.fields, vargs.keyringForSubject, vargs.signature);\n await cert.verify();\n // Verify certificate details\n await sdk_1.MasterCertificate.decryptFields(this, vargs.keyringForSubject, vargs.fields, vargs.certifier, vargs.privileged, vargs.privilegedReason);\n }\n catch (eu) {\n const e = sdk.WalletError.fromUnknown(eu);\n throw new sdk.WERR_INVALID_PARAMETER('args', `valid encrypted and signed certificate and keyring from revealer. ${e.name}: ${e.message}`);\n }\n const r = await (0, acquireDirectCertificate_1.acquireDirectCertificate)(this, auth, vargs);\n return r;\n }\n if (args.acquisitionProtocol === 'issuance') {\n const { auth, vargs } = this.validateAuthAndArgs(args, sdk.validateAcquireIssuanceCertificateArgs);\n // Create a random nonce that the server can verify\n const clientNonce = await (0, sdk_1.createNonce)(this, vargs.certifier);\n // TODO: Consider adding support to request certificates from a certifier before acquiring a certificate.\n const authClient = new sdk_1.AuthFetch(this);\n // Create a certificate master keyring\n // The certifier is able to decrypt these fields as they are the counterparty\n const { certificateFields, masterKeyring } = await sdk_1.MasterCertificate.createCertificateFields(this, vargs.certifier, vargs.fields);\n // Make a Certificate Signing Request (CSR) to the certifier\n const response = await authClient.fetch(`${vargs.certifierUrl}/signCertificate`, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json'\n },\n body: JSON.stringify({\n clientNonce,\n type: vargs.type,\n fields: 
certificateFields,\n masterKeyring\n })\n });\n if (response.headers.get('x-bsv-auth-identity-key') !== vargs.certifier) {\n throw new Error(`Invalid certifier! Expected: ${vargs.certifier}, Received: ${response.headers.get('x-bsv-auth-identity-key')}`);\n }\n const { certificate, serverNonce } = await response.json();\n // Validate the server response\n if (!certificate) {\n throw new Error('No certificate received from certifier!');\n }\n if (!serverNonce) {\n throw new Error('No serverNonce received from certifier!');\n }\n const signedCertificate = new sdk_1.Certificate(certificate.type, certificate.serialNumber, certificate.subject, certificate.certifier, certificate.revocationOutpoint, certificate.fields, certificate.signature);\n // Validate server nonce\n await (0, sdk_1.verifyNonce)(serverNonce, this, vargs.certifier);\n // Verify the server included our nonce\n const { valid } = await this.verifyHmac({\n hmac: sdk_1.Utils.toArray(signedCertificate.serialNumber, 'base64'),\n data: sdk_1.Utils.toArray(clientNonce + serverNonce, 'base64'),\n protocolID: [2, 'certificate issuance'],\n keyID: serverNonce + clientNonce,\n counterparty: vargs.certifier\n });\n if (!valid)\n throw new Error('Invalid serialNumber');\n // Validate the certificate received\n if (signedCertificate.type !== vargs.type) {\n throw new Error(`Invalid certificate type! Expected: ${vargs.type}, Received: ${signedCertificate.type}`);\n }\n if (signedCertificate.subject !== this.identityKey) {\n throw new Error(`Invalid certificate subject! Expected: ${this.identityKey}, Received: ${signedCertificate.subject}`);\n }\n if (signedCertificate.certifier !== vargs.certifier) {\n throw new Error(`Invalid certifier! 
Expected: ${vargs.certifier}, Received: ${signedCertificate.certifier}`);\n }\n if (!signedCertificate.revocationOutpoint) {\n throw new Error(`Invalid revocationOutpoint!`);\n }\n if (Object.keys(signedCertificate.fields).length !== Object.keys(certificateFields).length) {\n throw new Error(`Fields mismatch! Objects have different numbers of keys.`);\n }\n for (const field of Object.keys(certificateFields)) {\n if (!(field in signedCertificate.fields)) {\n throw new Error(`Missing field: ${field} in certificate.fields`);\n }\n if (signedCertificate.fields[field] !== certificateFields[field]) {\n throw new Error(`Invalid field! Expected: ${certificateFields[field]}, Received: ${signedCertificate.fields[field]}`);\n }\n }\n await signedCertificate.verify();\n // Test decryption works\n await sdk_1.MasterCertificate.decryptFields(this, masterKeyring, certificate.fields, vargs.certifier);\n // Store the newly issued certificate\n return await (0, acquireDirectCertificate_1.acquireDirectCertificate)(this, auth, {\n ...certificate,\n keyringRevealer: 'certifier',\n keyringForSubject: masterKeyring,\n privileged: vargs.privileged\n });\n }\n throw new sdk.WERR_INVALID_PARAMETER('acquisitionProtocol', `valid.${args.acquisitionProtocol} is unrecognized.`);\n }\n async relinquishCertificate(args, originator) {\n sdk.validateOriginator(originator);\n this.validateAuthAndArgs(args, sdk.validateRelinquishCertificateArgs);\n const r = await this.storage.relinquishCertificate(args);\n return { relinquished: true };\n }\n async proveCertificate(args, originator) {\n originator = sdk.validateOriginator(originator);\n const { auth, vargs } = this.validateAuthAndArgs(args, sdk.validateProveCertificateArgs);\n const r = await (0, proveCertificate_1.proveCertificate)(this, auth, vargs);\n return r;\n }\n async discoverByIdentityKey(args, originator) {\n sdk.validateOriginator(originator);\n this.validateAuthAndArgs(args, sdk.validateDiscoverByIdentityKeyArgs);\n const trustSettings = 
(await this.settingsManager.get()).trustSettings;\n const results = await (0, identityUtils_1.queryOverlay)({\n identityKey: args.identityKey,\n certifiers: trustSettings.trustedCertifiers.map(certifier => certifier.identityKey)\n }, this.lookupResolver);\n if (!results) {\n return {\n totalCertificates: 0,\n certificates: []\n };\n }\n return (0, identityUtils_1.transformVerifiableCertificatesWithTrust)(trustSettings, results);\n }\n async discoverByAttributes(args, originator) {\n sdk.validateOriginator(originator);\n this.validateAuthAndArgs(args, sdk.validateDiscoverByAttributesArgs);\n const trustSettings = (await this.settingsManager.get()).trustSettings;\n const results = await (0, identityUtils_1.queryOverlay)({\n attributes: args.attributes,\n certifiers: trustSettings.trustedCertifiers.map(certifier => certifier.identityKey)\n }, this.lookupResolver);\n if (!results) {\n return {\n totalCertificates: 0,\n certificates: []\n };\n }\n return (0, identityUtils_1.transformVerifiableCertificatesWithTrust)(trustSettings, results);\n }\n verifyReturnedTxidOnly(beef, knownTxids) {\n if (this.returnTxidOnly)\n return beef;\n const onlyTxids = beef.txs.filter(btx => btx.isTxidOnly).map(btx => btx.txid);\n for (const txid of onlyTxids) {\n if (knownTxids && knownTxids.indexOf(txid) >= 0)\n continue;\n const btx = beef.findTxid(txid);\n const tx = this.beef.findAtomicTransaction(txid);\n if (!tx)\n throw new sdk.WERR_INTERNAL(`unable to merge txid ${txid} into beef`);\n beef.mergeTransaction(tx);\n }\n for (const btx of beef.txs) {\n if (knownTxids && knownTxids.indexOf(btx.txid) >= 0)\n continue;\n if (btx.isTxidOnly)\n throw new sdk.WERR_INTERNAL(`remaining txidOnly ${btx.txid} is not known`);\n }\n return beef;\n }\n verifyReturnedTxidOnlyAtomicBEEF(beef, knownTxids) {\n if (this.returnTxidOnly)\n return beef;\n const b = sdk_1.Beef.fromBinary(beef);\n if (!b.atomicTxid)\n throw new sdk.WERR_INTERNAL();\n return this.verifyReturnedTxidOnly(b, 
knownTxids).toBinaryAtomic(b.atomicTxid);\n }\n verifyReturnedTxidOnlyBEEF(beef) {\n if (this.returnTxidOnly)\n return beef;\n const b = sdk_1.Beef.fromBinary(beef);\n return this.verifyReturnedTxidOnly(b).toBinary();\n }\n //////////////////\n // Actions\n //////////////////\n async createAction(args, originator) {\n var _a;\n var _b;\n sdk.validateOriginator(originator);\n if (!args.options)\n args.options = {};\n (_b = args.options).trustSelf || (_b.trustSelf = this.trustSelf);\n if (this.autoKnownTxids && !args.options.knownTxids) {\n args.options.knownTxids = this.getKnownTxids(args.options.knownTxids);\n }\n const { auth, vargs } = this.validateAuthAndArgs(args, sdk.validateCreateActionArgs);\n if (vargs.labels.indexOf(sdk_2.specOpThrowReviewActions) >= 0)\n throwDummyReviewActions();\n vargs.includeAllSourceTransactions = this.includeAllSourceTransactions;\n if (this.randomVals && this.randomVals.length > 1) {\n vargs.randomVals = [...this.randomVals];\n }\n const r = await (0, createAction_1.createAction)(this, auth, vargs);\n if (r.tx) {\n this.beef.mergeBeefFromParty(this.storageParty, r.tx);\n }\n if (r.tx)\n r.tx = this.verifyReturnedTxidOnlyAtomicBEEF(r.tx, (_a = args.options) === null || _a === void 0 ? void 0 : _a.knownTxids);\n if (!vargs.isDelayed)\n throwIfAnyUnsuccessfulCreateActions(r);\n return r;\n }\n async signAction(args, originator) {\n var _a;\n sdk.validateOriginator(originator);\n const { auth, vargs } = this.validateAuthAndArgs(args, sdk.validateSignActionArgs);\n // createAction options are merged with undefined signAction options before validation...\n const r = await (0, signAction_1.signAction)(this, auth, args);\n if (!vargs.isDelayed)\n throwIfAnyUnsuccessfulSignActions(r);\n const prior = this.pendingSignActions[args.reference];\n if (r.tx)\n r.tx = this.verifyReturnedTxidOnlyAtomicBEEF(r.tx, (_a = prior.args.options) === null || _a === void 0 ? 
void 0 : _a.knownTxids);\n return r;\n }\n async internalizeAction(args, originator) {\n sdk.validateOriginator(originator);\n const { auth, vargs } = this.validateAuthAndArgs(args, sdk.validateInternalizeActionArgs);\n if (vargs.labels.indexOf(sdk_2.specOpThrowReviewActions) >= 0)\n throwDummyReviewActions();\n const r = await (0, internalizeAction_1.internalizeAction)(this, auth, args);\n throwIfUnsuccessfulInternalizeAction(r);\n return r;\n }\n async abortAction(args, originator) {\n sdk.validateOriginator(originator);\n const { auth } = this.validateAuthAndArgs(args, sdk.validateAbortActionArgs);\n const r = await this.storage.abortAction(args);\n return r;\n }\n async relinquishOutput(args, originator) {\n sdk.validateOriginator(originator);\n const { vargs } = this.validateAuthAndArgs(args, sdk.validateRelinquishOutputArgs);\n const r = await this.storage.relinquishOutput(args);\n return { relinquished: true };\n }\n async isAuthenticated(args, originator) {\n sdk.validateOriginator(originator);\n const r = {\n authenticated: true\n };\n return r;\n }\n async waitForAuthentication(args, originator) {\n sdk.validateOriginator(originator);\n return { authenticated: true };\n }\n async getHeight(args, originator) {\n sdk.validateOriginator(originator);\n const height = await this.getServices().getHeight();\n return { height };\n }\n async getHeaderForHeight(args, originator) {\n sdk.validateOriginator(originator);\n const serializedHeader = await this.getServices().getHeaderForHeight(args.height);\n return { header: sdk_1.Utils.toHex(serializedHeader) };\n }\n async getNetwork(args, originator) {\n sdk.validateOriginator(originator);\n return { network: (0, utilityHelpers_1.toWalletNetwork)(this.chain) };\n }\n async getVersion(args, originator) {\n sdk.validateOriginator(originator);\n return { version: 'wallet-brc100-1.0.0' };\n }\n /**\n * Transfer all possible satoshis held by this wallet to `toWallet`.\n *\n * @param toWallet wallet which will receive this 
wallet's satoshis.\n */\n async sweepTo(toWallet) {\n const derivationPrefix = (0, utilityHelpers_1.randomBytesBase64)(8);\n const derivationSuffix = (0, utilityHelpers_1.randomBytesBase64)(8);\n const keyDeriver = this.keyDeriver;\n const t = new ScriptTemplateBRC29_1.ScriptTemplateBRC29({\n derivationPrefix,\n derivationSuffix,\n keyDeriver\n });\n const label = 'sweep';\n const satoshis = generateChange_1.maxPossibleSatoshis;\n const car = await this.createAction({\n outputs: [\n {\n lockingScript: t.lock(keyDeriver.rootKey.toString(), toWallet.identityKey).toHex(),\n satoshis,\n outputDescription: label,\n tags: ['relinquish'],\n customInstructions: JSON.stringify({\n derivationPrefix,\n derivationSuffix,\n type: 'BRC29'\n })\n }\n ],\n options: {\n randomizeOutputs: false,\n acceptDelayedBroadcast: false\n },\n labels: [label],\n description: label\n });\n const iar = await toWallet.internalizeAction({\n tx: car.tx,\n outputs: [\n {\n outputIndex: 0,\n protocol: 'wallet payment',\n paymentRemittance: {\n derivationPrefix,\n derivationSuffix,\n senderIdentityKey: this.identityKey\n }\n }\n ],\n description: label,\n labels: [label]\n });\n }\n /**\n * Uses `listOutputs` to iterate over chunks of up to 1000 outputs to\n * compute the sum of output satoshis.\n *\n * @param {string} basket - Optional. 
Defaults to 'default', the wallet change basket.\n * @returns {sdk.WalletBalance} total sum of output satoshis and utxo details (satoshis and outpoints)\n */\n async balanceAndUtxos(basket = 'default') {\n const r = { total: 0, utxos: [] };\n let offset = 0;\n for (;;) {\n const change = await this.listOutputs({\n basket,\n limit: 1000,\n offset\n });\n if (change.totalOutputs === 0)\n break;\n for (const o of change.outputs) {\n r.total += o.satoshis;\n r.utxos.push({ satoshis: o.satoshis, outpoint: o.outpoint });\n }\n offset += change.outputs.length;\n }\n return r;\n }\n /**\n * Uses `listOutputs` special operation to compute the total value (of satoshis) for\n * all spendable outputs in the 'default' basket.\n *\n * @returns {number} sum of output satoshis\n */\n async balance() {\n const args = {\n basket: sdk_2.specOpWalletBalance\n };\n const r = await this.listOutputs(args);\n return r.totalOutputs;\n }\n /**\n * Uses `listOutputs` special operation to review the spendability via `Services` of\n * outputs currently considered spendable. Returns the outputs that fail to verify.\n *\n * Ignores the `limit` and `offset` properties.\n *\n * @param all Defaults to false. If false, only change outputs ('default' basket) are reviewed. If true, all spendable outputs are reviewed.\n * @param release Defaults to false. If true, sets outputs that fail to verify to un-spendable (spendable: false)\n * @param optionalArgs Optional. 
Additional tags will constrain the outputs processed.\n * @returns outputs which are/where considered spendable but currently fail to verify as spendable.\n */\n async reviewSpendableOutputs(all = false, release = false, optionalArgs) {\n const args = {\n ...(optionalArgs || {}),\n basket: sdk_2.specOpInvalidChange\n };\n args.tags || (args.tags = []);\n if (all)\n args.tags.push('all');\n if (release)\n args.tags.push('release');\n const r = await this.listOutputs(args);\n return r;\n }\n /**\n * Uses `listOutputs` special operation to update the 'default' basket's automatic\n * change generation parameters.\n *\n * @param count target number of change UTXOs to maintain.\n * @param satoshis target value for new change outputs.\n */\n async setWalletChangeParams(count, satoshis) {\n const args = {\n basket: sdk_2.specOpSetWalletChangeParams,\n tags: [count.toString(), satoshis.toString()]\n };\n await this.listOutputs(args);\n }\n /**\n * Uses `listActions` special operation to return only actions with status 'nosend'.\n *\n * @param abort Defaults to false. If true, runs `abortAction` on each 'nosend' action.\n * @returns {ListActionsResult} start `listActions` result restricted to 'nosend' (or 'failed' if aborted) actions.\n */\n async listNoSendActions(args, abort = false) {\n const { vargs } = this.validateAuthAndArgs(args, sdk.validateListActionsArgs);\n vargs.labels.push(sdk_2.specOpNoSendActions);\n if (abort)\n vargs.labels.push('abort');\n const r = await this.storage.listActions(vargs);\n return r;\n }\n /**\n * Uses `listActions` special operation to return only actions with status 'failed'.\n *\n * @param unfail Defaults to false. 
If true, queues the action for attempted recovery.\n * @returns {ListActionsResult} start `listActions` result restricted to 'failed' status actions.\n */\n async listFailedActions(args, unfail = false) {\n const { vargs } = this.validateAuthAndArgs(args, sdk.validateListActionsArgs);\n vargs.labels.push(sdk_2.specOpFailedActions);\n if (unfail)\n vargs.labels.push('unfail');\n const r = await this.storage.listActions(vargs);\n return r;\n }\n}\nexports.Wallet = Wallet;\nfunction throwIfAnyUnsuccessfulCreateActions(r) {\n const ndrs = r.notDelayedResults;\n const swrs = r.sendWithResults;\n if (!ndrs || !swrs || swrs.every(r => r.status === 'unproven'))\n return;\n throw new sdk.WERR_REVIEW_ACTIONS(ndrs, swrs, r.txid, r.tx, r.noSendChange);\n}\nfunction throwIfAnyUnsuccessfulSignActions(r) {\n const ndrs = r.notDelayedResults;\n const swrs = r.sendWithResults;\n if (!ndrs || !swrs || swrs.every(r => r.status === 'unproven'))\n return;\n throw new sdk.WERR_REVIEW_ACTIONS(ndrs, swrs, r.txid, r.tx);\n}\nfunction throwIfUnsuccessfulInternalizeAction(r) {\n const ndrs = r.notDelayedResults;\n const swrs = r.sendWithResults;\n if (!ndrs || !swrs || swrs.every(r => r.status === 'unproven'))\n return;\n throw new sdk.WERR_REVIEW_ACTIONS(ndrs, swrs, r.txid);\n}\n/**\n * Throws a WERR_REVIEW_ACTIONS with a full set of properties to test data formats and propagation.\n */\nfunction throwDummyReviewActions() {\n const b58Beef = 
'gno9MC7VXii1KoCkc2nsVyYJpqzN3dhBzYATETJcys62emMKfpBof4R7GozwYEaSapUtnNvqQ57aaYYjm3U2dv9eUJ1sV46boHkQgppYmAz9YH8FdZduV8aJayPViaKcyPmbDhEw6UW8TM5iFZLXNs7HBnJHUKCeTdNK4FUEL7vAugxAV9WUUZ43BZjJk2SmSeps9TCXjt1Ci9fKWp3d9QSoYvTpxwzyUFHjRKtbUgwq55ZfkBp5bV2Bpz9qSuKywKewW7Hh4S1nCUScwwzpKDozb3zic1V9p2k8rQxoPsRxjUJ8bjhNDdsN8d7KukFuc3n47fXzdWttvnxwsujLJRGnQbgJuknQqx3KLf5kJXHzwjG6TzigZk2t24qeB6d3hbYiaDr2fFkUJBL3tukTHhfNkQYRXuz3kucVDzvejHyqJaF51mXG8BjMN5aQj91ZJXCaPVqkMWCzmvyaqmXMdRiJdSAynhXbQK91xf6RwdNhz1tg5f9B6oJJMhsi9UYSVymmax8VLKD9AKzBCBDcfyD83m3jyS1VgKGZn3SkQmr6bsoWq88L3GsMnnmYUGogvdAYarTqg3pzkjCMxHzmJBMN6ofnUk8c1sRTXQue7BbyUaN5uZu3KW6CmFsEfpuqVvnqFW93TU1jrPP2S8yz8AexAnARPCKE8Yz7RfVaT6RCavwQKL3u5iookwRWEZXW1QWmM37yJWHD87SjVynyg327a1CLwcBxmE2CB48QeNVGyQki4CTQMqw2o8TMhDPJej1g68oniAjBcxBLSCs7KGvK3k7AfrHbCMULX9CTibYhCjdFjbsbBoocqJpxxcvkMo1fEEiAzZuiBVZQDYktDdTVbhKHvYkW25HcYX75NJrpNAhm7AjFeKLzEVxqAQkMfvTufpESNRZF4kQqg2Rg8h2ajcKTd5cpEPwXCrZLHm4EaZEmZVbg3QNfGhn7BJu1bHMtLqPD4y8eJxm2uGrW6saf6qKYmmu64F8A667NbD4yskPRQ1S863VzwGpxxmgLc1Ta3R46jEqsAoRDoZVUaCgBBZG3Yg1CTgi1EVBMXU7qvY4n3h8o2FLCEMWY4KadnV3iD4FbcdCmg4yxBosNAZgbPjhgGjCimjh4YsLd9zymGLmivmz2ZBg5m3xaiXT9NN81X9C1JUujd';\n const beef = sdk_1.Beef.fromBinary(sdk_1.Utils.fromBase58(b58Beef));\n const btx = beef.txs.slice(-1)[0];\n const txid = btx.txid;\n debugger;\n throw new sdk.WERR_REVIEW_ACTIONS([\n {\n txid, // only care that it is syntactically a txid\n status: 'doubleSpend',\n competingTxs: [txid], // a txid in the beef\n competingBeef: beef.toBinary()\n }\n ], [\n {\n txid,\n status: 'failed'\n }\n ], txid, beef.toBinaryAtomic(txid), [`${txid}.0`]);\n}\n//# sourceMappingURL=Wallet.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/Wallet.js?\n}"); /***/ }), @@ -2798,6 +2831,17 @@ /***/ }), +/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js": +/*!*************************************************************************!*\ + !*** 
./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js ***! + \*************************************************************************/ +/***/ (function(__unused_webpack_module, exports, __webpack_require__) { + +"use strict"; +eval("{\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || (function () {\n var ownKeys = function(o) {\n ownKeys = Object.getOwnPropertyNames || function (o) {\n var ar = [];\n for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;\n return ar;\n };\n return ownKeys(o);\n };\n return function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== \"default\") __createBinding(result, mod, k[i]);\n __setModuleDefault(result, mod);\n return result;\n };\n})();\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.sdk = void 0;\nexports.sdk = __importStar(__webpack_require__(/*! ./sdk/index */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/index.js\"));\n__exportStar(__webpack_require__(/*! 
./utility/index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/index.client.js\"), exports);\n__exportStar(__webpack_require__(/*! ./SetupClient */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/SetupClient.js\"), exports);\n__exportStar(__webpack_require__(/*! ./SetupWallet */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/SetupWallet.js\"), exports);\n__exportStar(__webpack_require__(/*! ./signer/WalletSigner */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/signer/WalletSigner.js\"), exports);\n__exportStar(__webpack_require__(/*! ./WalletPermissionsManager */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/WalletPermissionsManager.js\"), exports);\n__exportStar(__webpack_require__(/*! ./CWIStyleWalletManager */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/CWIStyleWalletManager.js\"), exports);\n__exportStar(__webpack_require__(/*! ./WalletAuthenticationManager */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/WalletAuthenticationManager.js\"), exports);\n__exportStar(__webpack_require__(/*! ./wab-client/WABClient */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/wab-client/WABClient.js\"), exports);\n__exportStar(__webpack_require__(/*! ./wab-client/auth-method-interactors/TwilioPhoneInteractor */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/wab-client/auth-method-interactors/TwilioPhoneInteractor.js\"), exports);\n__exportStar(__webpack_require__(/*! ./wab-client/auth-method-interactors/PersonaIDInteractor */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/wab-client/auth-method-interactors/PersonaIDInteractor.js\"), exports);\n__exportStar(__webpack_require__(/*! ./wab-client/auth-method-interactors/AuthMethodInteractor */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/wab-client/auth-method-interactors/AuthMethodInteractor.js\"), exports);\n__exportStar(__webpack_require__(/*! 
./services/Services */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/Services.js\"), exports);\n__exportStar(__webpack_require__(/*! ./sdk/PrivilegedKeyManager */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/PrivilegedKeyManager.js\"), exports);\n__exportStar(__webpack_require__(/*! ./SimpleWalletManager */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/SimpleWalletManager.js\"), exports);\n__exportStar(__webpack_require__(/*! ./storage/index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/index.client.js\"), exports);\n__exportStar(__webpack_require__(/*! ./Wallet */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/Wallet.js\"), exports);\n__exportStar(__webpack_require__(/*! ./monitor/Monitor */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/Monitor.js\"), exports);\n//# sourceMappingURL=index.client.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js?\n}"); + +/***/ }), + /***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.mobile.js": /*!*************************************************************************!*\ !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.mobile.js ***! @@ -2805,7 +2849,7 @@ /***/ (function(__unused_webpack_module, exports, __webpack_require__) { "use strict"; -eval("{\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || (function () {\n var ownKeys = function(o) {\n ownKeys = Object.getOwnPropertyNames || function (o) {\n var ar = [];\n for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;\n return ar;\n };\n return ownKeys(o);\n };\n return function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== \"default\") __createBinding(result, mod, k[i]);\n __setModuleDefault(result, mod);\n return result;\n };\n})();\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.sdk = void 0;\nexports.sdk = __importStar(__webpack_require__(/*! ./sdk/index */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/index.js\"));\n__exportStar(__webpack_require__(/*! ./utility/index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/index.client.js\"), exports);\n__exportStar(__webpack_require__(/*! ./storage/index.mobile */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/index.mobile.js\"), exports);\n__exportStar(__webpack_require__(/*! ./services/chaintracker/chaintracks/index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/index.client.js\"), exports);\n__exportStar(__webpack_require__(/*! ./CWIStyleWalletManager */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/CWIStyleWalletManager.js\"), exports);\n__exportStar(__webpack_require__(/*! 
./monitor/Monitor */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/Monitor.js\"), exports);\n__exportStar(__webpack_require__(/*! ./sdk/PrivilegedKeyManager */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/PrivilegedKeyManager.js\"), exports);\n__exportStar(__webpack_require__(/*! ./services/Services */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/Services.js\"), exports);\n__exportStar(__webpack_require__(/*! ./signer/WalletSigner */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/signer/WalletSigner.js\"), exports);\n__exportStar(__webpack_require__(/*! ./SimpleWalletManager */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/SimpleWalletManager.js\"), exports);\n__exportStar(__webpack_require__(/*! ./wab-client/auth-method-interactors/AuthMethodInteractor */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/wab-client/auth-method-interactors/AuthMethodInteractor.js\"), exports);\n__exportStar(__webpack_require__(/*! ./wab-client/auth-method-interactors/PersonaIDInteractor */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/wab-client/auth-method-interactors/PersonaIDInteractor.js\"), exports);\n__exportStar(__webpack_require__(/*! ./wab-client/auth-method-interactors/TwilioPhoneInteractor */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/wab-client/auth-method-interactors/TwilioPhoneInteractor.js\"), exports);\n__exportStar(__webpack_require__(/*! ./wab-client/WABClient */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/wab-client/WABClient.js\"), exports);\n__exportStar(__webpack_require__(/*! ./Wallet */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/Wallet.js\"), exports);\n__exportStar(__webpack_require__(/*! ./WalletAuthenticationManager */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/WalletAuthenticationManager.js\"), exports);\n__exportStar(__webpack_require__(/*! 
./WalletPermissionsManager */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/WalletPermissionsManager.js\"), exports);\n//# sourceMappingURL=index.mobile.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.mobile.js?\n}"); +eval("{\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || (function () {\n var ownKeys = function(o) {\n ownKeys = Object.getOwnPropertyNames || function (o) {\n var ar = [];\n for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;\n return ar;\n };\n return ownKeys(o);\n };\n return function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== \"default\") __createBinding(result, mod, k[i]);\n __setModuleDefault(result, mod);\n return result;\n };\n})();\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.sdk = void 0;\nexports.sdk = __importStar(__webpack_require__(/*! 
./sdk/index */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/index.js\"));\n__exportStar(__webpack_require__(/*! ./utility/index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/index.client.js\"), exports);\n__exportStar(__webpack_require__(/*! ./signer/WalletSigner */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/signer/WalletSigner.js\"), exports);\n__exportStar(__webpack_require__(/*! ./WalletPermissionsManager */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/WalletPermissionsManager.js\"), exports);\n__exportStar(__webpack_require__(/*! ./CWIStyleWalletManager */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/CWIStyleWalletManager.js\"), exports);\n__exportStar(__webpack_require__(/*! ./WalletAuthenticationManager */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/WalletAuthenticationManager.js\"), exports);\n__exportStar(__webpack_require__(/*! ./wab-client/WABClient */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/wab-client/WABClient.js\"), exports);\n__exportStar(__webpack_require__(/*! ./wab-client/auth-method-interactors/TwilioPhoneInteractor */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/wab-client/auth-method-interactors/TwilioPhoneInteractor.js\"), exports);\n__exportStar(__webpack_require__(/*! ./wab-client/auth-method-interactors/PersonaIDInteractor */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/wab-client/auth-method-interactors/PersonaIDInteractor.js\"), exports);\n__exportStar(__webpack_require__(/*! ./wab-client/auth-method-interactors/AuthMethodInteractor */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/wab-client/auth-method-interactors/AuthMethodInteractor.js\"), exports);\n__exportStar(__webpack_require__(/*! ./services/Services */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/Services.js\"), exports);\n__exportStar(__webpack_require__(/*! 
./sdk/PrivilegedKeyManager */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/PrivilegedKeyManager.js\"), exports);\n__exportStar(__webpack_require__(/*! ./SimpleWalletManager */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/SimpleWalletManager.js\"), exports);\n__exportStar(__webpack_require__(/*! ./storage/index.mobile */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/index.mobile.js\"), exports);\n__exportStar(__webpack_require__(/*! ./Wallet */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/Wallet.js\"), exports);\n__exportStar(__webpack_require__(/*! ./monitor/Monitor */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/Monitor.js\"), exports);\n//# sourceMappingURL=index.mobile.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.mobile.js?\n}"); /***/ }), @@ -2813,10 +2857,10 @@ /*!****************************************************************************!*\ !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/Monitor.js ***! \****************************************************************************/ -/***/ ((__unused_webpack_module, exports, __webpack_require__) => { +/***/ (function(__unused_webpack_module, exports, __webpack_require__) { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.Monitor = void 0;\nconst utilityHelpers_1 = __webpack_require__(/*! ../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst TaskPurge_1 = __webpack_require__(/*! ./tasks/TaskPurge */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/tasks/TaskPurge.js\");\nconst TaskReviewStatus_1 = __webpack_require__(/*! ./tasks/TaskReviewStatus */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/tasks/TaskReviewStatus.js\");\nconst TaskSyncWhenIdle_1 = __webpack_require__(/*! 
./tasks/TaskSyncWhenIdle */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/tasks/TaskSyncWhenIdle.js\");\nconst TaskFailAbandoned_1 = __webpack_require__(/*! ./tasks/TaskFailAbandoned */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/tasks/TaskFailAbandoned.js\");\nconst TaskCheckForProofs_1 = __webpack_require__(/*! ./tasks/TaskCheckForProofs */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/tasks/TaskCheckForProofs.js\");\nconst TaskClock_1 = __webpack_require__(/*! ./tasks/TaskClock */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/tasks/TaskClock.js\");\nconst TaskNewHeader_1 = __webpack_require__(/*! ./tasks/TaskNewHeader */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/tasks/TaskNewHeader.js\");\nconst TaskMonitorCallHistory_1 = __webpack_require__(/*! ./tasks/TaskMonitorCallHistory */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/tasks/TaskMonitorCallHistory.js\");\nconst TaskSendWaiting_1 = __webpack_require__(/*! ./tasks/TaskSendWaiting */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/tasks/TaskSendWaiting.js\");\nconst TaskCheckNoSends_1 = __webpack_require__(/*! ./tasks/TaskCheckNoSends */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/tasks/TaskCheckNoSends.js\");\nconst TaskUnFail_1 = __webpack_require__(/*! ./tasks/TaskUnFail */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/tasks/TaskUnFail.js\");\nconst WERR_errors_1 = __webpack_require__(/*! ../sdk/WERR_errors */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WERR_errors.js\");\nconst WalletError_1 = __webpack_require__(/*! ../sdk/WalletError */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WalletError.js\");\nconst Services_1 = __webpack_require__(/*! 
../services/Services */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/Services.js\");\n/**\n * Background task to make sure transactions are processed, transaction proofs are received and propagated,\n * and potentially that reorgs update proofs that were already received.\n */\nclass Monitor {\n static createDefaultWalletMonitorOptions(chain, storage, services) {\n services || (services = new Services_1.Services(chain));\n if (!services.options.chaintracks)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('services.options.chaintracks', 'valid');\n const o = {\n chain,\n services,\n storage,\n msecsWaitPerMerkleProofServiceReq: 500,\n taskRunWaitMsecs: 5000,\n abandonedMsecs: 1000 * 60 * 5,\n unprovenAttemptsLimitTest: 10,\n unprovenAttemptsLimitMain: 144,\n chaintracks: services.options.chaintracks\n };\n return o;\n }\n constructor(options) {\n this.oneSecond = 1000;\n this.oneMinute = 60 * this.oneSecond;\n this.oneHour = 60 * this.oneMinute;\n this.oneDay = 24 * this.oneHour;\n this.oneWeek = 7 * this.oneDay;\n /**\n * _tasks are typically run by the scheduler but may also be run by runTask.\n */\n this._tasks = [];\n /**\n * _otherTasks can be run by runTask but not by scheduler.\n */\n this._otherTasks = [];\n this._tasksRunning = false;\n this.defaultPurgeParams = {\n purgeSpent: false,\n purgeCompleted: false,\n purgeFailed: true,\n purgeSpentAge: 2 * this.oneWeek,\n purgeCompletedAge: 2 * this.oneWeek,\n purgeFailedAge: 5 * this.oneDay\n };\n this._runAsyncSetup = true;\n this.options = { ...options };\n this.services = options.services;\n this.chain = this.services.chain;\n this.storage = options.storage;\n this.chaintracks = options.chaintracks;\n this.onTransactionProven = options.onTransactionProven;\n this.onTransactionBroadcasted = options.onTransactionBroadcasted;\n }\n addAllTasksToOther() {\n this._otherTasks.push(new TaskClock_1.TaskClock(this));\n this._otherTasks.push(new TaskNewHeader_1.TaskNewHeader(this));\n 
this._otherTasks.push(new TaskMonitorCallHistory_1.TaskMonitorCallHistory(this));\n this._otherTasks.push(new TaskPurge_1.TaskPurge(this, this.defaultPurgeParams));\n this._otherTasks.push(new TaskReviewStatus_1.TaskReviewStatus(this));\n this._otherTasks.push(new TaskSendWaiting_1.TaskSendWaiting(this));\n this._otherTasks.push(new TaskCheckForProofs_1.TaskCheckForProofs(this));\n this._otherTasks.push(new TaskCheckNoSends_1.TaskCheckNoSends(this));\n this._otherTasks.push(new TaskUnFail_1.TaskUnFail(this));\n this._otherTasks.push(new TaskFailAbandoned_1.TaskFailAbandoned(this));\n this._otherTasks.push(new TaskSyncWhenIdle_1.TaskSyncWhenIdle(this));\n }\n /**\n * Default tasks with settings appropriate for a single user storage\n * possibly with sync'ing enabled\n */\n addDefaultTasks() {\n this._tasks.push(new TaskClock_1.TaskClock(this));\n this._tasks.push(new TaskNewHeader_1.TaskNewHeader(this));\n this._tasks.push(new TaskMonitorCallHistory_1.TaskMonitorCallHistory(this));\n this._tasks.push(new TaskSendWaiting_1.TaskSendWaiting(this, 8 * this.oneSecond, 7 * this.oneSecond)); // Check every 8 seconds but must be 7 seconds old\n this._tasks.push(new TaskCheckForProofs_1.TaskCheckForProofs(this, 2 * this.oneHour)); // Every two hours if no block found\n this._tasks.push(new TaskCheckNoSends_1.TaskCheckNoSends(this));\n this._tasks.push(new TaskFailAbandoned_1.TaskFailAbandoned(this, 8 * this.oneMinute));\n this._tasks.push(new TaskUnFail_1.TaskUnFail(this));\n //this._tasks.push(new TaskPurge(this, this.defaultPurgeParams, 6 * this.oneHour))\n this._tasks.push(new TaskReviewStatus_1.TaskReviewStatus(this));\n }\n /**\n * Tasks appropriate for multi-user storage\n * without sync'ing enabled.\n */\n addMultiUserTasks() {\n this._tasks.push(new TaskClock_1.TaskClock(this));\n this._tasks.push(new TaskNewHeader_1.TaskNewHeader(this));\n this._tasks.push(new TaskMonitorCallHistory_1.TaskMonitorCallHistory(this));\n this._tasks.push(new 
TaskSendWaiting_1.TaskSendWaiting(this, 8 * this.oneSecond, 7 * this.oneSecond)); // Check every 8 seconds but must be 7 seconds old\n this._tasks.push(new TaskCheckForProofs_1.TaskCheckForProofs(this, 2 * this.oneHour)); // Every two hours if no block found\n this._tasks.push(new TaskCheckNoSends_1.TaskCheckNoSends(this));\n this._tasks.push(new TaskFailAbandoned_1.TaskFailAbandoned(this, 8 * this.oneMinute));\n this._tasks.push(new TaskUnFail_1.TaskUnFail(this));\n //this._tasks.push(new TaskPurge(this, this.defaultPurgeParams, 6 * this.oneHour))\n this._tasks.push(new TaskReviewStatus_1.TaskReviewStatus(this));\n }\n addTask(task) {\n if (this._tasks.some(t => t.name === task.name))\n throw new WERR_errors_1.WERR_BAD_REQUEST(`task ${task.name} has already been added.`);\n this._tasks.push(task);\n }\n removeTask(name) {\n this._tasks = this._tasks.filter(t => t.name !== name);\n }\n async setupChaintracksListeners() {\n try {\n // TODO: Use a task monitoring the newest block headere to trigger processNewHeader and reorg handling.\n }\n catch (err) {\n /* this chaintracks doesn't support event subscriptions */\n }\n }\n async runTask(name) {\n let task = this._tasks.find(t => t.name === name);\n let log = '';\n if (!task)\n task = this._otherTasks.find(t => t.name === name);\n if (task) {\n await task.asyncSetup();\n log = await task.runTask();\n }\n return log;\n }\n async runOnce() {\n if (this._runAsyncSetup) {\n for (const t of this._tasks) {\n try {\n await t.asyncSetup();\n }\n catch (eu) {\n const e = WalletError_1.WalletError.fromUnknown(eu);\n const details = `monitor task ${t.name} asyncSetup error ${e.code} ${e.description}`;\n console.log(details);\n await this.logEvent('error0', details);\n }\n if (!this._tasksRunning)\n break;\n }\n this._runAsyncSetup = false;\n }\n if (this.storage.getActive().isStorageProvider()) {\n const tasksToRun = [];\n const now = new Date().getTime();\n for (const t of this._tasks) {\n try {\n if (t.trigger(now).run)\n 
tasksToRun.push(t);\n }\n catch (eu) {\n const e = WalletError_1.WalletError.fromUnknown(eu);\n const details = `monitor task ${t.name} trigger error ${e.code} ${e.description}`;\n console.log(details);\n await this.logEvent('error0', details);\n }\n }\n for (const ttr of tasksToRun) {\n try {\n if (this.storage.getActive().isStorageProvider()) {\n const log = await ttr.runTask();\n if (log && log.length > 0) {\n console.log(`Task${ttr.name} ${log.slice(0, 256)}`);\n await this.logEvent(ttr.name, log);\n }\n }\n }\n catch (eu) {\n const e = WalletError_1.WalletError.fromUnknown(eu);\n const details = `monitor task ${ttr.name} runTask error ${e.code} ${e.description}\\n${e.stack}`;\n console.log(details);\n await this.logEvent('error1', details);\n }\n finally {\n ttr.lastRunMsecsSinceEpoch = new Date().getTime();\n }\n }\n }\n }\n async startTasks() {\n if (this._tasksRunning)\n throw new WERR_errors_1.WERR_BAD_REQUEST('monitor tasks are already runnining.');\n this._tasksRunning = true;\n for (; this._tasksRunning;) {\n await this.runOnce();\n // console.log(`${new Date().toISOString()} tasks run, waiting...`)\n await (0, utilityHelpers_1.wait)(this.options.taskRunWaitMsecs);\n }\n }\n async logEvent(event, details) {\n await this.storage.runAsStorageProvider(async (sp) => {\n await sp.insertMonitorEvent({\n created_at: new Date(),\n updated_at: new Date(),\n id: 0,\n event,\n details\n });\n });\n }\n stopTasks() {\n this._tasksRunning = false;\n }\n /**\n * Process new chain header event received from Chaintracks\n *\n * Kicks processing 'unconfirmed' and 'unmined' request processing.\n *\n * @param reqs\n */\n processNewBlockHeader(header) {\n const h = header;\n this.lastNewHeader = h;\n this.lastNewHeaderWhen = new Date();\n // console.log(`WalletMonitor notified of new block header ${h.height}`)\n // Nudge the proof checker to try again.\n TaskCheckForProofs_1.TaskCheckForProofs.checkNow = true;\n }\n /**\n * This is a function run from a TaskSendWaiting 
Monitor task.\n *\n * This allows the user of wallet-toolbox to 'subscribe' for transaction broadcast updates.\n *\n * @param broadcastResult\n */\n callOnBroadcastedTransaction(broadcastResult) {\n if (this.onTransactionBroadcasted) {\n this.onTransactionBroadcasted(broadcastResult);\n }\n }\n /**\n * This is a function run from a TaskCheckForProofs Monitor task.\n *\n * This allows the user of wallet-toolbox to 'subscribe' for transaction updates.\n *\n * @param txStatus\n */\n callOnProvenTransaction(txStatus) {\n if (this.onTransactionProven) {\n this.onTransactionProven(txStatus);\n }\n }\n /**\n * Process reorg event received from Chaintracks\n *\n * Reorgs can move recent transactions to new blocks at new index positions.\n * Affected transaction proofs become invalid and must be updated.\n *\n * It is possible for a transaction to become invalid.\n *\n * Coinbase transactions always become invalid.\n */\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n processReorg(depth, oldTip, newTip) {\n /* */\n }\n}\nexports.Monitor = Monitor;\n//# sourceMappingURL=Monitor.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/Monitor.js?\n}"); +eval("{\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || (function () {\n var ownKeys = function(o) {\n ownKeys = Object.getOwnPropertyNames || function (o) {\n var ar = [];\n for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;\n return ar;\n };\n return ownKeys(o);\n };\n return function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== \"default\") __createBinding(result, mod, k[i]);\n __setModuleDefault(result, mod);\n return result;\n };\n})();\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.Monitor = void 0;\nconst sdk = __importStar(__webpack_require__(/*! ../sdk */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/index.js\"));\nconst services_1 = __webpack_require__(/*! ../services */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/index.js\");\nconst utilityHelpers_1 = __webpack_require__(/*! ../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst TaskPurge_1 = __webpack_require__(/*! ./tasks/TaskPurge */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/tasks/TaskPurge.js\");\nconst TaskReviewStatus_1 = __webpack_require__(/*! ./tasks/TaskReviewStatus */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/tasks/TaskReviewStatus.js\");\nconst TaskSyncWhenIdle_1 = __webpack_require__(/*! ./tasks/TaskSyncWhenIdle */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/tasks/TaskSyncWhenIdle.js\");\nconst TaskFailAbandoned_1 = __webpack_require__(/*! ./tasks/TaskFailAbandoned */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/tasks/TaskFailAbandoned.js\");\nconst TaskCheckForProofs_1 = __webpack_require__(/*! 
./tasks/TaskCheckForProofs */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/tasks/TaskCheckForProofs.js\");\nconst TaskClock_1 = __webpack_require__(/*! ./tasks/TaskClock */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/tasks/TaskClock.js\");\nconst TaskNewHeader_1 = __webpack_require__(/*! ./tasks/TaskNewHeader */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/tasks/TaskNewHeader.js\");\nconst TaskMonitorCallHistory_1 = __webpack_require__(/*! ./tasks/TaskMonitorCallHistory */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/tasks/TaskMonitorCallHistory.js\");\nconst TaskSendWaiting_1 = __webpack_require__(/*! ./tasks/TaskSendWaiting */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/tasks/TaskSendWaiting.js\");\nconst TaskCheckNoSends_1 = __webpack_require__(/*! ./tasks/TaskCheckNoSends */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/tasks/TaskCheckNoSends.js\");\nconst TaskUnFail_1 = __webpack_require__(/*! 
./tasks/TaskUnFail */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/tasks/TaskUnFail.js\");\n/**\n * Background task to make sure transactions are processed, transaction proofs are received and propagated,\n * and potentially that reorgs update proofs that were already received.\n */\nclass Monitor {\n static createDefaultWalletMonitorOptions(chain, storage, services) {\n services || (services = new services_1.Services(chain));\n if (!services.options.chaintracks)\n throw new sdk.WERR_INVALID_PARAMETER('services.options.chaintracks', 'valid');\n const o = {\n chain,\n services,\n storage,\n msecsWaitPerMerkleProofServiceReq: 500,\n taskRunWaitMsecs: 5000,\n abandonedMsecs: 1000 * 60 * 5,\n unprovenAttemptsLimitTest: 10,\n unprovenAttemptsLimitMain: 144,\n chaintracks: services.options.chaintracks\n };\n return o;\n }\n constructor(options) {\n this.oneSecond = 1000;\n this.oneMinute = 60 * this.oneSecond;\n this.oneHour = 60 * this.oneMinute;\n this.oneDay = 24 * this.oneHour;\n this.oneWeek = 7 * this.oneDay;\n /**\n * _tasks are typically run by the scheduler but may also be run by runTask.\n */\n this._tasks = [];\n /**\n * _otherTasks can be run by runTask but not by scheduler.\n */\n this._otherTasks = [];\n this._tasksRunning = false;\n this.defaultPurgeParams = {\n purgeSpent: false,\n purgeCompleted: false,\n purgeFailed: true,\n purgeSpentAge: 2 * this.oneWeek,\n purgeCompletedAge: 2 * this.oneWeek,\n purgeFailedAge: 5 * this.oneDay\n };\n this._runAsyncSetup = true;\n this.options = { ...options };\n this.services = options.services;\n this.chain = this.services.chain;\n this.storage = options.storage;\n this.chaintracks = options.chaintracks;\n this.onTransactionProven = options.onTransactionProven;\n this.onTransactionBroadcasted = options.onTransactionBroadcasted;\n }\n addAllTasksToOther() {\n this._otherTasks.push(new TaskClock_1.TaskClock(this));\n this._otherTasks.push(new TaskNewHeader_1.TaskNewHeader(this));\n 
this._otherTasks.push(new TaskMonitorCallHistory_1.TaskMonitorCallHistory(this));\n this._otherTasks.push(new TaskPurge_1.TaskPurge(this, this.defaultPurgeParams));\n this._otherTasks.push(new TaskReviewStatus_1.TaskReviewStatus(this));\n this._otherTasks.push(new TaskSendWaiting_1.TaskSendWaiting(this));\n this._otherTasks.push(new TaskCheckForProofs_1.TaskCheckForProofs(this));\n this._otherTasks.push(new TaskCheckNoSends_1.TaskCheckNoSends(this));\n this._otherTasks.push(new TaskUnFail_1.TaskUnFail(this));\n this._otherTasks.push(new TaskFailAbandoned_1.TaskFailAbandoned(this));\n this._otherTasks.push(new TaskSyncWhenIdle_1.TaskSyncWhenIdle(this));\n }\n /**\n * Default tasks with settings appropriate for a single user storage\n * possibly with sync'ing enabled\n */\n addDefaultTasks() {\n this._tasks.push(new TaskClock_1.TaskClock(this));\n this._tasks.push(new TaskNewHeader_1.TaskNewHeader(this));\n this._tasks.push(new TaskMonitorCallHistory_1.TaskMonitorCallHistory(this));\n this._tasks.push(new TaskSendWaiting_1.TaskSendWaiting(this, 8 * this.oneSecond, 7 * this.oneSecond)); // Check every 8 seconds but must be 7 seconds old\n this._tasks.push(new TaskCheckForProofs_1.TaskCheckForProofs(this, 2 * this.oneHour)); // Every two hours if no block found\n this._tasks.push(new TaskCheckNoSends_1.TaskCheckNoSends(this));\n this._tasks.push(new TaskFailAbandoned_1.TaskFailAbandoned(this, 8 * this.oneMinute));\n this._tasks.push(new TaskUnFail_1.TaskUnFail(this));\n //this._tasks.push(new TaskPurge(this, this.defaultPurgeParams, 6 * this.oneHour))\n this._tasks.push(new TaskReviewStatus_1.TaskReviewStatus(this));\n }\n /**\n * Tasks appropriate for multi-user storage\n * without sync'ing enabled.\n */\n addMultiUserTasks() {\n this._tasks.push(new TaskClock_1.TaskClock(this));\n this._tasks.push(new TaskNewHeader_1.TaskNewHeader(this));\n this._tasks.push(new TaskMonitorCallHistory_1.TaskMonitorCallHistory(this));\n this._tasks.push(new 
TaskSendWaiting_1.TaskSendWaiting(this, 8 * this.oneSecond, 7 * this.oneSecond)); // Check every 8 seconds but must be 7 seconds old\n this._tasks.push(new TaskCheckForProofs_1.TaskCheckForProofs(this, 2 * this.oneHour)); // Every two hours if no block found\n this._tasks.push(new TaskCheckNoSends_1.TaskCheckNoSends(this));\n this._tasks.push(new TaskFailAbandoned_1.TaskFailAbandoned(this, 8 * this.oneMinute));\n this._tasks.push(new TaskUnFail_1.TaskUnFail(this));\n //this._tasks.push(new TaskPurge(this, this.defaultPurgeParams, 6 * this.oneHour))\n this._tasks.push(new TaskReviewStatus_1.TaskReviewStatus(this));\n }\n addTask(task) {\n if (this._tasks.some(t => t.name === task.name))\n throw new sdk.WERR_BAD_REQUEST(`task ${task.name} has already been added.`);\n this._tasks.push(task);\n }\n removeTask(name) {\n this._tasks = this._tasks.filter(t => t.name !== name);\n }\n async setupChaintracksListeners() {\n try {\n // TODO: Use a task monitoring the newest block headere to trigger processNewHeader and reorg handling.\n }\n catch (err) {\n /* this chaintracks doesn't support event subscriptions */\n }\n }\n async runTask(name) {\n let task = this._tasks.find(t => t.name === name);\n let log = '';\n if (!task)\n task = this._otherTasks.find(t => t.name === name);\n if (task) {\n await task.asyncSetup();\n log = await task.runTask();\n }\n return log;\n }\n async runOnce() {\n if (this._runAsyncSetup) {\n for (const t of this._tasks) {\n try {\n await t.asyncSetup();\n }\n catch (eu) {\n const e = sdk.WalletError.fromUnknown(eu);\n const details = `monitor task ${t.name} asyncSetup error ${e.code} ${e.description}`;\n console.log(details);\n await this.logEvent('error0', details);\n }\n if (!this._tasksRunning)\n break;\n }\n this._runAsyncSetup = false;\n }\n if (this.storage.getActive().isStorageProvider()) {\n const tasksToRun = [];\n const now = new Date().getTime();\n for (const t of this._tasks) {\n try {\n if (t.trigger(now).run)\n tasksToRun.push(t);\n 
}\n catch (eu) {\n const e = sdk.WalletError.fromUnknown(eu);\n const details = `monitor task ${t.name} trigger error ${e.code} ${e.description}`;\n console.log(details);\n await this.logEvent('error0', details);\n }\n }\n for (const ttr of tasksToRun) {\n try {\n if (this.storage.getActive().isStorageProvider()) {\n const log = await ttr.runTask();\n if (log && log.length > 0) {\n console.log(`Task${ttr.name} ${log.slice(0, 256)}`);\n await this.logEvent(ttr.name, log);\n }\n }\n }\n catch (eu) {\n const e = sdk.WalletError.fromUnknown(eu);\n const details = `monitor task ${ttr.name} runTask error ${e.code} ${e.description}\\n${e.stack}`;\n console.log(details);\n await this.logEvent('error1', details);\n }\n finally {\n ttr.lastRunMsecsSinceEpoch = new Date().getTime();\n }\n }\n }\n }\n async startTasks() {\n if (this._tasksRunning)\n throw new sdk.WERR_BAD_REQUEST('monitor tasks are already runnining.');\n this._tasksRunning = true;\n for (; this._tasksRunning;) {\n await this.runOnce();\n // console.log(`${new Date().toISOString()} tasks run, waiting...`)\n await (0, utilityHelpers_1.wait)(this.options.taskRunWaitMsecs);\n }\n }\n async logEvent(event, details) {\n await this.storage.runAsStorageProvider(async (sp) => {\n await sp.insertMonitorEvent({\n created_at: new Date(),\n updated_at: new Date(),\n id: 0,\n event,\n details\n });\n });\n }\n stopTasks() {\n this._tasksRunning = false;\n }\n /**\n * Process new chain header event received from Chaintracks\n *\n * Kicks processing 'unconfirmed' and 'unmined' request processing.\n *\n * @param reqs\n */\n processNewBlockHeader(header) {\n const h = header;\n this.lastNewHeader = h;\n this.lastNewHeaderWhen = new Date();\n console.log(`WalletMonitor notified of new block header ${h.height}`);\n // Nudge the proof checker to try again.\n TaskCheckForProofs_1.TaskCheckForProofs.checkNow = true;\n }\n /**\n * This is a function run from a TaskSendWaiting Monitor task.\n *\n * This allows the user of 
wallet-toolbox to 'subscribe' for transaction broadcast updates.\n *\n * @param broadcastResult\n */\n callOnBroadcastedTransaction(broadcastResult) {\n if (this.onTransactionBroadcasted) {\n this.onTransactionBroadcasted(broadcastResult);\n }\n }\n /**\n * This is a function run from a TaskCheckForProofs Monitor task.\n *\n * This allows the user of wallet-toolbox to 'subscribe' for transaction updates.\n *\n * @param txStatus\n */\n callOnProvenTransaction(txStatus) {\n if (this.onTransactionProven) {\n this.onTransactionProven(txStatus);\n }\n }\n /**\n * Process reorg event received from Chaintracks\n *\n * Reorgs can move recent transactions to new blocks at new index positions.\n * Affected transaction proofs become invalid and must be updated.\n *\n * It is possible for a transaction to become invalid.\n *\n * Coinbase transactions always become invalid.\n */\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n processReorg(depth, oldTip, newTip) {\n /* */\n }\n}\nexports.Monitor = Monitor;\n//# sourceMappingURL=Monitor.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/Monitor.js?\n}"); /***/ }), @@ -2827,7 +2871,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.TaskCheckForProofs = void 0;\nexports.getProofs = getProofs;\nconst entities_1 = __webpack_require__(/*! ../../storage/schema/entities */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/index.js\");\nconst utilityHelpers_1 = __webpack_require__(/*! ../../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst utilityHelpers_noBuffer_1 = __webpack_require__(/*! 
../../utility/utilityHelpers.noBuffer */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.noBuffer.js\");\nconst WalletMonitorTask_1 = __webpack_require__(/*! ./WalletMonitorTask */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/tasks/WalletMonitorTask.js\");\n/**\n * `TaskCheckForProofs` is a WalletMonitor task that retreives merkle proofs for\n * transactions.\n *\n * It is normally triggered by the Chaintracks new block header event.\n *\n * When a new block is found, cwi-external-services are used to obtain proofs for\n * any transactions that are currently in the 'unmined' or 'unknown' state.\n *\n * If a proof is obtained and validated, a new ProvenTx record is created and\n * the original ProvenTxReq status is advanced to 'notifying'.\n */\nclass TaskCheckForProofs extends WalletMonitorTask_1.WalletMonitorTask {\n constructor(monitor, triggerMsecs = 0) {\n super(monitor, TaskCheckForProofs.taskName);\n this.triggerMsecs = triggerMsecs;\n }\n /**\n * Normally triggered by checkNow getting set by new block header found event from chaintracks\n */\n trigger(nowMsecsSinceEpoch) {\n return {\n run: TaskCheckForProofs.checkNow\n // Check only when checkNow flag is set.\n // || (this.triggerMsecs > 0 && nowMsecsSinceEpoch - this.lastRunMsecsSinceEpoch > this.triggerMsecs)\n };\n }\n async runTask() {\n var _a;\n let log = '';\n const countsAsAttempt = TaskCheckForProofs.checkNow;\n TaskCheckForProofs.checkNow = false;\n const maxAcceptableHeight = (_a = this.monitor.lastNewHeader) === null || _a === void 0 ? 
void 0 : _a.height;\n if (maxAcceptableHeight === undefined) {\n return log;\n }\n const limit = 100;\n let offset = 0;\n for (;;) {\n const reqs = await this.storage.findProvenTxReqs({\n partial: {},\n status: ['callback', 'unmined', 'sending', 'unknown', 'unconfirmed'],\n paged: { limit, offset }\n });\n if (reqs.length === 0)\n break;\n log += `${reqs.length} reqs with status 'callback', 'unmined', 'sending', 'unknown', or 'unconfirmed'\\n`;\n const r = await getProofs(this, reqs, 2, countsAsAttempt, false, maxAcceptableHeight);\n log += `${r.log}\\n`;\n //console.log(log);\n if (reqs.length < limit)\n break;\n offset += limit;\n }\n return log;\n }\n}\nexports.TaskCheckForProofs = TaskCheckForProofs;\nTaskCheckForProofs.taskName = 'CheckForProofs';\n/**\n * An external service such as the chaintracks new block header\n * listener can set this true to cause\n */\nTaskCheckForProofs.checkNow = false;\n/**\n * Process an array of table.ProvenTxReq (typically with status 'unmined' or 'unknown')\n *\n * If req is invalid, set status 'invalid'\n *\n * Verify the requests are valid, lookup proofs or updated transaction status using the array of getProofServices,\n *\n * When proofs are found, create new ProvenTxApi records and transition the requests' status to 'unconfirmed' or 'notifying',\n * depending on chaintracks succeeding on proof verification.\n *\n * Increments attempts if proofs where requested.\n *\n * @param reqs\n * @returns reqs partitioned by status\n */\nasync function getProofs(task, reqs, indent = 0, countsAsAttempt = false, ignoreStatus = false, maxAcceptableHeight) {\n const proven = [];\n const invalid = [];\n let log = '';\n for (const reqApi of reqs) {\n log += ' '.repeat(indent);\n log += `reqId ${reqApi.provenTxReqId} txid ${reqApi.txid}: `;\n if (!ignoreStatus &&\n reqApi.status !== 'callback' &&\n reqApi.status !== 'unmined' &&\n reqApi.status !== 'unknown' &&\n reqApi.status !== 'unconfirmed' &&\n reqApi.status !== 'nosend' &&\n 
reqApi.status !== 'sending') {\n log += `status of '${reqApi.status}' is not ready to be proven.\\n`;\n continue;\n }\n const req = new entities_1.EntityProvenTxReq(reqApi);\n if (Number.isInteger(req.provenTxId)) {\n log += `Already linked to provenTxId ${req.provenTxId}.\\n`;\n req.notified = false;\n req.status = 'completed';\n await req.updateStorageDynamicProperties(task.storage);\n proven.push(reqApi);\n continue;\n }\n log += '\\n';\n let reqIsValid = false;\n if (req.rawTx) {\n const txid = (0, utilityHelpers_noBuffer_1.asString)((0, utilityHelpers_1.doubleSha256BE)(req.rawTx));\n if (txid === req.txid)\n reqIsValid = true;\n }\n if (!reqIsValid) {\n log += ` rawTx doesn't hash to txid. status => invalid.\\n`;\n req.notified = false;\n req.status = 'invalid';\n await req.updateStorageDynamicProperties(task.storage);\n invalid.push(reqApi);\n continue;\n }\n const limit = task.monitor.chain === 'main'\n ? task.monitor.options.unprovenAttemptsLimitMain\n : task.monitor.options.unprovenAttemptsLimitTest;\n if (!ignoreStatus && req.attempts > limit) {\n log += ` too many failed attempts ${req.attempts}\\n`;\n req.notified = false;\n req.status = 'invalid';\n await req.updateStorageDynamicProperties(task.storage);\n invalid.push(reqApi);\n continue;\n }\n const since = new Date();\n let r;\n let ptx;\n // External services will try multiple providers until one returns a proof,\n // or they all fail.\n // There may also be an array of proofs to consider when a transaction\n // is recently mined and appears in orphan blocks in addition to active chain blocks.\n // Since orphan blocks can end up on chain again, multiple proofs has value.\n //\n // On failure, there may be a mapi response, or an error.\n //\n // The proofs returned are considered sequentially, validating and chaintracks confirming.\n //\n // If a good proof is found, proceed to using it.\n //\n // When all received proofs fail, force a bump to the next service provider and try\n // one more time.\n 
//\n r = await task.monitor.services.getMerklePath(req.txid);\n if (r.header && r.header.height > maxAcceptableHeight) {\n // Ignore proofs from bleeding edge of new blocks as these are the most often re-orged.\n log += ` ignoring possible proof from very new block at height ${r.header.height} ${r.header.hash}\\n`;\n continue;\n }\n ptx = await entities_1.EntityProvenTx.fromReq(req, r, countsAsAttempt && req.status !== 'nosend');\n if (ptx) {\n // We have a merklePath proof for the request (and a block header)\n await req.updateStorageDynamicProperties(task.storage);\n await req.refreshFromStorage(task.storage);\n const { provenTxReqId, status, txid, attempts, history } = req.toApi();\n const { index, height, blockHash, merklePath, merkleRoot } = ptx.toApi();\n const r = await task.storage.runAsStorageProvider(async (sp) => {\n return await sp.updateProvenTxReqWithNewProvenTx({\n provenTxReqId,\n status,\n txid,\n attempts,\n history,\n index,\n height,\n blockHash,\n merklePath,\n merkleRoot\n });\n });\n req.status = r.status;\n req.apiHistory = r.history;\n req.provenTxId = r.provenTxId;\n req.notified = true;\n task.monitor.callOnProvenTransaction({\n txid,\n txIndex: index,\n blockHeight: height,\n blockHash,\n merklePath,\n merkleRoot\n });\n }\n else {\n if (countsAsAttempt && req.status !== 'nosend') {\n req.attempts++;\n }\n }\n await req.updateStorageDynamicProperties(task.storage);\n await req.refreshFromStorage(task.storage);\n log += req.historyPretty(since, indent + 2) + '\\n';\n if (req.status === 'completed')\n proven.push(req.api);\n if (req.status === 'invalid')\n invalid.push(req.api);\n }\n return { proven, invalid, log };\n}\n//# sourceMappingURL=TaskCheckForProofs.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/tasks/TaskCheckForProofs.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.TaskCheckForProofs = void 0;\nexports.getProofs = getProofs;\nconst 
entities_1 = __webpack_require__(/*! ../../storage/schema/entities */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/index.js\");\nconst utilityHelpers_1 = __webpack_require__(/*! ../../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst utilityHelpers_noBuffer_1 = __webpack_require__(/*! ../../utility/utilityHelpers.noBuffer */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.noBuffer.js\");\nconst WalletMonitorTask_1 = __webpack_require__(/*! ./WalletMonitorTask */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/tasks/WalletMonitorTask.js\");\n/**\n * `TaskCheckForProofs` is a WalletMonitor task that retreives merkle proofs for\n * transactions.\n *\n * It is normally triggered by the Chaintracks new block header event.\n *\n * When a new block is found, cwi-external-services are used to obtain proofs for\n * any transactions that are currently in the 'unmined' or 'unknown' state.\n *\n * If a proof is obtained and validated, a new ProvenTx record is created and\n * the original ProvenTxReq status is advanced to 'notifying'.\n */\nclass TaskCheckForProofs extends WalletMonitorTask_1.WalletMonitorTask {\n constructor(monitor, triggerMsecs = 0) {\n super(monitor, TaskCheckForProofs.taskName);\n this.triggerMsecs = triggerMsecs;\n }\n /**\n * Normally triggered by checkNow getting set by new block header found event from chaintracks\n */\n trigger(nowMsecsSinceEpoch) {\n return {\n run: TaskCheckForProofs.checkNow ||\n (this.triggerMsecs > 0 && nowMsecsSinceEpoch - this.lastRunMsecsSinceEpoch > this.triggerMsecs)\n };\n }\n async runTask() {\n var _a;\n let log = '';\n const countsAsAttempt = TaskCheckForProofs.checkNow;\n TaskCheckForProofs.checkNow = false;\n const maxAcceptableHeight = (_a = this.monitor.lastNewHeader) === null || _a === void 0 ? 
void 0 : _a.height;\n if (maxAcceptableHeight === undefined) {\n return log;\n }\n const limit = 100;\n let offset = 0;\n for (;;) {\n const reqs = await this.storage.findProvenTxReqs({\n partial: {},\n status: ['callback', 'unmined', 'sending', 'unknown', 'unconfirmed'],\n paged: { limit, offset }\n });\n if (reqs.length === 0)\n break;\n log += `${reqs.length} reqs with status 'callback', 'unmined', 'sending', 'unknown', or 'unconfirmed'\\n`;\n const r = await getProofs(this, reqs, 2, countsAsAttempt, false, maxAcceptableHeight);\n log += `${r.log}\\n`;\n //console.log(log);\n if (reqs.length < limit)\n break;\n offset += limit;\n }\n return log;\n }\n}\nexports.TaskCheckForProofs = TaskCheckForProofs;\nTaskCheckForProofs.taskName = 'CheckForProofs';\n/**\n * An external service such as the chaintracks new block header\n * listener can set this true to cause\n */\nTaskCheckForProofs.checkNow = false;\n/**\n * Process an array of table.ProvenTxReq (typically with status 'unmined' or 'unknown')\n *\n * If req is invalid, set status 'invalid'\n *\n * Verify the requests are valid, lookup proofs or updated transaction status using the array of getProofServices,\n *\n * When proofs are found, create new ProvenTxApi records and transition the requests' status to 'unconfirmed' or 'notifying',\n * depending on chaintracks succeeding on proof verification.\n *\n * Increments attempts if proofs where requested.\n *\n * @param reqs\n * @returns reqs partitioned by status\n */\nasync function getProofs(task, reqs, indent = 0, countsAsAttempt = false, ignoreStatus = false, maxAcceptableHeight) {\n const proven = [];\n const invalid = [];\n let log = '';\n for (const reqApi of reqs) {\n log += ' '.repeat(indent);\n log += `reqId ${reqApi.provenTxReqId} txid ${reqApi.txid}: `;\n if (!ignoreStatus &&\n reqApi.status !== 'callback' &&\n reqApi.status !== 'unmined' &&\n reqApi.status !== 'unknown' &&\n reqApi.status !== 'unconfirmed' &&\n reqApi.status !== 'nosend' &&\n 
reqApi.status !== 'sending') {\n log += `status of '${reqApi.status}' is not ready to be proven.\\n`;\n continue;\n }\n const req = new entities_1.EntityProvenTxReq(reqApi);\n if (Number.isInteger(req.provenTxId)) {\n log += `Already linked to provenTxId ${req.provenTxId}.\\n`;\n req.notified = false;\n req.status = 'completed';\n await req.updateStorageDynamicProperties(task.storage);\n proven.push(reqApi);\n continue;\n }\n log += '\\n';\n let reqIsValid = false;\n if (req.rawTx) {\n const txid = (0, utilityHelpers_noBuffer_1.asString)((0, utilityHelpers_1.doubleSha256BE)(req.rawTx));\n if (txid === req.txid)\n reqIsValid = true;\n }\n if (!reqIsValid) {\n log += ` rawTx doesn't hash to txid. status => invalid.\\n`;\n req.notified = false;\n req.status = 'invalid';\n await req.updateStorageDynamicProperties(task.storage);\n invalid.push(reqApi);\n continue;\n }\n const limit = task.monitor.chain === 'main'\n ? task.monitor.options.unprovenAttemptsLimitMain\n : task.monitor.options.unprovenAttemptsLimitTest;\n if (!ignoreStatus && req.attempts > limit) {\n log += ` too many failed attempts ${req.attempts}\\n`;\n req.notified = false;\n req.status = 'invalid';\n await req.updateStorageDynamicProperties(task.storage);\n invalid.push(reqApi);\n continue;\n }\n const since = new Date();\n let r;\n let ptx;\n // External services will try multiple providers until one returns a proof,\n // or they all fail.\n // There may also be an array of proofs to consider when a transaction\n // is recently mined and appears in orphan blocks in addition to active chain blocks.\n // Since orphan blocks can end up on chain again, multiple proofs has value.\n //\n // On failure, there may be a mapi response, or an error.\n //\n // The proofs returned are considered sequentially, validating and chaintracks confirming.\n //\n // If a good proof is found, proceed to using it.\n //\n // When all received proofs fail, force a bump to the next service provider and try\n // one more time.\n 
//\n r = await task.monitor.services.getMerklePath(req.txid);\n if (r.header && r.header.height > maxAcceptableHeight) {\n // Ignore proofs from bleeding edge of new blocks as these are the most often re-orged.\n log += ` ignoring possible proof from very new block at height ${r.header.height} ${r.header.hash}\\n`;\n continue;\n }\n ptx = await entities_1.EntityProvenTx.fromReq(req, r, countsAsAttempt && req.status !== 'nosend');\n if (ptx) {\n // We have a merklePath proof for the request (and a block header)\n await req.updateStorageDynamicProperties(task.storage);\n await req.refreshFromStorage(task.storage);\n const { provenTxReqId, status, txid, attempts, history } = req.toApi();\n const { index, height, blockHash, merklePath, merkleRoot } = ptx.toApi();\n const r = await task.storage.runAsStorageProvider(async (sp) => {\n return await sp.updateProvenTxReqWithNewProvenTx({\n provenTxReqId,\n status,\n txid,\n attempts,\n history,\n index,\n height,\n blockHash,\n merklePath,\n merkleRoot\n });\n });\n req.status = r.status;\n req.apiHistory = r.history;\n req.provenTxId = r.provenTxId;\n req.notified = true;\n task.monitor.callOnProvenTransaction({\n txid,\n txIndex: index,\n blockHeight: height,\n blockHash,\n merklePath,\n merkleRoot\n });\n }\n else {\n if (countsAsAttempt && req.status !== 'nosend') {\n req.attempts++;\n }\n }\n await req.updateStorageDynamicProperties(task.storage);\n await req.refreshFromStorage(task.storage);\n log += req.historyPretty(since, indent + 2) + '\\n';\n if (req.status === 'completed')\n proven.push(req.api);\n if (req.status === 'invalid')\n invalid.push(req.api);\n }\n return { proven, invalid, log };\n}\n//# sourceMappingURL=TaskCheckForProofs.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/tasks/TaskCheckForProofs.js?\n}"); /***/ }), @@ -2882,7 +2926,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, 
\"__esModule\", ({ value: true }));\nexports.TaskNewHeader = void 0;\nconst WalletMonitorTask_1 = __webpack_require__(/*! ./WalletMonitorTask */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/tasks/WalletMonitorTask.js\");\n/**\n * This task polls for new block headers performing two essential functions:\n * 1. The arrival of a new block is the right time to check for proofs for recently broadcast transactions.\n * 2. The height of the block is used to limit which proofs are accepted with the aim of avoiding re-orged proofs.\n *\n * The most common new block orphan is one which is almost immediately orphaned.\n * Waiting a minute before pursuing proof requests avoids almost all the re-org work that could be done.\n * Thus this task queues new headers for one cycle.\n * If a new header arrives during that cycle, it replaces the queued header and delays again.\n * Only when there is an elapsed cycle without a new header does proof solicitation get triggered,\n * with that header height as the limit for which proofs are accepted.\n */\nclass TaskNewHeader extends WalletMonitorTask_1.WalletMonitorTask {\n constructor(monitor, triggerMsecs = 1 * monitor.oneMinute) {\n super(monitor, TaskNewHeader.taskName);\n this.triggerMsecs = triggerMsecs;\n }\n async getHeader() {\n return await this.monitor.chaintracks.findChainTipHeader();\n }\n /**\n * TODO: This is a temporary incomplete solution for which a full chaintracker\n * with new header and reorg event notification is required.\n *\n * New header events drive retrieving merklePaths for newly mined transactions.\n * This implementation performs this function.\n *\n * Reorg events are needed to know when previously retrieved mekrlePaths need to be\n * updated in the proven_txs table (and ideally notifications delivered to users).\n * Note that in-general, a reorg only shifts where in the block a transaction is mined,\n * and sometimes which block. 
In the case of coinbase transactions, a transaction may\n * also fail after a reorg.\n */\n async asyncSetup() { }\n trigger(nowMsecsSinceEpoch) {\n const run = true;\n return { run };\n }\n async runTask() {\n let log = '';\n const oldHeader = this.header;\n this.header = await this.getHeader();\n let isNew = true;\n if (!oldHeader) {\n log = `first header: ${this.header.height} ${this.header.hash}`;\n }\n else if (oldHeader.height > this.header.height) {\n log = `old header: ${this.header.height} vs ${oldHeader.height}`;\n this.header = oldHeader; // Revert to old header with the higher height\n isNew = false;\n }\n else if (oldHeader.height < this.header.height) {\n const skip = this.header.height - oldHeader.height - 1;\n const skipped = skip > 0 ? ` SKIPPED ${skip}` : '';\n log = `new header: ${this.header.height} ${this.header.hash}${skipped}`;\n }\n else if (oldHeader.height === this.header.height && oldHeader.hash != this.header.hash) {\n log = `reorg header: ${this.header.height} ${this.header.hash}`;\n }\n else {\n isNew = false;\n }\n if (isNew) {\n this.queuedHeader = this.header;\n this.queuedHeaderWhen = new Date();\n }\n else if (this.queuedHeader) {\n // Only process new block header if it has remained the chain tip for a full cycle\n const delay = (new Date().getTime() - this.queuedHeaderWhen.getTime()) / 1000; // seconds\n log = `process header: ${this.header.height} ${this.header.hash} delayed ${delay.toFixed(1)} secs`;\n this.monitor.processNewBlockHeader(this.queuedHeader);\n this.queuedHeader = undefined;\n }\n return log;\n }\n}\nexports.TaskNewHeader = TaskNewHeader;\nTaskNewHeader.taskName = 'NewHeader';\n//# sourceMappingURL=TaskNewHeader.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/tasks/TaskNewHeader.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.TaskNewHeader = void 0;\nconst WalletMonitorTask_1 = __webpack_require__(/*! 
./WalletMonitorTask */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/tasks/WalletMonitorTask.js\");\n/**\n * This task polls for new block headers performing two essential functions:\n * 1. The arrival of a new block is the right time to check for proofs for recently broadcast transactions.\n * 2. The height of the block is used to limit which proofs are accepted with the aim of avoiding re-orged proofs.\n *\n * The most common new block orphan is one which is almost immediately orphaned.\n * Waiting a minute before pursuing proof requests avoids almost all the re-org work that could be done.\n * Thus this task queues new headers for one cycle.\n * If a new header arrives during that cycle, it replaces the queued header and delays again.\n * Only when there is an elapsed cycle without a new header does proof solicitation get triggered,\n * with that header height as the limit for which proofs are accepted.\n */\nclass TaskNewHeader extends WalletMonitorTask_1.WalletMonitorTask {\n constructor(monitor, triggerMsecs = 1 * monitor.oneMinute) {\n super(monitor, TaskNewHeader.taskName);\n this.triggerMsecs = triggerMsecs;\n }\n async getHeader() {\n return await this.monitor.chaintracks.findChainTipHeader();\n }\n trigger(nowMsecsSinceEpoch) {\n const run = true;\n return { run };\n }\n async runTask() {\n let log = '';\n const oldHeader = this.header;\n this.header = await this.getHeader();\n let isNew = true;\n if (!oldHeader) {\n log = `first header: ${this.header.height} ${this.header.hash}`;\n }\n else if (oldHeader.height < this.header.height) {\n const skip = this.header.height - oldHeader.height - 1;\n const skipped = skip > 0 ? 
` SKIPPED ${skip}` : '';\n log = `new header: ${this.header.height} ${this.header.hash}${skipped}`;\n }\n else if (oldHeader.height === this.header.height && oldHeader.hash != this.header.hash) {\n log = `reorg header: ${this.header.height} ${this.header.hash}`;\n }\n else {\n isNew = false;\n }\n if (isNew) {\n this.queuedHeader = this.header;\n }\n else if (this.queuedHeader) {\n // Only process new block header if it has remained the chain tip for a full cycle\n log = `process header: ${this.header.height} ${this.header.hash}`;\n this.monitor.processNewBlockHeader(this.queuedHeader);\n this.queuedHeader = undefined;\n }\n return log;\n }\n}\nexports.TaskNewHeader = TaskNewHeader;\nTaskNewHeader.taskName = 'NewHeader';\n//# sourceMappingURL=TaskNewHeader.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/tasks/TaskNewHeader.js?\n}"); /***/ }), @@ -2915,7 +2959,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.TaskSendWaiting = void 0;\nconst WalletMonitorTask_1 = __webpack_require__(/*! ./WalletMonitorTask */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/tasks/WalletMonitorTask.js\");\nconst attemptToPostReqsToNetwork_1 = __webpack_require__(/*! ../../storage/methods/attemptToPostReqsToNetwork */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/attemptToPostReqsToNetwork.js\");\nconst aggregateResults_1 = __webpack_require__(/*! ../../utility/aggregateResults */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/aggregateResults.js\");\nconst utilityHelpers_1 = __webpack_require__(/*! ../../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst EntityProvenTxReq_1 = __webpack_require__(/*! 
../../storage/schema/entities/EntityProvenTxReq */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityProvenTxReq.js\");\nclass TaskSendWaiting extends WalletMonitorTask_1.WalletMonitorTask {\n constructor(monitor, triggerMsecs = monitor.oneSecond * 8, agedMsecs = monitor.oneSecond * 7, sendingMsecs = monitor.oneMinute * 5) {\n super(monitor, TaskSendWaiting.taskName);\n this.triggerMsecs = triggerMsecs;\n this.agedMsecs = agedMsecs;\n this.sendingMsecs = sendingMsecs;\n this.includeSending = true;\n }\n trigger(nowMsecsSinceEpoch) {\n this.includeSending =\n !this.lastSendingRunMsecsSinceEpoch || nowMsecsSinceEpoch > this.lastSendingRunMsecsSinceEpoch + this.sendingMsecs;\n if (this.includeSending)\n this.lastSendingRunMsecsSinceEpoch = nowMsecsSinceEpoch;\n return {\n run: nowMsecsSinceEpoch > this.lastRunMsecsSinceEpoch + this.triggerMsecs\n };\n }\n async runTask() {\n let log = '';\n const limit = 100;\n let offset = 0;\n const agedLimit = new Date(Date.now() - this.agedMsecs);\n const status = this.includeSending ? 
['unsent', 'sending'] : ['unsent'];\n for (;;) {\n let reqs = await this.storage.findProvenTxReqs({\n partial: {},\n status,\n paged: { limit, offset }\n });\n const count = reqs.length;\n if (reqs.length === 0)\n break;\n log += `${reqs.length} reqs with status ${status.join(' or ')}\\n`;\n const agedReqs = reqs.filter(req => (0, utilityHelpers_1.verifyTruthy)(req.updated_at) < agedLimit);\n log += ` Of those reqs, ${agedReqs.length} where last updated before ${agedLimit.toISOString()}.\\n`;\n log += await this.processUnsent(agedReqs, 2);\n if (count < limit)\n break;\n offset += limit;\n }\n return log;\n }\n /**\n * Process an array of 'unsent' status table.ProvenTxReq\n *\n * Send rawTx to transaction processor(s), requesting proof callbacks when possible.\n *\n * Set status 'invalid' if req is invalid.\n *\n * Set status to 'callback' on successful network submission with callback service.\n *\n * Set status to 'unmined' on successful network submission without callback service.\n *\n * Add mapi responses to database table if received.\n *\n * Increments attempts if sending was attempted.\n *\n * @param reqApis\n */\n async processUnsent(reqApis, indent = 0) {\n let log = '';\n for (let i = 0; i < reqApis.length; i++) {\n const reqApi = reqApis[i];\n log += ' '.repeat(indent);\n log += `${i} reqId=${reqApi.provenTxReqId} attempts=${reqApi.attempts} txid=${reqApi.txid}: \\n`;\n if (reqApi.status !== 'unsent' && reqApi.status !== 'sending') {\n log += ` status now ${reqApi.status}\\n`;\n continue;\n }\n const req = new EntityProvenTxReq_1.EntityProvenTxReq(reqApi);\n const reqs = [];\n if (req.batch) {\n // Make sure wew process entire batch together for efficient beef generation\n const batchReqApis = await this.storage.findProvenTxReqs({\n partial: { batch: req.batch, status: 'unsent' }\n });\n for (const bra of batchReqApis) {\n // Remove any matching batchReqApis from reqApis\n const index = reqApis.findIndex(ra => ra.provenTxReqId === bra.provenTxReqId);\n 
if (index > -1)\n reqApis.slice(index, index + 1);\n // And add to reqs being processed now:\n reqs.push(new EntityProvenTxReq_1.EntityProvenTxReq(bra));\n }\n }\n else {\n // Just a single non-batched req...\n reqs.push(req);\n }\n const r = await this.storage.runAsStorageProvider(async (sp) => {\n return (0, attemptToPostReqsToNetwork_1.attemptToPostReqsToNetwork)(sp, reqs);\n });\n if (this.monitor.onTransactionBroadcasted) {\n const rar = await this.storage.runAsStorageProvider(async (sp) => {\n const ars = [{ txid: req.txid, status: 'sending' }];\n const { rar } = await (0, aggregateResults_1.aggregateActionResults)(sp, ars, r);\n return rar;\n });\n this.monitor.callOnBroadcastedTransaction(rar[0]);\n }\n log += r.log;\n }\n return log;\n }\n}\nexports.TaskSendWaiting = TaskSendWaiting;\nTaskSendWaiting.taskName = 'SendWaiting';\n//# sourceMappingURL=TaskSendWaiting.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/tasks/TaskSendWaiting.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.TaskSendWaiting = void 0;\nconst index_client_1 = __webpack_require__(/*! ../../storage/index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/index.client.js\");\nconst index_client_2 = __webpack_require__(/*! ../../utility/index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/index.client.js\");\nconst WalletMonitorTask_1 = __webpack_require__(/*! ./WalletMonitorTask */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/tasks/WalletMonitorTask.js\");\nconst attemptToPostReqsToNetwork_1 = __webpack_require__(/*! ../../storage/methods/attemptToPostReqsToNetwork */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/attemptToPostReqsToNetwork.js\");\nconst aggregateResults_1 = __webpack_require__(/*! 
../../utility/aggregateResults */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/aggregateResults.js\");\nclass TaskSendWaiting extends WalletMonitorTask_1.WalletMonitorTask {\n constructor(monitor, triggerMsecs = monitor.oneSecond * 8, agedMsecs = monitor.oneSecond * 7, sendingMsecs = monitor.oneMinute * 5) {\n super(monitor, TaskSendWaiting.taskName);\n this.triggerMsecs = triggerMsecs;\n this.agedMsecs = agedMsecs;\n this.sendingMsecs = sendingMsecs;\n this.includeSending = true;\n }\n trigger(nowMsecsSinceEpoch) {\n this.includeSending =\n !this.lastSendingRunMsecsSinceEpoch || nowMsecsSinceEpoch > this.lastSendingRunMsecsSinceEpoch + this.sendingMsecs;\n if (this.includeSending)\n this.lastSendingRunMsecsSinceEpoch = nowMsecsSinceEpoch;\n return {\n run: nowMsecsSinceEpoch > this.lastRunMsecsSinceEpoch + this.triggerMsecs\n };\n }\n async runTask() {\n let log = '';\n const limit = 100;\n let offset = 0;\n const agedLimit = new Date(Date.now() - this.agedMsecs);\n const status = this.includeSending ? 
['unsent', 'sending'] : ['unsent'];\n for (;;) {\n let reqs = await this.storage.findProvenTxReqs({\n partial: {},\n status,\n paged: { limit, offset }\n });\n const count = reqs.length;\n if (reqs.length === 0)\n break;\n log += `${reqs.length} reqs with status ${status.join(' or ')}\\n`;\n const agedReqs = reqs.filter(req => (0, index_client_2.verifyTruthy)(req.updated_at) < agedLimit);\n log += ` Of those reqs, ${agedReqs.length} where last updated before ${agedLimit.toISOString()}.\\n`;\n log += await this.processUnsent(agedReqs, 2);\n if (count < limit)\n break;\n offset += limit;\n }\n return log;\n }\n /**\n * Process an array of 'unsent' status table.ProvenTxReq\n *\n * Send rawTx to transaction processor(s), requesting proof callbacks when possible.\n *\n * Set status 'invalid' if req is invalid.\n *\n * Set status to 'callback' on successful network submission with callback service.\n *\n * Set status to 'unmined' on successful network submission without callback service.\n *\n * Add mapi responses to database table if received.\n *\n * Increments attempts if sending was attempted.\n *\n * @param reqApis\n */\n async processUnsent(reqApis, indent = 0) {\n let log = '';\n for (let i = 0; i < reqApis.length; i++) {\n const reqApi = reqApis[i];\n log += ' '.repeat(indent);\n log += `${i} reqId=${reqApi.provenTxReqId} attempts=${reqApi.attempts} txid=${reqApi.txid}: \\n`;\n if (reqApi.status !== 'unsent' && reqApi.status !== 'sending') {\n log += ` status now ${reqApi.status}\\n`;\n continue;\n }\n const req = new index_client_1.EntityProvenTxReq(reqApi);\n const reqs = [];\n if (req.batch) {\n // Make sure wew process entire batch together for efficient beef generation\n const batchReqApis = await this.storage.findProvenTxReqs({\n partial: { batch: req.batch, status: 'unsent' }\n });\n for (const bra of batchReqApis) {\n // Remove any matching batchReqApis from reqApis\n const index = reqApis.findIndex(ra => ra.provenTxReqId === bra.provenTxReqId);\n if 
(index > -1)\n reqApis.slice(index, index + 1);\n // And add to reqs being processed now:\n reqs.push(new index_client_1.EntityProvenTxReq(bra));\n }\n }\n else {\n // Just a single non-batched req...\n reqs.push(req);\n }\n const r = await this.storage.runAsStorageProvider(async (sp) => {\n return (0, attemptToPostReqsToNetwork_1.attemptToPostReqsToNetwork)(sp, reqs);\n });\n if (this.monitor.onTransactionBroadcasted) {\n const rar = await this.storage.runAsStorageProvider(async (sp) => {\n const ars = [{ txid: req.txid, status: 'sending' }];\n const { rar } = await (0, aggregateResults_1.aggregateActionResults)(sp, ars, r);\n return rar;\n });\n this.monitor.callOnBroadcastedTransaction(rar[0]);\n }\n log += r.log;\n }\n return log;\n }\n}\nexports.TaskSendWaiting = TaskSendWaiting;\nTaskSendWaiting.taskName = 'SendWaiting';\n//# sourceMappingURL=TaskSendWaiting.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/monitor/tasks/TaskSendWaiting.js?\n}"); /***/ }), @@ -3058,7 +3102,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.parseWalletOutpoint = parseWalletOutpoint;\nexports.validateSatoshis = validateSatoshis;\nexports.validateOptionalInteger = validateOptionalInteger;\nexports.validateInteger = validateInteger;\nexports.validatePositiveIntegerOrZero = validatePositiveIntegerOrZero;\nexports.validateStringLength = validateStringLength;\nexports.isHexString = isHexString;\nexports.validateCreateActionInput = validateCreateActionInput;\nexports.validateCreateActionOutput = validateCreateActionOutput;\nexports.validateCreateActionOptions = validateCreateActionOptions;\nexports.validateCreateActionArgs = validateCreateActionArgs;\nexports.validateSignActionOptions = validateSignActionOptions;\nexports.validateSignActionArgs = validateSignActionArgs;\nexports.validateAbortActionArgs = 
validateAbortActionArgs;\nexports.validateWalletPayment = validateWalletPayment;\nexports.validateBasketInsertion = validateBasketInsertion;\nexports.validateInternalizeOutput = validateInternalizeOutput;\nexports.validateOriginator = validateOriginator;\nexports.validateInternalizeActionArgs = validateInternalizeActionArgs;\nexports.validateOptionalOutpointString = validateOptionalOutpointString;\nexports.validateOutpointString = validateOutpointString;\nexports.validateRelinquishOutputArgs = validateRelinquishOutputArgs;\nexports.validateRelinquishCertificateArgs = validateRelinquishCertificateArgs;\nexports.validateListCertificatesArgs = validateListCertificatesArgs;\nexports.validateAcquireCertificateArgs = validateAcquireCertificateArgs;\nexports.validateAcquireIssuanceCertificateArgs = validateAcquireIssuanceCertificateArgs;\nexports.validateAcquireDirectCertificateArgs = validateAcquireDirectCertificateArgs;\nexports.validateProveCertificateArgs = validateProveCertificateArgs;\nexports.validateDiscoverByIdentityKeyArgs = validateDiscoverByIdentityKeyArgs;\nexports.validateDiscoverByAttributesArgs = validateDiscoverByAttributesArgs;\nexports.validateListOutputsArgs = validateListOutputsArgs;\nexports.validateListActionsArgs = validateListActionsArgs;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst WERR_errors_1 = __webpack_require__(/*! ./WERR_errors */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WERR_errors.js\");\nfunction parseWalletOutpoint(outpoint) {\n const [txid, vout] = outpoint.split('.');\n return { txid, vout: Number(vout) };\n}\nfunction defaultTrue(v) {\n return v !== null && v !== void 0 ? v : true;\n}\nfunction defaultFalse(v) {\n return v !== null && v !== void 0 ? v : false;\n}\nfunction defaultZero(v) {\n return v !== null && v !== void 0 ? v : 0;\n}\nfunction default0xffffffff(v) {\n return v !== null && v !== void 0 ? 
v : 0xffffffff;\n}\nfunction defaultOne(v) {\n return v !== null && v !== void 0 ? v : 1;\n}\nfunction defaultEmpty(v) {\n return v !== null && v !== void 0 ? v : [];\n}\nfunction validateOptionalStringLength(s, name, min, max) {\n if (s === undefined)\n return undefined;\n return validateStringLength(s, name, min, max);\n}\nfunction validateSatoshis(v, name, min) {\n if (v === undefined || !Number.isInteger(v) || v < 0 || v > 21e14)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER(name, 'a valid number of satoshis');\n if (min !== undefined && v < min)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER(name, `at least ${min} satoshis.`);\n return v;\n}\nfunction validateOptionalInteger(v, name, min, max) {\n if (v === undefined)\n return undefined;\n return validateInteger(v, name, undefined, min, max);\n}\nfunction validateInteger(v, name, defaultValue, min, max) {\n if (v === undefined) {\n if (defaultValue !== undefined)\n return defaultValue;\n throw new WERR_errors_1.WERR_INVALID_PARAMETER(name, 'a valid integer');\n }\n if (!Number.isInteger(v))\n throw new WERR_errors_1.WERR_INVALID_PARAMETER(name, 'an integer');\n v = Number(v);\n if (min !== undefined && v < min)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER(name, `at least ${min} length.`);\n if (max !== undefined && v > max)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER(name, `no more than ${max} length.`);\n return v;\n}\nfunction validatePositiveIntegerOrZero(v, name) {\n return validateInteger(v, name, 0, 0);\n}\nfunction validateStringLength(s, name, min, max) {\n const bytes = sdk_1.Utils.toArray(s, 'utf8').length;\n if (min !== undefined && bytes < min)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER(name, `at least ${min} length.`);\n if (max !== undefined && bytes > max)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER(name, `no more than ${max} length.`);\n return s;\n}\nfunction validateOptionalBasket(s) {\n if (s === undefined)\n return undefined;\n return validateBasket(s);\n}\nfunction 
validateBasket(s) {\n return validateIdentifier(s, 'basket', 1, 300);\n}\nfunction validateLabel(s) {\n return validateIdentifier(s, 'label', 1, 300);\n}\nfunction validateTag(s) {\n return validateIdentifier(s, 'tag', 1, 300);\n}\nfunction validateIdentifier(s, name, min, max) {\n s = s.trim().toLowerCase();\n const bytes = sdk_1.Utils.toArray(s, 'utf8').length;\n if (min !== undefined && bytes < min)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER(name, `at least ${min} length.`);\n if (max !== undefined && bytes > max)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER(name, `no more than ${max} length.`);\n return s;\n}\nfunction validateOptionalBase64String(s, name, min, max) {\n if (s === undefined)\n return undefined;\n return validateBase64String(s, name, min, max);\n}\nfunction validateBase64String(s, name, min, max) {\n // Remove any whitespace and check if the string length is valid for Base64\n s = s.trim();\n const base64Regex = /^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$/;\n const paddingMatch = /=+$/.exec(s);\n const paddingCount = paddingMatch ? 
paddingMatch[0].length : 0;\n if (paddingCount > 2 || (s.length % 4 !== 0 && paddingCount !== 0) || !base64Regex.test(s)) {\n throw new WERR_errors_1.WERR_INVALID_PARAMETER(name, `balid base64 string`);\n }\n const bytes = sdk_1.Utils.toArray(s, 'base64').length;\n if (min !== undefined && bytes < min)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER(name, `at least ${min} length.`);\n if (max !== undefined && bytes > max)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER(name, `no more than ${max} length.`);\n return s;\n}\nfunction validateOptionalHexString(s, name, min, max) {\n if (s === undefined)\n return undefined;\n return validateHexString(s, name, min, max);\n}\n/**\n * @param s\n * @param name\n * @param min if valid, string length minimum (not bytes)\n * @param max if valid, string length maximum (not bytes)\n * @returns\n */\nfunction validateHexString(s, name, min, max) {\n s = s.trim().toLowerCase();\n if (s.length % 2 === 1)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER(name, `even length, not ${s.length}.`);\n const hexRegex = /^[0-9A-Fa-f]+$/;\n if (!hexRegex.test(s))\n throw new WERR_errors_1.WERR_INVALID_PARAMETER(name, `hexadecimal string.`);\n if (min !== undefined && s.length < min)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER(name, `at least ${min} length.`);\n if (max !== undefined && s.length > max)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER(name, `no more than ${max} length.`);\n return s;\n}\nfunction isHexString(s) {\n s = s.trim();\n if (s.length % 2 === 1)\n return false;\n const hexRegex = /^[0-9A-Fa-f]+$/;\n if (!hexRegex.test(s))\n return false;\n return true;\n}\nfunction validateCreateActionInput(i) {\n var _a;\n if (i.unlockingScript === undefined && i.unlockingScriptLength === undefined)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('unlockingScript, unlockingScriptLength', `at least one valid value.`);\n const unlockingScript = validateOptionalHexString(i.unlockingScript, 'unlockingScript');\n const 
unlockingScriptLength = (_a = i.unlockingScriptLength) !== null && _a !== void 0 ? _a : unlockingScript.length / 2;\n if (unlockingScript && unlockingScriptLength !== unlockingScript.length / 2)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('unlockingScriptLength', `length unlockingScript if both valid.`);\n const vi = {\n outpoint: parseWalletOutpoint(i.outpoint),\n inputDescription: validateStringLength(i.inputDescription, 'inputDescription', 5, 2000),\n unlockingScript,\n unlockingScriptLength,\n sequenceNumber: default0xffffffff(i.sequenceNumber)\n };\n return vi;\n}\nfunction validateCreateActionOutput(o) {\n const vo = {\n lockingScript: validateHexString(o.lockingScript, 'lockingScript'),\n satoshis: validateSatoshis(o.satoshis, 'satoshis'),\n outputDescription: validateStringLength(o.outputDescription, 'outputDescription', 5, 2000),\n basket: validateOptionalBasket(o.basket),\n customInstructions: o.customInstructions,\n tags: defaultEmpty(o.tags).map(t => validateTag(t))\n };\n return vo;\n}\n/**\n * Set all default true/false booleans to true or false if undefined.\n * Set all possibly undefined numbers to their default values.\n * Set all possibly undefined arrays to empty arrays.\n * Convert string outpoints to `{ txid: string, vout: number }`\n */\nfunction validateCreateActionOptions(options) {\n const o = options || {};\n const vo = {\n signAndProcess: defaultTrue(o.signAndProcess),\n acceptDelayedBroadcast: defaultTrue(o.acceptDelayedBroadcast),\n knownTxids: defaultEmpty(o.knownTxids),\n returnTXIDOnly: defaultFalse(o.returnTXIDOnly),\n noSend: defaultFalse(o.noSend),\n noSendChange: defaultEmpty(o.noSendChange).map(nsc => parseWalletOutpoint(nsc)),\n sendWith: defaultEmpty(o.sendWith),\n randomizeOutputs: defaultTrue(o.randomizeOutputs)\n };\n return vo;\n}\nfunction validateCreateActionArgs(args) {\n var _a;\n const vargs = {\n description: validateStringLength(args.description, 'description', 5, 2000),\n inputBEEF: args.inputBEEF,\n inputs: 
defaultEmpty(args.inputs).map(i => validateCreateActionInput(i)),\n outputs: defaultEmpty(args.outputs).map(o => validateCreateActionOutput(o)),\n lockTime: defaultZero(args.lockTime),\n version: defaultOne(args.version),\n labels: defaultEmpty((_a = args.labels) === null || _a === void 0 ? void 0 : _a.map(l => validateLabel(l))),\n options: validateCreateActionOptions(args.options),\n isSendWith: false,\n isDelayed: false,\n isNoSend: false,\n isNewTx: false,\n isRemixChange: false,\n isSignAction: false,\n randomVals: undefined,\n includeAllSourceTransactions: false\n };\n vargs.isSendWith = vargs.options.sendWith.length > 0;\n vargs.isRemixChange = !vargs.isSendWith && vargs.inputs.length === 0 && vargs.outputs.length === 0;\n vargs.isNewTx = vargs.isRemixChange || vargs.inputs.length > 0 || vargs.outputs.length > 0;\n vargs.isSignAction =\n vargs.isNewTx && (vargs.options.signAndProcess === false || vargs.inputs.some(i => i.unlockingScript === undefined));\n vargs.isDelayed = vargs.options.acceptDelayedBroadcast;\n vargs.isNoSend = vargs.options.noSend;\n return vargs;\n}\n/**\n * Set all default true/false booleans to true or false if undefined.\n * Set all possibly undefined numbers to their default values.\n * Set all possibly undefined arrays to empty arrays.\n * Convert string outpoints to `{ txid: string, vout: number }`\n */\nfunction validateSignActionOptions(options) {\n const o = options || {};\n const vo = {\n acceptDelayedBroadcast: defaultTrue(o.acceptDelayedBroadcast),\n returnTXIDOnly: defaultFalse(o.returnTXIDOnly),\n noSend: defaultFalse(o.noSend),\n sendWith: defaultEmpty(o.sendWith)\n };\n return vo;\n}\nfunction validateSignActionArgs(args) {\n const vargs = {\n spends: args.spends,\n reference: args.reference,\n options: validateSignActionOptions(args.options),\n isSendWith: false,\n isDelayed: false,\n isNoSend: false,\n isNewTx: true,\n isRemixChange: false\n };\n vargs.isSendWith = vargs.options.sendWith.length > 0;\n vargs.isDelayed = 
vargs.options.acceptDelayedBroadcast;\n vargs.isNoSend = vargs.options.noSend;\n return vargs;\n}\nfunction validateAbortActionArgs(args) {\n const vargs = {\n reference: validateBase64String(args.reference, 'reference')\n };\n return vargs;\n}\nfunction validateWalletPayment(args) {\n if (args === undefined)\n return undefined;\n const v = {\n derivationPrefix: validateBase64String(args.derivationPrefix, 'derivationPrefix'),\n derivationSuffix: validateBase64String(args.derivationSuffix, 'derivationSuffix'),\n senderIdentityKey: validateHexString(args.senderIdentityKey, 'senderIdentityKey')\n };\n return v;\n}\nfunction validateBasketInsertion(args) {\n if (args === undefined)\n return undefined;\n const v = {\n basket: validateBasket(args.basket),\n customInstructions: validateOptionalStringLength(args.customInstructions, 'customInstructions', 0, 1000), // TODO: real max??\n tags: defaultEmpty(args.tags).map(t => validateTag(t))\n };\n return v;\n}\nfunction validateInternalizeOutput(args) {\n if (args.protocol !== 'basket insertion' && args.protocol !== 'wallet payment')\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('protocol', `'basket insertion' or 'wallet payment'`);\n const v = {\n outputIndex: validatePositiveIntegerOrZero(args.outputIndex, 'outputIndex'),\n protocol: args.protocol,\n paymentRemittance: validateWalletPayment(args.paymentRemittance),\n insertionRemittance: validateBasketInsertion(args.insertionRemittance)\n };\n return v;\n}\nfunction validateOriginator(s) {\n if (s === undefined)\n return undefined;\n s = s.trim().toLowerCase();\n validateStringLength(s, 'originator', 1, 250);\n const sps = s.split('.');\n for (const sp of sps) {\n validateStringLength(sp, 'originator part', 1, 63);\n }\n}\nfunction validateInternalizeActionArgs(args) {\n const vargs = {\n tx: args.tx,\n outputs: args.outputs.map(o => validateInternalizeOutput(o)),\n description: validateStringLength(args.description, 'description', 5, 2000),\n labels: (args.labels || 
[]).map(t => validateLabel(t)),\n seekPermission: defaultTrue(args.seekPermission)\n };\n try {\n const beef = sdk_1.Beef.fromBinary(vargs.tx);\n if (beef.txs.length < 1)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('tx', `at least one transaction to internalize an output from`);\n }\n catch (_a) {\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('tx', `valid with at least one transaction to internalize an output from`);\n }\n if (vargs.outputs.length < 1)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('outputs', `at least one output to internalize from the transaction`);\n return vargs;\n}\nfunction validateOptionalOutpointString(outpoint, name) {\n if (outpoint === undefined)\n return undefined;\n return validateOutpointString(outpoint, name);\n}\nfunction validateOutpointString(outpoint, name) {\n const s = outpoint.split('.');\n if (s.length !== 2 || !Number.isInteger(Number(s[1])))\n throw new WERR_errors_1.WERR_INVALID_PARAMETER(name, `txid as hex string and numeric output index joined with '.'`);\n const txid = validateHexString(s[0], `${name} txid`, undefined, 64);\n const vout = validatePositiveIntegerOrZero(Number(s[1]), `${name} vout`);\n return `${txid}.${vout}`;\n}\nfunction validateRelinquishOutputArgs(args) {\n const vargs = {\n basket: validateBasket(args.basket),\n output: validateOutpointString(args.output, 'output')\n };\n return vargs;\n}\nfunction validateRelinquishCertificateArgs(args) {\n const vargs = {\n type: validateBase64String(args.type, 'type'),\n serialNumber: validateBase64String(args.serialNumber, 'serialNumber'),\n certifier: validateHexString(args.certifier, 'certifier')\n };\n return vargs;\n}\nfunction validateListCertificatesArgs(args) {\n const vargs = {\n certifiers: defaultEmpty(args.certifiers.map(c => validateHexString(c.trim(), 'certifiers'))),\n types: defaultEmpty(args.types.map(t => validateBase64String(t.trim(), 'types'))),\n limit: validateInteger(args.limit, 'limit', 10, 1, 10000),\n offset: 
validatePositiveIntegerOrZero(defaultZero(args.offset), 'offset'),\n privileged: defaultFalse(args.privileged),\n privilegedReason: validateOptionalStringLength(args.privilegedReason, 'privilegedReason', 5, 50),\n partial: undefined\n };\n return vargs;\n}\nfunction validateCertificateFields(fields) {\n for (const fieldName of Object.keys(fields)) {\n validateStringLength(fieldName, 'field name', 1, 50);\n }\n return fields;\n}\nfunction validateKeyringRevealer(kr, name) {\n if (kr === 'certifier')\n return kr;\n return validateHexString(kr, name);\n}\nfunction validateOptionalKeyringRevealer(kr, name) {\n if (kr === undefined)\n return undefined;\n return validateKeyringRevealer(kr, name);\n}\nfunction validateKeyringForSubject(kr, name) {\n for (const fn of Object.keys(kr)) {\n validateStringLength(fn, `${name} field name`, 1, 50);\n validateBase64String(kr[fn], `${name} field value`);\n }\n return kr;\n}\nfunction validateOptionalKeyringForSubject(kr, name) {\n if (kr === undefined)\n return undefined;\n return validateKeyringForSubject(kr, name);\n}\n/**\n *\n * @param args\n * @param subject Must be valid for \"direct\" `acquisitionProtocol`. 
public key of the certificate subject.\n * @returns\n */\nasync function validateAcquireCertificateArgs(args) {\n const vargs = {\n acquisitionProtocol: args.acquisitionProtocol,\n type: validateBase64String(args.type, 'type'),\n serialNumber: validateOptionalBase64String(args.serialNumber, 'serialNumber'),\n certifier: validateHexString(args.certifier, 'certifier'),\n revocationOutpoint: validateOptionalOutpointString(args.revocationOutpoint, 'revocationOutpoint'),\n fields: validateCertificateFields(args.fields),\n signature: validateOptionalHexString(args.signature, 'signature'),\n certifierUrl: args.certifierUrl,\n keyringRevealer: validateOptionalKeyringRevealer(args.keyringRevealer, 'keyringRevealer'),\n keyringForSubject: validateOptionalKeyringForSubject(args.keyringForSubject, 'keyringForSubject'),\n privileged: defaultFalse(args.privileged),\n privilegedReason: validateOptionalStringLength(args.privilegedReason, 'privilegedReason', 5, 50)\n };\n if (vargs.privileged && !vargs.privilegedReason)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('privilegedReason', `valid when 'privileged' is true `);\n if (vargs.acquisitionProtocol === 'direct') {\n if (!vargs.serialNumber)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('serialNumber', 'valid when acquisitionProtocol is \"direct\"');\n if (!vargs.signature)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('signature', 'valid when acquisitionProtocol is \"direct\"');\n if (!vargs.revocationOutpoint)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('revocationOutpoint', 'valid when acquisitionProtocol is \"direct\"');\n }\n return vargs;\n}\nfunction validateAcquireIssuanceCertificateArgs(args) {\n if (args.acquisitionProtocol !== 'issuance')\n throw new WERR_errors_1.WERR_INTERNAL('Only acquire certificate via issuance requests allowed here.');\n if (args.serialNumber)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('serialNumber', 'valid when acquisitionProtocol is \"direct\"');\n if (args.signature)\n 
throw new WERR_errors_1.WERR_INVALID_PARAMETER('signature', 'valid when acquisitionProtocol is \"direct\"');\n if (args.revocationOutpoint)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('revocationOutpoint', 'valid when acquisitionProtocol is \"direct\"');\n if (args.keyringRevealer)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('keyringRevealer', 'valid when acquisitionProtocol is \"direct\"');\n if (args.keyringForSubject)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('keyringForSubject', 'valid when acquisitionProtocol is \"direct\"');\n if (!args.certifierUrl)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('certifierUrl', 'valid when acquisitionProtocol is \"issuance\"');\n if (args.privileged && !args.privilegedReason)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('privilegedReason', `valid when 'privileged' is true `);\n const vargs = {\n type: validateBase64String(args.type, 'type'),\n certifier: validateHexString(args.certifier, 'certifier'),\n certifierUrl: args.certifierUrl,\n fields: validateCertificateFields(args.fields),\n privileged: defaultFalse(args.privileged),\n privilegedReason: validateOptionalStringLength(args.privilegedReason, 'privilegedReason', 5, 50),\n subject: ''\n };\n return vargs;\n}\nfunction validateAcquireDirectCertificateArgs(args) {\n if (args.acquisitionProtocol !== 'direct')\n throw new WERR_errors_1.WERR_INTERNAL('Only acquire direct certificate requests allowed here.');\n if (!args.serialNumber)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('serialNumber', 'valid when acquisitionProtocol is \"direct\"');\n if (!args.signature)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('signature', 'valid when acquisitionProtocol is \"direct\"');\n if (!args.revocationOutpoint)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('revocationOutpoint', 'valid when acquisitionProtocol is \"direct\"');\n if (!args.keyringRevealer)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('keyringRevealer', 'valid when 
acquisitionProtocol is \"direct\"');\n if (!args.keyringForSubject)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('keyringForSubject', 'valid when acquisitionProtocol is \"direct\"');\n if (args.privileged && !args.privilegedReason)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('privilegedReason', `valid when 'privileged' is true `);\n const vargs = {\n type: validateBase64String(args.type, 'type'),\n serialNumber: validateBase64String(args.serialNumber, 'serialNumber'),\n certifier: validateHexString(args.certifier, 'certifier'),\n revocationOutpoint: validateOutpointString(args.revocationOutpoint, 'revocationOutpoint'),\n fields: validateCertificateFields(args.fields),\n signature: validateHexString(args.signature, 'signature'),\n keyringRevealer: validateKeyringRevealer(args.keyringRevealer, 'keyringRevealer'),\n keyringForSubject: validateKeyringForSubject(args.keyringForSubject, 'keyringForSubject'),\n privileged: defaultFalse(args.privileged),\n privilegedReason: validateOptionalStringLength(args.privilegedReason, 'privilegedReason', 5, 50),\n subject: ''\n };\n return vargs;\n}\nfunction validateProveCertificateArgs(args) {\n if (args.privileged && !args.privilegedReason)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('privilegedReason', `valid when 'privileged' is true `);\n const vargs = {\n type: validateOptionalBase64String(args.certificate.type, 'certificate.type'),\n serialNumber: validateOptionalBase64String(args.certificate.serialNumber, 'certificate.serialNumber'),\n certifier: validateOptionalHexString(args.certificate.certifier, 'certificate.certifier'),\n subject: validateOptionalHexString(args.certificate.subject, 'certificate.subject'),\n revocationOutpoint: validateOptionalOutpointString(args.certificate.revocationOutpoint, 'certificate.revocationOutpoint'),\n signature: validateOptionalHexString(args.certificate.signature, 'certificate.signature'),\n fieldsToReveal: defaultEmpty(args.fieldsToReveal).map(fieldName => 
validateStringLength(fieldName, `fieldsToReveal ${fieldName}`, 1, 50)),\n verifier: validateHexString(args.verifier, 'verifier'),\n privileged: defaultFalse(args.privileged),\n privilegedReason: validateOptionalStringLength(args.privilegedReason, 'privilegedReason', 5, 50)\n };\n return vargs;\n}\nfunction validateDiscoverByIdentityKeyArgs(args) {\n const vargs = {\n identityKey: validateHexString(args.identityKey, 'identityKey', 66, 66),\n limit: validateInteger(args.limit, 'limit', 10, 1, 10000),\n offset: validatePositiveIntegerOrZero(defaultZero(args.offset), 'offset'),\n seekPermission: defaultFalse(args.seekPermission)\n };\n return vargs;\n}\nfunction validateAttributes(attributes) {\n for (const fieldName of Object.keys(attributes)) {\n validateStringLength(fieldName, `field name ${fieldName}`, 1, 50);\n }\n return attributes;\n}\nfunction validateDiscoverByAttributesArgs(args) {\n const vargs = {\n attributes: validateAttributes(args.attributes),\n limit: validateInteger(args.limit, 'limit', 10, 1, 10000),\n offset: validatePositiveIntegerOrZero(defaultZero(args.offset), 'offset'),\n seekPermission: defaultFalse(args.seekPermission)\n };\n return vargs;\n}\n/**\n * @param {BasketStringUnder300Bytes} args.basket - Required. The associated basket name whose outputs should be listed.\n * @param {OutputTagStringUnder300Bytes[]} [args.tags] - Optional. Filter outputs based on these tags.\n * @param {'all' | 'any'} [args.tagQueryMode] - Optional. Filter mode, defining whether all or any of the tags must match. By default, any tag can match.\n * @param {'locking scripts' | 'entire transactions'} [args.include] - Optional. Whether to include locking scripts (with each output) or entire transactions (as aggregated BEEF, at the top level) in the result. By default, unless specified, neither are returned.\n * @param {BooleanDefaultFalse} [args.includeEntireTransactions] - Optional. 
Whether to include the entire transaction(s) in the result.\n * @param {BooleanDefaultFalse} [args.includeCustomInstructions] - Optional. Whether custom instructions should be returned in the result.\n * @param {BooleanDefaultFalse} [args.includeTags] - Optional. Whether the tags associated with the output should be returned.\n * @param {BooleanDefaultFalse} [args.includeLabels] - Optional. Whether the labels associated with the transaction containing the output should be returned.\n * @param {PositiveIntegerDefault10Max10000} [args.limit] - Optional limit on the number of outputs to return.\n * @param {number} [args.offset] - If positive or zero: Number of outputs to skip before starting to return results, oldest first.\n * If negative: Outputs are returned newest first and offset of -1 is the newest output.\n * When using negative offsets, caution is required as new outputs may be added between calls,\n * potentially causing outputs to be duplicated across calls.\n * @param {BooleanDefaultTrue} [args.seekPermission] — Optional. Whether to seek permission from the user for this operation if required. 
Default true, will return an error rather than proceed if set to false.\n */\nfunction validateListOutputsArgs(args) {\n let tagQueryMode;\n if (args.tagQueryMode === undefined || args.tagQueryMode === 'any')\n tagQueryMode = 'any';\n else if (args.tagQueryMode === 'all')\n tagQueryMode = 'all';\n else\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('tagQueryMode', `undefined, 'any', or 'all'`);\n const vargs = {\n basket: validateStringLength(args.basket, 'basket', 1, 300),\n tags: (args.tags || []).map(t => validateStringLength(t, 'tag', 1, 300)),\n tagQueryMode,\n includeLockingScripts: args.include === 'locking scripts',\n includeTransactions: args.include === 'entire transactions',\n includeCustomInstructions: defaultFalse(args.includeCustomInstructions),\n includeTags: defaultFalse(args.includeTags),\n includeLabels: defaultFalse(args.includeLabels),\n limit: validateInteger(args.limit, 'limit', 10, 1, 10000),\n offset: validateInteger(args.offset, 'offset', 0, undefined, undefined),\n seekPermission: defaultTrue(args.seekPermission),\n knownTxids: []\n };\n return vargs;\n}\n/**\n * @param {LabelStringUnder300Bytes[]} args.labels - An array of labels used to filter actions.\n * @param {'any' | 'all'} [args.labelQueryMode] - Optional. Specifies how to match labels (default is any which matches any of the labels).\n * @param {BooleanDefaultFalse} [args.includeLabels] - Optional. Whether to include transaction labels in the result set.\n * @param {BooleanDefaultFalse} [args.includeInputs] - Optional. Whether to include input details in the result set.\n * @param {BooleanDefaultFalse} [args.includeInputSourceLockingScripts] - Optional. Whether to include input source locking scripts in the result set.\n * @param {BooleanDefaultFalse} [args.includeInputUnlockingScripts] - Optional. Whether to include input unlocking scripts in the result set.\n * @param {BooleanDefaultFalse} [args.includeOutputs] - Optional. 
Whether to include output details in the result set.\n * @param {BooleanDefaultFalse} [args.includeOutputLockingScripts] - Optional. Whether to include output locking scripts in the result set.\n * @param {PositiveIntegerDefault10Max10000} [args.limit] - Optional. The maximum number of transactions to retrieve.\n * @param {PositiveIntegerOrZero} [args.offset] - Optional. Number of transactions to skip before starting to return the results.\n * @param {BooleanDefaultTrue} [args.seekPermission] — Optional. Whether to seek permission from the user for this operation if required. Default true, will return an error rather than proceed if set to false.\n */\nfunction validateListActionsArgs(args) {\n let labelQueryMode;\n if (args.labelQueryMode === undefined || args.labelQueryMode === 'any')\n labelQueryMode = 'any';\n else if (args.labelQueryMode === 'all')\n labelQueryMode = 'all';\n else\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('labelQueryMode', `undefined, 'any', or 'all'`);\n const vargs = {\n labels: (args.labels || []).map(t => validateLabel(t)),\n labelQueryMode,\n includeLabels: defaultFalse(args.includeLabels),\n includeInputs: defaultFalse(args.includeInputs),\n includeInputSourceLockingScripts: defaultFalse(args.includeInputSourceLockingScripts),\n includeInputUnlockingScripts: defaultFalse(args.includeInputUnlockingScripts),\n includeOutputs: defaultFalse(args.includeOutputs),\n includeOutputLockingScripts: defaultFalse(args.includeOutputLockingScripts),\n limit: validateInteger(args.limit, 'limit', 10, 1, 10000),\n offset: validateInteger(args.offset, 'offset', 0, 0),\n seekPermission: defaultTrue(args.seekPermission)\n };\n return vargs;\n}\n//# sourceMappingURL=validationHelpers.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/validationHelpers.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.parseWalletOutpoint = parseWalletOutpoint;\nexports.validateSatoshis 
= validateSatoshis;\nexports.validateOptionalInteger = validateOptionalInteger;\nexports.validateInteger = validateInteger;\nexports.validatePositiveIntegerOrZero = validatePositiveIntegerOrZero;\nexports.validateStringLength = validateStringLength;\nexports.isHexString = isHexString;\nexports.validateCreateActionInput = validateCreateActionInput;\nexports.validateCreateActionOutput = validateCreateActionOutput;\nexports.validateCreateActionOptions = validateCreateActionOptions;\nexports.validateCreateActionArgs = validateCreateActionArgs;\nexports.validateSignActionOptions = validateSignActionOptions;\nexports.validateSignActionArgs = validateSignActionArgs;\nexports.validateAbortActionArgs = validateAbortActionArgs;\nexports.validateWalletPayment = validateWalletPayment;\nexports.validateBasketInsertion = validateBasketInsertion;\nexports.validateInternalizeOutput = validateInternalizeOutput;\nexports.validateOriginator = validateOriginator;\nexports.validateInternalizeActionArgs = validateInternalizeActionArgs;\nexports.validateOptionalOutpointString = validateOptionalOutpointString;\nexports.validateOutpointString = validateOutpointString;\nexports.validateRelinquishOutputArgs = validateRelinquishOutputArgs;\nexports.validateRelinquishCertificateArgs = validateRelinquishCertificateArgs;\nexports.validateListCertificatesArgs = validateListCertificatesArgs;\nexports.validateAcquireCertificateArgs = validateAcquireCertificateArgs;\nexports.validateAcquireIssuanceCertificateArgs = validateAcquireIssuanceCertificateArgs;\nexports.validateAcquireDirectCertificateArgs = validateAcquireDirectCertificateArgs;\nexports.validateProveCertificateArgs = validateProveCertificateArgs;\nexports.validateDiscoverByIdentityKeyArgs = validateDiscoverByIdentityKeyArgs;\nexports.validateDiscoverByAttributesArgs = validateDiscoverByAttributesArgs;\nexports.validateListOutputsArgs = validateListOutputsArgs;\nexports.validateListActionsArgs = validateListActionsArgs;\nconst sdk_1 = 
__webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst index_client_1 = __webpack_require__(/*! ../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nfunction parseWalletOutpoint(outpoint) {\n const [txid, vout] = outpoint.split('.');\n return { txid, vout: Number(vout) };\n}\nfunction defaultTrue(v) {\n return v !== null && v !== void 0 ? v : true;\n}\nfunction defaultFalse(v) {\n return v !== null && v !== void 0 ? v : false;\n}\nfunction defaultZero(v) {\n return v !== null && v !== void 0 ? v : 0;\n}\nfunction default0xffffffff(v) {\n return v !== null && v !== void 0 ? v : 0xffffffff;\n}\nfunction defaultOne(v) {\n return v !== null && v !== void 0 ? v : 1;\n}\nfunction defaultEmpty(v) {\n return v !== null && v !== void 0 ? v : [];\n}\nfunction validateOptionalStringLength(s, name, min, max) {\n if (s === undefined)\n return undefined;\n return validateStringLength(s, name, min, max);\n}\nfunction validateSatoshis(v, name, min) {\n if (v === undefined || !Number.isInteger(v) || v < 0 || v > 21e14)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER(name, 'a valid number of satoshis');\n if (min !== undefined && v < min)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER(name, `at least ${min} satoshis.`);\n return v;\n}\nfunction validateOptionalInteger(v, name, min, max) {\n if (v === undefined)\n return undefined;\n return validateInteger(v, name, undefined, min, max);\n}\nfunction validateInteger(v, name, defaultValue, min, max) {\n if (v === undefined) {\n if (defaultValue !== undefined)\n return defaultValue;\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER(name, 'a valid integer');\n }\n if (!Number.isInteger(v))\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER(name, 'an integer');\n v = Number(v);\n if (min !== undefined && v < min)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER(name, `at least ${min} length.`);\n if (max !== undefined && v > max)\n throw new 
index_client_1.sdk.WERR_INVALID_PARAMETER(name, `no more than ${max} length.`);\n return v;\n}\nfunction validatePositiveIntegerOrZero(v, name) {\n return validateInteger(v, name, 0, 0);\n}\nfunction validateStringLength(s, name, min, max) {\n const bytes = sdk_1.Utils.toArray(s, 'utf8').length;\n if (min !== undefined && bytes < min)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER(name, `at least ${min} length.`);\n if (max !== undefined && bytes > max)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER(name, `no more than ${max} length.`);\n return s;\n}\nfunction validateOptionalBasket(s) {\n if (s === undefined)\n return undefined;\n return validateBasket(s);\n}\nfunction validateBasket(s) {\n return validateIdentifier(s, 'basket', 1, 300);\n}\nfunction validateLabel(s) {\n return validateIdentifier(s, 'label', 1, 300);\n}\nfunction validateTag(s) {\n return validateIdentifier(s, 'tag', 1, 300);\n}\nfunction validateIdentifier(s, name, min, max) {\n s = s.trim().toLowerCase();\n const bytes = sdk_1.Utils.toArray(s, 'utf8').length;\n if (min !== undefined && bytes < min)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER(name, `at least ${min} length.`);\n if (max !== undefined && bytes > max)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER(name, `no more than ${max} length.`);\n return s;\n}\nfunction validateOptionalBase64String(s, name, min, max) {\n if (s === undefined)\n return undefined;\n return validateBase64String(s, name, min, max);\n}\nfunction validateBase64String(s, name, min, max) {\n // Remove any whitespace and check if the string length is valid for Base64\n s = s.trim();\n const base64Regex = /^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$/;\n const paddingMatch = /=+$/.exec(s);\n const paddingCount = paddingMatch ? 
paddingMatch[0].length : 0;\n if (paddingCount > 2 || (s.length % 4 !== 0 && paddingCount !== 0) || !base64Regex.test(s)) {\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER(name, `balid base64 string`);\n }\n const bytes = sdk_1.Utils.toArray(s, 'base64').length;\n if (min !== undefined && bytes < min)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER(name, `at least ${min} length.`);\n if (max !== undefined && bytes > max)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER(name, `no more than ${max} length.`);\n return s;\n}\nfunction validateOptionalHexString(s, name, min, max) {\n if (s === undefined)\n return undefined;\n return validateHexString(s, name, min, max);\n}\n/**\n * @param s\n * @param name\n * @param min if valid, string length minimum (not bytes)\n * @param max if valid, string length maximum (not bytes)\n * @returns\n */\nfunction validateHexString(s, name, min, max) {\n s = s.trim().toLowerCase();\n if (s.length % 2 === 1)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER(name, `even length, not ${s.length}.`);\n const hexRegex = /^[0-9A-Fa-f]+$/;\n if (!hexRegex.test(s))\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER(name, `hexadecimal string.`);\n if (min !== undefined && s.length < min)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER(name, `at least ${min} length.`);\n if (max !== undefined && s.length > max)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER(name, `no more than ${max} length.`);\n return s;\n}\nfunction isHexString(s) {\n s = s.trim();\n if (s.length % 2 === 1)\n return false;\n const hexRegex = /^[0-9A-Fa-f]+$/;\n if (!hexRegex.test(s))\n return false;\n return true;\n}\nfunction validateCreateActionInput(i) {\n var _a;\n if (i.unlockingScript === undefined && i.unlockingScriptLength === undefined)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('unlockingScript, unlockingScriptLength', `at least one valid value.`);\n const unlockingScript = validateOptionalHexString(i.unlockingScript, 
'unlockingScript');\n const unlockingScriptLength = (_a = i.unlockingScriptLength) !== null && _a !== void 0 ? _a : unlockingScript.length / 2;\n if (unlockingScript && unlockingScriptLength !== unlockingScript.length / 2)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('unlockingScriptLength', `length unlockingScript if both valid.`);\n const vi = {\n outpoint: parseWalletOutpoint(i.outpoint),\n inputDescription: validateStringLength(i.inputDescription, 'inputDescription', 5, 2000),\n unlockingScript,\n unlockingScriptLength,\n sequenceNumber: default0xffffffff(i.sequenceNumber)\n };\n return vi;\n}\nfunction validateCreateActionOutput(o) {\n const vo = {\n lockingScript: validateHexString(o.lockingScript, 'lockingScript'),\n satoshis: validateSatoshis(o.satoshis, 'satoshis'),\n outputDescription: validateStringLength(o.outputDescription, 'outputDescription', 5, 2000),\n basket: validateOptionalBasket(o.basket),\n customInstructions: o.customInstructions,\n tags: defaultEmpty(o.tags).map(t => validateTag(t))\n };\n return vo;\n}\n/**\n * Set all default true/false booleans to true or false if undefined.\n * Set all possibly undefined numbers to their default values.\n * Set all possibly undefined arrays to empty arrays.\n * Convert string outpoints to `{ txid: string, vout: number }`\n */\nfunction validateCreateActionOptions(options) {\n const o = options || {};\n const vo = {\n signAndProcess: defaultTrue(o.signAndProcess),\n acceptDelayedBroadcast: defaultTrue(o.acceptDelayedBroadcast),\n knownTxids: defaultEmpty(o.knownTxids),\n returnTXIDOnly: defaultFalse(o.returnTXIDOnly),\n noSend: defaultFalse(o.noSend),\n noSendChange: defaultEmpty(o.noSendChange).map(nsc => parseWalletOutpoint(nsc)),\n sendWith: defaultEmpty(o.sendWith),\n randomizeOutputs: defaultTrue(o.randomizeOutputs)\n };\n return vo;\n}\nfunction validateCreateActionArgs(args) {\n var _a;\n const vargs = {\n description: validateStringLength(args.description, 'description', 5, 2000),\n 
inputBEEF: args.inputBEEF,\n inputs: defaultEmpty(args.inputs).map(i => validateCreateActionInput(i)),\n outputs: defaultEmpty(args.outputs).map(o => validateCreateActionOutput(o)),\n lockTime: defaultZero(args.lockTime),\n version: defaultOne(args.version),\n labels: defaultEmpty((_a = args.labels) === null || _a === void 0 ? void 0 : _a.map(l => validateLabel(l))),\n options: validateCreateActionOptions(args.options),\n isSendWith: false,\n isDelayed: false,\n isNoSend: false,\n isNewTx: false,\n isRemixChange: false,\n isSignAction: false,\n randomVals: undefined,\n includeAllSourceTransactions: false\n };\n vargs.isSendWith = vargs.options.sendWith.length > 0;\n vargs.isRemixChange = !vargs.isSendWith && vargs.inputs.length === 0 && vargs.outputs.length === 0;\n vargs.isNewTx = vargs.isRemixChange || vargs.inputs.length > 0 || vargs.outputs.length > 0;\n vargs.isSignAction =\n vargs.isNewTx && (vargs.options.signAndProcess === false || vargs.inputs.some(i => i.unlockingScript === undefined));\n vargs.isDelayed = vargs.options.acceptDelayedBroadcast;\n vargs.isNoSend = vargs.options.noSend;\n return vargs;\n}\n/**\n * Set all default true/false booleans to true or false if undefined.\n * Set all possibly undefined numbers to their default values.\n * Set all possibly undefined arrays to empty arrays.\n * Convert string outpoints to `{ txid: string, vout: number }`\n */\nfunction validateSignActionOptions(options) {\n const o = options || {};\n const vo = {\n acceptDelayedBroadcast: defaultTrue(o.acceptDelayedBroadcast),\n returnTXIDOnly: defaultFalse(o.returnTXIDOnly),\n noSend: defaultFalse(o.noSend),\n sendWith: defaultEmpty(o.sendWith)\n };\n return vo;\n}\nfunction validateSignActionArgs(args) {\n const vargs = {\n spends: args.spends,\n reference: args.reference,\n options: validateSignActionOptions(args.options),\n isSendWith: false,\n isDelayed: false,\n isNoSend: false,\n isNewTx: true,\n isRemixChange: false\n };\n vargs.isSendWith = 
vargs.options.sendWith.length > 0;\n vargs.isDelayed = vargs.options.acceptDelayedBroadcast;\n vargs.isNoSend = vargs.options.noSend;\n return vargs;\n}\nfunction validateAbortActionArgs(args) {\n const vargs = {\n reference: validateBase64String(args.reference, 'reference')\n };\n return vargs;\n}\nfunction validateWalletPayment(args) {\n if (args === undefined)\n return undefined;\n const v = {\n derivationPrefix: validateBase64String(args.derivationPrefix, 'derivationPrefix'),\n derivationSuffix: validateBase64String(args.derivationSuffix, 'derivationSuffix'),\n senderIdentityKey: validateHexString(args.senderIdentityKey, 'senderIdentityKey')\n };\n return v;\n}\nfunction validateBasketInsertion(args) {\n if (args === undefined)\n return undefined;\n const v = {\n basket: validateBasket(args.basket),\n customInstructions: validateOptionalStringLength(args.customInstructions, 'customInstructions', 0, 1000), // TODO: real max??\n tags: defaultEmpty(args.tags).map(t => validateTag(t))\n };\n return v;\n}\nfunction validateInternalizeOutput(args) {\n if (args.protocol !== 'basket insertion' && args.protocol !== 'wallet payment')\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('protocol', `'basket insertion' or 'wallet payment'`);\n const v = {\n outputIndex: validatePositiveIntegerOrZero(args.outputIndex, 'outputIndex'),\n protocol: args.protocol,\n paymentRemittance: validateWalletPayment(args.paymentRemittance),\n insertionRemittance: validateBasketInsertion(args.insertionRemittance)\n };\n return v;\n}\nfunction validateOriginator(s) {\n if (s === undefined)\n return undefined;\n s = s.trim().toLowerCase();\n validateStringLength(s, 'originator', 1, 250);\n const sps = s.split('.');\n for (const sp of sps) {\n validateStringLength(sp, 'originator part', 1, 63);\n }\n}\nfunction validateInternalizeActionArgs(args) {\n const vargs = {\n tx: args.tx,\n outputs: args.outputs.map(o => validateInternalizeOutput(o)),\n description: 
validateStringLength(args.description, 'description', 5, 2000),\n labels: (args.labels || []).map(t => validateLabel(t)),\n seekPermission: defaultTrue(args.seekPermission)\n };\n try {\n const beef = sdk_1.Beef.fromBinary(vargs.tx);\n if (beef.txs.length < 1)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('tx', `at least one transaction to internalize an output from`);\n }\n catch (_a) {\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('tx', `valid with at least one transaction to internalize an output from`);\n }\n if (vargs.outputs.length < 1)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('outputs', `at least one output to internalize from the transaction`);\n return vargs;\n}\nfunction validateOptionalOutpointString(outpoint, name) {\n if (outpoint === undefined)\n return undefined;\n return validateOutpointString(outpoint, name);\n}\nfunction validateOutpointString(outpoint, name) {\n const s = outpoint.split('.');\n if (s.length !== 2 || !Number.isInteger(Number(s[1])))\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER(name, `txid as hex string and numeric output index joined with '.'`);\n const txid = validateHexString(s[0], `${name} txid`, undefined, 64);\n const vout = validatePositiveIntegerOrZero(Number(s[1]), `${name} vout`);\n return `${txid}.${vout}`;\n}\nfunction validateRelinquishOutputArgs(args) {\n const vargs = {\n basket: validateBasket(args.basket),\n output: validateOutpointString(args.output, 'output')\n };\n return vargs;\n}\nfunction validateRelinquishCertificateArgs(args) {\n const vargs = {\n type: validateBase64String(args.type, 'type'),\n serialNumber: validateBase64String(args.serialNumber, 'serialNumber'),\n certifier: validateHexString(args.certifier, 'certifier')\n };\n return vargs;\n}\nfunction validateListCertificatesArgs(args) {\n const vargs = {\n certifiers: defaultEmpty(args.certifiers.map(c => validateHexString(c.trim(), 'certifiers'))),\n types: defaultEmpty(args.types.map(t => 
validateBase64String(t.trim(), 'types'))),\n limit: validateInteger(args.limit, 'limit', 10, 1, 10000),\n offset: validatePositiveIntegerOrZero(defaultZero(args.offset), 'offset'),\n privileged: defaultFalse(args.privileged),\n privilegedReason: validateOptionalStringLength(args.privilegedReason, 'privilegedReason', 5, 50),\n partial: undefined\n };\n return vargs;\n}\nfunction validateCertificateFields(fields) {\n for (const fieldName of Object.keys(fields)) {\n validateStringLength(fieldName, 'field name', 1, 50);\n }\n return fields;\n}\nfunction validateKeyringRevealer(kr, name) {\n if (kr === 'certifier')\n return kr;\n return validateHexString(kr, name);\n}\nfunction validateOptionalKeyringRevealer(kr, name) {\n if (kr === undefined)\n return undefined;\n return validateKeyringRevealer(kr, name);\n}\nfunction validateKeyringForSubject(kr, name) {\n for (const fn of Object.keys(kr)) {\n validateStringLength(fn, `${name} field name`, 1, 50);\n validateBase64String(kr[fn], `${name} field value`);\n }\n return kr;\n}\nfunction validateOptionalKeyringForSubject(kr, name) {\n if (kr === undefined)\n return undefined;\n return validateKeyringForSubject(kr, name);\n}\n/**\n *\n * @param args\n * @param subject Must be valid for \"direct\" `acquisitionProtocol`. 
public key of the certificate subject.\n * @returns\n */\nasync function validateAcquireCertificateArgs(args) {\n const vargs = {\n acquisitionProtocol: args.acquisitionProtocol,\n type: validateBase64String(args.type, 'type'),\n serialNumber: validateOptionalBase64String(args.serialNumber, 'serialNumber'),\n certifier: validateHexString(args.certifier, 'certifier'),\n revocationOutpoint: validateOptionalOutpointString(args.revocationOutpoint, 'revocationOutpoint'),\n fields: validateCertificateFields(args.fields),\n signature: validateOptionalHexString(args.signature, 'signature'),\n certifierUrl: args.certifierUrl,\n keyringRevealer: validateOptionalKeyringRevealer(args.keyringRevealer, 'keyringRevealer'),\n keyringForSubject: validateOptionalKeyringForSubject(args.keyringForSubject, 'keyringForSubject'),\n privileged: defaultFalse(args.privileged),\n privilegedReason: validateOptionalStringLength(args.privilegedReason, 'privilegedReason', 5, 50)\n };\n if (vargs.privileged && !vargs.privilegedReason)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('privilegedReason', `valid when 'privileged' is true `);\n if (vargs.acquisitionProtocol === 'direct') {\n if (!vargs.serialNumber)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('serialNumber', 'valid when acquisitionProtocol is \"direct\"');\n if (!vargs.signature)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('signature', 'valid when acquisitionProtocol is \"direct\"');\n if (!vargs.revocationOutpoint)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('revocationOutpoint', 'valid when acquisitionProtocol is \"direct\"');\n }\n return vargs;\n}\nfunction validateAcquireIssuanceCertificateArgs(args) {\n if (args.acquisitionProtocol !== 'issuance')\n throw new index_client_1.sdk.WERR_INTERNAL('Only acquire certificate via issuance requests allowed here.');\n if (args.serialNumber)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('serialNumber', 'valid when acquisitionProtocol is 
\"direct\"');\n if (args.signature)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('signature', 'valid when acquisitionProtocol is \"direct\"');\n if (args.revocationOutpoint)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('revocationOutpoint', 'valid when acquisitionProtocol is \"direct\"');\n if (args.keyringRevealer)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('keyringRevealer', 'valid when acquisitionProtocol is \"direct\"');\n if (args.keyringForSubject)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('keyringForSubject', 'valid when acquisitionProtocol is \"direct\"');\n if (!args.certifierUrl)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('certifierUrl', 'valid when acquisitionProtocol is \"issuance\"');\n if (args.privileged && !args.privilegedReason)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('privilegedReason', `valid when 'privileged' is true `);\n const vargs = {\n type: validateBase64String(args.type, 'type'),\n certifier: validateHexString(args.certifier, 'certifier'),\n certifierUrl: args.certifierUrl,\n fields: validateCertificateFields(args.fields),\n privileged: defaultFalse(args.privileged),\n privilegedReason: validateOptionalStringLength(args.privilegedReason, 'privilegedReason', 5, 50),\n subject: ''\n };\n return vargs;\n}\nfunction validateAcquireDirectCertificateArgs(args) {\n if (args.acquisitionProtocol !== 'direct')\n throw new index_client_1.sdk.WERR_INTERNAL('Only acquire direct certificate requests allowed here.');\n if (!args.serialNumber)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('serialNumber', 'valid when acquisitionProtocol is \"direct\"');\n if (!args.signature)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('signature', 'valid when acquisitionProtocol is \"direct\"');\n if (!args.revocationOutpoint)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('revocationOutpoint', 'valid when acquisitionProtocol is \"direct\"');\n if (!args.keyringRevealer)\n throw 
new index_client_1.sdk.WERR_INVALID_PARAMETER('keyringRevealer', 'valid when acquisitionProtocol is \"direct\"');\n if (!args.keyringForSubject)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('keyringForSubject', 'valid when acquisitionProtocol is \"direct\"');\n if (args.privileged && !args.privilegedReason)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('privilegedReason', `valid when 'privileged' is true `);\n const vargs = {\n type: validateBase64String(args.type, 'type'),\n serialNumber: validateBase64String(args.serialNumber, 'serialNumber'),\n certifier: validateHexString(args.certifier, 'certifier'),\n revocationOutpoint: validateOutpointString(args.revocationOutpoint, 'revocationOutpoint'),\n fields: validateCertificateFields(args.fields),\n signature: validateHexString(args.signature, 'signature'),\n keyringRevealer: validateKeyringRevealer(args.keyringRevealer, 'keyringRevealer'),\n keyringForSubject: validateKeyringForSubject(args.keyringForSubject, 'keyringForSubject'),\n privileged: defaultFalse(args.privileged),\n privilegedReason: validateOptionalStringLength(args.privilegedReason, 'privilegedReason', 5, 50),\n subject: ''\n };\n return vargs;\n}\nfunction validateProveCertificateArgs(args) {\n if (args.privileged && !args.privilegedReason)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('privilegedReason', `valid when 'privileged' is true `);\n const vargs = {\n type: validateOptionalBase64String(args.certificate.type, 'certificate.type'),\n serialNumber: validateOptionalBase64String(args.certificate.serialNumber, 'certificate.serialNumber'),\n certifier: validateOptionalHexString(args.certificate.certifier, 'certificate.certifier'),\n subject: validateOptionalHexString(args.certificate.subject, 'certificate.subject'),\n revocationOutpoint: validateOptionalOutpointString(args.certificate.revocationOutpoint, 'certificate.revocationOutpoint'),\n signature: validateOptionalHexString(args.certificate.signature, 
'certificate.signature'),\n fieldsToReveal: defaultEmpty(args.fieldsToReveal).map(fieldName => validateStringLength(fieldName, `fieldsToReveal ${fieldName}`, 1, 50)),\n verifier: validateHexString(args.verifier, 'verifier'),\n privileged: defaultFalse(args.privileged),\n privilegedReason: validateOptionalStringLength(args.privilegedReason, 'privilegedReason', 5, 50)\n };\n return vargs;\n}\nfunction validateDiscoverByIdentityKeyArgs(args) {\n const vargs = {\n identityKey: validateHexString(args.identityKey, 'identityKey', 66, 66),\n limit: validateInteger(args.limit, 'limit', 10, 1, 10000),\n offset: validatePositiveIntegerOrZero(defaultZero(args.offset), 'offset'),\n seekPermission: defaultFalse(args.seekPermission)\n };\n return vargs;\n}\nfunction validateAttributes(attributes) {\n for (const fieldName of Object.keys(attributes)) {\n validateStringLength(fieldName, `field name ${fieldName}`, 1, 50);\n }\n return attributes;\n}\nfunction validateDiscoverByAttributesArgs(args) {\n const vargs = {\n attributes: validateAttributes(args.attributes),\n limit: validateInteger(args.limit, 'limit', 10, 1, 10000),\n offset: validatePositiveIntegerOrZero(defaultZero(args.offset), 'offset'),\n seekPermission: defaultFalse(args.seekPermission)\n };\n return vargs;\n}\n/**\n * @param {BasketStringUnder300Bytes} args.basket - Required. The associated basket name whose outputs should be listed.\n * @param {OutputTagStringUnder300Bytes[]} [args.tags] - Optional. Filter outputs based on these tags.\n * @param {'all' | 'any'} [args.tagQueryMode] - Optional. Filter mode, defining whether all or any of the tags must match. By default, any tag can match.\n * @param {'locking scripts' | 'entire transactions'} [args.include] - Optional. Whether to include locking scripts (with each output) or entire transactions (as aggregated BEEF, at the top level) in the result. 
By default, unless specified, neither are returned.\n * @param {BooleanDefaultFalse} [args.includeEntireTransactions] - Optional. Whether to include the entire transaction(s) in the result.\n * @param {BooleanDefaultFalse} [args.includeCustomInstructions] - Optional. Whether custom instructions should be returned in the result.\n * @param {BooleanDefaultFalse} [args.includeTags] - Optional. Whether the tags associated with the output should be returned.\n * @param {BooleanDefaultFalse} [args.includeLabels] - Optional. Whether the labels associated with the transaction containing the output should be returned.\n * @param {PositiveIntegerDefault10Max10000} [args.limit] - Optional limit on the number of outputs to return.\n * @param {PositiveIntegerOrZero} [args.offset] - Optional. Number of outputs to skip before starting to return results.\n * @param {BooleanDefaultTrue} [args.seekPermission] — Optional. Whether to seek permission from the user for this operation if required. Default true, will return an error rather than proceed if set to false.\n */\nfunction validateListOutputsArgs(args) {\n let tagQueryMode;\n if (args.tagQueryMode === undefined || args.tagQueryMode === 'any')\n tagQueryMode = 'any';\n else if (args.tagQueryMode === 'all')\n tagQueryMode = 'all';\n else\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('tagQueryMode', `undefined, 'any', or 'all'`);\n const vargs = {\n basket: validateStringLength(args.basket, 'basket', 1, 300),\n tags: (args.tags || []).map(t => validateStringLength(t, 'tag', 1, 300)),\n tagQueryMode,\n includeLockingScripts: args.include === 'locking scripts',\n includeTransactions: args.include === 'entire transactions',\n includeCustomInstructions: defaultFalse(args.includeCustomInstructions),\n includeTags: defaultFalse(args.includeTags),\n includeLabels: defaultFalse(args.includeLabels),\n limit: validateInteger(args.limit, 'limit', 10, 1, 10000),\n offset: validateInteger(args.offset, 'offset', 0, 0),\n seekPermission: 
defaultTrue(args.seekPermission),\n knownTxids: []\n };\n return vargs;\n}\n/**\n * @param {LabelStringUnder300Bytes[]} args.labels - An array of labels used to filter actions.\n * @param {'any' | 'all'} [args.labelQueryMode] - Optional. Specifies how to match labels (default is any which matches any of the labels).\n * @param {BooleanDefaultFalse} [args.includeLabels] - Optional. Whether to include transaction labels in the result set.\n * @param {BooleanDefaultFalse} [args.includeInputs] - Optional. Whether to include input details in the result set.\n * @param {BooleanDefaultFalse} [args.includeInputSourceLockingScripts] - Optional. Whether to include input source locking scripts in the result set.\n * @param {BooleanDefaultFalse} [args.includeInputUnlockingScripts] - Optional. Whether to include input unlocking scripts in the result set.\n * @param {BooleanDefaultFalse} [args.includeOutputs] - Optional. Whether to include output details in the result set.\n * @param {BooleanDefaultFalse} [args.includeOutputLockingScripts] - Optional. Whether to include output locking scripts in the result set.\n * @param {PositiveIntegerDefault10Max10000} [args.limit] - Optional. The maximum number of transactions to retrieve.\n * @param {PositiveIntegerOrZero} [args.offset] - Optional. Number of transactions to skip before starting to return the results.\n * @param {BooleanDefaultTrue} [args.seekPermission] — Optional. Whether to seek permission from the user for this operation if required. 
Default true, will return an error rather than proceed if set to false.\n */\nfunction validateListActionsArgs(args) {\n let labelQueryMode;\n if (args.labelQueryMode === undefined || args.labelQueryMode === 'any')\n labelQueryMode = 'any';\n else if (args.labelQueryMode === 'all')\n labelQueryMode = 'all';\n else\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('labelQueryMode', `undefined, 'any', or 'all'`);\n const vargs = {\n labels: (args.labels || []).map(t => validateLabel(t)),\n labelQueryMode,\n includeLabels: defaultFalse(args.includeLabels),\n includeInputs: defaultFalse(args.includeInputs),\n includeInputSourceLockingScripts: defaultFalse(args.includeInputSourceLockingScripts),\n includeInputUnlockingScripts: defaultFalse(args.includeInputUnlockingScripts),\n includeOutputs: defaultFalse(args.includeOutputs),\n includeOutputLockingScripts: defaultFalse(args.includeOutputLockingScripts),\n limit: validateInteger(args.limit, 'limit', 10, 1, 10000),\n offset: validateInteger(args.offset, 'offset', 0, 0),\n seekPermission: defaultTrue(args.seekPermission)\n };\n return vargs;\n}\n//# sourceMappingURL=validationHelpers.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/validationHelpers.js?\n}"); /***/ }), @@ -3080,7 +3124,18 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.Services = void 0;\nexports.validateScriptHash = validateScriptHash;\nexports.toBinaryBaseBlockHeader = toBinaryBaseBlockHeader;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst ServiceCollection_1 = __webpack_require__(/*! ./ServiceCollection */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/ServiceCollection.js\");\nconst createDefaultWalletServicesOptions_1 = __webpack_require__(/*! 
./createDefaultWalletServicesOptions */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/createDefaultWalletServicesOptions.js\");\nconst WhatsOnChain_1 = __webpack_require__(/*! ./providers/WhatsOnChain */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/providers/WhatsOnChain.js\");\nconst exchangeRates_1 = __webpack_require__(/*! ./providers/exchangeRates */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/providers/exchangeRates.js\");\nconst ARC_1 = __webpack_require__(/*! ./providers/ARC */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/providers/ARC.js\");\nconst Bitails_1 = __webpack_require__(/*! ./providers/Bitails */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/providers/Bitails.js\");\nconst getBeefForTxid_1 = __webpack_require__(/*! ./providers/getBeefForTxid */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/providers/getBeefForTxid.js\");\nconst WERR_errors_1 = __webpack_require__(/*! ../sdk/WERR_errors */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WERR_errors.js\");\nconst ChaintracksChainTracker_1 = __webpack_require__(/*! ./chaintracker/ChaintracksChainTracker */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/ChaintracksChainTracker.js\");\nconst WalletError_1 = __webpack_require__(/*! ../sdk/WalletError */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WalletError.js\");\nconst utilityHelpers_1 = __webpack_require__(/*! ../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst utilityHelpers_noBuffer_1 = __webpack_require__(/*! 
../utility/utilityHelpers.noBuffer */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.noBuffer.js\");\nclass Services {\n static createDefaultOptions(chain) {\n return (0, createDefaultWalletServicesOptions_1.createDefaultWalletServicesOptions)(chain);\n }\n constructor(optionsOrChain) {\n this.postBeefMode = 'UntilSuccess';\n this.targetCurrencies = ['USD', 'GBP', 'EUR'];\n this.chain = typeof optionsOrChain === 'string' ? optionsOrChain : optionsOrChain.chain;\n this.options = typeof optionsOrChain === 'string' ? Services.createDefaultOptions(this.chain) : optionsOrChain;\n this.whatsonchain = new WhatsOnChain_1.WhatsOnChain(this.chain, { apiKey: this.options.whatsOnChainApiKey }, this);\n this.arcTaal = new ARC_1.ARC(this.options.arcUrl, this.options.arcConfig, 'arcTaal');\n if (this.options.arcGorillaPoolUrl) {\n this.arcGorillaPool = new ARC_1.ARC(this.options.arcGorillaPoolUrl, this.options.arcGorillaPoolConfig, 'arcGorillaPool');\n }\n this.bitails = new Bitails_1.Bitails(this.chain);\n //prettier-ignore\n this.getMerklePathServices = new ServiceCollection_1.ServiceCollection('getMerklePath')\n .add({ name: 'WhatsOnChain', service: this.whatsonchain.getMerklePath.bind(this.whatsonchain) })\n .add({ name: 'Bitails', service: this.bitails.getMerklePath.bind(this.bitails) });\n //prettier-ignore\n this.getRawTxServices = new ServiceCollection_1.ServiceCollection('getRawTx')\n .add({ name: 'WhatsOnChain', service: this.whatsonchain.getRawTxResult.bind(this.whatsonchain) });\n this.postBeefServices = new ServiceCollection_1.ServiceCollection('postBeef');\n if (this.arcGorillaPool) {\n //prettier-ignore\n this.postBeefServices.add({ name: 'GorillaPoolArcBeef', service: this.arcGorillaPool.postBeef.bind(this.arcGorillaPool) });\n }\n //prettier-ignore\n this.postBeefServices\n .add({ name: 'TaalArcBeef', service: this.arcTaal.postBeef.bind(this.arcTaal) })\n .add({ name: 'Bitails', service: this.bitails.postBeef.bind(this.bitails) })\n 
.add({ name: 'WhatsOnChain', service: this.whatsonchain.postBeef.bind(this.whatsonchain) });\n //prettier-ignore\n this.getUtxoStatusServices = new ServiceCollection_1.ServiceCollection('getUtxoStatus')\n .add({ name: 'WhatsOnChain', service: this.whatsonchain.getUtxoStatus.bind(this.whatsonchain) });\n //prettier-ignore\n this.getStatusForTxidsServices = new ServiceCollection_1.ServiceCollection('getStatusForTxids')\n .add({ name: 'WhatsOnChain', service: this.whatsonchain.getStatusForTxids.bind(this.whatsonchain) });\n //prettier-ignore\n this.getScriptHashHistoryServices = new ServiceCollection_1.ServiceCollection('getScriptHashHistory')\n .add({ name: 'WhatsOnChain', service: this.whatsonchain.getScriptHashHistory.bind(this.whatsonchain) });\n //prettier-ignore\n this.updateFiatExchangeRateServices = new ServiceCollection_1.ServiceCollection('updateFiatExchangeRate')\n .add({ name: 'ChaintracksService', service: exchangeRates_1.updateChaintracksFiatExchangeRates })\n .add({ name: 'exchangeratesapi', service: exchangeRates_1.updateExchangeratesapi });\n }\n getServicesCallHistory(reset) {\n return {\n version: 2,\n getMerklePath: this.getMerklePathServices.getServiceCallHistory(reset),\n getRawTx: this.getRawTxServices.getServiceCallHistory(reset),\n postBeef: this.postBeefServices.getServiceCallHistory(reset),\n getUtxoStatus: this.getUtxoStatusServices.getServiceCallHistory(reset),\n getStatusForTxids: this.getStatusForTxidsServices.getServiceCallHistory(reset),\n getScriptHashHistory: this.getScriptHashHistoryServices.getServiceCallHistory(reset),\n updateFiatExchangeRates: this.updateFiatExchangeRateServices.getServiceCallHistory(reset)\n };\n }\n async getChainTracker() {\n if (!this.options.chaintracks)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('options.chaintracks', `valid to enable 'getChainTracker' service.`);\n return new ChaintracksChainTracker_1.ChaintracksChainTracker(this.chain, this.options.chaintracks);\n }\n async getBsvExchangeRate() {\n 
this.options.bsvExchangeRate = await this.whatsonchain.updateBsvExchangeRate(this.options.bsvExchangeRate, this.options.bsvUpdateMsecs);\n return this.options.bsvExchangeRate.rate;\n }\n async getFiatExchangeRate(currency, base) {\n const rates = await this.updateFiatExchangeRates(this.options.fiatExchangeRates, this.options.fiatUpdateMsecs);\n this.options.fiatExchangeRates = rates;\n base || (base = 'USD');\n const rate = rates.rates[currency] / rates.rates[base];\n return rate;\n }\n get getProofsCount() {\n return this.getMerklePathServices.count;\n }\n get getRawTxsCount() {\n return this.getRawTxServices.count;\n }\n get postBeefServicesCount() {\n return this.postBeefServices.count;\n }\n get getUtxoStatsCount() {\n return this.getUtxoStatusServices.count;\n }\n async getStatusForTxids(txids, useNext) {\n const services = this.getStatusForTxidsServices;\n if (useNext)\n services.next();\n let r0 = {\n name: '',\n status: 'error',\n error: new WERR_errors_1.WERR_INTERNAL('No services available.'),\n results: []\n };\n for (let tries = 0; tries < services.count; tries++) {\n const stc = services.serviceToCall;\n try {\n const r = await stc.service(txids);\n if (r.status === 'success') {\n services.addServiceCallSuccess(stc);\n r0 = r;\n break;\n }\n else {\n if (r.error)\n services.addServiceCallError(stc, r.error);\n else\n services.addServiceCallFailure(stc);\n }\n }\n catch (eu) {\n const e = WalletError_1.WalletError.fromUnknown(eu);\n services.addServiceCallError(stc, e);\n }\n services.next();\n }\n return r0;\n }\n /**\n * @param script Output script to be hashed for `getUtxoStatus` default `outputFormat`\n * @returns script hash in 'hashLE' format, which is the default.\n */\n hashOutputScript(script) {\n const hash = sdk_1.Utils.toHex((0, utilityHelpers_1.sha256Hash)(sdk_1.Utils.toArray(script, 'hex')));\n return hash;\n }\n async isUtxo(output) {\n if (!output.lockingScript) {\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('output.lockingScript', 
'validated by storage provider validateOutputScript.');\n }\n const hash = this.hashOutputScript(sdk_1.Utils.toHex(output.lockingScript));\n const or = await this.getUtxoStatus(hash, undefined, `${output.txid}.${output.vout}`);\n return or.isUtxo === true;\n }\n async getUtxoStatus(output, outputFormat, outpoint, useNext) {\n const services = this.getUtxoStatusServices;\n if (useNext)\n services.next();\n let r0 = {\n name: '',\n status: 'error',\n error: new WERR_errors_1.WERR_INTERNAL('No services available.'),\n details: []\n };\n for (let retry = 0; retry < 2; retry++) {\n for (let tries = 0; tries < services.count; tries++) {\n const stc = services.serviceToCall;\n try {\n const r = await stc.service(output, outputFormat, outpoint);\n if (r.status === 'success') {\n services.addServiceCallSuccess(stc);\n r0 = r;\n break;\n }\n else {\n if (r.error)\n services.addServiceCallError(stc, r.error);\n else\n services.addServiceCallFailure(stc);\n }\n }\n catch (eu) {\n const e = WalletError_1.WalletError.fromUnknown(eu);\n services.addServiceCallError(stc, e);\n }\n services.next();\n }\n if (r0.status === 'success')\n break;\n await (0, utilityHelpers_1.wait)(2000);\n }\n return r0;\n }\n async getScriptHashHistory(hash, useNext) {\n const services = this.getScriptHashHistoryServices;\n if (useNext)\n services.next();\n let r0 = {\n name: '',\n status: 'error',\n error: new WERR_errors_1.WERR_INTERNAL('No services available.'),\n history: []\n };\n for (let tries = 0; tries < services.count; tries++) {\n const stc = services.serviceToCall;\n try {\n const r = await stc.service(hash);\n if (r.status === 'success') {\n r0 = r;\n break;\n }\n else {\n if (r.error)\n services.addServiceCallError(stc, r.error);\n else\n services.addServiceCallFailure(stc);\n }\n }\n catch (eu) {\n const e = WalletError_1.WalletError.fromUnknown(eu);\n services.addServiceCallError(stc, e);\n }\n services.next();\n }\n return r0;\n }\n /**\n *\n * @param beef\n * @param chain\n * 
@returns\n */\n async postBeef(beef, txids) {\n let rs = [];\n const services = this.postBeefServices;\n const stcs = services.allServicesToCall;\n switch (this.postBeefMode) {\n case 'UntilSuccess':\n {\n for (const stc of stcs) {\n const r = await callService(stc);\n rs.push(r);\n if (r.status === 'success')\n break;\n if (r.txidResults && r.txidResults.every(txr => txr.serviceError)) {\n // move this service to the end of the list\n this.postBeefServices.moveServiceToLast(stc);\n }\n }\n }\n break;\n case 'PromiseAll':\n {\n rs = await Promise.all(stcs.map(async (stc) => {\n const r = await callService(stc);\n return r;\n }));\n }\n break;\n }\n return rs;\n async function callService(stc) {\n const r = await stc.service(beef, txids);\n if (r.status === 'success') {\n services.addServiceCallSuccess(stc);\n }\n else {\n if (r.error) {\n services.addServiceCallError(stc, r.error);\n }\n else {\n services.addServiceCallFailure(stc);\n }\n }\n return r;\n }\n }\n async getRawTx(txid, useNext) {\n const services = this.getRawTxServices;\n if (useNext)\n services.next();\n const r0 = { txid };\n for (let tries = 0; tries < services.count; tries++) {\n const stc = services.serviceToCall;\n try {\n const r = await stc.service(txid, this.chain);\n if (r.rawTx) {\n const hash = (0, utilityHelpers_noBuffer_1.asString)((0, utilityHelpers_1.doubleSha256BE)(r.rawTx));\n // Confirm transaction hash matches txid\n if (hash === (0, utilityHelpers_noBuffer_1.asString)(txid)) {\n // If we have a match, call it done.\n r0.rawTx = r.rawTx;\n r0.name = r.name;\n r0.error = undefined;\n services.addServiceCallSuccess(stc);\n break;\n }\n r.error = new WERR_errors_1.WERR_INTERNAL(`computed txid ${hash} doesn't match requested value ${txid}`);\n r.rawTx = undefined;\n }\n if (r.error)\n services.addServiceCallError(stc, r.error);\n else if (!r.rawTx)\n services.addServiceCallSuccess(stc, `not found`);\n else\n services.addServiceCallFailure(stc);\n if (r.error && !r0.error && 
!r0.rawTx)\n // If we have an error and didn't before...\n r0.error = r.error;\n }\n catch (eu) {\n const e = WalletError_1.WalletError.fromUnknown(eu);\n services.addServiceCallError(stc, e);\n }\n services.next();\n }\n return r0;\n }\n async invokeChaintracksWithRetry(method) {\n if (!this.options.chaintracks)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('options.chaintracks', 'valid for this service operation.');\n for (let retry = 0; retry < 3; retry++) {\n try {\n const r = await method();\n return r;\n }\n catch (eu) {\n const e = WalletError_1.WalletError.fromUnknown(eu);\n if (e.code != 'ECONNRESET')\n throw eu;\n }\n }\n throw new WERR_errors_1.WERR_INVALID_OPERATION('hashToHeader service unavailable');\n }\n async getHeaderForHeight(height) {\n const method = async () => {\n const header = await this.options.chaintracks.findHeaderForHeight(height);\n if (!header)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('hash', `valid height '${height}' on mined chain ${this.chain}`);\n return toBinaryBaseBlockHeader(header);\n };\n return this.invokeChaintracksWithRetry(method);\n }\n async getHeight() {\n const method = async () => {\n return await this.options.chaintracks.currentHeight();\n };\n return this.invokeChaintracksWithRetry(method);\n }\n async hashToHeader(hash) {\n const method = async () => {\n const header = await this.options.chaintracks.findHeaderForBlockHash(hash);\n return header;\n };\n let header = await this.invokeChaintracksWithRetry(method);\n if (!header) {\n header = await this.whatsonchain.getBlockHeaderByHash(hash);\n }\n if (!header)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('hash', `valid blockhash '${hash}' on mined chain ${this.chain}`);\n return header;\n }\n async getMerklePath(txid, useNext) {\n const services = this.getMerklePathServices;\n if (useNext)\n services.next();\n const r0 = { notes: [] };\n for (let tries = 0; tries < services.count; tries++) {\n const stc = services.serviceToCall;\n try {\n const r = 
await stc.service(txid, this);\n if (r.notes)\n r0.notes.push(...r.notes);\n if (!r0.name)\n r0.name = r.name;\n if (r.merklePath) {\n // If we have a proof, call it done.\n r0.merklePath = r.merklePath;\n r0.header = r.header;\n r0.name = r.name;\n r0.error = undefined;\n services.addServiceCallSuccess(stc);\n break;\n }\n if (r.error)\n services.addServiceCallError(stc, r.error);\n else\n services.addServiceCallFailure(stc);\n if (r.error && !r0.error) {\n // If we have an error and didn't before...\n r0.error = r.error;\n }\n }\n catch (eu) {\n const e = WalletError_1.WalletError.fromUnknown(eu);\n services.addServiceCallError(stc, e);\n }\n services.next();\n }\n return r0;\n }\n async updateFiatExchangeRates(rates, updateMsecs) {\n updateMsecs || (updateMsecs = 1000 * 60 * 15);\n const freshnessDate = new Date(Date.now() - updateMsecs);\n if (rates) {\n // Check if the rate we know is stale enough to update.\n updateMsecs || (updateMsecs = 1000 * 60 * 15);\n if (rates.timestamp > freshnessDate)\n return rates;\n }\n // Make sure we always start with the first service listed (chaintracks aggregator)\n const services = this.updateFiatExchangeRateServices.clone();\n let r0;\n for (let tries = 0; tries < services.count; tries++) {\n const stc = services.serviceToCall;\n try {\n const r = await stc.service(this.targetCurrencies, this.options);\n if (this.targetCurrencies.every(c => typeof r.rates[c] === 'number')) {\n services.addServiceCallSuccess(stc);\n r0 = r;\n break;\n }\n else {\n services.addServiceCallFailure(stc);\n }\n }\n catch (eu) {\n const e = WalletError_1.WalletError.fromUnknown(eu);\n services.addServiceCallError(stc, e);\n }\n services.next();\n }\n if (!r0) {\n console.error('Failed to update fiat exchange rates.');\n if (!rates)\n throw new WERR_errors_1.WERR_INTERNAL();\n return rates;\n }\n return r0;\n }\n async nLockTimeIsFinal(tx) {\n const MAXINT = 0xffffffff;\n const BLOCK_LIMIT = 500000000;\n let nLockTime;\n if (typeof tx === 
'number')\n nLockTime = tx;\n else {\n if (typeof tx === 'string') {\n tx = sdk_1.Transaction.fromHex(tx);\n }\n else if (Array.isArray(tx)) {\n tx = sdk_1.Transaction.fromBinary(tx);\n }\n if (tx instanceof sdk_1.Transaction) {\n if (tx.inputs.every(i => i.sequence === MAXINT)) {\n return true;\n }\n nLockTime = tx.lockTime;\n }\n else {\n throw new WERR_errors_1.WERR_INTERNAL('Should be either @bsv/sdk Transaction or babbage-bsv Transaction');\n }\n }\n if (nLockTime >= BLOCK_LIMIT) {\n const limit = Math.floor(Date.now() / 1000);\n return nLockTime < limit;\n }\n const height = await this.getHeight();\n return nLockTime < height;\n }\n async getBeefForTxid(txid) {\n const beef = await (0, getBeefForTxid_1.getBeefForTxid)(this, txid);\n return beef;\n }\n}\nexports.Services = Services;\nfunction validateScriptHash(output, outputFormat) {\n let b = (0, utilityHelpers_noBuffer_1.asArray)(output);\n if (!outputFormat) {\n if (b.length === 32)\n outputFormat = 'hashLE';\n else\n outputFormat = 'script';\n }\n switch (outputFormat) {\n case 'hashBE':\n break;\n case 'hashLE':\n b = b.reverse();\n break;\n case 'script':\n b = (0, utilityHelpers_1.sha256Hash)(b).reverse();\n break;\n default:\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('outputFormat', `not be ${outputFormat}`);\n }\n return (0, utilityHelpers_noBuffer_1.asString)(b);\n}\n/**\n * Serializes a block header as an 80 byte array.\n * The exact serialized format is defined in the Bitcoin White Paper\n * such that computing a double sha256 hash of the array computes\n * the block hash for the header.\n * @returns 80 byte array\n * @publicbody\n */\nfunction toBinaryBaseBlockHeader(header) {\n const writer = new sdk_1.Utils.Writer();\n writer.writeUInt32BE(header.version);\n writer.writeReverse((0, utilityHelpers_noBuffer_1.asArray)(header.previousHash));\n writer.writeReverse((0, utilityHelpers_noBuffer_1.asArray)(header.merkleRoot));\n writer.writeUInt32BE(header.time);\n 
writer.writeUInt32BE(header.bits);\n writer.writeUInt32BE(header.nonce);\n const r = writer.toArray();\n return r;\n}\n//# sourceMappingURL=Services.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/Services.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.Services = void 0;\nexports.validateScriptHash = validateScriptHash;\nexports.toBinaryBaseBlockHeader = toBinaryBaseBlockHeader;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst index_client_1 = __webpack_require__(/*! ../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nconst ServiceCollection_1 = __webpack_require__(/*! ./ServiceCollection */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/ServiceCollection.js\");\nconst createDefaultWalletServicesOptions_1 = __webpack_require__(/*! ./createDefaultWalletServicesOptions */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/createDefaultWalletServicesOptions.js\");\nconst chaintracker_1 = __webpack_require__(/*! ./chaintracker */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/index.js\");\nconst WhatsOnChain_1 = __webpack_require__(/*! ./providers/WhatsOnChain */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/providers/WhatsOnChain.js\");\nconst echangeRates_1 = __webpack_require__(/*! ./providers/echangeRates */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/providers/echangeRates.js\");\nconst ARC_1 = __webpack_require__(/*! ./providers/ARC */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/providers/ARC.js\");\nconst Bitails_1 = __webpack_require__(/*! ./providers/Bitails */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/providers/Bitails.js\");\nconst getBeefForTxid_1 = __webpack_require__(/*! 
./providers/getBeefForTxid */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/providers/getBeefForTxid.js\");\nclass Services {\n static createDefaultOptions(chain) {\n return (0, createDefaultWalletServicesOptions_1.createDefaultWalletServicesOptions)(chain);\n }\n constructor(optionsOrChain) {\n this.postBeefMode = 'UntilSuccess';\n this.targetCurrencies = ['USD', 'GBP', 'EUR'];\n this.chain = typeof optionsOrChain === 'string' ? optionsOrChain : optionsOrChain.chain;\n this.options = typeof optionsOrChain === 'string' ? Services.createDefaultOptions(this.chain) : optionsOrChain;\n this.whatsonchain = new WhatsOnChain_1.WhatsOnChain(this.chain, { apiKey: this.options.whatsOnChainApiKey }, this);\n this.arcTaal = new ARC_1.ARC(this.options.arcUrl, this.options.arcConfig, 'arcTaal');\n if (this.options.arcGorillaPoolUrl) {\n this.arcGorillaPool = new ARC_1.ARC(this.options.arcGorillaPoolUrl, this.options.arcGorillaPoolConfig, 'arcGorillaPool');\n }\n this.bitails = new Bitails_1.Bitails(this.chain);\n //prettier-ignore\n this.getMerklePathServices = new ServiceCollection_1.ServiceCollection('getMerklePath')\n .add({ name: 'WhatsOnChain', service: this.whatsonchain.getMerklePath.bind(this.whatsonchain) })\n .add({ name: 'Bitails', service: this.bitails.getMerklePath.bind(this.bitails) });\n //prettier-ignore\n this.getRawTxServices = new ServiceCollection_1.ServiceCollection('getRawTx')\n .add({ name: 'WhatsOnChain', service: this.whatsonchain.getRawTxResult.bind(this.whatsonchain) });\n this.postBeefServices = new ServiceCollection_1.ServiceCollection('postBeef');\n if (this.arcGorillaPool) {\n //prettier-ignore\n this.postBeefServices.add({ name: 'GorillaPoolArcBeef', service: this.arcGorillaPool.postBeef.bind(this.arcGorillaPool) });\n }\n //prettier-ignore\n this.postBeefServices\n .add({ name: 'TaalArcBeef', service: this.arcTaal.postBeef.bind(this.arcTaal) })\n .add({ name: 'Bitails', service: this.bitails.postBeef.bind(this.bitails) })\n .add({ 
name: 'WhatsOnChain', service: this.whatsonchain.postBeef.bind(this.whatsonchain) });\n //prettier-ignore\n this.getUtxoStatusServices = new ServiceCollection_1.ServiceCollection('getUtxoStatus')\n .add({ name: 'WhatsOnChain', service: this.whatsonchain.getUtxoStatus.bind(this.whatsonchain) });\n //prettier-ignore\n this.getStatusForTxidsServices = new ServiceCollection_1.ServiceCollection('getStatusForTxids')\n .add({ name: 'WhatsOnChain', service: this.whatsonchain.getStatusForTxids.bind(this.whatsonchain) });\n //prettier-ignore\n this.getScriptHashHistoryServices = new ServiceCollection_1.ServiceCollection('getScriptHashHistory')\n .add({ name: 'WhatsOnChain', service: this.whatsonchain.getScriptHashHistory.bind(this.whatsonchain) });\n //prettier-ignore\n this.updateFiatExchangeRateServices = new ServiceCollection_1.ServiceCollection('updateFiatExchangeRate')\n .add({ name: 'ChaintracksService', service: echangeRates_1.updateChaintracksFiatExchangeRates })\n .add({ name: 'exchangeratesapi', service: echangeRates_1.updateExchangeratesapi });\n }\n getServicesCallHistory(reset) {\n return {\n version: 2,\n getMerklePath: this.getMerklePathServices.getServiceCallHistory(reset),\n getRawTx: this.getRawTxServices.getServiceCallHistory(reset),\n postBeef: this.postBeefServices.getServiceCallHistory(reset),\n getUtxoStatus: this.getUtxoStatusServices.getServiceCallHistory(reset),\n getStatusForTxids: this.getStatusForTxidsServices.getServiceCallHistory(reset),\n getScriptHashHistory: this.getScriptHashHistoryServices.getServiceCallHistory(reset),\n updateFiatExchangeRates: this.updateFiatExchangeRateServices.getServiceCallHistory(reset)\n };\n }\n async getChainTracker() {\n if (!this.options.chaintracks)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('options.chaintracks', `valid to enable 'getChainTracker' service.`);\n return new chaintracker_1.ChaintracksChainTracker(this.chain, this.options.chaintracks);\n }\n async getBsvExchangeRate() {\n 
this.options.bsvExchangeRate = await this.whatsonchain.updateBsvExchangeRate(this.options.bsvExchangeRate, this.options.bsvUpdateMsecs);\n return this.options.bsvExchangeRate.rate;\n }\n async getFiatExchangeRate(currency, base) {\n const rates = await this.updateFiatExchangeRates(this.options.fiatExchangeRates, this.options.fiatUpdateMsecs);\n this.options.fiatExchangeRates = rates;\n base || (base = 'USD');\n const rate = rates.rates[currency] / rates.rates[base];\n return rate;\n }\n get getProofsCount() {\n return this.getMerklePathServices.count;\n }\n get getRawTxsCount() {\n return this.getRawTxServices.count;\n }\n get postBeefServicesCount() {\n return this.postBeefServices.count;\n }\n get getUtxoStatsCount() {\n return this.getUtxoStatusServices.count;\n }\n async getStatusForTxids(txids, useNext) {\n const services = this.getStatusForTxidsServices;\n if (useNext)\n services.next();\n let r0 = {\n name: '',\n status: 'error',\n error: new index_client_1.sdk.WERR_INTERNAL('No services available.'),\n results: []\n };\n for (let tries = 0; tries < services.count; tries++) {\n const stc = services.serviceToCall;\n try {\n const r = await stc.service(txids);\n if (r.status === 'success') {\n services.addServiceCallSuccess(stc);\n r0 = r;\n break;\n }\n else {\n if (r.error)\n services.addServiceCallError(stc, r.error);\n else\n services.addServiceCallFailure(stc);\n }\n }\n catch (eu) {\n const e = index_client_1.sdk.WalletError.fromUnknown(eu);\n services.addServiceCallError(stc, e);\n }\n services.next();\n }\n return r0;\n }\n /**\n * @param script Output script to be hashed for `getUtxoStatus` default `outputFormat`\n * @returns script hash in 'hashLE' format, which is the default.\n */\n hashOutputScript(script) {\n const hash = sdk_1.Utils.toHex((0, index_client_1.sha256Hash)(sdk_1.Utils.toArray(script, 'hex')));\n return hash;\n }\n async isUtxo(output) {\n if (!output.lockingScript) {\n throw new 
index_client_1.sdk.WERR_INVALID_PARAMETER('output.lockingScript', 'validated by storage provider validateOutputScript.');\n }\n const hash = this.hashOutputScript(sdk_1.Utils.toHex(output.lockingScript));\n const or = await this.getUtxoStatus(hash, undefined, `${output.txid}.${output.vout}`);\n return or.isUtxo === true;\n }\n async getUtxoStatus(output, outputFormat, outpoint, useNext) {\n const services = this.getUtxoStatusServices;\n if (useNext)\n services.next();\n let r0 = {\n name: '',\n status: 'error',\n error: new index_client_1.sdk.WERR_INTERNAL('No services available.'),\n details: []\n };\n for (let retry = 0; retry < 2; retry++) {\n for (let tries = 0; tries < services.count; tries++) {\n const stc = services.serviceToCall;\n try {\n const r = await stc.service(output, outputFormat, outpoint);\n if (r.status === 'success') {\n services.addServiceCallSuccess(stc);\n r0 = r;\n break;\n }\n else {\n if (r.error)\n services.addServiceCallError(stc, r.error);\n else\n services.addServiceCallFailure(stc);\n }\n }\n catch (eu) {\n const e = index_client_1.sdk.WalletError.fromUnknown(eu);\n services.addServiceCallError(stc, e);\n }\n services.next();\n }\n if (r0.status === 'success')\n break;\n await (0, index_client_1.wait)(2000);\n }\n return r0;\n }\n async getScriptHashHistory(hash, useNext) {\n const services = this.getScriptHashHistoryServices;\n if (useNext)\n services.next();\n let r0 = {\n name: '',\n status: 'error',\n error: new index_client_1.sdk.WERR_INTERNAL('No services available.'),\n history: []\n };\n for (let tries = 0; tries < services.count; tries++) {\n const stc = services.serviceToCall;\n try {\n const r = await stc.service(hash);\n if (r.status === 'success') {\n r0 = r;\n break;\n }\n else {\n if (r.error)\n services.addServiceCallError(stc, r.error);\n else\n services.addServiceCallFailure(stc);\n }\n }\n catch (eu) {\n const e = index_client_1.sdk.WalletError.fromUnknown(eu);\n services.addServiceCallError(stc, e);\n }\n 
services.next();\n }\n return r0;\n }\n /**\n *\n * @param beef\n * @param chain\n * @returns\n */\n async postBeef(beef, txids) {\n let rs = [];\n const services = this.postBeefServices;\n const stcs = services.allServicesToCall;\n switch (this.postBeefMode) {\n case 'UntilSuccess':\n {\n for (const stc of stcs) {\n const r = await callService(stc);\n rs.push(r);\n if (r.status === 'success')\n break;\n if (r.txidResults && r.txidResults.every(txr => txr.serviceError)) {\n // move this service to the end of the list\n this.postBeefServices.moveServiceToLast(stc);\n }\n }\n }\n break;\n case 'PromiseAll':\n {\n rs = await Promise.all(stcs.map(async (stc) => {\n const r = await callService(stc);\n return r;\n }));\n }\n break;\n }\n return rs;\n async function callService(stc) {\n const r = await stc.service(beef, txids);\n if (r.status === 'success') {\n services.addServiceCallSuccess(stc);\n }\n else {\n if (r.error) {\n services.addServiceCallError(stc, r.error);\n }\n else {\n services.addServiceCallFailure(stc);\n }\n }\n return r;\n }\n }\n async getRawTx(txid, useNext) {\n const services = this.getRawTxServices;\n if (useNext)\n services.next();\n const r0 = { txid };\n for (let tries = 0; tries < services.count; tries++) {\n const stc = services.serviceToCall;\n try {\n const r = await stc.service(txid, this.chain);\n if (r.rawTx) {\n const hash = (0, index_client_1.asString)((0, index_client_1.doubleSha256BE)(r.rawTx));\n // Confirm transaction hash matches txid\n if (hash === (0, index_client_1.asString)(txid)) {\n // If we have a match, call it done.\n r0.rawTx = r.rawTx;\n r0.name = r.name;\n r0.error = undefined;\n services.addServiceCallSuccess(stc);\n break;\n }\n r.error = new index_client_1.sdk.WERR_INTERNAL(`computed txid ${hash} doesn't match requested value ${txid}`);\n r.rawTx = undefined;\n }\n if (r.error)\n services.addServiceCallError(stc, r.error);\n else if (!r.rawTx)\n services.addServiceCallSuccess(stc, `not found`);\n else\n 
services.addServiceCallFailure(stc);\n if (r.error && !r0.error && !r0.rawTx)\n // If we have an error and didn't before...\n r0.error = r.error;\n }\n catch (eu) {\n const e = index_client_1.sdk.WalletError.fromUnknown(eu);\n services.addServiceCallError(stc, e);\n }\n services.next();\n }\n return r0;\n }\n async invokeChaintracksWithRetry(method) {\n if (!this.options.chaintracks)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('options.chaintracks', 'valid for this service operation.');\n for (let retry = 0; retry < 3; retry++) {\n try {\n const r = await method();\n return r;\n }\n catch (eu) {\n const e = index_client_1.sdk.WalletError.fromUnknown(eu);\n if (e.code != 'ECONNRESET')\n throw eu;\n }\n }\n throw new index_client_1.sdk.WERR_INVALID_OPERATION('hashToHeader service unavailable');\n }\n async getHeaderForHeight(height) {\n const method = async () => {\n const header = await this.options.chaintracks.findHeaderForHeight(height);\n if (!header)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('hash', `valid height '${height}' on mined chain ${this.chain}`);\n return toBinaryBaseBlockHeader(header);\n };\n return this.invokeChaintracksWithRetry(method);\n }\n async getHeight() {\n const method = async () => {\n return await this.options.chaintracks.currentHeight();\n };\n return this.invokeChaintracksWithRetry(method);\n }\n async hashToHeader(hash) {\n const method = async () => {\n const header = await this.options.chaintracks.findHeaderForBlockHash(hash);\n if (!header)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('hash', `valid blockhash '${hash}' on mined chain ${this.chain}`);\n return header;\n };\n return this.invokeChaintracksWithRetry(method);\n }\n async getMerklePath(txid, useNext) {\n const services = this.getMerklePathServices;\n if (useNext)\n services.next();\n const r0 = { notes: [] };\n for (let tries = 0; tries < services.count; tries++) {\n const stc = services.serviceToCall;\n try {\n const r = await 
stc.service(txid, this);\n if (r.notes)\n r0.notes.push(...r.notes);\n if (!r0.name)\n r0.name = r.name;\n if (r.merklePath) {\n // If we have a proof, call it done.\n r0.merklePath = r.merklePath;\n r0.header = r.header;\n r0.name = r.name;\n r0.error = undefined;\n services.addServiceCallSuccess(stc);\n break;\n }\n if (r.error)\n services.addServiceCallError(stc, r.error);\n else\n services.addServiceCallFailure(stc);\n if (r.error && !r0.error) {\n // If we have an error and didn't before...\n r0.error = r.error;\n }\n }\n catch (eu) {\n const e = index_client_1.sdk.WalletError.fromUnknown(eu);\n services.addServiceCallError(stc, e);\n }\n services.next();\n }\n return r0;\n }\n async updateFiatExchangeRates(rates, updateMsecs) {\n updateMsecs || (updateMsecs = 1000 * 60 * 15);\n const freshnessDate = new Date(Date.now() - updateMsecs);\n if (rates) {\n // Check if the rate we know is stale enough to update.\n updateMsecs || (updateMsecs = 1000 * 60 * 15);\n if (rates.timestamp > freshnessDate)\n return rates;\n }\n // Make sure we always start with the first service listed (chaintracks aggregator)\n const services = this.updateFiatExchangeRateServices.clone();\n let r0;\n for (let tries = 0; tries < services.count; tries++) {\n const stc = services.serviceToCall;\n try {\n const r = await stc.service(this.targetCurrencies, this.options);\n if (this.targetCurrencies.every(c => typeof r.rates[c] === 'number')) {\n services.addServiceCallSuccess(stc);\n r0 = r;\n break;\n }\n else {\n services.addServiceCallFailure(stc);\n }\n }\n catch (eu) {\n const e = index_client_1.sdk.WalletError.fromUnknown(eu);\n services.addServiceCallError(stc, e);\n }\n services.next();\n }\n if (!r0) {\n console.error('Failed to update fiat exchange rates.');\n if (!rates)\n throw new index_client_1.sdk.WERR_INTERNAL();\n return rates;\n }\n return r0;\n }\n async nLockTimeIsFinal(tx) {\n const MAXINT = 0xffffffff;\n const BLOCK_LIMIT = 500000000;\n let nLockTime;\n if (typeof tx === 
'number')\n nLockTime = tx;\n else {\n if (typeof tx === 'string') {\n tx = sdk_1.Transaction.fromHex(tx);\n }\n else if (Array.isArray(tx)) {\n tx = sdk_1.Transaction.fromBinary(tx);\n }\n if (tx instanceof sdk_1.Transaction) {\n if (tx.inputs.every(i => i.sequence === MAXINT)) {\n return true;\n }\n nLockTime = tx.lockTime;\n }\n else {\n throw new index_client_1.sdk.WERR_INTERNAL('Should be either @bsv/sdk Transaction or babbage-bsv Transaction');\n }\n }\n if (nLockTime >= BLOCK_LIMIT) {\n const limit = Math.floor(Date.now() / 1000);\n return nLockTime < limit;\n }\n const height = await this.getHeight();\n return nLockTime < height;\n }\n async getBeefForTxid(txid) {\n const beef = await (0, getBeefForTxid_1.getBeefForTxid)(this, txid);\n return beef;\n }\n}\nexports.Services = Services;\nfunction validateScriptHash(output, outputFormat) {\n let b = (0, index_client_1.asArray)(output);\n if (!outputFormat) {\n if (b.length === 32)\n outputFormat = 'hashLE';\n else\n outputFormat = 'script';\n }\n switch (outputFormat) {\n case 'hashBE':\n break;\n case 'hashLE':\n b = b.reverse();\n break;\n case 'script':\n b = (0, index_client_1.sha256Hash)(b).reverse();\n break;\n default:\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('outputFormat', `not be ${outputFormat}`);\n }\n return (0, index_client_1.asString)(b);\n}\n/**\n * Serializes a block header as an 80 byte array.\n * The exact serialized format is defined in the Bitcoin White Paper\n * such that computing a double sha256 hash of the array computes\n * the block hash for the header.\n * @returns 80 byte array\n * @publicbody\n */\nfunction toBinaryBaseBlockHeader(header) {\n const writer = new sdk_1.Utils.Writer();\n writer.writeUInt32BE(header.version);\n writer.writeReverse((0, index_client_1.asArray)(header.previousHash));\n writer.writeReverse((0, index_client_1.asArray)(header.merkleRoot));\n writer.writeUInt32BE(header.time);\n writer.writeUInt32BE(header.bits);\n 
writer.writeUInt32BE(header.nonce);\n const r = writer.toArray();\n return r;\n}\n//# sourceMappingURL=Services.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/Services.js?\n}"); + +/***/ }), + +/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/BHServiceClient.js": +/*!**************************************************************************************************!*\ + !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/BHServiceClient.js ***! + \**************************************************************************************************/ +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.BHServiceClient = void 0;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst ChaintracksServiceClient_1 = __webpack_require__(/*! ./chaintracks/ChaintracksServiceClient */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/ChaintracksServiceClient.js\");\nconst blockHeaderUtilities_1 = __webpack_require__(/*! 
./chaintracks/util/blockHeaderUtilities */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/blockHeaderUtilities.js\");\nclass BHServiceClient {\n constructor(chain, url, apiKey) {\n this.bhs = new sdk_1.BlockHeadersService(url, { apiKey });\n this.cache = {};\n this.chain = chain;\n this.serviceUrl = url;\n this.options = ChaintracksServiceClient_1.ChaintracksServiceClient.createChaintracksServiceClientOptions();\n this.options.useAuthrite = true;\n this.apiKey = apiKey;\n }\n async currentHeight() {\n return await this.bhs.currentHeight();\n }\n async isValidRootForHeight(root, height) {\n const cachedRoot = this.cache[height];\n if (cachedRoot) {\n return cachedRoot === root;\n }\n const isValid = await this.bhs.isValidRootForHeight(root, height);\n this.cache[height] = root;\n return isValid;\n }\n async getPresentHeight() {\n return await this.bhs.currentHeight();\n }\n async findHeaderForHeight(height) {\n const response = await this.getJsonOrUndefined(`/api/v1/chain/header/byHeight?height=${height}`);\n const header = response === null || response === void 0 ? void 0 : response[0];\n if (!header)\n return undefined;\n const formatted = {\n version: header.version,\n previousHash: header.prevBlockHash,\n merkleRoot: header.merkleRoot,\n time: header.creationTimestamp,\n bits: header.difficultyTarget,\n nonce: header.nonce,\n height,\n hash: header.hash\n };\n return formatted;\n }\n async findHeaderForBlockHash(hash) {\n const response = await this.getJsonOrUndefined(`/api/v1/chain/header/state/${hash}`);\n if (!(response === null || response === void 0 ? 
void 0 : response.header))\n return undefined;\n const formatted = {\n version: response.header.version,\n previousHash: response.header.prevBlockHash,\n merkleRoot: response.header.merkleRoot,\n time: response.header.creationTimestamp,\n bits: response.header.difficultyTarget,\n nonce: response.header.nonce,\n height: response.height,\n hash: response.header.hash\n };\n return formatted;\n }\n async getHeaders(height, count) {\n const response = await this.getJsonOrUndefined(`/api/v1/chain/header/byHeight?height=${height}&count=${count}`);\n if (!response)\n return '';\n if (response.length < count)\n throw new Error('Cannot retrieve enough headers');\n const headers = response.map(response => {\n const header = {\n version: response.version,\n previousHash: response.prevBlockHash,\n merkleRoot: response.merkleRoot,\n time: response.creationTimestamp,\n bits: response.difficultyTarget,\n nonce: response.nonce\n };\n return (0, blockHeaderUtilities_1.serializeBlockHeader)(header);\n });\n return headers.reduce((str, arr) => str + sdk_1.Utils.toHex(arr), '');\n }\n async findChainWorkForBlockHash(hash) {\n throw new Error('Not implemented');\n }\n async findChainTipHeader() {\n const response = await this.getJson('/api/v1/chain/tip/longest');\n const formatted = {\n version: response.header.version,\n previousHash: response.header.prevBlockHash,\n merkleRoot: response.header.merkleRoot,\n time: response.header.creationTimestamp,\n bits: response.header.difficultyTarget,\n nonce: response.header.nonce,\n height: response.height,\n hash: response.header.hash\n };\n return formatted;\n }\n async getJsonOrUndefined(path) {\n let e = undefined;\n for (let retry = 0; retry < 3; retry++) {\n try {\n const r = await fetch(`${this.serviceUrl}${path}`, { headers: { Authorization: `Bearer ${this.apiKey}` } });\n if (r.status !== 200)\n throw new Error(JSON.stringify(r));\n const v = await r.json();\n if (!v)\n return undefined;\n return v;\n }\n catch (eu) {\n e = eu;\n }\n if 
(e && e.name !== 'ECONNRESET')\n break;\n }\n if (e)\n throw e;\n }\n async getJson(path) {\n const r = await this.getJsonOrUndefined(path);\n if (r === undefined)\n throw new Error('Value was undefined. Requested object may not exist.');\n return r;\n }\n /*\n Please note that all methods hereafter are included only to match the interface of ChaintracksServiceClient.\n */\n async postJsonVoid(path, params) {\n throw new Error('Not implemented');\n }\n async addHeader(header) {\n throw new Error('Not implemented');\n }\n async findHeaderForMerkleRoot(merkleRoot, height) {\n throw new Error('Not implemented');\n }\n async startListening() {\n throw new Error('Not implemented');\n }\n async listening() {\n throw new Error('Not implemented');\n }\n async isSynchronized() {\n throw new Error('Not implemented');\n }\n async getChain() {\n return this.chain;\n }\n async isListening() {\n throw new Error('Not implemented');\n }\n async getChainTipHeader() {\n throw new Error('Not implemented');\n }\n async findChainTipHashHex() {\n throw new Error('Not implemented');\n }\n}\nexports.BHServiceClient = BHServiceClient;\n//# sourceMappingURL=BHServiceClient.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/BHServiceClient.js?\n}"); /***/ }), @@ -3091,7 +3146,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.ChaintracksChainTracker = void 0;\nconst ChaintracksServiceClient_1 = __webpack_require__(/*! ./chaintracks/ChaintracksServiceClient */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/ChaintracksServiceClient.js\");\nconst WalletError_1 = __webpack_require__(/*! ../../sdk/WalletError */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WalletError.js\");\nconst utilityHelpers_1 = __webpack_require__(/*! 
../../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst WERR_errors_1 = __webpack_require__(/*! ../../sdk/WERR_errors */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WERR_errors.js\");\nclass ChaintracksChainTracker {\n constructor(chain, chaintracks, options) {\n chain || (chain = 'main');\n this.chaintracks =\n chaintracks !== null && chaintracks !== void 0 ? chaintracks : new ChaintracksServiceClient_1.ChaintracksServiceClient(chain, `https://npm-registry.babbage.systems:808${chain === 'main' ? '4' : '3'}`);\n this.cache = {};\n this.options = options || {};\n }\n async currentHeight() {\n return await this.chaintracks.getPresentHeight();\n }\n async isValidRootForHeight(root, height) {\n const cachedRoot = this.cache[height];\n if (cachedRoot) {\n return cachedRoot === root;\n }\n let header;\n const retries = this.options.maxRetries || 3;\n let error = undefined;\n for (let tryCount = 1; tryCount <= retries; tryCount++) {\n try {\n header = await this.chaintracks.findHeaderForHeight(height);\n if (!header) {\n return false;\n }\n break;\n }\n catch (eu) {\n error = WalletError_1.WalletError.fromUnknown(eu);\n if (tryCount > retries) {\n throw error;\n }\n await (0, utilityHelpers_1.wait)(1000);\n }\n }\n if (!header)\n throw new WERR_errors_1.WERR_INTERNAL('no header should have returned false or thrown an error.');\n this.cache[height] = header.merkleRoot;\n if (header.merkleRoot !== root) {\n return false;\n }\n return true;\n }\n}\nexports.ChaintracksChainTracker = ChaintracksChainTracker;\n//# sourceMappingURL=ChaintracksChainTracker.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/ChaintracksChainTracker.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.ChaintracksChainTracker = void 0;\nconst index_client_1 = __webpack_require__(/*! 
../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nconst ChaintracksServiceClient_1 = __webpack_require__(/*! ./chaintracks/ChaintracksServiceClient */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/ChaintracksServiceClient.js\");\nclass ChaintracksChainTracker {\n constructor(chain, chaintracks, options) {\n chain || (chain = 'main');\n this.chaintracks =\n chaintracks !== null && chaintracks !== void 0 ? chaintracks : new ChaintracksServiceClient_1.ChaintracksServiceClient(chain, `https://npm-registry.babbage.systems:808${chain === 'main' ? '4' : '3'}`);\n this.cache = {};\n this.options = options || {};\n }\n async currentHeight() {\n return await this.chaintracks.getPresentHeight();\n }\n async isValidRootForHeight(root, height) {\n const cachedRoot = this.cache[height];\n if (cachedRoot) {\n return cachedRoot === root;\n }\n let header;\n const retries = this.options.maxRetries || 3;\n let error = undefined;\n for (let tryCount = 1; tryCount <= retries; tryCount++) {\n try {\n header = await this.chaintracks.findHeaderForHeight(height);\n if (!header) {\n return false;\n }\n break;\n }\n catch (eu) {\n error = index_client_1.sdk.WalletError.fromUnknown(eu);\n if (tryCount > retries) {\n throw error;\n }\n await (0, index_client_1.wait)(1000);\n }\n }\n if (!header)\n throw new index_client_1.sdk.WERR_INTERNAL('no header should have returned false or thrown an error.');\n this.cache[height] = header.merkleRoot;\n if (header.merkleRoot !== root) {\n return false;\n }\n return true;\n }\n}\nexports.ChaintracksChainTracker = ChaintracksChainTracker;\n//# sourceMappingURL=ChaintracksChainTracker.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/ChaintracksChainTracker.js?\n}"); /***/ }), @@ -3106,94 +3161,6 @@ /***/ }), -/***/ 
"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Api/BulkIngestorApi.js": -/*!******************************************************************************************************************!*\ - !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Api/BulkIngestorApi.js ***! - \******************************************************************************************************************/ -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\n//# sourceMappingURL=BulkIngestorApi.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Api/BulkIngestorApi.js?\n}"); - -/***/ }), - -/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Api/BulkStorageApi.js": -/*!*****************************************************************************************************************!*\ - !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Api/BulkStorageApi.js ***! - \*****************************************************************************************************************/ -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\n//# sourceMappingURL=BulkStorageApi.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Api/BulkStorageApi.js?\n}"); - -/***/ }), - -/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Api/ChaintracksApi.js": -/*!*****************************************************************************************************************!*\ - !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Api/ChaintracksApi.js ***! 
- \*****************************************************************************************************************/ -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\n//# sourceMappingURL=ChaintracksApi.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Api/ChaintracksApi.js?\n}"); - -/***/ }), - -/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Api/ChaintracksFetchApi.js": -/*!**********************************************************************************************************************!*\ - !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Api/ChaintracksFetchApi.js ***! - \**********************************************************************************************************************/ -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\n//# sourceMappingURL=ChaintracksFetchApi.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Api/ChaintracksFetchApi.js?\n}"); - -/***/ }), - -/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Api/ChaintracksFsApi.js": -/*!*******************************************************************************************************************!*\ - !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Api/ChaintracksFsApi.js ***! 
- \*******************************************************************************************************************/ -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\n//# sourceMappingURL=ChaintracksFsApi.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Api/ChaintracksFsApi.js?\n}"); - -/***/ }), - -/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Api/ChaintracksStorageApi.js": -/*!************************************************************************************************************************!*\ - !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Api/ChaintracksStorageApi.js ***! - \************************************************************************************************************************/ -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\n//# sourceMappingURL=ChaintracksStorageApi.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Api/ChaintracksStorageApi.js?\n}"); - -/***/ }), - -/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Api/LiveIngestorApi.js": -/*!******************************************************************************************************************!*\ - !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Api/LiveIngestorApi.js ***! 
- \******************************************************************************************************************/ -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\n//# sourceMappingURL=LiveIngestorApi.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Api/LiveIngestorApi.js?\n}"); - -/***/ }), - -/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Chaintracks.js": -/*!**********************************************************************************************************!*\ - !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Chaintracks.js ***! - \**********************************************************************************************************/ -/***/ ((__unused_webpack_module, exports, __webpack_require__) => { - -"use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.Chaintracks = void 0;\nconst dirtyHashes_1 = __webpack_require__(/*! ./util/dirtyHashes */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/dirtyHashes.js\");\nconst blockHeaderUtilities_1 = __webpack_require__(/*! ./util/blockHeaderUtilities */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/blockHeaderUtilities.js\");\nconst utilityHelpers_noBuffer_1 = __webpack_require__(/*! ../../../utility/utilityHelpers.noBuffer */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.noBuffer.js\");\nconst HeightRange_1 = __webpack_require__(/*! ./util/HeightRange */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/HeightRange.js\");\nconst SingleWriterMultiReaderLock_1 = __webpack_require__(/*! 
./util/SingleWriterMultiReaderLock */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/SingleWriterMultiReaderLock.js\");\nconst utilityHelpers_1 = __webpack_require__(/*! ../../../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nclass Chaintracks {\n static createOptions(chain) {\n return {\n chain,\n storage: undefined,\n bulkIngestors: [],\n liveIngestors: [],\n addLiveRecursionLimit: 36,\n logging: 'all',\n readonly: false\n };\n }\n constructor(options) {\n this.options = options;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n this.log = () => { };\n // Collection of all long running \"threads\": main thread (liveHeaders consumer / monitor) and each live header ingestor.\n this.promises = [];\n this.callbacks = { header: [], reorg: [] };\n this.baseHeaders = [];\n this.liveHeaders = [];\n this.addLiveRecursionLimit = 11;\n this.available = false;\n this.subscriberCallbacksEnabled = false;\n this.stopMainThread = true;\n this.lastPresentHeight = 0;\n this.lastPresentHeightMsecs = 0;\n this.lastPresentHeightMaxAge = 60 * 1000; // 1 minute, in milliseconds\n this.lock = new SingleWriterMultiReaderLock_1.SingleWriterMultiReaderLock();\n if (!options.storage)\n throw new Error('storage is required.');\n if (!options.bulkIngestors || options.bulkIngestors.length < 1)\n throw new Error('At least one bulk ingestor is required.');\n if (!options.liveIngestors || options.liveIngestors.length < 1)\n throw new Error('At least one live ingestor is required.');\n this.chain = options.chain;\n this.readonly = options.readonly;\n this.storage = options.storage;\n this.bulkIngestors = options.bulkIngestors;\n this.liveIngestors = options.liveIngestors;\n this.addLiveRecursionLimit = options.addLiveRecursionLimit;\n if (options.logging === 'all')\n this.log = (...args) => console.log(new Date().toISOString(), ...args);\n this.log(`New ChaintracksBase 
Instance Constructed ${options.chain}Net`);\n }\n async getChain() {\n return this.chain;\n }\n /**\n * Caches and returns most recently sourced value if less than one minute old.\n * @returns the current externally available chain height (via bulk ingestors).\n */\n async getPresentHeight() {\n const now = Date.now();\n if (this.lastPresentHeight && now - this.lastPresentHeightMsecs < this.lastPresentHeightMaxAge) {\n return this.lastPresentHeight;\n }\n const presentHeights = [];\n for (const bulk of this.bulkIngestors) {\n try {\n const presentHeight = await bulk.getPresentHeight();\n if (presentHeight)\n presentHeights.push(presentHeight);\n }\n catch (uerr) {\n console.log(uerr);\n }\n }\n const presentHeight = presentHeights.length ? Math.max(...presentHeights) : undefined;\n if (!presentHeight)\n throw new Error('At least one bulk ingestor must implement getPresentHeight.');\n this.lastPresentHeight = presentHeight;\n this.lastPresentHeightMsecs = now;\n return presentHeight;\n }\n async currentHeight() {\n return await this.getPresentHeight();\n }\n async subscribeHeaders(listener) {\n const ID = (0, utilityHelpers_1.randomBytesBase64)(8);\n this.callbacks.header[ID] = listener;\n return ID;\n }\n async subscribeReorgs(listener) {\n const ID = (0, utilityHelpers_1.randomBytesBase64)(8);\n this.callbacks.reorg[ID] = listener;\n return ID;\n }\n async unsubscribe(subscriptionId) {\n let success = true;\n if (this.callbacks.header[subscriptionId])\n delete this.callbacks.header[subscriptionId];\n else if (this.callbacks.reorg[subscriptionId])\n delete this.callbacks.reorg[subscriptionId];\n else\n success = false;\n return success;\n }\n /**\n * Queues a potentially new, unknown header for consideration as an addition to the chain.\n * When the header is considered, if the prior header is unknown, recursive calls to the\n * bulk ingestors will be attempted to resolve the linkage up to a depth of `addLiveRecursionLimit`.\n *\n * Headers are considered in the 
order they were added.\n *\n * @param header\n */\n async addHeader(header) {\n this.baseHeaders.push(header);\n }\n /**\n * If not already available, takes a writer lock to queue calls until available.\n * Becoming available starts by initializing ingestors and main thread,\n * and ends when main thread sets `available`.\n * Note that the main thread continues running and takes additional write locks\n * itself when already available.\n *\n * @returns when available for client requests\n */\n async makeAvailable() {\n if (this.available)\n return;\n await this.lock.withWriteLock(async () => {\n // Only the first call proceeds to initialize...\n if (this.available)\n return;\n // Make sure database schema exists and is updated...\n await this.storage.migrateLatest();\n for (const bulkIn of this.bulkIngestors)\n await bulkIn.setStorage(this.storage);\n for (const liveIn of this.liveIngestors)\n await liveIn.setStorage(this.storage);\n // Start all live ingestors to push new headers onto liveHeaders... 
each long running.\n for (const liveIngestor of this.liveIngestors)\n this.promises.push(liveIngestor.startListening(this.liveHeaders));\n // Start mai loop to shift out liveHeaders...once sync'd, will set `available` true.\n this.promises.push(this.mainThreadShiftLiveHeaders());\n // Wait for the main thread to finish initial sync.\n while (!this.available) {\n await (0, utilityHelpers_1.wait)(100);\n }\n });\n }\n async startPromises() {\n if (this.promises.length > 0 || this.stopMainThread !== true)\n return;\n }\n async destroy() {\n if (!this.available)\n return;\n await this.lock.withWriteLock(async () => {\n if (!this.available || this.stopMainThread)\n return;\n this.log('Shutting Down');\n this.stopMainThread = true;\n for (const liveIn of this.liveIngestors)\n await liveIn.shutdown();\n for (const bulkIn of this.bulkIngestors)\n await bulkIn.shutdown();\n await Promise.all(this.promises);\n await this.storage.destroy();\n this.available = false;\n this.stopMainThread = false;\n this.log('Shutdown');\n });\n }\n async listening() {\n return this.makeAvailable();\n }\n async isListening() {\n return this.available;\n }\n async isSynchronized() {\n await this.makeAvailable();\n // TODO add synchronized flag... 
false while bulksyncing...\n return true;\n }\n async findHeaderForHeight(height) {\n await this.makeAvailable();\n return this.lock.withReadLock(async () => this.findHeaderForHeightNoLock(height));\n }\n async findHeaderForHeightNoLock(height) {\n return await this.storage.findHeaderForHeightOrUndefined(height);\n }\n async findHeaderForBlockHash(hash) {\n await this.makeAvailable();\n return this.lock.withReadLock(async () => this.findHeaderForBlockHashNoLock(hash));\n }\n async findHeaderForBlockHashNoLock(hash) {\n return (await this.storage.findLiveHeaderForBlockHash(hash)) || undefined;\n }\n async isValidRootForHeight(root, height) {\n const r = await this.findHeaderForHeight(height);\n if (!r)\n return false;\n const isValid = root === r.merkleRoot;\n return isValid;\n }\n async getInfo() {\n await this.makeAvailable();\n return this.lock.withReadLock(async () => this.getInfoNoLock());\n }\n async getInfoNoLock() {\n const liveRange = await this.storage.getLiveHeightRange();\n const info = {\n chain: this.chain,\n heightBulk: liveRange.minHeight - 1,\n heightLive: liveRange.maxHeight,\n storage: this.storage.constructor.name,\n bulkIngestors: this.bulkIngestors.map(bulkIngestor => bulkIngestor.constructor.name),\n liveIngestors: this.liveIngestors.map(liveIngestor => liveIngestor.constructor.name),\n packages: []\n };\n return info;\n }\n async getHeaders(height, count) {\n await this.makeAvailable();\n return this.lock.withReadLock(async () => (0, utilityHelpers_noBuffer_1.asString)(await this.storage.getHeaders(height, count)));\n }\n async findChainTipHeader() {\n await this.makeAvailable();\n return this.lock.withReadLock(async () => await this.storage.findChainTipHeader());\n }\n async findChainTipHash() {\n await this.makeAvailable();\n return this.lock.withReadLock(async () => await this.storage.findChainTipHash());\n }\n async findLiveHeaderForBlockHash(hash) {\n await this.makeAvailable();\n const header = await this.lock.withReadLock(async () => 
await this.storage.findLiveHeaderForBlockHash(hash));\n return header || undefined;\n }\n async findChainWorkForBlockHash(hash) {\n const header = await this.findLiveHeaderForBlockHash(hash);\n return header === null || header === void 0 ? void 0 : header.chainWork;\n }\n /**\n * @returns true iff all headers from height zero through current chainTipHeader height can be retreived and form a valid chain.\n */\n async validate() {\n let h = await this.findChainTipHeader();\n while (h.height > 0) {\n const hp = await this.findHeaderForHeight(h.height - 1);\n if (!hp || hp.hash !== h.previousHash)\n throw new Error(`validation fails at height ${h.height}`);\n h = hp;\n if (10000 * Math.floor(h.height / 10000) === h.height)\n this.log(`height ${h.height}`);\n }\n this.log('validated');\n return true;\n }\n async exportBulkHeaders(toFolder, toFs, sourceUrl, toHeadersPerFile, maxHeight) {\n toHeadersPerFile || (toHeadersPerFile = 100000);\n const bulk = this.storage.bulkManager;\n await bulk.exportHeadersToFs(toFs, toHeadersPerFile, toFolder, sourceUrl, maxHeight);\n }\n async startListening() {\n this.makeAvailable();\n }\n async syncBulkStorage(presentHeight, initialRanges) {\n await this.lock.withWriteLock(async () => await this.syncBulkStorageNoLock(presentHeight, initialRanges));\n }\n async syncBulkStorageNoLock(presentHeight, initialRanges) {\n let newLiveHeaders = [];\n let bulkDone = false;\n let before = initialRanges;\n let after = before;\n let added = HeightRange_1.HeightRange.empty;\n let done = false;\n for (; !done;) {\n for (const bulk of this.bulkIngestors) {\n try {\n const r = await bulk.synchronize(presentHeight, before, newLiveHeaders);\n newLiveHeaders = r.liveHeaders;\n after = await this.storage.getAvailableHeightRanges();\n added = after.bulk.above(before.bulk);\n before = after;\n this.log(`Bulk Ingestor: ${added.length} added with ${newLiveHeaders.length} live headers from ${bulk.constructor.name}`);\n if (r.done) {\n done = true;\n break;\n 
}\n }\n catch (uerr) {\n console.log(uerr);\n }\n }\n if (bulkDone)\n break;\n }\n this.liveHeaders.unshift(...newLiveHeaders);\n added = after.bulk.above(initialRanges.bulk);\n this.log(`syncBulkStorage done\n Before sync: bulk ${initialRanges.bulk}, live ${initialRanges.live}\n After sync: bulk ${after.bulk}, live ${after.live}\n ${added.length} headers added to bulk storage\n ${this.liveHeaders.length} headers forwarded to live header storage\n`);\n }\n async getMissingBlockHeader(hash) {\n for (const live of this.liveIngestors) {\n const header = await live.getHeaderByHash(hash);\n if (header)\n return header;\n }\n return undefined;\n }\n invalidInsertHeaderResult(ihr) {\n return ihr.noActiveAncestor || ihr.noTip || ihr.badPrev;\n }\n async addLiveHeader(header) {\n (0, blockHeaderUtilities_1.validateHeaderFormat)(header);\n (0, dirtyHashes_1.validateAgainstDirtyHashes)(header.hash);\n const ihr = this.available\n ? await this.lock.withWriteLock(async () => await this.storage.insertHeader(header))\n : await this.storage.insertHeader(header);\n if (this.invalidInsertHeaderResult(ihr))\n return ihr;\n if (this.subscriberCallbacksEnabled && ihr.added && ihr.isActiveTip) {\n // If a new active chaintip has been added, notify subscribed event listeners...\n for (const id in this.callbacks.header) {\n const addListener = this.callbacks.header[id];\n if (addListener) {\n try {\n addListener(header);\n }\n catch (_a) {\n /* ignore all errors thrown */\n }\n }\n }\n if (ihr.reorgDepth > 0 && ihr.priorTip) {\n // If the new header was also a reorg, notify subscribed event listeners...\n for (const id in this.callbacks.reorg) {\n const reorgListener = this.callbacks.reorg[id];\n if (reorgListener) {\n try {\n reorgListener(ihr.reorgDepth, ihr.priorTip, header);\n }\n catch (_b) {\n /* ignore all errors thrown */\n }\n }\n }\n }\n }\n return ihr;\n }\n /**\n * Long running method terminated by setting `stopMainThread` false.\n *\n * The promise returned by this method is 
held in the `promises` array.\n *\n * When synchronized (bulk and live storage is valid up to most recent presentHeight),\n * this method will process headers from `baseHeaders` and `liveHeaders` arrays to extend the chain of headers.\n *\n * If a significant gap is detected between bulk+live and presentHeight, `syncBulkStorage` is called to re-establish sync.\n *\n * Periodically CDN bulk ingestor is invoked to check if incremental headers can be migrated to CDN backed files.\n */\n async mainThreadShiftLiveHeaders() {\n this.stopMainThread = false;\n let lastSyncCheck = Date.now();\n let lastBulkSync = Date.now();\n const cdnSyncRepeatMsecs = 24 * 60 * 60 * 1000; // 24 hours\n const syncCheckRepeatMsecs = 30 * 60 * 1000; // 30 minutes\n while (!this.stopMainThread) {\n // Review the need for bulk sync...\n const now = Date.now();\n lastSyncCheck = now;\n const presentHeight = await this.getPresentHeight();\n const before = await this.storage.getAvailableHeightRanges();\n // Skip bulk sync if within less than half the recursion limit of present height\n let skipBulkSync = !before.live.isEmpty && before.live.maxHeight >= presentHeight - this.addLiveRecursionLimit / 2;\n if (skipBulkSync && now - lastSyncCheck > cdnSyncRepeatMsecs) {\n // If we haven't re-synced in a long time, do it just to check for a CDN update.\n skipBulkSync = false;\n }\n this.log(`Chaintracks Update Services: Bulk Header Sync Review\n presentHeight=${presentHeight} addLiveRecursionLimit=${this.addLiveRecursionLimit}\n Before synchronize: bulk ${before.bulk}, live ${before.live}\n ${skipBulkSync ? 
'Skipping' : 'Starting'} syncBulkStorage.\n`);\n if (!skipBulkSync) {\n // Bring bulk storage up-to-date and (re-)initialize liveHeaders\n lastBulkSync = now;\n if (this.available)\n // Once available, initial write lock is released, take a new one to update bulk storage.\n await this.syncBulkStorage(presentHeight, before);\n else\n // While still not available, the makeAvailable write lock is held.\n await this.syncBulkStorageNoLock(presentHeight, before);\n }\n let count = 0;\n let liveHeaderDupes = 0;\n let needSyncCheck = false;\n for (; !needSyncCheck && !this.stopMainThread;) {\n let header = this.liveHeaders.shift();\n if (header) {\n // Process a \"live\" block header...\n let recursions = this.addLiveRecursionLimit;\n for (; !needSyncCheck && !this.stopMainThread;) {\n const ihr = await this.addLiveHeader(header);\n if (this.invalidInsertHeaderResult(ihr)) {\n this.log(`Ignoring liveHeader ${header.height} ${header.hash} due to invalid insert result.`);\n needSyncCheck = true;\n }\n else if (ihr.noPrev) {\n // Previous header is unknown, request it by hash from the network and try adding it first...\n if (recursions-- <= 0) {\n // Ignore this header...\n this.log(`Ignoring liveHeader ${header.height} ${header.hash} addLiveRecursionLimit=${this.addLiveRecursionLimit} exceeded.`);\n needSyncCheck = true;\n }\n else {\n const hash = header.previousHash;\n const prevHeader = await this.getMissingBlockHeader(hash);\n if (!prevHeader) {\n this.log(`Ignoring liveHeader ${header.height} ${header.hash} failed to find previous header by hash ${(0, utilityHelpers_noBuffer_1.asString)(hash)}`);\n needSyncCheck = true;\n }\n else {\n // Switch to trying to add prevHeader, unshifting current header to try it again after prevHeader exists.\n this.liveHeaders.unshift(header);\n header = prevHeader;\n }\n }\n }\n else {\n if (this.subscriberCallbacksEnabled)\n this.log(`addLiveHeader ${header.height}${ihr.added ? ' added' : ''}${ihr.dupe ? ' dupe' : ''}${ihr.isActiveTip ? 
' isActiveTip' : ''}${ihr.reorgDepth ? ' reorg depth ' + ihr.reorgDepth : ''}${ihr.noPrev ? ' noPrev' : ''}${ihr.noActiveAncestor || ihr.noTip || ihr.badPrev ? ' error' : ''}`);\n if (ihr.dupe) {\n liveHeaderDupes++;\n }\n // Header wasn't invalid and previous header is known. If it was successfully added, count it as a win.\n if (ihr.added) {\n count++;\n }\n break;\n }\n }\n }\n else {\n // There are no liveHeaders currently to process, check the out-of-band baseHeaders channel (`addHeader` method called by a client).\n const bheader = this.baseHeaders.shift();\n if (bheader) {\n const prev = await this.storage.findLiveHeaderForBlockHash(bheader.previousHash);\n if (!prev) {\n // Ignoring attempt to add a baseHeader with unknown previous hash, no attempt made to find previous header(s).\n this.log(`Ignoring header with unknown previousHash ${bheader.previousHash} in live storage.`);\n // Does not trigger a re-sync.\n }\n else {\n const header = {\n ...bheader,\n height: prev.height + 1,\n hash: (0, blockHeaderUtilities_1.blockHash)(bheader)\n };\n const ihr = await this.addLiveHeader(header);\n if (this.invalidInsertHeaderResult(ihr)) {\n this.log(`Ignoring invalid baseHeader ${header.height} ${header.hash}.`);\n }\n else {\n if (this.subscriberCallbacksEnabled)\n this.log(`addBaseHeader ${header.height}${ihr.added ? ' added' : ''}${ihr.dupe ? ' dupe' : ''}${ihr.isActiveTip ? ' isActiveTip' : ''}${ihr.reorgDepth ? ' reorg depth ' + ihr.reorgDepth : ''}${ihr.noPrev ? ' noPrev' : ''}${ihr.noActiveAncestor || ihr.noTip || ihr.badPrev ? 
' error' : ''}`);\n // baseHeader was successfully added.\n if (ihr.added) {\n count++;\n }\n }\n }\n }\n else {\n // There are no liveHeaders and no baseHeaders to add,\n if (count > 0) {\n if (liveHeaderDupes > 0) {\n this.log(`${liveHeaderDupes} duplicate headers ignored.`);\n liveHeaderDupes = 0;\n }\n const updated = await this.storage.getAvailableHeightRanges();\n this.log(`${count} live headers added: bulk ${updated.bulk}, live ${updated.live}`);\n count = 0;\n }\n if (!this.subscriberCallbacksEnabled) {\n const live = await this.storage.getLiveHeightRange();\n if (!live.isEmpty) {\n this.subscriberCallbacksEnabled = true;\n this.log(`listening at height of ${live.maxHeight}`);\n }\n }\n if (!this.available) {\n this.available = true;\n }\n needSyncCheck = Date.now() - lastSyncCheck > syncCheckRepeatMsecs;\n // If we aren't going to review sync, wait before checking input queues again\n if (!needSyncCheck)\n await (0, utilityHelpers_1.wait)(1000);\n }\n }\n }\n }\n }\n}\nexports.Chaintracks = Chaintracks;\n//# sourceMappingURL=Chaintracks.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Chaintracks.js?\n}"); - -/***/ }), - /***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/ChaintracksServiceClient.js": /*!***********************************************************************************************************************!*\ !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/ChaintracksServiceClient.js ***! @@ -3201,205 +3168,18 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.ChaintracksServiceClient = void 0;\nconst utilityHelpers_noBuffer_1 = __webpack_require__(/*! 
../../../utility/utilityHelpers.noBuffer */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.noBuffer.js\");\n/**\n * Connects to a ChaintracksService to implement 'ChaintracksClientApi'\n *\n */\nclass ChaintracksServiceClient {\n static createChaintracksServiceClientOptions() {\n const options = {\n useAuthrite: false\n };\n return options;\n }\n constructor(chain, serviceUrl, options) {\n this.chain = chain;\n this.serviceUrl = serviceUrl;\n this.options = options || ChaintracksServiceClient.createChaintracksServiceClientOptions();\n }\n subscribeHeaders(listener) {\n throw new Error('Method not implemented.');\n }\n subscribeReorgs(listener) {\n throw new Error('Method not implemented.');\n }\n unsubscribe(subscriptionId) {\n throw new Error('Method not implemented.');\n }\n async currentHeight() {\n return await this.getPresentHeight();\n }\n async isValidRootForHeight(root, height) {\n const r = await this.findHeaderForHeight(height);\n if (!r)\n return false;\n const isValid = root === (0, utilityHelpers_noBuffer_1.asString)(r.merkleRoot);\n return isValid;\n }\n async getJsonOrUndefined(path) {\n let e = undefined;\n for (let retry = 0; retry < 3; retry++) {\n try {\n const r = await fetch(`${this.serviceUrl}${path}`);\n const v = await r.json();\n if (v.status === 'success')\n return v.value;\n else\n e = new Error(JSON.stringify(v));\n }\n catch (eu) {\n e = eu;\n }\n if (e && e.name !== 'ECONNRESET')\n break;\n }\n if (e)\n throw e;\n }\n async getJson(path) {\n const r = await this.getJsonOrUndefined(path);\n if (r === undefined)\n throw new Error('Value was undefined. 
Requested object may not exist.');\n return r;\n }\n async postJsonVoid(path, params) {\n const headers = {};\n headers['Content-Type'] = 'application/json';\n const r = await fetch(`${this.serviceUrl}${path}`, {\n body: JSON.stringify(params),\n method: 'POST',\n headers\n //cache: 'no-cache',\n });\n try {\n const s = await r.json();\n if (s.status === 'success')\n return;\n throw new Error(JSON.stringify(s));\n }\n catch (e) {\n console.log(`Exception: ${JSON.stringify(e)}`);\n throw new Error(JSON.stringify(e));\n }\n }\n //\n // HTTP API FUNCTIONS\n //\n async addHeader(header) {\n const r = await this.postJsonVoid('/addHeaderHex', header);\n if (typeof r === 'string')\n throw new Error(r);\n }\n async startListening() {\n await this.getPresentHeight();\n }\n async listening() {\n await this.getPresentHeight();\n }\n async getChain() {\n return this.chain;\n //return await this.getJson('/getChain')\n }\n async isListening() {\n try {\n await this.getPresentHeight();\n return true;\n }\n catch (_a) {\n return false;\n }\n }\n async isSynchronized() {\n return await this.isListening();\n }\n async getPresentHeight() {\n return await this.getJson('/getPresentHeight');\n }\n async getInfo() {\n return await this.getJson('/getInfo');\n }\n async findChainTipHeader() {\n return await this.getJson('/findChainTipHeaderHex');\n }\n async findChainTipHash() {\n return await this.getJson('/findChainTipHashHex');\n }\n async getHeaders(height, count) {\n return await this.getJson(`/getHeaders?height=${height}&count=${count}`);\n }\n async findHeaderForHeight(height) {\n return await this.getJsonOrUndefined(`/findHeaderHexForHeight?height=${height}`);\n }\n async findHeaderForBlockHash(hash) {\n return await this.getJsonOrUndefined(`/findHeaderHexForBlockHash?hash=${hash}`);\n }\n}\nexports.ChaintracksServiceClient = ChaintracksServiceClient;\n//# sourceMappingURL=ChaintracksServiceClient.js.map\n\n//# 
sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/ChaintracksServiceClient.js?\n}"); - -/***/ }), - -/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/BulkIngestorBase.js": -/*!**********************************************************************************************************************!*\ - !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/BulkIngestorBase.js ***! - \**********************************************************************************************************************/ -/***/ ((__unused_webpack_module, exports, __webpack_require__) => { - -"use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.BulkIngestorBase = void 0;\nconst HeightRange_1 = __webpack_require__(/*! ../util/HeightRange */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/HeightRange.js\");\nclass BulkIngestorBase {\n /**\n *\n * @param chain\n * @param localCachePath defaults to './data/ingest_headers/'\n * @returns\n */\n static createBulkIngestorBaseOptions(chain) {\n const options = {\n chain,\n jsonResource: `${chain}NetBlockHeaders.json`,\n bypassLiveEnabled: true\n };\n return options;\n }\n constructor(options) {\n if (!options.jsonResource)\n throw new Error('The jsonFilename options property is required.');\n this.chain = options.chain;\n this.jsonFilename = options.jsonResource;\n this.bypassLiveEnabled = options.bypassLiveEnabled;\n }\n async setStorage(storage) {\n this.storageEngine = storage;\n }\n async shutdown() { }\n storageOrUndefined() {\n return this.storageEngine;\n }\n storage() {\n if (!this.storageEngine)\n throw new Error('storageEngine must be set.');\n return this.storageEngine;\n }\n /**\n * At least one derived BulkIngestor must override this method to provide the current height of the active chain tip.\n * 
@returns undefined unless overridden\n */\n async getPresentHeight() {\n return undefined;\n }\n /**\n * A BulkIngestor has two potential goals:\n * 1. To source missing bulk headers and include them in bulk storage.\n * 2. To source missing live headers to be forwarded to live storage.\n *\n * @param presentHeight current height of the active chain tip, may lag the true value.\n * @param before current bulk and live storage height ranges, either may be empty.\n * @param priorLiveHeaders any headers accumulated by prior bulk ingestor(s) that are too recent for bulk storage.\n * @returns updated priorLiveHeaders including any accumulated by this ingestor\n */\n async synchronize(presentHeight, before, priorLiveHeaders) {\n const storage = this.storage();\n const r = {\n liveHeaders: priorLiveHeaders,\n liveRange: HeightRange_1.HeightRange.from(priorLiveHeaders),\n done: false,\n log: ''\n };\n // Decisions to be made:\n // Q1. Are we already done?\n // Q2. Are there live headers that should be migrated to bulk?\n // Q3. 
What range of headers do we still need to retrieve?\n // Q1: We are done if we have enough live headers and they include presentHeight.\n const currentFullRange = before.bulk.union(before.live);\n if (currentFullRange.maxHeight >= presentHeight) {\n r.done = true;\n return r;\n }\n const targetBulkRange = new HeightRange_1.HeightRange(0, Math.max(0, presentHeight - storage.liveHeightThreshold));\n let missingBulkRange = targetBulkRange.subtract(before.bulk);\n const updateMissingBulkRange = async () => {\n before = await storage.getAvailableHeightRanges();\n missingBulkRange = targetBulkRange.subtract(before.bulk);\n };\n // Q2: If missingBulkRange isn't empty and there are live headers in storage,\n // migrate from existing live headers in excess of reorgHeightThreshold.\n if (!missingBulkRange.isEmpty && !before.live.isEmpty) {\n const countToMigrate = Math.min(missingBulkRange.length, Math.max(0, before.live.length - storage.reorgHeightThreshold));\n r.log += `Migrating ${countToMigrate} live headers to bulk storage.\\n`;\n await storage.migrateLiveToBulk(countToMigrate);\n await updateMissingBulkRange();\n if (!missingBulkRange.isEmpty) {\n // If there are still missing bulk headers, MUST flush live storage.\n const countToFlush = before.live.length;\n r.log += `Flushing ${countToFlush} live headers from live storage.\\n`;\n await storage.deleteLiveBlockHeaders();\n await updateMissingBulkRange();\n }\n }\n const targetFullRange = new HeightRange_1.HeightRange(0, presentHeight);\n // Q3: What to fetch...\n let rangeToFetch;\n if (missingBulkRange.isEmpty) {\n // If there are no missing bulk headers, we don't need existing bulk range.\n rangeToFetch = targetFullRange.subtract(before.bulk);\n // And if there are live headers in excess of reorgHeightThreshold, they can be skipped as well.\n if (before.live.length > storage.reorgHeightThreshold) {\n rangeToFetch = rangeToFetch.subtract(new HeightRange_1.HeightRange(before.live.minHeight, before.live.maxHeight - 
storage.reorgHeightThreshold));\n }\n }\n else {\n // If there are missing bulk headers, ingest from start of missing through present height.\n rangeToFetch = new HeightRange_1.HeightRange(missingBulkRange.minHeight, presentHeight);\n }\n const newLiveHeaders = await this.fetchHeaders(before, rangeToFetch, missingBulkRange, priorLiveHeaders);\n await updateMissingBulkRange();\n r.liveHeaders = newLiveHeaders;\n r.liveRange = HeightRange_1.HeightRange.from(r.liveHeaders);\n r.done = missingBulkRange.isEmpty && r.liveRange.maxHeight >= presentHeight;\n return r;\n }\n}\nexports.BulkIngestorBase = BulkIngestorBase;\n//# sourceMappingURL=BulkIngestorBase.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/BulkIngestorBase.js?\n}"); - -/***/ }), - -/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/BulkIngestorCDN.js": -/*!*********************************************************************************************************************!*\ - !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/BulkIngestorCDN.js ***! - \*********************************************************************************************************************/ -/***/ ((__unused_webpack_module, exports, __webpack_require__) => { - -"use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.BulkIngestorCDN = void 0;\nconst BulkIngestorBase_1 = __webpack_require__(/*! ./BulkIngestorBase */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/BulkIngestorBase.js\");\nconst sdk_1 = __webpack_require__(/*! ../../../../sdk */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/index.js\");\nconst BulkFileDataManager_1 = __webpack_require__(/*! 
../util/BulkFileDataManager */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/BulkFileDataManager.js\");\nclass BulkIngestorCDN extends BulkIngestorBase_1.BulkIngestorBase {\n /**\n *\n * @param chain\n * @param localCachePath defaults to './data/bulk_cdn_headers/'\n * @returns\n */\n static createBulkIngestorCDNOptions(chain, cdnUrl, fetch, maxPerFile) {\n const options = {\n ...BulkIngestorBase_1.BulkIngestorBase.createBulkIngestorBaseOptions(chain),\n fetch,\n jsonResource: `${chain}NetBlockHeaders.json`,\n cdnUrl,\n maxPerFile\n };\n return options;\n }\n constructor(options) {\n super(options);\n if (!options.jsonResource)\n throw new Error('The jsonResource options property is required.');\n if (!options.cdnUrl)\n throw new Error('The cdnUrl options property is required.');\n this.fetch = options.fetch;\n this.jsonResource = options.jsonResource;\n this.cdnUrl = options.cdnUrl;\n this.maxPerFile = options.maxPerFile;\n }\n async getPresentHeight() {\n return undefined;\n }\n getJsonHttpHeaders() {\n const headers = {\n Accept: 'application/json'\n };\n return headers;\n }\n /**\n * A BulkFile CDN serves a JSON BulkHeaderFilesInfo resource which lists all the available binary bulk header files available and associated metadata.\n *\n * The term \"CDN file\" is used for a local bulk file that has a sourceUrl. (Not undefined)\n * The term \"incremental file\" is used for the local bulk file that holds all the non-CDN bulk headers and must chain to the live headers if there are any.\n *\n * Bulk ingesting from a CDN happens in one of three contexts:\n *\n * 1. Cold Start: No local bulk or live headers.\n * 2. Incremental: Available CDN files extend into an existing incremental file but not into the live headers.\n * 3. 
Replace: Available CDN files extend into live headers.\n *\n * Context Cold Start:\n * - The CDN files are selected in height order, starting at zero, always choosing the largest count less than the local maximum (maxPerFile).\n *\n * Context Incremental:\n * - Last existing CDN file is updated if CDN now has a higher count.\n * - Additional CDN files are added as in Cold Start.\n * - The existing incremental file is truncated or deleted.\n *\n * Context Replace:\n * - Existing live headers are truncated or deleted.\n * - Proceed as context Incremental.\n *\n * @param before bulk and live range of headers before ingesting any new headers.\n * @param fetchRange total range of header heights needed including live headers\n * @param bulkRange range of missing bulk header heights required.\n * @param priorLiveHeaders\n * @returns\n */\n async fetchHeaders(before, fetchRange, bulkRange, priorLiveHeaders) {\n const storage = this.storage();\n const toUrl = (file) => this.fetch.pathJoin(this.cdnUrl, file);\n const url = toUrl(this.jsonResource);\n this.availableBulkFiles = await this.fetch.fetchJson(url);\n if (!this.availableBulkFiles) {\n throw new sdk_1.WERR_INVALID_PARAMETER(`${this.jsonResource}`, `a valid BulkHeaderFilesInfo JSON resource available from ${url}`);\n }\n this.selectedFiles = (0, BulkFileDataManager_1.selectBulkHeaderFiles)(this.availableBulkFiles.files, this.chain, this.maxPerFile || this.availableBulkFiles.headersPerFile);\n for (const bf of this.selectedFiles) {\n if (!bf.fileHash) {\n throw new sdk_1.WERR_INVALID_PARAMETER(`fileHash`, `valid for alll files in ${this.jsonResource} from ${url}`);\n }\n if (!bf.chain || bf.chain !== this.chain) {\n throw new sdk_1.WERR_INVALID_PARAMETER(`chain`, `\"${this.chain}\" for all files in ${this.jsonResource} from ${url}`);\n }\n if (!bf.sourceUrl || bf.sourceUrl !== this.cdnUrl)\n bf.sourceUrl = this.cdnUrl;\n }\n let log = 'BulkIngestorCDN fetchHeaders log:\\n';\n log += ` url: ${url}\\n`;\n 
this.currentRange = await storage.bulkManager.getHeightRange();\n log += ` bulk range before: ${this.currentRange}\\n`;\n const r = await storage.bulkManager.merge(this.selectedFiles);\n this.currentRange = await storage.bulkManager.getHeightRange();\n log += ` bulk range after: ${this.currentRange}\\n`;\n for (const u of r.unchanged) {\n log += ` unchanged: ${u.fileName}, fileId=${u.fileId}\\n`;\n }\n for (const i of r.inserted) {\n log += ` inserted: ${i.fileName}, fileId=${i.fileId}\\n`;\n }\n for (const u of r.updated) {\n log += ` updated: ${u.fileName}, fileId=${u.fileId}\\n`;\n }\n console.log(log);\n return priorLiveHeaders;\n }\n}\nexports.BulkIngestorCDN = BulkIngestorCDN;\n//# sourceMappingURL=BulkIngestorCDN.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/BulkIngestorCDN.js?\n}"); - -/***/ }), - -/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/BulkIngestorCDNBabbage.js": -/*!****************************************************************************************************************************!*\ - !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/BulkIngestorCDNBabbage.js ***! - \****************************************************************************************************************************/ -/***/ ((__unused_webpack_module, exports, __webpack_require__) => { - -"use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.BulkIngestorCDNBabbage = void 0;\nconst BulkIngestorCDN_1 = __webpack_require__(/*! 
./BulkIngestorCDN */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/BulkIngestorCDN.js\");\nclass BulkIngestorCDNBabbage extends BulkIngestorCDN_1.BulkIngestorCDN {\n /**\n *\n * @param chain\n * @param rootFolder defaults to './data/bulk_cdn_babbage_headers/'\n * @returns\n */\n static createBulkIngestorCDNBabbageOptions(chain, fetch) {\n const options = {\n ...BulkIngestorCDN_1.BulkIngestorCDN.createBulkIngestorCDNOptions(chain, 'https://cdn.projectbabbage.com/blockheaders/', fetch)\n };\n return options;\n }\n}\nexports.BulkIngestorCDNBabbage = BulkIngestorCDNBabbage;\n//# sourceMappingURL=BulkIngestorCDNBabbage.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/BulkIngestorCDNBabbage.js?\n}"); - -/***/ }), - -/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/BulkIngestorWhatsOnChainCdn.js": -/*!*********************************************************************************************************************************!*\ - !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/BulkIngestorWhatsOnChainCdn.js ***! - \*********************************************************************************************************************************/ -/***/ ((__unused_webpack_module, exports, __webpack_require__) => { - -"use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.BulkIngestorWhatsOnChainCdn = void 0;\nconst utilityHelpers_1 = __webpack_require__(/*! ../../../../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst BulkIngestorBase_1 = __webpack_require__(/*! 
./BulkIngestorBase */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/BulkIngestorBase.js\");\nconst blockHeaderUtilities_1 = __webpack_require__(/*! ../util/blockHeaderUtilities */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/blockHeaderUtilities.js\");\nconst ChaintracksFetch_1 = __webpack_require__(/*! ../util/ChaintracksFetch */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/ChaintracksFetch.js\");\nconst WhatsOnChainServices_1 = __webpack_require__(/*! ./WhatsOnChainServices */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/WhatsOnChainServices.js\");\nclass BulkIngestorWhatsOnChainCdn extends BulkIngestorBase_1.BulkIngestorBase {\n /**\n *\n * @param chain\n * @param localCachePath defaults to './data/ingest_whatsonchain_headers'\n * @returns\n */\n static createBulkIngestorWhatsOnChainOptions(chain) {\n const options = {\n ...WhatsOnChainServices_1.WhatsOnChainServices.createWhatsOnChainServicesOptions(chain),\n ...BulkIngestorBase_1.BulkIngestorBase.createBulkIngestorBaseOptions(chain),\n idleWait: 5000\n };\n return options;\n }\n constructor(options) {\n super(options);\n this.stopOldListenersToken = { stop: undefined };\n this.idleWait = options.idleWait || 5000;\n this.woc = new WhatsOnChainServices_1.WhatsOnChainServices(options);\n this.fetch = options.fetch || new ChaintracksFetch_1.ChaintracksFetch();\n }\n async getPresentHeight() {\n const presentHeight = await this.woc.getChainTipHeight();\n (0, utilityHelpers_1.logger)(`presentHeight=${presentHeight}`);\n return presentHeight;\n }\n async fetchHeaders(before, fetchRange, bulkRange, priorLiveHeaders) {\n const oldHeaders = [];\n try {\n const ranges = await this.woc.getHeaderByteFileLinks(fetchRange, this.fetch);\n const headers = [];\n for (const range of ranges) {\n for (let height = range.range.minHeight; height <= 
range.range.maxHeight; height++) {\n if (fetchRange.contains(height)) {\n if (!range.data)\n range.data = await this.fetch.download(this.fetch.pathJoin(range.sourceUrl, range.fileName));\n const h = (0, blockHeaderUtilities_1.deserializeBlockHeader)(range.data, (height - range.range.minHeight) * 80, height);\n oldHeaders.push(h);\n }\n }\n }\n }\n catch (e) {\n (0, utilityHelpers_1.logger)(`Errors during WhatsOnChain ingestion:\\n${e}`);\n }\n const liveHeaders = await this.storage().addBulkHeaders(oldHeaders, bulkRange, priorLiveHeaders);\n return liveHeaders;\n }\n}\nexports.BulkIngestorWhatsOnChainCdn = BulkIngestorWhatsOnChainCdn;\n//# sourceMappingURL=BulkIngestorWhatsOnChainCdn.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/BulkIngestorWhatsOnChainCdn.js?\n}"); - -/***/ }), - -/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/LiveIngestorBase.js": -/*!**********************************************************************************************************************!*\ - !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/LiveIngestorBase.js ***! 
- \**********************************************************************************************************************/ -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.LiveIngestorBase = void 0;\n/**\n *\n */\nclass LiveIngestorBase {\n static createLiveIngestorBaseOptions(chain) {\n const options = {\n chain\n };\n return options;\n }\n constructor(options) {\n this.chain = options.chain;\n }\n /**\n * Release resources.\n * Override if required.\n */\n async shutdown() { }\n /**\n * Allocate resources.\n * @param storage coordinating storage engine.\n */\n async setStorage(storage) {\n this.storageEngine = storage;\n }\n /**\n *\n * @returns coordinating storage engine.\n */\n storage() {\n if (!this.storageEngine)\n throw new Error('storageEngine must be set.');\n return this.storageEngine;\n }\n}\nexports.LiveIngestorBase = LiveIngestorBase;\n//# sourceMappingURL=LiveIngestorBase.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/LiveIngestorBase.js?\n}"); - -/***/ }), - -/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/LiveIngestorWhatsOnChainPoll.js": -/*!**********************************************************************************************************************************!*\ - !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/LiveIngestorWhatsOnChainPoll.js ***! - \**********************************************************************************************************************************/ -/***/ ((__unused_webpack_module, exports, __webpack_require__) => { - -"use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.LiveIngestorWhatsOnChainPoll = void 0;\nconst utilityHelpers_1 = __webpack_require__(/*! 
../../../../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst LiveIngestorBase_1 = __webpack_require__(/*! ./LiveIngestorBase */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/LiveIngestorBase.js\");\nconst WhatsOnChainServices_1 = __webpack_require__(/*! ./WhatsOnChainServices */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/WhatsOnChainServices.js\");\n/**\n * Reports new headers by polling periodically.\n */\nclass LiveIngestorWhatsOnChainPoll extends LiveIngestorBase_1.LiveIngestorBase {\n static createLiveIngestorWhatsOnChainOptions(chain) {\n const options = {\n ...WhatsOnChainServices_1.WhatsOnChainServices.createWhatsOnChainServicesOptions(chain),\n ...LiveIngestorBase_1.LiveIngestorBase.createLiveIngestorBaseOptions(chain),\n idleWait: 100000\n };\n return options;\n }\n constructor(options) {\n super(options);\n this.done = false;\n this.idleWait = options.idleWait || 100000;\n this.woc = new WhatsOnChainServices_1.WhatsOnChainServices(options);\n }\n async getHeaderByHash(hash) {\n const header = await this.woc.getHeaderByHash(hash);\n return header;\n }\n async startListening(liveHeaders) {\n this.done = false;\n let lastHeaders = [];\n for (; !this.done;) {\n const headers = await this.woc.getHeaders();\n const newHeaders = headers.filter(h => !lastHeaders.some(lh => lh.hash === h.hash));\n for (const h of newHeaders) {\n const bh = (0, WhatsOnChainServices_1.wocGetHeadersHeaderToBlockHeader)(h);\n liveHeaders.unshift(bh);\n }\n lastHeaders = headers;\n for (let sec = 0; sec < 60 && !this.done; sec++) {\n // Only wait a second at a time so we notice `done` sooner...\n await (0, utilityHelpers_1.wait)(1000);\n }\n }\n console.log(`LiveIngestorWhatsOnChainPoll stopped`);\n }\n stopListening() {\n this.done = true;\n }\n async shutdown() {\n this.stopListening();\n 
}\n}\nexports.LiveIngestorWhatsOnChainPoll = LiveIngestorWhatsOnChainPoll;\n//# sourceMappingURL=LiveIngestorWhatsOnChainPoll.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/LiveIngestorWhatsOnChainPoll.js?\n}"); - -/***/ }), - -/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/WhatsOnChainServices.js": -/*!**************************************************************************************************************************!*\ - !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/WhatsOnChainServices.js ***! - \**************************************************************************************************************************/ -/***/ ((__unused_webpack_module, exports, __webpack_require__) => { - -"use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.WhatsOnChainServices = void 0;\nexports.wocGetHeadersHeaderToBlockHeader = wocGetHeadersHeaderToBlockHeader;\nconst WhatsOnChain_1 = __webpack_require__(/*! ../../../providers/WhatsOnChain */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/providers/WhatsOnChain.js\");\nconst ChaintracksFetch_1 = __webpack_require__(/*! ../util/ChaintracksFetch */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/ChaintracksFetch.js\");\nconst HeightRange_1 = __webpack_require__(/*! 
../util/HeightRange */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/HeightRange.js\");\nclass WhatsOnChainServices {\n static createWhatsOnChainServicesOptions(chain) {\n const options = {\n chain,\n apiKey: '',\n timeout: 30000,\n userAgent: 'BabbageWhatsOnChainServices',\n enableCache: true,\n chainInfoMsecs: 5000\n };\n return options;\n }\n constructor(options) {\n this.options = options;\n const config = {\n apiKey: this.options.apiKey,\n timeout: this.options.timeout,\n userAgent: this.options.userAgent,\n enableCache: this.options.enableCache\n };\n this.chain = options.chain;\n WhatsOnChainServices.chainInfoMsecs[this.chain] = options.chainInfoMsecs;\n this.woc = new WhatsOnChain_1.WhatsOnChain(this.chain, config);\n }\n async getHeaderByHash(hash) {\n const header = await this.woc.getBlockHeaderByHash(hash);\n return header;\n }\n async getChainInfo() {\n const now = new Date();\n let update = WhatsOnChainServices.chainInfo[this.chain] === undefined;\n if (!update && WhatsOnChainServices.chainInfoTime[this.chain] !== undefined) {\n const elapsed = now.getTime() - WhatsOnChainServices.chainInfoTime[this.chain].getTime();\n update = elapsed > WhatsOnChainServices.chainInfoMsecs[this.chain];\n }\n if (update) {\n WhatsOnChainServices.chainInfo[this.chain] = await this.woc.getChainInfo();\n WhatsOnChainServices.chainInfoTime[this.chain] = now;\n }\n if (!WhatsOnChainServices.chainInfo[this.chain])\n throw new Error('Unexpected failure to update chainInfo.');\n return WhatsOnChainServices.chainInfo[this.chain];\n }\n async getChainTipHeight() {\n return (await this.getChainInfo()).blocks;\n }\n async getChainTipHash() {\n return (await this.getChainInfo()).bestblockhash;\n }\n /**\n * @param fetch\n * @returns returns the last 10 block headers including height, size, chainwork...\n */\n async getHeaders(fetch) {\n fetch || (fetch = new ChaintracksFetch_1.ChaintracksFetch());\n const headers = await 
fetch.fetchJson(`https://api.whatsonchain.com/v1/bsv/${this.chain}/block/headers`);\n return headers;\n }\n async getHeaderByteFileLinks(neededRange, fetch) {\n fetch || (fetch = new ChaintracksFetch_1.ChaintracksFetch());\n const files = await fetch.fetchJson(`https://api.whatsonchain.com/v1/bsv/${this.chain}/block/headers/resources`);\n const r = [];\n let range = undefined;\n for (const link of files.files) {\n const parsed = parseFileLink(link);\n if (parsed === undefined)\n continue; // parse error, return empty result\n if (parsed.range === 'latest') {\n if (range === undefined)\n continue; // should not happen on valid input\n const fromHeight = range.maxHeight + 1;\n if (neededRange.maxHeight >= fromHeight) {\n // We need this range but don't know maxHeight\n const data = await fetch.download(link);\n range = new HeightRange_1.HeightRange(fromHeight, fromHeight + data.length / 80 - 1);\n if (!neededRange.intersect(range).isEmpty)\n r.push({ sourceUrl: parsed.sourceUrl, fileName: parsed.fileName, range, data });\n }\n }\n else {\n range = new HeightRange_1.HeightRange(parsed.range.fromHeight, parsed.range.toHeight);\n if (!neededRange.intersect(range).isEmpty)\n r.push({ sourceUrl: parsed.sourceUrl, fileName: parsed.fileName, range, data: undefined });\n }\n }\n return r;\n function parseFileLink(file) {\n const url = new URL(file);\n const parts = url.pathname.split('/');\n const fileName = parts.pop();\n if (!fileName)\n return undefined; // no file name, invalid link\n const sourceUrl = `${url.protocol}//${url.hostname}${parts.join('/')}`;\n const bits = fileName.split('_');\n if (bits.length === 1 && bits[0] === 'latest') {\n return { range: 'latest', sourceUrl, fileName };\n }\n if (bits.length === 3) {\n const fromHeight = parseInt(bits[0], 10);\n const toHeight = parseInt(bits[1], 10);\n if (Number.isInteger(fromHeight) && Number.isInteger(toHeight)) {\n return { range: { fromHeight, toHeight }, sourceUrl, fileName };\n }\n }\n return undefined;\n }\n 
}\n}\nexports.WhatsOnChainServices = WhatsOnChainServices;\nWhatsOnChainServices.chainInfo = [];\nWhatsOnChainServices.chainInfoTime = [];\nWhatsOnChainServices.chainInfoMsecs = [];\nfunction wocGetHeadersHeaderToBlockHeader(h) {\n const bits = typeof h.bits === 'string' ? parseInt(h.bits, 16) : h.bits;\n if (!h.previousblockhash) {\n h.previousblockhash = '0000000000000000000000000000000000000000000000000000000000000000'; // genesis\n }\n const bh = {\n height: h.height,\n hash: h.hash,\n version: h.version,\n previousHash: h.previousblockhash,\n merkleRoot: h.merkleroot,\n time: h.time,\n bits,\n nonce: h.nonce\n };\n return bh;\n}\n//# sourceMappingURL=WhatsOnChainServices.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/WhatsOnChainServices.js?\n}"); - -/***/ }), - -/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Storage/BulkStorageBase.js": -/*!**********************************************************************************************************************!*\ - !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Storage/BulkStorageBase.js ***! - \**********************************************************************************************************************/ -/***/ ((__unused_webpack_module, exports, __webpack_require__) => { - -"use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.BulkStorageBase = void 0;\nconst HeightRange_1 = __webpack_require__(/*! ../util/HeightRange */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/HeightRange.js\");\nconst utilityHelpers_noBuffer_1 = __webpack_require__(/*! 
../../../../utility/utilityHelpers.noBuffer */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.noBuffer.js\");\nclass BulkStorageBase {\n static createBulkStorageBaseOptions(chain, fs) {\n const options = {\n chain,\n fs\n };\n return options;\n }\n constructor(options) {\n this.chain = options.chain;\n this.fs = options.fs;\n }\n async shutdown() { }\n async findHeaderForHeight(height) {\n const header = await this.findHeaderForHeightOrUndefined(height);\n if (!header)\n throw new Error(`No header found for height ${height}`);\n return header;\n }\n async getHeightRange() {\n return new HeightRange_1.HeightRange(0, await this.getMaxHeight());\n }\n async setStorage(storage) { }\n async exportBulkHeaders(rootFolder, jsonFilename, maxPerFile) {\n const info = {\n rootFolder: rootFolder,\n jsonFilename: jsonFilename,\n files: [],\n headersPerFile: maxPerFile\n };\n const maxHeight = await this.getMaxHeight();\n const baseFilename = jsonFilename.slice(0, -5); // remove \".json\"\n let prevHash = '00'.repeat(32);\n let prevChainWork = '00'.repeat(32);\n for (let height = 0; height <= maxHeight; height += maxPerFile) {\n const count = Math.min(maxPerFile, maxHeight - height + 1);\n let file = {\n fileName: `${baseFilename}_${info.files.length}.headers`,\n firstHeight: height,\n prevHash: prevHash,\n prevChainWork: prevChainWork,\n count: count,\n lastHash: null,\n fileHash: null,\n lastChainWork: ''\n };\n const buffer = await this.headersToBuffer(height, count);\n await this.fs.writeFile(this.fs.pathJoin(rootFolder, file.fileName), buffer);\n /*\n file = await BulkFilesReader.validateHeaderFile(this.fs, rootFolder, file)\n if (!file.lastHash) throw new Error('Unexpected result.')\n prevHash = file.lastHash\n prevChainWork = file.lastChainWork\n info.files.push(file)\n */\n }\n const bytes = (0, utilityHelpers_noBuffer_1.asUint8Array)(JSON.stringify(info), 'utf8');\n await this.fs.writeFile(this.fs.pathJoin(rootFolder, jsonFilename), bytes);\n 
}\n}\nexports.BulkStorageBase = BulkStorageBase;\n//# sourceMappingURL=BulkStorageBase.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Storage/BulkStorageBase.js?\n}"); - -/***/ }), - -/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Storage/ChaintracksStorageBase.js": -/*!*****************************************************************************************************************************!*\ - !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Storage/ChaintracksStorageBase.js ***! - \*****************************************************************************************************************************/ -/***/ ((__unused_webpack_module, exports, __webpack_require__) => { - -"use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.ChaintracksStorageBase = void 0;\nconst sdk_1 = __webpack_require__(/*! ../../../../sdk */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/index.js\");\nconst blockHeaderUtilities_1 = __webpack_require__(/*! ../util/blockHeaderUtilities */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/blockHeaderUtilities.js\");\nconst BulkFileDataManager_1 = __webpack_require__(/*! 
../util/BulkFileDataManager */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/BulkFileDataManager.js\");\n/**\n * Required interface methods of a Chaintracks Storage Engine implementation.\n */\nclass ChaintracksStorageBase {\n static createStorageBaseOptions(chain) {\n const options = {\n chain,\n liveHeightThreshold: 2000,\n reorgHeightThreshold: 400,\n bulkMigrationChunkSize: 500,\n batchInsertLimit: 400,\n bulkFileDataManager: undefined\n };\n return options;\n }\n constructor(options) {\n this.isAvailable = false;\n this.hasMigrated = false;\n this.nowMigratingLiveToBulk = false;\n this.chain = options.chain;\n this.liveHeightThreshold = options.liveHeightThreshold;\n this.reorgHeightThreshold = options.reorgHeightThreshold;\n this.bulkMigrationChunkSize = options.bulkMigrationChunkSize;\n this.batchInsertLimit = options.batchInsertLimit;\n this.bulkManager =\n options.bulkFileDataManager || new BulkFileDataManager_1.BulkFileDataManager(BulkFileDataManager_1.BulkFileDataManager.createDefaultOptions(this.chain));\n }\n async shutdown() {\n /* base class does notning */\n }\n async makeAvailable() {\n if (this.isAvailable)\n return;\n this.isAvailable = true;\n }\n async migrateLatest() {\n this.hasMigrated = true;\n }\n async dropAllData() {\n await this.bulkManager.deleteBulkFiles();\n await this.makeAvailable();\n }\n // BASE CLASS IMPLEMENTATIONS - MAY BE OVERRIDEN\n async deleteBulkBlockHeaders() {\n await this.bulkManager.deleteBulkFiles();\n }\n async getAvailableHeightRanges() {\n await this.makeAvailable();\n const bulk = await this.bulkManager.getHeightRange();\n const live = await this.getLiveHeightRange();\n if (bulk.isEmpty) {\n if (!live.isEmpty && live.minHeight !== 0)\n throw new Error('With empty bulk storage, live storage must start with genesis header.');\n }\n else {\n if (bulk.minHeight != 0)\n throw new Error(\"Bulk storage doesn't start with genesis header.\");\n if (!live.isEmpty && 
bulk.maxHeight + 1 !== live.minHeight)\n throw new Error('There is a gap or overlap between bulk and live header storage.');\n }\n return { bulk, live };\n }\n async pruneLiveBlockHeaders(activeTipHeight) {\n await this.makeAvailable();\n try {\n const minHeight = this.lastActiveMinHeight || (await this.findLiveHeightRange()).minHeight;\n let totalCount = activeTipHeight - minHeight + 1 - this.liveHeightThreshold;\n while (totalCount >= this.bulkMigrationChunkSize) {\n const count = Math.min(totalCount, this.bulkMigrationChunkSize);\n await this.migrateLiveToBulk(count);\n totalCount -= count;\n this.lastActiveMinHeight = undefined;\n }\n }\n catch (err) {\n console.log(err);\n throw err;\n }\n }\n async findChainTipHash() {\n await this.makeAvailable();\n const tip = await this.findChainTipHeader();\n return tip.hash;\n }\n async findChainTipWork() {\n await this.makeAvailable();\n const tip = await this.findChainTipHeader();\n return tip.chainWork;\n }\n async findChainWorkForBlockHash(hash) {\n await this.makeAvailable();\n const header = await this.findLiveHeaderForBlockHash(hash);\n if (header !== null)\n return header.chainWork;\n throw new Error(`Header with hash of ${hash} was not found in the live headers database.`);\n }\n async findBulkFilesHeaderForHeightOrUndefined(height) {\n await this.makeAvailable();\n return this.bulkManager.findHeaderForHeightOrUndefined(height);\n }\n async findHeaderForHeightOrUndefined(height) {\n await this.makeAvailable();\n if (isNaN(height) || height < 0 || Math.ceil(height) !== height)\n throw new sdk_1.WERR_INVALID_PARAMETER('height', `a non-negative integer (${height}).`);\n const liveHeader = await this.findLiveHeaderForHeight(height);\n if (liveHeader !== null)\n return liveHeader;\n const header = await this.findBulkFilesHeaderForHeightOrUndefined(height);\n return header;\n }\n async findHeaderForHeight(height) {\n await this.makeAvailable();\n const header = await this.findHeaderForHeightOrUndefined(height);\n if 
(header)\n return header;\n throw new Error(`Header with height of ${height} was not found.`);\n }\n async isMerkleRootActive(merkleRoot) {\n await this.makeAvailable();\n const header = await this.findLiveHeaderForMerkleRoot(merkleRoot);\n return header ? header.isActive : false;\n }\n async findCommonAncestor(header1, header2) {\n await this.makeAvailable();\n /*eslint no-constant-condition: [\"error\", { \"checkLoops\": false }]*/\n while (true) {\n if (header1.previousHeaderId === null || header2.previousHeaderId === null)\n throw new Error('Reached start of live database without resolving the reorg.');\n if (header1.previousHeaderId === header2.previousHeaderId)\n return await this.findLiveHeaderForHeaderId(header1.previousHeaderId);\n const backupHeader1 = header1.height >= header2.height;\n if (header2.height >= header1.height)\n header2 = await this.findLiveHeaderForHeaderId(header2.previousHeaderId);\n if (backupHeader1)\n header1 = await this.findLiveHeaderForHeaderId(header1.previousHeaderId);\n }\n }\n async findReorgDepth(header1, header2) {\n await this.makeAvailable();\n const ancestor = await this.findCommonAncestor(header1, header2);\n return Math.max(header1.height, header2.height) - ancestor.height;\n }\n async migrateLiveToBulk(count, ignoreLimits = false) {\n await this.makeAvailable();\n if (!ignoreLimits && count > this.bulkMigrationChunkSize)\n return;\n if (this.nowMigratingLiveToBulk) {\n console.log('Already migrating live to bulk.');\n return;\n }\n try {\n this.nowMigratingLiveToBulk = true;\n const headers = await this.liveHeadersForBulk(count);\n await this.addLiveHeadersToBulk(headers);\n await this.deleteOlderLiveBlockHeaders(headers.slice(-1)[0].height);\n }\n finally {\n this.nowMigratingLiveToBulk = false;\n }\n }\n async addBulkHeaders(headers, bulkRange, priorLiveHeaders) {\n await this.makeAvailable();\n if (!headers || headers.length === 0)\n return priorLiveHeaders;\n // Get the current extent of validated bulk and live 
block headers.\n const before = await this.getAvailableHeightRanges();\n const bulkFiles = this.bulkManager;\n // Review `headers`, applying the following rules:\n // 1. Height must be outside the current bulk HeightRange.\n // 2. Height must not exceed presentHeight - liveHeightThreshold. If presentHeight is unknown, use maximum height across all headers.\n // 3. Compute chainWork for each header.\n // 4. Verify chain of header hash and previousHash values. One header at each height. Retain chain with most chainWork.\n const minHeight = !bulkRange.isEmpty ? bulkRange.minHeight : before.bulk.isEmpty ? 0 : before.bulk.maxHeight + 1;\n const filteredHeaders = headers.concat(priorLiveHeaders || []).filter(h => h.height >= minHeight);\n const sortedHeaders = filteredHeaders.sort((a, b) => a.height - b.height);\n const liveHeaders = sortedHeaders.filter(h => bulkRange.isEmpty || !bulkRange.contains(h.height));\n if (liveHeaders.length === sortedHeaders.length) {\n // All headers are live, no bulk headers to add.\n return liveHeaders;\n }\n const chains = [];\n for (const h of sortedHeaders) {\n const dupe = chains.find(c => {\n const lh = c.headers[c.headers.length - 1];\n return lh.hash === h.hash;\n });\n if (dupe)\n continue;\n const chainWork = (0, blockHeaderUtilities_1.convertBitsToWork)(h.bits);\n let chain = chains.find(c => {\n const lh = c.headers[c.headers.length - 1];\n return lh.height + 1 === h.height && lh.hash === h.previousHash;\n });\n if (chain) {\n chain.headers.push(h);\n chain.chainWork = (0, blockHeaderUtilities_1.addWork)(chain.chainWork, chainWork);\n if (h.height <= bulkRange.maxHeight) {\n chain.bulkChainWork = chain.chainWork;\n }\n continue;\n }\n // Since headers are assumed to be sorted by height,\n // if this header doesn't extend an existing chain,\n // it may be a branch from the previous header.\n chain = chains.find(c => {\n const lh = c.headers[c.headers.length - 2];\n return lh.height + 1 === h.height && lh.hash === 
h.previousHash;\n });\n if (chain) {\n // This header competes with tip of `chain`.\n // Create a new chain with this header as the tip.\n const headers = chain.headers.slice(0, -1);\n headers.push(h);\n const otherHeaderChainWork = (0, blockHeaderUtilities_1.convertBitsToWork)(chain.headers[chain.headers.length - 1].bits);\n const newChainWork = (0, blockHeaderUtilities_1.addWork)((0, blockHeaderUtilities_1.subWork)(chain.chainWork, otherHeaderChainWork), chainWork);\n const newChain = {\n headers,\n chainWork: newChainWork,\n bulkChainWork: h.height <= bulkRange.maxHeight ? newChainWork : undefined\n };\n chains.push(newChain);\n continue;\n }\n // Starting a new chain\n chains.push({ headers: [h], chainWork, bulkChainWork: h.height <= bulkRange.maxHeight ? chainWork : undefined });\n }\n // Find the chain with the most chainWork.\n const bestChain = chains.reduce((best, c) => ((0, blockHeaderUtilities_1.isMoreWork)(c.chainWork, best.chainWork) ? c : best), chains[0]);\n const newBulkHeaders = bestChain.headers.slice(0, bulkRange.maxHeight - bestChain.headers[0].height + 1);\n await this.addBulkHeadersFromBestChain(newBulkHeaders, bestChain);\n return liveHeaders;\n }\n async addBulkHeadersFromBestChain(newBulkHeaders, bestChain) {\n if (!bestChain.bulkChainWork) {\n throw new sdk_1.WERR_INTERNAL(`bulkChainWork is not defined for the best chain with height ${bestChain.headers[0].height}`);\n }\n await this.bulkManager.mergeIncrementalBlockHeaders(newBulkHeaders, bestChain.bulkChainWork);\n }\n async addLiveHeadersToBulk(liveHeaders) {\n if (liveHeaders.length === 0)\n return;\n const lastChainWork = liveHeaders.slice(-1)[0].chainWork;\n await this.bulkManager.mergeIncrementalBlockHeaders(liveHeaders, lastChainWork);\n }\n}\nexports.ChaintracksStorageBase = ChaintracksStorageBase;\n//# sourceMappingURL=ChaintracksStorageBase.js.map\n\n//# 
sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Storage/ChaintracksStorageBase.js?\n}"); - -/***/ }), - -/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Storage/ChaintracksStorageNoDb.js": -/*!*****************************************************************************************************************************!*\ - !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Storage/ChaintracksStorageNoDb.js ***! - \*****************************************************************************************************************************/ -/***/ ((__unused_webpack_module, exports, __webpack_require__) => { - -"use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.ChaintracksStorageNoDb = void 0;\nconst ChaintracksStorageBase_1 = __webpack_require__(/*! ../Storage/ChaintracksStorageBase */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Storage/ChaintracksStorageBase.js\");\nconst blockHeaderUtilities_1 = __webpack_require__(/*! ../util/blockHeaderUtilities */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/blockHeaderUtilities.js\");\nconst HeightRange_1 = __webpack_require__(/*! ../util/HeightRange */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/HeightRange.js\");\nconst BulkFilesReader_1 = __webpack_require__(/*! ../util/BulkFilesReader */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/BulkFilesReader.js\");\nconst ChaintracksFetch_1 = __webpack_require__(/*! ../util/ChaintracksFetch */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/ChaintracksFetch.js\");\nconst WERR_errors_1 = __webpack_require__(/*! 
../../../../sdk/WERR_errors */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WERR_errors.js\");\nclass ChaintracksStorageNoDb extends ChaintracksStorageBase_1.ChaintracksStorageBase {\n constructor(options) {\n super(options);\n }\n async destroy() { }\n async getData() {\n if (this.chain === 'main') {\n return ChaintracksStorageNoDb.mainData;\n }\n else if (this.chain === 'test') {\n return ChaintracksStorageNoDb.testData;\n }\n else {\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('chain', `either 'main' or 'test. '${this.chain}' is unsupported.`);\n }\n }\n async deleteLiveBlockHeaders() {\n const data = await this.getData();\n data.liveHeaders.clear();\n data.maxHeaderId = 0;\n data.tipHeaderId = 0;\n data.hashToHeaderId.clear();\n }\n async deleteOlderLiveBlockHeaders(maxHeight) {\n const data = await this.getData();\n let deletedCount = 0;\n // Clear previousHeaderId references\n for (const [headerId, header] of data.liveHeaders) {\n if (header.previousHeaderId) {\n const prevHeader = data.liveHeaders.get(header.previousHeaderId);\n if (prevHeader && prevHeader.height <= maxHeight) {\n data.liveHeaders.set(headerId, { ...header, previousHeaderId: null });\n }\n }\n }\n // Delete headers up to maxHeight\n const headersToDelete = new Set();\n for (const [headerId, header] of data.liveHeaders) {\n if (header.height <= maxHeight) {\n headersToDelete.add(headerId);\n data.hashToHeaderId.delete(header.hash);\n }\n }\n deletedCount = headersToDelete.size;\n for (const headerId of headersToDelete) {\n data.liveHeaders.delete(headerId);\n }\n // Update tipHeaderId if necessary\n if (data.liveHeaders.size > 0) {\n const tip = Array.from(data.liveHeaders.values()).find(h => h.isActive && h.isChainTip);\n data.tipHeaderId = tip ? 
tip.headerId : 0;\n }\n else {\n data.tipHeaderId = 0;\n }\n return deletedCount;\n }\n async findChainTipHeader() {\n const data = await this.getData();\n const tip = Array.from(data.liveHeaders.values()).find(h => h.isActive && h.isChainTip);\n if (!tip)\n throw new Error('Database contains no active chain tip header.');\n return tip;\n }\n async findChainTipHeaderOrUndefined() {\n const data = await this.getData();\n return Array.from(data.liveHeaders.values()).find(h => h.isActive && h.isChainTip);\n }\n async findLiveHeaderForBlockHash(hash) {\n const data = await this.getData();\n const headerId = data.hashToHeaderId.get(hash);\n return headerId ? data.liveHeaders.get(headerId) || null : null;\n }\n async findLiveHeaderForHeaderId(headerId) {\n const data = await this.getData();\n const header = data.liveHeaders.get(headerId);\n if (!header)\n throw new Error(`HeaderId ${headerId} not found in live header database.`);\n return header;\n }\n async findLiveHeaderForHeight(height) {\n const data = await this.getData();\n return Array.from(data.liveHeaders.values()).find(h => h.height === height && h.isActive) || null;\n }\n async findLiveHeaderForMerkleRoot(merkleRoot) {\n const data = await this.getData();\n return Array.from(data.liveHeaders.values()).find(h => h.merkleRoot === merkleRoot) || null;\n }\n async findLiveHeightRange() {\n const data = await this.getData();\n const activeHeaders = Array.from(data.liveHeaders.values()).filter(h => h.isActive);\n if (activeHeaders.length === 0) {\n return { minHeight: 0, maxHeight: -1 };\n }\n const minHeight = Math.min(...activeHeaders.map(h => h.height));\n const maxHeight = Math.max(...activeHeaders.map(h => h.height));\n return { minHeight, maxHeight };\n }\n async findMaxHeaderId() {\n const data = await this.getData();\n return data.maxHeaderId;\n }\n async getLiveHeightRange() {\n const data = await this.getData();\n const activeHeaders = Array.from(data.liveHeaders.values()).filter(h => h.isActive);\n if 
(activeHeaders.length === 0) {\n return new HeightRange_1.HeightRange(0, -1);\n }\n const minHeight = Math.min(...activeHeaders.map(h => h.height));\n const maxHeight = Math.max(...activeHeaders.map(h => h.height));\n return new HeightRange_1.HeightRange(minHeight, maxHeight);\n }\n async liveHeadersForBulk(count) {\n const data = await this.getData();\n return Array.from(data.liveHeaders.values())\n .filter(h => h.isActive)\n .sort((a, b) => a.height - b.height)\n .slice(0, count);\n }\n async getHeaders(height, count) {\n if (count <= 0)\n return [];\n const data = await this.getData();\n const headers = Array.from(data.liveHeaders.values())\n .filter(h => h.isActive && h.height >= height && h.height < height + count)\n .sort((a, b) => a.height - b.height)\n .slice(0, count);\n const bufs = [];\n if (headers.length === 0 || headers[0].height > height) {\n const bulkCount = headers.length === 0 ? count : headers[0].height - height;\n const range = new HeightRange_1.HeightRange(height, height + bulkCount - 1);\n const reader = await BulkFilesReader_1.BulkFilesReaderStorage.fromStorage(this, new ChaintracksFetch_1.ChaintracksFetch(), range, bulkCount * 80);\n const bulkData = await reader.read();\n if (bulkData) {\n bufs.push(bulkData);\n }\n }\n if (headers.length > 0) {\n let buf = new Uint8Array(headers.length * 80);\n for (let i = 0; i < headers.length; i++) {\n const h = headers[i];\n const ha = (0, blockHeaderUtilities_1.serializeBaseBlockHeader)(h);\n buf.set(ha, i * 80);\n }\n bufs.push(buf);\n }\n const r = [];\n for (const bh of bufs) {\n for (const b of bh) {\n r.push(b);\n }\n }\n return r;\n }\n async insertHeader(header, prev) {\n const data = await this.getData();\n let ok = true;\n let dupe = false;\n let noPrev = false;\n let badPrev = false;\n let noActiveAncestor = false;\n let noTip = false;\n let setActiveChainTip = false;\n let reorgDepth = 0;\n let priorTip;\n // Check for duplicate\n if (data.hashToHeaderId.has(header.hash)) {\n dupe = 
true;\n return { added: false, dupe, isActiveTip: false, reorgDepth, priorTip, noPrev, badPrev, noActiveAncestor, noTip };\n }\n // Find previous header\n let oneBack = Array.from(data.liveHeaders.values()).find(h => h.hash === header.previousHash);\n if (!oneBack && prev && prev.hash === header.previousHash && prev.height + 1 === header.height) {\n oneBack = prev;\n }\n if (!oneBack) {\n // Check if this is first live header\n if (data.liveHeaders.size === 0) {\n const lbf = await this.bulkManager.getLastFile();\n if (lbf && header.previousHash === lbf.lastHash && header.height === lbf.firstHeight + lbf.count) {\n const chainWork = (0, blockHeaderUtilities_1.addWork)(lbf.lastChainWork, (0, blockHeaderUtilities_1.convertBitsToWork)(header.bits));\n const newHeader = {\n ...header,\n headerId: ++data.maxHeaderId,\n previousHeaderId: null,\n chainWork,\n isChainTip: true,\n isActive: true\n };\n data.liveHeaders.set(newHeader.headerId, newHeader);\n data.hashToHeaderId.set(header.hash, newHeader.headerId);\n data.tipHeaderId = newHeader.headerId;\n return {\n added: true,\n dupe,\n isActiveTip: true,\n reorgDepth,\n priorTip,\n noPrev,\n badPrev,\n noActiveAncestor,\n noTip\n };\n }\n noPrev = true;\n return {\n added: false,\n dupe,\n isActiveTip: false,\n reorgDepth,\n priorTip,\n noPrev,\n badPrev,\n noActiveAncestor,\n noTip\n };\n }\n noPrev = true;\n return { added: false, dupe, isActiveTip: false, reorgDepth, priorTip, noPrev, badPrev, noActiveAncestor, noTip };\n }\n if (oneBack.height + 1 !== header.height) {\n badPrev = true;\n return { added: false, dupe, isActiveTip: false, reorgDepth, priorTip, noPrev, badPrev, noActiveAncestor, noTip };\n }\n const chainWork = (0, blockHeaderUtilities_1.addWork)(oneBack.chainWork, (0, blockHeaderUtilities_1.convertBitsToWork)(header.bits));\n let tip = oneBack.isActive && oneBack.isChainTip\n ? 
oneBack\n : Array.from(data.liveHeaders.values()).find(h => h.isActive && h.isChainTip);\n if (!tip) {\n noTip = true;\n return { added: false, dupe, isActiveTip: false, reorgDepth, priorTip, noPrev, badPrev, noActiveAncestor, noTip };\n }\n priorTip = tip;\n setActiveChainTip = (0, blockHeaderUtilities_1.isMoreWork)(chainWork, tip.chainWork);\n const newHeader = {\n ...header,\n headerId: ++data.maxHeaderId,\n previousHeaderId: oneBack === prev ? null : oneBack.headerId,\n chainWork,\n isChainTip: setActiveChainTip,\n isActive: setActiveChainTip\n };\n if (setActiveChainTip) {\n let activeAncestor = oneBack;\n while (!activeAncestor.isActive) {\n const previousHeader = data.liveHeaders.get(activeAncestor.previousHeaderId);\n if (!previousHeader) {\n noActiveAncestor = true;\n return {\n added: false,\n dupe,\n isActiveTip: false,\n reorgDepth,\n priorTip,\n noPrev,\n badPrev,\n noActiveAncestor,\n noTip\n };\n }\n activeAncestor = previousHeader;\n }\n if (!(oneBack.isActive && oneBack.isChainTip)) {\n reorgDepth = Math.min(priorTip.height, header.height) - activeAncestor.height;\n }\n if (activeAncestor.headerId !== oneBack.headerId) {\n let headerToDeactivate = Array.from(data.liveHeaders.values()).find(h => h.isChainTip && h.isActive);\n while (headerToDeactivate && headerToDeactivate.headerId !== activeAncestor.headerId) {\n data.liveHeaders.set(headerToDeactivate.headerId, { ...headerToDeactivate, isActive: false });\n headerToDeactivate = data.liveHeaders.get(headerToDeactivate.previousHeaderId);\n }\n let headerToActivate = oneBack;\n while (headerToActivate.headerId !== activeAncestor.headerId) {\n data.liveHeaders.set(headerToActivate.headerId, { ...headerToActivate, isActive: true });\n headerToActivate = data.liveHeaders.get(headerToActivate.previousHeaderId);\n }\n }\n }\n if (oneBack.isChainTip && oneBack !== prev) {\n data.liveHeaders.set(oneBack.headerId, { ...oneBack, isChainTip: false });\n }\n data.liveHeaders.set(newHeader.headerId, 
newHeader);\n data.hashToHeaderId.set(newHeader.hash, newHeader.headerId);\n if (setActiveChainTip) {\n data.tipHeaderId = newHeader.headerId;\n this.pruneLiveBlockHeaders(newHeader.height);\n }\n return {\n added: ok,\n dupe,\n isActiveTip: setActiveChainTip,\n reorgDepth,\n priorTip,\n noPrev,\n badPrev,\n noActiveAncestor,\n noTip\n };\n }\n}\nexports.ChaintracksStorageNoDb = ChaintracksStorageNoDb;\nChaintracksStorageNoDb.mainData = {\n chain: 'main',\n liveHeaders: new Map(),\n maxHeaderId: 0,\n tipHeaderId: 0,\n hashToHeaderId: new Map()\n};\nChaintracksStorageNoDb.testData = {\n chain: 'test',\n liveHeaders: new Map(),\n maxHeaderId: 0,\n tipHeaderId: 0,\n hashToHeaderId: new Map()\n};\n//# sourceMappingURL=ChaintracksStorageNoDb.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Storage/ChaintracksStorageNoDb.js?\n}"); - -/***/ }), - -/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/createDefaultNoDbChaintracksOptions.js": -/*!**********************************************************************************************************************************!*\ - !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/createDefaultNoDbChaintracksOptions.js ***! - \**********************************************************************************************************************************/ -/***/ ((__unused_webpack_module, exports, __webpack_require__) => { - -"use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.createNoDbChaintracksOptions = createNoDbChaintracksOptions;\nconst Chaintracks_1 = __webpack_require__(/*! ./Chaintracks */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Chaintracks.js\");\nconst BulkIngestorCDNBabbage_1 = __webpack_require__(/*! 
./Ingest/BulkIngestorCDNBabbage */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/BulkIngestorCDNBabbage.js\");\nconst ChaintracksFetch_1 = __webpack_require__(/*! ./util/ChaintracksFetch */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/ChaintracksFetch.js\");\nconst LiveIngestorWhatsOnChainPoll_1 = __webpack_require__(/*! ./Ingest/LiveIngestorWhatsOnChainPoll */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/LiveIngestorWhatsOnChainPoll.js\");\nconst BulkIngestorWhatsOnChainCdn_1 = __webpack_require__(/*! ./Ingest/BulkIngestorWhatsOnChainCdn */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/BulkIngestorWhatsOnChainCdn.js\");\nconst ChaintracksStorageNoDb_1 = __webpack_require__(/*! ./Storage/ChaintracksStorageNoDb */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Storage/ChaintracksStorageNoDb.js\");\nfunction createNoDbChaintracksOptions(chain) {\n const options = Chaintracks_1.Chaintracks.createOptions(chain);\n const so = ChaintracksStorageNoDb_1.ChaintracksStorageNoDb.createStorageBaseOptions(chain);\n const s = new ChaintracksStorageNoDb_1.ChaintracksStorageNoDb(so);\n options.storage = s;\n const fetch = new ChaintracksFetch_1.ChaintracksFetch();\n const bulkCDNOptions = BulkIngestorCDNBabbage_1.BulkIngestorCDNBabbage.createBulkIngestorCDNBabbageOptions(chain, fetch);\n options.bulkIngestors.push(new BulkIngestorCDNBabbage_1.BulkIngestorCDNBabbage(bulkCDNOptions));\n const bulkWhatsOnChainOptions = BulkIngestorWhatsOnChainCdn_1.BulkIngestorWhatsOnChainCdn.createBulkIngestorWhatsOnChainOptions(chain);\n options.bulkIngestors.push(new BulkIngestorWhatsOnChainCdn_1.BulkIngestorWhatsOnChainCdn(bulkWhatsOnChainOptions));\n const liveWhatsOnChainOptions = 
LiveIngestorWhatsOnChainPoll_1.LiveIngestorWhatsOnChainPoll.createLiveIngestorWhatsOnChainOptions(chain);\n options.liveIngestors.push(new LiveIngestorWhatsOnChainPoll_1.LiveIngestorWhatsOnChainPoll(liveWhatsOnChainOptions));\n return options;\n}\n//# sourceMappingURL=createDefaultNoDbChaintracksOptions.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/createDefaultNoDbChaintracksOptions.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.ChaintracksServiceClient = void 0;\n/* eslint-disable @typescript-eslint/no-unused-vars */\nconst index_client_1 = __webpack_require__(/*! ../../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\n/**\n * Connects to a ChaintracksService to implement 'ChaintracksClientApi'\n *\n */\nclass ChaintracksServiceClient {\n static createChaintracksServiceClientOptions() {\n const options = {\n useAuthrite: false\n };\n return options;\n }\n constructor(chain, serviceUrl, options) {\n this.chain = chain;\n this.serviceUrl = serviceUrl;\n this.options = options || ChaintracksServiceClient.createChaintracksServiceClientOptions();\n }\n async currentHeight() {\n return await this.getPresentHeight();\n }\n async isValidRootForHeight(root, height) {\n const r = await this.findHeaderForHeight(height);\n if (!r)\n return false;\n const isValid = root === (0, index_client_1.asString)(r.merkleRoot);\n return isValid;\n }\n async getJsonOrUndefined(path) {\n let e = undefined;\n for (let retry = 0; retry < 3; retry++) {\n try {\n const r = await fetch(`${this.serviceUrl}${path}`);\n const v = await r.json();\n if (v.status === 'success')\n return v.value;\n else\n e = new Error(JSON.stringify(v));\n }\n catch (eu) {\n e = eu;\n }\n if (e && e.name !== 'ECONNRESET')\n break;\n }\n if (e)\n throw e;\n }\n async getJson(path) {\n const r = await this.getJsonOrUndefined(path);\n if (r === undefined)\n 
throw new Error('Value was undefined. Requested object may not exist.');\n return r;\n }\n async postJsonVoid(path, params) {\n const headers = {};\n headers['Content-Type'] = 'application/json';\n const r = await fetch(`${this.serviceUrl}${path}`, {\n body: JSON.stringify(params),\n method: 'POST',\n headers\n //cache: 'no-cache',\n });\n try {\n const s = await r.json();\n if (s.status === 'success')\n return;\n throw new Error(JSON.stringify(s));\n }\n catch (e) {\n console.log(`Exception: ${JSON.stringify(e)}`);\n throw new Error(JSON.stringify(e));\n }\n }\n //\n // HTTP API FUNCTIONS\n //\n async addHeader(header) {\n const r = await this.postJsonVoid('/addHeaderHex', header);\n if (typeof r === 'string')\n throw new Error(r);\n }\n async startListening() {\n return await this.getJsonOrUndefined('/startListening');\n }\n async listening() {\n return await this.getJsonOrUndefined('/listening');\n }\n async getChain() {\n return this.chain;\n //return await this.getJson('/getChain')\n }\n async isListening() {\n return await this.getJson('/isListening');\n }\n async isSynchronized() {\n return await this.getJson('/isSynchronized');\n }\n async getPresentHeight() {\n return await this.getJson('/getPresentHeight');\n }\n async findChainTipHeader() {\n return await this.getJson('/findChainTipHeaderHex');\n }\n async findChainTipHashHex() {\n return await this.getJson('/findChainTipHashHex');\n }\n async getHeaders(height, count) {\n return await this.getJson(`/getHeaders?height=${height}&count=${count}`);\n }\n async findHeaderForHeight(height) {\n return await this.getJsonOrUndefined(`/findHeaderHexForHeight?height=${height}`);\n }\n async findChainWorkForBlockHash(hash) {\n return await this.getJsonOrUndefined(`/findChainWorkHexForBlockHash?hash=${(0, index_client_1.asString)(hash)}`);\n }\n async findHeaderForBlockHash(hash) {\n return await this.getJsonOrUndefined(`/findHeaderHexForBlockHash?hash=${(0, index_client_1.asString)(hash)}`);\n }\n async 
findHeaderForMerkleRoot(merkleRoot, height) {\n return await this.getJsonOrUndefined(`/findHeaderHexForMerkleRoot?root=${(0, index_client_1.asString)(merkleRoot)}&height=${height}`);\n }\n}\nexports.ChaintracksServiceClient = ChaintracksServiceClient;\n//# sourceMappingURL=ChaintracksServiceClient.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/ChaintracksServiceClient.js?\n}"); /***/ }), -/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/index.client.js": -/*!***********************************************************************************************************!*\ - !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/index.client.js ***! - \***********************************************************************************************************/ +/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/index.js": +/*!****************************************************************************************************!*\ + !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/index.js ***! + \****************************************************************************************************/ /***/ (function(__unused_webpack_module, exports, __webpack_require__) { "use strict"; -eval("{\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nvar __importStar = (this && this.__importStar) || (function () {\n var ownKeys = function(o) {\n ownKeys = Object.getOwnPropertyNames || function (o) {\n var ar = [];\n for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;\n return ar;\n };\n return ownKeys(o);\n };\n return function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== \"default\") __createBinding(result, mod, k[i]);\n __setModuleDefault(result, mod);\n return result;\n };\n})();\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.utils = void 0;\n__exportStar(__webpack_require__(/*! ./Api/ChaintracksApi */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Api/ChaintracksApi.js\"), exports);\n__exportStar(__webpack_require__(/*! ./Api/ChaintracksFsApi */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Api/ChaintracksFsApi.js\"), exports);\n__exportStar(__webpack_require__(/*! ./Api/ChaintracksFetchApi */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Api/ChaintracksFetchApi.js\"), exports);\n__exportStar(__webpack_require__(/*! ./Api/ChaintracksStorageApi */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Api/ChaintracksStorageApi.js\"), exports);\n__exportStar(__webpack_require__(/*! 
./Api/BulkStorageApi */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Api/BulkStorageApi.js\"), exports);\n__exportStar(__webpack_require__(/*! ./Api/BulkIngestorApi */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Api/BulkIngestorApi.js\"), exports);\n__exportStar(__webpack_require__(/*! ./Api/LiveIngestorApi */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Api/LiveIngestorApi.js\"), exports);\n__exportStar(__webpack_require__(/*! ./Api/BlockHeaderApi */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Api/BlockHeaderApi.js\"), exports);\n__exportStar(__webpack_require__(/*! ./Chaintracks */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Chaintracks.js\"), exports);\n__exportStar(__webpack_require__(/*! ./ChaintracksServiceClient */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/ChaintracksServiceClient.js\"), exports);\n__exportStar(__webpack_require__(/*! ./Ingest/BulkIngestorBase */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/BulkIngestorBase.js\"), exports);\n__exportStar(__webpack_require__(/*! ./Ingest/LiveIngestorBase */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/LiveIngestorBase.js\"), exports);\n__exportStar(__webpack_require__(/*! ./Ingest/BulkIngestorCDN */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/BulkIngestorCDN.js\"), exports);\n__exportStar(__webpack_require__(/*! ./Ingest/BulkIngestorCDNBabbage */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/BulkIngestorCDNBabbage.js\"), exports);\n__exportStar(__webpack_require__(/*! 
./Ingest/BulkIngestorWhatsOnChainCdn */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/BulkIngestorWhatsOnChainCdn.js\"), exports);\n__exportStar(__webpack_require__(/*! ./Ingest/LiveIngestorWhatsOnChainPoll */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/LiveIngestorWhatsOnChainPoll.js\"), exports);\n__exportStar(__webpack_require__(/*! ./Ingest/WhatsOnChainServices */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Ingest/WhatsOnChainServices.js\"), exports);\n__exportStar(__webpack_require__(/*! ./Storage/BulkStorageBase */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Storage/BulkStorageBase.js\"), exports);\n__exportStar(__webpack_require__(/*! ./Storage/ChaintracksStorageBase */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Storage/ChaintracksStorageBase.js\"), exports);\n__exportStar(__webpack_require__(/*! ./Storage/ChaintracksStorageNoDb */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Storage/ChaintracksStorageNoDb.js\"), exports);\n__exportStar(__webpack_require__(/*! ./createDefaultNoDbChaintracksOptions */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/createDefaultNoDbChaintracksOptions.js\"), exports);\n__exportStar(__webpack_require__(/*! ./util/BulkFilesReader */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/BulkFilesReader.js\"), exports);\n__exportStar(__webpack_require__(/*! ./util/HeightRange */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/HeightRange.js\"), exports);\n__exportStar(__webpack_require__(/*! 
./util/BulkFileDataManager */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/BulkFileDataManager.js\"), exports);\n__exportStar(__webpack_require__(/*! ./util/ChaintracksFetch */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/ChaintracksFetch.js\"), exports);\nexports.utils = __importStar(__webpack_require__(/*! ./util/blockHeaderUtilities */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/blockHeaderUtilities.js\"));\n//# sourceMappingURL=index.client.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/index.client.js?\n}"); - -/***/ }), - -/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/BulkFileDataManager.js": -/*!***********************************************************************************************************************!*\ - !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/BulkFileDataManager.js ***! - \***********************************************************************************************************************/ -/***/ ((__unused_webpack_module, exports, __webpack_require__) => { - -"use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.BulkFileDataReader = exports.BulkFileDataManager = void 0;\nexports.selectBulkHeaderFiles = selectBulkHeaderFiles;\nconst sdk_1 = __webpack_require__(/*! ../../../../sdk */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/index.js\");\nconst sdk_2 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst utilityHelpers_noBuffer_1 = __webpack_require__(/*! 
../../../../utility/utilityHelpers.noBuffer */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.noBuffer.js\");\nconst validBulkHeaderFilesByFileHash_1 = __webpack_require__(/*! ./validBulkHeaderFilesByFileHash */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/validBulkHeaderFilesByFileHash.js\");\nconst HeightRange_1 = __webpack_require__(/*! ./HeightRange */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/HeightRange.js\");\nconst blockHeaderUtilities_1 = __webpack_require__(/*! ./blockHeaderUtilities */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/blockHeaderUtilities.js\");\nconst ChaintracksFetch_1 = __webpack_require__(/*! ./ChaintracksFetch */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/ChaintracksFetch.js\");\nconst SingleWriterMultiReaderLock_1 = __webpack_require__(/*! ./SingleWriterMultiReaderLock */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/SingleWriterMultiReaderLock.js\");\n/**\n * Manages bulk file data (typically 8MB chunks of 100,000 headers each).\n *\n * If not cached in memory,\n * optionally fetches data by `sourceUrl` from CDN on demand,\n * optionally finds data by `fileId` in a database on demand,\n * and retains a limited number of files in memory,\n * subject to the optional `maxRetained` limit.\n */\nclass BulkFileDataManager {\n static createDefaultOptions(chain) {\n return {\n chain,\n maxPerFile: 100000,\n maxRetained: 2,\n fetch: new ChaintracksFetch_1.ChaintracksFetch(),\n fromKnownSourceUrl: 'https://cdn.projectbabbage.com/blockheaders'\n };\n }\n constructor(options) {\n this.bfds = [];\n this.fileHashToIndex = {};\n this.lock = new SingleWriterMultiReaderLock_1.SingleWriterMultiReaderLock();\n if (typeof options === 'object')\n options = options;\n else\n options = 
BulkFileDataManager.createDefaultOptions(options);\n this.chain = options.chain;\n this.maxPerFile = options.maxPerFile;\n this.maxRetained = options.maxRetained;\n this.fromKnownSourceUrl = options.fromKnownSourceUrl;\n this.fetch = options.fetch;\n this.deleteBulkFilesNoLock();\n }\n async createReader(range, maxBufferSize) {\n range = range || (await this.getHeightRange());\n maxBufferSize = maxBufferSize || 1000000 * 80; // 100,000 headers, 8MB\n return new BulkFileDataReader(this, range, maxBufferSize);\n }\n async updateFromUrl(cdnUrl) {\n if (!this.fetch)\n throw new sdk_1.WERR_INVALID_OPERATION('fetch is not defined in the BulkFileDataManager.');\n const toUrl = (file) => this.fetch.pathJoin(cdnUrl, file);\n const url = toUrl(`${this.chain}NetBlockHeaders.json`);\n const availableBulkFiles = (await this.fetch.fetchJson(url));\n if (!availableBulkFiles)\n throw new sdk_1.WERR_INVALID_PARAMETER(`cdnUrl`, `a valid BulkHeaderFilesInfo JSON resource available from ${url}`);\n const selectedFiles = selectBulkHeaderFiles(availableBulkFiles.files, this.chain, this.maxPerFile || availableBulkFiles.headersPerFile);\n for (const bf of selectedFiles) {\n if (!bf.fileHash) {\n throw new sdk_1.WERR_INVALID_PARAMETER(`fileHash`, `valid for all files in json downloaded from ${url}`);\n }\n if (!bf.chain || bf.chain !== this.chain) {\n throw new sdk_1.WERR_INVALID_PARAMETER(`chain`, `\"${this.chain}\" for all files in json downloaded from ${url}`);\n }\n if (!bf.sourceUrl || bf.sourceUrl !== cdnUrl)\n bf.sourceUrl = cdnUrl;\n }\n const rangeBefore = await this.getHeightRange();\n const r = await this.merge(selectedFiles);\n const rangeAfter = await this.getHeightRange();\n let log = 'BulkDataFileManager.updateFromUrl\\n';\n log += ` url: ${url}\\n`;\n log += ` bulk range before: ${rangeBefore}\\n`;\n log += ` bulk range after: ${rangeAfter}\\n`;\n console.log(log);\n }\n async setStorage(storage) {\n return this.lock.withWriteLock(async () => 
this.setStorageNoLock(storage));\n }\n async setStorageNoLock(storage) {\n this.storage = storage;\n // Sync bfds with storage. Two scenarios supported:\n const sfs = await this.storage.getBulkFiles();\n if (sfs.length === 0) {\n // 1. Storage has no files: Update storage to reflect bfds.\n for (const bfd of this.bfds) {\n await this.ensureData(bfd);\n bfd.fileId = await this.storage.insertBulkFile(bfdToInfo(bfd, true));\n }\n }\n else {\n // 2. bfds are a prefix of storage, including last bfd having same firstHeight but possibly fewer headers: Merge storage to bfds.\n const r = await this.mergeNoLock(sfs);\n }\n }\n async deleteBulkFiles() {\n return this.lock.withWriteLock(async () => this.deleteBulkFilesNoLock());\n }\n deleteBulkFilesNoLock() {\n this.bfds = [];\n this.fileHashToIndex = {};\n if (this.fromKnownSourceUrl) {\n const files = selectBulkHeaderFiles(validBulkHeaderFilesByFileHash_1.validBulkHeaderFiles.filter(f => f.sourceUrl === this.fromKnownSourceUrl), this.chain, this.maxPerFile);\n for (const file of files) {\n this.add({ ...file, fileHash: file.fileHash, mru: Date.now() });\n }\n }\n }\n async merge(files) {\n return this.lock.withWriteLock(async () => this.mergeNoLock(files));\n }\n async mergeNoLock(files) {\n const r = { inserted: [], updated: [], unchanged: [], dropped: [] };\n for (const file of files) {\n const hbf = this.getBfdForHeight(file.firstHeight);\n if (hbf && file.fileId)\n hbf.fileId = file.fileId; // Always update fileId if provided\n const lbf = this.getLastBfd();\n if (hbf &&\n hbf.fileHash === file.fileHash &&\n hbf.count === file.count &&\n hbf.lastHash === file.lastHash &&\n hbf.lastChainWork === file.lastChainWork) {\n // We already have an identical matching file...\n r.unchanged.push(bfdToInfo(hbf));\n continue;\n }\n const vbf = await this.validateFileInfo(file);\n if (hbf) {\n // We have a matching file by firstHeight but count and fileHash differ\n await this.update(vbf, hbf, r);\n }\n else if (isBdfIncremental(vbf) 
&& lbf && isBdfIncremental(lbf)) {\n await this.mergeIncremental(lbf, vbf, r);\n }\n else {\n const added = this.add(vbf);\n r.inserted.push(added);\n if (this.storage) {\n vbf.fileId = await this.storage.insertBulkFile(added);\n }\n }\n }\n console.log(`BulkFileDataManager.merge:\\n${this.toLogString(r)}\\n`);\n return r;\n }\n async mergeIncremental(lbf, vbf, r) {\n lbf.count += vbf.count;\n lbf.lastHash = vbf.lastHash;\n lbf.lastChainWork = vbf.lastChainWork;\n await this.ensureData(lbf);\n const newData = new Uint8Array(lbf.data.length + vbf.data.length);\n newData.set(lbf.data);\n newData.set(vbf.data, lbf.data.length);\n lbf.data = newData;\n delete this.fileHashToIndex[lbf.fileHash];\n lbf.fileHash = (0, utilityHelpers_noBuffer_1.asString)(sdk_2.Hash.sha256((0, utilityHelpers_noBuffer_1.asArray)(newData)), 'base64');\n this.fileHashToIndex[lbf.fileHash] = this.bfds.length - 1;\n lbf.mru = Date.now();\n const lbfInfo = bfdToInfo(lbf, true);\n r.updated.push(lbfInfo);\n if (this.storage && lbf.fileId) {\n await this.storage.updateBulkFile(lbf.fileId, lbfInfo);\n }\n }\n toLogString(what) {\n let log = '';\n if (!what) {\n log += this.toLogString(this.bfds);\n }\n else if (what['updated']) {\n what = what;\n for (const { category, bfds } of [\n { category: 'unchanged', bfds: what.unchanged },\n { category: 'dropped', bfds: what.dropped },\n { category: 'updated', bfds: what.updated },\n { category: 'inserted', bfds: what.inserted }\n ]) {\n if (bfds.length > 0) {\n log += ` ${category}:\\n`;\n log += this.toLogString(bfds);\n }\n }\n }\n else if (Array.isArray(what)) {\n what = what;\n let i = -1;\n for (const bfd of what) {\n i++;\n log += ` ${i}: ${bfd.fileName} fileId=${bfd.fileId} ${bfd.firstHeight}-${bfd.firstHeight + bfd.count - 1}\\n`;\n }\n }\n return log;\n }\n async mergeIncrementalBlockHeaders(newBulkHeaders, incrementalChainWork) {\n if (newBulkHeaders.length === 0)\n return;\n return this.lock.withWriteLock(async () => {\n const lbf = 
this.getLastFileNoLock();\n const nextHeight = lbf ? lbf.firstHeight + lbf.count : 0;\n if (nextHeight > 0 && newBulkHeaders.length > 0 && newBulkHeaders[0].height < nextHeight) {\n // Don't modify the incoming array...\n newBulkHeaders = [...newBulkHeaders];\n // If we have more headers than we need, drop the incoming headers.\n while (newBulkHeaders.length > 0 && newBulkHeaders[0].height < nextHeight) {\n const h = newBulkHeaders.shift();\n if (h && incrementalChainWork) {\n incrementalChainWork = (0, blockHeaderUtilities_1.subWork)(incrementalChainWork, (0, blockHeaderUtilities_1.convertBitsToWork)(h.bits));\n }\n }\n }\n if (newBulkHeaders.length === 0)\n return;\n if (!lbf || nextHeight !== newBulkHeaders[0].height)\n throw new sdk_1.WERR_INVALID_PARAMETER('newBulkHeaders', 'an extension of existing bulk headers');\n if (!lbf.lastHash)\n throw new sdk_1.WERR_INTERNAL(`lastHash is not defined for the last bulk file ${lbf.fileName}`);\n const fbh = newBulkHeaders[0];\n const lbh = newBulkHeaders.slice(-1)[0];\n let lastChainWork = lbf.lastChainWork;\n if (incrementalChainWork) {\n lastChainWork = (0, blockHeaderUtilities_1.addWork)(incrementalChainWork, lastChainWork);\n }\n else {\n // If lastChainWork is not provided, calculate it from the last file with basic validation.\n let lastHeight = lbf.firstHeight + lbf.count - 1;\n let lastHash = lbf.lastHash;\n for (const h of newBulkHeaders) {\n if (h.height !== lastHeight + 1 || h.previousHash !== lastHash) {\n throw new sdk_1.WERR_INVALID_PARAMETER('headers', `an extension of existing bulk headers, header with height ${h.height} is non-sequential`);\n }\n lastChainWork = (0, blockHeaderUtilities_1.addWork)(lastChainWork, (0, blockHeaderUtilities_1.convertBitsToWork)(h.bits));\n lastHeight = h.height;\n lastHash = h.hash;\n }\n }\n const data = (0, blockHeaderUtilities_1.serializeBaseBlockHeaders)(newBulkHeaders);\n const fileHash = (0, utilityHelpers_noBuffer_1.asString)(sdk_2.Hash.sha256((0, 
utilityHelpers_noBuffer_1.asArray)(data)), 'base64');\n const bf = {\n fileId: undefined,\n chain: this.chain,\n sourceUrl: undefined,\n fileName: 'incremental',\n firstHeight: fbh.height,\n count: newBulkHeaders.length,\n prevChainWork: lbf.lastChainWork,\n lastChainWork,\n prevHash: lbf.lastHash,\n lastHash: lbh.hash,\n fileHash,\n data\n };\n await this.mergeNoLock([bf]);\n });\n }\n async getBulkFiles(keepData) {\n return this.lock.withReadLock(async () => {\n return this.bfds.map(bfd => bfdToInfo(bfd, keepData));\n });\n }\n async getHeightRange() {\n return this.lock.withReadLock(async () => {\n if (this.bfds.length === 0)\n return HeightRange_1.HeightRange.empty;\n const first = this.bfds[0];\n const last = this.bfds[this.bfds.length - 1];\n return new HeightRange_1.HeightRange(first.firstHeight, last.firstHeight + last.count - 1);\n });\n }\n async getDataFromFile(file, offset, length) {\n const bfd = await this.getBfdForHeight(file.firstHeight);\n if (!bfd || bfd.count < file.count)\n throw new sdk_1.WERR_INVALID_PARAMETER('file', `a match for ${file.firstHeight}, ${file.count} in the BulkFileDataManager.`);\n return this.lock.withReadLock(async () => this.getDataFromFileNoLock(bfd, offset, length));\n }\n async getDataFromFileNoLock(bfd, offset, length) {\n const fileLength = bfd.count * 80;\n offset = offset || 0;\n if (offset > fileLength - 1)\n return undefined;\n length = length || bfd.count * 80 - offset;\n length = Math.min(length, fileLength - offset);\n let data;\n if (bfd.data) {\n data = bfd.data.slice(offset, offset + length);\n }\n else if (bfd.fileId && this.storage) {\n data = await this.storage.getBulkFileData(bfd.fileId, offset, length);\n }\n if (!data) {\n await this.ensureData(bfd);\n if (bfd.data)\n data = bfd.data.slice(offset, offset + length);\n }\n if (!data)\n return undefined;\n return data;\n }\n async findHeaderForHeightOrUndefined(height) {\n return this.lock.withReadLock(async () => {\n if (!Number.isInteger(height) || height 
< 0)\n throw new sdk_1.WERR_INVALID_PARAMETER('height', `a non-negative integer (${height}).`);\n const file = this.bfds.find(f => f.firstHeight <= height && f.firstHeight + f.count > height);\n if (!file)\n return undefined;\n const offset = (height - file.firstHeight) * 80;\n const data = await this.getDataFromFileNoLock(file, offset, 80);\n if (!data)\n return undefined;\n const header = (0, blockHeaderUtilities_1.deserializeBlockHeader)(data, 0, height);\n return header;\n });\n }\n async getFileForHeight(height) {\n return this.lock.withReadLock(async () => {\n const bfd = this.getBfdForHeight(height);\n if (!bfd)\n return undefined;\n return bfdToInfo(bfd);\n });\n }\n getBfdForHeight(height) {\n if (!Number.isInteger(height) || height < 0)\n throw new sdk_1.WERR_INVALID_PARAMETER('height', `a non-negative integer (${height}).`);\n const file = this.bfds.find(f => f.firstHeight <= height && f.firstHeight + f.count > height);\n return file;\n }\n getLastBfd(fromEnd = 1) {\n if (this.bfds.length < fromEnd)\n return undefined;\n const bfd = this.bfds[this.bfds.length - fromEnd];\n return bfd;\n }\n async getLastFile(fromEnd = 1) {\n return this.lock.withReadLock(async () => this.getLastFileNoLock(fromEnd));\n }\n getLastFileNoLock(fromEnd = 1) {\n const bfd = this.getLastBfd(fromEnd);\n if (!bfd)\n return undefined;\n return bfdToInfo(bfd);\n }\n async getDataByFileHash(fileHash) {\n const index = this.fileHashToIndex[fileHash];\n if (index === undefined)\n throw new sdk_1.WERR_INVALID_PARAMETER('fileHash', `known to the BulkFileDataManager. ${fileHash} is unknown.`);\n const bfd = this.bfds[index];\n const data = await this.ensureData(bfd);\n return data;\n }\n async getDataByFileId(fileId) {\n const bfd = this.bfds.find(f => f.fileId === fileId);\n if (bfd === undefined)\n throw new sdk_1.WERR_INVALID_PARAMETER('fileId', `known to the BulkFileDataManager. 
${fileId} is unknown.`);\n const data = await this.ensureData(bfd);\n return data;\n }\n async validateFileInfo(file) {\n var _a;\n if (file.chain !== this.chain)\n throw new sdk_1.WERR_INVALID_PARAMETER('chain', `${this.chain}`);\n if (file.count <= 0)\n throw new sdk_1.WERR_INVALID_PARAMETER('bf.count', `expected count to be greater than 0, but got ${file.count}`);\n if (file.count > this.maxPerFile && file.fileName !== 'incremental')\n throw new sdk_1.WERR_INVALID_PARAMETER('count', `less than or equal to maxPerFile ${this.maxPerFile}`);\n if (!file.fileHash)\n throw new sdk_1.WERR_INVALID_PARAMETER('fileHash', `defined`);\n if (!file.sourceUrl && !file.fileId && !file.data)\n throw new sdk_1.WERR_INVALID_PARAMETER('data', `defined when sourceUrl and fileId are undefined`);\n let bfd = {\n ...file,\n fileHash: file.fileHash,\n mru: Date.now()\n };\n if (!bfd.validated) {\n await this.ensureData(bfd);\n if (!bfd.data || bfd.data.length !== bfd.count * 80)\n throw new sdk_1.WERR_INVALID_PARAMETER('file.data', `bulk file ${bfd.fileName} data length ${(_a = bfd.data) === null || _a === void 0 ? void 0 : _a.length} does not match expected count ${bfd.count}`);\n bfd.fileHash = (0, utilityHelpers_noBuffer_1.asString)(sdk_2.Hash.sha256((0, utilityHelpers_noBuffer_1.asArray)(bfd.data)), 'base64');\n if (file.fileHash && file.fileHash !== bfd.fileHash)\n throw new sdk_1.WERR_INVALID_PARAMETER('file.fileHash', `expected ${file.fileHash} but got ${bfd.fileHash}`);\n if (!(0, validBulkHeaderFilesByFileHash_1.isKnownValidBulkHeaderFile)(bfd)) {\n const pbf = bfd.firstHeight > 0 ? this.getBfdForHeight(bfd.firstHeight - 1) : undefined;\n const prevHash = pbf ? pbf.lastHash : '00'.repeat(32);\n const prevChainWork = pbf ? 
pbf.lastChainWork : '00'.repeat(32);\n const { lastHeaderHash, lastChainWork } = (0, blockHeaderUtilities_1.validateBufferOfHeaders)(bfd.data, prevHash, 0, undefined, prevChainWork);\n if (bfd.lastHash && bfd.lastHash !== lastHeaderHash)\n throw new sdk_1.WERR_INVALID_PARAMETER('file.lastHash', `expected ${bfd.lastHash} but got ${lastHeaderHash}`);\n if (bfd.lastChainWork && bfd.lastChainWork !== lastChainWork)\n throw new sdk_1.WERR_INVALID_PARAMETER('file.lastChainWork', `expected ${bfd.lastChainWork} but got ${lastChainWork}`);\n bfd.lastHash = lastHeaderHash;\n bfd.lastChainWork = lastChainWork;\n if (bfd.firstHeight === 0) {\n (0, blockHeaderUtilities_1.validateGenesisHeader)(bfd.data, bfd.chain);\n }\n }\n bfd.validated = true;\n }\n return bfd;\n }\n async ReValidate() {\n return this.lock.withReadLock(async () => this.ReValidateNoLock());\n }\n async ReValidateNoLock() {\n for (const file of this.bfds) {\n await this.ensureData(file);\n file.validated = false; // Reset validation to re-validate on next access\n const bfd = await this.validateFileInfo(file);\n if (!bfd.validated)\n throw new sdk_1.WERR_INTERNAL(`BulkFileDataManager.ReValidate failed for file ${bfd.fileName}`);\n file.validated = true;\n }\n }\n validateBfdForAdd(bfd) {\n if (this.bfds.length === 0 && bfd.firstHeight !== 0)\n throw new sdk_1.WERR_INVALID_PARAMETER('firstHeight', `0 for the first file`);\n if (this.bfds.length > 0) {\n const last = this.bfds[this.bfds.length - 1];\n if (bfd.firstHeight !== last.firstHeight + last.count)\n throw new sdk_1.WERR_INVALID_PARAMETER('firstHeight', `the last file's firstHeight + count`);\n if (bfd.prevHash !== last.lastHash || bfd.prevChainWork !== last.lastChainWork)\n throw new sdk_1.WERR_INVALID_PARAMETER('prevHash/prevChainWork', `the last file's lastHash/lastChainWork`);\n }\n }\n add(bfd) {\n this.validateBfdForAdd(bfd);\n const index = this.bfds.length;\n this.bfds.push(bfd);\n this.fileHashToIndex[bfd.fileHash] = index;\n 
this.ensureMaxRetained();\n return bfdToInfo(bfd, true);\n }\n replaceBfdAtIndex(index, update) {\n const oldBfd = this.bfds[index];\n delete this.fileHashToIndex[oldBfd.fileHash];\n this.bfds[index] = update;\n this.fileHashToIndex[update.fileHash] = index;\n }\n /**\n * Updating an existing file occurs in two specific contexts:\n *\n * 1. CDN Update: CDN files of a specific `maxPerFile` series typically ends in a partial file\n * which may periodically add more headers until the next file is started.\n * If the CDN update is the second to last file (followed by an incremental file),\n * then the incremental file is updated or deleted and also returned as the result (with a count of zero if deleted).\n *\n * 2. Incremental Update: The last bulk file is almost always an \"incremental\" file\n * which is not limited by \"maxPerFile\" and holds all non-CDN bulk headers.\n * If is updated with new bulk headers which come either from non CDN ingestors or from live header migration to bulk.\n *\n * Updating preserves the following properties:\n *\n * - Any existing headers following this update are preserved and must form an unbroken chain.\n * - There can be at most one incremental file and it must be the last file.\n * - The update start conditions (height, prevHash, prevChainWork) must match an existing file which may be either CDN or internal.\n * - The update fileId must match, it may be undefind.\n * - The fileName does not need to match.\n * - The incremental file must always have fileName \"incremental\" and sourceUrl must be undefined.\n * - The update count must be greater than 0.\n * - The update count must be greater than current count for CDN to CDN update.\n *\n * @param update new validated BulkFileData to update.\n * @param hbf corresponding existing BulkFileData to update.\n */\n async update(update, hbf, r) {\n if (!hbf ||\n hbf.firstHeight !== update.firstHeight ||\n hbf.prevChainWork !== update.prevChainWork ||\n hbf.prevHash !== update.prevHash)\n 
throw new sdk_1.WERR_INVALID_PARAMETER('file', `an existing file by height, prevChainWork and prevHash`);\n if (isBdfCdn(update) === isBdfCdn(hbf) && update.count <= hbf.count)\n throw new sdk_1.WERR_INVALID_PARAMETER('file.count', `greater than the current count ${hbf.count}`);\n const lbf = this.getLastBfd();\n let index = this.bfds.length - 1;\n let truncate = undefined;\n let replaced = undefined;\n let drop = undefined;\n if (hbf.firstHeight === lbf.firstHeight) {\n // If the update is for the last file, there are three cases:\n if (isBdfIncremental(update)) {\n // 1. Incremental file may only be extended with more incremental headers.\n if (!isBdfIncremental(lbf))\n throw new sdk_1.WERR_INVALID_PARAMETER('file', `an incremental file to update an existing incremental file`);\n }\n else {\n // The update is a CDN bulk file.\n if (isBdfCdn(lbf)) {\n // 2. An updated CDN file replaces a partial CDN file.\n if (update.count <= lbf.count)\n throw new sdk_1.WERR_INVALID_PARAMETER('update.count', `CDN update must have more headers. ${update.count} <= ${lbf.count}`);\n }\n else {\n // 3. A new CDN file replaces some or all of current incremental file.\n // Retain extra incremental headers if any.\n if (update.count < lbf.count) {\n // The new CDN partially replaces the last incremental file, prepare to shift work and re-add it.\n await this.ensureData(lbf);\n truncate = lbf;\n }\n }\n }\n }\n else {\n // If the update is NOT for the last file, then it MUST be for the second to last file which MUST be a CDN file:\n // - it must be a CDN file update with more headers than the current CDN file.\n // - the last file must be an incremental file which is updated or deleted. 
The updated (or deleted) last file is returned.\n const lbf2 = this.getLastBfd(2);\n if (!lbf2 || hbf.firstHeight !== lbf2.firstHeight)\n throw new sdk_1.WERR_INVALID_PARAMETER('file', `an update to last or second to last file`);\n if (!isBdfCdn(update) || !isBdfCdn(lbf2) || update.count <= lbf2.count)\n throw new sdk_1.WERR_INVALID_PARAMETER('file', `a CDN file update with more headers than the current CDN file`);\n if (!isBdfIncremental(lbf))\n throw new sdk_1.WERR_INVALID_PARAMETER('file', `a CDN file update followed by an incremental file`);\n if (!update.fileId)\n update.fileId = lbf2.fileId; // Update fileId if not provided\n if (update.count >= lbf2.count + lbf.count) {\n // The current last file is fully replaced by the CDN update.\n drop = lbf;\n }\n else {\n // If the update doesn't fully replace the last incremental file, make sure data is available to be truncated.\n await this.ensureData(lbf);\n truncate = lbf;\n // The existing second to last file is fully replaced by the update.\n replaced = lbf2;\n }\n index = index - 1; // The update replaces the second to last file.\n }\n // In all cases the bulk file at the current fileId if any is updated.\n this.replaceBfdAtIndex(index, update);\n if (truncate) {\n // If there is a bulk file to be truncated, it becomes the new (reduced) last file.\n await this.shiftWork(update, truncate, replaced);\n }\n if (drop) {\n this.dropLastBulkFile(drop);\n }\n const updateInfo = bfdToInfo(update, true);\n const truncateInfo = truncate ? 
bfdToInfo(truncate, true) : undefined;\n if (this.storage) {\n // Keep storage in sync.\n if (update.fileId) {\n await this.storage.updateBulkFile(update.fileId, updateInfo);\n }\n if (truncate && truncateInfo) {\n if (replaced) {\n await this.storage.updateBulkFile(truncate.fileId, truncateInfo);\n }\n else {\n truncateInfo.fileId = undefined; // Make sure truncate is a new file.\n truncate.fileId = await this.storage.insertBulkFile(truncateInfo);\n }\n }\n if (drop && drop.fileId) {\n await this.storage.deleteBulkFile(drop.fileId);\n }\n }\n if (r) {\n // Update results for logging...\n r.updated.push(updateInfo);\n if (truncateInfo) {\n if (replaced) {\n r.updated.push(truncateInfo);\n }\n else {\n r.inserted.push(truncateInfo);\n }\n }\n if (drop) {\n r.dropped.push(bfdToInfo(drop));\n }\n }\n this.ensureMaxRetained();\n }\n dropLastBulkFile(lbf) {\n delete this.fileHashToIndex[lbf.fileHash];\n const index = this.bfds.indexOf(lbf);\n if (index !== this.bfds.length - 1)\n throw new sdk_1.WERR_INTERNAL(`dropLastBulkFile requires lbf is the current last file.`);\n this.bfds.pop();\n }\n /**\n * Remove work (and headers) from `truncate` that now exists in `update`.\n * There are two scenarios:\n * 1. `replaced` is undefined: update is a CDN file that splits an incremental file that must be truncated.\n * 2. 
`replaced` is valid: update is a CDN update that replaced an existing CDN file and splits an incremental file that must be truncated.\n * @param update the new CDN update file.\n * @param truncate the incremental file to be truncated (losing work which now exists in `update`).\n * @param replaced the existing CDN file that was replaced by `update` (if any).\n */\n async shiftWork(update, truncate, replaced) {\n var _a;\n const updateIndex = this.fileHashToIndex[update.fileHash];\n // replaced will be valid if the update replaced it and it must become the new last file.\n // truncateIndex will be updateIndex + 1 if the existing last file is being truncated and update is second to last.\n const truncateIndex = this.fileHashToIndex[truncate.fileHash];\n if (truncateIndex !== undefined && truncateIndex !== updateIndex + 1)\n throw new sdk_1.WERR_INTERNAL(`shiftWork requires update to have replaced truncate or truncate to follow update`);\n if (truncateIndex !== undefined && !replaced)\n throw new sdk_1.WERR_INTERNAL(`shiftWork requires valid replaced when update hasn't replaced truncate`);\n truncate.prevHash = update.lastHash;\n truncate.prevChainWork = update.lastChainWork;\n // truncate.lastChainWork, truncate.lastHash remain unchanged\n let count = update.count;\n if (replaced) {\n count -= replaced.count;\n }\n else {\n // The truncated file is itself being replaced by the update and must be inserted as a new file.\n truncate.fileId = undefined;\n this.bfds.push(truncate); // Add the truncated file as a new entry.\n }\n truncate.count -= count;\n truncate.firstHeight += count;\n truncate.data = (_a = truncate.data) === null || _a === void 0 ? 
void 0 : _a.slice(count * 80);\n delete this.fileHashToIndex[truncate.fileHash];\n truncate.fileHash = (0, utilityHelpers_noBuffer_1.asString)(sdk_2.Hash.sha256((0, utilityHelpers_noBuffer_1.asArray)(truncate.data)), 'base64');\n this.fileHashToIndex[truncate.fileHash] = updateIndex + 1;\n }\n /**\n *\n * @param bfd\n * @returns\n */\n async ensureData(bfd) {\n if (bfd.data)\n return bfd.data;\n if (this.storage && bfd.fileId) {\n bfd.data = await this.storage.getBulkFileData(bfd.fileId);\n if (!bfd.data)\n throw new sdk_1.WERR_INVALID_PARAMETER('fileId', `valid, data not found for fileId ${bfd.fileId}`);\n }\n if (!bfd.data && this.fetch && bfd.sourceUrl) {\n // TODO - restore this change\n const url = this.fetch.pathJoin(bfd.sourceUrl, bfd.fileName);\n //const url = this.fetch.pathJoin('http://localhost:8842/blockheaders', bfd.fileName)\n try {\n bfd.data = await this.fetch.download(url);\n }\n catch (err) {\n bfd.data = await this.fetch.download(url);\n }\n if (!bfd.data)\n throw new sdk_1.WERR_INVALID_PARAMETER('sourceUrl', `data not found for sourceUrl ${url}`);\n }\n if (!bfd.data)\n throw new sdk_1.WERR_INVALID_PARAMETER('data', `defined. 
Unable to retrieve data for ${bfd.fileName}`);\n bfd.mru = Date.now();\n // Validate retrieved data.\n const fileHash = (0, utilityHelpers_noBuffer_1.asString)(sdk_2.Hash.sha256((0, utilityHelpers_noBuffer_1.asArray)(bfd.data)), 'base64');\n if (fileHash !== bfd.fileHash)\n throw new sdk_1.WERR_INVALID_PARAMETER('fileHash', `a match for retrieved data for ${bfd.fileName}`);\n this.ensureMaxRetained();\n return bfd.data;\n }\n ensureMaxRetained() {\n if (this.maxRetained === undefined)\n return;\n let withData = this.bfds.filter(bfd => bfd.data && (bfd.fileId || bfd.sourceUrl));\n let countToRelease = withData.length - this.maxRetained;\n if (countToRelease <= 0)\n return;\n const sorted = withData.sort((a, b) => a.mru - b.mru);\n while (countToRelease-- > 0 && sorted.length > 0) {\n const oldest = sorted.shift();\n // Release the least recently used data\n oldest.data = undefined; // Release the data\n }\n }\n async exportHeadersToFs(toFs, toHeadersPerFile, toFolder, sourceUrl, maxHeight) {\n const chain = this.chain;\n const toFileName = (i) => `${chain}Net_${i}.headers`;\n const toPath = (i) => toFs.pathJoin(toFolder, toFileName(i));\n const toJsonPath = () => toFs.pathJoin(toFolder, `${chain}NetBlockHeaders.json`);\n const toBulkFiles = {\n rootFolder: sourceUrl || toFolder,\n jsonFilename: `${chain}NetBlockHeaders.json`,\n headersPerFile: toHeadersPerFile,\n files: []\n };\n let range = await this.getHeightRange();\n if (maxHeight)\n range = range.intersect(new HeightRange_1.HeightRange(0, maxHeight));\n const reader = await this.createReader(range, toHeadersPerFile * 80);\n let firstHeight = 0;\n let lastHeaderHash = '00'.repeat(32);\n let lastChainWork = '00'.repeat(32);\n let i = -1;\n for (;;) {\n i++;\n const data = await reader.read();\n if (!data || data.length === 0) {\n break;\n }\n const last = (0, blockHeaderUtilities_1.validateBufferOfHeaders)(data, lastHeaderHash, 0, undefined, lastChainWork);\n await toFs.writeFile(toPath(i), data);\n const 
fileHash = (0, utilityHelpers_noBuffer_1.asString)(sdk_2.Hash.sha256((0, utilityHelpers_noBuffer_1.asArray)(data)), 'base64');\n const file = {\n chain,\n count: data.length / 80,\n fileHash,\n fileName: toFileName(i),\n firstHeight,\n lastChainWork: last.lastChainWork,\n lastHash: last.lastHeaderHash,\n prevChainWork: lastChainWork,\n prevHash: lastHeaderHash,\n sourceUrl\n };\n toBulkFiles.files.push(file);\n firstHeight += file.count;\n lastHeaderHash = file.lastHash;\n lastChainWork = file.lastChainWork;\n }\n await toFs.writeFile(toJsonPath(), (0, utilityHelpers_noBuffer_1.asUint8Array)(JSON.stringify(toBulkFiles), 'utf8'));\n }\n}\nexports.BulkFileDataManager = BulkFileDataManager;\nfunction selectBulkHeaderFiles(files, chain, maxPerFile) {\n const r = [];\n let height = 0;\n for (;;) {\n const choices = files.filter(f => f.firstHeight === height && f.count <= maxPerFile && f.chain === chain);\n // Pick the file with the maximum count\n const choice = choices.reduce((a, b) => (a.count > b.count ? a : b), choices[0]);\n if (!choice)\n break; // no more files to select\n r.push(choice);\n height += choice.count;\n }\n return r;\n}\nfunction isBdfIncremental(bfd) {\n return bfd.fileName === 'incremental' && !bfd.sourceUrl;\n}\nfunction isBdfCdn(bfd) {\n return !isBdfIncremental(bfd);\n}\nfunction bfdToInfo(bfd, keepData) {\n return {\n chain: bfd.chain,\n fileHash: bfd.fileHash,\n fileName: bfd.fileName,\n sourceUrl: bfd.sourceUrl,\n fileId: bfd.fileId,\n count: bfd.count,\n prevChainWork: bfd.prevChainWork,\n lastChainWork: bfd.lastChainWork,\n firstHeight: bfd.firstHeight,\n prevHash: bfd.prevHash,\n lastHash: bfd.lastHash,\n validated: bfd.validated || false,\n data: keepData ? 
bfd.data : undefined\n };\n}\nclass BulkFileDataReader {\n constructor(manager, range, maxBufferSize) {\n this.manager = manager;\n this.range = range;\n this.maxBufferSize = maxBufferSize;\n this.nextHeight = range.minHeight;\n }\n /**\n * Returns the Buffer of block headers from the given `file` for the given `range`.\n * If `range` is undefined, the file's full height range is read.\n * The returned Buffer will only contain headers in `file` and in `range`\n * @param file\n * @param range\n */\n async readBufferFromFile(file, range) {\n // Constrain the range to the file's contents...\n let fileRange = new HeightRange_1.HeightRange(file.firstHeight, file.firstHeight + file.count - 1);\n if (range)\n fileRange = fileRange.intersect(range);\n if (fileRange.isEmpty)\n return undefined;\n const offset = (fileRange.minHeight - file.firstHeight) * 80;\n const length = fileRange.length * 80;\n return await this.manager.getDataFromFile(file, offset, length);\n }\n /**\n * @returns an array containing the next `maxBufferSize` bytes of headers from the files.\n */\n async read() {\n if (this.nextHeight === undefined || !this.range || this.range.isEmpty || this.nextHeight > this.range.maxHeight)\n return undefined;\n let lastHeight = this.nextHeight + this.maxBufferSize / 80 - 1;\n lastHeight = Math.min(lastHeight, this.range.maxHeight);\n let file = await this.manager.getFileForHeight(this.nextHeight);\n if (!file)\n throw new sdk_1.WERR_INTERNAL(`logic error`);\n const readRange = new HeightRange_1.HeightRange(this.nextHeight, lastHeight);\n let buffers = new Uint8Array(readRange.length * 80);\n let offset = 0;\n while (file) {\n const buffer = await this.readBufferFromFile(file, readRange);\n if (!buffer)\n break;\n buffers.set(buffer, offset);\n offset += buffer.length;\n file = await this.manager.getFileForHeight(file.firstHeight + file.count);\n }\n if (!buffers.length || offset !== readRange.length * 80)\n return undefined;\n this.nextHeight = lastHeight + 1;\n 
return buffers;\n }\n}\nexports.BulkFileDataReader = BulkFileDataReader;\n//# sourceMappingURL=BulkFileDataManager.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/BulkFileDataManager.js?\n}"); - -/***/ }), - -/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/BulkFilesReader.js": -/*!*******************************************************************************************************************!*\ - !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/BulkFilesReader.js ***! - \*******************************************************************************************************************/ -/***/ ((__unused_webpack_module, exports, __webpack_require__) => { - -"use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.BulkFilesReaderStorage = exports.BulkFilesReaderFs = exports.BulkFilesReader = void 0;\nconst HeightRange_1 = __webpack_require__(/*! ./HeightRange */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/HeightRange.js\");\nconst blockHeaderUtilities_1 = __webpack_require__(/*! ./blockHeaderUtilities */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/blockHeaderUtilities.js\");\nconst utilityHelpers_noBuffer_1 = __webpack_require__(/*! ../../../../utility/utilityHelpers.noBuffer */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.noBuffer.js\");\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst sdk_2 = __webpack_require__(/*! ../../../../sdk */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/index.js\");\nconst BulkHeaderFile_1 = __webpack_require__(/*! 
./BulkHeaderFile */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/BulkHeaderFile.js\");\n/**\n * Breaks available bulk headers stored in multiple files into a sequence of buffers with\n * limited maximum size.\n */\nclass BulkFilesReader {\n constructor(files, range, maxBufferSize) {\n /**\n * Maximum buffer size returned from `read()` in bytes.\n */\n this.maxBufferSize = 400 * 80;\n this.files = files;\n this.range = HeightRange_1.HeightRange.empty;\n this.setRange(range);\n this.setMaxBufferSize(maxBufferSize || 400 * 80);\n }\n setRange(range) {\n this.range = this.heightRange;\n if (range) {\n this.range = this.range.intersect(range);\n }\n this.nextHeight = this.range.isEmpty ? undefined : this.range.minHeight;\n }\n setMaxBufferSize(maxBufferSize) {\n this.maxBufferSize = maxBufferSize || 400 * 80;\n if (this.maxBufferSize % 80 !== 0)\n throw new Error('maxBufferSize must be a multiple of 80 bytes.');\n }\n getLastFile() {\n return this.files[this.files.length - 1];\n }\n get heightRange() {\n const last = this.getLastFile();\n if (!last || !this.files)\n return HeightRange_1.HeightRange.empty;\n const first = this.files[0];\n return new HeightRange_1.HeightRange(first.firstHeight, last.firstHeight + last.count - 1);\n }\n getFileForHeight(height) {\n if (!this.files)\n return undefined;\n return this.files.find(file => file.firstHeight <= height && file.firstHeight + file.count > height);\n }\n async readBufferForHeightOrUndefined(height) {\n const file = this.getFileForHeight(height);\n if (!file)\n return undefined;\n const buffer = await file.readDataFromFile(80, (height - file.firstHeight) * 80);\n return buffer;\n }\n async readBufferForHeight(height) {\n const header = await this.readBufferForHeightOrUndefined(height);\n if (!header)\n throw new Error(`Failed to read bulk header buffer at height=${height}`);\n return header;\n }\n async readHeaderForHeight(height) {\n const buffer = await 
this.readBufferForHeight(height);\n return (0, blockHeaderUtilities_1.deserializeBaseBlockHeader)(buffer, 0);\n }\n async readHeaderForHeightOrUndefined(height) {\n const buffer = await this.readBufferForHeightOrUndefined(height);\n return buffer ? (0, blockHeaderUtilities_1.deserializeBaseBlockHeader)(buffer, 0) : undefined;\n }\n /**\n * Returns the Buffer of block headers from the given `file` for the given `range`.\n * If `range` is undefined, the file's full height range is read.\n * The returned Buffer will only contain headers in `file` and in `range`\n * @param file\n * @param range\n */\n async readBufferFromFile(file, range) {\n // Constrain the range to the file's contents...\n let fileRange = file.heightRange;\n if (range)\n fileRange = fileRange.intersect(range);\n if (fileRange.isEmpty)\n return undefined;\n const position = (fileRange.minHeight - file.firstHeight) * 80;\n const length = fileRange.length * 80;\n return await file.readDataFromFile(length, position);\n }\n nextFile(file) {\n if (!file)\n return this.files[0];\n const i = this.files.indexOf(file);\n if (i < 0)\n throw new sdk_2.WERR_INVALID_PARAMETER(`file`, `a valid file from this.files`);\n return this.files[i + 1];\n }\n /**\n * @returns an array containing the next `maxBufferSize` bytes of headers from the files.\n */\n async read() {\n if (this.nextHeight === undefined || !this.range || this.nextHeight > this.range.maxHeight)\n return undefined;\n let lastHeight = this.nextHeight + this.maxBufferSize / 80 - 1;\n lastHeight = Math.min(lastHeight, this.range.maxHeight);\n let file = this.getFileForHeight(this.nextHeight);\n if (!file)\n throw new sdk_2.WERR_INTERNAL(`logic error`);\n const readRange = new HeightRange_1.HeightRange(this.nextHeight, lastHeight);\n let buffers = new Uint8Array(readRange.length * 80);\n let offset = 0;\n while (file) {\n const buffer = await this.readBufferFromFile(file, readRange);\n if (!buffer)\n break;\n buffers.set(buffer, offset);\n offset += 
buffer.length;\n file = this.nextFile(file);\n }\n if (!buffers.length || offset !== readRange.length * 80)\n return undefined;\n this.nextHeight = lastHeight + 1;\n return buffers;\n }\n /**\n * Reset the reading process and adjust the range to be read to a new subset of what's available...\n * @param range new range for subsequent `read` calls to return.\n * @param maxBufferSize optionally update largest buffer size for `read` to return\n */\n resetRange(range, maxBufferSize) {\n this.setRange(range);\n this.setMaxBufferSize(maxBufferSize || 400 * 80);\n }\n async validateFiles() {\n let lastChainWork = '00'.repeat(32);\n let lastHeaderHash = '00'.repeat(32);\n for (const file of this.files) {\n if (file.prevChainWork !== lastChainWork)\n throw new sdk_2.WERR_INVALID_OPERATION(`prevChainWork mismatch for file ${file.fileName}: expected ${file.prevChainWork}, got ${lastChainWork}`);\n if (file.prevHash !== lastHeaderHash)\n throw new sdk_2.WERR_INVALID_OPERATION(`prevHash mismatch for file ${file.fileName}: expected ${file.prevHash}, got ${lastHeaderHash}`);\n const data = await file.ensureData();\n if (data.length !== file.count * 80)\n throw new sdk_2.WERR_INVALID_OPERATION(`data length mismatch for file ${file.fileName}: expected ${file.count * 80} bytes, got ${data.length} bytes`);\n const fileHash = await file.computeFileHash();\n if (!file.fileHash)\n throw new sdk_2.WERR_INVALID_OPERATION(`fileHash missing for file ${file.fileName}`);\n if (file.fileHash !== fileHash)\n throw new sdk_2.WERR_INVALID_OPERATION(`fileHash mismatch for file ${file.fileName}: expected ${file.fileHash}, got ${fileHash}`);\n ({ lastHeaderHash, lastChainWork } = (0, blockHeaderUtilities_1.validateBufferOfHeaders)(data, lastHeaderHash, 0, file.count, lastChainWork));\n if (file.lastHash !== lastHeaderHash)\n throw new sdk_2.WERR_INVALID_OPERATION(`lastHash mismatch for file ${file.fileName}: expected ${file.lastHash}, got ${lastHeaderHash}`);\n if (file.lastChainWork !== 
lastChainWork)\n throw new sdk_2.WERR_INVALID_OPERATION(`lastChainWork mismatch for file ${file.fileName}: expected ${file.lastChainWork}, got ${lastChainWork}`);\n file.validated = true;\n }\n }\n async exportHeadersToFs(toFs, toHeadersPerFile, toFolder) {\n if (!this.files || this.files.length === 0 || this.files[0].count === 0)\n throw new sdk_2.WERR_INVALID_OPERATION('no headers currently available to export');\n if (!this.files[0].chain)\n throw new sdk_2.WERR_INVALID_OPERATION('chain is not defined for the first file');\n const chain = this.files[0].chain;\n const toFileName = (i) => `${chain}Net_${i}.headers`;\n const toPath = (i) => toFs.pathJoin(toFolder, toFileName(i));\n const toJsonPath = () => toFs.pathJoin(toFolder, `${chain}NetBlockHeaders.json`);\n const toBulkFiles = {\n rootFolder: toFolder,\n jsonFilename: `${chain}NetBlockHeaders.json`,\n headersPerFile: toHeadersPerFile,\n files: []\n };\n const bf0 = this.files[0];\n let firstHeight = bf0.firstHeight;\n let lastHeaderHash = bf0.prevHash;\n let lastChainWork = bf0.prevChainWork;\n const reader = new BulkFilesReader(this.files, this.heightRange, toHeadersPerFile * 80);\n let i = -1;\n for (;;) {\n i++;\n const data = await reader.read();\n if (!data || data.length === 0) {\n break;\n }\n const last = (0, blockHeaderUtilities_1.validateBufferOfHeaders)(data, lastHeaderHash, 0, undefined, lastChainWork);\n await toFs.writeFile(toPath(i), data);\n const fileHash = (0, utilityHelpers_noBuffer_1.asString)(sdk_1.Hash.sha256((0, utilityHelpers_noBuffer_1.asArray)(data)), 'base64');\n const file = {\n chain,\n count: data.length / 80,\n fileHash,\n fileName: toFileName(i),\n firstHeight,\n lastChainWork: last.lastChainWork,\n lastHash: last.lastHeaderHash,\n prevChainWork: lastChainWork,\n prevHash: lastHeaderHash\n };\n toBulkFiles.files.push(file);\n firstHeight += file.count;\n lastHeaderHash = file.lastHash;\n lastChainWork = file.lastChainWork;\n }\n await toFs.writeFile(toJsonPath(), (0, 
utilityHelpers_noBuffer_1.asUint8Array)(JSON.stringify(toBulkFiles), 'utf8'));\n }\n}\nexports.BulkFilesReader = BulkFilesReader;\nclass BulkFilesReaderFs extends BulkFilesReader {\n constructor(fs, files, range, maxBufferSize) {\n super(files, range, maxBufferSize);\n this.fs = fs;\n }\n /**\n * Return a BulkFilesReader configured to access the intersection of `range` and available headers.\n * @param rootFolder\n * @param jsonFilename\n * @param range\n * @returns\n */\n static async fromFs(fs, rootFolder, jsonFilename, range, maxBufferSize) {\n const filesInfo = await this.readJsonFile(fs, rootFolder, jsonFilename);\n const readerFiles = filesInfo.files.map(file => new BulkHeaderFile_1.BulkHeaderFileFs(file, fs, rootFolder));\n return new BulkFilesReaderFs(fs, readerFiles, range, maxBufferSize);\n }\n static async writeEmptyJsonFile(fs, rootFolder, jsonFilename) {\n const json = JSON.stringify({ files: [], rootFolder });\n await fs.writeFile(fs.pathJoin(rootFolder, jsonFilename), (0, utilityHelpers_noBuffer_1.asUint8Array)(json, 'utf8'));\n return json;\n }\n static async readJsonFile(fs, rootFolder, jsonFilename, failToEmptyRange = true) {\n const filePath = (file) => fs.pathJoin(rootFolder, file);\n const jsonPath = filePath(jsonFilename);\n let json;\n try {\n json = (0, utilityHelpers_noBuffer_1.asString)(await fs.readFile(jsonPath), 'utf8');\n }\n catch (uerr) {\n if (!failToEmptyRange)\n throw new sdk_2.WERR_INVALID_PARAMETER(`${rootFolder}/${jsonFilename}`, `a valid, existing JSON file.`);\n json = await this.writeEmptyJsonFile(fs, rootFolder, jsonFilename);\n }\n const readerFiles = JSON.parse(json);\n readerFiles.jsonFilename = jsonFilename;\n readerFiles.rootFolder = rootFolder;\n return readerFiles;\n }\n}\nexports.BulkFilesReaderFs = BulkFilesReaderFs;\nclass BulkFilesReaderStorage extends BulkFilesReader {\n constructor(storage, files, range, maxBufferSize) {\n super(files, range, maxBufferSize);\n }\n static async fromStorage(storage, fetch, range, 
maxBufferSize) {\n const files = await storage.bulkManager.getBulkFiles(true);\n const readerFiles = files.map(file => new BulkHeaderFile_1.BulkHeaderFileStorage(file, storage, fetch));\n return new BulkFilesReaderStorage(storage, readerFiles, range, maxBufferSize);\n }\n}\nexports.BulkFilesReaderStorage = BulkFilesReaderStorage;\n//# sourceMappingURL=BulkFilesReader.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/BulkFilesReader.js?\n}"); - -/***/ }), - -/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/BulkHeaderFile.js": -/*!******************************************************************************************************************!*\ - !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/BulkHeaderFile.js ***! - \******************************************************************************************************************/ -/***/ ((__unused_webpack_module, exports, __webpack_require__) => { - -"use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.BulkHeaderFiles = exports.BulkHeaderFileStorage = exports.BulkHeaderFileFs = exports.BulkHeaderFile = void 0;\nconst HeightRange_1 = __webpack_require__(/*! ./HeightRange */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/HeightRange.js\");\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst WERR_errors_1 = __webpack_require__(/*! ../../../../sdk/WERR_errors */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WERR_errors.js\");\nconst utilityHelpers_noBuffer_1 = __webpack_require__(/*! 
../../../../utility/utilityHelpers.noBuffer */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.noBuffer.js\");\nclass BulkHeaderFile {\n constructor(info) {\n this.chain = info.chain;\n this.count = info.count;\n this.data = info.data;\n this.fileHash = info.fileHash;\n this.fileId = info.fileId;\n this.fileName = info.fileName;\n this.firstHeight = info.firstHeight;\n this.lastChainWork = info.lastChainWork;\n this.lastHash = info.lastHash;\n this.prevChainWork = info.prevChainWork;\n this.prevHash = info.prevHash;\n this.sourceUrl = info.sourceUrl;\n this.validated = info.validated;\n }\n get heightRange() {\n return new HeightRange_1.HeightRange(this.firstHeight, this.firstHeight + this.count - 1);\n }\n async ensureData() {\n if (!this.data)\n throw new WERR_errors_1.WERR_INVALID_OPERATION(`data is undefined and no ensureData() override`);\n return this.data;\n }\n /**\n * Whenever reloading data from a backing store, validated fileHash must be re-verified\n * @returns the sha256 hash of the file's data as base64 string.\n */\n async computeFileHash() {\n if (!this.data)\n throw new WERR_errors_1.WERR_INVALID_OPERATION(`requires defined data`);\n return (0, utilityHelpers_noBuffer_1.asString)(sdk_1.Hash.sha256((0, utilityHelpers_noBuffer_1.asArray)(this.data)), 'base64');\n }\n async releaseData() {\n this.data = undefined;\n }\n toCdnInfo() {\n return {\n count: this.count,\n fileHash: this.fileHash,\n fileName: this.fileName,\n firstHeight: this.firstHeight,\n lastChainWork: this.lastChainWork,\n lastHash: this.lastHash,\n prevChainWork: this.prevChainWork,\n prevHash: this.prevHash\n };\n }\n toStorageInfo() {\n return {\n count: this.count,\n fileHash: this.fileHash,\n fileName: this.fileName,\n firstHeight: this.firstHeight,\n lastChainWork: this.lastChainWork,\n lastHash: this.lastHash,\n prevChainWork: this.prevChainWork,\n prevHash: this.prevHash,\n chain: this.chain,\n validated: this.validated,\n sourceUrl: this.sourceUrl,\n 
fileId: this.fileId\n };\n }\n}\nexports.BulkHeaderFile = BulkHeaderFile;\nclass BulkHeaderFileFs extends BulkHeaderFile {\n constructor(info, fs, rootFolder) {\n super(info);\n this.fs = fs;\n this.rootFolder = rootFolder;\n }\n async readDataFromFile(length, offset) {\n if (this.data) {\n return this.data.slice(offset, offset + length);\n }\n const f = await this.fs.openReadableFile(this.fs.pathJoin(this.rootFolder, this.fileName));\n try {\n const buffer = await f.read(length, offset);\n return buffer;\n }\n finally {\n await f.close();\n }\n }\n async ensureData() {\n if (this.data)\n return this.data;\n this.data = await this.readDataFromFile(this.count * 80, 0);\n if (!this.data)\n throw new WERR_errors_1.WERR_INVALID_OPERATION(`failed to read data for ${this.fileName}`);\n if (this.validated) {\n const hash = await this.computeFileHash();\n if (hash !== this.fileHash)\n throw new WERR_errors_1.WERR_INVALID_OPERATION(`BACKING FILE DATA CORRUPTION: invalid fileHash for ${this.fileName}`);\n }\n return this.data;\n }\n}\nexports.BulkHeaderFileFs = BulkHeaderFileFs;\nclass BulkHeaderFileStorage extends BulkHeaderFile {\n constructor(info, storage, fetch) {\n super(info);\n this.storage = storage;\n this.fetch = fetch;\n }\n async readDataFromFile(length, offset) {\n return (await this.ensureData()).slice(offset, offset + length);\n }\n async ensureData() {\n if (this.data)\n return this.data;\n if (!this.sourceUrl) {\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('sourceUrl', 'defined. 
Or data must be defined.');\n }\n const url = this.fetch.pathJoin(this.sourceUrl, this.fileName);\n this.data = await this.fetch.download(url);\n if (!this.data)\n throw new WERR_errors_1.WERR_INVALID_OPERATION(`failed to download data from ${url}`);\n if (this.validated) {\n const hash = await this.computeFileHash();\n if (hash !== this.fileHash)\n throw new WERR_errors_1.WERR_INVALID_OPERATION(`BACKING DOWNLOAD DATA CORRUPTION: invalid fileHash for ${this.fileName}`);\n }\n return this.data;\n }\n}\nexports.BulkHeaderFileStorage = BulkHeaderFileStorage;\nclass BulkHeaderFiles {\n constructor(rootFolder, jsonFilename, files, headersPerFile) {\n this.rootFolder = rootFolder;\n this.jsonFilename = jsonFilename;\n this.files = files;\n this.headersPerFile = headersPerFile;\n }\n}\nexports.BulkHeaderFiles = BulkHeaderFiles;\n//# sourceMappingURL=BulkHeaderFile.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/BulkHeaderFile.js?\n}"); - -/***/ }), - -/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/ChaintracksFetch.js": -/*!********************************************************************************************************************!*\ - !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/ChaintracksFetch.js ***! - \********************************************************************************************************************/ -/***/ ((__unused_webpack_module, exports, __webpack_require__) => { - -"use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.ChaintracksFetch = void 0;\nconst sdk_1 = __webpack_require__(/*! 
@bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nclass ChaintracksFetch {\n constructor() {\n this.httpClient = (0, sdk_1.defaultHttpClient)();\n }\n async download(url) {\n const response = await fetch(url, {\n method: 'GET',\n headers: {\n 'Content-Type': 'application/octet-stream'\n }\n });\n if (!response.ok) {\n throw new Error(`Failed to download from ${url}: ${response.statusText}`);\n }\n const data = await response.arrayBuffer();\n return new Uint8Array(data);\n }\n async fetchJson(url) {\n const requestJsonOptions = {\n method: 'GET',\n headers: {\n Accept: 'application/json'\n }\n };\n const response = await fetch(url, requestJsonOptions);\n if (!response.ok) {\n throw new Error(`Failed to fetch JSON from ${url}: ${response.statusText}`);\n }\n const json = (await response.json());\n return json;\n }\n pathJoin(baseUrl, subpath) {\n // Ensure the subpath doesn't start with a slash to avoid issues\n const cleanSubpath = subpath.replace(/^\\/+/, '');\n if (!baseUrl.endsWith('/'))\n baseUrl += '/';\n // Create a new URL object and append the subpath\n const url = new URL(cleanSubpath, baseUrl);\n return url.toString();\n }\n}\nexports.ChaintracksFetch = ChaintracksFetch;\n//# sourceMappingURL=ChaintracksFetch.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/ChaintracksFetch.js?\n}"); - -/***/ }), - -/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/HeightRange.js": -/*!***************************************************************************************************************!*\ - !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/HeightRange.js ***! 
- \***************************************************************************************************************/ -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.HeightRange = void 0;\nclass HeightRange {\n constructor(minHeight, maxHeight) {\n this.minHeight = minHeight;\n this.maxHeight = maxHeight;\n }\n /**\n * @param headers\n * @returns range of height values from the given headers, or the empty range if there are no headers.\n */\n static from(headers) {\n if (headers.length === 0)\n return HeightRange.empty;\n const minHeight = headers.reduce((min, h) => Math.min(min, h.height), headers[0].height);\n const maxHeight = headers.reduce((max, h) => Math.max(max, h.height), headers[0].height);\n return new HeightRange(minHeight, maxHeight);\n }\n get length() {\n return Math.max(0, this.maxHeight - this.minHeight + 1);\n }\n get isEmpty() {\n return this.minHeight > this.maxHeight;\n }\n toString() {\n return this.isEmpty ? 
'' : `${this.minHeight}-${this.maxHeight}`;\n }\n /**\n * @param range HeightRange or single height value.\n * @returns true if `range` is entirely within this range.\n */\n contains(range) {\n if (typeof range === 'number') {\n return this.minHeight <= range && this.maxHeight >= range;\n }\n return this.minHeight <= range.minHeight && this.maxHeight >= range.maxHeight;\n }\n /**\n * Return the intersection with another height range.\n *\n * Intersection with an empty range is always empty.\n *\n * The result is always a single, possibly empty, range.\n * @param range\n * @returns\n */\n intersect(range) {\n return new HeightRange(Math.max(this.minHeight, range.minHeight), Math.min(this.maxHeight, range.maxHeight));\n }\n /**\n * Return the union with another height range.\n *\n * Only valid if the two ranges overlap or touch, or one is empty.\n *\n * Throws an error if the union would create two disjoint ranges.\n *\n * @param range\n * @returns\n */\n union(range) {\n if (this.isEmpty)\n return range.copy();\n if (range.isEmpty)\n return this.copy();\n if (this.maxHeight + 1 < range.minHeight || range.maxHeight + 1 < this.minHeight)\n throw new Error('Union of ranges with a gap between them is not supported.');\n return new HeightRange(Math.min(this.minHeight, range.minHeight), Math.max(this.maxHeight, range.maxHeight));\n }\n /**\n * Returns `range` subtracted from this range.\n *\n * Throws an error if the subtraction would create two disjoint ranges.\n *\n * @param range\n * @returns\n */\n subtract(range) {\n if (this.isEmpty || range.isEmpty)\n return this.copy();\n if (this.minHeight < range.minHeight && this.maxHeight > range.maxHeight)\n throw new Error('Subtraction of range that creates two disjoint ranges is not supported.');\n if (range.maxHeight < this.minHeight || range.minHeight > this.maxHeight)\n // Leave untouched. 
Subtracted is either all lower or all higher.\n return this.copy();\n if (range.minHeight <= this.minHeight && range.maxHeight < this.maxHeight)\n // Remove a chunk on the low side.\n return new HeightRange(range.maxHeight + 1, this.maxHeight);\n if (range.minHeight <= this.minHeight && range.maxHeight >= this.maxHeight)\n // Remove the whole thing\n return new HeightRange(this.minHeight, this.minHeight - 1); // empty\n if (range.minHeight <= this.maxHeight && range.maxHeight >= this.maxHeight)\n // Remove a chunk on the high side.\n return new HeightRange(this.minHeight, range.minHeight - 1);\n throw new Error('All cases should have been handled :-) .');\n }\n /**\n * If `range` is not empty and this is not empty, returns a new range minHeight\n * replaced by to range.maxHeight + 1.\n *\n * Otherwise returns a copy of this range.\n *\n * This returns the portion of this range that is strictly above `range`.\n */\n above(range) {\n if (range.isEmpty || this.isEmpty)\n return this.copy();\n return new HeightRange(range.maxHeight + 1, this.maxHeight);\n }\n /**\n * Return a copy of this range.\n */\n copy() {\n return new HeightRange(this.minHeight, this.maxHeight);\n }\n}\nexports.HeightRange = HeightRange;\nHeightRange.empty = new HeightRange(0, -1);\n//# sourceMappingURL=HeightRange.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/HeightRange.js?\n}"); - -/***/ }), - -/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/SingleWriterMultiReaderLock.js": -/*!*******************************************************************************************************************************!*\ - !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/SingleWriterMultiReaderLock.js ***! 
- \*******************************************************************************************************************************/ -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.SingleWriterMultiReaderLock = void 0;\n/**\n * A reader-writer lock to manage concurrent access.\n * Allows multiple readers or one writer at a time.\n */\nclass SingleWriterMultiReaderLock {\n constructor() {\n this.readers = 0;\n this.writerActive = false;\n this.readerQueue = [];\n this.writerQueue = [];\n }\n checkQueues() {\n if (this.writerActive || this.readers > 0)\n return;\n if (this.writerQueue.length > 0) {\n // If there are waiting writers and no active readers or writers, start the next writer\n const resolve = this.writerQueue.shift();\n resolve();\n }\n else if (this.readerQueue.length > 0) {\n // If there are waiting readers and no waiting writers, start all readers\n const readers = this.readerQueue.splice(0);\n for (const resolve of readers) {\n resolve();\n }\n }\n }\n async withReadLock(fn) {\n if (!this.writerActive && this.writerQueue.length === 0) {\n // Fast path: no active writer or waiting writers, proceed immediately\n this.readers++;\n try {\n return await fn();\n }\n finally {\n this.readers--;\n this.checkQueues();\n }\n }\n else {\n // Queue the reader until writers are done\n const promise = new Promise(resolve => {\n this.readerQueue.push(resolve);\n });\n await promise;\n this.readers++;\n try {\n return await fn();\n }\n finally {\n this.readers--;\n this.checkQueues();\n }\n }\n }\n async withWriteLock(fn) {\n if (!this.writerActive && this.readers === 0) {\n // Fast path: no active writer or readers, proceed immediately\n this.writerActive = true;\n try {\n return await fn();\n }\n finally {\n this.writerActive = false;\n this.checkQueues();\n }\n }\n else {\n const promise = new Promise(resolve => {\n this.writerQueue.push(resolve);\n });\n await 
promise;\n this.writerActive = true;\n try {\n return await fn();\n }\n finally {\n this.writerActive = false;\n this.checkQueues();\n }\n }\n }\n}\nexports.SingleWriterMultiReaderLock = SingleWriterMultiReaderLock;\n//# sourceMappingURL=SingleWriterMultiReaderLock.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/SingleWriterMultiReaderLock.js?\n}"); +eval("{\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\n__exportStar(__webpack_require__(/*! ./ChaintracksServiceClient */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/ChaintracksServiceClient.js\"), exports);\n__exportStar(__webpack_require__(/*! 
./Api/BlockHeaderApi */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/Api/BlockHeaderApi.js\"), exports);\n//# sourceMappingURL=index.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/index.js?\n}"); /***/ }), @@ -3410,7 +3190,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.sha256HashOfBinaryFile = sha256HashOfBinaryFile;\nexports.validateBulkFileData = validateBulkFileData;\nexports.validateBufferOfHeaders = validateBufferOfHeaders;\nexports.validateGenesisHeader = validateGenesisHeader;\nexports.workBNtoBuffer = workBNtoBuffer;\nexports.isMoreWork = isMoreWork;\nexports.addWork = addWork;\nexports.subWork = subWork;\nexports.convertBitsToTarget = convertBitsToTarget;\nexports.convertBitsToWork = convertBitsToWork;\nexports.deserializeBaseBlockHeaders = deserializeBaseBlockHeaders;\nexports.deserializeBlockHeaders = deserializeBlockHeaders;\nexports.validateHeaderFormat = validateHeaderFormat;\nexports.validateHeaderDifficulty = validateHeaderDifficulty;\nexports.blockHash = blockHash;\nexports.serializeBaseBlockHeader = serializeBaseBlockHeader;\nexports.serializeBaseBlockHeaders = serializeBaseBlockHeaders;\nexports.deserializeBaseBlockHeader = deserializeBaseBlockHeader;\nexports.deserializeBlockHeader = deserializeBlockHeader;\nexports.genesisHeader = genesisHeader;\nexports.genesisBuffer = genesisBuffer;\nexports.swapByteOrder = swapByteOrder;\nexports.convertUint32ToBuffer = convertUint32ToBuffer;\nexports.writeUInt32LE = writeUInt32LE;\nexports.writeUInt32BE = writeUInt32BE;\nexports.readUInt32LE = readUInt32LE;\nexports.readUInt32BE = readUInt32BE;\nexports.convertBufferToUint32 = convertBufferToUint32;\nconst dirtyHashes_1 = __webpack_require__(/*! 
./dirtyHashes */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/dirtyHashes.js\");\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst utilityHelpers_noBuffer_1 = __webpack_require__(/*! ../../../../utility/utilityHelpers.noBuffer */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.noBuffer.js\");\nconst utilityHelpers_1 = __webpack_require__(/*! ../../../../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst ReaderUint8Array_1 = __webpack_require__(/*! ../../../../utility/ReaderUint8Array */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/ReaderUint8Array.js\");\nconst validBulkHeaderFilesByFileHash_1 = __webpack_require__(/*! ./validBulkHeaderFilesByFileHash */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/validBulkHeaderFilesByFileHash.js\");\nconst WERR_errors_1 = __webpack_require__(/*! ../../../../sdk/WERR_errors */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WERR_errors.js\");\n/**\n * Computes sha256 hash of file contents read as bytes with no encoding.\n * @param filepath Full filepath to file.\n * @param bufferSize Optional read buffer size to use. Defaults to 80,000 bytes. 
Currently ignored.\n * @returns `{hash, length}` where `hash` is base64 string form of file hash and `length` is file length in bytes.\n */\nasync function sha256HashOfBinaryFile(fs, filepath, bufferSize = 80000) {\n const sha256 = new sdk_1.Hash.SHA256();\n const bytes = await fs.readFile(filepath);\n const length = bytes.length;\n sha256.update((0, utilityHelpers_noBuffer_1.asArray)(bytes));\n return { hash: sdk_1.Utils.toBase64(sha256.digest()), length };\n}\n/**\n * Validates the contents of a bulk header file.\n * @param bf BulkHeaderFileInfo containing `data` to validate.\n * @param prevHash Required previous header hash.\n * @param prevChainWork Required previous chain work.\n * @param fetch Optional ChaintracksFetchApi instance for fetching data.\n * @returns Validated BulkHeaderFileInfo with `validated` set to true.\n */\nasync function validateBulkFileData(bf, prevHash, prevChainWork, fetch) {\n const vbf = { ...bf };\n if (!vbf.data && vbf.sourceUrl && fetch) {\n const url = fetch.pathJoin(vbf.sourceUrl, vbf.fileName);\n vbf.data = await fetch.download(url);\n }\n if (!vbf.data)\n throw new WERR_errors_1.WERR_INVALID_OPERATION(`bulk file ${vbf.fileName} data is unavailable`);\n if (vbf.count <= 0)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('bf.count', `expected count to be greater than 0, but got ${vbf.count}`);\n if (vbf.data.length !== vbf.count * 80)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('bf.data', `bulk file ${vbf.fileName} data length ${vbf.data.length} does not match expected count ${vbf.count}`);\n vbf.fileHash = (0, utilityHelpers_noBuffer_1.asString)(sdk_1.Hash.sha256((0, utilityHelpers_noBuffer_1.asArray)(vbf.data)), 'base64');\n if (bf.fileHash && bf.fileHash !== vbf.fileHash)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('bf.fileHash', `expected ${bf.fileHash} but got ${vbf.fileHash}`);\n if (!(0, validBulkHeaderFilesByFileHash_1.isKnownValidBulkHeaderFile)(vbf)) {\n const { lastHeaderHash, lastChainWork } = 
validateBufferOfHeaders(vbf.data, prevHash, 0, undefined, prevChainWork);\n vbf.lastHash = lastHeaderHash;\n vbf.lastChainWork = lastChainWork;\n if (vbf.firstHeight === 0) {\n validateGenesisHeader(vbf.data, vbf.chain);\n }\n }\n vbf.validated = true;\n return vbf;\n}\n/**\n * Validate headers contained in an array of bytes. The headers must be consecutive block headers, 80 bytes long,\n * where the hash of each header equals the previousHash of the following header.\n * @param buffer Buffer of headers to be validated.\n * @param previousHash Expected previousHash of first header.\n * @param offset Optional starting offset within `buffer`.\n * @param count Optional number of headers to validate. Validates to end of buffer if missing.\n * @returns Header hash of last header validated or previousHash if there where none.\n */\nfunction validateBufferOfHeaders(buffer, previousHash, offset = 0, count = -1, previousChainWork) {\n if (count < 0)\n count = Math.floor((buffer.length - offset) / 80);\n count = Math.max(0, count);\n let lastHeaderHash = previousHash;\n let lastChainWork = previousChainWork;\n for (let i = 0; i < count; i++) {\n const headerStart = offset + i * 80;\n const headerEnd = headerStart + 80;\n if (headerEnd > buffer.length) {\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('buffer', `multiple of 80 bytes long. 
header ${i} missing bytes for header at offset ${headerStart} in buffer of length ${buffer.length}`);\n }\n const header = buffer.slice(headerStart, headerEnd);\n const h = deserializeBaseBlockHeader(header);\n const hashPrev = (0, utilityHelpers_noBuffer_1.asString)(header.slice(4, 36).reverse());\n if (lastHeaderHash !== hashPrev)\n throw { message: `header ${i} invalid previousHash ${lastHeaderHash} vs ${hashPrev}` };\n lastHeaderHash = (0, utilityHelpers_noBuffer_1.asString)((0, utilityHelpers_1.doubleSha256BE)(header));\n (0, dirtyHashes_1.validateAgainstDirtyHashes)(lastHeaderHash);\n if (lastChainWork) {\n lastChainWork = addWork(lastChainWork, convertBitsToWork(h.bits));\n }\n }\n return { lastHeaderHash, lastChainWork };\n}\n/**\n * Verifies that buffer begins with valid genesis block header for the specified chain.\n * @param buffer\n * @param chain\n */\nfunction validateGenesisHeader(buffer, chain) {\n const header = buffer.slice(0, 80);\n const h = deserializeBlockHeader(header, 0, 0);\n const gh = genesisHeader(chain);\n if (h.bits !== gh.bits ||\n h.previousHash !== gh.previousHash ||\n h.merkleRoot !== gh.merkleRoot ||\n h.time !== gh.time ||\n h.nonce !== gh.nonce ||\n h.version !== gh.version ||\n h.height !== gh.height ||\n h.hash !== gh.hash) {\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('buffer', `genesis header for chain ${chain}`);\n }\n}\n/**\n * @param work chainWork as a BigNumber\n * @returns Converted chainWork value from BN to hex string of 32 bytes.\n */\nfunction workBNtoBuffer(work) {\n return work.toString(16).padStart(64, '0');\n}\n/**\n * Returns true if work1 is more work (greater than) work2\n */\nfunction isMoreWork(work1, work2) {\n return new sdk_1.BigNumber((0, utilityHelpers_noBuffer_1.asArray)(work1), 16).gt(new sdk_1.BigNumber((0, utilityHelpers_noBuffer_1.asArray)(work2), 16));\n}\n/**\n * Add two Buffer encoded chainwork values\n * @returns Sum of work1 + work2 as Buffer encoded chainWork value\n */\nfunction 
addWork(work1, work2) {\n const sum = new sdk_1.BigNumber(work1, 16).add(new sdk_1.BigNumber(work2, 16));\n return workBNtoBuffer(sum);\n}\n/**\n * Subtract Buffer encoded chainwork values\n * @returns work1 - work2 as Buffer encoded chainWork value\n */\nfunction subWork(work1, work2) {\n const sum = new sdk_1.BigNumber(work1, 16).sub(new sdk_1.BigNumber(work2, 16));\n return workBNtoBuffer(sum);\n}\n/**\n * Computes \"target\" value for 4 byte Bitcoin block header \"bits\" value.\n * @param bits number or converted from Buffer using `readUint32LE`\n * @returns 32 byte Buffer with \"target\" value\n */\nfunction convertBitsToTarget(bits) {\n if (Array.isArray(bits))\n bits = readUInt32LE(bits, 0);\n const shift = (bits >> 24) & 0xff;\n const data = bits & 0x007fffff;\n const target = new sdk_1.BigNumber(data);\n if (shift <= 3) {\n target.iushrn(8 * (3 - shift));\n }\n else {\n target.iushln(8 * (shift - 3));\n }\n return target;\n}\n/**\n * Computes \"chainWork\" value for 4 byte Bitcoin block header \"bits\" value.\n * @param bits number or converted from Buffer using `readUint32LE`\n * @returns 32 byte Buffer with \"chainWork\" value\n */\nfunction convertBitsToWork(bits) {\n const target = convertBitsToTarget(bits);\n // convert target to work\n const work = target.notn(256).div(target.addn(1)).addn(1);\n return work.toString(16).padStart(64, '0');\n}\nfunction deserializeBaseBlockHeaders(buffer, offset = 0, count) {\n const headers = [];\n while ((!count || headers.length < count) && offset + 80 <= buffer.length && offset >= 0) {\n headers.push(deserializeBaseBlockHeader(buffer, offset));\n offset += 80;\n }\n return headers;\n}\nfunction deserializeBlockHeaders(firstHeight, buffer, offset = 0, count) {\n const headers = [];\n let nextHeight = firstHeight;\n while ((!count || headers.length < count) && offset + 80 <= buffer.length && offset >= 0) {\n const baseBuffer = buffer.slice(offset, offset + 80);\n const base = deserializeBaseBlockHeader(baseBuffer);\n 
const header = {\n ...base,\n height: nextHeight++,\n hash: (0, utilityHelpers_noBuffer_1.asString)(blockHash(baseBuffer))\n };\n headers.push(header);\n offset += 80;\n }\n return headers;\n}\n/**\n * Given a block header, ensures that its format is correct. This does not\n * check its difficulty or validity relative to the chain of headers.\n *\n * Throws on format errors.\n *\n * @param The header to validate\n *\n * @returns true if the header is correctly formatted\n */\nfunction validateHeaderFormat(header) {\n const ALLOWED_KEYS = {\n version: true,\n previousHash: true,\n merkleRoot: true,\n time: true,\n bits: true,\n nonce: true,\n height: true,\n hash: true\n };\n const UINT_MAX = 0xffffffff;\n /**\n * Root object checks\n */\n if (typeof header === 'undefined') {\n throw new Error('Missing header.');\n }\n if (typeof header !== 'object') {\n throw new Error('Header must be an object.');\n }\n if (!Object.keys(header).every(key => ALLOWED_KEYS[key])) {\n throw new Error('Header contains extra properties.');\n }\n /**\n * Version\n */\n if (typeof header.version !== 'number') {\n throw new Error('Header version must be a number.');\n }\n if (!Number.isInteger(header.version)) {\n throw new Error('Header version must be an integer.');\n }\n if (header.version < 0 || header.version > UINT_MAX) {\n throw new Error(`Header version must be between 0 and ${UINT_MAX}.`);\n }\n /**\n * Height\n */\n if (typeof header.height !== 'number') {\n throw new Error('Header height must be a number.');\n }\n if (!Number.isInteger(header.height)) {\n throw new Error('Header height must be an integer.');\n }\n if (header.height < 0 || header.height > UINT_MAX / 2) {\n throw new Error(`Header version must be between 0 and ${UINT_MAX / 2}.`);\n }\n /**\n * Previous hash\n */\n if (header.previousHash.length !== 64) {\n throw new Error('Header previousHash must be 32 hex bytes.');\n }\n /**\n * Merkle root\n */\n if (header.merkleRoot.length !== 64) {\n throw new Error('Header 
merkleRoot must be 32 hex bytes.');\n }\n /**\n * Time\n */\n if (typeof header.time !== 'number') {\n throw new Error('Header time must be a number.');\n }\n if (!Number.isInteger(header.time)) {\n throw new Error('Header time must be an integer.');\n }\n if (header.time < 0 || header.time > UINT_MAX) {\n throw new Error(`Header time must be between 0 and ${UINT_MAX}.`);\n }\n /**\n * Bits\n */\n if (typeof header.bits !== 'number') {\n throw new Error('Header bits must be a number.');\n }\n if (!Number.isInteger(header.bits)) {\n throw new Error('Header bits must be an integer.');\n }\n if (header.bits < 0 || header.bits > UINT_MAX) {\n throw new Error(`Header bits must be between 0 and ${UINT_MAX}.`);\n }\n /**\n * Nonce\n */\n if (typeof header.nonce !== 'number') {\n throw new Error('Header nonce must be a number.');\n }\n if (!Number.isInteger(header.nonce)) {\n throw new Error('Header nonce must be an integer.');\n }\n if (header.nonce < 0 || header.nonce > UINT_MAX) {\n throw new Error(`Header nonce must be between 0 and ${UINT_MAX}.`);\n }\n /**\n * Hash\n */\n if (header.hash.length !== 64) {\n throw new Error('Header hash must be 32 hex bytes.');\n }\n if (header.hash !== (0, utilityHelpers_noBuffer_1.asString)(blockHash(header))) {\n throw new Error('Header hash is invalid.');\n }\n}\n/**\n * Ensures that a header has a valid proof-of-work\n * Requires chain is 'main'\n *\n * @param header The header to validate\n *\n * @returns true if the header is valid\n */\nfunction validateHeaderDifficulty(hash, bits) {\n const hashBN = new sdk_1.BigNumber((0, utilityHelpers_noBuffer_1.asArray)(hash));\n const target = convertBitsToTarget(bits);\n if (hashBN.lte(target))\n return true;\n throw new Error('Block hash is not less than specified target.');\n}\n/**\n * Computes double sha256 hash of bitcoin block header\n * bytes are reversed to bigendian order\n *\n * If header is a Buffer, it is required to 80 bytes long\n * and in standard block header serialized 
encoding.\n *\n * @returns doule sha256 hash of header bytes reversed\n * @publicbody\n */\nfunction blockHash(header) {\n const a = !Array.isArray(header) && !(header instanceof Uint8Array) ? serializeBaseBlockHeader(header) : header;\n if (a.length !== 80)\n throw new Error('Block header must be 80 bytes long.');\n return (0, utilityHelpers_noBuffer_1.asString)((0, utilityHelpers_1.doubleSha256BE)(a));\n}\n/**\n * Serializes a block header as an 80 byte Buffer.\n * The exact serialized format is defined in the Bitcoin White Paper\n * such that computing a double sha256 hash of the buffer computes\n * the block hash for the header.\n * @returns 80 byte Buffer\n * @publicbody\n */\nfunction serializeBaseBlockHeader(header, buffer, offset) {\n const writer = new sdk_1.Utils.Writer();\n writer.writeUInt32LE(header.version);\n writer.write((0, utilityHelpers_noBuffer_1.asArray)(header.previousHash).reverse());\n writer.write((0, utilityHelpers_noBuffer_1.asArray)(header.merkleRoot).reverse());\n writer.writeUInt32LE(header.time);\n writer.writeUInt32LE(header.bits);\n writer.writeUInt32LE(header.nonce);\n const data = writer.toArray();\n if (buffer) {\n offset || (offset = 0);\n for (let i = 0; i < data.length; i++) {\n if (offset + i >= buffer.length) {\n throw new Error(`Buffer overflow at offset ${offset + i} for data length ${data.length}`);\n }\n buffer[offset + i] = data[i];\n }\n }\n return data;\n}\nfunction serializeBaseBlockHeaders(headers) {\n const data = new Uint8Array(headers.length * 80);\n let i = -1;\n for (const header of headers) {\n i++;\n const d = serializeBaseBlockHeader(header);\n data.set(d, i * 80);\n }\n return data;\n}\n/**\n * Deserialize a BaseBlockHeader from an 80 byte buffer\n * @publicbody\n */\nfunction deserializeBaseBlockHeader(buffer, offset = 0) {\n const reader = ReaderUint8Array_1.ReaderUint8Array.makeReader(buffer, offset);\n const header = {\n version: reader.readUInt32LE(),\n previousHash: (0, 
utilityHelpers_noBuffer_1.asString)(reader.read(32).reverse()),\n merkleRoot: (0, utilityHelpers_noBuffer_1.asString)(reader.read(32).reverse()),\n time: reader.readUInt32LE(),\n bits: reader.readUInt32LE(),\n nonce: reader.readUInt32LE()\n };\n return header;\n}\nfunction deserializeBlockHeader(buffer, offset = 0, height) {\n const base = deserializeBaseBlockHeader(buffer, offset);\n const header = {\n ...base,\n height,\n hash: (0, utilityHelpers_noBuffer_1.asString)((0, utilityHelpers_1.doubleSha256BE)(buffer.slice(offset, offset + 80)))\n };\n return header;\n}\n/**\n * Returns the genesis block for the specified chain.\n * @publicbody\n */\nfunction genesisHeader(chain) {\n return chain === 'main'\n ? {\n version: 1,\n previousHash: '0000000000000000000000000000000000000000000000000000000000000000',\n merkleRoot: '4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b',\n time: 1231006505,\n bits: 486604799,\n nonce: 2083236893,\n height: 0,\n hash: '000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f'\n }\n : {\n version: 1,\n previousHash: '0000000000000000000000000000000000000000000000000000000000000000',\n merkleRoot: '4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b',\n time: 1296688602,\n bits: 486604799,\n nonce: 414098458,\n height: 0,\n hash: '000000000933ea01ad0ee984209779baaec3ced90fa3f408719526f8d77f4943'\n };\n}\n/**\n * Returns the genesis block for the specified chain.\n * @publicbody\n */\nfunction genesisBuffer(chain) {\n return serializeBaseBlockHeader(genesisHeader(chain));\n}\n/**\n * Returns a copy of a Buffer with byte order reversed.\n * @returns new buffer with byte order reversed.\n * @publicbody\n */\nfunction swapByteOrder(buffer) {\n return buffer.slice().reverse();\n}\n/**\n * @param num a number value in the Uint32 value range\n * @param littleEndian true for little-endian byte order in Buffer\n * @returns four byte buffer with Uint32 number encoded\n * @publicbody\n */\nfunction 
convertUint32ToBuffer(n, littleEndian = true) {\n const a = [\n n & 0xff, // lowest byte\n (n >> 8) & 0xff,\n (n >> 16) & 0xff,\n (n >> 24) & 0xff // highest byte\n ];\n return littleEndian ? a : a.reverse();\n}\nfunction writeUInt32LE(n, a, offset) {\n a[offset++] = n & 0xff; // lowest byte\n a[offset++] = (n >> 8) & 0xff;\n a[offset++] = (n >> 16) & 0xff;\n a[offset++] = (n >> 24) & 0xff; // highest byte\n return offset;\n}\nfunction writeUInt32BE(n, a, offset) {\n a[offset++] = (n >> 24) & 0xff; // highest byte\n a[offset++] = (n >> 16) & 0xff;\n a[offset++] = (n >> 8) & 0xff;\n a[offset++] = n & 0xff; // lowest byte\n return offset;\n}\nfunction readUInt32LE(a, offset) {\n return a[offset++] | (a[offset++] << 8) | (a[offset++] << 16) | (a[offset++] << 24);\n}\nfunction readUInt32BE(a, offset) {\n return (a[offset++] << 24) | (a[offset++] << 16) | (a[offset++] << 8) | a[offset++];\n}\n/**\n * @param buffer four byte buffer with Uint32 number encoded\n * @param littleEndian true for little-endian byte order in Buffer\n * @returns a number value in the Uint32 value range\n * @publicbody\n */\nfunction convertBufferToUint32(buffer, littleEndian = true) {\n const a = littleEndian ? 
buffer : buffer.slice().reverse();\n const n = a[0] | (a[1] << 8) | (a[2] << 16) | (a[3] << 24);\n return n;\n}\n//# sourceMappingURL=blockHeaderUtilities.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/blockHeaderUtilities.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.validateBufferOfHeaders = validateBufferOfHeaders;\nexports.workBNtoBuffer = workBNtoBuffer;\nexports.isMoreWork = isMoreWork;\nexports.addWork = addWork;\nexports.subWork = subWork;\nexports.convertBitsToTarget = convertBitsToTarget;\nexports.convertBitsToWork = convertBitsToWork;\nexports.deserializeBaseBlockHeaders = deserializeBaseBlockHeaders;\nexports.deserializeBlockHeaders = deserializeBlockHeaders;\nexports.extractHashesAndRoots = extractHashesAndRoots;\nexports.validateHeaderFormat = validateHeaderFormat;\nexports.validateHeaderDifficulty = validateHeaderDifficulty;\nexports.blockHash = blockHash;\nexports.serializeBlockHeader = serializeBlockHeader;\nexports.deserializeBlockHeader = deserializeBlockHeader;\nexports.genesisHeader = genesisHeader;\nexports.genesisBuffer = genesisBuffer;\nexports.swapByteOrder = swapByteOrder;\nexports.convertUint32ToBuffer = convertUint32ToBuffer;\nexports.writeUInt32LE = writeUInt32LE;\nexports.writeUInt32BE = writeUInt32BE;\nexports.readUInt32LE = readUInt32LE;\nexports.readUInt32BE = readUInt32BE;\nexports.convertBufferToUint32 = convertBufferToUint32;\nconst dirtyHashes_1 = __webpack_require__(/*! ./dirtyHashes */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/dirtyHashes.js\");\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst utilityHelpers_buffer_1 = __webpack_require__(/*! 
../../../../utility/utilityHelpers.buffer */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.buffer.js\");\nconst utilityHelpers_1 = __webpack_require__(/*! ../../../../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\n/**\n * Computes sha256 hash of file contents read as bytes with no encoding.\n * @param filepath Full filepath to file.\n * @param bufferSize Optional read buffer size to use. Defaults to 80,000 bytes.\n * @returns `{hash, length}` where `hash` is base64 string form of file hash and `length` is file length in bytes.\n */\n/*\nexport async function sha256HashOfBinaryFile(\n filepath: string,\n bufferSize = 80000\n): Promise<{ hash: string; length: number }> {\n const file = await fs.open(filepath, 'r')\n try {\n let length = 0\n\n const sha256 = new Hash.SHA256()\n const readBuf = Buffer.alloc(bufferSize)\n\n // eslint-disable-next-line no-constant-condition\n while (true) {\n const rr = await file.read(readBuf, 0, readBuf.length)\n if (!rr.bytesRead) break\n length += rr.bytesRead\n sha256.update(asArray(rr.buffer))\n }\n\n return { hash: Utils.toBase64(sha256.digest()), length }\n } finally {\n await file.close()\n }\n}\n*/\n/**\n * Validate headers contained in an array of bytes. The headers must be consecutive block headers, 80 bytes long,\n * where the hash of each header equals the previousHash of the following header.\n * @param buffer Buffer of headers to be validated.\n * @param previousHash Expected previousHash of first header.\n * @param offset Optional starting offset within `buffer`.\n * @param count Optional number of headers to validate. 
Validates to end of buffer if missing.\n * @returns Header hash of last header validated or previousHash if there where none.\n */\nfunction validateBufferOfHeaders(buffer, previousHash, offset = 0, count = -1) {\n if (count < 0)\n count = Math.floor((buffer.length - offset) / 80);\n count = Math.max(0, count);\n let lastHeaderHash = previousHash;\n for (let i = 0; i < count; i++) {\n const headerStart = offset + i * 80;\n const headerEnd = headerStart + 80;\n if (headerEnd > buffer.length) {\n throw {\n message: `header ${i} missing bytes for header at offset ${headerStart} in buffer of length ${buffer.length}`\n };\n }\n const header = buffer.slice(headerStart, headerEnd);\n const hashPrev = (0, utilityHelpers_buffer_1.asString)(header.slice(4, 36).reverse());\n if (lastHeaderHash !== hashPrev)\n throw { message: `header ${i} invalid previousHash ${lastHeaderHash} vs ${hashPrev}` };\n lastHeaderHash = (0, utilityHelpers_buffer_1.asString)((0, utilityHelpers_1.doubleSha256BE)(header));\n (0, dirtyHashes_1.validateAgainstDirtyHashes)(lastHeaderHash);\n }\n return lastHeaderHash;\n}\n/**\n * @param work chainWork as a BigNumber\n * @returns Converted chainWork value from BN to hex string of 32 bytes.\n */\nfunction workBNtoBuffer(work) {\n return work.toString(16).padStart(64, '0');\n}\n/**\n * Returns true if work1 is more work (greater than) work2\n */\nfunction isMoreWork(work1, work2) {\n return new sdk_1.BigNumber((0, utilityHelpers_buffer_1.asArray)(work1), 16).gt(new sdk_1.BigNumber((0, utilityHelpers_buffer_1.asArray)(work2), 16));\n}\n/**\n * Add two Buffer encoded chainwork values\n * @returns Sum of work1 + work2 as Buffer encoded chainWork value\n */\nfunction addWork(work1, work2) {\n const sum = new sdk_1.BigNumber((0, utilityHelpers_buffer_1.asArray)(work1), 16).add(new sdk_1.BigNumber((0, utilityHelpers_buffer_1.asArray)(work2), 16));\n return workBNtoBuffer(sum);\n}\n/**\n * Subtract Buffer encoded chainwork values\n * @returns work1 - work2 as 
Buffer encoded chainWork value\n */\nfunction subWork(work1, work2) {\n const sum = new sdk_1.BigNumber((0, utilityHelpers_buffer_1.asArray)(work1), 16).sub(new sdk_1.BigNumber((0, utilityHelpers_buffer_1.asArray)(work2), 16));\n return workBNtoBuffer(sum);\n}\n/**\n * Computes \"target\" value for 4 byte Bitcoin block header \"bits\" value.\n * @param bits number or converted from Buffer using `readUint32LE`\n * @returns 32 byte Buffer with \"target\" value\n */\nfunction convertBitsToTarget(bits) {\n if (Array.isArray(bits))\n bits = readUInt32LE(bits, 0);\n const shift = (bits >> 24) & 0xff;\n const data = bits & 0x007fffff;\n const target = new sdk_1.BigNumber(data);\n if (shift <= 3) {\n target.iushrn(8 * (3 - shift));\n }\n else {\n target.iushln(8 * (shift - 3));\n }\n return target;\n}\n/**\n * Computes \"chainWork\" value for 4 byte Bitcoin block header \"bits\" value.\n * @param bits number or converted from Buffer using `readUint32LE`\n * @returns 32 byte Buffer with \"chainWork\" value\n */\nfunction convertBitsToWork(bits) {\n const target = convertBitsToTarget(bits);\n // convert target to work\n const work = target.notn(256).div(target.addn(1)).addn(1);\n return work.toString(16).padStart(64, '0');\n}\nfunction deserializeBaseBlockHeaders(buffer, offset = 0, count) {\n const headers = [];\n while ((!count || headers.length < count) && offset + 80 <= buffer.length && offset >= 0) {\n headers.push(deserializeBlockHeader(buffer, offset));\n offset += 80;\n }\n return headers;\n}\nfunction deserializeBlockHeaders(firstHeight, buffer, offset = 0, count) {\n const headers = [];\n let nextHeight = firstHeight;\n while ((!count || headers.length < count) && offset + 80 <= buffer.length && offset >= 0) {\n const baseBuffer = buffer.slice(offset, offset + 80);\n const base = deserializeBlockHeader(baseBuffer);\n const header = {\n ...base,\n height: nextHeight++,\n hash: (0, utilityHelpers_buffer_1.asString)(blockHash(baseBuffer))\n };\n 
headers.push(header);\n offset += 80;\n }\n return headers;\n}\n/**\n * Extract an array of block hashes and of merkleRoots from a buffer of serialized block headers.\n * @param buffer\n */\nfunction extractHashesAndRoots(buffer) {\n const hashes = [];\n const merkleRoots = [];\n for (let i = 0; i < buffer.length / 80; i++) {\n const offset = i * 80;\n const hash = (0, utilityHelpers_buffer_1.asBuffer)((0, utilityHelpers_1.doubleSha256LE)((0, utilityHelpers_buffer_1.asArray)(buffer.subarray(offset, 80 + offset))).reverse());\n const merkleRoot = buffer.subarray(36 + offset, 68 + offset).reverse();\n hashes.push(hash);\n merkleRoots.push(merkleRoot);\n }\n return { hashes, merkleRoots };\n}\n/**\n * Given a block header, ensures that its format is correct. This does not\n * check its difficulty or validity relative to the chain of headers.\n *\n * Throws on format errors.\n *\n * @param The header to validate\n *\n * @returns true if the header is correctly formatted\n */\nfunction validateHeaderFormat(header) {\n const ALLOWED_KEYS = {\n version: true,\n previousHash: true,\n merkleRoot: true,\n time: true,\n bits: true,\n nonce: true,\n height: true,\n hash: true\n };\n const UINT_MAX = 0xffffffff;\n /**\n * Root object checks\n */\n if (typeof header === 'undefined') {\n throw new Error('Missing header.');\n }\n if (typeof header !== 'object') {\n throw new Error('Header must be an object.');\n }\n if (!Object.keys(header).every(key => ALLOWED_KEYS[key])) {\n throw new Error('Header contains extra properties.');\n }\n /**\n * Version\n */\n if (typeof header.version !== 'number') {\n throw new Error('Header version must be a number.');\n }\n if (!Number.isInteger(header.version)) {\n throw new Error('Header version must be an integer.');\n }\n if (header.version < 0 || header.version > UINT_MAX) {\n throw new Error(`Header version must be between 0 and ${UINT_MAX}.`);\n }\n /**\n * Height\n */\n if (typeof header.height !== 'number') {\n throw new Error('Header 
height must be a number.');\n }\n if (!Number.isInteger(header.height)) {\n throw new Error('Header height must be an integer.');\n }\n if (header.height < 0 || header.height > UINT_MAX / 2) {\n throw new Error(`Header version must be between 0 and ${UINT_MAX / 2}.`);\n }\n /**\n * Previous hash\n */\n if (header.previousHash.length !== 64) {\n throw new Error('Header previousHash must be 32 hex bytes.');\n }\n /**\n * Merkle root\n */\n if (header.merkleRoot.length !== 64) {\n throw new Error('Header merkleRoot must be 32 hex bytes.');\n }\n /**\n * Time\n */\n if (typeof header.time !== 'number') {\n throw new Error('Header time must be a number.');\n }\n if (!Number.isInteger(header.time)) {\n throw new Error('Header time must be an integer.');\n }\n if (header.time < 0 || header.time > UINT_MAX) {\n throw new Error(`Header time must be between 0 and ${UINT_MAX}.`);\n }\n /**\n * Bits\n */\n if (typeof header.bits !== 'number') {\n throw new Error('Header bits must be a number.');\n }\n if (!Number.isInteger(header.bits)) {\n throw new Error('Header bits must be an integer.');\n }\n if (header.bits < 0 || header.bits > UINT_MAX) {\n throw new Error(`Header bits must be between 0 and ${UINT_MAX}.`);\n }\n /**\n * Nonce\n */\n if (typeof header.nonce !== 'number') {\n throw new Error('Header nonce must be a number.');\n }\n if (!Number.isInteger(header.nonce)) {\n throw new Error('Header nonce must be an integer.');\n }\n if (header.nonce < 0 || header.nonce > UINT_MAX) {\n throw new Error(`Header nonce must be between 0 and ${UINT_MAX}.`);\n }\n /**\n * Hash\n */\n if (header.hash.length !== 64) {\n throw new Error('Header hash must be 32 hex bytes.');\n }\n if (header.hash !== (0, utilityHelpers_buffer_1.asString)(blockHash(header))) {\n throw new Error('Header hash is invalid.');\n }\n}\n/**\n * Ensures that a header has a valid proof-of-work\n * Requires chain is 'main'\n *\n * @param header The header to validate\n *\n * @returns true if the header is valid\n 
*/\nfunction validateHeaderDifficulty(hash, bits) {\n const hashBN = new sdk_1.BigNumber((0, utilityHelpers_buffer_1.asArray)(hash));\n const target = convertBitsToTarget(bits);\n if (hashBN.lte(target))\n return true;\n throw new Error('Block hash is not less than specified target.');\n}\n/**\n * Computes double sha256 hash of bitcoin block header\n * bytes are reversed to bigendian order\n *\n * If header is a Buffer, it is required to 80 bytes long\n * and in standard block header serialized encoding.\n *\n * @returns doule sha256 hash of header bytes reversed\n * @publicbody\n */\nfunction blockHash(header) {\n const a = !Array.isArray(header) ? serializeBlockHeader(header) : header;\n if (a.length !== 80)\n throw new Error('Block header must be 80 bytes long.');\n return (0, utilityHelpers_buffer_1.asString)((0, utilityHelpers_1.doubleSha256BE)(a));\n}\n/**\n * Serializes a block header as an 80 byte Buffer.\n * The exact serialized format is defined in the Bitcoin White Paper\n * such that computing a double sha256 hash of the buffer computes\n * the block hash for the header.\n * @returns 80 byte Buffer\n * @publicbody\n */\nfunction serializeBlockHeader(header, buffer, offset) {\n const writer = new sdk_1.Utils.Writer();\n writer.writeUInt32LE(header.version);\n writer.write((0, utilityHelpers_buffer_1.asArray)(header.previousHash).reverse());\n writer.write((0, utilityHelpers_buffer_1.asArray)(header.merkleRoot).reverse());\n writer.writeUInt32LE(header.time);\n writer.writeUInt32LE(header.bits);\n writer.writeUInt32LE(header.nonce);\n const data = writer.toArray();\n if (buffer) {\n offset || (offset = 0);\n buffer.splice(offset, buffer.length, ...data);\n }\n return data;\n}\n/**\n * Deserialize a block header from an 80 byte buffer\n * @publicbody\n */\nfunction deserializeBlockHeader(buffer, offset = 0) {\n const reader = new sdk_1.Utils.Reader(buffer, offset);\n const header = {\n version: reader.readUInt32LE(),\n previousHash: (0, 
utilityHelpers_buffer_1.asString)(reader.read(32).reverse()),\n merkleRoot: (0, utilityHelpers_buffer_1.asString)(reader.read(32).reverse()),\n time: reader.readUInt32LE(),\n bits: reader.readUInt32LE(),\n nonce: reader.readUInt32LE()\n };\n return header;\n}\n/**\n * Returns the genesis block for the specified chain.\n * @publicbody\n */\nfunction genesisHeader(chain) {\n return chain === 'main'\n ? {\n version: 1,\n previousHash: '0000000000000000000000000000000000000000000000000000000000000000',\n merkleRoot: '4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b',\n time: 1231006505,\n bits: 486604799,\n nonce: 2083236893,\n height: 0,\n hash: '000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f'\n }\n : {\n version: 1,\n previousHash: '0000000000000000000000000000000000000000000000000000000000000000',\n merkleRoot: '4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b',\n time: 1296688602,\n bits: 486604799,\n nonce: 414098458,\n height: 0,\n hash: '000000000933ea01ad0ee984209779baaec3ced90fa3f408719526f8d77f4943'\n };\n}\n/**\n * Returns the genesis block for the specified chain.\n * @publicbody\n */\nfunction genesisBuffer(chain) {\n return serializeBlockHeader(genesisHeader(chain));\n}\n/**\n * Returns a copy of a Buffer with byte order reversed.\n * @returns new buffer with byte order reversed.\n * @publicbody\n */\nfunction swapByteOrder(buffer) {\n return buffer.slice().reverse();\n}\n/**\n * @param num a number value in the Uint32 value range\n * @param littleEndian true for little-endian byte order in Buffer\n * @returns four byte buffer with Uint32 number encoded\n * @publicbody\n */\nfunction convertUint32ToBuffer(n, littleEndian = true) {\n const a = [\n n & 0xff, // lowest byte\n (n >> 8) & 0xff,\n (n >> 16) & 0xff,\n (n >> 24) & 0xff // highest byte\n ];\n return littleEndian ? 
a : a.reverse();\n}\nfunction writeUInt32LE(n, a, offset) {\n a[offset++] = n & 0xff; // lowest byte\n a[offset++] = (n >> 8) & 0xff;\n a[offset++] = (n >> 16) & 0xff;\n a[offset++] = (n >> 24) & 0xff; // highest byte\n return offset;\n}\nfunction writeUInt32BE(n, a, offset) {\n a[offset++] = (n >> 24) & 0xff; // highest byte\n a[offset++] = (n >> 16) & 0xff;\n a[offset++] = (n >> 8) & 0xff;\n a[offset++] = n & 0xff; // lowest byte\n return offset;\n}\nfunction readUInt32LE(a, offset) {\n return a[offset++] | (a[offset++] << 8) | (a[offset++] << 16) | (a[offset++] << 24);\n}\nfunction readUInt32BE(a, offset) {\n return (a[offset++] << 24) | (a[offset++] << 16) | (a[offset++] << 8) | a[offset++];\n}\n/**\n * @param buffer four byte buffer with Uint32 number encoded\n * @param littleEndian true for little-endian byte order in Buffer\n * @returns a number value in the Uint32 value range\n * @publicbody\n */\nfunction convertBufferToUint32(buffer, littleEndian = true) {\n const a = littleEndian ? buffer : buffer.slice().reverse();\n const n = a[0] | (a[1] << 8) | (a[2] << 16) | (a[3] << 24);\n return n;\n}\n//# sourceMappingURL=blockHeaderUtilities.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/blockHeaderUtilities.js?\n}"); /***/ }), @@ -3425,14 +3205,14 @@ /***/ }), -/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/validBulkHeaderFilesByFileHash.js": -/*!**********************************************************************************************************************************!*\ - !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/validBulkHeaderFilesByFileHash.js ***! 
- \**********************************************************************************************************************************/ -/***/ ((__unused_webpack_module, exports) => { +/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/index.js": +/*!****************************************************************************************!*\ + !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/index.js ***! + \****************************************************************************************/ +/***/ (function(__unused_webpack_module, exports, __webpack_require__) { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.validBulkHeaderFiles = void 0;\nexports.isKnownValidBulkHeaderFile = isKnownValidBulkHeaderFile;\nexports.validBulkHeaderFilesByFileHash = validBulkHeaderFilesByFileHash;\nfunction isKnownValidBulkHeaderFile(vbf) {\n if (!vbf || !vbf.fileHash)\n return false;\n const bf = validBulkHeaderFilesByFileHash()[vbf.fileHash];\n if (!bf ||\n bf.firstHeight !== vbf.firstHeight ||\n bf.count !== vbf.count ||\n bf.prevChainWork !== vbf.prevChainWork ||\n bf.prevHash !== vbf.prevHash ||\n bf.lastChainWork !== vbf.lastChainWork ||\n bf.lastHash !== vbf.lastHash ||\n bf.chain !== vbf.chain) {\n return false;\n }\n return true;\n}\nlet _validBulkHeaderFilesByFileHash;\nfunction validBulkHeaderFilesByFileHash() {\n if (!_validBulkHeaderFilesByFileHash) {\n _validBulkHeaderFilesByFileHash = {};\n for (const vbf of exports.validBulkHeaderFiles) {\n if (vbf.fileHash) {\n _validBulkHeaderFilesByFileHash[vbf.fileHash] = vbf;\n }\n }\n }\n return _validBulkHeaderFilesByFileHash;\n}\nexports.validBulkHeaderFiles = [\n {\n sourceUrl: 'https://cdn.projectbabbage.com/blockheaders',\n fileName: 'testNet_0.headers',\n firstHeight: 0,\n prevHash: '0000000000000000000000000000000000000000000000000000000000000000',\n count: 100000,\n lastHash: 
'000000004956cc2edd1a8caa05eacfa3c69f4c490bfc9ace820257834115ab35',\n fileHash: 'gAJPUfI2DfAabJTOBxT1rwy1cS4/QULaQHaQWa1RWNk=',\n lastChainWork: '000000000000000000000000000000000000000000000000004143c00b3d47b8',\n prevChainWork: '0000000000000000000000000000000000000000000000000000000000000000',\n chain: 'test',\n validated: true\n },\n {\n sourceUrl: 'https://cdn.projectbabbage.com/blockheaders',\n fileName: 'testNet_1.headers',\n firstHeight: 100000,\n prevHash: '000000004956cc2edd1a8caa05eacfa3c69f4c490bfc9ace820257834115ab35',\n count: 100000,\n lastHash: '0000000000c470c4a573272aa4a680c93fc4c2f5df8ce9546441796f73277334',\n fileHash: 'OIJ010bnIbFobNppJzCNE9jFI1uANz0iNGvqpoG2xq4=',\n lastChainWork: '00000000000000000000000000000000000000000000000004504f3a4e71aa13',\n prevChainWork: '000000000000000000000000000000000000000000000000004143c00b3d47b8',\n chain: 'test',\n validated: true\n },\n {\n sourceUrl: 'https://cdn.projectbabbage.com/blockheaders',\n fileName: 'testNet_2.headers',\n firstHeight: 200000,\n prevHash: '0000000000c470c4a573272aa4a680c93fc4c2f5df8ce9546441796f73277334',\n count: 100000,\n lastHash: '00000000dfe970844d1bf983d0745f709368b5c66224837a17ed633f0dabd300',\n fileHash: 'hZXE3im7V4tE0oROWM2mGB9xPXEcpVLRIYUPaYT3VV0=',\n lastChainWork: '00000000000000000000000000000000000000000000000062378b066f9fba96',\n prevChainWork: '00000000000000000000000000000000000000000000000004504f3a4e71aa13',\n chain: 'test',\n validated: true\n },\n {\n sourceUrl: 'https://cdn.projectbabbage.com/blockheaders',\n fileName: 'testNet_3.headers',\n firstHeight: 300000,\n prevHash: '00000000dfe970844d1bf983d0745f709368b5c66224837a17ed633f0dabd300',\n count: 100000,\n lastHash: '0000000001127c76ac45f605f9300dfa96a8054533b96413883fdc4378aeb42d',\n fileHash: 'BGZxsk/Ooa4BOaoBEMOor+B8wL9ghW5A0We2G2fmyLE=',\n lastChainWork: '0000000000000000000000000000000000000000000000040da9d61d8e129a53',\n prevChainWork: 
'00000000000000000000000000000000000000000000000062378b066f9fba96',\n chain: 'test',\n validated: true\n },\n {\n sourceUrl: 'https://cdn.projectbabbage.com/blockheaders',\n fileName: 'testNet_4.headers',\n firstHeight: 400000,\n prevHash: '0000000001127c76ac45f605f9300dfa96a8054533b96413883fdc4378aeb42d',\n count: 100000,\n lastHash: '0000000001965655a870175b510326e6393114d293896ddb237709eecb381ab8',\n fileHash: '3DjOpFnatZ0OKrpACATfAtBITX2s8JjfYTAnDHVkGuw=',\n lastChainWork: '00000000000000000000000000000000000000000000000461063a8389300d36',\n prevChainWork: '0000000000000000000000000000000000000000000000040da9d61d8e129a53',\n chain: 'test',\n validated: true\n },\n {\n sourceUrl: 'https://cdn.projectbabbage.com/blockheaders',\n fileName: 'testNet_5.headers',\n firstHeight: 500000,\n prevHash: '0000000001965655a870175b510326e6393114d293896ddb237709eecb381ab8',\n count: 100000,\n lastHash: '000000000000bb1644b4d9a643b165a52b3ffba077f2a12b8bd1f0a6b6cc0fbc',\n fileHash: 'wF008GqnZzAYsOwnmyFzIOmrJthHE3bq6oUg1FvHG1Y=',\n lastChainWork: '0000000000000000000000000000000000000000000000067a8291cfec0aa549',\n prevChainWork: '00000000000000000000000000000000000000000000000461063a8389300d36',\n chain: 'test',\n validated: true\n },\n {\n sourceUrl: 'https://cdn.projectbabbage.com/blockheaders',\n fileName: 'testNet_6.headers',\n firstHeight: 600000,\n prevHash: '000000000000bb1644b4d9a643b165a52b3ffba077f2a12b8bd1f0a6b6cc0fbc',\n count: 100000,\n lastHash: '0000000000003e784511e93aca014ecaa6d4ba3637cf373f4b84dcac7c70cca0',\n fileHash: 'uc7IW6NRXXtX3oGWwOYjtetTaZ+1zhvijNEwPbK+rAs=',\n lastChainWork: '0000000000000000000000000000000000000000000000078286c7f42f7ec693',\n prevChainWork: '0000000000000000000000000000000000000000000000067a8291cfec0aa549',\n chain: 'test',\n validated: true\n },\n {\n sourceUrl: 'https://cdn.projectbabbage.com/blockheaders',\n fileName: 'testNet_7.headers',\n firstHeight: 700000,\n prevHash: 
'0000000000003e784511e93aca014ecaa6d4ba3637cf373f4b84dcac7c70cca0',\n count: 100000,\n lastHash: '0000000000068f8658ff71cbf8f5b31c837cc6df5bf53e40f05459d4267b53e6',\n fileHash: 'yfomaIGZyoW/m7YdpZYNozeNrUmJBwaF0PpLdSADWJE=',\n lastChainWork: '00000000000000000000000000000000000000000000000a551ea869597d2a74',\n prevChainWork: '0000000000000000000000000000000000000000000000078286c7f42f7ec693',\n chain: 'test',\n validated: true\n },\n {\n sourceUrl: 'https://cdn.projectbabbage.com/blockheaders',\n fileName: 'testNet_8.headers',\n firstHeight: 800000,\n prevHash: '0000000000068f8658ff71cbf8f5b31c837cc6df5bf53e40f05459d4267b53e6',\n count: 100000,\n lastHash: '0000000000214fbb71abe4695d935b8e089d306899c4a90124b1bc6806e6e299',\n fileHash: '/AIS2PYHdMJBmRF9ECsZmCphoqhDyFWs+aO+3GIpPhg=',\n lastChainWork: '00000000000000000000000000000000000000000000000eb93c12a85efec237',\n prevChainWork: '00000000000000000000000000000000000000000000000a551ea869597d2a74',\n chain: 'test',\n validated: true\n },\n {\n sourceUrl: 'https://cdn.projectbabbage.com/blockheaders',\n fileName: 'testNet_9.headers',\n firstHeight: 900000,\n prevHash: '0000000000214fbb71abe4695d935b8e089d306899c4a90124b1bc6806e6e299',\n count: 100000,\n lastHash: '00000000002208a5fee5b9baa4b5519d2cd8ab405754fca13704dc667448f21a',\n fileHash: 'lJtRGLYlMnHe6r0xuJJWauJA7DKL4ZYOqkYmUD2iwbM=',\n lastChainWork: '000000000000000000000000000000000000000000000017e96a5ada9f4a8bfb',\n prevChainWork: '00000000000000000000000000000000000000000000000eb93c12a85efec237',\n chain: 'test',\n validated: true\n },\n {\n sourceUrl: 'https://cdn.projectbabbage.com/blockheaders',\n fileName: 'testNet_10.headers',\n firstHeight: 1000000,\n prevHash: '00000000002208a5fee5b9baa4b5519d2cd8ab405754fca13704dc667448f21a',\n count: 100000,\n lastHash: '000000000005bc8878ba47a47129c3e21f32f8c10b9658f9ee6db16a83870162',\n fileHash: 'tfWVFoIp4A6yXd2c0YietQ7hYlmLf7O884baego+D4E=',\n lastChainWork: 
'000000000000000000000000000000000000000000000021bf46518c698a4bc8',\n prevChainWork: '000000000000000000000000000000000000000000000017e96a5ada9f4a8bfb',\n chain: 'test',\n validated: true\n },\n {\n sourceUrl: 'https://cdn.projectbabbage.com/blockheaders',\n fileName: 'testNet_11.headers',\n firstHeight: 1100000,\n prevHash: '000000000005bc8878ba47a47129c3e21f32f8c10b9658f9ee6db16a83870162',\n count: 100000,\n lastHash: '00000000f8bf61018ddd77d23c112e874682704a290252f635e7df06c8a317b8',\n fileHash: 'S0Y9WXGFFJLRsRkQRNvrtImOezjReEQ1eDdB2x5M6Mw=',\n lastChainWork: '0000000000000000000000000000000000000000000000288b285ca9b1bb8065',\n prevChainWork: '000000000000000000000000000000000000000000000021bf46518c698a4bc8',\n chain: 'test',\n validated: true\n },\n {\n sourceUrl: 'https://cdn.projectbabbage.com/blockheaders',\n fileName: 'testNet_12.headers',\n firstHeight: 1200000,\n prevHash: '00000000f8bf61018ddd77d23c112e874682704a290252f635e7df06c8a317b8',\n count: 100000,\n lastHash: '0000000000000165e6678be46ec2b15c587611b86da7147f7069a0e7175d62da',\n fileHash: 'eFHQB8EaSfs4EKZxVsLhX8UA79kpOI4dR6j/z9P8frI=',\n lastChainWork: '0000000000000000000000000000000000000000000000542144c6af6e9258ea',\n prevChainWork: '0000000000000000000000000000000000000000000000288b285ca9b1bb8065',\n chain: 'test',\n validated: true\n },\n {\n sourceUrl: 'https://cdn.projectbabbage.com/blockheaders',\n fileName: 'testNet_13.headers',\n firstHeight: 1300000,\n prevHash: '0000000000000165e6678be46ec2b15c587611b86da7147f7069a0e7175d62da',\n count: 100000,\n lastHash: '00000000000002ef0a47d0f242ab280bded8f4780bad506c71f2e1d2771becd4',\n fileHash: '2MFJLBjHOBnuaDAICQFCL3y+6ejj0k92gbcmLWa1/Xc=',\n lastChainWork: '0000000000000000000000000000000000000000000000dcc85f546d353f7b08',\n prevChainWork: '0000000000000000000000000000000000000000000000542144c6af6e9258ea',\n chain: 'test',\n validated: true\n },\n {\n sourceUrl: 'https://cdn.projectbabbage.com/blockheaders',\n fileName: 'testNet_14.headers',\n 
firstHeight: 1400000,\n prevHash: '00000000000002ef0a47d0f242ab280bded8f4780bad506c71f2e1d2771becd4',\n count: 100000,\n lastHash: '0000000000000168de8736c8a424fd5ebe1dcf0a030ed5fa0699b8c0fafc0b5e',\n fileHash: 'lWmP/pOR5ciEnu5tjIrf7OTEaiaMcfqFZQQYT7QH6qg=',\n lastChainWork: '00000000000000000000000000000000000000000000011bed7ab81a56a65cbc',\n prevChainWork: '0000000000000000000000000000000000000000000000dcc85f546d353f7b08',\n chain: 'test',\n validated: true\n },\n {\n sourceUrl: 'https://cdn.projectbabbage.com/blockheaders',\n fileName: 'testNet_15.headers',\n firstHeight: 1500000,\n prevHash: '0000000000000168de8736c8a424fd5ebe1dcf0a030ed5fa0699b8c0fafc0b5e',\n count: 100000,\n lastHash: '00000000000005504bfd1a3ce4688c30c86740390102b6cd464a2fb5e0e3fed1',\n fileHash: '1bCf0R0RsoadANX+6H4NH1b3jNuTPyTayoS1SpQXa2Q=',\n lastChainWork: '000000000000000000000000000000000000000000000156c3b84396da4e60b9',\n prevChainWork: '00000000000000000000000000000000000000000000011bed7ab81a56a65cbc',\n chain: 'test',\n validated: true\n },\n {\n sourceUrl: 'https://cdn.projectbabbage.com/blockheaders',\n fileName: 'testNet_16.headers',\n firstHeight: 1600000,\n prevHash: '00000000000005504bfd1a3ce4688c30c86740390102b6cd464a2fb5e0e3fed1',\n count: 77821,\n lastHash: '0000000065ef364929e71688b29320c5835fabd8a1c0b6d42b6726cb4afcc798',\n fileHash: 'AK1FlgOaPVFOeG2x+Tp7htOt15UaSpHXZjgx3F263x8=',\n lastChainWork: '00000000000000000000000000000000000000000000015814b641eb5d72e2ef',\n prevChainWork: '000000000000000000000000000000000000000000000156c3b84396da4e60b9',\n chain: 'test',\n validated: true\n },\n {\n sourceUrl: 'https://cdn.projectbabbage.com/blockheaders',\n fileName: 'mainNet_0.headers',\n firstHeight: 0,\n prevHash: '0000000000000000000000000000000000000000000000000000000000000000',\n count: 100000,\n lastHash: '000000000002d01c1fccc21636b607dfd930d31d01c3a62104612a1719011250',\n fileHash: 'DMXYETHMphmYRh5y0+qsJhj67ML5Ui4LE1eEZDYbnZE=',\n lastChainWork: 
'000000000000000000000000000000000000000000000000064492eaf00f2520',\n prevChainWork: '0000000000000000000000000000000000000000000000000000000000000000',\n chain: 'main',\n validated: true\n },\n {\n sourceUrl: 'https://cdn.projectbabbage.com/blockheaders',\n fileName: 'mainNet_1.headers',\n firstHeight: 100000,\n prevHash: '000000000002d01c1fccc21636b607dfd930d31d01c3a62104612a1719011250',\n count: 100000,\n lastHash: '00000000000003a20def7a05a77361b9657ff954b2f2080e135ea6f5970da215',\n fileHash: 'IID8O84Uny22i10fWHTQr6f9+9eFZ8dhVyegYPGSg+Q=',\n lastChainWork: '00000000000000000000000000000000000000000000001ac0479f335782cb80',\n prevChainWork: '000000000000000000000000000000000000000000000000064492eaf00f2520',\n chain: 'main',\n validated: true\n },\n {\n sourceUrl: 'https://cdn.projectbabbage.com/blockheaders',\n fileName: 'mainNet_2.headers',\n firstHeight: 200000,\n prevHash: '00000000000003a20def7a05a77361b9657ff954b2f2080e135ea6f5970da215',\n count: 100000,\n lastHash: '000000000000000067ecc744b5ae34eebbde14d21ca4db51652e4d67e155f07e',\n fileHash: 'wbfV/ZuPvLKHtRJN4QlHiKlpNncuqWA1dMJ6O9mhisc=',\n lastChainWork: '000000000000000000000000000000000000000000005a795f5d6ede10bc6d60',\n prevChainWork: '00000000000000000000000000000000000000000000001ac0479f335782cb80',\n chain: 'main',\n validated: true\n },\n {\n sourceUrl: 'https://cdn.projectbabbage.com/blockheaders',\n fileName: 'mainNet_3.headers',\n firstHeight: 300000,\n prevHash: '000000000000000067ecc744b5ae34eebbde14d21ca4db51652e4d67e155f07e',\n count: 100000,\n lastHash: '0000000000000000030034b661aed920a9bdf6bbfa6d2e7a021f78481882fa39',\n fileHash: '5pklz64as2MG6y9lQiiClZaA82f6xoK1xdzkSqOZLsA=',\n lastChainWork: '0000000000000000000000000000000000000000001229fea679a4cdc26e7460',\n prevChainWork: '000000000000000000000000000000000000000000005a795f5d6ede10bc6d60',\n chain: 'main',\n validated: true\n },\n {\n sourceUrl: 'https://cdn.projectbabbage.com/blockheaders',\n fileName: 'mainNet_4.headers',\n 
firstHeight: 400000,\n prevHash: '0000000000000000030034b661aed920a9bdf6bbfa6d2e7a021f78481882fa39',\n count: 100000,\n lastHash: '0000000000000000043831d6ebb013716f0580287ee5e5687e27d0ed72e6e523',\n fileHash: '2X78/S+Z/h5ELA63aC3xt6/o4G8JMcAOEiZ00ycKHsM=',\n lastChainWork: '0000000000000000000000000000000000000000007ae4707601d47bc6695487',\n prevChainWork: '0000000000000000000000000000000000000000001229fea679a4cdc26e7460',\n chain: 'main',\n validated: true\n },\n {\n sourceUrl: 'https://cdn.projectbabbage.com/blockheaders',\n fileName: 'mainNet_5.headers',\n firstHeight: 500000,\n prevHash: '0000000000000000043831d6ebb013716f0580287ee5e5687e27d0ed72e6e523',\n count: 100000,\n lastHash: '0000000000000000078f57b9a986b53b73f007c6b27b6f16409ca4eda83034e8',\n fileHash: 'Tzm60n66tIuq7wNdP6M1BH77iFzGCPbOMIl6smJ/LRg=',\n lastChainWork: '000000000000000000000000000000000000000000e8f2ea21f069a214067ed7',\n prevChainWork: '0000000000000000000000000000000000000000007ae4707601d47bc6695487',\n chain: 'main',\n validated: true\n },\n {\n sourceUrl: 'https://cdn.projectbabbage.com/blockheaders',\n fileName: 'mainNet_6.headers',\n firstHeight: 600000,\n prevHash: '0000000000000000078f57b9a986b53b73f007c6b27b6f16409ca4eda83034e8',\n count: 100000,\n lastHash: '000000000000000013abf3ab026610ed70e023476db8ce96f68637acdcbcf3cb',\n fileHash: 'O7SoyIDxhejB0Qs4rBO4OkfBK2yVZKhxra6YxZMhiIk=',\n lastChainWork: '0000000000000000000000000000000000000000012f32fb33b26aa239be0fc3',\n prevChainWork: '000000000000000000000000000000000000000000e8f2ea21f069a214067ed7',\n chain: 'main',\n validated: true\n },\n {\n sourceUrl: 'https://cdn.projectbabbage.com/blockheaders',\n fileName: 'mainNet_7.headers',\n firstHeight: 700000,\n prevHash: '000000000000000013abf3ab026610ed70e023476db8ce96f68637acdcbcf3cb',\n count: 100000,\n lastHash: '00000000000000000b6ae23bbe9f549844c20943d8c20b8ceedbae8aa1dde8e0',\n fileHash: '+0Wu2GrKgCv4o1yZfdWl60aAgvBj6Rt3xlWj8TQprUw=',\n lastChainWork: 
'000000000000000000000000000000000000000001483b2995af390c20b58320',\n prevChainWork: '0000000000000000000000000000000000000000012f32fb33b26aa239be0fc3',\n chain: 'main',\n validated: true\n },\n {\n chain: 'main',\n count: 100000,\n fileHash: 'xKYCsMzfbWdwq6RtEos4+4w7F3FroFMXb4tk4Z2gn5s=',\n fileName: 'mainNet_8.headers',\n firstHeight: 800000,\n lastChainWork: '000000000000000000000000000000000000000001664db1f2d50327928007e0',\n lastHash: '00000000000000000e7dcc27c06ee353bd37260b2e7e664314c204f0324a5087',\n prevChainWork: '000000000000000000000000000000000000000001483b2995af390c20b58320',\n prevHash: '00000000000000000b6ae23bbe9f549844c20943d8c20b8ceedbae8aa1dde8e0',\n sourceUrl: 'https://cdn.projectbabbage.com/blockheaders'\n },\n {\n chain: 'main',\n count: 7630,\n fileHash: 'R3JNRSzpFPvKXH2myRL+m420ycjrxRTcSI3aiMOJmfo=',\n fileName: 'mainNet_9.headers',\n firstHeight: 900000,\n lastChainWork: '00000000000000000000000000000000000000000167cca3f0721d58e023cf01',\n lastHash: '00000000000000000c119d65afcc66b640e98b839414c7e66d22b428ecb24a43',\n prevChainWork: '000000000000000000000000000000000000000001664db1f2d50327928007e0',\n prevHash: '00000000000000000e7dcc27c06ee353bd37260b2e7e664314c204f0324a5087',\n sourceUrl: 'https://cdn.projectbabbage.com/blockheaders'\n }\n];\n//# sourceMappingURL=validBulkHeaderFilesByFileHash.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/util/validBulkHeaderFilesByFileHash.js?\n}"); +eval("{\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\n__exportStar(__webpack_require__(/*! ./chaintracks */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/index.js\"), exports);\n__exportStar(__webpack_require__(/*! ./ChaintracksChainTracker */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/ChaintracksChainTracker.js\"), exports);\n__exportStar(__webpack_require__(/*! ./BHServiceClient */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/BHServiceClient.js\"), exports);\n//# sourceMappingURL=index.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/index.js?\n}"); /***/ }), @@ -3443,7 +3223,18 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.createDefaultWalletServicesOptions = createDefaultWalletServicesOptions;\nexports.arcDefaultUrl = arcDefaultUrl;\nexports.arcGorillaPoolUrl = arcGorillaPoolUrl;\nconst utilityHelpers_1 = __webpack_require__(/*! ../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst ChaintracksServiceClient_1 = __webpack_require__(/*! 
./chaintracker/chaintracks/ChaintracksServiceClient */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/chaintracks/ChaintracksServiceClient.js\");\nfunction createDefaultWalletServicesOptions(chain, arcCallbackUrl, arcCallbackToken, arcApiKey) {\n const deploymentId = `wallet-toolbox-${(0, utilityHelpers_1.randomBytesHex)(16)}`;\n const taalApiKey = arcApiKey || chain === 'main'\n ? 'mainnet_9596de07e92300c6287e4393594ae39c' // no plan\n : 'testnet_0e6cf72133b43ea2d7861da2a38684e3'; // personal \"starter\" key\n //const chaintracksUrl = `https://npm-registry.babbage.systems:${chain === 'main' ? 8084 : 8083}`\n const chaintracksUrl = `https://${chain}net-chaintracks.babbage.systems`;\n const o = {\n chain,\n taalApiKey,\n bsvExchangeRate: {\n timestamp: new Date('2023-12-13'),\n base: 'USD',\n rate: 47.52\n },\n bsvUpdateMsecs: 1000 * 60 * 15, // 15 minutes\n fiatExchangeRates: {\n timestamp: new Date('2023-12-13'),\n base: 'USD',\n rates: {\n USD: 1,\n GBP: 0.8,\n EUR: 0.93\n }\n },\n fiatUpdateMsecs: 1000 * 60 * 60 * 24, // 24 hours\n disableMapiCallback: true, // Rely on WalletMonitor by default.\n exchangeratesapiKey: 'bd539d2ff492bcb5619d5f27726a766f',\n chaintracksFiatExchangeRatesUrl: `${chaintracksUrl}/getFiatExchangeRates`,\n chaintracks: new ChaintracksServiceClient_1.ChaintracksServiceClient(chain, chaintracksUrl),\n arcUrl: arcDefaultUrl(chain),\n arcConfig: {\n apiKey: arcApiKey !== null && arcApiKey !== void 0 ? arcApiKey : undefined,\n deploymentId,\n callbackUrl: arcCallbackUrl !== null && arcCallbackUrl !== void 0 ? arcCallbackUrl : undefined,\n callbackToken: arcCallbackToken !== null && arcCallbackToken !== void 0 ? arcCallbackToken : undefined\n },\n arcGorillaPoolUrl: arcGorillaPoolUrl(chain),\n arcGorillaPoolConfig: {\n apiKey: arcApiKey !== null && arcApiKey !== void 0 ? arcApiKey : undefined,\n deploymentId,\n callbackUrl: arcCallbackUrl !== null && arcCallbackUrl !== void 0 ? 
arcCallbackUrl : undefined,\n callbackToken: arcCallbackToken !== null && arcCallbackToken !== void 0 ? arcCallbackToken : undefined\n }\n };\n return o;\n}\nfunction arcDefaultUrl(chain) {\n const url = chain === 'main' ? 'https://arc.taal.com' : 'https://arc-test.taal.com';\n return url;\n}\nfunction arcGorillaPoolUrl(chain) {\n const url = chain === 'main' ? 'https://arc.gorillapool.io' : undefined;\n return url;\n}\n//# sourceMappingURL=createDefaultWalletServicesOptions.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/createDefaultWalletServicesOptions.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.createDefaultWalletServicesOptions = createDefaultWalletServicesOptions;\nexports.arcDefaultUrl = arcDefaultUrl;\nexports.arcGorillaPoolUrl = arcGorillaPoolUrl;\nconst index_client_1 = __webpack_require__(/*! ../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nconst chaintracker_1 = __webpack_require__(/*! ./chaintracker */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/chaintracker/index.js\");\nfunction createDefaultWalletServicesOptions(chain, arcCallbackUrl, arcCallbackToken, arcApiKey) {\n const deploymentId = `wallet-toolbox-${(0, index_client_1.randomBytesHex)(16)}`;\n const taalApiKey = arcApiKey || chain === 'main'\n ? 
'mainnet_9596de07e92300c6287e4393594ae39c' // no plan\n : 'testnet_0e6cf72133b43ea2d7861da2a38684e3'; // personal \"starter\" key\n const o = {\n chain,\n taalApiKey,\n bsvExchangeRate: {\n timestamp: new Date('2023-12-13'),\n base: 'USD',\n rate: 47.52\n },\n bsvUpdateMsecs: 1000 * 60 * 15, // 15 minutes\n fiatExchangeRates: {\n timestamp: new Date('2023-12-13'),\n base: 'USD',\n rates: {\n USD: 1,\n GBP: 0.8,\n EUR: 0.93\n }\n },\n fiatUpdateMsecs: 1000 * 60 * 60 * 24, // 24 hours\n disableMapiCallback: true, // Rely on WalletMonitor by default.\n exchangeratesapiKey: 'bd539d2ff492bcb5619d5f27726a766f',\n chaintracksFiatExchangeRatesUrl: `https://npm-registry.babbage.systems:${chain === 'main' ? 8084 : 8083}/getFiatExchangeRates`,\n chaintracks: new chaintracker_1.ChaintracksServiceClient(chain, `https://npm-registry.babbage.systems:${chain === 'main' ? 8084 : 8083}`),\n arcUrl: arcDefaultUrl(chain),\n arcConfig: {\n apiKey: arcApiKey !== null && arcApiKey !== void 0 ? arcApiKey : undefined,\n deploymentId,\n callbackUrl: arcCallbackUrl !== null && arcCallbackUrl !== void 0 ? arcCallbackUrl : undefined,\n callbackToken: arcCallbackToken !== null && arcCallbackToken !== void 0 ? arcCallbackToken : undefined\n },\n arcGorillaPoolUrl: arcGorillaPoolUrl(chain),\n arcGorillaPoolConfig: {\n apiKey: arcApiKey !== null && arcApiKey !== void 0 ? arcApiKey : undefined,\n deploymentId,\n callbackUrl: arcCallbackUrl !== null && arcCallbackUrl !== void 0 ? arcCallbackUrl : undefined,\n callbackToken: arcCallbackToken !== null && arcCallbackToken !== void 0 ? arcCallbackToken : undefined\n }\n };\n return o;\n}\nfunction arcDefaultUrl(chain) {\n const url = chain === 'main' ? 'https://arc.taal.com' : 'https://arc-test.taal.com';\n return url;\n}\nfunction arcGorillaPoolUrl(chain) {\n const url = chain === 'main' ? 
'https://arc.gorillapool.io' : undefined;\n return url;\n}\n//# sourceMappingURL=createDefaultWalletServicesOptions.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/createDefaultWalletServicesOptions.js?\n}"); + +/***/ }), + +/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/index.js": +/*!***************************************************************************!*\ + !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/index.js ***! + \***************************************************************************/ +/***/ (function(__unused_webpack_module, exports, __webpack_require__) { + +"use strict"; +eval("{\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\n__exportStar(__webpack_require__(/*! ./Services */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/Services.js\"), exports);\n//# sourceMappingURL=index.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/index.js?\n}"); /***/ }), @@ -3454,7 +3245,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.ARC = void 0;\nconst sdk_1 = __webpack_require__(/*! 
@bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst utilityHelpers_1 = __webpack_require__(/*! ../../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst WalletError_1 = __webpack_require__(/*! ../../sdk/WalletError */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WalletError.js\");\nfunction defaultDeploymentId() {\n return `ts-sdk-${sdk_1.Utils.toHex((0, sdk_1.Random)(16))}`;\n}\n/**\n * Represents an ARC transaction broadcaster.\n */\nclass ARC {\n constructor(URL, config, name) {\n this.name = name !== null && name !== void 0 ? name : 'ARC';\n this.URL = URL;\n if (typeof config === 'string') {\n this.apiKey = config;\n this.httpClient = (0, sdk_1.defaultHttpClient)();\n this.deploymentId = defaultDeploymentId();\n this.callbackToken = undefined;\n this.callbackUrl = undefined;\n }\n else {\n const configObj = config !== null && config !== void 0 ? config : {};\n const { apiKey, deploymentId, httpClient, callbackToken, callbackUrl, headers } = configObj;\n this.apiKey = apiKey;\n this.httpClient = httpClient !== null && httpClient !== void 0 ? httpClient : (0, sdk_1.defaultHttpClient)();\n this.deploymentId = deploymentId !== null && deploymentId !== void 0 ? 
deploymentId : defaultDeploymentId();\n this.callbackToken = callbackToken;\n this.callbackUrl = callbackUrl;\n this.headers = headers;\n }\n }\n /**\n * Constructs a dictionary of the default & supplied request headers.\n */\n requestHeaders() {\n const headers = {\n 'Content-Type': 'application/json',\n 'XDeployment-ID': this.deploymentId\n };\n if (this.apiKey != null && this.apiKey !== '') {\n headers.Authorization = `Bearer ${this.apiKey}`;\n }\n if (this.callbackUrl != null && this.callbackUrl !== '') {\n headers['X-CallbackUrl'] = this.callbackUrl;\n }\n if (this.callbackToken != null && this.callbackToken !== '') {\n headers['X-CallbackToken'] = this.callbackToken;\n }\n if (this.headers != null) {\n for (const key in this.headers) {\n headers[key] = this.headers[key];\n }\n }\n return headers;\n }\n /**\n * The ARC '/v1/tx' endpoint, as of 2025-02-17 supports all of the following hex string formats:\n * 1. Single serialized raw transaction.\n * 2. Single EF serialized raw transaction (untested).\n * 3. V1 serialized Beef (results returned reflect only the last transaction in the beef)\n *\n * The ARC '/v1/tx' endpoint, as of 2025-02-17 DOES NOT support the following hex string formats:\n * 1. 
V2 serialized Beef\n *\n * @param rawTx\n * @param txids\n * @returns\n */\n async postRawTx(rawTx, txids) {\n let txid = sdk_1.Utils.toHex((0, utilityHelpers_1.doubleSha256BE)(sdk_1.Utils.toArray(rawTx, 'hex')));\n if (txids) {\n txid = txids.slice(-1)[0];\n }\n else {\n txids = [txid];\n }\n const requestOptions = {\n method: 'POST',\n headers: this.requestHeaders(),\n data: { rawTx },\n signal: AbortSignal.timeout(1000 * 30) // 30 seconds timeout, error.code will be 'ABORT_ERR'\n };\n const r = {\n txid,\n status: 'success',\n notes: []\n };\n const url = `${this.URL}/v1/tx`;\n const nn = () => ({ name: this.name, when: new Date().toISOString() });\n const nne = () => ({ ...nn(), rawTx, txids: txids.join(','), url });\n try {\n const response = await this.httpClient.request(url, requestOptions);\n const { txid, extraInfo, txStatus, competingTxs } = response.data;\n const nnr = () => ({\n txid,\n extraInfo,\n txStatus,\n competingTxs: competingTxs === null || competingTxs === void 0 ? 
void 0 : competingTxs.join(',')\n });\n if (response.ok) {\n r.data = `${txStatus} ${extraInfo}`;\n if (r.txid !== txid)\n r.data += ` txid altered from ${r.txid} to ${txid}`;\n r.txid = txid;\n if (txStatus === 'DOUBLE_SPEND_ATTEMPTED' || txStatus === 'SEEN_IN_ORPHAN_MEMPOOL') {\n r.status = 'error';\n r.doubleSpend = true;\n r.competingTxs = competingTxs;\n r.notes.push({ ...nne(), ...nnr(), what: 'postRawTxDoubleSpend' });\n }\n else {\n r.notes.push({ ...nn(), ...nnr(), what: 'postRawTxSuccess' });\n }\n }\n else if (typeof response === 'string') {\n r.notes.push({ ...nne(), what: 'postRawTxString', response });\n r.status = 'error';\n // response is not normally a string\n r.serviceError = true;\n }\n else {\n r.status = 'error';\n // Treat unknown errors as service errors\n r.serviceError = true;\n const n = {\n ...nn(),\n ...nne(),\n ...nnr(),\n what: 'postRawTxError'\n };\n const ed = {};\n r.data = ed;\n const st = typeof response.status;\n if (st === 'number' || st === 'string') {\n n.status = response.status;\n ed.status = response.status.toString();\n }\n else {\n n.status = st;\n ed.status = 'ERR_UNKNOWN';\n }\n let d = response.data;\n if (d && typeof d === 'string') {\n n.data = response.data.slice(0, 128);\n try {\n d = JSON.parse(d);\n }\n catch (_a) {\n // Intentionally left empty\n }\n }\n else if (d && typeof d === 'object') {\n ed.more = d;\n ed.detail = d['detail'];\n if (typeof ed.detail !== 'string')\n ed.detail = undefined;\n if (ed.detail) {\n n.detail = ed.detail;\n }\n }\n r.notes.push(n);\n }\n }\n catch (eu) {\n const e = WalletError_1.WalletError.fromUnknown(eu);\n r.status = 'error';\n r.serviceError = true;\n r.data = `${e.code} ${e.message}`;\n r.notes.push({\n ...nne(),\n what: 'postRawTxCatch',\n code: e.code,\n description: e.description\n });\n }\n return r;\n }\n /**\n * ARC does not natively support a postBeef end-point aware of multiple txids of interest in the Beef.\n *\n * It does process multiple new transactions, 
however, which allows results for all txids of interest\n * to be collected by the `/v1/tx/${txid}` endpoint.\n *\n * @param beef\n * @param txids\n * @returns\n */\n async postBeef(beef, txids) {\n const r = {\n name: this.name,\n status: 'success',\n txidResults: [],\n notes: []\n };\n const nn = () => ({ name: this.name, when: new Date().toISOString() });\n if (beef.version === sdk_1.BEEF_V2 && beef.txs.every(btx => !btx.isTxidOnly)) {\n beef.version = sdk_1.BEEF_V1;\n r.notes.push({ ...nn(), what: 'postBeefV2ToV1' });\n }\n const beefHex = beef.toHex();\n const prtr = await this.postRawTx(beefHex, txids);\n r.status = prtr.status;\n r.txidResults = [prtr];\n // Since postRawTx only returns results for a single txid,\n // replicate the basic results any additional txids.\n // TODO: Temporary hack...\n for (const txid of txids) {\n if (prtr.txid === txid)\n continue;\n const tr = {\n txid,\n status: 'success',\n notes: []\n };\n // For the extra txids, go back to the service for confirmation...\n const dr = await this.getTxData(txid);\n if (dr.txid !== txid) {\n tr.status = 'error';\n tr.data = 'internal error';\n tr.notes.push({\n ...nn(),\n what: 'postBeefGetTxDataInternal',\n txid,\n returnedTxid: dr.txid\n });\n }\n else if (dr.txStatus === 'SEEN_ON_NETWORK' || dr.txStatus === 'STORED') {\n tr.data = dr.txStatus;\n tr.notes.push({\n ...nn(),\n what: 'postBeefGetTxDataSuccess',\n txid,\n txStatus: dr.txStatus\n });\n }\n else {\n tr.status = 'error';\n tr.data = dr;\n tr.notes.push({\n ...nn(),\n what: 'postBeefGetTxDataError',\n txid,\n txStatus: dr.txStatus\n });\n }\n r.txidResults.push(tr);\n if (r.status === 'success' && tr.status === 'error')\n r.status = 'error';\n }\n return r;\n }\n /**\n * This seems to only work for recently submitted txids...but that's all we need to complete postBeef!\n * @param txid\n * @returns\n */\n async getTxData(txid) {\n const requestOptions = {\n method: 'GET',\n headers: this.requestHeaders()\n };\n const response = 
await this.httpClient.request(`${this.URL}/v1/tx/${txid}`, requestOptions);\n return response.data;\n }\n}\nexports.ARC = ARC;\n//# sourceMappingURL=ARC.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/providers/ARC.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.ARC = void 0;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst index_client_1 = __webpack_require__(/*! ../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nfunction defaultDeploymentId() {\n return `ts-sdk-${sdk_1.Utils.toHex((0, sdk_1.Random)(16))}`;\n}\n/**\n * Represents an ARC transaction broadcaster.\n */\nclass ARC {\n constructor(URL, config, name) {\n this.name = name !== null && name !== void 0 ? name : 'ARC';\n this.URL = URL;\n if (typeof config === 'string') {\n this.apiKey = config;\n this.httpClient = (0, sdk_1.defaultHttpClient)();\n this.deploymentId = defaultDeploymentId();\n this.callbackToken = undefined;\n this.callbackUrl = undefined;\n }\n else {\n const configObj = config !== null && config !== void 0 ? config : {};\n const { apiKey, deploymentId, httpClient, callbackToken, callbackUrl, headers } = configObj;\n this.apiKey = apiKey;\n this.httpClient = httpClient !== null && httpClient !== void 0 ? httpClient : (0, sdk_1.defaultHttpClient)();\n this.deploymentId = deploymentId !== null && deploymentId !== void 0 ? 
deploymentId : defaultDeploymentId();\n this.callbackToken = callbackToken;\n this.callbackUrl = callbackUrl;\n this.headers = headers;\n }\n }\n /**\n * Constructs a dictionary of the default & supplied request headers.\n */\n requestHeaders() {\n const headers = {\n 'Content-Type': 'application/json',\n 'XDeployment-ID': this.deploymentId\n };\n if (this.apiKey != null && this.apiKey !== '') {\n headers.Authorization = `Bearer ${this.apiKey}`;\n }\n if (this.callbackUrl != null && this.callbackUrl !== '') {\n headers['X-CallbackUrl'] = this.callbackUrl;\n }\n if (this.callbackToken != null && this.callbackToken !== '') {\n headers['X-CallbackToken'] = this.callbackToken;\n }\n if (this.headers != null) {\n for (const key in this.headers) {\n headers[key] = this.headers[key];\n }\n }\n return headers;\n }\n /**\n * The ARC '/v1/tx' endpoint, as of 2025-02-17 supports all of the following hex string formats:\n * 1. Single serialized raw transaction.\n * 2. Single EF serialized raw transaction (untested).\n * 3. V1 serialized Beef (results returned reflect only the last transaction in the beef)\n *\n * The ARC '/v1/tx' endpoint, as of 2025-02-17 DOES NOT support the following hex string formats:\n * 1. 
V2 serialized Beef\n *\n * @param rawTx\n * @param txids\n * @returns\n */\n async postRawTx(rawTx, txids) {\n let txid = sdk_1.Utils.toHex((0, index_client_1.doubleSha256BE)(sdk_1.Utils.toArray(rawTx, 'hex')));\n if (txids) {\n txid = txids.slice(-1)[0];\n }\n else {\n txids = [txid];\n }\n const requestOptions = {\n method: 'POST',\n headers: this.requestHeaders(),\n data: { rawTx },\n signal: AbortSignal.timeout(1000 * 30) // 30 seconds timeout, error.code will be 'ABORT_ERR'\n };\n const r = {\n txid,\n status: 'success',\n notes: []\n };\n const url = `${this.URL}/v1/tx`;\n const nn = () => ({ name: this.name, when: new Date().toISOString() });\n const nne = () => ({ ...nn(), rawTx, txids: txids.join(','), url });\n try {\n const response = await this.httpClient.request(url, requestOptions);\n const { txid, extraInfo, txStatus, competingTxs } = response.data;\n const nnr = () => ({\n txid,\n extraInfo,\n txStatus,\n competingTxs: competingTxs === null || competingTxs === void 0 ? void 0 : competingTxs.join(',')\n });\n if (response.ok) {\n r.data = `${txStatus} ${extraInfo}`;\n if (r.txid !== txid)\n r.data += ` txid altered from ${r.txid} to ${txid}`;\n r.txid = txid;\n if (txStatus === 'DOUBLE_SPEND_ATTEMPTED' || txStatus === 'SEEN_IN_ORPHAN_MEMPOOL') {\n r.status = 'error';\n r.doubleSpend = true;\n r.competingTxs = competingTxs;\n r.notes.push({ ...nne(), ...nnr(), what: 'postRawTxDoubleSpend' });\n }\n else {\n r.notes.push({ ...nn(), ...nnr(), what: 'postRawTxSuccess' });\n }\n }\n else if (typeof response === 'string') {\n r.notes.push({ ...nne(), what: 'postRawTxString', response });\n r.status = 'error';\n // response is not normally a string\n r.serviceError = true;\n }\n else {\n r.status = 'error';\n // Treat unknown errors as service errors\n r.serviceError = true;\n const n = {\n ...nn(),\n ...nne(),\n ...nnr(),\n what: 'postRawTxError'\n };\n const ed = {};\n r.data = ed;\n const st = typeof response.status;\n if (st === 'number' || st === 
'string') {\n n.status = response.status;\n ed.status = response.status.toString();\n }\n else {\n n.status = st;\n ed.status = 'ERR_UNKNOWN';\n }\n let d = response.data;\n if (d && typeof d === 'string') {\n n.data = response.data.slice(0, 128);\n try {\n d = JSON.parse(d);\n }\n catch (_a) {\n // Intentionally left empty\n }\n }\n else if (d && typeof d === 'object') {\n ed.more = d;\n ed.detail = d['detail'];\n if (typeof ed.detail !== 'string')\n ed.detail = undefined;\n if (ed.detail) {\n n.detail = ed.detail;\n }\n }\n r.notes.push(n);\n }\n }\n catch (eu) {\n const e = index_client_1.sdk.WalletError.fromUnknown(eu);\n r.status = 'error';\n r.serviceError = true;\n r.data = `${e.code} ${e.message}`;\n r.notes.push({\n ...nne(),\n what: 'postRawTxCatch',\n code: e.code,\n description: e.description\n });\n }\n return r;\n }\n /**\n * ARC does not natively support a postBeef end-point aware of multiple txids of interest in the Beef.\n *\n * It does process multiple new transactions, however, which allows results for all txids of interest\n * to be collected by the `/v1/tx/${txid}` endpoint.\n *\n * @param beef\n * @param txids\n * @returns\n */\n async postBeef(beef, txids) {\n const r = {\n name: this.name,\n status: 'success',\n txidResults: [],\n notes: []\n };\n const nn = () => ({ name: this.name, when: new Date().toISOString() });\n if (beef.version === sdk_1.BEEF_V2 && beef.txs.every(btx => !btx.isTxidOnly)) {\n beef.version = sdk_1.BEEF_V1;\n r.notes.push({ ...nn(), what: 'postBeefV2ToV1' });\n }\n const beefHex = beef.toHex();\n const prtr = await this.postRawTx(beefHex, txids);\n r.status = prtr.status;\n r.txidResults = [prtr];\n // Since postRawTx only returns results for a single txid,\n // replicate the basic results any additional txids.\n // TODO: Temporary hack...\n for (const txid of txids) {\n if (prtr.txid === txid)\n continue;\n const tr = {\n txid,\n status: 'success',\n notes: []\n };\n // For the extra txids, go back to the service for 
confirmation...\n const dr = await this.getTxData(txid);\n if (dr.txid !== txid) {\n tr.status = 'error';\n tr.data = 'internal error';\n tr.notes.push({\n ...nn(),\n what: 'postBeefGetTxDataInternal',\n txid,\n returnedTxid: dr.txid\n });\n }\n else if (dr.txStatus === 'SEEN_ON_NETWORK' || dr.txStatus === 'STORED') {\n tr.data = dr.txStatus;\n tr.notes.push({\n ...nn(),\n what: 'postBeefGetTxDataSuccess',\n txid,\n txStatus: dr.txStatus\n });\n }\n else {\n tr.status = 'error';\n tr.data = dr;\n tr.notes.push({\n ...nn(),\n what: 'postBeefGetTxDataError',\n txid,\n txStatus: dr.txStatus\n });\n }\n r.txidResults.push(tr);\n if (r.status === 'success' && tr.status === 'error')\n r.status = 'error';\n }\n return r;\n }\n /**\n * This seems to only work for recently submitted txids...but that's all we need to complete postBeef!\n * @param txid\n * @returns\n */\n async getTxData(txid) {\n const requestOptions = {\n method: 'GET',\n headers: this.requestHeaders()\n };\n const response = await this.httpClient.request(`${this.URL}/v1/tx/${txid}`, requestOptions);\n return response.data;\n }\n}\nexports.ARC = ARC;\n//# sourceMappingURL=ARC.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/providers/ARC.js?\n}"); /***/ }), @@ -3465,7 +3256,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.Bitails = void 0;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst utilityHelpers_1 = __webpack_require__(/*! ../../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst WalletError_1 = __webpack_require__(/*! ../../sdk/WalletError */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WalletError.js\");\nconst tscProofToMerklePath_1 = __webpack_require__(/*! 
../../utility/tscProofToMerklePath */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/tscProofToMerklePath.js\");\n/**\n *\n */\nclass Bitails {\n constructor(chain = 'main', config = {}) {\n const { apiKey, httpClient } = config;\n this.chain = chain;\n this.URL = chain === 'main' ? `https://api.bitails.io/` : `https://test-api.bitails.io/`;\n this.httpClient = httpClient !== null && httpClient !== void 0 ? httpClient : (0, sdk_1.defaultHttpClient)();\n this.apiKey = apiKey !== null && apiKey !== void 0 ? apiKey : '';\n }\n getHttpHeaders() {\n const headers = {\n Accept: 'application/json'\n };\n if (typeof this.apiKey === 'string' && this.apiKey.trim() !== '') {\n headers.Authorization = this.apiKey;\n }\n return headers;\n }\n /**\n * Bitails does not natively support a postBeef end-point aware of multiple txids of interest in the Beef.\n *\n * Send rawTx in `txids` order from beef.\n *\n * @param beef\n * @param txids\n * @returns\n */\n async postBeef(beef, txids) {\n const nn = () => ({\n name: 'BitailsPostBeef',\n when: new Date().toISOString()\n });\n const nne = () => ({ ...nn(), beef: beef.toHex(), txids: txids.join(',') });\n const note = { ...nn(), what: 'postBeef' };\n const raws = [];\n for (const txid of txids) {\n const rawTx = sdk_1.Utils.toHex(beef.findTxid(txid).rawTx);\n raws.push(rawTx);\n }\n const r = await this.postRaws(raws, txids);\n r.notes.unshift(note);\n if (r.status !== 'success')\n r.notes.push({ ...nne(), what: 'postBeefError' });\n else\n r.notes.push({ ...nn(), what: 'postBeefSuccess' });\n return r;\n }\n /**\n * @param raws Array of raw transactions to broadcast as hex strings\n * @param txids Array of txids for transactions in raws for which results are requested, remaining raws are supporting only.\n * @returns\n */\n async postRaws(raws, txids) {\n const r = {\n name: 'BitailsPostRaws',\n status: 'success',\n txidResults: [],\n notes: []\n };\n const rawTxids = [];\n for (const raw of raws) {\n const txid = 
sdk_1.Utils.toHex((0, utilityHelpers_1.doubleSha256BE)(sdk_1.Utils.toArray(raw, 'hex')));\n // Results aren't always identified by txid.\n rawTxids.push(txid);\n if (!txids || txids.indexOf(txid) >= 0) {\n r.txidResults.push({\n txid,\n status: 'success',\n notes: []\n });\n }\n }\n const headers = this.getHttpHeaders();\n headers['Content-Type'] = 'application/json';\n //headers['Accept'] = 'text/json'\n const data = { raws: raws };\n const requestOptions = {\n method: 'POST',\n headers,\n data\n };\n const url = `${this.URL}tx/broadcast/multi`;\n const nn = () => ({\n name: 'BitailsPostRawTx',\n when: new Date().toISOString()\n });\n const nne = () => ({\n ...nn(),\n raws: raws.join(','),\n txids: r.txidResults.map(r => r.txid).join(','),\n url\n });\n try {\n const response = await this.httpClient.request(url, requestOptions);\n if (response.ok) {\n // status: 201, statusText: 'Created'\n const btrs = response.data;\n if (btrs.length !== raws.length) {\n r.status = 'error';\n r.notes.push({ ...nne(), what: 'postRawsErrorResultsCount' });\n }\n else {\n // Check that each response result has a txid that matches corresponding rawTxids\n let i = -1;\n for (const btr of btrs) {\n i++;\n if (!btr.txid) {\n btr.txid = rawTxids[i];\n r.notes.push({ ...nn(), what: 'postRawsResultMissingTxids', i, rawsTxid: rawTxids[i] });\n }\n else if (btr.txid !== rawTxids[i]) {\n r.status = 'error';\n r.notes.push({ ...nn(), what: 'postRawsResultTxids', i, txid: btr.txid, rawsTxid: rawTxids[i] });\n }\n }\n if (r.status === 'success') {\n // btrs has correct number of results and each one has expected txid.\n // focus on results for requested txids\n for (const rt of r.txidResults) {\n const btr = btrs.find(btr => btr.txid === rt.txid);\n const txid = rt.txid;\n if (btr.error) {\n // code: -25, message: 'missing-inputs'\n // code: -27, message: 'already-in-mempool'\n const { code, message } = btr.error;\n if (code === -27) {\n rt.notes.push({ ...nne(), what: 
'postRawsSuccessAlreadyInMempool' });\n }\n else {\n rt.status = 'error';\n if (code === -25) {\n rt.doubleSpend = true; // this is a possible double spend attempt\n rt.competingTxs = undefined; // not provided with any data for this.\n rt.notes.push({ ...nne(), what: 'postRawsErrorMissingInputs' });\n }\n else if (btr['code'] === 'ECONNRESET') {\n rt.notes.push({ ...nne(), what: 'postRawsErrorECONNRESET', txid, message });\n }\n else {\n rt.notes.push({ ...nne(), what: 'postRawsError', txid, code, message });\n }\n }\n }\n else {\n rt.notes.push({ ...nn(), what: 'postRawsSuccess' });\n }\n if (rt.status !== 'success' && r.status === 'success')\n r.status = 'error';\n }\n }\n }\n }\n else {\n r.status = 'error';\n const n = { ...nne(), what: 'postRawsError' };\n r.notes.push(n);\n }\n }\n catch (eu) {\n r.status = 'error';\n const e = WalletError_1.WalletError.fromUnknown(eu);\n const { code, description } = e;\n r.notes.push({ ...nne(), what: 'postRawsCatch', code, description });\n }\n return r;\n }\n /**\n *\n * @param txid\n * @param services\n * @returns\n */\n async getMerklePath(txid, services) {\n const r = { name: 'BitailsTsc', notes: [] };\n const url = `${this.URL}tx/${txid}/proof/tsc`;\n const nn = () => ({ name: 'BitailsProofTsc', when: new Date().toISOString(), txid, url });\n const headers = this.getHttpHeaders();\n const requestOptions = { method: 'GET', headers };\n try {\n const response = await this.httpClient.request(url, requestOptions);\n const nne = () => ({ ...nn(), txid, url, status: response.status, statusText: response.statusText });\n if (response.status === 404 && response.statusText === 'Not Found') {\n r.notes.push({ ...nn(), what: 'getMerklePathNotFound' });\n }\n else if (!response.ok || response.status !== 200 || response.statusText !== 'OK') {\n r.notes.push({ ...nne(), what: 'getMerklePathBadStatus' });\n }\n else if (!response.data) {\n r.notes.push({ ...nne(), what: 'getMerklePathNoData' });\n }\n else {\n const p = 
response.data;\n const header = await services.hashToHeader(p.target);\n if (header) {\n const proof = { index: p.index, nodes: p.nodes, height: header.height };\n r.merklePath = (0, tscProofToMerklePath_1.convertProofToMerklePath)(txid, proof);\n r.header = header;\n r.notes.push({ ...nne(), what: 'getMerklePathSuccess' });\n }\n else {\n r.notes.push({ ...nne(), what: 'getMerklePathNoHeader', target: p.target });\n }\n }\n }\n catch (eu) {\n const e = WalletError_1.WalletError.fromUnknown(eu);\n const { code, description } = e;\n r.notes.push({ ...nn(), what: 'getMerklePathCatch', code, description });\n r.error = e;\n }\n return r;\n }\n}\nexports.Bitails = Bitails;\n//# sourceMappingURL=Bitails.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/providers/Bitails.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.Bitails = void 0;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst index_client_1 = __webpack_require__(/*! ../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\n/**\n *\n */\nclass Bitails {\n constructor(chain = 'main', config = {}) {\n const { apiKey, httpClient } = config;\n this.chain = chain;\n this.URL = chain === 'main' ? `https://api.bitails.io/` : `https://test-api.bitails.io/`;\n this.httpClient = httpClient !== null && httpClient !== void 0 ? httpClient : (0, sdk_1.defaultHttpClient)();\n this.apiKey = apiKey !== null && apiKey !== void 0 ? 
apiKey : '';\n }\n getHttpHeaders() {\n const headers = {\n Accept: 'application/json'\n };\n if (typeof this.apiKey === 'string' && this.apiKey.trim() !== '') {\n headers.Authorization = this.apiKey;\n }\n return headers;\n }\n /**\n * Bitails does not natively support a postBeef end-point aware of multiple txids of interest in the Beef.\n *\n * Send rawTx in `txids` order from beef.\n *\n * @param beef\n * @param txids\n * @returns\n */\n async postBeef(beef, txids) {\n const nn = () => ({\n name: 'BitailsPostBeef',\n when: new Date().toISOString()\n });\n const nne = () => ({ ...nn(), beef: beef.toHex(), txids: txids.join(',') });\n const note = { ...nn(), what: 'postBeef' };\n const raws = [];\n for (const txid of txids) {\n const rawTx = sdk_1.Utils.toHex(beef.findTxid(txid).rawTx);\n raws.push(rawTx);\n }\n const r = await this.postRaws(raws, txids);\n r.notes.unshift(note);\n if (r.status !== 'success')\n r.notes.push({ ...nne(), what: 'postBeefError' });\n else\n r.notes.push({ ...nn(), what: 'postBeefSuccess' });\n return r;\n }\n /**\n * @param raws Array of raw transactions to broadcast as hex strings\n * @param txids Array of txids for transactions in raws for which results are requested, remaining raws are supporting only.\n * @returns\n */\n async postRaws(raws, txids) {\n const r = {\n name: 'BitailsPostRaws',\n status: 'success',\n txidResults: [],\n notes: []\n };\n const rawTxids = [];\n for (const raw of raws) {\n const txid = sdk_1.Utils.toHex((0, index_client_1.doubleSha256BE)(sdk_1.Utils.toArray(raw, 'hex')));\n // Results aren't always identified by txid.\n rawTxids.push(txid);\n if (!txids || txids.indexOf(txid) >= 0) {\n r.txidResults.push({\n txid,\n status: 'success',\n notes: []\n });\n }\n }\n const headers = this.getHttpHeaders();\n headers['Content-Type'] = 'application/json';\n //headers['Accept'] = 'text/json'\n const data = { raws: raws };\n const requestOptions = {\n method: 'POST',\n headers,\n data\n };\n const url = 
`${this.URL}tx/broadcast/multi`;\n const nn = () => ({\n name: 'BitailsPostRawTx',\n when: new Date().toISOString()\n });\n const nne = () => ({\n ...nn(),\n raws: raws.join(','),\n txids: r.txidResults.map(r => r.txid).join(','),\n url\n });\n try {\n const response = await this.httpClient.request(url, requestOptions);\n if (response.ok) {\n // status: 201, statusText: 'Created'\n const btrs = response.data;\n if (btrs.length !== raws.length) {\n r.status = 'error';\n r.notes.push({ ...nne(), what: 'postRawsErrorResultsCount' });\n }\n else {\n // Check that each response result has a txid that matches corresponding rawTxids\n let i = -1;\n for (const btr of btrs) {\n i++;\n if (!btr.txid) {\n btr.txid = rawTxids[i];\n r.notes.push({ ...nn(), what: 'postRawsResultMissingTxids', i, rawsTxid: rawTxids[i] });\n }\n else if (btr.txid !== rawTxids[i]) {\n r.status = 'error';\n r.notes.push({ ...nn(), what: 'postRawsResultTxids', i, txid: btr.txid, rawsTxid: rawTxids[i] });\n }\n }\n if (r.status === 'success') {\n // btrs has correct number of results and each one has expected txid.\n // focus on results for requested txids\n for (const rt of r.txidResults) {\n const btr = btrs.find(btr => btr.txid === rt.txid);\n const txid = rt.txid;\n if (btr.error) {\n // code: -25, message: 'missing-inputs'\n // code: -27, message: 'already-in-mempool'\n const { code, message } = btr.error;\n if (code === -27) {\n rt.notes.push({ ...nne(), what: 'postRawsSuccessAlreadyInMempool' });\n }\n else {\n rt.status = 'error';\n if (code === -25) {\n rt.doubleSpend = true; // this is a possible double spend attempt\n rt.competingTxs = undefined; // not provided with any data for this.\n rt.notes.push({ ...nne(), what: 'postRawsErrorMissingInputs' });\n }\n else if (btr['code'] === 'ECONNRESET') {\n rt.notes.push({ ...nne(), what: 'postRawsErrorECONNRESET', txid, message });\n }\n else {\n rt.notes.push({ ...nne(), what: 'postRawsError', txid, code, message });\n }\n }\n }\n else {\n 
rt.notes.push({ ...nn(), what: 'postRawsSuccess' });\n }\n if (rt.status !== 'success' && r.status === 'success')\n r.status = 'error';\n }\n }\n }\n }\n else {\n r.status = 'error';\n const n = { ...nne(), what: 'postRawsError' };\n r.notes.push(n);\n }\n }\n catch (eu) {\n r.status = 'error';\n const e = index_client_1.sdk.WalletError.fromUnknown(eu);\n const { code, description } = e;\n r.notes.push({ ...nne(), what: 'postRawsCatch', code, description });\n }\n return r;\n }\n /**\n *\n * @param txid\n * @param services\n * @returns\n */\n async getMerklePath(txid, services) {\n const r = { name: 'BitailsTsc', notes: [] };\n const url = `${this.URL}tx/${txid}/proof/tsc`;\n const nn = () => ({ name: 'BitailsProofTsc', when: new Date().toISOString(), txid, url });\n const headers = this.getHttpHeaders();\n const requestOptions = { method: 'GET', headers };\n try {\n const response = await this.httpClient.request(url, requestOptions);\n const nne = () => ({ ...nn(), txid, url, status: response.status, statusText: response.statusText });\n if (response.status === 404 && response.statusText === 'Not Found') {\n r.notes.push({ ...nn(), what: 'getMerklePathNotFound' });\n }\n else if (!response.ok || response.status !== 200 || response.statusText !== 'OK') {\n r.notes.push({ ...nne(), what: 'getMerklePathBadStatus' });\n }\n else if (!response.data) {\n r.notes.push({ ...nne(), what: 'getMerklePathNoData' });\n }\n else {\n const p = response.data;\n const header = await services.hashToHeader(p.target);\n if (header) {\n const proof = { index: p.index, nodes: p.nodes, height: header.height };\n r.merklePath = (0, index_client_1.convertProofToMerklePath)(txid, proof);\n r.header = header;\n r.notes.push({ ...nne(), what: 'getMerklePathSuccess' });\n }\n else {\n r.notes.push({ ...nne(), what: 'getMerklePathNoHeader', target: p.target });\n }\n }\n }\n catch (eu) {\n const e = index_client_1.sdk.WalletError.fromUnknown(eu);\n const { code, description } = e;\n 
r.notes.push({ ...nn(), what: 'getMerklePathCatch', code, description });\n r.error = e;\n }\n return r;\n }\n}\nexports.Bitails = Bitails;\n//# sourceMappingURL=Bitails.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/providers/Bitails.js?\n}"); /***/ }), @@ -3487,18 +3278,18 @@ /***/ (function(__unused_webpack_module, exports, __webpack_require__) { "use strict"; -eval("{\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.WhatsOnChain = exports.WhatsOnChainNoServices = void 0;\nexports.convertWocToBlockHeaderHex = convertWocToBlockHeaderHex;\nexports.getWhatsOnChainBlockHeaderByHash = getWhatsOnChainBlockHeaderByHash;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst tscProofToMerklePath_1 = __webpack_require__(/*! ../../utility/tscProofToMerklePath */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/tscProofToMerklePath.js\");\nconst SdkWhatsOnChain_1 = __importDefault(__webpack_require__(/*! ./SdkWhatsOnChain */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/providers/SdkWhatsOnChain.js\"));\nconst WERR_errors_1 = __webpack_require__(/*! ../../sdk/WERR_errors */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WERR_errors.js\");\nconst WalletError_1 = __webpack_require__(/*! ../../sdk/WalletError */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WalletError.js\");\nconst utilityHelpers_1 = __webpack_require__(/*! ../../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst utilityHelpers_noBuffer_1 = __webpack_require__(/*! ../../utility/utilityHelpers.noBuffer */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.noBuffer.js\");\nconst Services_1 = __webpack_require__(/*! 
../Services */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/Services.js\");\nconst validationHelpers_1 = __webpack_require__(/*! ../../sdk/validationHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/validationHelpers.js\");\nclass WhatsOnChainNoServices extends SdkWhatsOnChain_1.default {\n constructor(chain = 'main', config = {}) {\n super(chain, config);\n }\n /**\n * POST\n * https://api.whatsonchain.com/v1/bsv/main/txs/status\n * Content-Type: application/json\n * data: \"{\\\"txids\\\":[\\\"6815f8014db74eab8b7f75925c68929597f1d97efa970109d990824c25e5e62b\\\"]}\"\n *\n * result for a mined txid:\n * [{\n * \"txid\":\"294cd1ebd5689fdee03509f92c32184c0f52f037d4046af250229b97e0c8f1aa\",\n * \"blockhash\":\"000000000000000004b5ce6670f2ff27354a1e87d0a01bf61f3307f4ccd358b5\",\n * \"blockheight\":612251,\n * \"blocktime\":1575841517,\n * \"confirmations\":278272\n * }]\n *\n * result for a valid recent txid:\n * [{\"txid\":\"6815f8014db74eab8b7f75925c68929597f1d97efa970109d990824c25e5e62b\"}]\n *\n * result for an unknown txid:\n * [{\"txid\":\"6815f8014db74eab8b7f75925c68929597f1d97efa970109d990824c25e5e62c\",\"error\":\"unknown\"}]\n */\n async getStatusForTxids(txids) {\n const r = {\n name: 'WoC',\n status: 'error',\n error: undefined,\n results: []\n };\n const requestOptions = {\n method: 'POST',\n headers: this.getHttpHeaders(),\n data: { txids }\n };\n const url = `${this.URL}/txs/status`;\n try {\n const response = await this.httpClient.request(url, requestOptions);\n if (!response.data || !response.ok || response.status !== 200)\n throw new WERR_errors_1.WERR_INVALID_OPERATION(`Unable to get status for txids at this timei.`);\n const data = response.data;\n for (const txid of txids) {\n const d = data.find(d => d.txid === txid);\n if (!d || d.error === 'unknown')\n r.results.push({ txid, status: 'unknown', depth: undefined });\n else if (d.error !== undefined) {\n console.log(`WhatsOnChain getStatusForTxids unexpected error 
${d.error} ${txid}`);\n r.results.push({ txid, status: 'unknown', depth: undefined });\n }\n else if (d.confirmations === undefined)\n r.results.push({ txid, status: 'known', depth: 0 });\n else\n r.results.push({ txid, status: 'mined', depth: d.confirmations });\n }\n r.status = 'success';\n }\n catch (eu) {\n const e = WalletError_1.WalletError.fromUnknown(eu);\n r.error = e;\n }\n return r;\n }\n /**\n * 2025-02-16 throwing internal server error 500.\n * @param txid\n * @returns\n */\n async getTxPropagation(txid) {\n const requestOptions = {\n method: 'GET',\n headers: this.getHttpHeaders()\n };\n const response = await this.httpClient.request(`${this.URL}/tx/hash/${txid}/propagation`, requestOptions);\n // response.statusText is often, but not always 'OK' on success...\n if (!response.data || !response.ok || response.status !== 200)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('txid', `valid transaction. '${txid}' response ${response.statusText}`);\n return 0;\n }\n /**\n * May return undefined for unmined transactions that are in the mempool.\n * @param txid\n * @returns raw transaction as hex string or undefined if txid not found in mined block.\n */\n async getRawTx(txid) {\n const headers = this.getHttpHeaders();\n headers['Cache-Control'] = 'no-cache';\n const requestOptions = {\n method: 'GET',\n headers\n };\n const url = `${this.URL}/tx/${txid}/hex`;\n for (let retry = 0; retry < 2; retry++) {\n const response = await this.httpClient.request(url, requestOptions);\n if (response.statusText === 'Too Many Requests' && retry < 2) {\n await (0, utilityHelpers_1.wait)(2000);\n continue;\n }\n if (response.status === 404 && response.statusText === 'Not Found')\n return undefined;\n // response.statusText is often, but not always 'OK' on success...\n if (!response.data || !response.ok || response.status !== 200)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('txid', `valid transaction. 
'${txid}' response ${response.statusText}`);\n return response.data;\n }\n throw new WERR_errors_1.WERR_INTERNAL();\n }\n async getRawTxResult(txid) {\n const r = { name: 'WoC', txid: (0, utilityHelpers_noBuffer_1.asString)(txid) };\n try {\n const rawTxHex = await this.getRawTx(txid);\n if (rawTxHex)\n r.rawTx = (0, utilityHelpers_noBuffer_1.asArray)(rawTxHex);\n }\n catch (err) {\n r.error = WalletError_1.WalletError.fromUnknown(err);\n }\n return r;\n }\n /**\n * WhatsOnChain does not natively support a postBeef end-point aware of multiple txids of interest in the Beef.\n *\n * Send rawTx in `txids` order from beef.\n *\n * @param beef\n * @param txids\n * @returns\n */\n async postBeef(beef, txids) {\n const r = {\n name: 'WoC',\n status: 'success',\n txidResults: [],\n notes: []\n };\n let delay = false;\n const nn = () => ({ name: 'WoCpostBeef', when: new Date().toISOString() });\n const nne = () => ({ ...nn(), beef: beef.toHex(), txids: txids.join(',') });\n for (const txid of txids) {\n const rawTx = sdk_1.Utils.toHex(beef.findTxid(txid).rawTx);\n if (delay) {\n // For multiple txids, give WoC time to propagate each one.\n await (0, utilityHelpers_1.wait)(3000);\n }\n delay = true;\n const tr = await this.postRawTx(rawTx);\n if (txid !== tr.txid) {\n tr.notes.push({ ...nne(), what: 'postRawTxTxidChanged', txid, trTxid: tr.txid });\n }\n r.txidResults.push(tr);\n if (r.status === 'success' && tr.status !== 'success')\n r.status = 'error';\n }\n if (r.status === 'success') {\n r.notes.push({ ...nn(), what: 'postBeefSuccess' });\n }\n else {\n r.notes.push({ ...nne(), what: 'postBeefError' });\n }\n return r;\n }\n /**\n * @param rawTx raw transaction to broadcast as hex string\n * @returns txid returned by transaction processor of transaction broadcast\n */\n async postRawTx(rawTx) {\n let txid = sdk_1.Utils.toHex((0, utilityHelpers_1.doubleSha256BE)(sdk_1.Utils.toArray(rawTx, 'hex')));\n const r = {\n txid,\n status: 'success',\n notes: []\n };\n const 
headers = this.getHttpHeaders();\n headers['Content-Type'] = 'application/json';\n headers['Accept'] = 'text/plain';\n const requestOptions = {\n method: 'POST',\n headers,\n data: { txhex: rawTx }\n };\n const url = `${this.URL}/tx/raw`;\n const nn = () => ({ name: 'WoCpostRawTx', when: new Date().toISOString() });\n const nne = () => ({ ...nn(), rawTx, txid, url });\n const retryLimit = 5;\n for (let retry = 0; retry < retryLimit; retry++) {\n try {\n const response = await this.httpClient.request(url, requestOptions);\n if (response.statusText === 'Too Many Requests' && retry < 2) {\n r.notes.push({ ...nn(), what: 'postRawTxRateLimit' });\n await (0, utilityHelpers_1.wait)(2000);\n continue;\n }\n if (response.ok) {\n const txid = response.data;\n r.notes.push({ ...nn(), what: 'postRawTxSuccess' });\n }\n else if (response.data === 'unexpected response code 500: Transaction already in the mempool') {\n r.notes.push({ ...nne(), what: 'postRawTxSuccessAlreadyInMempool' });\n }\n else {\n r.status = 'error';\n if (response.data === 'unexpected response code 500: 258: txn-mempool-conflict') {\n r.doubleSpend = true; // this is a possible double spend attempt\n r.competingTxs = undefined; // not provided with any data for this.\n r.notes.push({ ...nne(), what: 'postRawTxErrorMempoolConflict' });\n }\n else if (response.data === 'unexpected response code 500: Missing inputs') {\n r.doubleSpend = true; // this is a possible double spend attempt\n r.competingTxs = undefined; // not provided with any data for this.\n r.notes.push({ ...nne(), what: 'postRawTxErrorMissingInputs' });\n }\n else {\n const n = {\n ...nne(),\n what: 'postRawTxError'\n };\n if (typeof response.data === 'string') {\n n.data = response.data.slice(0, 128);\n r.data = response.data;\n }\n else {\n r.data = '';\n }\n if (typeof response.statusText === 'string') {\n n.statusText = response.statusText.slice(0, 128);\n r.data += `,${response.statusText}`;\n }\n if (typeof response.status === 'string') 
{\n n.status = response.status.slice(0, 128);\n r.data += `,${response.status}`;\n }\n if (typeof response.status === 'number') {\n n.status = response.status;\n r.data += `,${response.status}`;\n }\n r.notes.push(n);\n }\n }\n }\n catch (eu) {\n r.status = 'error';\n const e = WalletError_1.WalletError.fromUnknown(eu);\n r.notes.push({\n ...nne(),\n what: 'postRawTxCatch',\n code: e.code,\n description: e.description\n });\n r.serviceError = true;\n r.data = `${e.code} ${e.description}`;\n }\n return r;\n }\n r.status = 'error';\n r.serviceError = true;\n r.notes.push({\n ...nne(),\n what: 'postRawTxRetryLimit',\n retryLimit\n });\n return r;\n }\n async updateBsvExchangeRate(rate, updateMsecs) {\n if (rate) {\n // Check if the rate we know is stale enough to update.\n updateMsecs || (updateMsecs = 1000 * 60 * 15);\n if (new Date(Date.now() - updateMsecs) < rate.timestamp)\n return rate;\n }\n const requestOptions = {\n method: 'GET',\n headers: this.getHttpHeaders()\n };\n for (let retry = 0; retry < 2; retry++) {\n const response = await this.httpClient.request(`${this.URL}/exchangerate`, requestOptions);\n if (response.statusText === 'Too Many Requests' && retry < 2) {\n await (0, utilityHelpers_1.wait)(2000);\n continue;\n }\n // response.statusText is often, but not always 'OK' on success...\n if (!response.data || !response.ok || response.status !== 200)\n throw new WERR_errors_1.WERR_INVALID_OPERATION(`WoC exchangerate response ${response.statusText}`);\n const wocrate = response.data;\n if (wocrate.currency !== 'USD')\n wocrate.rate = NaN;\n const newRate = {\n timestamp: new Date(),\n base: 'USD',\n rate: wocrate.rate\n };\n return newRate;\n }\n throw new WERR_errors_1.WERR_INTERNAL();\n }\n async getUtxoStatus(output, outputFormat, outpoint) {\n const r = {\n name: 'WoC',\n status: 'error',\n error: new WERR_errors_1.WERR_INTERNAL(),\n details: []\n };\n for (let retry = 0;; retry++) {\n let url = '';\n try {\n const scriptHash = (0, 
Services_1.validateScriptHash)(output, outputFormat);\n const requestOptions = {\n method: 'GET',\n headers: this.getHttpHeaders()\n };\n const response = await this.httpClient.request(`${this.URL}/script/${scriptHash}/unspent/all`, requestOptions);\n if (response.statusText === 'Too Many Requests' && retry < 2) {\n await (0, utilityHelpers_1.wait)(2000);\n continue;\n }\n // response.statusText is often, but not always 'OK' on success...\n if (!response.data || !response.ok || response.status !== 200)\n throw new WERR_errors_1.WERR_INVALID_OPERATION(`WoC getUtxoStatus response ${response.statusText}`);\n const data = response.data;\n if (data.script !== scriptHash || !Array.isArray(data.result)) {\n throw new WERR_errors_1.WERR_INTERNAL('data. is not an array');\n }\n if (data.result.length === 0) {\n r.status = 'success';\n r.error = undefined;\n r.isUtxo = false;\n }\n else {\n r.status = 'success';\n r.error = undefined;\n for (const s of data.result) {\n r.details.push({\n txid: s.tx_hash,\n satoshis: s.value,\n height: s.height,\n index: s.tx_pos\n });\n }\n if (outpoint) {\n const { txid, vout } = (0, validationHelpers_1.parseWalletOutpoint)(outpoint);\n r.isUtxo = r.details.find(d => d.txid === txid && d.index === vout) !== undefined;\n }\n else\n r.isUtxo = r.details.length > 0;\n }\n return r;\n }\n catch (eu) {\n const e = WalletError_1.WalletError.fromUnknown(eu);\n if (e.code !== 'ECONNRESET' || retry > 2) {\n r.error = new WERR_errors_1.WERR_INTERNAL(`service failure: ${url}, error: ${JSON.stringify(WalletError_1.WalletError.fromUnknown(eu))}`);\n return r;\n }\n }\n }\n }\n async getScriptHashConfirmedHistory(hash) {\n const r = {\n name: 'WoC',\n status: 'error',\n error: undefined,\n history: []\n };\n // reverse hash from LE to BE for Woc\n hash = sdk_1.Utils.toHex(sdk_1.Utils.toArray(hash, 'hex').reverse());\n const url = `${this.URL}/script/${hash}/confirmed/history`;\n for (let retry = 0;; retry++) {\n try {\n const requestOptions = {\n method: 
'GET',\n headers: this.getHttpHeaders()\n };\n const response = await this.httpClient.request(url, requestOptions);\n if (response.statusText === 'Too Many Requests' && retry < 2) {\n await (0, utilityHelpers_1.wait)(2000);\n continue;\n }\n if (!response.ok && response.status === 404) {\n // There is no history for this script hash...\n r.status = 'success';\n return r;\n }\n // response.statusText is often, but not always 'OK' on success...\n if (!response.data || !response.ok || response.status !== 200) {\n r.error = new WERR_errors_1.WERR_BAD_REQUEST(`WoC getScriptHashConfirmedHistory response ${response.ok} ${response.status} ${response.statusText}`);\n return r;\n }\n if (response.data.error) {\n r.error = new WERR_errors_1.WERR_BAD_REQUEST(`WoC getScriptHashConfirmedHistory error ${response.data.error}`);\n return r;\n }\n r.history = response.data.result.map(d => ({ txid: d.tx_hash, height: d.height }));\n r.status = 'success';\n return r;\n }\n catch (eu) {\n const e = WalletError_1.WalletError.fromUnknown(eu);\n if (e.code !== 'ECONNRESET' || retry > 2) {\n r.error = new WERR_errors_1.WERR_INTERNAL(`WoC getScriptHashConfirmedHistory service failure: ${url}, error: ${JSON.stringify(WalletError_1.WalletError.fromUnknown(eu))}`);\n return r;\n }\n }\n }\n return r;\n }\n async getScriptHashUnconfirmedHistory(hash) {\n const r = {\n name: 'WoC',\n status: 'error',\n error: undefined,\n history: []\n };\n // reverse hash from LE to BE for Woc\n hash = sdk_1.Utils.toHex(sdk_1.Utils.toArray(hash, 'hex').reverse());\n const url = `${this.URL}/script/${hash}/unconfirmed/history`;\n for (let retry = 0;; retry++) {\n try {\n const requestOptions = {\n method: 'GET',\n headers: this.getHttpHeaders()\n };\n const response = await this.httpClient.request(url, requestOptions);\n if (response.statusText === 'Too Many Requests' && retry < 2) {\n await (0, utilityHelpers_1.wait)(2000);\n continue;\n }\n if (!response.ok && response.status === 404) {\n // There is no 
history for this script hash...\n r.status = 'success';\n return r;\n }\n // response.statusText is often, but not always 'OK' on success...\n if (!response.data || !response.ok || response.status !== 200) {\n r.error = new WERR_errors_1.WERR_BAD_REQUEST(`WoC getScriptHashUnconfirmedHistory response ${response.ok} ${response.status} ${response.statusText}`);\n return r;\n }\n if (response.data.error) {\n r.error = new WERR_errors_1.WERR_BAD_REQUEST(`WoC getScriptHashUnconfirmedHistory error ${response.data.error}`);\n return r;\n }\n r.history = response.data.result.map(d => ({ txid: d.tx_hash, height: d.height }));\n r.status = 'success';\n return r;\n }\n catch (eu) {\n const e = WalletError_1.WalletError.fromUnknown(eu);\n if (e.code !== 'ECONNRESET' || retry > 2) {\n r.error = new WERR_errors_1.WERR_INTERNAL(`WoC getScriptHashUnconfirmedHistory service failure: ${url}, error: ${JSON.stringify(WalletError_1.WalletError.fromUnknown(eu))}`);\n return r;\n }\n }\n }\n return r;\n }\n async getScriptHashHistory(hash) {\n const r1 = await this.getScriptHashConfirmedHistory(hash);\n if (r1.error || r1.status !== 'success')\n return r1;\n const r2 = await this.getScriptHashUnconfirmedHistory(hash);\n if (r2.error || r2.status !== 'success')\n return r2;\n r1.history = r1.history.concat(r2.history);\n return r1;\n }\n /**\n {\n \"hash\": \"000000000000000004a288072ebb35e37233f419918f9783d499979cb6ac33eb\",\n \"confirmations\": 328433,\n \"size\": 14421,\n \"height\": 575045,\n \"version\": 536928256,\n \"versionHex\": \"2000e000\",\n \"merkleroot\": \"4ebcba09addd720991d03473f39dce4b9a72cc164e505cd446687a54df9b1585\",\n \"time\": 1553416668,\n \"mediantime\": 1553414858,\n \"nonce\": 87914848,\n \"bits\": \"180997ee\",\n \"difficulty\": 114608607557.4425,\n \"chainwork\": \"000000000000000000000000000000000000000000ddf5d385546872bab7dc01\",\n \"previousblockhash\": \"00000000000000000988156c7075dc9147a5b62922f1310862e8b9000d46dd9b\",\n \"nextblockhash\": 
\"00000000000000000112b36a37c10235fa0c991f680bc5482ba9692e0ae697db\",\n \"nTx\": 0,\n \"num_tx\": 5\n }\n */\n async getBlockHeaderByHash(hash) {\n const headers = this.getHttpHeaders();\n const requestOptions = {\n method: 'GET',\n headers\n };\n const url = `${this.URL}/block/${hash}/header`;\n for (let retry = 0; retry < 2; retry++) {\n const response = await this.httpClient.request(url, requestOptions);\n if (response.statusText === 'Too Many Requests' && retry < 2) {\n await (0, utilityHelpers_1.wait)(2000);\n continue;\n }\n if (response.status === 404 && response.statusText === 'Not Found')\n return undefined;\n // response.statusText is often, but not always 'OK' on success...\n if (!response.data || !response.ok || response.status !== 200)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('hash', `valid block hash. '${hash}' response ${response.statusText}`);\n const header = convertWocToBlockHeaderHex(response.data);\n return header;\n }\n throw new WERR_errors_1.WERR_INTERNAL();\n }\n async getChainInfo() {\n const headers = this.getHttpHeaders();\n const requestOptions = {\n method: 'GET',\n headers\n };\n const url = `${this.URL}/chain/info`;\n for (let retry = 0; retry < 2; retry++) {\n const response = await this.httpClient.request(url, requestOptions);\n if (response.statusText === 'Too Many Requests' && retry < 2) {\n await (0, utilityHelpers_1.wait)(2000);\n continue;\n }\n // response.statusText is often, but not always 'OK' on success...\n if (!response.data || !response.ok || response.status !== 200)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('hash', `valid block hash. 
'${url}' response ${response.statusText}`);\n return response.data;\n }\n throw new WERR_errors_1.WERR_INTERNAL();\n }\n}\nexports.WhatsOnChainNoServices = WhatsOnChainNoServices;\n/**\n *\n */\nclass WhatsOnChain extends WhatsOnChainNoServices {\n constructor(chain = 'main', config = {}, services) {\n super(chain, config);\n this.services = services || new Services_1.Services(chain);\n }\n /**\n * @param txid\n * @returns\n */\n async getMerklePath(txid, services) {\n const r = { name: 'WoCTsc', notes: [] };\n const headers = this.getHttpHeaders();\n const requestOptions = {\n method: 'GET',\n headers\n };\n for (let retry = 0; retry < 2; retry++) {\n try {\n const response = await this.httpClient.request(`${this.URL}/tx/${txid}/proof/tsc`, requestOptions);\n if (response.statusText === 'Too Many Requests' && retry < 2) {\n r.notes.push({\n what: 'getMerklePathRetry',\n name: r.name,\n status: response.status,\n statusText: response.statusText\n });\n await (0, utilityHelpers_1.wait)(2000);\n continue;\n }\n if (response.status === 404 && response.statusText === 'Not Found') {\n r.notes.push({\n what: 'getMerklePathNotFound',\n name: r.name,\n status: response.status,\n statusText: response.statusText\n });\n return r;\n }\n // response.statusText is often, but not always 'OK' on success...\n if (!response.ok || response.status !== 200) {\n r.notes.push({\n what: 'getMerklePathBadStatus',\n name: r.name,\n status: response.status,\n statusText: response.statusText\n });\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('txid', `valid transaction. 
'${txid}' response ${response.statusText}`);\n }\n if (!response.data) {\n // Unmined, proof not yet available.\n r.notes.push({\n what: 'getMerklePathNoData',\n name: r.name,\n status: response.status,\n statusText: response.statusText\n });\n return r;\n }\n if (!Array.isArray(response.data))\n response.data = [response.data];\n if (response.data.length != 1)\n return r;\n const p = response.data[0];\n const header = await services.hashToHeader(p.target);\n if (header) {\n const proof = {\n index: p.index,\n nodes: p.nodes,\n height: header.height\n };\n r.merklePath = (0, tscProofToMerklePath_1.convertProofToMerklePath)(txid, proof);\n r.header = header;\n r.notes.push({\n what: 'getMerklePathSuccess',\n name: r.name,\n status: response.status,\n statusText: response.statusText\n });\n }\n else {\n r.notes.push({\n what: 'getMerklePathNoHeader',\n target: p.target,\n name: r.name,\n status: response.status,\n statusText: response.statusText\n });\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('blockhash', 'a valid on-chain block hash');\n }\n }\n catch (eu) {\n const e = WalletError_1.WalletError.fromUnknown(eu);\n r.notes.push({\n what: 'getMerklePathError',\n name: r.name,\n code: e.code,\n description: e.description\n });\n r.error = e;\n }\n return r;\n }\n r.notes.push({ what: 'getMerklePathInternal', name: r.name });\n throw new WERR_errors_1.WERR_INTERNAL();\n }\n}\nexports.WhatsOnChain = WhatsOnChain;\nfunction convertWocToBlockHeaderHex(woc) {\n const bits = typeof woc.bits === 'string' ? 
parseInt(woc.bits, 16) : woc.bits;\n if (!woc.previousblockhash) {\n woc.previousblockhash = '0000000000000000000000000000000000000000000000000000000000000000'; // genesis\n }\n return {\n version: woc.version,\n previousHash: woc.previousblockhash,\n merkleRoot: woc.merkleroot,\n time: woc.time,\n bits,\n nonce: woc.nonce,\n hash: woc.hash,\n height: woc.height\n };\n}\nasync function getWhatsOnChainBlockHeaderByHash(hash, chain = 'main', apiKey) {\n const config = apiKey ? { apiKey } : {};\n const woc = new WhatsOnChain(chain, config);\n const header = await woc.getBlockHeaderByHash(hash);\n return header;\n}\n//# sourceMappingURL=WhatsOnChain.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/providers/WhatsOnChain.js?\n}"); +eval("{\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.WhatsOnChain = void 0;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst index_client_1 = __webpack_require__(/*! ../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nconst tscProofToMerklePath_1 = __webpack_require__(/*! ../../utility/tscProofToMerklePath */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/tscProofToMerklePath.js\");\nconst SdkWhatsOnChain_1 = __importDefault(__webpack_require__(/*! ./SdkWhatsOnChain */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/providers/SdkWhatsOnChain.js\"));\nconst sdk_2 = __webpack_require__(/*! 
../../sdk */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/index.js\");\n/**\n *\n */\nclass WhatsOnChain extends SdkWhatsOnChain_1.default {\n constructor(chain = 'main', config = {}, services) {\n super(chain, config);\n this.services = services || new index_client_1.Services(chain);\n }\n /**\n * POST\n * https://api.whatsonchain.com/v1/bsv/main/txs/status\n * Content-Type: application/json\n * data: \"{\\\"txids\\\":[\\\"6815f8014db74eab8b7f75925c68929597f1d97efa970109d990824c25e5e62b\\\"]}\"\n *\n * result for a mined txid:\n * [{\n * \"txid\":\"294cd1ebd5689fdee03509f92c32184c0f52f037d4046af250229b97e0c8f1aa\",\n * \"blockhash\":\"000000000000000004b5ce6670f2ff27354a1e87d0a01bf61f3307f4ccd358b5\",\n * \"blockheight\":612251,\n * \"blocktime\":1575841517,\n * \"confirmations\":278272\n * }]\n *\n * result for a valid recent txid:\n * [{\"txid\":\"6815f8014db74eab8b7f75925c68929597f1d97efa970109d990824c25e5e62b\"}]\n *\n * result for an unknown txid:\n * [{\"txid\":\"6815f8014db74eab8b7f75925c68929597f1d97efa970109d990824c25e5e62c\",\"error\":\"unknown\"}]\n */\n async getStatusForTxids(txids) {\n const r = {\n name: 'WoC',\n status: 'error',\n error: undefined,\n results: []\n };\n const requestOptions = {\n method: 'POST',\n headers: this.getHttpHeaders(),\n data: { txids }\n };\n const url = `${this.URL}/txs/status`;\n try {\n const response = await this.httpClient.request(url, requestOptions);\n if (!response.data || !response.ok || response.status !== 200)\n throw new index_client_1.sdk.WERR_INVALID_OPERATION(`Unable to get status for txids at this timei.`);\n const data = response.data;\n for (const txid of txids) {\n const d = data.find(d => d.txid === txid);\n if (!d || d.error === 'unknown')\n r.results.push({ txid, status: 'unknown', depth: undefined });\n else if (d.error !== undefined) {\n console.log(`WhatsOnChain getStatusForTxids unexpected error ${d.error} ${txid}`);\n r.results.push({ txid, status: 'unknown', depth: undefined });\n 
}\n else if (d.confirmations === undefined)\n r.results.push({ txid, status: 'known', depth: 0 });\n else\n r.results.push({ txid, status: 'mined', depth: d.confirmations });\n }\n r.status = 'success';\n }\n catch (eu) {\n const e = index_client_1.sdk.WalletError.fromUnknown(eu);\n r.error = e;\n }\n return r;\n }\n /**\n * 2025-02-16 throwing internal server error 500.\n * @param txid\n * @returns\n */\n async getTxPropagation(txid) {\n const requestOptions = {\n method: 'GET',\n headers: this.getHttpHeaders()\n };\n const response = await this.httpClient.request(`${this.URL}/tx/hash/${txid}/propagation`, requestOptions);\n // response.statusText is often, but not always 'OK' on success...\n if (!response.data || !response.ok || response.status !== 200)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('txid', `valid transaction. '${txid}' response ${response.statusText}`);\n return 0;\n }\n /**\n * May return undefined for unmined transactions that are in the mempool.\n * @param txid\n * @returns raw transaction as hex string or undefined if txid not found in mined block.\n */\n async getRawTx(txid) {\n const headers = this.getHttpHeaders();\n headers['Cache-Control'] = 'no-cache';\n const requestOptions = {\n method: 'GET',\n headers\n };\n const url = `${this.URL}/tx/${txid}/hex`;\n for (let retry = 0; retry < 2; retry++) {\n const response = await this.httpClient.request(url, requestOptions);\n if (response.statusText === 'Too Many Requests' && retry < 2) {\n await (0, index_client_1.wait)(2000);\n continue;\n }\n if (response.status === 404 && response.statusText === 'Not Found')\n return undefined;\n // response.statusText is often, but not always 'OK' on success...\n if (!response.data || !response.ok || response.status !== 200)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('txid', `valid transaction. 
'${txid}' response ${response.statusText}`);\n return response.data;\n }\n throw new index_client_1.sdk.WERR_INTERNAL();\n }\n async getRawTxResult(txid) {\n const r = { name: 'WoC', txid: (0, index_client_1.asString)(txid) };\n try {\n const rawTxHex = await this.getRawTx(txid);\n if (rawTxHex)\n r.rawTx = (0, index_client_1.asArray)(rawTxHex);\n }\n catch (err) {\n r.error = index_client_1.sdk.WalletError.fromUnknown(err);\n }\n return r;\n }\n /**\n * WhatsOnChain does not natively support a postBeef end-point aware of multiple txids of interest in the Beef.\n *\n * Send rawTx in `txids` order from beef.\n *\n * @param beef\n * @param txids\n * @returns\n */\n async postBeef(beef, txids) {\n const r = {\n name: 'WoC',\n status: 'success',\n txidResults: [],\n notes: []\n };\n let delay = false;\n const nn = () => ({ name: 'WoCpostBeef', when: new Date().toISOString() });\n const nne = () => ({ ...nn(), beef: beef.toHex(), txids: txids.join(',') });\n for (const txid of txids) {\n const rawTx = sdk_1.Utils.toHex(beef.findTxid(txid).rawTx);\n if (delay) {\n // For multiple txids, give WoC time to propagate each one.\n await (0, index_client_1.wait)(3000);\n }\n delay = true;\n const tr = await this.postRawTx(rawTx);\n if (txid !== tr.txid) {\n tr.notes.push({ ...nne(), what: 'postRawTxTxidChanged', txid, trTxid: tr.txid });\n }\n r.txidResults.push(tr);\n if (r.status === 'success' && tr.status !== 'success')\n r.status = 'error';\n }\n if (r.status === 'success') {\n r.notes.push({ ...nn(), what: 'postBeefSuccess' });\n }\n else {\n r.notes.push({ ...nne(), what: 'postBeefError' });\n }\n return r;\n }\n /**\n * @param rawTx raw transaction to broadcast as hex string\n * @returns txid returned by transaction processor of transaction broadcast\n */\n async postRawTx(rawTx) {\n let txid = sdk_1.Utils.toHex((0, index_client_1.doubleSha256BE)(sdk_1.Utils.toArray(rawTx, 'hex')));\n const r = {\n txid,\n status: 'success',\n notes: []\n };\n const headers = 
this.getHttpHeaders();\n headers['Content-Type'] = 'application/json';\n headers['Accept'] = 'text/plain';\n const requestOptions = {\n method: 'POST',\n headers,\n data: { txhex: rawTx }\n };\n const url = `${this.URL}/tx/raw`;\n const nn = () => ({ name: 'WoCpostRawTx', when: new Date().toISOString() });\n const nne = () => ({ ...nn(), rawTx, txid, url });\n const retryLimit = 5;\n for (let retry = 0; retry < retryLimit; retry++) {\n try {\n const response = await this.httpClient.request(url, requestOptions);\n if (response.statusText === 'Too Many Requests' && retry < 2) {\n r.notes.push({ ...nn(), what: 'postRawTxRateLimit' });\n await (0, index_client_1.wait)(2000);\n continue;\n }\n if (response.ok) {\n const txid = response.data;\n r.notes.push({ ...nn(), what: 'postRawTxSuccess' });\n }\n else if (response.data === 'unexpected response code 500: Transaction already in the mempool') {\n r.notes.push({ ...nne(), what: 'postRawTxSuccessAlreadyInMempool' });\n }\n else {\n r.status = 'error';\n if (response.data === 'unexpected response code 500: 258: txn-mempool-conflict') {\n r.doubleSpend = true; // this is a possible double spend attempt\n r.competingTxs = undefined; // not provided with any data for this.\n r.notes.push({ ...nne(), what: 'postRawTxErrorMempoolConflict' });\n }\n else if (response.data === 'unexpected response code 500: Missing inputs') {\n r.doubleSpend = true; // this is a possible double spend attempt\n r.competingTxs = undefined; // not provided with any data for this.\n r.notes.push({ ...nne(), what: 'postRawTxErrorMissingInputs' });\n }\n else {\n const n = {\n ...nne(),\n what: 'postRawTxError'\n };\n if (typeof response.data === 'string') {\n n.data = response.data.slice(0, 128);\n r.data = response.data;\n }\n else {\n r.data = '';\n }\n if (typeof response.statusText === 'string') {\n n.statusText = response.statusText.slice(0, 128);\n r.data += `,${response.statusText}`;\n }\n if (typeof response.status === 'string') {\n n.status 
= response.status.slice(0, 128);\n r.data += `,${response.status}`;\n }\n if (typeof response.status === 'number') {\n n.status = response.status;\n r.data += `,${response.status}`;\n }\n r.notes.push(n);\n }\n }\n }\n catch (eu) {\n r.status = 'error';\n const e = index_client_1.sdk.WalletError.fromUnknown(eu);\n r.notes.push({\n ...nne(),\n what: 'postRawTxCatch',\n code: e.code,\n description: e.description\n });\n r.serviceError = true;\n r.data = `${e.code} ${e.description}`;\n }\n return r;\n }\n r.status = 'error';\n r.serviceError = true;\n r.notes.push({\n ...nne(),\n what: 'postRawTxRetryLimit',\n retryLimit\n });\n return r;\n }\n /**\n * @param txid\n * @returns\n */\n async getMerklePath(txid, services) {\n const r = { name: 'WoCTsc', notes: [] };\n const headers = this.getHttpHeaders();\n const requestOptions = {\n method: 'GET',\n headers\n };\n for (let retry = 0; retry < 2; retry++) {\n try {\n const response = await this.httpClient.request(`${this.URL}/tx/${txid}/proof/tsc`, requestOptions);\n if (response.statusText === 'Too Many Requests' && retry < 2) {\n r.notes.push({\n what: 'getMerklePathRetry',\n name: r.name,\n status: response.status,\n statusText: response.statusText\n });\n await (0, index_client_1.wait)(2000);\n continue;\n }\n if (response.status === 404 && response.statusText === 'Not Found') {\n r.notes.push({\n what: 'getMerklePathNotFound',\n name: r.name,\n status: response.status,\n statusText: response.statusText\n });\n return r;\n }\n // response.statusText is often, but not always 'OK' on success...\n if (!response.ok || response.status !== 200) {\n r.notes.push({\n what: 'getMerklePathBadStatus',\n name: r.name,\n status: response.status,\n statusText: response.statusText\n });\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('txid', `valid transaction. 
'${txid}' response ${response.statusText}`);\n }\n if (!response.data) {\n // Unmined, proof not yet available.\n r.notes.push({\n what: 'getMerklePathNoData',\n name: r.name,\n status: response.status,\n statusText: response.statusText\n });\n return r;\n }\n if (!Array.isArray(response.data))\n response.data = [response.data];\n if (response.data.length != 1)\n return r;\n const p = response.data[0];\n const header = await services.hashToHeader(p.target);\n if (header) {\n const proof = {\n index: p.index,\n nodes: p.nodes,\n height: header.height\n };\n r.merklePath = (0, tscProofToMerklePath_1.convertProofToMerklePath)(txid, proof);\n r.header = header;\n r.notes.push({\n what: 'getMerklePathSuccess',\n name: r.name,\n status: response.status,\n statusText: response.statusText\n });\n }\n else {\n r.notes.push({\n what: 'getMerklePathNoHeader',\n target: p.target,\n name: r.name,\n status: response.status,\n statusText: response.statusText\n });\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('blockhash', 'a valid on-chain block hash');\n }\n }\n catch (eu) {\n const e = index_client_1.sdk.WalletError.fromUnknown(eu);\n r.notes.push({\n what: 'getMerklePathError',\n name: r.name,\n code: e.code,\n description: e.description\n });\n r.error = e;\n }\n return r;\n }\n r.notes.push({ what: 'getMerklePathInternal', name: r.name });\n throw new index_client_1.sdk.WERR_INTERNAL();\n }\n async updateBsvExchangeRate(rate, updateMsecs) {\n if (rate) {\n // Check if the rate we know is stale enough to update.\n updateMsecs || (updateMsecs = 1000 * 60 * 15);\n if (new Date(Date.now() - updateMsecs) < rate.timestamp)\n return rate;\n }\n const requestOptions = {\n method: 'GET',\n headers: this.getHttpHeaders()\n };\n for (let retry = 0; retry < 2; retry++) {\n const response = await this.httpClient.request(`${this.URL}/exchangerate`, requestOptions);\n if (response.statusText === 'Too Many Requests' && retry < 2) {\n await (0, index_client_1.wait)(2000);\n 
continue;\n }\n // response.statusText is often, but not always 'OK' on success...\n if (!response.data || !response.ok || response.status !== 200)\n throw new index_client_1.sdk.WERR_INVALID_OPERATION(`WoC exchangerate response ${response.statusText}`);\n const wocrate = response.data;\n if (wocrate.currency !== 'USD')\n wocrate.rate = NaN;\n const newRate = {\n timestamp: new Date(),\n base: 'USD',\n rate: wocrate.rate\n };\n return newRate;\n }\n throw new index_client_1.sdk.WERR_INTERNAL();\n }\n async getUtxoStatus(output, outputFormat, outpoint) {\n const r = {\n name: 'WoC',\n status: 'error',\n error: new index_client_1.sdk.WERR_INTERNAL(),\n details: []\n };\n for (let retry = 0;; retry++) {\n let url = '';\n try {\n const scriptHash = (0, index_client_1.validateScriptHash)(output, outputFormat);\n const requestOptions = {\n method: 'GET',\n headers: this.getHttpHeaders()\n };\n const response = await this.httpClient.request(`${this.URL}/script/${scriptHash}/unspent/all`, requestOptions);\n if (response.statusText === 'Too Many Requests' && retry < 2) {\n await (0, index_client_1.wait)(2000);\n continue;\n }\n // response.statusText is often, but not always 'OK' on success...\n if (!response.data || !response.ok || response.status !== 200)\n throw new index_client_1.sdk.WERR_INVALID_OPERATION(`WoC getUtxoStatus response ${response.statusText}`);\n const data = response.data;\n if (data.script !== scriptHash || !Array.isArray(data.result)) {\n throw new index_client_1.sdk.WERR_INTERNAL('data. 
is not an array');\n }\n if (data.result.length === 0) {\n r.status = 'success';\n r.error = undefined;\n r.isUtxo = false;\n }\n else {\n r.status = 'success';\n r.error = undefined;\n for (const s of data.result) {\n r.details.push({\n txid: s.tx_hash,\n satoshis: s.value,\n height: s.height,\n index: s.tx_pos\n });\n }\n if (outpoint) {\n const { txid, vout } = (0, sdk_2.parseWalletOutpoint)(outpoint);\n r.isUtxo = r.details.find(d => d.txid === txid && d.index === vout) !== undefined;\n }\n else\n r.isUtxo = r.details.length > 0;\n }\n return r;\n }\n catch (eu) {\n const e = index_client_1.sdk.WalletError.fromUnknown(eu);\n if (e.code !== 'ECONNRESET' || retry > 2) {\n r.error = new index_client_1.sdk.WERR_INTERNAL(`service failure: ${url}, error: ${JSON.stringify(index_client_1.sdk.WalletError.fromUnknown(eu))}`);\n return r;\n }\n }\n }\n }\n async getScriptHashConfirmedHistory(hash) {\n const r = {\n name: 'WoC',\n status: 'error',\n error: undefined,\n history: []\n };\n // reverse hash from LE to BE for Woc\n hash = sdk_1.Utils.toHex(sdk_1.Utils.toArray(hash, 'hex').reverse());\n const url = `${this.URL}/script/${hash}/confirmed/history`;\n for (let retry = 0;; retry++) {\n try {\n const requestOptions = {\n method: 'GET',\n headers: this.getHttpHeaders()\n };\n const response = await this.httpClient.request(url, requestOptions);\n if (response.statusText === 'Too Many Requests' && retry < 2) {\n await (0, index_client_1.wait)(2000);\n continue;\n }\n if (!response.ok && response.status === 404) {\n // There is no history for this script hash...\n r.status = 'success';\n return r;\n }\n // response.statusText is often, but not always 'OK' on success...\n if (!response.data || !response.ok || response.status !== 200) {\n r.error = new index_client_1.sdk.WERR_BAD_REQUEST(`WoC getScriptHashConfirmedHistory response ${response.ok} ${response.status} ${response.statusText}`);\n return r;\n }\n if (response.data.error) {\n r.error = new 
index_client_1.sdk.WERR_BAD_REQUEST(`WoC getScriptHashConfirmedHistory error ${response.data.error}`);\n return r;\n }\n r.history = response.data.result.map(d => ({ txid: d.tx_hash, height: d.height }));\n r.status = 'success';\n return r;\n }\n catch (eu) {\n const e = index_client_1.sdk.WalletError.fromUnknown(eu);\n if (e.code !== 'ECONNRESET' || retry > 2) {\n r.error = new index_client_1.sdk.WERR_INTERNAL(`WoC getScriptHashConfirmedHistory service failure: ${url}, error: ${JSON.stringify(index_client_1.sdk.WalletError.fromUnknown(eu))}`);\n return r;\n }\n }\n }\n return r;\n }\n async getScriptHashUnconfirmedHistory(hash) {\n const r = {\n name: 'WoC',\n status: 'error',\n error: undefined,\n history: []\n };\n // reverse hash from LE to BE for Woc\n hash = sdk_1.Utils.toHex(sdk_1.Utils.toArray(hash, 'hex').reverse());\n const url = `${this.URL}/script/${hash}/unconfirmed/history`;\n for (let retry = 0;; retry++) {\n try {\n const requestOptions = {\n method: 'GET',\n headers: this.getHttpHeaders()\n };\n const response = await this.httpClient.request(url, requestOptions);\n if (response.statusText === 'Too Many Requests' && retry < 2) {\n await (0, index_client_1.wait)(2000);\n continue;\n }\n if (!response.ok && response.status === 404) {\n // There is no history for this script hash...\n r.status = 'success';\n return r;\n }\n // response.statusText is often, but not always 'OK' on success...\n if (!response.data || !response.ok || response.status !== 200) {\n r.error = new index_client_1.sdk.WERR_BAD_REQUEST(`WoC getScriptHashUnconfirmedHistory response ${response.ok} ${response.status} ${response.statusText}`);\n return r;\n }\n if (response.data.error) {\n r.error = new index_client_1.sdk.WERR_BAD_REQUEST(`WoC getScriptHashUnconfirmedHistory error ${response.data.error}`);\n return r;\n }\n r.history = response.data.result.map(d => ({ txid: d.tx_hash, height: d.height }));\n r.status = 'success';\n return r;\n }\n catch (eu) {\n const e = 
index_client_1.sdk.WalletError.fromUnknown(eu);\n if (e.code !== 'ECONNRESET' || retry > 2) {\n r.error = new index_client_1.sdk.WERR_INTERNAL(`WoC getScriptHashUnconfirmedHistory service failure: ${url}, error: ${JSON.stringify(index_client_1.sdk.WalletError.fromUnknown(eu))}`);\n return r;\n }\n }\n }\n return r;\n }\n async getScriptHashHistory(hash) {\n const r1 = await this.getScriptHashConfirmedHistory(hash);\n if (r1.error || r1.status !== 'success')\n return r1;\n const r2 = await this.getScriptHashUnconfirmedHistory(hash);\n if (r2.error || r2.status !== 'success')\n return r2;\n r1.history = r1.history.concat(r2.history);\n return r1;\n }\n}\nexports.WhatsOnChain = WhatsOnChain;\n//# sourceMappingURL=WhatsOnChain.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/providers/WhatsOnChain.js?\n}"); /***/ }), -/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/providers/exchangeRates.js": -/*!*********************************************************************************************!*\ - !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/providers/exchangeRates.js ***! - \*********************************************************************************************/ +/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/providers/echangeRates.js": +/*!********************************************************************************************!*\ + !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/providers/echangeRates.js ***! 
+ \********************************************************************************************/ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.updateChaintracksFiatExchangeRates = updateChaintracksFiatExchangeRates;\nexports.updateExchangeratesapi = updateExchangeratesapi;\nexports.getExchangeRatesIo = getExchangeRatesIo;\nconst WERR_errors_1 = __webpack_require__(/*! ../../sdk/WERR_errors */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WERR_errors.js\");\nasync function updateChaintracksFiatExchangeRates(targetCurrencies, options) {\n const url = options.chaintracksFiatExchangeRatesUrl;\n if (!url)\n throw new WERR_errors_1.WERR_MISSING_PARAMETER('options.chaintracksFiatExchangeRatesUrl');\n const response = await fetch(url);\n const data = await response.json();\n const r = { status: response.status, data };\n if (r.status !== 200 || !r.data || r.data.status != 'success') {\n throw new WERR_errors_1.WERR_BAD_REQUEST(`${url} returned status ${r.status}`);\n }\n const rates = r.data.value;\n rates.timestamp = new Date(rates.timestamp);\n return rates;\n}\nasync function updateExchangeratesapi(targetCurrencies, options) {\n if (!options.exchangeratesapiKey)\n throw new WERR_errors_1.WERR_MISSING_PARAMETER('options.exchangeratesapiKey');\n const iorates = await getExchangeRatesIo(options.exchangeratesapiKey);\n if (!iorates.success)\n throw new WERR_errors_1.WERR_BAD_REQUEST(`getExchangeRatesIo returned success ${iorates.success}`);\n if (!iorates.rates['USD'] || !iorates.rates[iorates.base])\n throw new WERR_errors_1.WERR_BAD_REQUEST(`getExchangeRatesIo missing rates for 'USD' or base`);\n const r = {\n timestamp: new Date(iorates.timestamp * 1000),\n base: 'USD',\n rates: {}\n };\n const basePerUsd = iorates.rates[iorates.base] / iorates.rates['USD'];\n let updates = 0;\n for (const [key, value] of Object.entries(iorates.rates)) {\n 
if (targetCurrencies.indexOf(key) > -1) {\n r.rates[key] = value * basePerUsd;\n updates++;\n }\n }\n if (updates !== targetCurrencies.length)\n throw new WERR_errors_1.WERR_BAD_REQUEST(`getExchangeRatesIo failed to update all target currencies`);\n //console.log(`new fiat rates=${JSON.stringify(r)}`)\n return r;\n}\nasync function getExchangeRatesIo(key) {\n const url = `http://api.exchangeratesapi.io/v1/latest?access_key=${key}`;\n const response = await fetch(url);\n const data = await response.json();\n const r = { status: response.status, data };\n if (r.status !== 200 || !r.data) {\n throw new WERR_errors_1.WERR_BAD_REQUEST(`getExchangeRatesIo returned status ${r.status}`);\n }\n const rates = r.data;\n return rates;\n}\n/*\n{\n \"success\": true,\n \"timestamp\": 1702405384,\n \"base\": \"EUR\",\n \"date\": \"2023-12-12\",\n \"rates\": {\n \"AED\": 3.96261,\n \"AFN\": 74.453362,\n \"ALL\": 101.807155,\n \"AMD\": 435.489459,\n \"ANG\": 1.944069,\n \"AOA\": 897.226337,\n \"ARS\": 395.468082,\n \"AUD\": 1.646886,\n \"AWG\": 1.942271,\n \"AZN\": 1.832044,\n \"BAM\": 1.95407,\n \"BBD\": 2.177971,\n \"BDT\": 118.654929,\n \"BGN\": 1.956827,\n \"BHD\": 0.406753,\n \"BIF\": 3078.499675,\n \"BMD\": 1.079039,\n \"BND\": 1.446102,\n \"BOB\": 7.4534,\n \"BRL\": 5.35741,\n \"BSD\": 1.07874,\n \"BTC\": 0.000026145469,\n \"BTN\": 89.916078,\n \"BWP\": 14.715901,\n \"BYN\": 3.553337,\n \"BYR\": 21149.174075,\n \"BZD\": 2.174364,\n \"CAD\": 1.468287,\n \"CDF\": 2875.640503,\n \"CHF\": 0.945353,\n \"CLF\": 0.034313,\n \"CLP\": 948.09775,\n \"CNY\": 7.743512,\n \"COP\": 4307.525658,\n \"CRC\": 569.093422,\n \"CUC\": 1.079039,\n \"CUP\": 28.594547,\n \"CVE\": 110.978933,\n \"CZK\": 24.507795,\n \"DJF\": 191.766554,\n \"DKK\": 7.457544,\n \"DOP\": 61.505535,\n \"DZD\": 145.236415,\n \"EGP\": 33.367028,\n \"ERN\": 16.185592,\n \"ETB\": 60.199033,\n \"EUR\": 1,\n \"FJD\": 2.416779,\n \"FKP\": 0.859886,\n \"GBP\": 0.859574,\n \"GEL\": 2.880527,\n \"GGP\": 0.859886,\n \"GHS\": 
12.980915,\n \"GIP\": 0.859886,\n \"GMD\": 72.726644,\n \"GNF\": 9285.134874,\n \"GTQ\": 8.443457,\n \"GYD\": 225.859997,\n \"HKD\": 8.426031,\n \"HNL\": 26.685156,\n \"HRK\": 7.598132,\n \"HTG\": 142.513142,\n \"HUF\": 382.707793,\n \"IDR\": 16801.292339,\n \"ILS\": 4.007585,\n \"IMP\": 0.859886,\n \"INR\": 89.987955,\n \"IQD\": 1414.081256,\n \"IRR\": 45602.907562,\n \"ISK\": 151.109018,\n \"JEP\": 0.859886,\n \"JMD\": 167.700721,\n \"JOD\": 0.765366,\n \"JPY\": 157.115675,\n \"KES\": 165.523229,\n \"KGS\": 96.379362,\n \"KHR\": 4440.24707,\n \"KMF\": 493.571281,\n \"KPW\": 971.097551,\n \"KRW\": 1417.685123,\n \"KWD\": 0.332733,\n \"KYD\": 0.8989,\n \"KZT\": 493.04112,\n \"LAK\": 22368.488843,\n \"LBP\": 16154.243871,\n \"LKR\": 352.747636,\n \"LRD\": 203.02122,\n \"LSL\": 20.582684,\n \"LTL\": 3.186123,\n \"LVL\": 0.6527,\n \"LYD\": 5.211954,\n \"MAD\": 10.976529,\n \"MDL\": 19.340873,\n \"MGA\": 4939.301335,\n \"MKD\": 61.507276,\n \"MMK\": 2265.283559,\n \"MNT\": 3705.780074,\n \"MOP\": 8.676817,\n \"MRU\": 42.727878,\n \"MUR\": 47.690625,\n \"MVR\": 16.584924,\n \"MWK\": 1816.023037,\n \"MXN\": 18.69803,\n \"MYR\": 5.052606,\n \"MZN\": 68.249194,\n \"NAD\": 20.588506,\n \"NGN\": 865.924709,\n \"NIO\": 39.6024,\n \"NOK\": 11.848426,\n \"NPR\": 143.865605,\n \"NZD\": 1.761931,\n \"OMR\": 0.415394,\n \"PAB\": 1.07864,\n \"PEN\": 4.073376,\n \"PGK\": 4.025102,\n \"PHP\": 59.974075,\n \"PKR\": 306.446851,\n \"PLN\": 4.334063,\n \"PYG\": 7963.910929,\n \"QAR\": 3.928776,\n \"RON\": 4.973399,\n \"RSD\": 117.196649,\n \"RUB\": 97.248412,\n \"RWF\": 1351.496966,\n \"SAR\": 4.047186,\n \"SBD\": 9.12268,\n \"SCR\": 14.561036,\n \"SDG\": 648.5028,\n \"SEK\": 11.285032,\n \"SGD\": 1.449037,\n \"SHP\": 1.312921,\n \"SLE\": 24.488188,\n \"SLL\": 21311.029931,\n \"SOS\": 616.131981,\n \"SRD\": 40.655509,\n \"STD\": 22333.938945,\n \"SYP\": 14029.21897,\n \"SZL\": 20.587826,\n \"THB\": 38.597298,\n \"TJS\": 11.757734,\n \"TMT\": 3.776638,\n \"TND\": 3.377493,\n \"TOP\": 
2.551714,\n \"TRY\": 31.312865,\n \"TTD\": 7.321483,\n \"TWD\": 34.012943,\n \"TZS\": 2697.598652,\n \"UAH\": 39.917867,\n \"UGX\": 4102.367289,\n \"USD\": 1.079039,\n \"UYU\": 42.422631,\n \"UZS\": 13299.161683,\n \"VEF\": 3838024.202021,\n \"VES\": 38.392542,\n \"VND\": 26188.28851,\n \"VUV\": 129.693288,\n \"WST\": 2.964402,\n \"XAF\": 655.37362,\n \"XAG\": 0.047456,\n \"XAU\": 0.000545,\n \"XCD\": 2.916158,\n \"XDR\": 0.811478,\n \"XOF\": 657.134976,\n \"XPF\": 119.331742,\n \"YER\": 270.110528,\n \"ZAR\": 20.470755,\n \"ZMK\": 9712.646776,\n \"ZMW\": 26.319693,\n \"ZWL\": 347.450277\n }\n}\n*/\n//# sourceMappingURL=exchangeRates.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/providers/exchangeRates.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.updateChaintracksFiatExchangeRates = updateChaintracksFiatExchangeRates;\nexports.updateExchangeratesapi = updateExchangeratesapi;\nexports.getExchangeRatesIo = getExchangeRatesIo;\nconst index_client_1 = __webpack_require__(/*! 
../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nasync function updateChaintracksFiatExchangeRates(targetCurrencies, options) {\n const url = options.chaintracksFiatExchangeRatesUrl;\n if (!url)\n throw new index_client_1.sdk.WERR_MISSING_PARAMETER('options.chaintracksFiatExchangeRatesUrl');\n const response = await fetch(url);\n const data = await response.json();\n const r = { status: response.status, data };\n if (r.status !== 200 || !r.data || r.data.status != 'success') {\n throw new index_client_1.sdk.WERR_BAD_REQUEST(`${url} returned status ${r.status}`);\n }\n const rates = r.data.value;\n rates.timestamp = new Date(rates.timestamp);\n return rates;\n}\nasync function updateExchangeratesapi(targetCurrencies, options) {\n if (!options.exchangeratesapiKey)\n throw new index_client_1.sdk.WERR_MISSING_PARAMETER('options.exchangeratesapiKey');\n const iorates = await getExchangeRatesIo(options.exchangeratesapiKey);\n if (!iorates.success)\n throw new index_client_1.sdk.WERR_BAD_REQUEST(`getExchangeRatesIo returned success ${iorates.success}`);\n if (!iorates.rates['USD'] || !iorates.rates[iorates.base])\n throw new index_client_1.sdk.WERR_BAD_REQUEST(`getExchangeRatesIo missing rates for 'USD' or base`);\n const r = {\n timestamp: new Date(iorates.timestamp * 1000),\n base: 'USD',\n rates: {}\n };\n const basePerUsd = iorates.rates[iorates.base] / iorates.rates['USD'];\n let updates = 0;\n for (const [key, value] of Object.entries(iorates.rates)) {\n if (targetCurrencies.indexOf(key) > -1) {\n r.rates[key] = value * basePerUsd;\n updates++;\n }\n }\n if (updates !== targetCurrencies.length)\n throw new index_client_1.sdk.WERR_BAD_REQUEST(`getExchangeRatesIo failed to update all target currencies`);\n //console.log(`new fiat rates=${JSON.stringify(r)}`)\n return r;\n}\nasync function getExchangeRatesIo(key) {\n const url = `http://api.exchangeratesapi.io/v1/latest?access_key=${key}`;\n const response = await 
fetch(url);\n const data = await response.json();\n const r = { status: response.status, data };\n if (r.status !== 200 || !r.data) {\n throw new index_client_1.sdk.WERR_BAD_REQUEST(`getExchangeRatesIo returned status ${r.status}`);\n }\n const rates = r.data;\n return rates;\n}\n/*\n{\n \"success\": true,\n \"timestamp\": 1702405384,\n \"base\": \"EUR\",\n \"date\": \"2023-12-12\",\n \"rates\": {\n \"AED\": 3.96261,\n \"AFN\": 74.453362,\n \"ALL\": 101.807155,\n \"AMD\": 435.489459,\n \"ANG\": 1.944069,\n \"AOA\": 897.226337,\n \"ARS\": 395.468082,\n \"AUD\": 1.646886,\n \"AWG\": 1.942271,\n \"AZN\": 1.832044,\n \"BAM\": 1.95407,\n \"BBD\": 2.177971,\n \"BDT\": 118.654929,\n \"BGN\": 1.956827,\n \"BHD\": 0.406753,\n \"BIF\": 3078.499675,\n \"BMD\": 1.079039,\n \"BND\": 1.446102,\n \"BOB\": 7.4534,\n \"BRL\": 5.35741,\n \"BSD\": 1.07874,\n \"BTC\": 0.000026145469,\n \"BTN\": 89.916078,\n \"BWP\": 14.715901,\n \"BYN\": 3.553337,\n \"BYR\": 21149.174075,\n \"BZD\": 2.174364,\n \"CAD\": 1.468287,\n \"CDF\": 2875.640503,\n \"CHF\": 0.945353,\n \"CLF\": 0.034313,\n \"CLP\": 948.09775,\n \"CNY\": 7.743512,\n \"COP\": 4307.525658,\n \"CRC\": 569.093422,\n \"CUC\": 1.079039,\n \"CUP\": 28.594547,\n \"CVE\": 110.978933,\n \"CZK\": 24.507795,\n \"DJF\": 191.766554,\n \"DKK\": 7.457544,\n \"DOP\": 61.505535,\n \"DZD\": 145.236415,\n \"EGP\": 33.367028,\n \"ERN\": 16.185592,\n \"ETB\": 60.199033,\n \"EUR\": 1,\n \"FJD\": 2.416779,\n \"FKP\": 0.859886,\n \"GBP\": 0.859574,\n \"GEL\": 2.880527,\n \"GGP\": 0.859886,\n \"GHS\": 12.980915,\n \"GIP\": 0.859886,\n \"GMD\": 72.726644,\n \"GNF\": 9285.134874,\n \"GTQ\": 8.443457,\n \"GYD\": 225.859997,\n \"HKD\": 8.426031,\n \"HNL\": 26.685156,\n \"HRK\": 7.598132,\n \"HTG\": 142.513142,\n \"HUF\": 382.707793,\n \"IDR\": 16801.292339,\n \"ILS\": 4.007585,\n \"IMP\": 0.859886,\n \"INR\": 89.987955,\n \"IQD\": 1414.081256,\n \"IRR\": 45602.907562,\n \"ISK\": 151.109018,\n \"JEP\": 0.859886,\n \"JMD\": 167.700721,\n \"JOD\": 0.765366,\n 
\"JPY\": 157.115675,\n \"KES\": 165.523229,\n \"KGS\": 96.379362,\n \"KHR\": 4440.24707,\n \"KMF\": 493.571281,\n \"KPW\": 971.097551,\n \"KRW\": 1417.685123,\n \"KWD\": 0.332733,\n \"KYD\": 0.8989,\n \"KZT\": 493.04112,\n \"LAK\": 22368.488843,\n \"LBP\": 16154.243871,\n \"LKR\": 352.747636,\n \"LRD\": 203.02122,\n \"LSL\": 20.582684,\n \"LTL\": 3.186123,\n \"LVL\": 0.6527,\n \"LYD\": 5.211954,\n \"MAD\": 10.976529,\n \"MDL\": 19.340873,\n \"MGA\": 4939.301335,\n \"MKD\": 61.507276,\n \"MMK\": 2265.283559,\n \"MNT\": 3705.780074,\n \"MOP\": 8.676817,\n \"MRU\": 42.727878,\n \"MUR\": 47.690625,\n \"MVR\": 16.584924,\n \"MWK\": 1816.023037,\n \"MXN\": 18.69803,\n \"MYR\": 5.052606,\n \"MZN\": 68.249194,\n \"NAD\": 20.588506,\n \"NGN\": 865.924709,\n \"NIO\": 39.6024,\n \"NOK\": 11.848426,\n \"NPR\": 143.865605,\n \"NZD\": 1.761931,\n \"OMR\": 0.415394,\n \"PAB\": 1.07864,\n \"PEN\": 4.073376,\n \"PGK\": 4.025102,\n \"PHP\": 59.974075,\n \"PKR\": 306.446851,\n \"PLN\": 4.334063,\n \"PYG\": 7963.910929,\n \"QAR\": 3.928776,\n \"RON\": 4.973399,\n \"RSD\": 117.196649,\n \"RUB\": 97.248412,\n \"RWF\": 1351.496966,\n \"SAR\": 4.047186,\n \"SBD\": 9.12268,\n \"SCR\": 14.561036,\n \"SDG\": 648.5028,\n \"SEK\": 11.285032,\n \"SGD\": 1.449037,\n \"SHP\": 1.312921,\n \"SLE\": 24.488188,\n \"SLL\": 21311.029931,\n \"SOS\": 616.131981,\n \"SRD\": 40.655509,\n \"STD\": 22333.938945,\n \"SYP\": 14029.21897,\n \"SZL\": 20.587826,\n \"THB\": 38.597298,\n \"TJS\": 11.757734,\n \"TMT\": 3.776638,\n \"TND\": 3.377493,\n \"TOP\": 2.551714,\n \"TRY\": 31.312865,\n \"TTD\": 7.321483,\n \"TWD\": 34.012943,\n \"TZS\": 2697.598652,\n \"UAH\": 39.917867,\n \"UGX\": 4102.367289,\n \"USD\": 1.079039,\n \"UYU\": 42.422631,\n \"UZS\": 13299.161683,\n \"VEF\": 3838024.202021,\n \"VES\": 38.392542,\n \"VND\": 26188.28851,\n \"VUV\": 129.693288,\n \"WST\": 2.964402,\n \"XAF\": 655.37362,\n \"XAG\": 0.047456,\n \"XAU\": 0.000545,\n \"XCD\": 2.916158,\n \"XDR\": 0.811478,\n \"XOF\": 657.134976,\n 
\"XPF\": 119.331742,\n \"YER\": 270.110528,\n \"ZAR\": 20.470755,\n \"ZMK\": 9712.646776,\n \"ZMW\": 26.319693,\n \"ZWL\": 347.450277\n }\n}\n*/\n//# sourceMappingURL=echangeRates.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/services/providers/echangeRates.js?\n}"); /***/ }), @@ -3542,7 +3333,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.buildSignableTransaction = buildSignableTransaction;\nexports.makeChangeLock = makeChangeLock;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst WERR_errors_1 = __webpack_require__(/*! ../../sdk/WERR_errors */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WERR_errors.js\");\nconst utilityHelpers_1 = __webpack_require__(/*! ../../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst ScriptTemplateBRC29_1 = __webpack_require__(/*! ../../utility/ScriptTemplateBRC29 */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/ScriptTemplateBRC29.js\");\nfunction buildSignableTransaction(dctr, args, wallet) {\n var _a;\n const changeKeys = wallet.getClientChangeKeyPair();\n const inputBeef = args.inputBEEF ? sdk_1.Beef.fromBinary(args.inputBEEF) : undefined;\n const { inputs: storageInputs, outputs: storageOutputs } = dctr;\n const tx = new sdk_1.Transaction(args.version, [], [], args.lockTime);\n // The order of outputs in storageOutputs is always:\n // CreateActionArgs.outputs in the original order\n // Commission output\n // Change outputs\n // The Vout values will be randomized if args.options.randomizeOutputs is true. 
Default is true.\n const voutToIndex = Array(storageOutputs.length);\n for (let vout = 0; vout < storageOutputs.length; vout++) {\n const i = storageOutputs.findIndex(o => o.vout === vout);\n if (i < 0)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('output.vout', `sequential. ${vout} is missing`);\n voutToIndex[vout] = i;\n }\n //////////////\n // Add OUTPUTS\n /////////////\n for (let vout = 0; vout < storageOutputs.length; vout++) {\n const i = voutToIndex[vout];\n const out = storageOutputs[i];\n if (vout !== out.vout)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('output.vout', `equal to array index. ${out.vout} !== ${vout}`);\n const change = out.providedBy === 'storage' && out.purpose === 'change';\n const lockingScript = change\n ? makeChangeLock(out, dctr, args, changeKeys, wallet)\n : (0, utilityHelpers_1.asBsvSdkScript)(out.lockingScript);\n const output = {\n satoshis: out.satoshis,\n lockingScript,\n change\n };\n tx.addOutput(output);\n }\n if (storageOutputs.length === 0) {\n // Add a dummy output to avoid transaction rejection by processors for having no outputs.\n const output = {\n satoshis: 0,\n lockingScript: sdk_1.Script.fromASM('OP_FALSE OP_RETURN 42'),\n change: false\n };\n tx.addOutput(output);\n }\n //////////////\n // Merge and sort INPUTS info by vin order.\n /////////////\n const inputs = [];\n for (const storageInput of storageInputs) {\n const argsInput = storageInput.vin !== undefined && storageInput.vin < args.inputs.length\n ? args.inputs[storageInput.vin]\n : undefined;\n inputs.push({ argsInput, storageInput });\n }\n inputs.sort((a, b) => a.storageInput.vin < b.storageInput.vin ? -1 : a.storageInput.vin === b.storageInput.vin ? 
0 : 1);\n const pendingStorageInputs = [];\n //////////////\n // Add INPUTS\n /////////////\n let totalChangeInputs = 0;\n for (const { storageInput, argsInput } of inputs) {\n // Two types of inputs are handled: user specified wth/without unlockingScript and storage specified using SABPPP template.\n if (argsInput) {\n // Type 1: User supplied input, with or without an explicit unlockingScript.\n // If without, signAction must be used to provide the actual unlockScript.\n const hasUnlock = typeof argsInput.unlockingScript === 'string';\n const unlock = hasUnlock ? (0, utilityHelpers_1.asBsvSdkScript)(argsInput.unlockingScript) : new sdk_1.Script();\n const sourceTransaction = args.isSignAction ? (_a = inputBeef === null || inputBeef === void 0 ? void 0 : inputBeef.findTxid(argsInput.outpoint.txid)) === null || _a === void 0 ? void 0 : _a.tx : undefined;\n const inputToAdd = {\n sourceTXID: argsInput.outpoint.txid,\n sourceOutputIndex: argsInput.outpoint.vout,\n // Include the source transaction for access to the outputs locking script and output satoshis for user side fee calculation.\n // TODO: Make this conditional to improve performance when user can supply locking scripts themselves.\n sourceTransaction,\n unlockingScript: unlock,\n sequence: argsInput.sequenceNumber\n };\n tx.addInput(inputToAdd);\n }\n else {\n // Type2: SABPPP protocol inputs which are signed using ScriptTemplateBRC29.\n if (storageInput.type !== 'P2PKH')\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('type', `vin ${storageInput.vin}, \"${storageInput.type}\" is not a supported unlocking script type.`);\n pendingStorageInputs.push({\n vin: tx.inputs.length,\n derivationPrefix: (0, utilityHelpers_1.verifyTruthy)(storageInput.derivationPrefix),\n derivationSuffix: (0, utilityHelpers_1.verifyTruthy)(storageInput.derivationSuffix),\n unlockerPubKey: storageInput.senderIdentityKey,\n sourceSatoshis: storageInput.sourceSatoshis,\n lockingScript: storageInput.sourceLockingScript\n });\n const 
inputToAdd = {\n sourceTXID: storageInput.sourceTxid,\n sourceOutputIndex: storageInput.sourceVout,\n sourceTransaction: storageInput.sourceTransaction\n ? sdk_1.Transaction.fromBinary(storageInput.sourceTransaction)\n : undefined,\n unlockingScript: new sdk_1.Script(),\n sequence: 0xffffffff\n };\n tx.addInput(inputToAdd);\n totalChangeInputs += (0, utilityHelpers_1.verifyTruthy)(storageInput.sourceSatoshis);\n }\n }\n // The amount is the total of non-foreign inputs minus change outputs\n // Note that the amount can be negative when we are redeeming more inputs than we are spending\n const totalChangeOutputs = storageOutputs\n .filter(x => x.purpose === 'change')\n .reduce((acc, el) => acc + el.satoshis, 0);\n const amount = totalChangeInputs - totalChangeOutputs;\n return {\n tx,\n amount,\n pdi: pendingStorageInputs,\n log: ''\n };\n}\n/**\n * Derive a change output locking script\n */\nfunction makeChangeLock(out, dctr, args, changeKeys, wallet) {\n const derivationPrefix = dctr.derivationPrefix;\n const derivationSuffix = (0, utilityHelpers_1.verifyTruthy)(out.derivationSuffix);\n const sabppp = new ScriptTemplateBRC29_1.ScriptTemplateBRC29({\n derivationPrefix,\n derivationSuffix,\n keyDeriver: wallet.keyDeriver\n });\n const lockingScript = sabppp.lock(changeKeys.privateKey, changeKeys.publicKey);\n return lockingScript;\n}\n//# sourceMappingURL=buildSignableTransaction.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/signer/methods/buildSignableTransaction.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.buildSignableTransaction = buildSignableTransaction;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst index_client_1 = __webpack_require__(/*! ../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nconst createAction_1 = __webpack_require__(/*! 
./createAction */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/signer/methods/createAction.js\");\nfunction buildSignableTransaction(dctr, args, wallet) {\n var _a;\n const changeKeys = wallet.getClientChangeKeyPair();\n const inputBeef = args.inputBEEF ? sdk_1.Beef.fromBinary(args.inputBEEF) : undefined;\n const { inputs: storageInputs, outputs: storageOutputs } = dctr;\n const tx = new sdk_1.Transaction(args.version, [], [], args.lockTime);\n // The order of outputs in storageOutputs is always:\n // CreateActionArgs.outputs in the original order\n // Commission output\n // Change outputs\n // The Vout values will be randomized if args.options.randomizeOutputs is true. Default is true.\n const voutToIndex = Array(storageOutputs.length);\n for (let vout = 0; vout < storageOutputs.length; vout++) {\n const i = storageOutputs.findIndex(o => o.vout === vout);\n if (i < 0)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('output.vout', `sequential. ${vout} is missing`);\n voutToIndex[vout] = i;\n }\n //////////////\n // Add OUTPUTS\n /////////////\n for (let vout = 0; vout < storageOutputs.length; vout++) {\n const i = voutToIndex[vout];\n const out = storageOutputs[i];\n if (vout !== out.vout)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('output.vout', `equal to array index. ${out.vout} !== ${vout}`);\n const change = out.providedBy === 'storage' && out.purpose === 'change';\n const lockingScript = change\n ? 
(0, createAction_1.makeChangeLock)(out, dctr, args, changeKeys, wallet)\n : (0, index_client_1.asBsvSdkScript)(out.lockingScript);\n const output = {\n satoshis: out.satoshis,\n lockingScript,\n change\n };\n tx.addOutput(output);\n }\n if (storageOutputs.length === 0) {\n // Add a dummy output to avoid transaction rejection by processors for having no outputs.\n const output = {\n satoshis: 0,\n lockingScript: sdk_1.Script.fromASM('OP_FALSE OP_RETURN 42'),\n change: false\n };\n tx.addOutput(output);\n }\n //////////////\n // Merge and sort INPUTS info by vin order.\n /////////////\n const inputs = [];\n for (const storageInput of storageInputs) {\n const argsInput = storageInput.vin !== undefined && storageInput.vin < args.inputs.length\n ? args.inputs[storageInput.vin]\n : undefined;\n inputs.push({ argsInput, storageInput });\n }\n inputs.sort((a, b) => a.storageInput.vin < b.storageInput.vin ? -1 : a.storageInput.vin === b.storageInput.vin ? 0 : 1);\n const pendingStorageInputs = [];\n //////////////\n // Add INPUTS\n /////////////\n let totalChangeInputs = 0;\n for (const { storageInput, argsInput } of inputs) {\n // Two types of inputs are handled: user specified wth/without unlockingScript and storage specified using SABPPP template.\n if (argsInput) {\n // Type 1: User supplied input, with or without an explicit unlockingScript.\n // If without, signAction must be used to provide the actual unlockScript.\n const hasUnlock = typeof argsInput.unlockingScript === 'string';\n const unlock = hasUnlock ? (0, index_client_1.asBsvSdkScript)(argsInput.unlockingScript) : new sdk_1.Script();\n const sourceTransaction = args.isSignAction ? (_a = inputBeef === null || inputBeef === void 0 ? void 0 : inputBeef.findTxid(argsInput.outpoint.txid)) === null || _a === void 0 ? 
void 0 : _a.tx : undefined;\n const inputToAdd = {\n sourceTXID: argsInput.outpoint.txid,\n sourceOutputIndex: argsInput.outpoint.vout,\n // Include the source transaction for access to the outputs locking script and output satoshis for user side fee calculation.\n // TODO: Make this conditional to improve performance when user can supply locking scripts themselves.\n sourceTransaction,\n unlockingScript: unlock,\n sequence: argsInput.sequenceNumber\n };\n tx.addInput(inputToAdd);\n }\n else {\n // Type2: SABPPP protocol inputs which are signed using ScriptTemplateBRC29.\n if (storageInput.type !== 'P2PKH')\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('type', `vin ${storageInput.vin}, \"${storageInput.type}\" is not a supported unlocking script type.`);\n pendingStorageInputs.push({\n vin: tx.inputs.length,\n derivationPrefix: (0, index_client_1.verifyTruthy)(storageInput.derivationPrefix),\n derivationSuffix: (0, index_client_1.verifyTruthy)(storageInput.derivationSuffix),\n unlockerPubKey: storageInput.senderIdentityKey,\n sourceSatoshis: storageInput.sourceSatoshis,\n lockingScript: storageInput.sourceLockingScript\n });\n const inputToAdd = {\n sourceTXID: storageInput.sourceTxid,\n sourceOutputIndex: storageInput.sourceVout,\n sourceTransaction: storageInput.sourceTransaction\n ? 
sdk_1.Transaction.fromBinary(storageInput.sourceTransaction)\n : undefined,\n unlockingScript: new sdk_1.Script(),\n sequence: 0xffffffff\n };\n tx.addInput(inputToAdd);\n totalChangeInputs += (0, index_client_1.verifyTruthy)(storageInput.sourceSatoshis);\n }\n }\n // The amount is the total of non-foreign inputs minus change outputs\n // Note that the amount can be negative when we are redeeming more inputs than we are spending\n const totalChangeOutputs = storageOutputs\n .filter(x => x.purpose === 'change')\n .reduce((acc, el) => acc + el.satoshis, 0);\n const amount = totalChangeInputs - totalChangeOutputs;\n return {\n tx,\n amount,\n pdi: pendingStorageInputs,\n log: ''\n };\n}\n//# sourceMappingURL=buildSignableTransaction.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/signer/methods/buildSignableTransaction.js?\n}"); /***/ }), @@ -3553,7 +3344,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.completeSignedTransaction = completeSignedTransaction;\nexports.verifyUnlockScripts = verifyUnlockScripts;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst WERR_errors_1 = __webpack_require__(/*! ../../sdk/WERR_errors */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WERR_errors.js\");\nconst utilityHelpers_1 = __webpack_require__(/*! ../../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst ScriptTemplateBRC29_1 = __webpack_require__(/*! ../../utility/ScriptTemplateBRC29 */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/ScriptTemplateBRC29.js\");\nconst WalletError_1 = __webpack_require__(/*! 
../../sdk/WalletError */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WalletError.js\");\nasync function completeSignedTransaction(prior, spends, wallet) {\n /////////////////////\n // Insert the user provided unlocking scripts from \"spends\" arg\n /////////////////////\n for (const [key, spend] of Object.entries(spends)) {\n const vin = Number(key);\n const createInput = prior.args.inputs[vin];\n const input = prior.tx.inputs[vin];\n if (!createInput || !input || createInput.unlockingScript || !Number.isInteger(createInput.unlockingScriptLength))\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('args', `spend does not correspond to prior input with valid unlockingScriptLength.`);\n if (spend.unlockingScript.length / 2 > createInput.unlockingScriptLength)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('args', `spend unlockingScript length ${spend.unlockingScript.length} exceeds expected length ${createInput.unlockingScriptLength}`);\n input.unlockingScript = (0, utilityHelpers_1.asBsvSdkScript)(spend.unlockingScript);\n if (spend.sequenceNumber !== undefined)\n input.sequence = spend.sequenceNumber;\n }\n const results = {\n sdk: {}\n };\n /////////////////////\n // Insert SABPPP unlock templates for wallet signed inputs\n /////////////////////\n for (const pdi of prior.pdi) {\n const sabppp = new ScriptTemplateBRC29_1.ScriptTemplateBRC29({\n derivationPrefix: pdi.derivationPrefix,\n derivationSuffix: pdi.derivationSuffix,\n keyDeriver: wallet.keyDeriver\n });\n const keys = wallet.getClientChangeKeyPair();\n const lockerPrivKey = keys.privateKey;\n const unlockerPubKey = pdi.unlockerPubKey || keys.publicKey;\n const sourceSatoshis = pdi.sourceSatoshis;\n const lockingScript = (0, utilityHelpers_1.asBsvSdkScript)(pdi.lockingScript);\n const unlockTemplate = sabppp.unlock(lockerPrivKey, unlockerPubKey, sourceSatoshis, lockingScript);\n const input = prior.tx.inputs[pdi.vin];\n input.unlockingScriptTemplate = unlockTemplate;\n }\n 
/////////////////////\n // Sign wallet signed inputs making transaction fully valid.\n /////////////////////\n await prior.tx.sign();\n return prior.tx;\n}\n/**\n * @param txid The TXID of a transaction in the beef for which all unlocking scripts must be valid.\n * @param beef Must contain transactions for txid and all its inputs.\n * @throws WERR_INVALID_PARAMETER if any unlocking script is invalid, if sourceTXID is invalid, if beef doesn't contain required transactions.\n */\nfunction verifyUnlockScripts(txid, beef) {\n var _a, _b, _c, _d;\n const tx = (_a = beef.findTxid(txid)) === null || _a === void 0 ? void 0 : _a.tx;\n if (!tx)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER(`txid`, `contained in beef, txid ${txid}`);\n for (let i = 0; i < tx.inputs.length; i++) {\n const input = tx.inputs[i];\n if (!input.sourceTXID)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER(`inputs[${i}].sourceTXID`, `valid`);\n if (!input.unlockingScript)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER(`inputs[${i}].unlockingScript`, `valid`);\n input.sourceTransaction = (_b = beef.findTxid(input.sourceTXID)) === null || _b === void 0 ? void 0 : _b.tx;\n if (!input.sourceTransaction) {\n // The beef doesn't contain all the source transactions only if advanced features\n // such as knownTxids are used.\n // Skip unlock script checks.\n return;\n // throw new WERR_INVALID_PARAMETER(`inputs[${i}].sourceTXID`, `contained in beef`)\n }\n }\n for (let i = 0; i < tx.inputs.length; i++) {\n const input = tx.inputs[i];\n const sourceOutput = input.sourceTransaction.outputs[input.sourceOutputIndex];\n const otherInputs = tx.inputs.filter((_, idx) => idx !== i);\n const spend = new sdk_1.Spend({\n sourceTXID: input.sourceTXID,\n sourceOutputIndex: input.sourceOutputIndex,\n lockingScript: sourceOutput.lockingScript,\n sourceSatoshis: (_c = sourceOutput.satoshis) !== null && _c !== void 0 ? 
_c : 0,\n transactionVersion: tx.version,\n otherInputs,\n unlockingScript: input.unlockingScript,\n inputSequence: (_d = input.sequence) !== null && _d !== void 0 ? _d : 0,\n inputIndex: i,\n outputs: tx.outputs,\n lockTime: tx.lockTime\n });\n try {\n const spendValid = spend.validate();\n if (!spendValid)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER(`inputs[${i}].unlockScript`, `valid`);\n }\n catch (eu) {\n const e = WalletError_1.WalletError.fromUnknown(eu);\n throw new WERR_errors_1.WERR_INVALID_PARAMETER(`inputs[${i}].unlockScript`, `valid. ${e.message}`);\n }\n }\n}\n//# sourceMappingURL=completeSignedTransaction.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/signer/methods/completeSignedTransaction.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.completeSignedTransaction = completeSignedTransaction;\nexports.verifyUnlockScripts = verifyUnlockScripts;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst index_client_1 = __webpack_require__(/*! ../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nconst sdk_2 = __webpack_require__(/*! 
../../sdk */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/index.js\");\nasync function completeSignedTransaction(prior, spends, wallet) {\n /////////////////////\n // Insert the user provided unlocking scripts from \"spends\" arg\n /////////////////////\n for (const [key, spend] of Object.entries(spends)) {\n const vin = Number(key);\n const createInput = prior.args.inputs[vin];\n const input = prior.tx.inputs[vin];\n if (!createInput || !input || createInput.unlockingScript || !Number.isInteger(createInput.unlockingScriptLength))\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('args', `spend does not correspond to prior input with valid unlockingScriptLength.`);\n if (spend.unlockingScript.length / 2 > createInput.unlockingScriptLength)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('args', `spend unlockingScript length ${spend.unlockingScript.length} exceeds expected length ${createInput.unlockingScriptLength}`);\n input.unlockingScript = (0, index_client_1.asBsvSdkScript)(spend.unlockingScript);\n if (spend.sequenceNumber !== undefined)\n input.sequence = spend.sequenceNumber;\n }\n const results = {\n sdk: {}\n };\n /////////////////////\n // Insert SABPPP unlock templates for wallet signed inputs\n /////////////////////\n for (const pdi of prior.pdi) {\n const sabppp = new index_client_1.ScriptTemplateBRC29({\n derivationPrefix: pdi.derivationPrefix,\n derivationSuffix: pdi.derivationSuffix,\n keyDeriver: wallet.keyDeriver\n });\n const keys = wallet.getClientChangeKeyPair();\n const lockerPrivKey = keys.privateKey;\n const unlockerPubKey = pdi.unlockerPubKey || keys.publicKey;\n const sourceSatoshis = pdi.sourceSatoshis;\n const lockingScript = (0, index_client_1.asBsvSdkScript)(pdi.lockingScript);\n const unlockTemplate = sabppp.unlock(lockerPrivKey, unlockerPubKey, sourceSatoshis, lockingScript);\n const input = prior.tx.inputs[pdi.vin];\n input.unlockingScriptTemplate = unlockTemplate;\n }\n /////////////////////\n // Sign wallet 
signed inputs making transaction fully valid.\n /////////////////////\n await prior.tx.sign();\n return prior.tx;\n}\n/**\n * @param txid The TXID of a transaction in the beef for which all unlocking scripts must be valid.\n * @param beef Must contain transactions for txid and all its inputs.\n * @throws WERR_INVALID_PARAMETER if any unlocking script is invalid, if sourceTXID is invalid, if beef doesn't contain required transactions.\n */\nfunction verifyUnlockScripts(txid, beef) {\n var _a, _b, _c, _d;\n const tx = (_a = beef.findTxid(txid)) === null || _a === void 0 ? void 0 : _a.tx;\n if (!tx)\n throw new sdk_2.WERR_INVALID_PARAMETER(`txid`, `contained in beef, txid ${txid}`);\n for (let i = 0; i < tx.inputs.length; i++) {\n const input = tx.inputs[i];\n if (!input.sourceTXID)\n throw new sdk_2.WERR_INVALID_PARAMETER(`inputs[${i}].sourceTXID`, `valid`);\n if (!input.unlockingScript)\n throw new sdk_2.WERR_INVALID_PARAMETER(`inputs[${i}].unlockingScript`, `valid`);\n input.sourceTransaction = (_b = beef.findTxid(input.sourceTXID)) === null || _b === void 0 ? void 0 : _b.tx;\n if (!input.sourceTransaction) {\n // The beef doesn't contain all the source transactions only if advanced features\n // such as knownTxids are used.\n // Skip unlock script checks.\n return;\n // throw new WERR_INVALID_PARAMETER(`inputs[${i}].sourceTXID`, `contained in beef`)\n }\n }\n for (let i = 0; i < tx.inputs.length; i++) {\n const input = tx.inputs[i];\n const sourceOutput = input.sourceTransaction.outputs[input.sourceOutputIndex];\n const otherInputs = tx.inputs.filter((_, idx) => idx !== i);\n const spend = new sdk_1.Spend({\n sourceTXID: input.sourceTXID,\n sourceOutputIndex: input.sourceOutputIndex,\n lockingScript: sourceOutput.lockingScript,\n sourceSatoshis: (_c = sourceOutput.satoshis) !== null && _c !== void 0 ? _c : 0,\n transactionVersion: tx.version,\n otherInputs,\n unlockingScript: input.unlockingScript,\n inputSequence: (_d = input.sequence) !== null && _d !== void 0 ? 
_d : 0,\n inputIndex: i,\n outputs: tx.outputs,\n lockTime: tx.lockTime\n });\n try {\n const spendValid = spend.validate();\n if (!spendValid)\n throw new sdk_2.WERR_INVALID_PARAMETER(`inputs[${i}].unlockScript`, `valid`);\n }\n catch (eu) {\n const e = sdk_2.WalletError.fromUnknown(eu);\n throw new sdk_2.WERR_INVALID_PARAMETER(`inputs[${i}].unlockScript`, `valid. ${e.message}`);\n }\n }\n}\n//# sourceMappingURL=completeSignedTransaction.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/signer/methods/completeSignedTransaction.js?\n}"); /***/ }), @@ -3564,7 +3355,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.createAction = createAction;\nexports.processAction = processAction;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst buildSignableTransaction_1 = __webpack_require__(/*! ./buildSignableTransaction */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/signer/methods/buildSignableTransaction.js\");\nconst completeSignedTransaction_1 = __webpack_require__(/*! ./completeSignedTransaction */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/signer/methods/completeSignedTransaction.js\");\nconst WERR_errors_1 = __webpack_require__(/*! 
../../sdk/WERR_errors */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WERR_errors.js\");\nasync function createAction(wallet, auth, vargs) {\n var _a;\n const r = {};\n let prior = undefined;\n if (vargs.isNewTx) {\n prior = await createNewTx(wallet, vargs);\n if (vargs.isSignAction) {\n return makeSignableTransactionResult(prior, wallet, vargs);\n }\n prior.tx = await (0, completeSignedTransaction_1.completeSignedTransaction)(prior, {}, wallet);\n r.txid = prior.tx.id('hex');\n const beef = new sdk_1.Beef();\n if (prior.dcr.inputBeef)\n beef.mergeBeef(prior.dcr.inputBeef);\n beef.mergeTransaction(prior.tx);\n (0, completeSignedTransaction_1.verifyUnlockScripts)(r.txid, beef);\n r.noSendChange = (_a = prior.dcr.noSendChangeOutputVouts) === null || _a === void 0 ? void 0 : _a.map(vout => `${r.txid}.${vout}`);\n if (!vargs.options.returnTXIDOnly)\n r.tx = beef.toBinaryAtomic(r.txid);\n }\n const { sendWithResults, notDelayedResults } = await processAction(prior, wallet, auth, vargs);\n r.sendWithResults = sendWithResults;\n r.notDelayedResults = notDelayedResults;\n return r;\n}\nasync function createNewTx(wallet, args) {\n const storageArgs = removeUnlockScripts(args);\n const dcr = await wallet.storage.createAction(storageArgs);\n const reference = dcr.reference;\n const { tx, amount, pdi } = (0, buildSignableTransaction_1.buildSignableTransaction)(dcr, args, wallet);\n const prior = { reference, dcr, args, amount, tx, pdi };\n return prior;\n}\nfunction makeSignableTransactionResult(prior, wallet, args) {\n var _a;\n if (!prior.dcr.inputBeef)\n throw new WERR_errors_1.WERR_INTERNAL('prior.dcr.inputBeef must be valid');\n const txid = prior.tx.id('hex');\n const r = {\n noSendChange: args.isNoSend ? (_a = prior.dcr.noSendChangeOutputVouts) === null || _a === void 0 ? 
void 0 : _a.map(vout => `${txid}.${vout}`) : undefined,\n signableTransaction: {\n reference: prior.dcr.reference,\n tx: makeSignableTransactionBeef(prior.tx, prior.dcr.inputBeef)\n }\n };\n wallet.pendingSignActions[r.signableTransaction.reference] = prior;\n return r;\n}\nfunction makeSignableTransactionBeef(tx, inputBEEF) {\n // This is a special case beef for transaction signing.\n // We only need the transaction being signed, and for each input, the raw source transaction.\n const beef = new sdk_1.Beef();\n for (const input of tx.inputs) {\n if (!input.sourceTransaction)\n throw new WERR_errors_1.WERR_INTERNAL('Every signableTransaction input must have a sourceTransaction');\n beef.mergeRawTx(input.sourceTransaction.toBinary());\n }\n beef.mergeRawTx(tx.toBinary());\n return beef.toBinaryAtomic(tx.id('hex'));\n}\nfunction removeUnlockScripts(args) {\n let storageArgs = args;\n if (!storageArgs.inputs.every(i => i.unlockingScript === undefined)) {\n // Never send unlocking scripts to storage, all it needs is the script length.\n storageArgs = { ...args, inputs: [] };\n for (const i of args.inputs) {\n const di = {\n ...i,\n unlockingScriptLength: i.unlockingScript !== undefined ? i.unlockingScript.length : i.unlockingScriptLength\n };\n delete di.unlockingScript;\n storageArgs.inputs.push(di);\n }\n }\n return storageArgs;\n}\nasync function processAction(prior, wallet, auth, vargs) {\n const args = {\n isNewTx: vargs.isNewTx,\n isSendWith: vargs.isSendWith,\n isNoSend: vargs.isNoSend,\n isDelayed: vargs.isDelayed,\n reference: prior ? prior.reference : undefined,\n txid: prior ? prior.tx.id('hex') : undefined,\n rawTx: prior ? prior.tx.toBinary() : undefined,\n sendWith: vargs.isSendWith ? 
vargs.options.sendWith : []\n };\n const r = await wallet.storage.processAction(args);\n return r;\n}\nfunction makeDummyTransactionForOutputSatoshis(vout, satoshis) {\n const tx = new sdk_1.Transaction();\n for (let i = 0; i < vout; i++)\n tx.addOutput({ lockingScript: new sdk_1.Script(), satoshis: 0 });\n tx.addOutput({ lockingScript: new sdk_1.Script(), satoshis });\n return tx;\n}\n//# sourceMappingURL=createAction.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/signer/methods/createAction.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.createAction = createAction;\nexports.makeChangeLock = makeChangeLock;\nexports.processAction = processAction;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst sdk_2 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst index_client_1 = __webpack_require__(/*! ../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nconst buildSignableTransaction_1 = __webpack_require__(/*! ./buildSignableTransaction */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/signer/methods/buildSignableTransaction.js\");\nconst completeSignedTransaction_1 = __webpack_require__(/*! 
./completeSignedTransaction */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/signer/methods/completeSignedTransaction.js\");\nasync function createAction(wallet, auth, vargs) {\n var _a;\n const r = {};\n let prior = undefined;\n if (vargs.isNewTx) {\n prior = await createNewTx(wallet, vargs);\n if (vargs.isSignAction) {\n return makeSignableTransactionResult(prior, wallet, vargs);\n }\n prior.tx = await (0, completeSignedTransaction_1.completeSignedTransaction)(prior, {}, wallet);\n r.txid = prior.tx.id('hex');\n const beef = new sdk_1.Beef();\n if (prior.dcr.inputBeef)\n beef.mergeBeef(prior.dcr.inputBeef);\n beef.mergeTransaction(prior.tx);\n (0, completeSignedTransaction_1.verifyUnlockScripts)(r.txid, beef);\n r.noSendChange = (_a = prior.dcr.noSendChangeOutputVouts) === null || _a === void 0 ? void 0 : _a.map(vout => `${r.txid}.${vout}`);\n if (!vargs.options.returnTXIDOnly)\n r.tx = beef.toBinaryAtomic(r.txid);\n }\n const { sendWithResults, notDelayedResults } = await processAction(prior, wallet, auth, vargs);\n r.sendWithResults = sendWithResults;\n r.notDelayedResults = notDelayedResults;\n return r;\n}\nasync function createNewTx(wallet, args) {\n const storageArgs = removeUnlockScripts(args);\n const dcr = await wallet.storage.createAction(storageArgs);\n const reference = dcr.reference;\n const { tx, amount, pdi } = (0, buildSignableTransaction_1.buildSignableTransaction)(dcr, args, wallet);\n const prior = { reference, dcr, args, amount, tx, pdi };\n return prior;\n}\nfunction makeSignableTransactionResult(prior, wallet, args) {\n var _a;\n if (!prior.dcr.inputBeef)\n throw new index_client_1.sdk.WERR_INTERNAL('prior.dcr.inputBeef must be valid');\n const txid = prior.tx.id('hex');\n const r = {\n noSendChange: args.isNoSend ? (_a = prior.dcr.noSendChangeOutputVouts) === null || _a === void 0 ? 
void 0 : _a.map(vout => `${txid}.${vout}`) : undefined,\n signableTransaction: {\n reference: prior.dcr.reference,\n tx: makeSignableTransactionBeef(prior.tx, prior.dcr.inputBeef)\n }\n };\n wallet.pendingSignActions[r.signableTransaction.reference] = prior;\n return r;\n}\nfunction makeSignableTransactionBeef(tx, inputBEEF) {\n // This is a special case beef for transaction signing.\n // We only need the transaction being signed, and for each input, the raw source transaction.\n const beef = new sdk_1.Beef();\n for (const input of tx.inputs) {\n if (!input.sourceTransaction)\n throw new index_client_1.sdk.WERR_INTERNAL('Every signableTransaction input must have a sourceTransaction');\n beef.mergeRawTx(input.sourceTransaction.toBinary());\n }\n beef.mergeRawTx(tx.toBinary());\n return beef.toBinaryAtomic(tx.id('hex'));\n}\n/**\n * Derive a change output locking script\n */\nfunction makeChangeLock(out, dctr, args, changeKeys, wallet) {\n const derivationPrefix = dctr.derivationPrefix;\n const derivationSuffix = (0, index_client_1.verifyTruthy)(out.derivationSuffix);\n const sabppp = new index_client_1.ScriptTemplateBRC29({\n derivationPrefix,\n derivationSuffix,\n keyDeriver: wallet.keyDeriver\n });\n const lockingScript = sabppp.lock(changeKeys.privateKey, changeKeys.publicKey);\n return lockingScript;\n}\nfunction removeUnlockScripts(args) {\n let storageArgs = args;\n if (!storageArgs.inputs.every(i => i.unlockingScript === undefined)) {\n // Never send unlocking scripts to storage, all it needs is the script length.\n storageArgs = { ...args, inputs: [] };\n for (const i of args.inputs) {\n const di = {\n ...i,\n unlockingScriptLength: i.unlockingScript !== undefined ? 
i.unlockingScript.length : i.unlockingScriptLength\n };\n delete di.unlockingScript;\n storageArgs.inputs.push(di);\n }\n }\n return storageArgs;\n}\nasync function processAction(prior, wallet, auth, vargs) {\n const args = {\n isNewTx: vargs.isNewTx,\n isSendWith: vargs.isSendWith,\n isNoSend: vargs.isNoSend,\n isDelayed: vargs.isDelayed,\n reference: prior ? prior.reference : undefined,\n txid: prior ? prior.tx.id('hex') : undefined,\n rawTx: prior ? prior.tx.toBinary() : undefined,\n sendWith: vargs.isSendWith ? vargs.options.sendWith : []\n };\n const r = await wallet.storage.processAction(args);\n return r;\n}\nfunction makeDummyTransactionForOutputSatoshis(vout, satoshis) {\n const tx = new sdk_2.Transaction();\n for (let i = 0; i < vout; i++)\n tx.addOutput({ lockingScript: new sdk_2.Script(), satoshis: 0 });\n tx.addOutput({ lockingScript: new sdk_2.Script(), satoshis });\n return tx;\n}\n//# sourceMappingURL=createAction.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/signer/methods/createAction.js?\n}"); /***/ }), @@ -3575,7 +3366,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.internalizeAction = internalizeAction;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst validationHelpers_1 = __webpack_require__(/*! ../../sdk/validationHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/validationHelpers.js\");\nconst WERR_errors_1 = __webpack_require__(/*! 
../../sdk/WERR_errors */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WERR_errors.js\");\n/**\n * Internalize Action allows a wallet to take ownership of outputs in a pre-existing transaction.\n * The transaction may, or may not already be known to both the storage and user.\n *\n * Two types of outputs are handled: \"wallet payments\" and \"basket insertions\".\n *\n * A \"basket insertion\" output is considered a custom output and has no effect on the wallet's \"balance\".\n *\n * A \"wallet payment\" adds an outputs value to the wallet's change \"balance\". These outputs are assigned to the \"default\" basket.\n *\n * Processing starts with simple validation and then checks for a pre-existing transaction.\n * If the transaction is already known to the user, then the outputs are reviewed against the existing outputs treatment,\n * and merge rules are added to the arguments passed to the storage layer.\n * The existing transaction must be in the 'unproven' or 'completed' status. Any other status is an error.\n *\n * When the transaction already exists, the description is updated. The isOutgoing sense is not changed.\n *\n * \"basket insertion\" Merge Rules:\n * 1. The \"default\" basket may not be specified as the insertion basket.\n * 2. A change output in the \"default\" basket may not be target of an insertion into a different basket.\n * 3. These baskets do not affect the wallet's balance and are typed \"custom\".\n *\n * \"wallet payment\" Merge Rules:\n * 1. Targetting an existing change \"default\" basket output results in a no-op. No error. No alterations made.\n * 2. Targetting a previously \"custom\" non-change output converts it into a change output. 
This alters the transaction's `amount`, and the wallet balance.\n *\n */\nasync function internalizeAction(wallet, auth, args) {\n const vargs = (0, validationHelpers_1.validateInternalizeActionArgs)(args);\n const { ab, tx, txid } = await validateAtomicBeef();\n const brc29ProtocolID = [2, '3241645161d8'];\n for (const o of vargs.outputs) {\n if (o.outputIndex < 0 || o.outputIndex >= tx.outputs.length)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('outputIndex', `a valid output index in range 0 to ${tx.outputs.length - 1}`);\n switch (o.protocol) {\n case 'basket insertion':\n setupBasketInsertionForOutput(o, vargs);\n break;\n case 'wallet payment':\n setupWalletPaymentForOutput(o, vargs);\n break;\n default:\n throw new WERR_errors_1.WERR_INTERNAL(`unexpected protocol ${o.protocol}`);\n }\n }\n const r = await wallet.storage.internalizeAction(args);\n return r;\n function setupWalletPaymentForOutput(o, dargs) {\n const p = o.paymentRemittance;\n const output = tx.outputs[o.outputIndex];\n if (!p)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('paymentRemittance', `valid for protocol ${o.protocol}`);\n const keyID = `${p.derivationPrefix} ${p.derivationSuffix}`;\n const privKey = wallet.keyDeriver.derivePrivateKey(brc29ProtocolID, keyID, p.senderIdentityKey);\n const expectedLockScript = new sdk_1.P2PKH().lock(privKey.toAddress());\n if (output.lockingScript.toHex() !== expectedLockScript.toHex())\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('paymentRemittance', `locked by script conforming to BRC-29`);\n }\n function setupBasketInsertionForOutput(o, dargs) {\n /*\n No additional validations...\n */\n }\n async function validateAtomicBeef() {\n const ab = sdk_1.Beef.fromBinary(vargs.tx);\n // TODO: Add support for known txids...\n const txValid = await ab.verify(await wallet.getServices().getChainTracker(), false);\n if (!txValid || !ab.atomicTxid) {\n console.log(`internalizeAction beef is invalid: ${ab.toLogString()}`);\n throw new 
WERR_errors_1.WERR_INVALID_PARAMETER('tx', 'valid AtomicBEEF');\n }\n const txid = ab.atomicTxid;\n const btx = ab.findTxid(txid);\n if (!btx)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('tx', `valid AtomicBEEF with newest txid of ${txid}`);\n const tx = btx.tx;\n return { ab, tx, txid };\n }\n}\n//# sourceMappingURL=internalizeAction.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/signer/methods/internalizeAction.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.internalizeAction = internalizeAction;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst index_client_1 = __webpack_require__(/*! ../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\n/**\n * Internalize Action allows a wallet to take ownership of outputs in a pre-existing transaction.\n * The transaction may, or may not already be known to both the storage and user.\n *\n * Two types of outputs are handled: \"wallet payments\" and \"basket insertions\".\n *\n * A \"basket insertion\" output is considered a custom output and has no effect on the wallet's \"balance\".\n *\n * A \"wallet payment\" adds an outputs value to the wallet's change \"balance\". These outputs are assigned to the \"default\" basket.\n *\n * Processing starts with simple validation and then checks for a pre-existing transaction.\n * If the transaction is already known to the user, then the outputs are reviewed against the existing outputs treatment,\n * and merge rules are added to the arguments passed to the storage layer.\n * The existing transaction must be in the 'unproven' or 'completed' status. Any other status is an error.\n *\n * When the transaction already exists, the description is updated. The isOutgoing sense is not changed.\n *\n * \"basket insertion\" Merge Rules:\n * 1. 
The \"default\" basket may not be specified as the insertion basket.\n * 2. A change output in the \"default\" basket may not be target of an insertion into a different basket.\n * 3. These baskets do not affect the wallet's balance and are typed \"custom\".\n *\n * \"wallet payment\" Merge Rules:\n * 1. Targetting an existing change \"default\" basket output results in a no-op. No error. No alterations made.\n * 2. Targetting a previously \"custom\" non-change output converts it into a change output. This alters the transaction's `amount`, and the wallet balance.\n *\n */\nasync function internalizeAction(wallet, auth, args) {\n const vargs = index_client_1.sdk.validateInternalizeActionArgs(args);\n const { ab, tx, txid } = await validateAtomicBeef();\n const brc29ProtocolID = [2, '3241645161d8'];\n for (const o of vargs.outputs) {\n if (o.outputIndex < 0 || o.outputIndex >= tx.outputs.length)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('outputIndex', `a valid output index in range 0 to ${tx.outputs.length - 1}`);\n switch (o.protocol) {\n case 'basket insertion':\n setupBasketInsertionForOutput(o, vargs);\n break;\n case 'wallet payment':\n setupWalletPaymentForOutput(o, vargs);\n break;\n default:\n throw new index_client_1.sdk.WERR_INTERNAL(`unexpected protocol ${o.protocol}`);\n }\n }\n const r = await wallet.storage.internalizeAction(args);\n return r;\n function setupWalletPaymentForOutput(o, dargs) {\n const p = o.paymentRemittance;\n const output = tx.outputs[o.outputIndex];\n if (!p)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('paymentRemitance', `valid for protocol ${o.protocol}`);\n const keyID = `${p.derivationPrefix} ${p.derivationSuffix}`;\n const privKey = wallet.keyDeriver.derivePrivateKey(brc29ProtocolID, keyID, p.senderIdentityKey);\n const expectedLockScript = new sdk_1.P2PKH().lock(privKey.toAddress());\n if (output.lockingScript.toHex() !== expectedLockScript.toHex())\n throw new 
index_client_1.sdk.WERR_INVALID_PARAMETER('paymentRemitance', `locked by script conforming to BRC-29`);\n }\n function setupBasketInsertionForOutput(o, dargs) {\n /*\n No additional validations...\n */\n }\n async function validateAtomicBeef() {\n const ab = sdk_1.Beef.fromBinary(vargs.tx);\n // TODO: Add support for known txids...\n const txValid = await ab.verify(await wallet.getServices().getChainTracker(), false);\n if (!txValid || !ab.atomicTxid) {\n console.log(`internalizeAction beef is invalid: ${ab.toLogString()}`);\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('tx', 'valid AtomicBEEF');\n }\n const txid = ab.atomicTxid;\n const btx = ab.findTxid(txid);\n if (!btx)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('tx', `valid AtomicBEEF with newest txid of ${txid}`);\n const tx = btx.tx;\n return { ab, tx, txid };\n }\n}\n//# sourceMappingURL=internalizeAction.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/signer/methods/internalizeAction.js?\n}"); /***/ }), @@ -3586,7 +3377,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.proveCertificate = proveCertificate;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst WERR_errors_1 = __webpack_require__(/*! 
../../sdk/WERR_errors */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WERR_errors.js\");\nasync function proveCertificate(wallet, auth, vargs) {\n const lcargs = {\n partial: {\n type: vargs.type,\n serialNumber: vargs.serialNumber,\n certifier: vargs.certifier,\n subject: vargs.subject,\n revocationOutpoint: vargs.revocationOutpoint,\n signature: vargs.signature\n },\n certifiers: [],\n types: [],\n limit: 2,\n offset: 0,\n privileged: false\n };\n const lcr = await wallet.storage.listCertificates(lcargs);\n if (lcr.certificates.length != 1)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('args', `a unique certificate match`);\n const storageCert = lcr.certificates[0];\n const keyringForVerifier = await sdk_1.MasterCertificate.createKeyringForVerifier(wallet, storageCert.certifier, vargs.verifier, storageCert.fields, vargs.fieldsToReveal, storageCert.keyring, storageCert.serialNumber, vargs.privileged, vargs.privilegedReason);\n return { keyringForVerifier };\n}\n//# sourceMappingURL=proveCertificate.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/signer/methods/proveCertificate.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.proveCertificate = proveCertificate;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst index_client_1 = __webpack_require__(/*! 
../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nasync function proveCertificate(wallet, auth, vargs) {\n const lcargs = {\n partial: {\n type: vargs.type,\n serialNumber: vargs.serialNumber,\n certifier: vargs.certifier,\n subject: vargs.subject,\n revocationOutpoint: vargs.revocationOutpoint,\n signature: vargs.signature\n },\n certifiers: [],\n types: [],\n limit: 2,\n offset: 0,\n privileged: false\n };\n const lcr = await wallet.storage.listCertificates(lcargs);\n if (lcr.certificates.length != 1)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('args', `a unique certificate match`);\n const storageCert = lcr.certificates[0];\n const keyringForVerifier = await sdk_1.MasterCertificate.createKeyringForVerifier(wallet, storageCert.certifier, vargs.verifier, storageCert.fields, vargs.fieldsToReveal, storageCert.keyring, storageCert.serialNumber, vargs.privileged, vargs.privilegedReason);\n return { keyringForVerifier };\n}\n//# sourceMappingURL=proveCertificate.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/signer/methods/proveCertificate.js?\n}"); /***/ }), @@ -3597,7 +3388,18 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.signAction = signAction;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst createAction_1 = __webpack_require__(/*! ./createAction */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/signer/methods/createAction.js\");\nconst completeSignedTransaction_1 = __webpack_require__(/*! ./completeSignedTransaction */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/signer/methods/completeSignedTransaction.js\");\nconst WERR_errors_1 = __webpack_require__(/*! 
../../sdk/WERR_errors */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WERR_errors.js\");\nconst validationHelpers_1 = __webpack_require__(/*! ../../sdk/validationHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/validationHelpers.js\");\nasync function signAction(wallet, auth, args) {\n const prior = wallet.pendingSignActions[args.reference];\n if (!prior)\n throw new WERR_errors_1.WERR_NOT_IMPLEMENTED('recovery of out-of-session signAction reference data is not yet implemented.');\n if (!prior.dcr.inputBeef)\n throw new WERR_errors_1.WERR_INTERNAL('prior.dcr.inputBeef must be valid');\n const vargs = mergePriorOptions(prior.args, args);\n prior.tx = await (0, completeSignedTransaction_1.completeSignedTransaction)(prior, vargs.spends, wallet);\n const { sendWithResults, notDelayedResults } = await (0, createAction_1.processAction)(prior, wallet, auth, vargs);\n const txid = prior.tx.id('hex');\n const beef = sdk_1.Beef.fromBinary(prior.dcr.inputBeef);\n beef.mergeTransaction(prior.tx);\n (0, completeSignedTransaction_1.verifyUnlockScripts)(txid, beef);\n const r = {\n txid: prior.tx.id('hex'),\n tx: vargs.options.returnTXIDOnly ? 
undefined : beef.toBinaryAtomic(txid),\n sendWithResults,\n notDelayedResults\n };\n return r;\n}\nfunction mergePriorOptions(caVargs, saArgs) {\n const saOptions = (saArgs.options || (saArgs.options = {}));\n if (saOptions.acceptDelayedBroadcast === undefined)\n saOptions.acceptDelayedBroadcast = caVargs.options.acceptDelayedBroadcast;\n if (saOptions.returnTXIDOnly === undefined)\n saOptions.returnTXIDOnly = caVargs.options.returnTXIDOnly;\n if (saOptions.noSend === undefined)\n saOptions.noSend = caVargs.options.noSend;\n if (saOptions.sendWith === undefined)\n saOptions.sendWith = caVargs.options.sendWith;\n return (0, validationHelpers_1.validateSignActionArgs)(saArgs);\n}\n//# sourceMappingURL=signAction.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/signer/methods/signAction.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.signAction = signAction;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst index_client_1 = __webpack_require__(/*! ../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nconst createAction_1 = __webpack_require__(/*! ./createAction */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/signer/methods/createAction.js\");\nconst sdk_2 = __webpack_require__(/*! ../../sdk */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/index.js\");\nconst completeSignedTransaction_1 = __webpack_require__(/*! 
./completeSignedTransaction */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/signer/methods/completeSignedTransaction.js\");\nasync function signAction(wallet, auth, args) {\n const prior = wallet.pendingSignActions[args.reference];\n if (!prior)\n throw new index_client_1.sdk.WERR_NOT_IMPLEMENTED('recovery of out-of-session signAction reference data is not yet implemented.');\n if (!prior.dcr.inputBeef)\n throw new index_client_1.sdk.WERR_INTERNAL('prior.dcr.inputBeef must be valid');\n const vargs = mergePriorOptions(prior.args, args);\n prior.tx = await (0, completeSignedTransaction_1.completeSignedTransaction)(prior, vargs.spends, wallet);\n const { sendWithResults, notDelayedResults } = await (0, createAction_1.processAction)(prior, wallet, auth, vargs);\n const txid = prior.tx.id('hex');\n const beef = sdk_1.Beef.fromBinary(prior.dcr.inputBeef);\n beef.mergeTransaction(prior.tx);\n (0, completeSignedTransaction_1.verifyUnlockScripts)(txid, beef);\n const r = {\n txid: prior.tx.id('hex'),\n tx: vargs.options.returnTXIDOnly ? 
undefined : beef.toBinaryAtomic(txid),\n sendWithResults,\n notDelayedResults\n };\n return r;\n}\nfunction mergePriorOptions(caVargs, saArgs) {\n const saOptions = (saArgs.options || (saArgs.options = {}));\n if (saOptions.acceptDelayedBroadcast === undefined)\n saOptions.acceptDelayedBroadcast = caVargs.options.acceptDelayedBroadcast;\n if (saOptions.returnTXIDOnly === undefined)\n saOptions.returnTXIDOnly = caVargs.options.returnTXIDOnly;\n if (saOptions.noSend === undefined)\n saOptions.noSend = caVargs.options.noSend;\n if (saOptions.sendWith === undefined)\n saOptions.sendWith = caVargs.options.sendWith;\n return (0, sdk_2.validateSignActionArgs)(saArgs);\n}\n//# sourceMappingURL=signAction.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/signer/methods/signAction.js?\n}"); + +/***/ }), + +/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/StorageIdb.js": +/*!*******************************************************************************!*\ + !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/StorageIdb.js ***! + \*******************************************************************************/ +/***/ (function(__unused_webpack_module, exports, __webpack_require__) { + +"use strict"; +eval("{\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || (function () {\n var ownKeys = function(o) {\n ownKeys = Object.getOwnPropertyNames || function (o) {\n var ar = [];\n for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;\n return ar;\n };\n return ownKeys(o);\n };\n return function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== \"default\") __createBinding(result, mod, k[i]);\n __setModuleDefault(result, mod);\n return result;\n };\n})();\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.StorageIdb = void 0;\nconst idb_1 = __webpack_require__(/*! idb */ \"../node_modules/idb/build/index.js\");\nconst sdk = __importStar(__webpack_require__(/*! ../sdk/index */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/index.js\"));\nconst utilityHelpers_1 = __webpack_require__(/*! ../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst StorageProvider_1 = __webpack_require__(/*! ./StorageProvider */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/StorageProvider.js\");\nconst listActionsIdb_1 = __webpack_require__(/*! ./methods/listActionsIdb */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/listActionsIdb.js\");\nconst listOutputsIdb_1 = __webpack_require__(/*! ./methods/listOutputsIdb */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/listOutputsIdb.js\");\nconst reviewStatusIdb_1 = __webpack_require__(/*! ./methods/reviewStatusIdb */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/reviewStatusIdb.js\");\nconst purgeDataIdb_1 = __webpack_require__(/*! 
./methods/purgeDataIdb */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/purgeDataIdb.js\");\n/**\n * This class implements the `StorageProvider` interface using IndexedDB,\n * via the promises wrapper package `idb`.\n */\nclass StorageIdb extends StorageProvider_1.StorageProvider {\n constructor(options) {\n super(options);\n this.allStores = [\n 'certificates',\n 'certificate_fields',\n 'commissions',\n 'monitor_events',\n 'outputs',\n 'output_baskets',\n 'output_tags',\n 'output_tags_map',\n 'proven_txs',\n 'proven_tx_reqs',\n 'sync_states',\n 'transactions',\n 'tx_labels',\n 'tx_labels_map',\n 'users'\n ];\n this.dbName = `wallet-toolbox-${this.chain}net`;\n }\n /**\n * This method must be called at least once before any other method accesses the database,\n * and each time the schema may have updated.\n *\n * If the database has already been created in this context, `storageName` and `storageIdentityKey`\n * are ignored.\n *\n * @param storageName\n * @param storageIdentityKey\n * @returns\n */\n async migrate(storageName, storageIdentityKey) {\n const db = await this.verifyDB(storageName, storageIdentityKey);\n return db.version.toString();\n }\n /**\n * Following initial database initialization, this method verfies that db is ready for use.\n *\n * @throws `WERR_INVALID_OPERATION` if the database has not been initialized by a call to `migrate`.\n *\n * @param storageName\n * @param storageIdentityKey\n *\n * @returns\n */\n async verifyDB(storageName, storageIdentityKey) {\n if (this.db)\n return this.db;\n this.db = await this.initDB(storageName, storageIdentityKey);\n this._settings = (await this.db.getAll('settings'))[0];\n this.whenLastAccess = new Date();\n return this.db;\n }\n /**\n * Convert the standard optional `TrxToken` parameter into either a direct knex database instance,\n * or a Knex.Transaction as appropriate.\n */\n toDbTrx(stores, mode, trx) {\n if (trx) {\n const t = trx;\n return t;\n }\n else {\n if (!this.db)\n 
throw new Error('not initialized');\n const db = this.db;\n const trx = db.transaction(stores || this.allStores, mode || 'readwrite');\n this.whenLastAccess = new Date();\n return trx;\n }\n }\n /**\n * Called by `makeAvailable` to return storage `TableSettings`.\n * Since this is the first async method that must be called by all clients,\n * it is where async initialization occurs.\n *\n * After initialization, cached settings are returned.\n *\n * @param trx\n */\n async readSettings(trx) {\n await this.verifyDB();\n return this._settings;\n }\n async initDB(storageName, storageIdentityKey) {\n const chain = this.chain;\n const maxOutputScript = 1024;\n const db = await (0, idb_1.openDB)(this.dbName, 1, {\n upgrade(db, oldVersion, newVersion, transaction) {\n if (!db.objectStoreNames.contains('proven_txs')) {\n // proven_txs object store\n const provenTxsStore = db.createObjectStore('proven_txs', {\n keyPath: 'provenTxId',\n autoIncrement: true\n });\n provenTxsStore.createIndex('txid', 'txid', { unique: true });\n }\n if (!db.objectStoreNames.contains('proven_tx_reqs')) {\n // proven_tx_reqs object store\n const provenTxReqsStore = db.createObjectStore('proven_tx_reqs', {\n keyPath: 'provenTxReqId',\n autoIncrement: true\n });\n provenTxReqsStore.createIndex('provenTxId', 'provenTxId');\n provenTxReqsStore.createIndex('txid', 'txid', { unique: true });\n provenTxReqsStore.createIndex('status', 'status');\n provenTxReqsStore.createIndex('batch', 'batch');\n }\n if (!db.objectStoreNames.contains('users')) {\n const users = db.createObjectStore('users', {\n keyPath: 'userId',\n autoIncrement: true\n });\n users.createIndex('identityKey', 'identityKey', { unique: true });\n }\n if (!db.objectStoreNames.contains('certificates')) {\n // certificates object store\n const certificatesStore = db.createObjectStore('certificates', {\n keyPath: 'certificateId',\n autoIncrement: true\n });\n certificatesStore.createIndex('userId', 'userId');\n 
certificatesStore.createIndex('userId_type_certifier_serialNumber', ['userId', 'type', 'certifier', 'serialNumber'], { unique: true });\n }\n if (!db.objectStoreNames.contains('certificate_fields')) {\n // certificate_fields object store\n const certificateFieldsStore = db.createObjectStore('certificate_fields', {\n keyPath: ['certificateId', 'fieldName'] // Composite key\n });\n certificateFieldsStore.createIndex('userId', 'userId');\n certificateFieldsStore.createIndex('certificateId', 'certificateId');\n }\n if (!db.objectStoreNames.contains('output_baskets')) {\n // output_baskets object store\n const outputBasketsStore = db.createObjectStore('output_baskets', {\n keyPath: 'basketId',\n autoIncrement: true\n });\n outputBasketsStore.createIndex('userId', 'userId');\n outputBasketsStore.createIndex('name_userId', ['name', 'userId'], { unique: true });\n }\n if (!db.objectStoreNames.contains('transactions')) {\n // transactions object store\n const transactionsStore = db.createObjectStore('transactions', {\n keyPath: 'transactionId',\n autoIncrement: true\n });\n transactionsStore.createIndex('userId', 'userId');\n transactionsStore.createIndex('status', 'status'),\n transactionsStore.createIndex('status_userId', ['status', 'userId']);\n transactionsStore.createIndex('provenTxId', 'provenTxId');\n transactionsStore.createIndex('reference', 'reference', { unique: true });\n }\n if (!db.objectStoreNames.contains('commissions')) {\n // commissions object store\n const commissionsStore = db.createObjectStore('commissions', {\n keyPath: 'commissionId',\n autoIncrement: true\n });\n commissionsStore.createIndex('userId', 'userId');\n commissionsStore.createIndex('transactionId', 'transactionId', { unique: true });\n }\n if (!db.objectStoreNames.contains('outputs')) {\n // outputs object store\n const outputsStore = db.createObjectStore('outputs', {\n keyPath: 'outputId',\n autoIncrement: true\n });\n outputsStore.createIndex('userId', 'userId');\n 
outputsStore.createIndex('transactionId', 'transactionId');\n outputsStore.createIndex('basketId', 'basketId');\n outputsStore.createIndex('spentBy', 'spentBy');\n outputsStore.createIndex('transactionId_vout_userId', ['transactionId', 'vout', 'userId'], { unique: true });\n }\n if (!db.objectStoreNames.contains('output_tags')) {\n // output_tags object store\n const outputTagsStore = db.createObjectStore('output_tags', {\n keyPath: 'outputTagId',\n autoIncrement: true\n });\n outputTagsStore.createIndex('userId', 'userId');\n outputTagsStore.createIndex('tag_userId', ['tag', 'userId'], { unique: true });\n }\n if (!db.objectStoreNames.contains('output_tags_map')) {\n // output_tags_map object store\n const outputTagsMapStore = db.createObjectStore('output_tags_map', {\n keyPath: ['outputTagId', 'outputId']\n });\n outputTagsMapStore.createIndex('outputTagId', 'outputTagId');\n outputTagsMapStore.createIndex('outputId', 'outputId');\n }\n if (!db.objectStoreNames.contains('tx_labels')) {\n // tx_labels object store\n const txLabelsStore = db.createObjectStore('tx_labels', {\n keyPath: 'txLabelId',\n autoIncrement: true\n });\n txLabelsStore.createIndex('userId', 'userId');\n txLabelsStore.createIndex('label_userId', ['label', 'userId'], { unique: true });\n }\n if (!db.objectStoreNames.contains('tx_labels_map')) {\n // tx_labels_map object store\n const txLabelsMapStore = db.createObjectStore('tx_labels_map', {\n keyPath: ['txLabelId', 'transactionId']\n });\n txLabelsMapStore.createIndex('txLabelId', 'txLabelId');\n txLabelsMapStore.createIndex('transactionId', 'transactionId');\n }\n if (!db.objectStoreNames.contains('monitor_events')) {\n // monitor_events object store\n const monitorEventsStore = db.createObjectStore('monitor_events', {\n keyPath: 'id',\n autoIncrement: true\n });\n }\n if (!db.objectStoreNames.contains('sync_states')) {\n // sync_states object store\n const syncStatesStore = db.createObjectStore('sync_states', {\n keyPath: 'syncStateId',\n 
autoIncrement: true\n });\n syncStatesStore.createIndex('userId', 'userId');\n syncStatesStore.createIndex('refNum', 'refNum', { unique: true });\n syncStatesStore.createIndex('status', 'status');\n }\n if (!db.objectStoreNames.contains('settings')) {\n if (!storageName || !storageIdentityKey) {\n throw new sdk.WERR_INVALID_OPERATION('migrate must be called before first access');\n }\n const settings = db.createObjectStore('settings', {\n keyPath: 'storageIdentityKey'\n });\n const s = {\n created_at: new Date(),\n updated_at: new Date(),\n storageIdentityKey,\n storageName,\n chain,\n dbtype: 'IndexedDB',\n maxOutputScript\n };\n settings.put(s);\n }\n }\n });\n return db;\n }\n //\n // StorageProvider abstract methods\n //\n async reviewStatus(args) {\n return await (0, reviewStatusIdb_1.reviewStatusIdb)(this, args);\n }\n async purgeData(params, trx) {\n return await (0, purgeDataIdb_1.purgeDataIdb)(this, params, trx);\n }\n /**\n * Proceeds in three stages:\n * 1. Find an output that exactly funds the transaction (if exactSatoshis is not undefined).\n * 2. Find an output that overfunds by the least amount (targetSatoshis).\n * 3. Find an output that comes as close to funding as possible (targetSatoshis).\n * 4. 
Return undefined if no output is found.\n *\n * Outputs must belong to userId and basketId and have spendable true.\n * Their corresponding transaction must have status of 'completed', 'unproven', or 'sending' (if excludeSending is false).\n *\n * @param userId\n * @param basketId\n * @param targetSatoshis\n * @param exactSatoshis\n * @param excludeSending\n * @param transactionId\n * @returns next funding output to add to transaction or undefined if there are none.\n */\n async allocateChangeInput(userId, basketId, targetSatoshis, exactSatoshis, excludeSending, transactionId) {\n const dbTrx = this.toDbTrx(['outputs', 'transactions'], 'readwrite');\n try {\n const txStatus = ['completed', 'unproven'];\n if (!excludeSending)\n txStatus.push('sending');\n const args = {\n partial: { userId, basketId, spendable: true },\n txStatus,\n trx: dbTrx\n };\n const outputs = await this.findOutputs(args);\n let output;\n let scores = [];\n for (const o of outputs) {\n if (exactSatoshis && o.satoshis === exactSatoshis) {\n output = o;\n break;\n }\n const score = o.satoshis - targetSatoshis;\n scores.push({ output: o, score });\n }\n if (!output) {\n // sort scores increasing by score property\n scores = scores.sort((a, b) => a.score - b.score);\n // find the first score that is greater than or equal to 0\n const o = scores.find(s => s.score >= 0);\n if (o) {\n // stage 2 satisfied (minimally funded)\n output = o.output;\n }\n else if (scores.length > 0) {\n // stage 3 satisfied (minimally under-funded)\n output = scores.slice(-1)[0].output;\n }\n else {\n // no available funding outputs\n output = undefined;\n }\n }\n if (output) {\n // mark output as spent by transactionId\n await this.updateOutput(output.outputId, { spendable: false, spentBy: transactionId }, dbTrx);\n }\n return output;\n }\n finally {\n await dbTrx.done;\n }\n }\n async getProvenOrRawTx(txid, trx) {\n const r = {\n proven: undefined,\n rawTx: undefined,\n inputBEEF: undefined\n };\n r.proven = (0, 
utilityHelpers_1.verifyOneOrNone)(await this.findProvenTxs({ partial: { txid: txid }, trx }));\n if (!r.proven) {\n const req = (0, utilityHelpers_1.verifyOneOrNone)(await this.findProvenTxReqs({ partial: { txid: txid }, trx }));\n if (req && ['unsent', 'unmined', 'unconfirmed', 'sending', 'nosend', 'completed'].includes(req.status)) {\n r.rawTx = req.rawTx;\n r.inputBEEF = req.inputBEEF;\n }\n }\n return r;\n }\n async getRawTxOfKnownValidTransaction(txid, offset, length, trx) {\n if (!txid)\n return undefined;\n if (!this.isAvailable())\n await this.makeAvailable();\n let rawTx = undefined;\n const r = await this.getProvenOrRawTx(txid, trx);\n if (r.proven)\n rawTx = r.proven.rawTx;\n else\n rawTx = r.rawTx;\n if (rawTx && offset !== undefined && length !== undefined && Number.isInteger(offset) && Number.isInteger(length)) {\n rawTx = rawTx.slice(offset, offset + length);\n }\n return rawTx;\n }\n async getLabelsForTransactionId(transactionId, trx) {\n const maps = await this.findTxLabelMaps({ partial: { transactionId, isDeleted: false }, trx });\n const labelIds = maps.map(m => m.txLabelId);\n const labels = [];\n for (const txLabelId of labelIds) {\n const label = (0, utilityHelpers_1.verifyOne)(await this.findTxLabels({ partial: { txLabelId, isDeleted: false }, trx }));\n labels.push(label);\n }\n return labels;\n }\n async getTagsForOutputId(outputId, trx) {\n const maps = await this.findOutputTagMaps({ partial: { outputId, isDeleted: false }, trx });\n const tagIds = maps.map(m => m.outputTagId);\n const tags = [];\n for (const outputTagId of tagIds) {\n const tag = (0, utilityHelpers_1.verifyOne)(await this.findOutputTags({ partial: { outputTagId, isDeleted: false }, trx }));\n tags.push(tag);\n }\n return tags;\n }\n async listActions(auth, vargs) {\n if (!auth.userId)\n throw new sdk.WERR_UNAUTHORIZED();\n return await (0, listActionsIdb_1.listActionsIdb)(this, auth, vargs);\n }\n async listOutputs(auth, vargs) {\n if (!auth.userId)\n throw new 
sdk.WERR_UNAUTHORIZED();\n return await (0, listOutputsIdb_1.listOutputsIdb)(this, auth, vargs);\n }\n async countChangeInputs(userId, basketId, excludeSending) {\n const txStatus = ['completed', 'unproven'];\n if (!excludeSending)\n txStatus.push('sending');\n const args = { partial: { userId, basketId }, txStatus };\n let count = 0;\n await this.filterOutputs(args, r => {\n count++;\n });\n return count;\n }\n async findCertificatesAuth(auth, args) {\n if (!auth.userId || (args.partial.userId && args.partial.userId !== auth.userId))\n throw new sdk.WERR_UNAUTHORIZED();\n args.partial.userId = auth.userId;\n return await this.findCertificates(args);\n }\n async findOutputBasketsAuth(auth, args) {\n if (!auth.userId || (args.partial.userId && args.partial.userId !== auth.userId))\n throw new sdk.WERR_UNAUTHORIZED();\n args.partial.userId = auth.userId;\n return await this.findOutputBaskets(args);\n }\n async findOutputsAuth(auth, args) {\n if (!auth.userId || (args.partial.userId && args.partial.userId !== auth.userId))\n throw new sdk.WERR_UNAUTHORIZED();\n args.partial.userId = auth.userId;\n return await this.findOutputs(args);\n }\n async insertCertificateAuth(auth, certificate) {\n if (!auth.userId || (certificate.userId && certificate.userId !== auth.userId))\n throw new sdk.WERR_UNAUTHORIZED();\n certificate.userId = auth.userId;\n return await this.insertCertificate(certificate);\n }\n //\n // StorageReaderWriter abstract methods\n //\n async dropAllData() {\n await (0, idb_1.deleteDB)(this.dbName);\n }\n async filterOutputTagMaps(args, filtered, userId) {\n var _a, _b, _c, _d;\n const offset = ((_a = args.paged) === null || _a === void 0 ? void 0 : _a.offset) || 0;\n let skipped = 0;\n let count = 0;\n const dbTrx = this.toDbTrx(['output_tags_map'], 'readonly', args.trx);\n let cursor;\n if (((_b = args.partial) === null || _b === void 0 ? 
void 0 : _b.outputTagId) !== undefined) {\n cursor = await dbTrx.objectStore('output_tags_map').index('outputTagId').openCursor(args.partial.outputTagId);\n }\n else if (((_c = args.partial) === null || _c === void 0 ? void 0 : _c.outputId) !== undefined) {\n cursor = await dbTrx.objectStore('output_tags_map').index('outputId').openCursor(args.partial.outputId);\n }\n else {\n cursor = await dbTrx.objectStore('output_tags_map').openCursor();\n }\n let firstTime = true;\n while (cursor) {\n if (!firstTime)\n cursor = await cursor.continue();\n if (!cursor)\n break;\n firstTime = false;\n const r = cursor.value;\n if (args.since && args.since > r.updated_at)\n continue;\n if (args.tagIds && !args.tagIds.includes(r.outputTagId))\n continue;\n if (args.partial) {\n if (args.partial.outputTagId && r.outputTagId !== args.partial.outputTagId)\n continue;\n if (args.partial.outputId && r.outputId !== args.partial.outputId)\n continue;\n if (args.partial.created_at && r.created_at.getTime() !== args.partial.created_at.getTime())\n continue;\n if (args.partial.updated_at && r.updated_at.getTime() !== args.partial.updated_at.getTime())\n continue;\n if (args.partial.isDeleted !== undefined && r.isDeleted !== args.partial.isDeleted)\n continue;\n }\n if (userId !== undefined && r.txid) {\n const count = await this.countOutputTags({ partial: { userId, outputTagId: r.outputTagId }, trx: args.trx });\n if (count === 0)\n continue;\n }\n if (skipped < offset) {\n skipped++;\n continue;\n }\n filtered(r);\n count++;\n if (((_d = args.paged) === null || _d === void 0 ? 
void 0 : _d.limit) && count >= args.paged.limit)\n break;\n }\n if (!args.trx)\n await dbTrx.done;\n }\n async findOutputTagMaps(args) {\n const results = [];\n await this.filterOutputTagMaps(args, r => {\n results.push(this.validateEntity(r));\n });\n return results;\n }\n async filterProvenTxReqs(args, filtered, userId) {\n var _a, _b, _c, _d, _e, _f, _g;\n if (args.partial.rawTx)\n throw new sdk.WERR_INVALID_PARAMETER('args.partial.rawTx', `undefined. ProvenTxReqs may not be found by rawTx value.`);\n if (args.partial.inputBEEF)\n throw new sdk.WERR_INVALID_PARAMETER('args.partial.inputBEEF', `undefined. ProvenTxReqs may not be found by inputBEEF value.`);\n const offset = ((_a = args.paged) === null || _a === void 0 ? void 0 : _a.offset) || 0;\n let skipped = 0;\n let count = 0;\n const dbTrx = this.toDbTrx(['proven_tx_reqs'], 'readonly', args.trx);\n let cursor;\n if ((_b = args.partial) === null || _b === void 0 ? void 0 : _b.provenTxReqId) {\n cursor = await dbTrx.objectStore('proven_tx_reqs').openCursor(args.partial.provenTxReqId);\n }\n else if (((_c = args.partial) === null || _c === void 0 ? void 0 : _c.provenTxId) !== undefined) {\n cursor = await dbTrx.objectStore('proven_tx_reqs').index('provenTxId').openCursor(args.partial.provenTxId);\n }\n else if (((_d = args.partial) === null || _d === void 0 ? void 0 : _d.txid) !== undefined) {\n cursor = await dbTrx.objectStore('proven_tx_reqs').index('txid').openCursor(args.partial.txid);\n }\n else if (((_e = args.partial) === null || _e === void 0 ? void 0 : _e.status) !== undefined) {\n cursor = await dbTrx.objectStore('proven_tx_reqs').index('status').openCursor(args.partial.status);\n }\n else if (((_f = args.partial) === null || _f === void 0 ? 
void 0 : _f.batch) !== undefined) {\n cursor = await dbTrx.objectStore('proven_tx_reqs').index('batch').openCursor(args.partial.batch);\n }\n else {\n cursor = await dbTrx.objectStore('proven_tx_reqs').openCursor();\n }\n let firstTime = true;\n while (cursor) {\n if (!firstTime)\n cursor = await cursor.continue();\n if (!cursor)\n break;\n firstTime = false;\n const r = cursor.value;\n if (args.since && args.since > r.updated_at)\n continue;\n if (args.partial) {\n if (args.partial.provenTxReqId && r.provenTxReqId !== args.partial.provenTxReqId)\n continue;\n if (args.partial.provenTxId && r.provenTxId !== args.partial.provenTxId)\n continue;\n if (args.partial.created_at && r.created_at.getTime() !== args.partial.created_at.getTime())\n continue;\n if (args.partial.updated_at && r.updated_at.getTime() !== args.partial.updated_at.getTime())\n continue;\n if (args.partial.status && r.status !== args.partial.status)\n continue;\n if (args.partial.attempts !== undefined && r.attempts !== args.partial.attempts)\n continue;\n if (args.partial.notified !== undefined && r.notified !== args.partial.notified)\n continue;\n if (args.partial.txid && r.txid !== args.partial.txid)\n continue;\n if (args.partial.batch && r.batch !== args.partial.batch)\n continue;\n if (args.partial.history && r.history !== args.partial.history)\n continue;\n if (args.partial.notify && r.notify !== args.partial.notify)\n continue;\n }\n if (userId !== undefined && r.txid) {\n const count = await this.countTransactions({ partial: { userId, txid: r.txid }, trx: args.trx });\n if (count === 0)\n continue;\n }\n if (skipped < offset) {\n skipped++;\n continue;\n }\n filtered(r);\n count++;\n if (((_g = args.paged) === null || _g === void 0 ? 
void 0 : _g.limit) && count >= args.paged.limit)\n break;\n }\n if (!args.trx)\n await dbTrx.done;\n }\n async findProvenTxReqs(args) {\n const results = [];\n await this.filterProvenTxReqs(args, r => {\n results.push(this.validateEntity(r));\n });\n return results;\n }\n async filterProvenTxs(args, filtered, userId) {\n var _a, _b, _c, _d;\n if (args.partial.rawTx)\n throw new sdk.WERR_INVALID_PARAMETER('args.partial.rawTx', `undefined. ProvenTxs may not be found by rawTx value.`);\n if (args.partial.merklePath)\n throw new sdk.WERR_INVALID_PARAMETER('args.partial.merklePath', `undefined. ProvenTxs may not be found by merklePath value.`);\n const offset = ((_a = args.paged) === null || _a === void 0 ? void 0 : _a.offset) || 0;\n let skipped = 0;\n let count = 0;\n const dbTrx = this.toDbTrx(['proven_txs'], 'readonly', args.trx);\n let cursor;\n if ((_b = args.partial) === null || _b === void 0 ? void 0 : _b.provenTxId) {\n cursor = await dbTrx.objectStore('proven_txs').openCursor(args.partial.provenTxId);\n }\n else if (((_c = args.partial) === null || _c === void 0 ? 
void 0 : _c.txid) !== undefined) {\n cursor = await dbTrx.objectStore('proven_txs').index('txid').openCursor(args.partial.txid);\n }\n else {\n cursor = await dbTrx.objectStore('proven_txs').openCursor();\n }\n let firstTime = true;\n while (cursor) {\n if (!firstTime)\n cursor = await cursor.continue();\n if (!cursor)\n break;\n firstTime = false;\n const r = cursor.value;\n if (args.since && args.since > r.updated_at)\n continue;\n if (args.partial) {\n if (args.partial.provenTxId && r.provenTxId !== args.partial.provenTxId)\n continue;\n if (args.partial.created_at && r.created_at.getTime() !== args.partial.created_at.getTime())\n continue;\n if (args.partial.updated_at && r.updated_at.getTime() !== args.partial.updated_at.getTime())\n continue;\n if (args.partial.txid && r.txid !== args.partial.txid)\n continue;\n if (args.partial.height !== undefined && r.height !== args.partial.height)\n continue;\n if (args.partial.index !== undefined && r.index !== args.partial.index)\n continue;\n if (args.partial.blockHash && r.blockHash !== args.partial.blockHash)\n continue;\n if (args.partial.merkleRoot && r.merkleRoot !== args.partial.merkleRoot)\n continue;\n }\n if (userId !== undefined) {\n const count = await this.countTransactions({ partial: { userId, provenTxId: r.provenTxId }, trx: args.trx });\n if (count === 0)\n continue;\n }\n if (skipped < offset) {\n skipped++;\n continue;\n }\n filtered(r);\n count++;\n if (((_d = args.paged) === null || _d === void 0 ? void 0 : _d.limit) && count >= args.paged.limit)\n break;\n }\n if (!args.trx)\n await dbTrx.done;\n }\n async findProvenTxs(args) {\n const results = [];\n await this.filterProvenTxs(args, r => {\n results.push(this.validateEntity(r));\n });\n return results;\n }\n async filterTxLabelMaps(args, filtered, userId) {\n var _a, _b, _c, _d;\n const offset = ((_a = args.paged) === null || _a === void 0 ? 
void 0 : _a.offset) || 0;\n let skipped = 0;\n let count = 0;\n const dbTrx = this.toDbTrx(['tx_labels_map'], 'readonly', args.trx);\n let cursor;\n if (((_b = args.partial) === null || _b === void 0 ? void 0 : _b.transactionId) !== undefined) {\n cursor = await dbTrx.objectStore('tx_labels_map').index('transactionId').openCursor(args.partial.transactionId);\n }\n else if (((_c = args.partial) === null || _c === void 0 ? void 0 : _c.txLabelId) !== undefined) {\n cursor = await dbTrx.objectStore('tx_labels_map').index('txLabelId').openCursor(args.partial.txLabelId);\n }\n else {\n cursor = await dbTrx.objectStore('tx_labels_map').openCursor();\n }\n let firstTime = true;\n while (cursor) {\n if (!firstTime)\n cursor = await cursor.continue();\n if (!cursor)\n break;\n firstTime = false;\n const r = cursor.value;\n if (args.since && args.since > r.updated_at)\n continue;\n if (args.partial) {\n if (args.partial.txLabelId && r.txLabelId !== args.partial.txLabelId)\n continue;\n if (args.partial.transactionId && r.transactionId !== args.partial.transactionId)\n continue;\n if (args.partial.created_at && r.created_at.getTime() !== args.partial.created_at.getTime())\n continue;\n if (args.partial.updated_at && r.updated_at.getTime() !== args.partial.updated_at.getTime())\n continue;\n if (args.partial.isDeleted !== undefined && r.isDeleted !== args.partial.isDeleted)\n continue;\n }\n if (userId !== undefined) {\n const count = await this.countTxLabels({ partial: { userId, txLabelId: r.txLabelId }, trx: args.trx });\n if (count === 0)\n continue;\n }\n if (skipped < offset) {\n skipped++;\n continue;\n }\n filtered(r);\n count++;\n if (((_d = args.paged) === null || _d === void 0 ? 
void 0 : _d.limit) && count >= args.paged.limit)\n break;\n }\n if (!args.trx)\n await dbTrx.done;\n }\n async findTxLabelMaps(args) {\n const results = [];\n await this.filterTxLabelMaps(args, r => {\n results.push(this.validateEntity(r));\n });\n return results;\n }\n async countOutputTagMaps(args) {\n let count = 0;\n await this.filterOutputTagMaps(args, () => {\n count++;\n });\n return count;\n }\n async countProvenTxReqs(args) {\n let count = 0;\n await this.filterProvenTxReqs(args, () => {\n count++;\n });\n return count;\n }\n async countProvenTxs(args) {\n let count = 0;\n await this.filterProvenTxs(args, () => {\n count++;\n });\n return count;\n }\n async countTxLabelMaps(args) {\n let count = 0;\n await this.filterTxLabelMaps(args, () => {\n count++;\n });\n return count;\n }\n async insertCertificate(certificate, trx) {\n const e = await this.validateEntityForInsert(certificate, trx, undefined, ['isDeleted']);\n const fields = e.fields;\n if (e.fields)\n delete e.fields;\n if (e.certificateId === 0)\n delete e.certificateId;\n const dbTrx = this.toDbTrx(['certificates', 'certificate_fields'], 'readwrite', trx);\n const store = dbTrx.objectStore('certificates');\n try {\n const id = Number(await store.add(e));\n certificate.certificateId = id;\n if (fields) {\n for (const field of fields) {\n field.certificateId = certificate.certificateId;\n field.userId = certificate.userId;\n await this.insertCertificateField(field, dbTrx);\n }\n }\n }\n finally {\n if (!trx)\n await dbTrx.done;\n }\n return certificate.certificateId;\n }\n async insertCertificateField(certificateField, trx) {\n const e = await this.validateEntityForInsert(certificateField, trx);\n const dbTrx = this.toDbTrx(['certificate_fields'], 'readwrite', trx);\n const store = dbTrx.objectStore('certificate_fields');\n try {\n await store.add(e);\n }\n finally {\n if (!trx)\n await dbTrx.done;\n }\n }\n async insertCommission(commission, trx) {\n const e = await 
this.validateEntityForInsert(commission, trx);\n if (e.commissionId === 0)\n delete e.commissionId;\n const dbTrx = this.toDbTrx(['commissions'], 'readwrite', trx);\n const store = dbTrx.objectStore('commissions');\n try {\n const id = Number(await store.add(e));\n commission.commissionId = id;\n }\n finally {\n if (!trx)\n await dbTrx.done;\n }\n return commission.commissionId;\n }\n async insertMonitorEvent(event, trx) {\n const e = await this.validateEntityForInsert(event, trx);\n if (e.id === 0)\n delete e.id;\n const dbTrx = this.toDbTrx(['monitor_events'], 'readwrite', trx);\n const store = dbTrx.objectStore('monitor_events');\n try {\n const id = Number(await store.add(e));\n event.id = id;\n }\n finally {\n if (!trx)\n await dbTrx.done;\n }\n return event.id;\n }\n async insertOutput(output, trx) {\n const e = await this.validateEntityForInsert(output, trx);\n if (e.outputId === 0)\n delete e.outputId;\n const dbTrx = this.toDbTrx(['outputs'], 'readwrite', trx);\n const store = dbTrx.objectStore('outputs');\n try {\n const id = Number(await store.add(e));\n output.outputId = id;\n }\n finally {\n if (!trx)\n await dbTrx.done;\n }\n return output.outputId;\n }\n async insertOutputBasket(basket, trx) {\n const e = await this.validateEntityForInsert(basket, trx, undefined, ['isDeleted']);\n if (e.basketId === 0)\n delete e.basketId;\n const dbTrx = this.toDbTrx(['output_baskets'], 'readwrite', trx);\n const store = dbTrx.objectStore('output_baskets');\n try {\n const id = Number(await store.add(e));\n basket.basketId = id;\n }\n finally {\n if (!trx)\n await dbTrx.done;\n }\n return basket.basketId;\n }\n async insertOutputTag(tag, trx) {\n const e = await this.validateEntityForInsert(tag, trx, undefined, ['isDeleted']);\n if (e.outputTagId === 0)\n delete e.outputTagId;\n const dbTrx = this.toDbTrx(['output_tags'], 'readwrite', trx);\n const store = dbTrx.objectStore('output_tags');\n try {\n const id = Number(await store.add(e));\n tag.outputTagId = id;\n 
}\n finally {\n if (!trx)\n await dbTrx.done;\n }\n return tag.outputTagId;\n }\n async insertOutputTagMap(tagMap, trx) {\n const e = await this.validateEntityForInsert(tagMap, trx, undefined, ['isDeleted']);\n const dbTrx = this.toDbTrx(['output_tags_map'], 'readwrite', trx);\n const store = dbTrx.objectStore('output_tags_map');\n try {\n await store.add(e);\n }\n finally {\n if (!trx)\n await dbTrx.done;\n }\n }\n async insertProvenTx(tx, trx) {\n const e = await this.validateEntityForInsert(tx, trx);\n if (e.provenTxId === 0)\n delete e.provenTxId;\n const dbTrx = this.toDbTrx(['proven_txs'], 'readwrite', trx);\n const store = dbTrx.objectStore('proven_txs');\n try {\n const id = Number(await store.add(e));\n tx.provenTxId = id;\n }\n finally {\n if (!trx)\n await dbTrx.done;\n }\n return tx.provenTxId;\n }\n async insertProvenTxReq(tx, trx) {\n const e = await this.validateEntityForInsert(tx, trx);\n if (e.provenTxReqId === 0)\n delete e.provenTxReqId;\n const dbTrx = this.toDbTrx(['proven_tx_reqs'], 'readwrite', trx);\n const store = dbTrx.objectStore('proven_tx_reqs');\n try {\n const id = Number(await store.add(e));\n tx.provenTxReqId = id;\n }\n finally {\n if (!trx)\n await dbTrx.done;\n }\n return tx.provenTxReqId;\n }\n async insertSyncState(syncState, trx) {\n const e = await this.validateEntityForInsert(syncState, trx, ['when'], ['init']);\n if (e.syncStateId === 0)\n delete e.syncStateId;\n const dbTrx = this.toDbTrx(['sync_states'], 'readwrite', trx);\n const store = dbTrx.objectStore('sync_states');\n try {\n const id = Number(await store.add(e));\n syncState.syncStateId = id;\n }\n finally {\n if (!trx)\n await dbTrx.done;\n }\n return syncState.syncStateId;\n }\n async insertTransaction(tx, trx) {\n const e = await this.validateEntityForInsert(tx, trx);\n if (e.transactionId === 0)\n delete e.transactionId;\n const dbTrx = this.toDbTrx(['transactions'], 'readwrite', trx);\n const store = dbTrx.objectStore('transactions');\n try {\n const id = 
Number(await store.add(e));\n tx.transactionId = id;\n }\n finally {\n if (!trx)\n await dbTrx.done;\n }\n return tx.transactionId;\n }\n async insertTxLabel(label, trx) {\n const e = await this.validateEntityForInsert(label, trx, undefined, ['isDeleted']);\n if (e.txLabelId === 0)\n delete e.txLabelId;\n const dbTrx = this.toDbTrx(['tx_labels'], 'readwrite', trx);\n const store = dbTrx.objectStore('tx_labels');\n try {\n const id = Number(await store.add(e));\n label.txLabelId = id;\n }\n finally {\n if (!trx)\n await dbTrx.done;\n }\n return label.txLabelId;\n }\n async insertTxLabelMap(labelMap, trx) {\n const e = await this.validateEntityForInsert(labelMap, trx, undefined, ['isDeleted']);\n const dbTrx = this.toDbTrx(['tx_labels_map'], 'readwrite', trx);\n const store = dbTrx.objectStore('tx_labels_map');\n try {\n await store.add(e);\n }\n finally {\n if (!trx)\n await dbTrx.done;\n }\n }\n async insertUser(user, trx) {\n const e = await this.validateEntityForInsert(user, trx);\n if (e.userId === 0)\n delete e.userId;\n const dbTrx = this.toDbTrx(['users'], 'readwrite', trx);\n const store = dbTrx.objectStore('users');\n try {\n const id = Number(await store.add(e));\n user.userId = id;\n }\n finally {\n if (!trx)\n await dbTrx.done;\n }\n return user.userId;\n }\n async updateIdb(id, update, keyProp, storeName, trx) {\n if (update[keyProp] !== undefined && (Array.isArray(id) || update[keyProp] !== id)) {\n throw new sdk.WERR_INVALID_PARAMETER(`update.${keyProp}`, `undefined`);\n }\n const u = this.validatePartialForUpdate(update);\n const dbTrx = this.toDbTrx([storeName], 'readwrite', trx);\n const store = dbTrx.objectStore(storeName);\n const ids = Array.isArray(id) ? 
id : [id];\n try {\n for (const i of ids) {\n const e = await store.get(i);\n if (!e)\n throw new sdk.WERR_INVALID_PARAMETER('id', `an existing record to update ${keyProp} ${i} not found`);\n const v = {\n ...e,\n ...u\n };\n const uid = await store.put(v);\n if (uid !== i)\n throw new sdk.WERR_INTERNAL(`updated id ${uid} does not match original ${id}`);\n }\n }\n finally {\n if (!trx)\n await dbTrx.done;\n }\n return 1;\n }\n async updateIdbKey(key, update, keyProps, storeName, trx) {\n if (key.length !== keyProps.length)\n throw new sdk.WERR_INTERNAL(`key.length ${key.length} !== keyProps.length ${keyProps.length}`);\n for (let i = 0; i < key.length; i++) {\n if (update[keyProps[i]] !== undefined && update[keyProps[i]] !== key[i]) {\n throw new sdk.WERR_INVALID_PARAMETER(`update.${keyProps[i]}`, `undefined`);\n }\n }\n const u = this.validatePartialForUpdate(update);\n const dbTrx = this.toDbTrx([storeName], 'readwrite', trx);\n const store = dbTrx.objectStore(storeName);\n try {\n const e = await store.get(key);\n if (!e)\n throw new sdk.WERR_INVALID_PARAMETER('key', `an existing record to update ${keyProps.join(',')} ${key.join(',')} not found`);\n const v = {\n ...e,\n ...u\n };\n const uid = await store.put(v);\n for (let i = 0; i < key.length; i++) {\n if (uid[i] !== key[i])\n throw new sdk.WERR_INTERNAL(`updated key ${uid[i]} does not match original ${key[i]}`);\n }\n }\n finally {\n if (!trx)\n await dbTrx.done;\n }\n return 1;\n }\n async updateCertificate(id, update, trx) {\n return this.updateIdb(id, update, 'certificateId', 'certificates', trx);\n }\n async updateCertificateField(certificateId, fieldName, update, trx) {\n return this.updateIdbKey([certificateId, fieldName], update, ['certificateId', 'fieldName'], 'certificate_fields', trx);\n }\n async updateCommission(id, update, trx) {\n return this.updateIdb(id, update, 'commissionId', 'commissions', trx);\n }\n async updateMonitorEvent(id, update, trx) {\n return this.updateIdb(id, update, 'id', 
'monitor_events', trx);\n }\n async updateOutput(id, update, trx) {\n return this.updateIdb(id, update, 'outputId', 'outputs', trx);\n }\n async updateOutputBasket(id, update, trx) {\n return this.updateIdb(id, update, 'basketId', 'output_baskets', trx);\n }\n async updateOutputTag(id, update, trx) {\n return this.updateIdb(id, update, 'outputTagId', 'output_tags', trx);\n }\n async updateProvenTx(id, update, trx) {\n return this.updateIdb(id, update, 'provenTxId', 'proven_txs', trx);\n }\n async updateProvenTxReq(id, update, trx) {\n return this.updateIdb(id, update, 'provenTxReqId', 'proven_tx_reqs', trx);\n }\n async updateSyncState(id, update, trx) {\n return this.updateIdb(id, update, 'syncStateId', 'sync_states', trx);\n }\n async updateTransaction(id, update, trx) {\n return this.updateIdb(id, update, 'transactionId', 'transactions', trx);\n }\n async updateTxLabel(id, update, trx) {\n return this.updateIdb(id, update, 'txLabelId', 'tx_labels', trx);\n }\n async updateUser(id, update, trx) {\n return this.updateIdb(id, update, 'userId', 'users', trx);\n }\n async updateOutputTagMap(outputId, tagId, update, trx) {\n return this.updateIdbKey([tagId, outputId], update, ['outputTagId', 'outputId'], 'output_tags_map', trx);\n }\n async updateTxLabelMap(transactionId, txLabelId, update, trx) {\n return this.updateIdbKey([txLabelId, transactionId], update, ['txLabelId', 'transactionId'], 'tx_labels_map', trx);\n }\n //\n // StorageReader abstract methods\n //\n async destroy() {\n if (this.db) {\n this.db.close();\n }\n this.db = undefined;\n this._settings = undefined;\n }\n /**\n * @param scope\n * @param trx\n * @returns\n */\n async transaction(scope, trx) {\n if (trx)\n return await scope(trx);\n const stores = this.allStores;\n const db = await this.verifyDB();\n const tx = db.transaction(stores, 'readwrite');\n try {\n const r = await scope(tx);\n await tx.done;\n return r;\n }\n catch (err) {\n tx.abort();\n await tx.done;\n throw err;\n }\n }\n async 
filterCertificateFields(args, filtered) {\n var _a, _b, _c, _d;\n const offset = ((_a = args.paged) === null || _a === void 0 ? void 0 : _a.offset) || 0;\n let skipped = 0;\n let count = 0;\n const dbTrx = this.toDbTrx(['certificate_fields'], 'readonly', args.trx);\n let cursor;\n if (((_b = args.partial) === null || _b === void 0 ? void 0 : _b.certificateId) !== undefined) {\n cursor = await dbTrx\n .objectStore('certificate_fields')\n .index('certificateId')\n .openCursor(args.partial.certificateId);\n }\n else if (((_c = args.partial) === null || _c === void 0 ? void 0 : _c.userId) !== undefined) {\n cursor = await dbTrx.objectStore('certificate_fields').index('userId').openCursor(args.partial.userId);\n }\n else {\n cursor = await dbTrx.objectStore('certificate_fields').openCursor();\n }\n let firstTime = true;\n while (cursor) {\n if (!firstTime)\n cursor = await cursor.continue();\n if (!cursor)\n break;\n firstTime = false;\n const r = cursor.value;\n if (args.since && args.since > r.updated_at)\n continue;\n if (args.partial) {\n if (args.partial.userId && r.userId !== args.partial.userId)\n continue;\n if (args.partial.certificateId && r.certificateId !== args.partial.certificateId)\n continue;\n if (args.partial.created_at && r.created_at.getTime() !== args.partial.created_at.getTime())\n continue;\n if (args.partial.updated_at && r.updated_at.getTime() !== args.partial.updated_at.getTime())\n continue;\n if (args.partial.fieldName && r.fieldName !== args.partial.fieldName)\n continue;\n if (args.partial.fieldValue && r.fieldValue !== args.partial.fieldValue)\n continue;\n if (args.partial.masterKey && r.masterKey !== args.partial.masterKey)\n continue;\n }\n if (skipped < offset) {\n skipped++;\n continue;\n }\n filtered(r);\n count++;\n if (((_d = args.paged) === null || _d === void 0 ? 
void 0 : _d.limit) && count >= args.paged.limit)\n break;\n }\n if (!args.trx)\n await dbTrx.done;\n }\n async findCertificateFields(args) {\n const result = [];\n await this.filterCertificateFields(args, r => {\n result.push(this.validateEntity(r));\n });\n return result;\n }\n async filterCertificates(args, filtered) {\n var _a, _b, _c, _d, _e, _f, _g;\n const offset = ((_a = args.paged) === null || _a === void 0 ? void 0 : _a.offset) || 0;\n let skipped = 0;\n let count = 0;\n const dbTrx = this.toDbTrx(['certificates'], 'readonly', args.trx);\n let cursor;\n if ((_b = args.partial) === null || _b === void 0 ? void 0 : _b.certificateId) {\n cursor = await dbTrx.objectStore('certificates').openCursor(args.partial.certificateId);\n }\n else if (((_c = args.partial) === null || _c === void 0 ? void 0 : _c.userId) !== undefined) {\n if (((_d = args.partial) === null || _d === void 0 ? void 0 : _d.type) && ((_e = args.partial) === null || _e === void 0 ? void 0 : _e.certifier) && ((_f = args.partial) === null || _f === void 0 ? 
void 0 : _f.serialNumber)) {\n cursor = await dbTrx\n .objectStore('certificates')\n .index('userId_type_certifier_serialNumber')\n .openCursor([args.partial.userId, args.partial.type, args.partial.certifier, args.partial.serialNumber]);\n }\n else {\n cursor = await dbTrx.objectStore('certificates').index('userId').openCursor(args.partial.userId);\n }\n }\n else {\n cursor = await dbTrx.objectStore('certificates').openCursor();\n }\n let firstTime = true;\n while (cursor) {\n if (!firstTime)\n cursor = await cursor.continue();\n if (!cursor)\n break;\n firstTime = false;\n const r = cursor.value;\n if (args.since && args.since > r.updated_at)\n continue;\n if (args.certifiers && !args.certifiers.includes(r.certifier))\n continue;\n if (args.types && !args.types.includes(r.type))\n continue;\n if (args.partial) {\n if (args.partial.userId && r.userId !== args.partial.userId)\n continue;\n if (args.partial.certificateId && r.certificateId !== args.partial.certificateId)\n continue;\n if (args.partial.created_at && r.created_at.getTime() !== args.partial.created_at.getTime())\n continue;\n if (args.partial.updated_at && r.updated_at.getTime() !== args.partial.updated_at.getTime())\n continue;\n if (args.partial.type && r.type !== args.partial.type)\n continue;\n if (args.partial.serialNumber && r.serialNumber !== args.partial.serialNumber)\n continue;\n if (args.partial.certifier && r.certifier !== args.partial.certifier)\n continue;\n if (args.partial.subject && r.subject !== args.partial.subject)\n continue;\n if (args.partial.verifier && r.verifier !== args.partial.verifier)\n continue;\n if (args.partial.revocationOutpoint && r.revocationOutpoint !== args.partial.revocationOutpoint)\n continue;\n if (args.partial.signature && r.signature !== args.partial.signature)\n continue;\n if (args.partial.isDeleted && r.isDeleted !== args.partial.isDeleted)\n continue;\n }\n if (skipped < offset) {\n skipped++;\n continue;\n }\n filtered(r);\n count++;\n if (((_g = 
args.paged) === null || _g === void 0 ? void 0 : _g.limit) && count >= args.paged.limit)\n break;\n }\n if (!args.trx)\n await dbTrx.done;\n }\n async findCertificates(args) {\n const result = [];\n await this.filterCertificates(args, r => {\n result.push(this.validateEntity(r));\n });\n if (args.includeFields) {\n for (const c of result) {\n const fields = await this.findCertificateFields({ partial: { certificateId: c.certificateId }, trx: args.trx });\n c.fields = fields;\n }\n }\n return result;\n }\n async filterCommissions(args, filtered) {\n var _a, _b, _c, _d, _e;\n if (args.partial.lockingScript)\n throw new sdk.WERR_INVALID_PARAMETER('partial.lockingScript', `undefined. Commissions may not be found by lockingScript value.`);\n const offset = ((_a = args.paged) === null || _a === void 0 ? void 0 : _a.offset) || 0;\n let skipped = 0;\n let count = 0;\n const dbTrx = this.toDbTrx(['commissions'], 'readonly', args.trx);\n let cursor;\n if ((_b = args.partial) === null || _b === void 0 ? void 0 : _b.commissionId) {\n cursor = await dbTrx.objectStore('commissions').openCursor(args.partial.commissionId);\n }\n else if (((_c = args.partial) === null || _c === void 0 ? void 0 : _c.userId) !== undefined) {\n cursor = await dbTrx.objectStore('commissions').index('userId').openCursor(args.partial.userId);\n }\n else if (((_d = args.partial) === null || _d === void 0 ? 
void 0 : _d.transactionId) !== undefined) {\n cursor = await dbTrx.objectStore('commissions').index('transactionId').openCursor(args.partial.transactionId);\n }\n else {\n cursor = await dbTrx.objectStore('commissions').openCursor();\n }\n let firstTime = true;\n while (cursor) {\n if (!firstTime)\n cursor = await cursor.continue();\n if (!cursor)\n break;\n firstTime = false;\n const r = cursor.value;\n if (args.since && args.since > r.updated_at)\n continue;\n if (args.partial) {\n if (args.partial.commissionId && r.commissionId !== args.partial.commissionId)\n continue;\n if (args.partial.transactionId && r.transactionId !== args.partial.transactionId)\n continue;\n if (args.partial.userId && r.userId !== args.partial.userId)\n continue;\n if (args.partial.created_at && r.created_at.getTime() !== args.partial.created_at.getTime())\n continue;\n if (args.partial.updated_at && r.updated_at.getTime() !== args.partial.updated_at.getTime())\n continue;\n if (args.partial.satoshis !== undefined && r.satoshis !== args.partial.satoshis)\n continue;\n if (args.partial.keyOffset && r.keyOffset !== args.partial.keyOffset)\n continue;\n if (args.partial.isRedeemed !== undefined && r.isRedeemed !== args.partial.isRedeemed)\n continue;\n }\n if (skipped < offset) {\n skipped++;\n continue;\n }\n filtered(r);\n count++;\n if (((_e = args.paged) === null || _e === void 0 ? void 0 : _e.limit) && count >= args.paged.limit)\n break;\n }\n if (!args.trx)\n await dbTrx.done;\n }\n async findCommissions(args) {\n const result = [];\n await this.filterCommissions(args, r => {\n result.push(this.validateEntity(r));\n });\n return result;\n }\n async filterMonitorEvents(args, filtered) {\n var _a, _b, _c;\n const offset = ((_a = args.paged) === null || _a === void 0 ? void 0 : _a.offset) || 0;\n let skipped = 0;\n let count = 0;\n const dbTrx = this.toDbTrx(['monitor_events'], 'readonly', args.trx);\n let cursor;\n if ((_b = args.partial) === null || _b === void 0 ? 
void 0 : _b.id) {\n cursor = await dbTrx.objectStore('monitor_events').openCursor(args.partial.id);\n }\n else {\n cursor = await dbTrx.objectStore('monitor_events').openCursor();\n }\n let firstTime = true;\n while (cursor) {\n if (!firstTime)\n cursor = await cursor.continue();\n if (!cursor)\n break;\n firstTime = false;\n const r = cursor.value;\n if (args.since && args.since > r.updated_at)\n continue;\n if (args.partial) {\n if (args.partial.id && r.id !== args.partial.id)\n continue;\n if (args.partial.created_at && r.created_at.getTime() !== args.partial.created_at.getTime())\n continue;\n if (args.partial.updated_at && r.updated_at.getTime() !== args.partial.updated_at.getTime())\n continue;\n if (args.partial.event && r.event !== args.partial.event)\n continue;\n if (args.partial.details && r.details !== args.partial.details)\n continue;\n }\n if (skipped < offset) {\n skipped++;\n continue;\n }\n filtered(r);\n count++;\n if (((_c = args.paged) === null || _c === void 0 ? void 0 : _c.limit) && count >= args.paged.limit)\n break;\n }\n if (!args.trx)\n await dbTrx.done;\n }\n async findMonitorEvents(args) {\n const result = [];\n await this.filterMonitorEvents(args, r => {\n result.push(this.validateEntity(r));\n });\n return result;\n }\n async filterOutputBaskets(args, filtered) {\n var _a, _b, _c, _d, _e;\n const offset = ((_a = args.paged) === null || _a === void 0 ? void 0 : _a.offset) || 0;\n let skipped = 0;\n let count = 0;\n const dbTrx = this.toDbTrx(['output_baskets'], 'readonly', args.trx);\n let cursor;\n if ((_b = args.partial) === null || _b === void 0 ? void 0 : _b.basketId) {\n cursor = await dbTrx.objectStore('output_baskets').openCursor(args.partial.basketId);\n }\n else if (((_c = args.partial) === null || _c === void 0 ? void 0 : _c.userId) !== undefined) {\n if (((_d = args.partial) === null || _d === void 0 ? 
void 0 : _d.name) !== undefined) {\n cursor = await dbTrx\n .objectStore('output_baskets')\n .index('name_userId')\n .openCursor([args.partial.name, args.partial.userId]);\n }\n else {\n cursor = await dbTrx.objectStore('output_baskets').index('userId').openCursor(args.partial.userId);\n }\n }\n else {\n cursor = await dbTrx.objectStore('output_baskets').openCursor();\n }\n let firstTime = true;\n while (cursor) {\n if (!firstTime)\n cursor = await cursor.continue();\n if (!cursor)\n break;\n firstTime = false;\n const r = cursor.value;\n if (args.since && args.since > r.updated_at)\n continue;\n if (args.partial) {\n if (args.partial.basketId && r.basketId !== args.partial.basketId)\n continue;\n if (args.partial.userId && r.userId !== args.partial.userId)\n continue;\n if (args.partial.created_at && r.created_at.getTime() !== args.partial.created_at.getTime())\n continue;\n if (args.partial.updated_at && r.updated_at.getTime() !== args.partial.updated_at.getTime())\n continue;\n if (args.partial.name && r.name !== args.partial.name)\n continue;\n if (args.partial.numberOfDesiredUTXOs !== undefined &&\n r.numberOfDesiredUTXOs !== args.partial.numberOfDesiredUTXOs)\n continue;\n if (args.partial.minimumDesiredUTXOValue !== undefined &&\n r.numberOfDesiredSatoshis !== args.partial.minimumDesiredUTXOValue)\n continue;\n if (args.partial.isDeleted !== undefined && r.isDeleted !== args.partial.isDeleted)\n continue;\n }\n if (skipped < offset) {\n skipped++;\n continue;\n }\n filtered(r);\n count++;\n if (((_e = args.paged) === null || _e === void 0 ? 
void 0 : _e.limit) && count >= args.paged.limit)\n break;\n }\n if (!args.trx)\n await dbTrx.done;\n }\n async findOutputBaskets(args) {\n const result = [];\n await this.filterOutputBaskets(args, r => {\n result.push(this.validateEntity(r));\n });\n return result;\n }\n async filterOutputs(args, filtered, tagIds, isQueryModeAll) {\n var _a, _b, _c, _d, _e, _f, _g, _h, _j;\n // args.txStatus\n // args.noScript\n if (args.partial.lockingScript)\n throw new sdk.WERR_INVALID_PARAMETER('args.partial.lockingScript', `undefined. Outputs may not be found by lockingScript value.`);\n const offset = ((_a = args.paged) === null || _a === void 0 ? void 0 : _a.offset) || 0;\n let skipped = 0;\n let count = 0;\n const stores = ['outputs'];\n if (tagIds && tagIds.length > 0) {\n stores.push('output_tags_map');\n }\n if (args.txStatus) {\n stores.push('transactions');\n }\n const dbTrx = this.toDbTrx(stores, 'readonly', args.trx);\n let cursor;\n if ((_b = args.partial) === null || _b === void 0 ? void 0 : _b.outputId) {\n cursor = await dbTrx.objectStore('outputs').openCursor(args.partial.outputId);\n }\n else if (((_c = args.partial) === null || _c === void 0 ? void 0 : _c.userId) !== undefined) {\n if (((_d = args.partial) === null || _d === void 0 ? void 0 : _d.transactionId) && ((_e = args.partial) === null || _e === void 0 ? void 0 : _e.vout) !== undefined) {\n cursor = await dbTrx\n .objectStore('outputs')\n .index('transactionId_vout_userId')\n .openCursor([args.partial.transactionId, args.partial.vout, args.partial.userId]);\n }\n else {\n cursor = await dbTrx.objectStore('outputs').index('userId').openCursor(args.partial.userId);\n }\n }\n else if (((_f = args.partial) === null || _f === void 0 ? void 0 : _f.transactionId) !== undefined) {\n cursor = await dbTrx.objectStore('outputs').index('transactionId').openCursor(args.partial.transactionId);\n }\n else if (((_g = args.partial) === null || _g === void 0 ? 
void 0 : _g.basketId) !== undefined) {\n cursor = await dbTrx.objectStore('outputs').index('basketId').openCursor(args.partial.basketId);\n }\n else if (((_h = args.partial) === null || _h === void 0 ? void 0 : _h.spentBy) !== undefined) {\n cursor = await dbTrx.objectStore('outputs').index('spentBy').openCursor(args.partial.spentBy);\n }\n else {\n cursor = await dbTrx.objectStore('outputs').openCursor();\n }\n let firstTime = true;\n while (cursor) {\n if (!firstTime)\n cursor = await cursor.continue();\n if (!cursor)\n break;\n firstTime = false;\n const r = cursor.value;\n if (args.since && args.since > r.updated_at)\n continue;\n if (args.partial) {\n if (args.partial.outputId && r.outputId !== args.partial.outputId)\n continue;\n if (args.partial.userId && r.userId !== args.partial.userId)\n continue;\n if (args.partial.transactionId && r.transactionId !== args.partial.transactionId)\n continue;\n if (args.partial.basketId && r.basketId !== args.partial.basketId)\n continue;\n if (args.partial.created_at && r.created_at.getTime() !== args.partial.created_at.getTime())\n continue;\n if (args.partial.updated_at && r.updated_at.getTime() !== args.partial.updated_at.getTime())\n continue;\n if (args.partial.spendable !== undefined && r.spendable !== args.partial.spendable)\n continue;\n if (args.partial.change !== undefined && r.change !== args.partial.change)\n continue;\n if (args.partial.outputDescription && r.outputDescription !== args.partial.outputDescription)\n continue;\n if (args.partial.vout !== undefined && r.vout !== args.partial.vout)\n continue;\n if (args.partial.satoshis !== undefined && r.satoshis !== args.partial.satoshis)\n continue;\n if (args.partial.providedBy && r.providedBy !== args.partial.providedBy)\n continue;\n if (args.partial.purpose && r.purpose !== args.partial.purpose)\n continue;\n if (args.partial.type && r.type !== args.partial.type)\n continue;\n if (args.partial.txid && r.txid !== args.partial.txid)\n continue;\n if 
(args.partial.senderIdentityKey && r.senderIdentityKey !== args.partial.senderIdentityKey)\n continue;\n if (args.partial.derivationPrefix && r.derivationPrefix !== args.partial.derivationPrefix)\n continue;\n if (args.partial.derivationSuffix && r.derivationSuffix !== args.partial.derivationSuffix)\n continue;\n if (args.partial.customInstructions && r.customInstructions !== args.partial.customInstructions)\n continue;\n if (args.partial.spentBy && r.spentBy !== args.partial.spentBy)\n continue;\n if (args.partial.sequenceNumber !== undefined && r.sequenceNumber !== args.partial.sequenceNumber)\n continue;\n if (args.partial.scriptLength !== undefined && r.scriptLength !== args.partial.scriptLength)\n continue;\n if (args.partial.scriptOffset !== undefined && r.scriptOffset !== args.partial.scriptOffset)\n continue;\n }\n if (args.txStatus !== undefined) {\n const count = await this.countTransactions({\n partial: { transactionId: r.transactionId },\n status: args.txStatus,\n trx: dbTrx\n });\n if (count === 0)\n continue;\n }\n if (tagIds && tagIds.length > 0) {\n let ids = [...tagIds];\n await this.filterOutputTagMaps({ partial: { outputId: r.outputId }, trx: dbTrx }, tm => {\n if (ids.length > 0) {\n const i = ids.indexOf(tm.outputTagId);\n if (i >= 0) {\n if (isQueryModeAll) {\n ids.splice(i, 1);\n }\n else {\n ids = [];\n }\n }\n }\n });\n if (ids.length > 0)\n continue;\n }\n if (skipped < offset) {\n skipped++;\n continue;\n }\n if (args.noScript === true) {\n r.script = undefined;\n }\n filtered(r);\n count++;\n if (((_j = args.paged) === null || _j === void 0 ? 
void 0 : _j.limit) && count >= args.paged.limit)\n break;\n }\n if (!args.trx)\n await dbTrx.done;\n }\n async findOutputs(args, tagIds, isQueryModeAll) {\n const results = [];\n await this.filterOutputs(args, r => {\n results.push(this.validateEntity(r));\n }, tagIds, isQueryModeAll);\n for (const o of results) {\n if (!args.noScript) {\n await this.validateOutputScript(o);\n }\n else {\n o.lockingScript = undefined;\n }\n }\n return results;\n }\n async filterOutputTags(args, filtered) {\n var _a, _b, _c, _d, _e;\n const offset = ((_a = args.paged) === null || _a === void 0 ? void 0 : _a.offset) || 0;\n let skipped = 0;\n let count = 0;\n const dbTrx = this.toDbTrx(['output_tags'], 'readonly', args.trx);\n let cursor;\n if ((_b = args.partial) === null || _b === void 0 ? void 0 : _b.outputTagId) {\n cursor = await dbTrx.objectStore('output_tags').openCursor(args.partial.outputTagId);\n }\n else if (((_c = args.partial) === null || _c === void 0 ? void 0 : _c.userId) !== undefined) {\n if (((_d = args.partial) === null || _d === void 0 ? 
void 0 : _d.tag) !== undefined) {\n cursor = await dbTrx\n .objectStore('output_tags')\n .index('tag_userId')\n .openCursor([args.partial.tag, args.partial.userId]);\n }\n else {\n cursor = await dbTrx.objectStore('output_tags').index('userId').openCursor(args.partial.userId);\n }\n }\n else {\n cursor = await dbTrx.objectStore('output_tags').openCursor();\n }\n let firstTime = true;\n while (cursor) {\n if (!firstTime)\n cursor = await cursor.continue();\n if (!cursor)\n break;\n firstTime = false;\n const r = cursor.value;\n if (args.since && args.since > r.updated_at)\n continue;\n if (args.partial) {\n if (args.partial.outputTagId && r.outputTagId !== args.partial.outputTagId)\n continue;\n if (args.partial.userId && r.userId !== args.partial.userId)\n continue;\n if (args.partial.created_at && r.created_at.getTime() !== args.partial.created_at.getTime())\n continue;\n if (args.partial.updated_at && r.updated_at.getTime() !== args.partial.updated_at.getTime())\n continue;\n if (args.partial.tag && r.tag !== args.partial.tag)\n continue;\n if (args.partial.isDeleted !== undefined && r.isDeleted !== args.partial.isDeleted)\n continue;\n }\n if (skipped < offset) {\n skipped++;\n continue;\n }\n filtered(r);\n count++;\n if (((_e = args.paged) === null || _e === void 0 ? void 0 : _e.limit) && count >= args.paged.limit)\n break;\n }\n if (!args.trx)\n await dbTrx.done;\n }\n async findOutputTags(args) {\n const result = [];\n await this.filterOutputTags(args, r => {\n result.push(this.validateEntity(r));\n });\n return result;\n }\n async filterSyncStates(args, filtered) {\n var _a, _b, _c, _d, _e, _f, _g;\n if (args.partial.syncMap)\n throw new sdk.WERR_INVALID_PARAMETER('args.partial.syncMap', `undefined. SyncStates may not be found by syncMap value.`);\n const offset = ((_a = args.paged) === null || _a === void 0 ? 
void 0 : _a.offset) || 0;\n let skipped = 0;\n let count = 0;\n const dbTrx = this.toDbTrx(['sync_states'], 'readonly', args.trx);\n let cursor;\n if ((_b = args.partial) === null || _b === void 0 ? void 0 : _b.syncStateId) {\n cursor = await dbTrx.objectStore('sync_states').openCursor(args.partial.syncStateId);\n }\n else if (((_c = args.partial) === null || _c === void 0 ? void 0 : _c.userId) !== undefined) {\n cursor = await dbTrx.objectStore('sync_states').index('userId').openCursor(args.partial.userId);\n }\n else if (((_d = args.partial) === null || _d === void 0 ? void 0 : _d.refNum) !== undefined) {\n cursor = await dbTrx.objectStore('sync_states').index('refNum').openCursor(args.partial.refNum);\n }\n else if (((_e = args.partial) === null || _e === void 0 ? void 0 : _e.status) !== undefined) {\n cursor = await dbTrx.objectStore('sync_states').index('status').openCursor(args.partial.status);\n }\n else {\n cursor = await dbTrx.objectStore('sync_states').openCursor();\n }\n let firstTime = true;\n while (cursor) {\n if (!firstTime)\n cursor = await cursor.continue();\n if (!cursor)\n break;\n firstTime = false;\n const r = cursor.value;\n if (args.since && args.since > r.updated_at)\n continue;\n if (args.partial) {\n if (args.partial.syncStateId && r.syncStateId !== args.partial.syncStateId)\n continue;\n if (args.partial.userId && r.userId !== args.partial.userId)\n continue;\n if (args.partial.created_at && r.created_at.getTime() !== args.partial.created_at.getTime())\n continue;\n if (args.partial.updated_at && r.updated_at.getTime() !== args.partial.updated_at.getTime())\n continue;\n if (args.partial.storageIdentityKey && r.storageIdentityKey !== args.partial.storageIdentityKey)\n continue;\n if (args.partial.storageName && r.storageName !== args.partial.storageName)\n continue;\n if (args.partial.status && r.status !== args.partial.status)\n continue;\n if (args.partial.init !== undefined && r.init !== args.partial.init)\n continue;\n if 
(args.partial.refNum !== undefined && r.refNum !== args.partial.refNum)\n continue;\n if (args.partial.when && ((_f = r.when) === null || _f === void 0 ? void 0 : _f.getTime()) !== args.partial.when.getTime())\n continue;\n if (args.partial.satoshis !== undefined && r.satoshis !== args.partial.satoshis)\n continue;\n if (args.partial.errorLocal && r.errorLocale !== args.partial.errorLocal)\n continue;\n if (args.partial.errorOther && r.errorOther !== args.partial.errorOther)\n continue;\n }\n if (skipped < offset) {\n skipped++;\n continue;\n }\n filtered(r);\n count++;\n if (((_g = args.paged) === null || _g === void 0 ? void 0 : _g.limit) && count >= args.paged.limit)\n break;\n }\n if (!args.trx)\n await dbTrx.done;\n }\n async findSyncStates(args) {\n const result = [];\n await this.filterSyncStates(args, r => {\n result.push(this.validateEntity(r));\n });\n return result;\n }\n async filterTransactions(args, filtered, labelIds, isQueryModeAll) {\n var _a, _b, _c, _d, _e, _f, _g, _h;\n if (args.partial.rawTx)\n throw new sdk.WERR_INVALID_PARAMETER('args.partial.rawTx', `undefined. Transactions may not be found by rawTx value.`);\n if (args.partial.inputBEEF)\n throw new sdk.WERR_INVALID_PARAMETER('args.partial.inputBEEF', `undefined. Transactions may not be found by inputBEEF value.`);\n const offset = ((_a = args.paged) === null || _a === void 0 ? void 0 : _a.offset) || 0;\n let skipped = 0;\n let count = 0;\n const stores = ['transactions'];\n if (labelIds && labelIds.length > 0) {\n stores.push('tx_labels_map');\n }\n const dbTrx = this.toDbTrx(stores, 'readonly', args.trx);\n let cursor;\n if ((_b = args.partial) === null || _b === void 0 ? void 0 : _b.transactionId) {\n cursor = await dbTrx.objectStore('transactions').openCursor(args.partial.transactionId);\n }\n else if (((_c = args.partial) === null || _c === void 0 ? void 0 : _c.userId) !== undefined) {\n if (((_d = args.partial) === null || _d === void 0 ? 
void 0 : _d.status) !== undefined) {\n cursor = await dbTrx\n .objectStore('transactions')\n .index('status_userId')\n .openCursor([args.partial.status, args.partial.userId]);\n }\n else {\n cursor = await dbTrx.objectStore('transactions').index('userId').openCursor(args.partial.userId);\n }\n }\n else if (((_e = args.partial) === null || _e === void 0 ? void 0 : _e.status) !== undefined) {\n cursor = await dbTrx.objectStore('transactions').index('status').openCursor(args.partial.status);\n }\n else if (((_f = args.partial) === null || _f === void 0 ? void 0 : _f.provenTxId) !== undefined) {\n cursor = await dbTrx.objectStore('transactions').index('provenTxId').openCursor(args.partial.provenTxId);\n }\n else if (((_g = args.partial) === null || _g === void 0 ? void 0 : _g.reference) !== undefined) {\n cursor = await dbTrx.objectStore('transactions').index('reference').openCursor(args.partial.reference);\n }\n else {\n cursor = await dbTrx.objectStore('transactions').openCursor();\n }\n let firstTime = true;\n while (cursor) {\n if (!firstTime)\n cursor = await cursor.continue();\n if (!cursor)\n break;\n firstTime = false;\n const r = cursor.value;\n if (args.since && args.since > r.updated_at)\n continue;\n if (args.status && !args.status.includes(r.status))\n continue;\n if (args.partial) {\n if (args.partial.transactionId && r.transactionId !== args.partial.transactionId)\n continue;\n if (args.partial.userId && r.userId !== args.partial.userId)\n continue;\n if (args.partial.created_at && r.created_at.getTime() !== args.partial.created_at.getTime())\n continue;\n if (args.partial.updated_at && r.updated_at.getTime() !== args.partial.updated_at.getTime())\n continue;\n if (args.partial.provenTxId && r.provenTxId !== args.partial.provenTxId)\n continue;\n if (args.partial.status && r.status !== args.partial.status)\n continue;\n if (args.partial.reference && r.reference !== args.partial.reference)\n continue;\n if (args.partial.isOutgoing !== undefined && 
r.isOutgoing !== args.partial.isOutgoing)\n continue;\n if (args.partial.satoshis !== undefined && r.satoshis !== args.partial.satoshis)\n continue;\n if (args.partial.description && r.description !== args.partial.description)\n continue;\n if (args.partial.version !== undefined && r.version !== args.partial.version)\n continue;\n if (args.partial.lockTime !== undefined && r.lockTime !== args.partial.lockTime)\n continue;\n if (args.partial.txid && r.txid !== args.partial.txid)\n continue;\n }\n if (labelIds && labelIds.length > 0) {\n let ids = [...labelIds];\n await this.filterTxLabelMaps({ partial: { transactionId: r.transactionId }, trx: dbTrx }, lm => {\n if (ids.length > 0) {\n const i = ids.indexOf(lm.txLabelId);\n if (i >= 0) {\n if (isQueryModeAll) {\n ids.splice(i, 1);\n }\n else {\n ids = [];\n }\n }\n }\n });\n if (ids.length > 0)\n continue;\n }\n if (skipped < offset) {\n skipped++;\n continue;\n }\n filtered(r);\n count++;\n if (((_h = args.paged) === null || _h === void 0 ? void 0 : _h.limit) && count >= args.paged.limit)\n break;\n }\n if (!args.trx)\n await dbTrx.done;\n }\n async findTransactions(args, labelIds, isQueryModeAll) {\n const results = [];\n await this.filterTransactions(args, r => {\n results.push(this.validateEntity(r));\n }, labelIds, isQueryModeAll);\n for (const t of results) {\n if (!args.noRawTx) {\n await this.validateRawTransaction(t, args.trx);\n }\n else {\n t.rawTx = undefined;\n t.inputBEEF = undefined;\n }\n }\n return results;\n }\n async filterTxLabels(args, filtered) {\n var _a, _b, _c, _d, _e;\n const offset = ((_a = args.paged) === null || _a === void 0 ? void 0 : _a.offset) || 0;\n let skipped = 0;\n let count = 0;\n const dbTrx = this.toDbTrx(['tx_labels'], 'readonly', args.trx);\n let cursor;\n if ((_b = args.partial) === null || _b === void 0 ? 
void 0 : _b.txLabelId) {\n cursor = await dbTrx.objectStore('tx_labels').openCursor(args.partial.txLabelId);\n }\n else if (((_c = args.partial) === null || _c === void 0 ? void 0 : _c.userId) !== undefined) {\n if (((_d = args.partial) === null || _d === void 0 ? void 0 : _d.label) !== undefined) {\n cursor = await dbTrx\n .objectStore('tx_labels')\n .index('label_userId')\n .openCursor([args.partial.label, args.partial.userId]);\n }\n else {\n cursor = await dbTrx.objectStore('tx_labels').index('userId').openCursor(args.partial.userId);\n }\n }\n else {\n cursor = await dbTrx.objectStore('tx_labels').openCursor();\n }\n let firstTime = true;\n while (cursor) {\n if (!firstTime)\n cursor = await cursor.continue();\n if (!cursor)\n break;\n firstTime = false;\n const r = cursor.value;\n if (args.since && args.since > r.updated_at)\n continue;\n if (args.partial) {\n if (args.partial.txLabelId && r.txLabelId !== args.partial.txLabelId)\n continue;\n if (args.partial.userId && r.userId !== args.partial.userId)\n continue;\n if (args.partial.created_at && r.created_at.getTime() !== args.partial.created_at.getTime())\n continue;\n if (args.partial.updated_at && r.updated_at.getTime() !== args.partial.updated_at.getTime())\n continue;\n if (args.partial.label && r.label !== args.partial.label)\n continue;\n if (args.partial.isDeleted !== undefined && r.isDeleted !== args.partial.isDeleted)\n continue;\n }\n if (skipped < offset) {\n skipped++;\n continue;\n }\n filtered(r);\n count++;\n if (((_e = args.paged) === null || _e === void 0 ? void 0 : _e.limit) && count >= args.paged.limit)\n break;\n }\n if (!args.trx)\n await dbTrx.done;\n }\n async findTxLabels(args) {\n const result = [];\n await this.filterTxLabels(args, r => {\n result.push(this.validateEntity(r));\n });\n return result;\n }\n async filterUsers(args, filtered) {\n var _a, _b;\n const offset = ((_a = args.paged) === null || _a === void 0 ? 
void 0 : _a.offset) || 0;\n let skipped = 0;\n let count = 0;\n const dbTrx = this.toDbTrx(['users'], 'readonly', args.trx);\n let cursor = await dbTrx.objectStore('users').openCursor();\n let firstTime = true;\n while (cursor) {\n if (!firstTime)\n cursor = await cursor.continue();\n if (!cursor)\n break;\n firstTime = false;\n const r = cursor.value;\n if (args.since && args.since > r.updated_at)\n continue;\n if (args.partial) {\n if (args.partial.userId && r.userId !== args.partial.userId)\n continue;\n if (args.partial.created_at && r.created_at.getTime() !== args.partial.created_at.getTime())\n continue;\n if (args.partial.updated_at && r.updated_at.getTime() !== args.partial.updated_at.getTime())\n continue;\n if (args.partial.identityKey && r.identityKey !== args.partial.identityKey)\n continue;\n if (args.partial.activeStorage && r.activeStorage !== args.partial.activeStorage)\n continue;\n }\n if (skipped < offset) {\n skipped++;\n continue;\n }\n filtered(r);\n count++;\n if (((_b = args.paged) === null || _b === void 0 ? 
void 0 : _b.limit) && count >= args.paged.limit)\n break;\n }\n if (!args.trx)\n await dbTrx.done;\n }\n async findUsers(args) {\n const result = [];\n await this.filterUsers(args, r => {\n result.push(this.validateEntity(r));\n });\n return result;\n }\n async countCertificateFields(args) {\n let count = 0;\n await this.filterCertificateFields(args, () => {\n count++;\n });\n return count;\n }\n async countCertificates(args) {\n let count = 0;\n await this.filterCertificates(args, () => {\n count++;\n });\n return count;\n }\n async countCommissions(args) {\n let count = 0;\n await this.filterCommissions(args, () => {\n count++;\n });\n return count;\n }\n async countMonitorEvents(args) {\n let count = 0;\n await this.filterMonitorEvents(args, () => {\n count++;\n });\n return count;\n }\n async countOutputBaskets(args) {\n let count = 0;\n await this.filterOutputBaskets(args, () => {\n count++;\n });\n return count;\n }\n async countOutputs(args, tagIds, isQueryModeAll) {\n let count = 0;\n await this.filterOutputs({ ...args, noScript: true }, () => {\n count++;\n }, tagIds, isQueryModeAll);\n return count;\n }\n async countOutputTags(args) {\n let count = 0;\n await this.filterOutputTags(args, () => {\n count++;\n });\n return count;\n }\n async countSyncStates(args) {\n let count = 0;\n await this.filterSyncStates(args, () => {\n count++;\n });\n return count;\n }\n async countTransactions(args, labelIds, isQueryModeAll) {\n let count = 0;\n await this.filterTransactions({ ...args, noRawTx: true }, () => {\n count++;\n }, labelIds, isQueryModeAll);\n return count;\n }\n async countTxLabels(args) {\n let count = 0;\n await this.filterTxLabels(args, () => {\n count++;\n });\n return count;\n }\n async countUsers(args) {\n let count = 0;\n await this.filterUsers(args, () => {\n count++;\n });\n return count;\n }\n async getProvenTxsForUser(args) {\n const results = [];\n const fargs = {\n partial: {},\n since: args.since,\n paged: args.paged,\n trx: args.trx\n 
};\n await this.filterProvenTxs(fargs, r => {\n results.push(this.validateEntity(r));\n }, args.userId);\n return results;\n }\n async getProvenTxReqsForUser(args) {\n const results = [];\n const fargs = {\n partial: {},\n since: args.since,\n paged: args.paged,\n trx: args.trx\n };\n await this.filterProvenTxReqs(fargs, r => {\n results.push(this.validateEntity(r));\n }, args.userId);\n return results;\n }\n async getTxLabelMapsForUser(args) {\n const results = [];\n const fargs = {\n partial: {},\n since: args.since,\n paged: args.paged,\n trx: args.trx\n };\n await this.filterTxLabelMaps(fargs, r => {\n results.push(this.validateEntity(r));\n }, args.userId);\n return results;\n }\n async getOutputTagMapsForUser(args) {\n const results = [];\n const fargs = {\n partial: {},\n since: args.since,\n paged: args.paged,\n trx: args.trx\n };\n await this.filterOutputTagMaps(fargs, r => {\n results.push(this.validateEntity(r));\n }, args.userId);\n return results;\n }\n async verifyReadyForDatabaseAccess(trx) {\n if (!this._settings) {\n this._settings = await this.readSettings();\n }\n return this._settings.dbtype;\n }\n /**\n * Helper to force uniform behavior across database engines.\n * Use to process all individual records with time stamps or number[] retreived from database.\n */\n validateEntity(entity, dateFields, booleanFields) {\n entity.created_at = this.validateDate(entity.created_at);\n entity.updated_at = this.validateDate(entity.updated_at);\n if (dateFields) {\n for (const df of dateFields) {\n if (entity[df])\n entity[df] = this.validateDate(entity[df]);\n }\n }\n if (booleanFields) {\n for (const df of booleanFields) {\n if (entity[df] !== undefined)\n entity[df] = !!entity[df];\n }\n }\n for (const key of Object.keys(entity)) {\n const val = entity[key];\n if (val === null) {\n entity[key] = undefined;\n }\n else if (val instanceof Uint8Array) {\n entity[key] = Array.from(val);\n }\n }\n return entity;\n }\n /**\n * Helper to force uniform behavior 
across database engines.\n * Use to process all arrays of records with time stamps retreived from database.\n * @returns input `entities` array with contained values validated.\n */\n validateEntities(entities, dateFields, booleanFields) {\n for (let i = 0; i < entities.length; i++) {\n entities[i] = this.validateEntity(entities[i], dateFields, booleanFields);\n }\n return entities;\n }\n /**\n * Helper to force uniform behavior across database engines.\n * Use to process the update template for entities being updated.\n */\n validatePartialForUpdate(update, dateFields, booleanFields) {\n if (!this.dbtype)\n throw new sdk.WERR_INTERNAL('must call verifyReadyForDatabaseAccess first');\n const v = { ...update };\n if (v.created_at)\n v.created_at = this.validateEntityDate(v.created_at);\n if (v.updated_at)\n v.updated_at = this.validateEntityDate(v.updated_at);\n if (!v.created_at)\n delete v.created_at;\n if (!v.updated_at)\n v.updated_at = this.validateEntityDate(new Date());\n if (dateFields) {\n for (const df of dateFields) {\n if (v[df])\n v[df] = this.validateOptionalEntityDate(v[df]);\n }\n }\n if (booleanFields) {\n for (const df of booleanFields) {\n if (update[df] !== undefined)\n update[df] = !!update[df] ? 
1 : 0;\n }\n }\n for (const key of Object.keys(v)) {\n const val = v[key];\n if (Array.isArray(val) && (val.length === 0 || Number.isInteger(val[0]))) {\n v[key] = Uint8Array.from(val);\n }\n else if (val === null) {\n v[key] = undefined;\n }\n }\n this.isDirty = true;\n return v;\n }\n /**\n * Helper to force uniform behavior across database engines.\n * Use to process new entities being inserted into the database.\n */\n async validateEntityForInsert(entity, trx, dateFields, booleanFields) {\n await this.verifyReadyForDatabaseAccess(trx);\n const v = { ...entity };\n v.created_at = this.validateOptionalEntityDate(v.created_at, true);\n v.updated_at = this.validateOptionalEntityDate(v.updated_at, true);\n if (!v.created_at)\n delete v.created_at;\n if (!v.updated_at)\n delete v.updated_at;\n if (dateFields) {\n for (const df of dateFields) {\n if (v[df])\n v[df] = this.validateOptionalEntityDate(v[df]);\n }\n }\n if (booleanFields) {\n for (const df of booleanFields) {\n if (entity[df] !== undefined)\n entity[df] = !!entity[df] ? 
1 : 0;\n }\n }\n for (const key of Object.keys(v)) {\n const val = v[key];\n if (Array.isArray(val) && (val.length === 0 || Number.isInteger(val[0]))) {\n v[key] = Uint8Array.from(val);\n }\n else if (val === null) {\n v[key] = undefined;\n }\n }\n this.isDirty = true;\n return v;\n }\n async validateRawTransaction(t, trx) {\n // if there is no txid or there is a rawTransaction return what we have.\n if (t.rawTx || !t.txid)\n return;\n // rawTransaction is missing, see if we moved it ...\n const rawTx = await this.getRawTxOfKnownValidTransaction(t.txid, undefined, undefined, trx);\n if (!rawTx)\n return;\n t.rawTx = rawTx;\n }\n async adminStats(adminIdentityKey) {\n throw new Error('Method intentionally not implemented for personal storage.');\n }\n}\nexports.StorageIdb = StorageIdb;\n//# sourceMappingURL=StorageIdb.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/StorageIdb.js?\n}"); /***/ }), @@ -3608,7 +3410,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.StorageProvider = void 0;\nexports.validateStorageFeeModel = validateStorageFeeModel;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst getBeefForTransaction_1 = __webpack_require__(/*! ./methods/getBeefForTransaction */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/getBeefForTransaction.js\");\nconst processAction_1 = __webpack_require__(/*! ./methods/processAction */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/processAction.js\");\nconst attemptToPostReqsToNetwork_1 = __webpack_require__(/*! ./methods/attemptToPostReqsToNetwork */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/attemptToPostReqsToNetwork.js\");\nconst listCertificates_1 = __webpack_require__(/*! 
./methods/listCertificates */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/listCertificates.js\");\nconst createAction_1 = __webpack_require__(/*! ./methods/createAction */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/createAction.js\");\nconst internalizeAction_1 = __webpack_require__(/*! ./methods/internalizeAction */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/internalizeAction.js\");\nconst StorageReaderWriter_1 = __webpack_require__(/*! ./StorageReaderWriter */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/StorageReaderWriter.js\");\nconst entities_1 = __webpack_require__(/*! ./schema/entities */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/index.js\");\nconst validationHelpers_1 = __webpack_require__(/*! ../sdk/validationHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/validationHelpers.js\");\nconst WERR_errors_1 = __webpack_require__(/*! ../sdk/WERR_errors */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WERR_errors.js\");\nconst utilityHelpers_1 = __webpack_require__(/*! ../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst WalletError_1 = __webpack_require__(/*! ../sdk/WalletError */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WalletError.js\");\nconst utilityHelpers_noBuffer_1 = __webpack_require__(/*! 
../utility/utilityHelpers.noBuffer */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.noBuffer.js\");\nclass StorageProvider extends StorageReaderWriter_1.StorageReaderWriter {\n static defaultOptions() {\n return {\n feeModel: { model: 'sat/kb', value: 1 },\n commissionSatoshis: 0,\n commissionPubKeyHex: undefined\n };\n }\n static createStorageBaseOptions(chain) {\n const options = {\n ...StorageProvider.defaultOptions(),\n chain\n };\n return options;\n }\n constructor(options) {\n super(options);\n this.isDirty = false;\n this.feeModel = options.feeModel;\n this.commissionPubKeyHex = options.commissionPubKeyHex;\n this.commissionSatoshis = options.commissionSatoshis;\n this.maxRecursionDepth = 12;\n }\n isStorageProvider() {\n return true;\n }\n setServices(v) {\n this._services = v;\n }\n getServices() {\n if (!this._services)\n throw new WERR_errors_1.WERR_INVALID_OPERATION('Must setServices first.');\n return this._services;\n }\n async abortAction(auth, args) {\n if (!auth.userId)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('auth.userId', 'valid');\n const userId = auth.userId;\n let reference = args.reference;\n let txid = undefined;\n const r = await this.transaction(async (trx) => {\n let tx = (0, utilityHelpers_1.verifyOneOrNone)(await this.findTransactions({\n partial: { reference, userId },\n noRawTx: true,\n trx\n }));\n if (!tx && args.reference.length === 64) {\n // reference may also be a txid\n txid = reference;\n reference = undefined;\n tx = (0, utilityHelpers_1.verifyOneOrNone)(await this.findTransactions({\n partial: { txid, userId },\n noRawTx: true,\n trx\n }));\n }\n const unAbortableStatus = ['completed', 'failed', 'sending', 'unproven'];\n if (!tx || !tx.isOutgoing || -1 < unAbortableStatus.findIndex(s => s === tx.status))\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('reference', 'an inprocess, outgoing action that has not been signed and shared to the network.');\n await 
this.updateTransactionStatus('failed', tx.transactionId, userId, reference, trx);\n if (tx.txid) {\n const req = await entities_1.EntityProvenTxReq.fromStorageTxid(this, tx.txid, trx);\n if (req) {\n req.addHistoryNote({ what: 'abortAction', reference: args.reference });\n req.status = 'invalid';\n await req.updateStorageDynamicProperties(this, trx);\n }\n }\n const r = {\n aborted: true\n };\n return r;\n });\n return r;\n }\n async internalizeAction(auth, args) {\n return await (0, internalizeAction_1.internalizeAction)(this, auth, args);\n }\n /**\n * Given an array of transaction txids with current ProvenTxReq ready-to-share status,\n * lookup their ProvenTxReqApi req records.\n * For the txids with reqs and status still ready to send construct a single merged beef.\n *\n * @param txids\n * @param knownTxids\n * @param trx\n */\n async getReqsAndBeefToShareWithWorld(txids, knownTxids, trx) {\n const r = {\n beef: new sdk_1.Beef(),\n details: []\n };\n for (const txid of txids) {\n const d = {\n txid,\n status: 'unknown'\n };\n r.details.push(d);\n try {\n d.proven = (0, utilityHelpers_1.verifyOneOrNone)(await this.findProvenTxs({ partial: { txid }, trx }));\n if (d.proven)\n d.status = 'alreadySent';\n else {\n const alreadySentStatus = ['unmined', 'callback', 'unconfirmed', 'completed'];\n const readyToSendStatus = ['sending', 'unsent', 'nosend', 'unprocessed'];\n const errorStatus = ['unknown', 'nonfinal', 'invalid', 'doubleSpend'];\n d.req = (0, utilityHelpers_1.verifyOneOrNone)(await this.findProvenTxReqs({ partial: { txid }, trx }));\n if (!d.req) {\n d.status = 'error';\n d.error = `ERR_UNKNOWN_TXID: ${txid} was not found.`;\n }\n else if (errorStatus.indexOf(d.req.status) > -1) {\n d.status = 'error';\n d.error = `ERR_INVALID_PARAMETER: ${txid} is not ready to send.`;\n }\n else if (alreadySentStatus.indexOf(d.req.status) > -1) {\n d.status = 'alreadySent';\n }\n else if (readyToSendStatus.indexOf(d.req.status) > -1) {\n if (!d.req.rawTx || 
!d.req.inputBEEF) {\n d.status = 'error';\n d.error = `ERR_INTERNAL: ${txid} req is missing rawTx or beef.`;\n }\n else\n d.status = 'readyToSend';\n }\n else {\n d.status = 'error';\n d.error = `ERR_INTERNAL: ${txid} has unexpected req status ${d.req.status}`;\n }\n if (d.status === 'readyToSend') {\n await this.mergeReqToBeefToShareExternally(d.req, r.beef, knownTxids, trx);\n }\n }\n }\n catch (eu) {\n const e = WalletError_1.WalletError.fromUnknown(eu);\n d.error = `${e.name}: ${e.message}`;\n }\n }\n return r;\n }\n async mergeReqToBeefToShareExternally(req, mergeToBeef, knownTxids, trx) {\n const { rawTx, inputBEEF: beef } = req;\n if (!rawTx || !beef)\n throw new WERR_errors_1.WERR_INTERNAL(`req rawTx and beef must be valid.`);\n mergeToBeef.mergeRawTx((0, utilityHelpers_noBuffer_1.asArray)(rawTx));\n mergeToBeef.mergeBeef((0, utilityHelpers_noBuffer_1.asArray)(beef));\n const tx = sdk_1.Transaction.fromBinary((0, utilityHelpers_noBuffer_1.asArray)(rawTx));\n for (const input of tx.inputs) {\n if (!input.sourceTXID)\n throw new WERR_errors_1.WERR_INTERNAL(`req all transaction inputs must have valid sourceTXID`);\n const txid = input.sourceTXID;\n const btx = mergeToBeef.findTxid(txid);\n if (!btx) {\n if (knownTxids && knownTxids.indexOf(txid) > -1)\n mergeToBeef.mergeTxidOnly(txid);\n else\n await this.getValidBeefForKnownTxid(txid, mergeToBeef, undefined, knownTxids, trx);\n }\n }\n }\n /**\n * Checks if txid is a known valid ProvenTx and returns it if found.\n * Next checks if txid is a current ProvenTxReq and returns that if found.\n * If `newReq` is provided and an existing ProvenTxReq isn't found,\n * use `newReq` to create a new ProvenTxReq.\n *\n * This is safe \"findOrInsert\" operation using retry if unique index constraint\n * is violated by a race condition insert.\n *\n * @param txid\n * @param newReq\n * @param trx\n * @returns\n */\n async getProvenOrReq(txid, newReq, trx) {\n if (newReq && txid !== newReq.txid)\n throw new 
WERR_errors_1.WERR_INVALID_PARAMETER('newReq', `same txid`);\n const r = { proven: undefined, req: undefined };\n r.proven = (0, utilityHelpers_1.verifyOneOrNone)(await this.findProvenTxs({ partial: { txid }, trx }));\n if (r.proven)\n return r;\n for (let retry = 0;; retry++) {\n try {\n r.req = (0, utilityHelpers_1.verifyOneOrNone)(await this.findProvenTxReqs({ partial: { txid }, trx }));\n if (!r.req && !newReq)\n break;\n if (!r.req && newReq) {\n await this.insertProvenTxReq(newReq, trx);\n }\n if (r.req && newReq) {\n // Merge history and notify into existing\n const req1 = new entities_1.EntityProvenTxReq(r.req);\n req1.mergeHistory(newReq, undefined, true);\n req1.mergeNotifyTransactionIds(newReq);\n await req1.updateStorageDynamicProperties(this, trx);\n }\n break;\n }\n catch (eu) {\n if (retry > 0)\n throw eu;\n }\n }\n return r;\n }\n async updateTransactionsStatus(transactionIds, status, trx) {\n await this.transaction(async (trx) => {\n for (const id of transactionIds) {\n await this.updateTransactionStatus(status, id, undefined, undefined, trx);\n }\n }, trx);\n }\n /**\n * For all `status` values besides 'failed', just updates the transaction records status property.\n *\n * For 'status' of 'failed', attempts to make outputs previously allocated as inputs to this transaction usable again.\n *\n * @param status\n * @param transactionId\n * @param userId\n * @param reference\n * @param trx\n */\n async updateTransactionStatus(status, transactionId, userId, reference, trx) {\n if (!transactionId && !(userId && reference))\n throw new WERR_errors_1.WERR_MISSING_PARAMETER('either transactionId or userId and reference');\n await this.transaction(async (trx) => {\n const where = {};\n if (transactionId)\n where.transactionId = transactionId;\n if (userId)\n where.userId = userId;\n if (reference)\n where.reference = reference;\n const tx = (0, utilityHelpers_1.verifyOne)(await this.findTransactions({ partial: where, noRawTx: true, trx }));\n //if 
(tx.status === status)\n // no change required. Assume inputs and outputs spendable and spentBy are valid for status.\n //return\n // Once completed, this method cannot be used to \"uncomplete\" transaction.\n if ((status !== 'completed' && tx.status === 'completed') || tx.provenTxId)\n throw new WERR_errors_1.WERR_INVALID_OPERATION('The status of a \"completed\" transaction cannot be changed.');\n // It is not possible to un-fail a transaction. Information is lost and not recoverable.\n if (status !== 'failed' && tx.status === 'failed')\n throw new WERR_errors_1.WERR_INVALID_OPERATION(`A \"failed\" transaction may not be un-failed by this method.`);\n switch (status) {\n case 'failed':\n {\n // Attempt to make outputs previously allocated as inputs to this transaction usable again.\n // Only clear input's spentBy and reset spendable = true if it references this transaction\n const t = new entities_1.EntityTransaction(tx);\n const inputs = await t.getInputs(this, trx);\n for (const input of inputs) {\n // input is a prior output belonging to userId that reference this transaction either by `spentBy`\n // or by txid and vout.\n await this.updateOutput((0, utilityHelpers_1.verifyId)(input.outputId), { spendable: true, spentBy: undefined }, trx);\n }\n }\n break;\n case 'nosend':\n case 'unsigned':\n case 'unprocessed':\n case 'sending':\n case 'unproven':\n case 'completed':\n break;\n default:\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('status', `not be ${status}`);\n }\n await this.updateTransaction(tx.transactionId, { status }, trx);\n }, trx);\n }\n async createAction(auth, args) {\n if (!auth.userId)\n throw new WERR_errors_1.WERR_UNAUTHORIZED();\n return await (0, createAction_1.createAction)(this, auth, args);\n }\n async processAction(auth, args) {\n if (!auth.userId)\n throw new WERR_errors_1.WERR_UNAUTHORIZED();\n return await (0, processAction_1.processAction)(this, auth, args);\n }\n async attemptToPostReqsToNetwork(reqs, trx) {\n return await (0, 
attemptToPostReqsToNetwork_1.attemptToPostReqsToNetwork)(this, reqs, trx);\n }\n async listCertificates(auth, args) {\n return await (0, listCertificates_1.listCertificates)(this, auth, args);\n }\n async verifyKnownValidTransaction(txid, trx) {\n const { proven, rawTx } = await this.getProvenOrRawTx(txid, trx);\n return proven != undefined || rawTx != undefined;\n }\n async getValidBeefForKnownTxid(txid, mergeToBeef, trustSelf, knownTxids, trx, requiredLevels) {\n const beef = await this.getValidBeefForTxid(txid, mergeToBeef, trustSelf, knownTxids, trx, requiredLevels);\n if (!beef)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('txid', `known to storage. ${txid} is not known.`);\n return beef;\n }\n async getValidBeefForTxid(txid, mergeToBeef, trustSelf, knownTxids, trx, requiredLevels) {\n const beef = mergeToBeef || new sdk_1.Beef();\n const r = await this.getProvenOrRawTx(txid, trx);\n if (r.proven) {\n if (requiredLevels) {\n r.rawTx = r.proven.rawTx;\n }\n else {\n if (trustSelf === 'known')\n beef.mergeTxidOnly(txid);\n else {\n beef.mergeRawTx(r.proven.rawTx);\n const mp = new entities_1.EntityProvenTx(r.proven).getMerklePath();\n beef.mergeBump(mp);\n return beef;\n }\n }\n }\n if (!r.rawTx)\n return undefined;\n if (trustSelf === 'known') {\n beef.mergeTxidOnly(txid);\n }\n else {\n beef.mergeRawTx(r.rawTx);\n if (r.inputBEEF)\n beef.mergeBeef(r.inputBEEF);\n const tx = sdk_1.Transaction.fromBinary(r.rawTx);\n if (requiredLevels)\n requiredLevels--;\n for (const input of tx.inputs) {\n const btx = beef.findTxid(input.sourceTXID);\n if (!btx) {\n if (!requiredLevels && knownTxids && knownTxids.indexOf(input.sourceTXID) > -1)\n beef.mergeTxidOnly(input.sourceTXID);\n else\n await this.getValidBeefForKnownTxid(input.sourceTXID, beef, trustSelf, knownTxids, trx, requiredLevels);\n }\n }\n }\n return beef;\n }\n async getBeefForTransaction(txid, options) {\n const beef = await (0, getBeefForTransaction_1.getBeefForTransaction)(this, txid, options);\n return 
beef;\n }\n async findMonitorEventById(id, trx) {\n return (0, utilityHelpers_1.verifyOneOrNone)(await this.findMonitorEvents({ partial: { id }, trx }));\n }\n async relinquishCertificate(auth, args) {\n const vargs = (0, validationHelpers_1.validateRelinquishCertificateArgs)(args);\n const cert = (0, utilityHelpers_1.verifyOne)(await this.findCertificates({\n partial: {\n certifier: vargs.certifier,\n serialNumber: vargs.serialNumber,\n type: vargs.type\n }\n }));\n return await this.updateCertificate(cert.certificateId, {\n isDeleted: true\n });\n }\n async relinquishOutput(auth, args) {\n const vargs = (0, validationHelpers_1.validateRelinquishOutputArgs)(args);\n const { txid, vout } = (0, validationHelpers_1.parseWalletOutpoint)(vargs.output);\n const output = (0, utilityHelpers_1.verifyOne)(await this.findOutputs({ partial: { txid, vout } }));\n return await this.updateOutput(output.outputId, { basketId: undefined });\n }\n async processSyncChunk(args, chunk) {\n const user = (0, utilityHelpers_1.verifyTruthy)(await this.findUserByIdentityKey(args.identityKey));\n const ss = new entities_1.EntitySyncState((0, utilityHelpers_1.verifyOne)(await this.findSyncStates({\n partial: {\n storageIdentityKey: args.fromStorageIdentityKey,\n userId: user.userId\n }\n })));\n const r = await ss.processSyncChunk(this, args, chunk);\n return r;\n }\n /**\n * Handles storage changes when a valid MerklePath and mined block header are found for a ProvenTxReq txid.\n *\n * Performs the following storage updates (typically):\n * 1. Lookup the exising `ProvenTxReq` record for its rawTx\n * 2. Insert a new ProvenTx record using properties from `args` and rawTx, yielding a new provenTxId\n * 3. Update ProvenTxReq record with status 'completed' and new provenTxId value (and history of status changed)\n * 4. Unpack notify transactionIds from req and update each transaction's status to 'completed', provenTxId value.\n * 5. 
Update ProvenTxReq history again to record that transactions have been notified.\n * 6. Return results...\n *\n * Alterations of \"typically\" to handle:\n */\n async updateProvenTxReqWithNewProvenTx(args) {\n const req = await entities_1.EntityProvenTxReq.fromStorageId(this, args.provenTxReqId);\n let proven;\n if (req.provenTxId) {\n // Someone beat us to it, grab what we need for results...\n proven = new entities_1.EntityProvenTx((0, utilityHelpers_1.verifyOne)(await this.findProvenTxs({ partial: { txid: args.txid } })));\n }\n else {\n let isNew;\n ({ proven, isNew } = await this.transaction(async (trx) => {\n const { proven: api, isNew } = await this.findOrInsertProvenTx({\n created_at: new Date(),\n updated_at: new Date(),\n provenTxId: 0,\n txid: args.txid,\n height: args.height,\n index: args.index,\n merklePath: args.merklePath,\n rawTx: req.rawTx,\n blockHash: args.blockHash,\n merkleRoot: args.merkleRoot\n }, trx);\n proven = new entities_1.EntityProvenTx(api);\n if (isNew) {\n req.status = 'completed';\n req.provenTxId = proven.provenTxId;\n await req.updateStorageDynamicProperties(this, trx);\n // upate the transaction notifications outside of storage transaction....\n }\n return { proven, isNew };\n }));\n if (isNew) {\n const ids = req.notify.transactionIds || [];\n if (ids.length > 0) {\n for (const id of ids) {\n try {\n await this.updateTransaction(id, {\n provenTxId: proven.provenTxId,\n status: 'completed'\n });\n req.addHistoryNote({ what: 'notifyTxOfProof', transactionId: id });\n }\n catch (eu) {\n const { code, description } = WalletError_1.WalletError.fromUnknown(eu);\n const { provenTxId } = proven;\n req.addHistoryNote({ what: 'notifyTxOfProofError', id, provenTxId, code, description });\n }\n }\n await req.updateStorageDynamicProperties(this);\n }\n }\n }\n const r = {\n status: req.status,\n history: req.apiHistory,\n provenTxId: proven.provenTxId\n };\n return r;\n }\n /**\n * For each spendable output in the 'default' basket of the 
authenticated user,\n * verify that the output script, satoshis, vout and txid match that of an output\n * still in the mempool of at least one service provider.\n *\n * @returns object with invalidSpendableOutputs array. A good result is an empty array.\n */\n async confirmSpendableOutputs() {\n const invalidSpendableOutputs = [];\n const users = await this.findUsers({ partial: {} });\n for (const { userId } of users) {\n const defaultBasket = (0, utilityHelpers_1.verifyOne)(await this.findOutputBaskets({ partial: { userId, name: 'default' } }));\n const where = {\n userId,\n basketId: defaultBasket.basketId,\n spendable: true\n };\n const outputs = await this.findOutputs({ partial: where });\n const services = this.getServices();\n for (let i = outputs.length - 1; i >= 0; i--) {\n const o = outputs[i];\n const oid = (0, utilityHelpers_1.verifyId)(o.outputId);\n if (o.spendable) {\n let ok = false;\n if (o.lockingScript && o.lockingScript.length > 0) {\n const hash = services.hashOutputScript((0, utilityHelpers_noBuffer_1.asString)(o.lockingScript));\n const r = await services.getUtxoStatus(hash, undefined, `${o.txid}.${o.vout}`);\n if (r.isUtxo === true)\n ok = true;\n }\n if (!ok)\n invalidSpendableOutputs.push(o);\n }\n }\n }\n return { invalidSpendableOutputs };\n }\n async updateProvenTxReqDynamics(id, update, trx) {\n const partial = {};\n if (update['updated_at'])\n partial['updated_at'] = update['updated_at'];\n if (update['provenTxId'])\n partial['provenTxId'] = update['provenTxId'];\n if (update['status'])\n partial['status'] = update['status'];\n if (Number.isInteger(update['attempts']))\n partial['attempts'] = update['attempts'];\n if (update['notified'] !== undefined)\n partial['notified'] = update['notified'];\n if (update['batch'])\n partial['batch'] = update['batch'];\n if (update['history'])\n partial['history'] = update['history'];\n if (update['notify'])\n partial['notify'] = update['notify'];\n return await this.updateProvenTxReq(id, partial, 
trx);\n }\n async extendOutput(o, includeBasket = false, includeTags = false, trx) {\n const ox = o;\n if (includeBasket && ox.basketId)\n ox.basket = await this.findOutputBasketById(o.basketId, trx);\n if (includeTags) {\n ox.tags = await this.getTagsForOutputId(o.outputId);\n }\n return o;\n }\n async validateOutputScript(o, trx) {\n // without offset and length values return what we have (make no changes)\n if (!o.scriptLength || !o.scriptOffset || !o.txid)\n return;\n // if there is an outputScript and its length is the expected length return what we have.\n if (o.lockingScript && o.lockingScript.length === o.scriptLength)\n return;\n // outputScript is missing or has incorrect length...\n const script = await this.getRawTxOfKnownValidTransaction(o.txid, o.scriptOffset, o.scriptLength, trx);\n if (!script)\n return;\n o.lockingScript = script;\n }\n}\nexports.StorageProvider = StorageProvider;\nfunction validateStorageFeeModel(v) {\n const r = {\n model: 'sat/kb',\n value: 1\n };\n if (typeof v === 'object') {\n if (v.model !== 'sat/kb')\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('StorageFeeModel.model', `\"sat/kb\"`);\n if (typeof v.value === 'number') {\n r.value = v.value;\n }\n }\n return r;\n}\n//# sourceMappingURL=StorageProvider.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/StorageProvider.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.StorageProvider = void 0;\nexports.validateStorageFeeModel = validateStorageFeeModel;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst index_client_1 = __webpack_require__(/*! ../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nconst getBeefForTransaction_1 = __webpack_require__(/*! 
./methods/getBeefForTransaction */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/getBeefForTransaction.js\");\nconst processAction_1 = __webpack_require__(/*! ./methods/processAction */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/processAction.js\");\nconst attemptToPostReqsToNetwork_1 = __webpack_require__(/*! ./methods/attemptToPostReqsToNetwork */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/attemptToPostReqsToNetwork.js\");\nconst listCertificates_1 = __webpack_require__(/*! ./methods/listCertificates */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/listCertificates.js\");\nconst createAction_1 = __webpack_require__(/*! ./methods/createAction */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/createAction.js\");\nconst internalizeAction_1 = __webpack_require__(/*! ./methods/internalizeAction */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/internalizeAction.js\");\nconst StorageReaderWriter_1 = __webpack_require__(/*! ./StorageReaderWriter */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/StorageReaderWriter.js\");\nconst entities_1 = __webpack_require__(/*! 
./schema/entities */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/index.js\");\nclass StorageProvider extends StorageReaderWriter_1.StorageReaderWriter {\n static defaultOptions() {\n return {\n feeModel: { model: 'sat/kb', value: 1 },\n commissionSatoshis: 0,\n commissionPubKeyHex: undefined\n };\n }\n static createStorageBaseOptions(chain) {\n const options = {\n ...StorageProvider.defaultOptions(),\n chain\n };\n return options;\n }\n constructor(options) {\n super(options);\n this.isDirty = false;\n this.feeModel = options.feeModel;\n this.commissionPubKeyHex = options.commissionPubKeyHex;\n this.commissionSatoshis = options.commissionSatoshis;\n }\n isStorageProvider() {\n return true;\n }\n setServices(v) {\n this._services = v;\n }\n getServices() {\n if (!this._services)\n throw new index_client_1.sdk.WERR_INVALID_OPERATION('Must setServices first.');\n return this._services;\n }\n async abortAction(auth, args) {\n if (!auth.userId)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('auth.userId', 'valid');\n const userId = auth.userId;\n let reference = args.reference;\n let txid = undefined;\n const r = await this.transaction(async (trx) => {\n let tx = (0, index_client_1.verifyOneOrNone)(await this.findTransactions({\n partial: { reference, userId },\n noRawTx: true,\n trx\n }));\n if (!tx && args.reference.length === 64) {\n // reference may also be a txid\n txid = reference;\n reference = undefined;\n tx = (0, index_client_1.verifyOneOrNone)(await this.findTransactions({\n partial: { txid, userId },\n noRawTx: true,\n trx\n }));\n }\n const unAbortableStatus = ['completed', 'failed', 'sending', 'unproven'];\n if (!tx || !tx.isOutgoing || -1 < unAbortableStatus.findIndex(s => s === tx.status))\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('reference', 'an inprocess, outgoing action that has not been signed and shared to the network.');\n await this.updateTransactionStatus('failed', tx.transactionId, userId, 
reference, trx);\n if (tx.txid) {\n const req = await entities_1.EntityProvenTxReq.fromStorageTxid(this, tx.txid, trx);\n if (req) {\n req.addHistoryNote({ what: 'abortAction', reference: args.reference });\n req.status = 'invalid';\n await req.updateStorageDynamicProperties(this, trx);\n }\n }\n const r = {\n aborted: true\n };\n return r;\n });\n return r;\n }\n async internalizeAction(auth, args) {\n return await (0, internalizeAction_1.internalizeAction)(this, auth, args);\n }\n /**\n * Given an array of transaction txids with current ProvenTxReq ready-to-share status,\n * lookup their ProvenTxReqApi req records.\n * For the txids with reqs and status still ready to send construct a single merged beef.\n *\n * @param txids\n * @param knownTxids\n * @param trx\n */\n async getReqsAndBeefToShareWithWorld(txids, knownTxids, trx) {\n const r = {\n beef: new sdk_1.Beef(),\n details: []\n };\n for (const txid of txids) {\n const d = {\n txid,\n status: 'unknown'\n };\n r.details.push(d);\n try {\n d.proven = (0, index_client_1.verifyOneOrNone)(await this.findProvenTxs({ partial: { txid }, trx }));\n if (d.proven)\n d.status = 'alreadySent';\n else {\n const alreadySentStatus = ['unmined', 'callback', 'unconfirmed', 'completed'];\n const readyToSendStatus = ['sending', 'unsent', 'nosend', 'unprocessed'];\n const errorStatus = ['unknown', 'nonfinal', 'invalid', 'doubleSpend'];\n d.req = (0, index_client_1.verifyOneOrNone)(await this.findProvenTxReqs({ partial: { txid }, trx }));\n if (!d.req) {\n d.status = 'error';\n d.error = `ERR_UNKNOWN_TXID: ${txid} was not found.`;\n }\n else if (errorStatus.indexOf(d.req.status) > -1) {\n d.status = 'error';\n d.error = `ERR_INVALID_PARAMETER: ${txid} is not ready to send.`;\n }\n else if (alreadySentStatus.indexOf(d.req.status) > -1) {\n d.status = 'alreadySent';\n }\n else if (readyToSendStatus.indexOf(d.req.status) > -1) {\n if (!d.req.rawTx || !d.req.inputBEEF) {\n d.status = 'error';\n d.error = `ERR_INTERNAL: ${txid} req 
is missing rawTx or beef.`;\n }\n else\n d.status = 'readyToSend';\n }\n else {\n d.status = 'error';\n d.error = `ERR_INTERNAL: ${txid} has unexpected req status ${d.req.status}`;\n }\n if (d.status === 'readyToSend') {\n await this.mergeReqToBeefToShareExternally(d.req, r.beef, knownTxids, trx);\n }\n }\n }\n catch (eu) {\n const e = index_client_1.sdk.WalletError.fromUnknown(eu);\n d.error = `${e.name}: ${e.message}`;\n }\n }\n return r;\n }\n async mergeReqToBeefToShareExternally(req, mergeToBeef, knownTxids, trx) {\n const { rawTx, inputBEEF: beef } = req;\n if (!rawTx || !beef)\n throw new index_client_1.sdk.WERR_INTERNAL(`req rawTx and beef must be valid.`);\n mergeToBeef.mergeRawTx((0, index_client_1.asArray)(rawTx));\n mergeToBeef.mergeBeef((0, index_client_1.asArray)(beef));\n const tx = sdk_1.Transaction.fromBinary((0, index_client_1.asArray)(rawTx));\n for (const input of tx.inputs) {\n if (!input.sourceTXID)\n throw new index_client_1.sdk.WERR_INTERNAL(`req all transaction inputs must have valid sourceTXID`);\n const txid = input.sourceTXID;\n const btx = mergeToBeef.findTxid(txid);\n if (!btx) {\n if (knownTxids && knownTxids.indexOf(txid) > -1)\n mergeToBeef.mergeTxidOnly(txid);\n else\n await this.getValidBeefForKnownTxid(txid, mergeToBeef, undefined, knownTxids, trx);\n }\n }\n }\n /**\n * Checks if txid is a known valid ProvenTx and returns it if found.\n * Next checks if txid is a current ProvenTxReq and returns that if found.\n * If `newReq` is provided and an existing ProvenTxReq isn't found,\n * use `newReq` to create a new ProvenTxReq.\n *\n * This is safe \"findOrInsert\" operation using retry if unique index constraint\n * is violated by a race condition insert.\n *\n * @param txid\n * @param newReq\n * @param trx\n * @returns\n */\n async getProvenOrReq(txid, newReq, trx) {\n if (newReq && txid !== newReq.txid)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('newReq', `same txid`);\n const r = { proven: undefined, req: undefined };\n 
r.proven = (0, index_client_1.verifyOneOrNone)(await this.findProvenTxs({ partial: { txid }, trx }));\n if (r.proven)\n return r;\n for (let retry = 0;; retry++) {\n try {\n r.req = (0, index_client_1.verifyOneOrNone)(await this.findProvenTxReqs({ partial: { txid }, trx }));\n if (!r.req && !newReq)\n break;\n if (!r.req && newReq) {\n await this.insertProvenTxReq(newReq, trx);\n }\n if (r.req && newReq) {\n // Merge history and notify into existing\n const req1 = new entities_1.EntityProvenTxReq(r.req);\n req1.mergeHistory(newReq, undefined, true);\n req1.mergeNotifyTransactionIds(newReq);\n await req1.updateStorageDynamicProperties(this, trx);\n }\n break;\n }\n catch (eu) {\n if (retry > 0)\n throw eu;\n }\n }\n return r;\n }\n async updateTransactionsStatus(transactionIds, status, trx) {\n await this.transaction(async (trx) => {\n for (const id of transactionIds) {\n await this.updateTransactionStatus(status, id, undefined, undefined, trx);\n }\n }, trx);\n }\n /**\n * For all `status` values besides 'failed', just updates the transaction records status property.\n *\n * For 'status' of 'failed', attempts to make outputs previously allocated as inputs to this transaction usable again.\n *\n * @param status\n * @param transactionId\n * @param userId\n * @param reference\n * @param trx\n */\n async updateTransactionStatus(status, transactionId, userId, reference, trx) {\n if (!transactionId && !(userId && reference))\n throw new index_client_1.sdk.WERR_MISSING_PARAMETER('either transactionId or userId and reference');\n await this.transaction(async (trx) => {\n const where = {};\n if (transactionId)\n where.transactionId = transactionId;\n if (userId)\n where.userId = userId;\n if (reference)\n where.reference = reference;\n const tx = (0, index_client_1.verifyOne)(await this.findTransactions({ partial: where, noRawTx: true, trx }));\n //if (tx.status === status)\n // no change required. 
Assume inputs and outputs spendable and spentBy are valid for status.\n //return\n // Once completed, this method cannot be used to \"uncomplete\" transaction.\n if ((status !== 'completed' && tx.status === 'completed') || tx.provenTxId)\n throw new index_client_1.sdk.WERR_INVALID_OPERATION('The status of a \"completed\" transaction cannot be changed.');\n // It is not possible to un-fail a transaction. Information is lost and not recoverable.\n if (status !== 'failed' && tx.status === 'failed')\n throw new index_client_1.sdk.WERR_INVALID_OPERATION(`A \"failed\" transaction may not be un-failed by this method.`);\n switch (status) {\n case 'failed':\n {\n // Attempt to make outputs previously allocated as inputs to this transaction usable again.\n // Only clear input's spentBy and reset spendable = true if it references this transaction\n const t = new entities_1.EntityTransaction(tx);\n const inputs = await t.getInputs(this, trx);\n for (const input of inputs) {\n // input is a prior output belonging to userId that reference this transaction either by `spentBy`\n // or by txid and vout.\n await this.updateOutput((0, index_client_1.verifyId)(input.outputId), { spendable: true, spentBy: undefined }, trx);\n }\n }\n break;\n case 'nosend':\n case 'unsigned':\n case 'unprocessed':\n case 'sending':\n case 'unproven':\n case 'completed':\n break;\n default:\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('status', `not be ${status}`);\n }\n await this.updateTransaction(tx.transactionId, { status }, trx);\n }, trx);\n }\n async createAction(auth, args) {\n if (!auth.userId)\n throw new index_client_1.sdk.WERR_UNAUTHORIZED();\n return await (0, createAction_1.createAction)(this, auth, args);\n }\n async processAction(auth, args) {\n if (!auth.userId)\n throw new index_client_1.sdk.WERR_UNAUTHORIZED();\n return await (0, processAction_1.processAction)(this, auth, args);\n }\n async attemptToPostReqsToNetwork(reqs, trx) {\n return await (0, 
attemptToPostReqsToNetwork_1.attemptToPostReqsToNetwork)(this, reqs, trx);\n }\n async listCertificates(auth, args) {\n return await (0, listCertificates_1.listCertificates)(this, auth, args);\n }\n async verifyKnownValidTransaction(txid, trx) {\n const { proven, rawTx } = await this.getProvenOrRawTx(txid, trx);\n return proven != undefined || rawTx != undefined;\n }\n async getValidBeefForKnownTxid(txid, mergeToBeef, trustSelf, knownTxids, trx, requiredLevels) {\n const beef = await this.getValidBeefForTxid(txid, mergeToBeef, trustSelf, knownTxids, trx, requiredLevels);\n if (!beef)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('txid', `known to storage. ${txid} is not known.`);\n return beef;\n }\n async getValidBeefForTxid(txid, mergeToBeef, trustSelf, knownTxids, trx, requiredLevels) {\n const beef = mergeToBeef || new sdk_1.Beef();\n const r = await this.getProvenOrRawTx(txid, trx);\n if (r.proven) {\n if (requiredLevels) {\n r.rawTx = r.proven.rawTx;\n }\n else {\n if (trustSelf === 'known')\n beef.mergeTxidOnly(txid);\n else {\n beef.mergeRawTx(r.proven.rawTx);\n const mp = new entities_1.EntityProvenTx(r.proven).getMerklePath();\n beef.mergeBump(mp);\n return beef;\n }\n }\n }\n if (!r.rawTx)\n return undefined;\n if (trustSelf === 'known') {\n beef.mergeTxidOnly(txid);\n }\n else {\n beef.mergeRawTx(r.rawTx);\n if (r.inputBEEF)\n beef.mergeBeef(r.inputBEEF);\n const tx = sdk_1.Transaction.fromBinary(r.rawTx);\n if (requiredLevels)\n requiredLevels--;\n for (const input of tx.inputs) {\n const btx = beef.findTxid(input.sourceTXID);\n if (!btx) {\n if (!requiredLevels && knownTxids && knownTxids.indexOf(input.sourceTXID) > -1)\n beef.mergeTxidOnly(input.sourceTXID);\n else\n await this.getValidBeefForKnownTxid(input.sourceTXID, beef, trustSelf, knownTxids, trx, requiredLevels);\n }\n }\n }\n return beef;\n }\n async getBeefForTransaction(txid, options) {\n const beef = await (0, getBeefForTransaction_1.getBeefForTransaction)(this, txid, options);\n 
return beef;\n }\n async findMonitorEventById(id, trx) {\n return (0, index_client_1.verifyOneOrNone)(await this.findMonitorEvents({ partial: { id }, trx }));\n }\n async relinquishCertificate(auth, args) {\n const vargs = index_client_1.sdk.validateRelinquishCertificateArgs(args);\n const cert = (0, index_client_1.verifyOne)(await this.findCertificates({\n partial: {\n certifier: vargs.certifier,\n serialNumber: vargs.serialNumber,\n type: vargs.type\n }\n }));\n return await this.updateCertificate(cert.certificateId, {\n isDeleted: true\n });\n }\n async relinquishOutput(auth, args) {\n const vargs = index_client_1.sdk.validateRelinquishOutputArgs(args);\n const { txid, vout } = index_client_1.sdk.parseWalletOutpoint(vargs.output);\n const output = (0, index_client_1.verifyOne)(await this.findOutputs({ partial: { txid, vout } }));\n return await this.updateOutput(output.outputId, { basketId: undefined });\n }\n async processSyncChunk(args, chunk) {\n const user = (0, index_client_1.verifyTruthy)(await this.findUserByIdentityKey(args.identityKey));\n const ss = new entities_1.EntitySyncState((0, index_client_1.verifyOne)(await this.findSyncStates({\n partial: {\n storageIdentityKey: args.fromStorageIdentityKey,\n userId: user.userId\n }\n })));\n const r = await ss.processSyncChunk(this, args, chunk);\n return r;\n }\n /**\n * Handles storage changes when a valid MerklePath and mined block header are found for a ProvenTxReq txid.\n *\n * Performs the following storage updates (typically):\n * 1. Lookup the exising `ProvenTxReq` record for its rawTx\n * 2. Insert a new ProvenTx record using properties from `args` and rawTx, yielding a new provenTxId\n * 3. Update ProvenTxReq record with status 'completed' and new provenTxId value (and history of status changed)\n * 4. Unpack notify transactionIds from req and update each transaction's status to 'completed', provenTxId value.\n * 5. 
Update ProvenTxReq history again to record that transactions have been notified.\n * 6. Return results...\n *\n * Alterations of \"typically\" to handle:\n */\n async updateProvenTxReqWithNewProvenTx(args) {\n const req = await entities_1.EntityProvenTxReq.fromStorageId(this, args.provenTxReqId);\n let proven;\n if (req.provenTxId) {\n // Someone beat us to it, grab what we need for results...\n proven = new entities_1.EntityProvenTx((0, index_client_1.verifyOne)(await this.findProvenTxs({ partial: { txid: args.txid } })));\n }\n else {\n let isNew;\n ({ proven, isNew } = await this.transaction(async (trx) => {\n const { proven: api, isNew } = await this.findOrInsertProvenTx({\n created_at: new Date(),\n updated_at: new Date(),\n provenTxId: 0,\n txid: args.txid,\n height: args.height,\n index: args.index,\n merklePath: args.merklePath,\n rawTx: req.rawTx,\n blockHash: args.blockHash,\n merkleRoot: args.merkleRoot\n }, trx);\n proven = new entities_1.EntityProvenTx(api);\n if (isNew) {\n req.status = 'completed';\n req.provenTxId = proven.provenTxId;\n await req.updateStorageDynamicProperties(this, trx);\n // upate the transaction notifications outside of storage transaction....\n }\n return { proven, isNew };\n }));\n if (isNew) {\n const ids = req.notify.transactionIds || [];\n if (ids.length > 0) {\n for (const id of ids) {\n try {\n await this.updateTransaction(id, {\n provenTxId: proven.provenTxId,\n status: 'completed'\n });\n req.addHistoryNote({ what: 'notifyTxOfProof', transactionId: id });\n }\n catch (eu) {\n const { code, description } = index_client_1.sdk.WalletError.fromUnknown(eu);\n const { provenTxId } = proven;\n req.addHistoryNote({ what: 'notifyTxOfProofError', id, provenTxId, code, description });\n }\n }\n await req.updateStorageDynamicProperties(this);\n }\n }\n }\n const r = {\n status: req.status,\n history: req.apiHistory,\n provenTxId: proven.provenTxId\n };\n return r;\n }\n /**\n * For each spendable output in the 'default' basket of 
the authenticated user,\n * verify that the output script, satoshis, vout and txid match that of an output\n * still in the mempool of at least one service provider.\n *\n * @returns object with invalidSpendableOutputs array. A good result is an empty array.\n */\n async confirmSpendableOutputs() {\n const invalidSpendableOutputs = [];\n const users = await this.findUsers({ partial: {} });\n for (const { userId } of users) {\n const defaultBasket = (0, index_client_1.verifyOne)(await this.findOutputBaskets({ partial: { userId, name: 'default' } }));\n const where = {\n userId,\n basketId: defaultBasket.basketId,\n spendable: true\n };\n const outputs = await this.findOutputs({ partial: where });\n const services = this.getServices();\n for (let i = outputs.length - 1; i >= 0; i--) {\n const o = outputs[i];\n const oid = (0, index_client_1.verifyId)(o.outputId);\n if (o.spendable) {\n let ok = false;\n if (o.lockingScript && o.lockingScript.length > 0) {\n const hash = services.hashOutputScript((0, index_client_1.asString)(o.lockingScript));\n const r = await services.getUtxoStatus(hash, undefined, `${o.txid}.${o.vout}`);\n if (r.isUtxo === true)\n ok = true;\n }\n if (!ok)\n invalidSpendableOutputs.push(o);\n }\n }\n }\n return { invalidSpendableOutputs };\n }\n async updateProvenTxReqDynamics(id, update, trx) {\n const partial = {};\n if (update['updated_at'])\n partial['updated_at'] = update['updated_at'];\n if (update['provenTxId'])\n partial['provenTxId'] = update['provenTxId'];\n if (update['status'])\n partial['status'] = update['status'];\n if (Number.isInteger(update['attempts']))\n partial['attempts'] = update['attempts'];\n if (update['notified'] !== undefined)\n partial['notified'] = update['notified'];\n if (update['batch'])\n partial['batch'] = update['batch'];\n if (update['history'])\n partial['history'] = update['history'];\n if (update['notify'])\n partial['notify'] = update['notify'];\n return await this.updateProvenTxReq(id, partial, trx);\n }\n 
async extendOutput(o, includeBasket = false, includeTags = false, trx) {\n const ox = o;\n if (includeBasket && ox.basketId)\n ox.basket = await this.findOutputBasketById(o.basketId, trx);\n if (includeTags) {\n ox.tags = await this.getTagsForOutputId(o.outputId);\n }\n return o;\n }\n async validateOutputScript(o, trx) {\n // without offset and length values return what we have (make no changes)\n if (!o.scriptLength || !o.scriptOffset || !o.txid)\n return;\n // if there is an outputScript and its length is the expected length return what we have.\n if (o.lockingScript && o.lockingScript.length === o.scriptLength)\n return;\n // outputScript is missing or has incorrect length...\n const script = await this.getRawTxOfKnownValidTransaction(o.txid, o.scriptOffset, o.scriptLength, trx);\n if (!script)\n return;\n o.lockingScript = script;\n }\n}\nexports.StorageProvider = StorageProvider;\nfunction validateStorageFeeModel(v) {\n const r = {\n model: 'sat/kb',\n value: 1\n };\n if (typeof v === 'object') {\n if (v.model !== 'sat/kb')\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('StorageFeeModel.model', `\"sat/kb\"`);\n if (typeof v.value === 'number') {\n r.value = v.value;\n }\n }\n return r;\n}\n//# sourceMappingURL=StorageProvider.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/StorageProvider.js?\n}"); /***/ }), @@ -3630,7 +3432,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.StorageReaderWriter = void 0;\nconst utilityHelpers_1 = __webpack_require__(/*! ../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst StorageReader_1 = __webpack_require__(/*! ./StorageReader */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/StorageReader.js\");\nconst EntityBase_1 = __webpack_require__(/*! 
./schema/entities/EntityBase */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityBase.js\");\nclass StorageReaderWriter extends StorageReader_1.StorageReader {\n constructor(options) {\n super(options);\n }\n async setActive(auth, newActiveStorageIdentityKey) {\n return await this.updateUser((0, utilityHelpers_1.verifyId)(auth.userId), {\n activeStorage: newActiveStorageIdentityKey\n });\n }\n async findCertificateById(id, trx) {\n return (0, utilityHelpers_1.verifyOneOrNone)(await this.findCertificates({ partial: { certificateId: id }, trx }));\n }\n async findCommissionById(id, trx) {\n return (0, utilityHelpers_1.verifyOneOrNone)(await this.findCommissions({ partial: { commissionId: id }, trx }));\n }\n async findOutputById(id, trx, noScript) {\n return (0, utilityHelpers_1.verifyOneOrNone)(await this.findOutputs({ partial: { outputId: id }, noScript, trx }));\n }\n async findOutputBasketById(id, trx) {\n return (0, utilityHelpers_1.verifyOneOrNone)(await this.findOutputBaskets({ partial: { basketId: id }, trx }));\n }\n async findProvenTxById(id, trx) {\n return (0, utilityHelpers_1.verifyOneOrNone)(await this.findProvenTxs({ partial: { provenTxId: id }, trx }));\n }\n async findProvenTxReqById(id, trx) {\n return (0, utilityHelpers_1.verifyOneOrNone)(await this.findProvenTxReqs({ partial: { provenTxReqId: id }, trx }));\n }\n async findSyncStateById(id, trx) {\n return (0, utilityHelpers_1.verifyOneOrNone)(await this.findSyncStates({ partial: { syncStateId: id }, trx }));\n }\n async findTransactionById(id, trx, noRawTx) {\n return (0, utilityHelpers_1.verifyOneOrNone)(await this.findTransactions({\n partial: { transactionId: id },\n noRawTx,\n trx\n }));\n }\n async findTxLabelById(id, trx) {\n return (0, utilityHelpers_1.verifyOneOrNone)(await this.findTxLabels({ partial: { txLabelId: id }, trx }));\n }\n async findOutputTagById(id, trx) {\n return (0, utilityHelpers_1.verifyOneOrNone)(await this.findOutputTags({ partial: { 
outputTagId: id }, trx }));\n }\n async findUserById(id, trx) {\n return (0, utilityHelpers_1.verifyOneOrNone)(await this.findUsers({ partial: { userId: id }, trx }));\n }\n async findOrInsertUser(identityKey, trx) {\n let user;\n let isNew = false;\n for (let retry = 0;; retry++) {\n try {\n user = (0, utilityHelpers_1.verifyOneOrNone)(await this.findUsers({ partial: { identityKey }, trx }));\n //console.log(`findOrInsertUser oneOrNone: ${JSON.stringify(user || 'none').slice(0,512)}`)\n if (user)\n break;\n const now = new Date();\n user = {\n created_at: now,\n updated_at: new Date('1971-01-01'), // Default constructed user, sync will override with any updated user.\n userId: 0,\n identityKey,\n activeStorage: this.getSettings().storageIdentityKey\n };\n user.userId = await this.insertUser(user, trx);\n isNew = true;\n // Add default change basket for new user.\n await this.insertOutputBasket({\n created_at: now,\n updated_at: new Date('1971-01-01'), // Default constructed basket, sync will override with any updated basket.\n basketId: 0,\n userId: user.userId,\n name: 'default',\n numberOfDesiredUTXOs: 144,\n minimumDesiredUTXOValue: 32,\n isDeleted: false\n });\n break;\n }\n catch (eu) {\n console.log(`findOrInsertUser catch: ${JSON.stringify(eu).slice(0, 512)}`);\n if (retry > 0)\n throw eu;\n }\n }\n return { user, isNew };\n }\n async findOrInsertTransaction(newTx, trx) {\n let tx;\n let isNew = false;\n for (let retry = 0;; retry++) {\n try {\n tx = (0, utilityHelpers_1.verifyOneOrNone)(await this.findTransactions({\n partial: { userId: newTx.userId, txid: newTx.txid },\n trx\n }));\n if (tx)\n break;\n newTx.transactionId = await this.insertTransaction(newTx, trx);\n isNew = true;\n tx = newTx;\n break;\n }\n catch (eu) {\n if (retry > 0)\n throw eu;\n }\n }\n return { tx, isNew };\n }\n async findOrInsertOutputBasket(userId, name, trx) {\n const partial = { name, userId };\n for (let retry = 0;; retry++) {\n try {\n const now = new Date();\n let basket = 
(0, utilityHelpers_1.verifyOneOrNone)(await this.findOutputBaskets({ partial, trx }));\n if (!basket) {\n basket = {\n ...partial,\n minimumDesiredUTXOValue: 0,\n numberOfDesiredUTXOs: 0,\n basketId: 0,\n created_at: now,\n updated_at: now,\n isDeleted: false\n };\n basket.basketId = await this.insertOutputBasket(basket, trx);\n }\n if (basket.isDeleted) {\n await this.updateOutputBasket((0, utilityHelpers_1.verifyId)(basket.basketId), {\n isDeleted: false\n });\n }\n return basket;\n }\n catch (eu) {\n if (retry > 0)\n throw eu;\n }\n }\n }\n async findOrInsertTxLabel(userId, label, trx) {\n const partial = { label, userId };\n for (let retry = 0;; retry++) {\n try {\n const now = new Date();\n let txLabel = (0, utilityHelpers_1.verifyOneOrNone)(await this.findTxLabels({ partial, trx }));\n if (!txLabel) {\n txLabel = {\n ...partial,\n txLabelId: 0,\n created_at: now,\n updated_at: now,\n isDeleted: false\n };\n txLabel.txLabelId = await this.insertTxLabel(txLabel, trx);\n }\n if (txLabel.isDeleted) {\n await this.updateTxLabel((0, utilityHelpers_1.verifyId)(txLabel.txLabelId), {\n isDeleted: false\n });\n }\n return txLabel;\n }\n catch (eu) {\n if (retry > 0)\n throw eu;\n }\n }\n }\n async findOrInsertTxLabelMap(transactionId, txLabelId, trx) {\n const partial = { transactionId, txLabelId };\n for (let retry = 0;; retry++) {\n try {\n const now = new Date();\n let txLabelMap = (0, utilityHelpers_1.verifyOneOrNone)(await this.findTxLabelMaps({ partial, trx }));\n if (!txLabelMap) {\n txLabelMap = {\n ...partial,\n created_at: now,\n updated_at: now,\n isDeleted: false\n };\n await this.insertTxLabelMap(txLabelMap, trx);\n }\n if (txLabelMap.isDeleted) {\n await this.updateTxLabelMap(transactionId, txLabelId, {\n isDeleted: false\n });\n }\n return txLabelMap;\n }\n catch (eu) {\n if (retry > 0)\n throw eu;\n }\n }\n }\n async findOrInsertOutputTag(userId, tag, trx) {\n const partial = { tag, userId };\n for (let retry = 0;; retry++) {\n try {\n const now = new 
Date();\n let outputTag = (0, utilityHelpers_1.verifyOneOrNone)(await this.findOutputTags({ partial, trx }));\n if (!outputTag) {\n outputTag = {\n ...partial,\n outputTagId: 0,\n created_at: now,\n updated_at: now,\n isDeleted: false\n };\n outputTag.outputTagId = await this.insertOutputTag(outputTag, trx);\n }\n if (outputTag.isDeleted) {\n await this.updateOutputTag((0, utilityHelpers_1.verifyId)(outputTag.outputTagId), {\n isDeleted: false\n });\n }\n return outputTag;\n }\n catch (eu) {\n if (retry > 0)\n throw eu;\n }\n }\n }\n async findOrInsertOutputTagMap(outputId, outputTagId, trx) {\n const partial = { outputId, outputTagId };\n for (let retry = 0;; retry++) {\n try {\n const now = new Date();\n let outputTagMap = (0, utilityHelpers_1.verifyOneOrNone)(await this.findOutputTagMaps({ partial, trx }));\n if (!outputTagMap) {\n outputTagMap = {\n ...partial,\n created_at: now,\n updated_at: now,\n isDeleted: false\n };\n await this.insertOutputTagMap(outputTagMap, trx);\n }\n if (outputTagMap.isDeleted) {\n await this.updateOutputTagMap(outputId, outputTagId, {\n isDeleted: false\n });\n }\n return outputTagMap;\n }\n catch (eu) {\n if (retry > 0)\n throw eu;\n }\n }\n }\n async findOrInsertSyncStateAuth(auth, storageIdentityKey, storageName) {\n const partial = { userId: auth.userId, storageIdentityKey, storageName };\n for (let retry = 0;; retry++) {\n try {\n const now = new Date();\n let syncState = (0, utilityHelpers_1.verifyOneOrNone)(await this.findSyncStates({ partial }));\n if (!syncState) {\n syncState = {\n ...partial,\n created_at: now,\n updated_at: now,\n syncStateId: 0,\n status: 'unknown',\n init: false,\n refNum: (0, utilityHelpers_1.randomBytesBase64)(12),\n syncMap: JSON.stringify((0, EntityBase_1.createSyncMap)())\n };\n await this.insertSyncState(syncState);\n return { syncState, isNew: true };\n }\n return { syncState, isNew: false };\n }\n catch (eu) {\n if (retry > 0)\n throw eu;\n }\n }\n }\n async findOrInsertProvenTxReq(newReq, 
trx) {\n let req;\n let isNew = false;\n for (let retry = 0;; retry++) {\n try {\n req = (0, utilityHelpers_1.verifyOneOrNone)(await this.findProvenTxReqs({ partial: { txid: newReq.txid }, trx }));\n if (req)\n break;\n newReq.provenTxReqId = await this.insertProvenTxReq(newReq, trx);\n isNew = true;\n req = newReq;\n break;\n }\n catch (eu) {\n if (retry > 0)\n throw eu;\n }\n }\n return { req, isNew };\n }\n async findOrInsertProvenTx(newProven, trx) {\n let proven;\n let isNew = false;\n for (let retry = 0;; retry++) {\n try {\n proven = (0, utilityHelpers_1.verifyOneOrNone)(await this.findProvenTxs({ partial: { txid: newProven.txid }, trx }));\n if (proven)\n break;\n newProven.provenTxId = await this.insertProvenTx(newProven, trx);\n isNew = true;\n proven = newProven;\n break;\n }\n catch (eu) {\n if (retry > 0)\n throw eu;\n }\n }\n return { proven, isNew };\n }\n async tagOutput(partial, tag, trx) {\n await this.transaction(async (trx) => {\n const o = (0, utilityHelpers_1.verifyOne)(await this.findOutputs({ partial, noScript: true, trx }));\n const outputTag = await this.findOrInsertOutputTag(o.userId, tag, trx);\n await this.findOrInsertOutputTagMap((0, utilityHelpers_1.verifyId)(o.outputId), (0, utilityHelpers_1.verifyId)(outputTag.outputTagId), trx);\n }, trx);\n }\n}\nexports.StorageReaderWriter = StorageReaderWriter;\n//# sourceMappingURL=StorageReaderWriter.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/StorageReaderWriter.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.StorageReaderWriter = void 0;\nconst utilityHelpers_1 = __webpack_require__(/*! ../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst entities_1 = __webpack_require__(/*! ./schema/entities */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/index.js\");\nconst StorageReader_1 = __webpack_require__(/*! 
./StorageReader */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/StorageReader.js\");\nclass StorageReaderWriter extends StorageReader_1.StorageReader {\n constructor(options) {\n super(options);\n }\n async setActive(auth, newActiveStorageIdentityKey) {\n return await this.updateUser((0, utilityHelpers_1.verifyId)(auth.userId), {\n activeStorage: newActiveStorageIdentityKey\n });\n }\n async findCertificateById(id, trx) {\n return (0, utilityHelpers_1.verifyOneOrNone)(await this.findCertificates({ partial: { certificateId: id }, trx }));\n }\n async findCommissionById(id, trx) {\n return (0, utilityHelpers_1.verifyOneOrNone)(await this.findCommissions({ partial: { commissionId: id }, trx }));\n }\n async findOutputById(id, trx, noScript) {\n return (0, utilityHelpers_1.verifyOneOrNone)(await this.findOutputs({ partial: { outputId: id }, noScript, trx }));\n }\n async findOutputBasketById(id, trx) {\n return (0, utilityHelpers_1.verifyOneOrNone)(await this.findOutputBaskets({ partial: { basketId: id }, trx }));\n }\n async findProvenTxById(id, trx) {\n return (0, utilityHelpers_1.verifyOneOrNone)(await this.findProvenTxs({ partial: { provenTxId: id }, trx }));\n }\n async findProvenTxReqById(id, trx) {\n return (0, utilityHelpers_1.verifyOneOrNone)(await this.findProvenTxReqs({ partial: { provenTxReqId: id }, trx }));\n }\n async findSyncStateById(id, trx) {\n return (0, utilityHelpers_1.verifyOneOrNone)(await this.findSyncStates({ partial: { syncStateId: id }, trx }));\n }\n async findTransactionById(id, trx, noRawTx) {\n return (0, utilityHelpers_1.verifyOneOrNone)(await this.findTransactions({\n partial: { transactionId: id },\n noRawTx,\n trx\n }));\n }\n async findTxLabelById(id, trx) {\n return (0, utilityHelpers_1.verifyOneOrNone)(await this.findTxLabels({ partial: { txLabelId: id }, trx }));\n }\n async findOutputTagById(id, trx) {\n return (0, utilityHelpers_1.verifyOneOrNone)(await this.findOutputTags({ partial: { outputTagId: id }, trx 
}));\n }\n async findUserById(id, trx) {\n return (0, utilityHelpers_1.verifyOneOrNone)(await this.findUsers({ partial: { userId: id }, trx }));\n }\n async findOrInsertUser(identityKey, trx) {\n let user;\n let isNew = false;\n for (let retry = 0;; retry++) {\n try {\n user = (0, utilityHelpers_1.verifyOneOrNone)(await this.findUsers({ partial: { identityKey }, trx }));\n //console.log(`findOrInsertUser oneOrNone: ${JSON.stringify(user || 'none').slice(0,512)}`)\n if (user)\n break;\n const now = new Date();\n user = {\n created_at: now,\n updated_at: new Date('1971-01-01'), // Default constructed user, sync will override with any updated user.\n userId: 0,\n identityKey,\n activeStorage: this.getSettings().storageIdentityKey\n };\n user.userId = await this.insertUser(user, trx);\n isNew = true;\n // Add default change basket for new user.\n await this.insertOutputBasket({\n created_at: now,\n updated_at: new Date('1971-01-01'), // Default constructed basket, sync will override with any updated basket.\n basketId: 0,\n userId: user.userId,\n name: 'default',\n numberOfDesiredUTXOs: 144,\n minimumDesiredUTXOValue: 32,\n isDeleted: false\n });\n break;\n }\n catch (eu) {\n console.log(`findOrInsertUser catch: ${JSON.stringify(eu).slice(0, 512)}`);\n if (retry > 0)\n throw eu;\n }\n }\n return { user, isNew };\n }\n async findOrInsertTransaction(newTx, trx) {\n let tx;\n let isNew = false;\n for (let retry = 0;; retry++) {\n try {\n tx = (0, utilityHelpers_1.verifyOneOrNone)(await this.findTransactions({\n partial: { userId: newTx.userId, txid: newTx.txid },\n trx\n }));\n if (tx)\n break;\n newTx.transactionId = await this.insertTransaction(newTx, trx);\n isNew = true;\n tx = newTx;\n break;\n }\n catch (eu) {\n if (retry > 0)\n throw eu;\n }\n }\n return { tx, isNew };\n }\n async findOrInsertOutputBasket(userId, name, trx) {\n const partial = { name, userId };\n for (let retry = 0;; retry++) {\n try {\n const now = new Date();\n let basket = (0, 
utilityHelpers_1.verifyOneOrNone)(await this.findOutputBaskets({ partial, trx }));\n if (!basket) {\n basket = {\n ...partial,\n minimumDesiredUTXOValue: 0,\n numberOfDesiredUTXOs: 0,\n basketId: 0,\n created_at: now,\n updated_at: now,\n isDeleted: false\n };\n basket.basketId = await this.insertOutputBasket(basket, trx);\n }\n if (basket.isDeleted) {\n await this.updateOutputBasket((0, utilityHelpers_1.verifyId)(basket.basketId), {\n isDeleted: false\n });\n }\n return basket;\n }\n catch (eu) {\n if (retry > 0)\n throw eu;\n }\n }\n }\n async findOrInsertTxLabel(userId, label, trx) {\n const partial = { label, userId };\n for (let retry = 0;; retry++) {\n try {\n const now = new Date();\n let txLabel = (0, utilityHelpers_1.verifyOneOrNone)(await this.findTxLabels({ partial, trx }));\n if (!txLabel) {\n txLabel = {\n ...partial,\n txLabelId: 0,\n created_at: now,\n updated_at: now,\n isDeleted: false\n };\n txLabel.txLabelId = await this.insertTxLabel(txLabel, trx);\n }\n if (txLabel.isDeleted) {\n await this.updateTxLabel((0, utilityHelpers_1.verifyId)(txLabel.txLabelId), {\n isDeleted: false\n });\n }\n return txLabel;\n }\n catch (eu) {\n if (retry > 0)\n throw eu;\n }\n }\n }\n async findOrInsertTxLabelMap(transactionId, txLabelId, trx) {\n const partial = { transactionId, txLabelId };\n for (let retry = 0;; retry++) {\n try {\n const now = new Date();\n let txLabelMap = (0, utilityHelpers_1.verifyOneOrNone)(await this.findTxLabelMaps({ partial, trx }));\n if (!txLabelMap) {\n txLabelMap = {\n ...partial,\n created_at: now,\n updated_at: now,\n isDeleted: false\n };\n await this.insertTxLabelMap(txLabelMap, trx);\n }\n if (txLabelMap.isDeleted) {\n await this.updateTxLabelMap(transactionId, txLabelId, {\n isDeleted: false\n });\n }\n return txLabelMap;\n }\n catch (eu) {\n if (retry > 0)\n throw eu;\n }\n }\n }\n async findOrInsertOutputTag(userId, tag, trx) {\n const partial = { tag, userId };\n for (let retry = 0;; retry++) {\n try {\n const now = new 
Date();\n let outputTag = (0, utilityHelpers_1.verifyOneOrNone)(await this.findOutputTags({ partial, trx }));\n if (!outputTag) {\n outputTag = {\n ...partial,\n outputTagId: 0,\n created_at: now,\n updated_at: now,\n isDeleted: false\n };\n outputTag.outputTagId = await this.insertOutputTag(outputTag, trx);\n }\n if (outputTag.isDeleted) {\n await this.updateOutputTag((0, utilityHelpers_1.verifyId)(outputTag.outputTagId), {\n isDeleted: false\n });\n }\n return outputTag;\n }\n catch (eu) {\n if (retry > 0)\n throw eu;\n }\n }\n }\n async findOrInsertOutputTagMap(outputId, outputTagId, trx) {\n const partial = { outputId, outputTagId };\n for (let retry = 0;; retry++) {\n try {\n const now = new Date();\n let outputTagMap = (0, utilityHelpers_1.verifyOneOrNone)(await this.findOutputTagMaps({ partial, trx }));\n if (!outputTagMap) {\n outputTagMap = {\n ...partial,\n created_at: now,\n updated_at: now,\n isDeleted: false\n };\n await this.insertOutputTagMap(outputTagMap, trx);\n }\n if (outputTagMap.isDeleted) {\n await this.updateOutputTagMap(outputId, outputTagId, {\n isDeleted: false\n });\n }\n return outputTagMap;\n }\n catch (eu) {\n if (retry > 0)\n throw eu;\n }\n }\n }\n async findOrInsertSyncStateAuth(auth, storageIdentityKey, storageName) {\n const partial = { userId: auth.userId, storageIdentityKey, storageName };\n for (let retry = 0;; retry++) {\n try {\n const now = new Date();\n let syncState = (0, utilityHelpers_1.verifyOneOrNone)(await this.findSyncStates({ partial }));\n if (!syncState) {\n syncState = {\n ...partial,\n created_at: now,\n updated_at: now,\n syncStateId: 0,\n status: 'unknown',\n init: false,\n refNum: (0, utilityHelpers_1.randomBytesBase64)(12),\n syncMap: JSON.stringify((0, entities_1.createSyncMap)())\n };\n await this.insertSyncState(syncState);\n return { syncState, isNew: true };\n }\n return { syncState, isNew: false };\n }\n catch (eu) {\n if (retry > 0)\n throw eu;\n }\n }\n }\n async findOrInsertProvenTxReq(newReq, trx) 
{\n let req;\n let isNew = false;\n for (let retry = 0;; retry++) {\n try {\n req = (0, utilityHelpers_1.verifyOneOrNone)(await this.findProvenTxReqs({ partial: { txid: newReq.txid }, trx }));\n if (req)\n break;\n newReq.provenTxReqId = await this.insertProvenTxReq(newReq, trx);\n isNew = true;\n req = newReq;\n break;\n }\n catch (eu) {\n if (retry > 0)\n throw eu;\n }\n }\n return { req, isNew };\n }\n async findOrInsertProvenTx(newProven, trx) {\n let proven;\n let isNew = false;\n for (let retry = 0;; retry++) {\n try {\n proven = (0, utilityHelpers_1.verifyOneOrNone)(await this.findProvenTxs({ partial: { txid: newProven.txid }, trx }));\n if (proven)\n break;\n newProven.provenTxId = await this.insertProvenTx(newProven, trx);\n isNew = true;\n proven = newProven;\n break;\n }\n catch (eu) {\n if (retry > 0)\n throw eu;\n }\n }\n return { proven, isNew };\n }\n async tagOutput(partial, tag, trx) {\n await this.transaction(async (trx) => {\n const o = (0, utilityHelpers_1.verifyOne)(await this.findOutputs({ partial, noScript: true, trx }));\n const outputTag = await this.findOrInsertOutputTag(o.userId, tag, trx);\n await this.findOrInsertOutputTagMap((0, utilityHelpers_1.verifyId)(o.outputId), (0, utilityHelpers_1.verifyId)(outputTag.outputTagId), trx);\n }, trx);\n }\n}\nexports.StorageReaderWriter = StorageReaderWriter;\n//# sourceMappingURL=StorageReaderWriter.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/StorageReaderWriter.js?\n}"); /***/ }), @@ -3656,6 +3458,17 @@ /***/ }), +/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/index.client.js": +/*!*********************************************************************************!*\ + !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/index.client.js ***! 
+ \*********************************************************************************/ +/***/ (function(__unused_webpack_module, exports, __webpack_require__) { + +"use strict"; +eval("{\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\n__exportStar(__webpack_require__(/*! ./WalletStorageManager */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/WalletStorageManager.js\"), exports);\n__exportStar(__webpack_require__(/*! ./StorageIdb */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/StorageIdb.js\"), exports);\n__exportStar(__webpack_require__(/*! ./StorageProvider */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/StorageProvider.js\"), exports);\n__exportStar(__webpack_require__(/*! ./StorageSyncReader */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/StorageSyncReader.js\"), exports);\n__exportStar(__webpack_require__(/*! ./schema/tables/index */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/tables/index.js\"), exports);\n__exportStar(__webpack_require__(/*! ./schema/entities/index */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/index.js\"), exports);\n__exportStar(__webpack_require__(/*! 
./remoting/StorageClient */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/remoting/StorageClient.js\"), exports);\n//# sourceMappingURL=index.client.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/index.client.js?\n}"); + +/***/ }), + /***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/index.mobile.js": /*!*********************************************************************************!*\ !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/index.mobile.js ***! @@ -3667,6 +3480,28 @@ /***/ }), +/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/ListActionsSpecOp.js": +/*!**********************************************************************************************!*\ + !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/ListActionsSpecOp.js ***! + \**********************************************************************************************/ +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.getLabelToSpecOp = void 0;\nconst index_client_1 = __webpack_require__(/*! 
../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nconst getLabelToSpecOp = () => {\n return {\n [index_client_1.sdk.specOpNoSendActions]: {\n name: 'noSendActions',\n labelsToIntercept: ['abort'],\n setStatusFilter: () => ['nosend'],\n postProcess: async (s, auth, vargs, specOpLabels, txs) => {\n if (specOpLabels.indexOf('abort') >= 0) {\n for (const tx of txs) {\n if (tx.status === 'nosend') {\n await s.abortAction(auth, { reference: tx.reference });\n tx.status = 'failed';\n }\n }\n }\n }\n },\n [index_client_1.sdk.specOpFailedActions]: {\n name: 'failedActions',\n labelsToIntercept: ['unfail'],\n setStatusFilter: () => ['failed'],\n postProcess: async (s, auth, vargs, specOpLabels, txs) => {\n if (specOpLabels.indexOf('unfail') >= 0) {\n for (const tx of txs) {\n if (tx.status === 'failed') {\n await s.updateTransaction(tx.transactionId, { status: 'unfail' });\n // wallet wire does not support 'unfail' status, return as 'failed'.\n }\n }\n }\n }\n }\n };\n};\nexports.getLabelToSpecOp = getLabelToSpecOp;\n//# sourceMappingURL=ListActionsSpecOp.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/ListActionsSpecOp.js?\n}"); + +/***/ }), + +/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/ListOutputsSpecOp.js": +/*!**********************************************************************************************!*\ + !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/ListOutputsSpecOp.js ***! + \**********************************************************************************************/ +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.getBasketToSpecOp = void 0;\nconst index_client_1 = __webpack_require__(/*! 
../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nconst getBasketToSpecOp = () => {\n return {\n [index_client_1.sdk.specOpWalletBalance]: {\n name: 'totalOutputsIsWalletBalance',\n useBasket: 'default',\n ignoreLimit: true,\n resultFromOutputs: async (s, auth, vargs, specOpTags, outputs) => {\n let totalOutputs = 0;\n for (const o of outputs)\n totalOutputs += o.satoshis;\n return { totalOutputs, outputs: [] };\n }\n },\n [index_client_1.sdk.specOpInvalidChange]: {\n name: 'invalidChangeOutputs',\n useBasket: 'default',\n ignoreLimit: true,\n includeOutputScripts: true,\n includeSpent: false,\n tagsToIntercept: ['release', 'all'],\n filterOutputs: async (s, auth, vargs, specOpTags, outputs) => {\n const filteredOutputs = [];\n const services = s.getServices();\n for (const o of outputs) {\n await s.validateOutputScript(o);\n let ok = false;\n if (o.lockingScript && o.lockingScript.length > 0) {\n ok = await services.isUtxo(o);\n }\n else {\n ok = undefined;\n }\n if (ok === false) {\n filteredOutputs.push(o);\n }\n }\n if (specOpTags.indexOf('release') >= 0) {\n for (const o of filteredOutputs) {\n await s.updateOutput(o.outputId, { spendable: false });\n o.spendable = false;\n }\n }\n return filteredOutputs;\n }\n },\n [index_client_1.sdk.specOpSetWalletChangeParams]: {\n name: 'setWalletChangeParams',\n tagsParamsCount: 2,\n resultFromTags: async (s, auth, vargs, specOpTags) => {\n if (specOpTags.length !== 2)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('numberOfDesiredUTXOs and minimumDesiredUTXOValue', 'valid');\n const numberOfDesiredUTXOs = (0, index_client_1.verifyInteger)(Number(specOpTags[0]));\n const minimumDesiredUTXOValue = (0, index_client_1.verifyInteger)(Number(specOpTags[1]));\n const basket = (0, index_client_1.verifyOne)(await s.findOutputBaskets({\n partial: { userId: (0, index_client_1.verifyId)(auth.userId), name: 'default' }\n }));\n await s.updateOutputBasket(basket.basketId, {\n 
numberOfDesiredUTXOs,\n minimumDesiredUTXOValue\n });\n return { totalOutputs: 0, outputs: [] };\n }\n }\n };\n};\nexports.getBasketToSpecOp = getBasketToSpecOp;\n//# sourceMappingURL=ListOutputsSpecOp.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/ListOutputsSpecOp.js?\n}"); + +/***/ }), + /***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/attemptToPostReqsToNetwork.js": /*!*******************************************************************************************************!*\ !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/attemptToPostReqsToNetwork.js ***! @@ -3685,7 +3520,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.setDisableDoubleSpendCheckForTest = setDisableDoubleSpendCheckForTest;\nexports.createAction = createAction;\nexports.offsetPubKey = offsetPubKey;\nexports.lockScriptWithKeyOffsetFromPubKey = lockScriptWithKeyOffsetFromPubKey;\nexports.createStorageServiceChargeScript = createStorageServiceChargeScript;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst generateChange_1 = __webpack_require__(/*! ./generateChange */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/generateChange.js\");\nconst StorageProvider_1 = __webpack_require__(/*! ../StorageProvider */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/StorageProvider.js\");\nconst WERR_errors_1 = __webpack_require__(/*! ../../sdk/WERR_errors */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WERR_errors.js\");\nconst utilityHelpers_1 = __webpack_require__(/*! ../../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst utilityHelpers_noBuffer_1 = __webpack_require__(/*! 
../../utility/utilityHelpers.noBuffer */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.noBuffer.js\");\nconst EntityProvenTx_1 = __webpack_require__(/*! ../schema/entities/EntityProvenTx */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityProvenTx.js\");\nlet disableDoubleSpendCheckForTest = true;\nfunction setDisableDoubleSpendCheckForTest(v) {\n disableDoubleSpendCheckForTest = v;\n}\nasync function createAction(storage, auth, vargs, originator) {\n //stampLog(vargs, `start storage createTransactionSdk`)\n if (!vargs.isNewTx)\n // The purpose of this function is to create the initial storage records associated\n // with a new transaction. It's an error if we have no new inputs or outputs...\n throw new WERR_errors_1.WERR_INTERNAL();\n /**\n * Steps to create a transaction:\n * - Verify that all inputs either have proof in vargs.inputBEEF or that options.trustSelf === 'known' and input txid.vout are known valid to storage.\n * - Create a new transaction record with status 'unsigned' as the anchor for construction work and to new outputs.\n * - Create all transaction labels.\n * - Add new commission output\n * - Attempt to fund the transaction by allocating change outputs:\n * - As each change output is selected it is simultaneously locked.\n * - Create all new output, basket, tag records\n * - If requested, create result Beef with complete proofs for all inputs used\n * - Create result inputs with source locking scripts\n * - Create result outputs with new locking scripts.\n * - Create and return result.\n */\n const userId = auth.userId;\n const { storageBeef, beef, xinputs } = await validateRequiredInputs(storage, userId, vargs);\n const xoutputs = validateRequiredOutputs(storage, userId, vargs);\n const changeBasketName = 'default';\n const changeBasket = (0, utilityHelpers_1.verifyOne)(await storage.findOutputBaskets({\n partial: { userId, name: changeBasketName }\n }), `Invalid outputGeneration 
basket \"${changeBasketName}\"`);\n const noSendChangeIn = await validateNoSendChange(storage, userId, vargs, changeBasket);\n const availableChangeCount = await storage.countChangeInputs(userId, changeBasket.basketId, !vargs.isDelayed);\n const feeModel = (0, StorageProvider_1.validateStorageFeeModel)(storage.feeModel);\n const newTx = await createNewTxRecord(storage, userId, vargs, storageBeef);\n const ctx = {\n xinputs,\n xoutputs,\n changeBasket,\n noSendChangeIn,\n availableChangeCount,\n feeModel,\n transactionId: newTx.transactionId\n };\n const { allocatedChange, changeOutputs, derivationPrefix, maxPossibleSatoshisAdjustment } = await fundNewTransactionSdk(storage, userId, vargs, ctx);\n if (maxPossibleSatoshisAdjustment) {\n const a = maxPossibleSatoshisAdjustment;\n if (ctx.xoutputs[a.fixedOutputIndex].satoshis !== generateChange_1.maxPossibleSatoshis)\n throw new WERR_errors_1.WERR_INTERNAL();\n ctx.xoutputs[a.fixedOutputIndex].satoshis = a.satoshis;\n }\n // The satoshis of the transaction is the satoshis we get back in change minus the satoshis we spend.\n const satoshis = changeOutputs.reduce((a, e) => a + e.satoshis, 0) - allocatedChange.reduce((a, e) => a + e.satoshis, 0);\n await storage.updateTransaction(newTx.transactionId, { satoshis });\n const { outputs, changeVouts } = await createNewOutputs(storage, userId, vargs, ctx, changeOutputs);\n const inputBeef = await mergeAllocatedChangeBeefs(storage, userId, vargs, allocatedChange, beef);\n const inputs = await createNewInputs(storage, userId, vargs, ctx, allocatedChange);\n const r = {\n reference: newTx.reference,\n version: newTx.version,\n lockTime: newTx.lockTime,\n inputs,\n outputs,\n derivationPrefix,\n inputBeef,\n noSendChangeOutputVouts: vargs.isNoSend ? 
changeVouts : undefined\n };\n //stampLog(vargs, `end storage createTransactionSdk`)\n return r;\n}\nfunction makeDefaultOutput(userId, transactionId, satoshis, vout) {\n const now = new Date();\n const output = {\n created_at: now,\n updated_at: now,\n outputId: 0,\n userId,\n transactionId,\n satoshis: satoshis,\n vout,\n basketId: undefined,\n change: false,\n customInstructions: undefined,\n derivationPrefix: undefined,\n derivationSuffix: undefined,\n outputDescription: '',\n lockingScript: undefined,\n providedBy: 'you',\n purpose: '',\n senderIdentityKey: undefined,\n spendable: true,\n spendingDescription: undefined,\n spentBy: undefined,\n txid: undefined,\n type: ''\n };\n return output;\n}\nasync function createNewInputs(storage, userId, vargs, ctx, allocatedChange) {\n const r = [];\n const newInputs = [];\n for (const i of ctx.xinputs) {\n const o = i.output;\n newInputs.push({ i, o });\n if (o) {\n await storage.transaction(async (trx) => {\n const o2 = (0, utilityHelpers_1.verifyOne)(await storage.findOutputs({ partial: { outputId: o.outputId }, trx }));\n if (o2.spendable != true || o2.spentBy !== undefined)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER(`inputs[${i.vin}]`, `spendable output. output ${o.txid}:${o.vout} appears to have been spent.`);\n await storage.updateOutput(o.outputId, {\n spendable: false,\n spentBy: ctx.transactionId,\n spendingDescription: i.inputDescription\n }, trx);\n });\n }\n }\n for (const o of allocatedChange) {\n newInputs.push({ o, unlockLen: 107 });\n }\n let vin = -1;\n for (const { i, o, unlockLen } of newInputs) {\n vin++;\n if (o) {\n if (!i && !unlockLen)\n throw new WERR_errors_1.WERR_INTERNAL(`vin ${vin} non-fixedInput without unlockLen`);\n const sourceTransaction = vargs.includeAllSourceTransactions && vargs.isSignAction\n ? 
await storage.getRawTxOfKnownValidTransaction(o.txid)\n : undefined;\n const ri = {\n vin,\n sourceTxid: o.txid,\n sourceVout: o.vout,\n sourceSatoshis: o.satoshis,\n sourceLockingScript: (0, utilityHelpers_noBuffer_1.asString)(o.lockingScript),\n sourceTransaction,\n unlockingScriptLength: unlockLen ? unlockLen : i.unlockingScriptLength,\n providedBy: i && o.providedBy === 'storage' ? 'you-and-storage' : o.providedBy,\n type: o.type,\n spendingDescription: o.spendingDescription || undefined,\n derivationPrefix: o.derivationPrefix || undefined,\n derivationSuffix: o.derivationSuffix || undefined,\n senderIdentityKey: o.senderIdentityKey || undefined\n };\n r.push(ri);\n }\n else {\n if (!i)\n throw new WERR_errors_1.WERR_INTERNAL(`vin ${vin} without output or xinput`);\n // user specified input with no corresponding output being spent.\n const ri = {\n vin,\n sourceTxid: i.outpoint.txid,\n sourceVout: i.outpoint.vout,\n sourceSatoshis: i.satoshis,\n sourceLockingScript: i.lockingScript.toHex(),\n unlockingScriptLength: i.unlockingScriptLength,\n providedBy: 'you',\n type: 'custom',\n spendingDescription: undefined,\n derivationPrefix: undefined,\n derivationSuffix: undefined,\n senderIdentityKey: undefined\n };\n r.push(ri);\n }\n }\n return r;\n}\nasync function createNewOutputs(storage, userId, vargs, ctx, changeOutputs) {\n var _a;\n const outputs = [];\n // Lookup output baskets\n const txBaskets = {};\n for (const xo of ctx.xoutputs) {\n if (xo.basket !== undefined && !txBaskets[xo.basket])\n txBaskets[xo.basket] = await storage.findOrInsertOutputBasket(userId, xo.basket);\n }\n // Lookup output tags\n const txTags = {};\n for (const xo of ctx.xoutputs) {\n for (const tag of xo.tags) {\n txTags[tag] = await storage.findOrInsertOutputTag(userId, tag);\n }\n }\n const newOutputs = [];\n for (const xo of ctx.xoutputs) {\n const lockingScript = (0, utilityHelpers_noBuffer_1.asArray)(xo.lockingScript);\n if (xo.purpose === 'service-charge') {\n const now = new 
Date();\n await storage.insertCommission({\n userId,\n transactionId: ctx.transactionId,\n lockingScript,\n satoshis: xo.satoshis,\n isRedeemed: false,\n keyOffset: (0, utilityHelpers_1.verifyTruthy)(xo.keyOffset),\n created_at: now,\n updated_at: now,\n commissionId: 0\n });\n const o = makeDefaultOutput(userId, ctx.transactionId, xo.satoshis, xo.vout);\n o.lockingScript = lockingScript;\n o.providedBy = 'storage';\n o.purpose = 'storage-commission';\n o.type = 'custom';\n o.spendable = false;\n newOutputs.push({ o, tags: [] });\n }\n else {\n // The user wants tracking if they put their output in a basket\n const basketId = !xo.basket ? undefined : txBaskets[xo.basket].basketId;\n const o = makeDefaultOutput(userId, ctx.transactionId, xo.satoshis, xo.vout);\n o.lockingScript = lockingScript;\n o.basketId = basketId;\n o.customInstructions = xo.customInstructions;\n o.outputDescription = xo.outputDescription;\n o.providedBy = xo.providedBy;\n o.purpose = xo.purpose || '';\n o.type = 'custom';\n newOutputs.push({ o, tags: xo.tags });\n }\n }\n for (const o of changeOutputs) {\n o.spendable = true;\n newOutputs.push({ o, tags: [] });\n }\n if (vargs.options.randomizeOutputs) {\n const randomVals = [];\n const nextRandomVal = () => {\n let val = 0;\n if (!randomVals || randomVals.length === 0) {\n val = Math.random();\n }\n else {\n val = randomVals.shift() || 0;\n randomVals.push(val);\n }\n return val;\n };\n /** In-place array shuffle */\n const shuffleArray = (array) => {\n let currentIndex = array.length;\n let temporaryValue;\n let randomIndex;\n while (currentIndex !== 0) {\n randomIndex = Math.floor(nextRandomVal() * currentIndex);\n currentIndex -= 1;\n temporaryValue = array[currentIndex];\n array[currentIndex] = array[randomIndex];\n array[randomIndex] = temporaryValue;\n }\n return array;\n };\n let vout = -1;\n const newVouts = Array(newOutputs.length);\n for (let i = 0; i < newVouts.length; i++)\n newVouts[i] = i;\n shuffleArray(newVouts);\n for (const 
no of newOutputs) {\n vout++;\n if (no.o.vout !== vout)\n throw new WERR_errors_1.WERR_INTERNAL(`new output ${vout} has out of order vout ${no.o.vout}`);\n no.o.vout = newVouts[vout];\n }\n }\n const changeVouts = [];\n for (const { o, tags } of newOutputs) {\n o.outputId = await storage.insertOutput(o);\n if (o.change && o.purpose === 'change' && o.providedBy === 'storage')\n changeVouts.push(o.vout);\n // Add tags to the output\n for (const tagName of tags) {\n const tag = txTags[tagName];\n await storage.findOrInsertOutputTagMap((0, utilityHelpers_1.verifyId)(o.outputId), (0, utilityHelpers_1.verifyId)(tag.outputTagId));\n }\n const ro = {\n vout: (0, utilityHelpers_1.verifyInteger)(o.vout),\n satoshis: (0, utilityHelpers_1.verifyTruthy)(o.satoshis),\n lockingScript: !o.lockingScript ? '' : (0, utilityHelpers_noBuffer_1.asString)(o.lockingScript),\n providedBy: (0, utilityHelpers_1.verifyTruthy)(o.providedBy),\n purpose: o.purpose || undefined,\n basket: (_a = Object.values(txBaskets).find(b => b.basketId === o.basketId)) === null || _a === void 0 ? 
void 0 : _a.name,\n tags: tags,\n outputDescription: o.outputDescription,\n derivationSuffix: o.derivationSuffix,\n customInstructions: o.customInstructions\n };\n outputs.push(ro);\n }\n return { outputs, changeVouts };\n}\nasync function createNewTxRecord(storage, userId, vargs, storageBeef) {\n const now = new Date();\n const newTx = {\n created_at: now,\n updated_at: now,\n transactionId: 0,\n version: vargs.version,\n lockTime: vargs.lockTime,\n status: 'unsigned',\n reference: (0, utilityHelpers_1.randomBytesBase64)(12),\n satoshis: 0, // updated after fundingTransaction\n userId,\n isOutgoing: true,\n inputBEEF: storageBeef.toBinary(),\n description: vargs.description,\n txid: undefined,\n rawTx: undefined\n };\n newTx.transactionId = await storage.insertTransaction(newTx);\n for (const label of vargs.labels) {\n const txLabel = await storage.findOrInsertTxLabel(userId, label);\n await storage.findOrInsertTxLabelMap((0, utilityHelpers_1.verifyId)(newTx.transactionId), (0, utilityHelpers_1.verifyId)(txLabel.txLabelId));\n }\n return newTx;\n}\n/**\n * Convert vargs.outputs:\n *\n * lockingScript: HexString\n * satoshis: SatoshiValue\n * outputDescription: DescriptionString5to50Bytes\n * basket?: BasketStringUnder300Bytes\n * customInstructions?: string\n * tags: BasketStringUnderBytes[]\n *\n * to XValidCreateActionOutput (which aims for StorageCreateTransactionSdkOutput)\n *\n * adds:\n * vout: number\n * providedBy: StorageProvidedBy\n * purpose?: string\n * derivationSuffix?: string\n * keyOffset?: string\n *\n * @param vargs\n * @returns xoutputs\n */\nfunction validateRequiredOutputs(storage, userId, vargs) {\n const xoutputs = [];\n let vout = -1;\n for (const output of vargs.outputs) {\n vout++;\n const xo = {\n ...output,\n vout,\n providedBy: 'you',\n purpose: undefined,\n derivationSuffix: undefined,\n keyOffset: undefined\n };\n xoutputs.push(xo);\n }\n if (storage.commissionSatoshis > 0 && storage.commissionPubKeyHex) {\n vout++;\n const { script, 
keyOffset } = createStorageServiceChargeScript(storage.commissionPubKeyHex);\n xoutputs.push({\n lockingScript: script,\n satoshis: storage.commissionSatoshis,\n outputDescription: 'Storage Service Charge',\n basket: undefined,\n tags: [],\n vout,\n providedBy: 'storage',\n purpose: 'service-charge',\n keyOffset\n });\n }\n return xoutputs;\n}\n/**\n * Verify that we are in posession of validity proof data for any inputs being proposed for a new transaction.\n *\n * `vargs.inputs` is the source of inputs.\n * `vargs.inputBEEF` may include new user supplied validity data.\n * 'vargs.options.trustSelf === 'known'` indicates whether we can rely on the storage database records.\n *\n * If there are no inputs, returns an empty `Beef`.\n *\n * Always pulls rawTx data into first level of validity chains so that parsed transaction data is available\n * and checks input sourceSatoshis as well as filling in input sourceLockingScript.\n *\n * This data may be pruned again before being returned to the user based on `vargs.options.knownTxids`.\n *\n * @param storage\n * @param userId\n * @param vargs\n * @returns {storageBeef} containing only validity proof data for only unknown required inputs.\n * @returns {beef} containing verified validity proof data for all required inputs.\n * @returns {xinputs} extended validated required inputs.\n */\nasync function validateRequiredInputs(storage, userId, vargs) {\n //stampLog(vargs, `start storage verifyInputBeef`)\n const beef = new sdk_1.Beef();\n if (vargs.inputs.length === 0)\n return { storageBeef: beef, beef, xinputs: [] };\n if (vargs.inputBEEF)\n beef.mergeBeef(vargs.inputBEEF);\n const xinputs = vargs.inputs.map((input, vin) => ({\n ...input,\n vin,\n satoshis: -1,\n lockingScript: new sdk_1.Script(),\n output: undefined\n }));\n const trustSelf = vargs.options.trustSelf === 'known';\n const inputTxids = {};\n for (const input of xinputs)\n inputTxids[input.outpoint.txid] = true;\n // Check beef from user that either there are 
no txidOnly entries,\n // or that we can trust storage data and it does indeed vouch\n // for any txidOnly entries\n for (const btx of beef.txs) {\n if (btx.isTxidOnly) {\n if (!trustSelf)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('inputBEEF', `valid and contain complete proof data for ${btx.txid}`);\n if (!inputTxids[btx.txid]) {\n // inputTxids are checked next\n const isKnown = await storage.verifyKnownValidTransaction(btx.txid);\n if (!isKnown)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('inputBEEF', `valid and contain complete proof data for unknown ${btx.txid}`);\n }\n }\n }\n // Make sure that there's an entry for all inputs txid values:\n for (const txid of Object.keys(inputTxids)) {\n let btx = beef.findTxid(txid);\n if (!btx && trustSelf) {\n if (await storage.verifyKnownValidTransaction(txid))\n btx = beef.mergeTxidOnly(txid);\n }\n if (!btx)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('inputBEEF', `valid and contain proof data for possibly known ${txid}`);\n }\n if (!(await beef.verify(await storage.getServices().getChainTracker(), true))) {\n console.log(`verifyInputBeef failed, inputBEEF failed to verify.\\n${beef.toLogString()}\\n`);\n //console.log(`verifyInputBeef failed, inputBEEF failed to verify.\\n${stampLogFormat(vargs.log)}\\n${beef.toLogString()}\\n`)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('inputBEEF', 'valid Beef when factoring options.trustSelf');\n }\n // beef may now be trusted and has a BeefTx for every input txid.\n const storageBeef = beef.clone();\n for (const input of xinputs) {\n const { txid, vout } = input.outpoint;\n const output = (0, utilityHelpers_1.verifyOneOrNone)(await storage.findOutputs({ partial: { userId, txid, vout } }));\n if (output) {\n if (output.change) {\n throw new WERR_errors_1.WERR_INVALID_PARAMETER(`inputs[${input.vin}]`, 'an unmanaged input. 
Change outputs are managed by your wallet.');\n }\n input.output = output;\n if (!Array.isArray(output.lockingScript) || !Number.isInteger(output.satoshis))\n throw new WERR_errors_1.WERR_INVALID_PARAMETER(`${txid}.${vout}`, 'output with valid lockingScript and satoshis');\n if (!disableDoubleSpendCheckForTest && !output.spendable && !vargs.isNoSend)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER(`${txid}.${vout}`, 'spendable output unless noSend is true');\n // input is spending an existing user output which has an lockingScript\n input.satoshis = (0, utilityHelpers_1.verifyNumber)(output.satoshis);\n input.lockingScript = sdk_1.Script.fromBinary((0, utilityHelpers_noBuffer_1.asArray)(output.lockingScript));\n }\n else {\n let btx = beef.findTxid(txid);\n if (btx.isTxidOnly) {\n const { rawTx, proven } = await storage.getProvenOrRawTx(txid);\n //stampLog(vargs, `... storage verifyInputBeef getProvenOrRawTx ${txid} ${proven ? 'proven' : rawTx ? 'rawTx' : 'unknown'}`)\n if (!rawTx)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('inputBEEF', `valid and contain proof data for ${txid}`);\n btx = beef.mergeRawTx((0, utilityHelpers_noBuffer_1.asArray)(rawTx));\n if (proven)\n beef.mergeBump(new EntityProvenTx_1.EntityProvenTx(proven).getMerklePath());\n }\n // btx is valid has parsed transaction data.\n if (vout >= btx.tx.outputs.length)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER(`${txid}.${vout}`, 'valid outpoint');\n const so = btx.tx.outputs[vout];\n input.satoshis = (0, utilityHelpers_1.verifyTruthy)(so.satoshis);\n input.lockingScript = so.lockingScript;\n }\n }\n return { beef, storageBeef, xinputs };\n}\nasync function verifyBeefFixOrhpans(beef, storage) {\n const r = beef.verifyValid();\n if (!r.valid) {\n // Beef is structurally invalid.\n return false;\n }\n const heights = Object.keys(r.roots);\n const services = storage.getServices();\n const chainTracker = await services.getChainTracker();\n let rootsAreValid = true;\n for (const height of heights) 
{\n const isValid = await chainTracker.isValidRootForHeight(r.roots[height], Number(height));\n if (isValid)\n continue;\n // The original block may have been orphaned, check for a new proof.\n const mp = beef.bumps.find(b => b.blockHeight === Number(height));\n //const p = await services.getMerklePath()\n }\n return false;\n}\nasync function validateNoSendChange(storage, userId, vargs, changeBasket) {\n const r = [];\n if (!vargs.isNoSend)\n return [];\n const noSendChange = vargs.options.noSendChange;\n if (noSendChange && noSendChange.length > 0) {\n for (const op of noSendChange) {\n const output = (0, utilityHelpers_1.verifyOneOrNone)(await storage.findOutputs({\n partial: { userId, txid: op.txid, vout: op.vout }\n }));\n // noSendChange is not marked spendable until sent, may not already be spent, and must have a valid greater than zero satoshis\n if (!output ||\n output.providedBy !== 'storage' ||\n output.purpose !== 'change' ||\n output.spendable === false ||\n Number.isInteger(output.spentBy) ||\n !(0, utilityHelpers_1.verifyNumber)(output.satoshis) ||\n output.basketId !== changeBasket.basketId)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('noSendChange outpoint', 'valid');\n if (-1 < r.findIndex(o => o.outputId === output.outputId))\n // noSendChange duplicate OutPoints are not allowed.\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('noSendChange outpoint', 'unique. 
Duplicates are not allowed.');\n r.push(output);\n }\n }\n return r;\n}\nasync function fundNewTransactionSdk(storage, userId, vargs, ctx) {\n const params = {\n fixedInputs: ctx.xinputs.map(xi => ({\n satoshis: xi.satoshis,\n unlockingScriptLength: xi.unlockingScriptLength\n })),\n fixedOutputs: ctx.xoutputs.map(xo => ({\n satoshis: xo.satoshis,\n lockingScriptLength: xo.lockingScript.length / 2\n })),\n feeModel: ctx.feeModel,\n changeInitialSatoshis: ctx.changeBasket.minimumDesiredUTXOValue,\n changeFirstSatoshis: Math.max(1, Math.round(ctx.changeBasket.minimumDesiredUTXOValue / 4)),\n changeLockingScriptLength: 25,\n changeUnlockingScriptLength: 107,\n targetNetCount: ctx.changeBasket.numberOfDesiredUTXOs - ctx.availableChangeCount,\n randomVals: vargs.randomVals\n };\n const noSendChange = [...ctx.noSendChangeIn];\n const outputs = {};\n const allocateChangeInput = async (targetSatoshis, exactSatoshis) => {\n // noSendChange gets allocated first...typically only one input...just allocate in order...\n if (noSendChange.length > 0) {\n const o = noSendChange.pop();\n outputs[o.outputId] = o;\n // allocate the output in storage, noSendChange is by definition spendable false and part of noSpend transaction batch.\n await storage.updateOutput(o.outputId, {\n spendable: false,\n spentBy: ctx.transactionId\n });\n o.spendable = false;\n o.spentBy = ctx.transactionId;\n const r = {\n outputId: o.outputId,\n satoshis: o.satoshis\n };\n return r;\n }\n const basketId = ctx.changeBasket.basketId;\n const o = await storage.allocateChangeInput(userId, basketId, targetSatoshis, exactSatoshis, !vargs.isDelayed, ctx.transactionId);\n if (!o)\n return undefined;\n outputs[o.outputId] = o;\n const r = {\n outputId: o.outputId,\n satoshis: o.satoshis\n };\n return r;\n };\n const releaseChangeInput = async (outputId) => {\n const nsco = ctx.noSendChangeIn.find(o => o.outputId === outputId);\n if (nsco) {\n noSendChange.push(nsco);\n return;\n }\n await 
storage.updateOutput(outputId, {\n spendable: true,\n spentBy: undefined\n });\n };\n const gcr = await (0, generateChange_1.generateChangeSdk)(params, allocateChangeInput, releaseChangeInput);\n const nextRandomVal = () => {\n let val = 0;\n if (!vargs.randomVals || vargs.randomVals.length === 0) {\n val = Math.random();\n }\n else {\n val = vargs.randomVals.shift() || 0;\n vargs.randomVals.push(val);\n }\n return val;\n };\n /**\n * @returns a random integer betweenn min and max, inclussive.\n */\n const rand = (min, max) => {\n if (max < min)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('max', `less than min (${min}). max is (${max})`);\n return Math.floor(nextRandomVal() * (max - min + 1) + min);\n };\n const randomDerivation = (count) => {\n let val = [];\n if (!vargs.randomVals || vargs.randomVals.length === 0) {\n val = (0, sdk_1.Random)(count);\n }\n else {\n for (let i = 0; i < count; i++)\n val.push(rand(0, 255));\n }\n return sdk_1.Utils.toBase64(val);\n };\n // Generate a derivation prefix for the payment\n const derivationPrefix = randomDerivation(16);\n const r = {\n maxPossibleSatoshisAdjustment: gcr.maxPossibleSatoshisAdjustment,\n allocatedChange: gcr.allocatedChangeInputs.map(i => outputs[i.outputId]),\n changeOutputs: gcr.changeOutputs.map((o, i) => ({\n // what we knnow now and can insert into the database for this new transaction's change output\n created_at: new Date(),\n updated_at: new Date(),\n outputId: 0,\n userId,\n transactionId: ctx.transactionId,\n vout: params.fixedOutputs.length + i,\n satoshis: o.satoshis,\n basketId: ctx.changeBasket.basketId,\n spendable: false,\n change: true,\n type: 'P2PKH',\n derivationPrefix,\n derivationSuffix: randomDerivation(16),\n providedBy: 'storage',\n purpose: 'change',\n customInstructions: undefined,\n senderIdentityKey: undefined,\n outputDescription: '',\n // what will be known when transaction is signed\n txid: undefined,\n lockingScript: undefined,\n // when this output gets spent\n 
spentBy: undefined,\n spendingDescription: undefined\n })),\n derivationPrefix\n };\n return r;\n}\n/**\n * Avoid returning any known raw transaction data by converting any known transaction\n * in the `beef` to txidOnly.\n * @returns undefined if `vargs.options.returnTXIDOnly` or trimmed `Beef`\n */\nfunction trimInputBeef(beef, vargs) {\n if (vargs.options.returnTXIDOnly)\n return undefined;\n const knownTxids = {};\n for (const txid of vargs.options.knownTxids)\n knownTxids[txid] = true;\n for (const txid of beef.txs.map(btx => btx.txid))\n if (knownTxids[txid])\n beef.makeTxidOnly(txid);\n return beef.toBinary();\n}\nasync function mergeAllocatedChangeBeefs(storage, userId, vargs, allocatedChange, beef) {\n const options = {\n trustSelf: undefined,\n knownTxids: vargs.options.knownTxids,\n mergeToBeef: beef,\n ignoreStorage: false,\n ignoreServices: true,\n ignoreNewProven: false,\n minProofLevel: undefined\n };\n if (vargs.options.returnTXIDOnly)\n return undefined;\n for (const o of allocatedChange) {\n if (!beef.findTxid(o.txid) && !vargs.options.knownTxids.find(txid => txid === o.txid)) {\n await storage.getBeefForTransaction(o.txid, options);\n }\n }\n return trimInputBeef(beef, vargs);\n}\nfunction keyOffsetToHashedSecret(pub, keyOffset) {\n let offset;\n if (keyOffset !== undefined && typeof keyOffset === 'string') {\n if (keyOffset.length === 64)\n offset = sdk_1.PrivateKey.fromString(keyOffset, 'hex');\n else\n offset = sdk_1.PrivateKey.fromWif(keyOffset);\n }\n else {\n offset = sdk_1.PrivateKey.fromRandom();\n keyOffset = offset.toWif();\n }\n const sharedSecret = pub.mul(offset).encode(true, undefined);\n const hashedSecret = (0, utilityHelpers_1.sha256Hash)(sharedSecret);\n return { hashedSecret: new sdk_1.BigNumber(hashedSecret), keyOffset };\n}\nfunction offsetPubKey(pubKey, keyOffset) {\n const pub = sdk_1.PublicKey.fromString(pubKey);\n const r = keyOffsetToHashedSecret(pub, keyOffset);\n // The hashed secret is multiplied by the generator 
point.\n const point = new sdk_1.Curve().g.mul(r.hashedSecret);\n // The resulting point is added to the recipient public key.\n const offsetPubKey = new sdk_1.PublicKey(pub.add(point));\n return { offsetPubKey: offsetPubKey.toString(), keyOffset: r.keyOffset };\n}\nfunction lockScriptWithKeyOffsetFromPubKey(pubKey, keyOffset) {\n const r = offsetPubKey(pubKey, keyOffset);\n const offsetPub = sdk_1.PublicKey.fromString(r.offsetPubKey);\n const hash = offsetPub.toHash();\n const script = new sdk_1.P2PKH().lock(hash).toHex();\n return { script, keyOffset: r.keyOffset };\n}\nfunction createStorageServiceChargeScript(pubKeyHex) {\n return lockScriptWithKeyOffsetFromPubKey(pubKeyHex);\n}\n//# sourceMappingURL=createAction.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/createAction.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.setDisableDoubleSpendCheckForTest = setDisableDoubleSpendCheckForTest;\nexports.createAction = createAction;\nexports.offsetPubKey = offsetPubKey;\nexports.lockScriptWithKeyOffsetFromPubKey = lockScriptWithKeyOffsetFromPubKey;\nexports.createStorageServiceChargeScript = createStorageServiceChargeScript;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst index_client_1 = __webpack_require__(/*! ../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nconst generateChange_1 = __webpack_require__(/*! 
./generateChange */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/generateChange.js\");\nlet disableDoubleSpendCheckForTest = true;\nfunction setDisableDoubleSpendCheckForTest(v) {\n disableDoubleSpendCheckForTest = v;\n}\nasync function createAction(storage, auth, vargs, originator) {\n //stampLog(vargs, `start storage createTransactionSdk`)\n if (!vargs.isNewTx)\n // The purpose of this function is to create the initial storage records associated\n // with a new transaction. It's an error if we have no new inputs or outputs...\n throw new index_client_1.sdk.WERR_INTERNAL();\n /**\n * Steps to create a transaction:\n * - Verify that all inputs either have proof in vargs.inputBEEF or that options.trustSelf === 'known' and input txid.vout are known valid to storage.\n * - Create a new transaction record with status 'unsigned' as the anchor for construction work and to new outputs.\n * - Create all transaction labels.\n * - Add new commission output\n * - Attempt to fund the transaction by allocating change outputs:\n * - As each change output is selected it is simultaneously locked.\n * - Create all new output, basket, tag records\n * - If requested, create result Beef with complete proofs for all inputs used\n * - Create result inputs with source locking scripts\n * - Create result outputs with new locking scripts.\n * - Create and return result.\n */\n const userId = auth.userId;\n const { storageBeef, beef, xinputs } = await validateRequiredInputs(storage, userId, vargs);\n const xoutputs = validateRequiredOutputs(storage, userId, vargs);\n const changeBasketName = 'default';\n const changeBasket = (0, index_client_1.verifyOne)(await storage.findOutputBaskets({\n partial: { userId, name: changeBasketName }\n }), `Invalid outputGeneration basket \"${changeBasketName}\"`);\n const noSendChangeIn = await validateNoSendChange(storage, userId, vargs, changeBasket);\n const availableChangeCount = await storage.countChangeInputs(userId, 
changeBasket.basketId, !vargs.isDelayed);\n const feeModel = (0, index_client_1.validateStorageFeeModel)(storage.feeModel);\n const newTx = await createNewTxRecord(storage, userId, vargs, storageBeef);\n const ctx = {\n xinputs,\n xoutputs,\n changeBasket,\n noSendChangeIn,\n availableChangeCount,\n feeModel,\n transactionId: newTx.transactionId\n };\n const { allocatedChange, changeOutputs, derivationPrefix, maxPossibleSatoshisAdjustment } = await fundNewTransactionSdk(storage, userId, vargs, ctx);\n if (maxPossibleSatoshisAdjustment) {\n const a = maxPossibleSatoshisAdjustment;\n if (ctx.xoutputs[a.fixedOutputIndex].satoshis !== generateChange_1.maxPossibleSatoshis)\n throw new index_client_1.sdk.WERR_INTERNAL();\n ctx.xoutputs[a.fixedOutputIndex].satoshis = a.satoshis;\n }\n // The satoshis of the transaction is the satoshis we get back in change minus the satoshis we spend.\n const satoshis = changeOutputs.reduce((a, e) => a + e.satoshis, 0) - allocatedChange.reduce((a, e) => a + e.satoshis, 0);\n await storage.updateTransaction(newTx.transactionId, { satoshis });\n const { outputs, changeVouts } = await createNewOutputs(storage, userId, vargs, ctx, changeOutputs);\n const inputBeef = await mergeAllocatedChangeBeefs(storage, userId, vargs, allocatedChange, beef);\n const inputs = await createNewInputs(storage, userId, vargs, ctx, allocatedChange);\n const r = {\n reference: newTx.reference,\n version: newTx.version,\n lockTime: newTx.lockTime,\n inputs,\n outputs,\n derivationPrefix,\n inputBeef,\n noSendChangeOutputVouts: vargs.isNoSend ? 
changeVouts : undefined\n };\n //stampLog(vargs, `end storage createTransactionSdk`)\n return r;\n}\nfunction makeDefaultOutput(userId, transactionId, satoshis, vout) {\n const now = new Date();\n const output = {\n created_at: now,\n updated_at: now,\n outputId: 0,\n userId,\n transactionId,\n satoshis: satoshis,\n vout,\n basketId: undefined,\n change: false,\n customInstructions: undefined,\n derivationPrefix: undefined,\n derivationSuffix: undefined,\n outputDescription: '',\n lockingScript: undefined,\n providedBy: 'you',\n purpose: '',\n senderIdentityKey: undefined,\n spendable: true,\n spendingDescription: undefined,\n spentBy: undefined,\n txid: undefined,\n type: ''\n };\n return output;\n}\nasync function createNewInputs(storage, userId, vargs, ctx, allocatedChange) {\n const r = [];\n const newInputs = [];\n for (const i of ctx.xinputs) {\n const o = i.output;\n newInputs.push({ i, o });\n if (o) {\n await storage.transaction(async (trx) => {\n const o2 = (0, index_client_1.verifyOne)(await storage.findOutputs({ partial: { outputId: o.outputId }, trx }));\n if (o2.spendable != true || o2.spentBy !== undefined)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER(`inputs[${i.vin}]`, `spendable output. output ${o.txid}:${o.vout} appears to have been spent.`);\n await storage.updateOutput(o.outputId, {\n spendable: false,\n spentBy: ctx.transactionId,\n spendingDescription: i.inputDescription\n }, trx);\n });\n }\n }\n for (const o of allocatedChange) {\n newInputs.push({ o, unlockLen: 107 });\n }\n let vin = -1;\n for (const { i, o, unlockLen } of newInputs) {\n vin++;\n if (o) {\n if (!i && !unlockLen)\n throw new index_client_1.sdk.WERR_INTERNAL(`vin ${vin} non-fixedInput without unlockLen`);\n const sourceTransaction = vargs.includeAllSourceTransactions && vargs.isSignAction\n ? 
await storage.getRawTxOfKnownValidTransaction(o.txid)\n : undefined;\n const ri = {\n vin,\n sourceTxid: o.txid,\n sourceVout: o.vout,\n sourceSatoshis: o.satoshis,\n sourceLockingScript: (0, index_client_1.asString)(o.lockingScript),\n sourceTransaction,\n unlockingScriptLength: unlockLen ? unlockLen : i.unlockingScriptLength,\n providedBy: i && o.providedBy === 'storage' ? 'you-and-storage' : o.providedBy,\n type: o.type,\n spendingDescription: o.spendingDescription || undefined,\n derivationPrefix: o.derivationPrefix || undefined,\n derivationSuffix: o.derivationSuffix || undefined,\n senderIdentityKey: o.senderIdentityKey || undefined\n };\n r.push(ri);\n }\n else {\n if (!i)\n throw new index_client_1.sdk.WERR_INTERNAL(`vin ${vin} without output or xinput`);\n // user specified input with no corresponding output being spent.\n const ri = {\n vin,\n sourceTxid: i.outpoint.txid,\n sourceVout: i.outpoint.vout,\n sourceSatoshis: i.satoshis,\n sourceLockingScript: i.lockingScript.toHex(),\n unlockingScriptLength: i.unlockingScriptLength,\n providedBy: 'you',\n type: 'custom',\n spendingDescription: undefined,\n derivationPrefix: undefined,\n derivationSuffix: undefined,\n senderIdentityKey: undefined\n };\n r.push(ri);\n }\n }\n return r;\n}\nasync function createNewOutputs(storage, userId, vargs, ctx, changeOutputs) {\n var _a;\n const outputs = [];\n // Lookup output baskets\n const txBaskets = {};\n for (const xo of ctx.xoutputs) {\n if (xo.basket !== undefined && !txBaskets[xo.basket])\n txBaskets[xo.basket] = await storage.findOrInsertOutputBasket(userId, xo.basket);\n }\n // Lookup output tags\n const txTags = {};\n for (const xo of ctx.xoutputs) {\n for (const tag of xo.tags) {\n txTags[tag] = await storage.findOrInsertOutputTag(userId, tag);\n }\n }\n const newOutputs = [];\n for (const xo of ctx.xoutputs) {\n const lockingScript = (0, index_client_1.asArray)(xo.lockingScript);\n if (xo.purpose === 'service-charge') {\n const now = new Date();\n await 
storage.insertCommission({\n userId,\n transactionId: ctx.transactionId,\n lockingScript,\n satoshis: xo.satoshis,\n isRedeemed: false,\n keyOffset: (0, index_client_1.verifyTruthy)(xo.keyOffset),\n created_at: now,\n updated_at: now,\n commissionId: 0\n });\n const o = makeDefaultOutput(userId, ctx.transactionId, xo.satoshis, xo.vout);\n o.lockingScript = lockingScript;\n o.providedBy = 'storage';\n o.purpose = 'storage-commission';\n o.type = 'custom';\n o.spendable = false;\n newOutputs.push({ o, tags: [] });\n }\n else {\n // The user wants tracking if they put their output in a basket\n const basketId = !xo.basket ? undefined : txBaskets[xo.basket].basketId;\n const o = makeDefaultOutput(userId, ctx.transactionId, xo.satoshis, xo.vout);\n o.lockingScript = lockingScript;\n o.basketId = basketId;\n o.customInstructions = xo.customInstructions;\n o.outputDescription = xo.outputDescription;\n o.providedBy = xo.providedBy;\n o.purpose = xo.purpose || '';\n o.type = 'custom';\n newOutputs.push({ o, tags: xo.tags });\n }\n }\n for (const o of changeOutputs) {\n o.spendable = true;\n newOutputs.push({ o, tags: [] });\n }\n if (vargs.options.randomizeOutputs) {\n const randomVals = [];\n const nextRandomVal = () => {\n let val = 0;\n if (!randomVals || randomVals.length === 0) {\n val = Math.random();\n }\n else {\n val = randomVals.shift() || 0;\n randomVals.push(val);\n }\n return val;\n };\n /** In-place array shuffle */\n const shuffleArray = (array) => {\n let currentIndex = array.length;\n let temporaryValue;\n let randomIndex;\n while (currentIndex !== 0) {\n randomIndex = Math.floor(nextRandomVal() * currentIndex);\n currentIndex -= 1;\n temporaryValue = array[currentIndex];\n array[currentIndex] = array[randomIndex];\n array[randomIndex] = temporaryValue;\n }\n return array;\n };\n let vout = -1;\n const newVouts = Array(newOutputs.length);\n for (let i = 0; i < newVouts.length; i++)\n newVouts[i] = i;\n shuffleArray(newVouts);\n for (const no of newOutputs) 
{\n vout++;\n if (no.o.vout !== vout)\n throw new index_client_1.sdk.WERR_INTERNAL(`new output ${vout} has out of order vout ${no.o.vout}`);\n no.o.vout = newVouts[vout];\n }\n }\n const changeVouts = [];\n for (const { o, tags } of newOutputs) {\n o.outputId = await storage.insertOutput(o);\n if (o.change && o.purpose === 'change' && o.providedBy === 'storage')\n changeVouts.push(o.vout);\n // Add tags to the output\n for (const tagName of tags) {\n const tag = txTags[tagName];\n await storage.findOrInsertOutputTagMap((0, index_client_1.verifyId)(o.outputId), (0, index_client_1.verifyId)(tag.outputTagId));\n }\n const ro = {\n vout: (0, index_client_1.verifyInteger)(o.vout),\n satoshis: (0, index_client_1.verifyTruthy)(o.satoshis),\n lockingScript: !o.lockingScript ? '' : (0, index_client_1.asString)(o.lockingScript),\n providedBy: (0, index_client_1.verifyTruthy)(o.providedBy),\n purpose: o.purpose || undefined,\n basket: (_a = Object.values(txBaskets).find(b => b.basketId === o.basketId)) === null || _a === void 0 ? 
void 0 : _a.name,\n tags: tags,\n outputDescription: o.outputDescription,\n derivationSuffix: o.derivationSuffix,\n customInstructions: o.customInstructions\n };\n outputs.push(ro);\n }\n return { outputs, changeVouts };\n}\nasync function createNewTxRecord(storage, userId, vargs, storageBeef) {\n const now = new Date();\n const newTx = {\n created_at: now,\n updated_at: now,\n transactionId: 0,\n version: vargs.version,\n lockTime: vargs.lockTime,\n status: 'unsigned',\n reference: (0, index_client_1.randomBytesBase64)(12),\n satoshis: 0, // updated after fundingTransaction\n userId,\n isOutgoing: true,\n inputBEEF: storageBeef.toBinary(),\n description: vargs.description,\n txid: undefined,\n rawTx: undefined\n };\n newTx.transactionId = await storage.insertTransaction(newTx);\n for (const label of vargs.labels) {\n const txLabel = await storage.findOrInsertTxLabel(userId, label);\n await storage.findOrInsertTxLabelMap((0, index_client_1.verifyId)(newTx.transactionId), (0, index_client_1.verifyId)(txLabel.txLabelId));\n }\n return newTx;\n}\n/**\n * Convert vargs.outputs:\n *\n * lockingScript: HexString\n * satoshis: SatoshiValue\n * outputDescription: DescriptionString5to50Bytes\n * basket?: BasketStringUnder300Bytes\n * customInstructions?: string\n * tags: BasketStringUnderBytes[]\n *\n * to XValidCreateActionOutput (which aims for sdk.StorageCreateTransactionSdkOutput)\n *\n * adds:\n * vout: number\n * providedBy: sdk.StorageProvidedBy\n * purpose?: string\n * derivationSuffix?: string\n * keyOffset?: string\n *\n * @param vargs\n * @returns xoutputs\n */\nfunction validateRequiredOutputs(storage, userId, vargs) {\n const xoutputs = [];\n let vout = -1;\n for (const output of vargs.outputs) {\n vout++;\n const xo = {\n ...output,\n vout,\n providedBy: 'you',\n purpose: undefined,\n derivationSuffix: undefined,\n keyOffset: undefined\n };\n xoutputs.push(xo);\n }\n if (storage.commissionSatoshis > 0 && storage.commissionPubKeyHex) {\n vout++;\n const { 
script, keyOffset } = createStorageServiceChargeScript(storage.commissionPubKeyHex);\n xoutputs.push({\n lockingScript: script,\n satoshis: storage.commissionSatoshis,\n outputDescription: 'Storage Service Charge',\n basket: undefined,\n tags: [],\n vout,\n providedBy: 'storage',\n purpose: 'service-charge',\n keyOffset\n });\n }\n return xoutputs;\n}\n/**\n * Verify that we are in posession of validity proof data for any inputs being proposed for a new transaction.\n *\n * `vargs.inputs` is the source of inputs.\n * `vargs.inputBEEF` may include new user supplied validity data.\n * 'vargs.options.trustSelf === 'known'` indicates whether we can rely on the storage database records.\n *\n * If there are no inputs, returns an empty `Beef`.\n *\n * Always pulls rawTx data into first level of validity chains so that parsed transaction data is available\n * and checks input sourceSatoshis as well as filling in input sourceLockingScript.\n *\n * This data may be pruned again before being returned to the user based on `vargs.options.knownTxids`.\n *\n * @param storage\n * @param userId\n * @param vargs\n * @returns {storageBeef} containing only validity proof data for only unknown required inputs.\n * @returns {beef} containing verified validity proof data for all required inputs.\n * @returns {xinputs} extended validated required inputs.\n */\nasync function validateRequiredInputs(storage, userId, vargs) {\n //stampLog(vargs, `start storage verifyInputBeef`)\n const beef = new sdk_1.Beef();\n if (vargs.inputs.length === 0)\n return { storageBeef: beef, beef, xinputs: [] };\n if (vargs.inputBEEF)\n beef.mergeBeef(vargs.inputBEEF);\n const xinputs = vargs.inputs.map((input, vin) => ({\n ...input,\n vin,\n satoshis: -1,\n lockingScript: new sdk_1.Script(),\n output: undefined\n }));\n const trustSelf = vargs.options.trustSelf === 'known';\n const inputTxids = {};\n for (const input of xinputs)\n inputTxids[input.outpoint.txid] = true;\n // Check beef from user that either 
there are no txidOnly entries,\n // or that we can trust storage data and it does indeed vouch\n // for any txidOnly entries\n for (const btx of beef.txs) {\n if (btx.isTxidOnly) {\n if (!trustSelf)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('inputBEEF', `valid and contain complete proof data for ${btx.txid}`);\n if (!inputTxids[btx.txid]) {\n // inputTxids are checked next\n const isKnown = await storage.verifyKnownValidTransaction(btx.txid);\n if (!isKnown)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('inputBEEF', `valid and contain complete proof data for unknown ${btx.txid}`);\n }\n }\n }\n // Make sure that there's an entry for all inputs txid values:\n for (const txid of Object.keys(inputTxids)) {\n let btx = beef.findTxid(txid);\n if (!btx && trustSelf) {\n if (await storage.verifyKnownValidTransaction(txid))\n btx = beef.mergeTxidOnly(txid);\n }\n if (!btx)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('inputBEEF', `valid and contain proof data for possibly known ${txid}`);\n }\n if (!(await beef.verify(await storage.getServices().getChainTracker(), true))) {\n console.log(`verifyInputBeef failed, inputBEEF failed to verify.\\n${beef.toLogString()}\\n`);\n //console.log(`verifyInputBeef failed, inputBEEF failed to verify.\\n${stampLogFormat(vargs.log)}\\n${beef.toLogString()}\\n`)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('inputBEEF', 'valid Beef when factoring options.trustSelf');\n }\n // beef may now be trusted and has a BeefTx for every input txid.\n const storageBeef = beef.clone();\n for (const input of xinputs) {\n const { txid, vout } = input.outpoint;\n const output = (0, index_client_1.verifyOneOrNone)(await storage.findOutputs({ partial: { userId, txid, vout } }));\n if (output) {\n if (output.change) {\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER(`inputs[${input.vin}]`, 'an unmanaged input. 
Change outputs are managed by your wallet.');\n }\n input.output = output;\n if (!Array.isArray(output.lockingScript) || !Number.isInteger(output.satoshis))\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER(`${txid}.${vout}`, 'output with valid lockingScript and satoshis');\n if (!disableDoubleSpendCheckForTest && !output.spendable && !vargs.isNoSend)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER(`${txid}.${vout}`, 'spendable output unless noSend is true');\n // input is spending an existing user output which has an lockingScript\n input.satoshis = (0, index_client_1.verifyNumber)(output.satoshis);\n input.lockingScript = sdk_1.Script.fromBinary((0, index_client_1.asArray)(output.lockingScript));\n }\n else {\n let btx = beef.findTxid(txid);\n if (btx.isTxidOnly) {\n const { rawTx, proven } = await storage.getProvenOrRawTx(txid);\n //stampLog(vargs, `... storage verifyInputBeef getProvenOrRawTx ${txid} ${proven ? 'proven' : rawTx ? 'rawTx' : 'unknown'}`)\n if (!rawTx)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('inputBEEF', `valid and contain proof data for ${txid}`);\n btx = beef.mergeRawTx((0, index_client_1.asArray)(rawTx));\n if (proven)\n beef.mergeBump(new index_client_1.EntityProvenTx(proven).getMerklePath());\n }\n // btx is valid has parsed transaction data.\n if (vout >= btx.tx.outputs.length)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER(`${txid}.${vout}`, 'valid outpoint');\n const so = btx.tx.outputs[vout];\n input.satoshis = (0, index_client_1.verifyTruthy)(so.satoshis);\n input.lockingScript = so.lockingScript;\n }\n }\n return { beef, storageBeef, xinputs };\n}\nasync function validateNoSendChange(storage, userId, vargs, changeBasket) {\n const r = [];\n if (!vargs.isNoSend)\n return [];\n const noSendChange = vargs.options.noSendChange;\n if (noSendChange && noSendChange.length > 0) {\n for (const op of noSendChange) {\n const output = (0, index_client_1.verifyOneOrNone)(await storage.findOutputs({\n partial: { userId, 
txid: op.txid, vout: op.vout }\n }));\n // noSendChange is not marked spendable until sent, may not already be spent, and must have a valid greater than zero satoshis\n if (!output ||\n output.providedBy !== 'storage' ||\n output.purpose !== 'change' ||\n output.spendable === false ||\n Number.isInteger(output.spentBy) ||\n !(0, index_client_1.verifyNumber)(output.satoshis) ||\n output.basketId !== changeBasket.basketId)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('noSendChange outpoint', 'valid');\n if (-1 < r.findIndex(o => o.outputId === output.outputId))\n // noSendChange duplicate OutPoints are not allowed.\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('noSendChange outpoint', 'unique. Duplicates are not allowed.');\n r.push(output);\n }\n }\n return r;\n}\nasync function fundNewTransactionSdk(storage, userId, vargs, ctx) {\n const params = {\n fixedInputs: ctx.xinputs.map(xi => ({\n satoshis: xi.satoshis,\n unlockingScriptLength: xi.unlockingScriptLength\n })),\n fixedOutputs: ctx.xoutputs.map(xo => ({\n satoshis: xo.satoshis,\n lockingScriptLength: xo.lockingScript.length / 2\n })),\n feeModel: ctx.feeModel,\n changeInitialSatoshis: ctx.changeBasket.minimumDesiredUTXOValue,\n changeFirstSatoshis: Math.max(1, Math.round(ctx.changeBasket.minimumDesiredUTXOValue / 4)),\n changeLockingScriptLength: 25,\n changeUnlockingScriptLength: 107,\n targetNetCount: ctx.changeBasket.numberOfDesiredUTXOs - ctx.availableChangeCount,\n randomVals: vargs.randomVals\n };\n const noSendChange = [...ctx.noSendChangeIn];\n const outputs = {};\n const allocateChangeInput = async (targetSatoshis, exactSatoshis) => {\n // noSendChange gets allocated first...typically only one input...just allocate in order...\n if (noSendChange.length > 0) {\n const o = noSendChange.pop();\n outputs[o.outputId] = o;\n // allocate the output in storage, noSendChange is by definition spendable false and part of noSpend transaction batch.\n await storage.updateOutput(o.outputId, {\n 
spendable: false,\n spentBy: ctx.transactionId\n });\n o.spendable = false;\n o.spentBy = ctx.transactionId;\n const r = {\n outputId: o.outputId,\n satoshis: o.satoshis\n };\n return r;\n }\n const basketId = ctx.changeBasket.basketId;\n const o = await storage.allocateChangeInput(userId, basketId, targetSatoshis, exactSatoshis, !vargs.isDelayed, ctx.transactionId);\n if (!o)\n return undefined;\n outputs[o.outputId] = o;\n const r = {\n outputId: o.outputId,\n satoshis: o.satoshis\n };\n return r;\n };\n const releaseChangeInput = async (outputId) => {\n const nsco = ctx.noSendChangeIn.find(o => o.outputId === outputId);\n if (nsco) {\n noSendChange.push(nsco);\n return;\n }\n await storage.updateOutput(outputId, {\n spendable: true,\n spentBy: undefined\n });\n };\n const gcr = await (0, generateChange_1.generateChangeSdk)(params, allocateChangeInput, releaseChangeInput);\n const nextRandomVal = () => {\n let val = 0;\n if (!vargs.randomVals || vargs.randomVals.length === 0) {\n val = Math.random();\n }\n else {\n val = vargs.randomVals.shift() || 0;\n vargs.randomVals.push(val);\n }\n return val;\n };\n /**\n * @returns a random integer betweenn min and max, inclussive.\n */\n const rand = (min, max) => {\n if (max < min)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('max', `less than min (${min}). 
max is (${max})`);\n return Math.floor(nextRandomVal() * (max - min + 1) + min);\n };\n const randomDerivation = (count) => {\n let val = [];\n if (!vargs.randomVals || vargs.randomVals.length === 0) {\n val = (0, sdk_1.Random)(count);\n }\n else {\n for (let i = 0; i < count; i++)\n val.push(rand(0, 255));\n }\n return sdk_1.Utils.toBase64(val);\n };\n // Generate a derivation prefix for the payment\n const derivationPrefix = randomDerivation(16);\n const r = {\n maxPossibleSatoshisAdjustment: gcr.maxPossibleSatoshisAdjustment,\n allocatedChange: gcr.allocatedChangeInputs.map(i => outputs[i.outputId]),\n changeOutputs: gcr.changeOutputs.map((o, i) => ({\n // what we knnow now and can insert into the database for this new transaction's change output\n created_at: new Date(),\n updated_at: new Date(),\n outputId: 0,\n userId,\n transactionId: ctx.transactionId,\n vout: params.fixedOutputs.length + i,\n satoshis: o.satoshis,\n basketId: ctx.changeBasket.basketId,\n spendable: false,\n change: true,\n type: 'P2PKH',\n derivationPrefix,\n derivationSuffix: randomDerivation(16),\n providedBy: 'storage',\n purpose: 'change',\n customInstructions: undefined,\n senderIdentityKey: undefined,\n outputDescription: '',\n // what will be known when transaction is signed\n txid: undefined,\n lockingScript: undefined,\n // when this output gets spent\n spentBy: undefined,\n spendingDescription: undefined\n })),\n derivationPrefix\n };\n return r;\n}\n/**\n * Avoid returning any known raw transaction data by converting any known transaction\n * in the `beef` to txidOnly.\n * @returns undefined if `vargs.options.returnTXIDOnly` or trimmed `Beef`\n */\nfunction trimInputBeef(beef, vargs) {\n if (vargs.options.returnTXIDOnly)\n return undefined;\n const knownTxids = {};\n for (const txid of vargs.options.knownTxids)\n knownTxids[txid] = true;\n for (const txid of beef.txs.map(btx => btx.txid))\n if (knownTxids[txid])\n beef.makeTxidOnly(txid);\n return beef.toBinary();\n}\nasync 
function mergeAllocatedChangeBeefs(storage, userId, vargs, allocatedChange, beef) {\n const options = {\n trustSelf: undefined,\n knownTxids: vargs.options.knownTxids,\n mergeToBeef: beef,\n ignoreStorage: false,\n ignoreServices: true,\n ignoreNewProven: false,\n minProofLevel: undefined\n };\n if (vargs.options.returnTXIDOnly)\n return undefined;\n for (const o of allocatedChange) {\n if (!beef.findTxid(o.txid) && !vargs.options.knownTxids.find(txid => txid === o.txid)) {\n await storage.getBeefForTransaction(o.txid, options);\n }\n }\n return trimInputBeef(beef, vargs);\n}\nfunction keyOffsetToHashedSecret(pub, keyOffset) {\n let offset;\n if (keyOffset !== undefined && typeof keyOffset === 'string') {\n if (keyOffset.length === 64)\n offset = sdk_1.PrivateKey.fromString(keyOffset, 'hex');\n else\n offset = sdk_1.PrivateKey.fromWif(keyOffset);\n }\n else {\n offset = sdk_1.PrivateKey.fromRandom();\n keyOffset = offset.toWif();\n }\n const sharedSecret = pub.mul(offset).encode(true, undefined);\n const hashedSecret = (0, index_client_1.sha256Hash)(sharedSecret);\n return { hashedSecret: new sdk_1.BigNumber(hashedSecret), keyOffset };\n}\nfunction offsetPubKey(pubKey, keyOffset) {\n const pub = sdk_1.PublicKey.fromString(pubKey);\n const r = keyOffsetToHashedSecret(pub, keyOffset);\n // The hashed secret is multiplied by the generator point.\n const point = new sdk_1.Curve().g.mul(r.hashedSecret);\n // The resulting point is added to the recipient public key.\n const offsetPubKey = new sdk_1.PublicKey(pub.add(point));\n return { offsetPubKey: offsetPubKey.toString(), keyOffset: r.keyOffset };\n}\nfunction lockScriptWithKeyOffsetFromPubKey(pubKey, keyOffset) {\n const r = offsetPubKey(pubKey, keyOffset);\n const offsetPub = sdk_1.PublicKey.fromString(r.offsetPubKey);\n const hash = offsetPub.toHash();\n const script = new sdk_1.P2PKH().lock(hash).toHex();\n return { script, keyOffset: r.keyOffset };\n}\nfunction createStorageServiceChargeScript(pubKeyHex) {\n 
return lockScriptWithKeyOffsetFromPubKey(pubKeyHex);\n}\n//# sourceMappingURL=createAction.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/createAction.js?\n}"); /***/ }), @@ -3696,7 +3531,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.maxPossibleSatoshis = void 0;\nexports.generateChangeSdk = generateChangeSdk;\nexports.validateGenerateChangeSdkResult = validateGenerateChangeSdkResult;\nexports.validateGenerateChangeSdkParams = validateGenerateChangeSdkParams;\nexports.generateChangeSdkMakeStorage = generateChangeSdkMakeStorage;\nconst validationHelpers_1 = __webpack_require__(/*! ../../sdk/validationHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/validationHelpers.js\");\nconst WalletError_1 = __webpack_require__(/*! ../../sdk/WalletError */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WalletError.js\");\nconst WERR_errors_1 = __webpack_require__(/*! ../../sdk/WERR_errors */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WERR_errors.js\");\nconst StorageProvider_1 = __webpack_require__(/*! ../StorageProvider */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/StorageProvider.js\");\nconst utils_1 = __webpack_require__(/*! 
./utils */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/utils.js\");\n/**\n * An output of this satoshis amount will be adjusted to the largest fundable amount.\n */\nexports.maxPossibleSatoshis = 2099999999999999;\n/**\n * Simplifications:\n * - only support one change type with fixed length scripts.\n * - only support satsPerKb fee model.\n *\n * Confirms for each availbleChange output that it remains available as they are allocated and selects alternate if not.\n *\n * @param params\n * @returns\n */\nasync function generateChangeSdk(params, allocateChangeInput, releaseChangeInput) {\n if (params.noLogging === false)\n logGenerateChangeSdkParams(params);\n const r = {\n allocatedChangeInputs: [],\n changeOutputs: [],\n size: 0,\n fee: 0,\n satsPerKb: 0\n };\n // eslint-disable-next-line no-useless-catch\n try {\n const vgcpr = validateGenerateChangeSdkParams(params);\n const satsPerKb = params.feeModel.value || 0;\n const randomVals = [...(params.randomVals || [])];\n const randomValsUsed = [];\n const nextRandomVal = () => {\n let val = 0;\n if (!randomVals || randomVals.length === 0) {\n val = Math.random();\n }\n else {\n val = randomVals.shift() || 0;\n randomVals.push(val);\n }\n // Capture random sequence used if not supplied\n randomValsUsed.push(val);\n return val;\n };\n /**\n * @returns a random integer betweenn min and max, inclussive.\n */\n const rand = (min, max) => {\n if (max < min)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('max', `less than min (${min}). 
max is (${max})`);\n return Math.floor(nextRandomVal() * (max - min + 1) + min);\n };\n const fixedInputs = params.fixedInputs;\n const fixedOutputs = params.fixedOutputs;\n /**\n * @returns sum of transaction fixedInputs satoshis and fundingInputs satoshis\n */\n const funding = () => {\n return (fixedInputs.reduce((a, e) => a + e.satoshis, 0) + r.allocatedChangeInputs.reduce((a, e) => a + e.satoshis, 0));\n };\n /**\n * @returns sum of transaction fixedOutputs satoshis\n */\n const spending = () => {\n return fixedOutputs.reduce((a, e) => a + e.satoshis, 0);\n };\n /**\n * @returns sum of transaction changeOutputs satoshis\n */\n const change = () => {\n return r.changeOutputs.reduce((a, e) => a + e.satoshis, 0);\n };\n const fee = () => funding() - spending() - change();\n const size = (addedChangeInputs, addedChangeOutputs) => {\n const inputScriptLengths = [\n ...fixedInputs.map(x => x.unlockingScriptLength),\n ...Array(r.allocatedChangeInputs.length + (addedChangeInputs || 0)).fill(params.changeUnlockingScriptLength)\n ];\n const outputScriptLengths = [\n ...fixedOutputs.map(x => x.lockingScriptLength),\n ...Array(r.changeOutputs.length + (addedChangeOutputs || 0)).fill(params.changeLockingScriptLength)\n ];\n const size = (0, utils_1.transactionSize)(inputScriptLengths, outputScriptLengths);\n return size;\n };\n /**\n * @returns the target fee required for the transaction as currently configured under feeModel.\n */\n const feeTarget = (addedChangeInputs, addedChangeOutputs) => {\n const fee = Math.ceil((size(addedChangeInputs, addedChangeOutputs) / 1000) * satsPerKb);\n return fee;\n };\n /**\n * @returns the current excess fee for the transaction as currently configured.\n *\n * This is funding() - spending() - change() - feeTarget()\n *\n * The goal is an excess fee of zero.\n *\n * A positive value is okay if the cost of an additional change output is greater.\n *\n * A negative value means the transaction is under funded, or over spends, and may be 
rejected.\n */\n const feeExcess = (addedChangeInputs, addedChangeOutputs) => {\n const fe = funding() - spending() - change() - feeTarget(addedChangeInputs, addedChangeOutputs);\n if (!addedChangeInputs && !addedChangeOutputs)\n feeExcessNow = fe;\n return fe;\n };\n // The most recent feeExcess()\n let feeExcessNow = 0;\n feeExcess();\n const hasTargetNetCount = params.targetNetCount !== undefined;\n const targetNetCount = params.targetNetCount || 0;\n // current net change in count of change outputs\n const netChangeCount = () => {\n return r.changeOutputs.length - r.allocatedChangeInputs.length;\n };\n const addOutputToBalanceNewInput = () => {\n if (!hasTargetNetCount)\n return false;\n return netChangeCount() - 1 < targetNetCount;\n };\n const releaseAllocatedChangeInputs = async () => {\n while (r.allocatedChangeInputs.length > 0) {\n const i = r.allocatedChangeInputs.pop();\n if (i) {\n await releaseChangeInput(i.outputId);\n }\n }\n feeExcessNow = feeExcess();\n };\n // If we'd like to have more change outputs create them now.\n // They may be removed if it turns out we can't fund them.\n while ((hasTargetNetCount && targetNetCount > netChangeCount()) ||\n (r.changeOutputs.length === 0 && feeExcess() > 0)) {\n r.changeOutputs.push({\n satoshis: r.changeOutputs.length === 0 ? params.changeFirstSatoshis : params.changeInitialSatoshis,\n lockingScriptLength: params.changeLockingScriptLength\n });\n }\n const fundTransaction = async () => {\n let removingOutputs = false;\n const attemptToFundTransaction = async () => {\n if (feeExcess() > 0)\n return true;\n let exactSatoshis = undefined;\n if (!hasTargetNetCount && r.changeOutputs.length === 0) {\n exactSatoshis = -feeExcess(1);\n }\n const ao = addOutputToBalanceNewInput() ? 1 : 0;\n const targetSatoshis = -feeExcess(1, ao) + (ao === 1 ? 
2 * params.changeInitialSatoshis : 0);\n const allocatedChangeInput = await allocateChangeInput(targetSatoshis, exactSatoshis);\n if (!allocatedChangeInput) {\n // Unable to add another funding change input\n return false;\n }\n r.allocatedChangeInputs.push(allocatedChangeInput);\n if (!removingOutputs && feeExcess() > 0) {\n if (ao == 1 || r.changeOutputs.length === 0) {\n r.changeOutputs.push({\n satoshis: Math.min(feeExcess(), r.changeOutputs.length === 0 ? params.changeFirstSatoshis : params.changeInitialSatoshis),\n lockingScriptLength: params.changeLockingScriptLength\n });\n }\n }\n return true;\n };\n for (;;) {\n // This is the starvation loop, drops change outputs one at a time if unable to fund them...\n await releaseAllocatedChangeInputs();\n while (feeExcess() < 0) {\n // This is the funding loop, add one change input at a time...\n const ok = await attemptToFundTransaction();\n if (!ok)\n break;\n }\n // Done if blanced overbalanced or impossible (all funding applied, all change outputs removed).\n if (feeExcess() >= 0 || r.changeOutputs.length === 0)\n break;\n removingOutputs = true;\n while (r.changeOutputs.length > 0 && feeExcess() < 0) {\n r.changeOutputs.pop();\n }\n if (feeExcess() < 0)\n // Not enough available funding even if no change outputs\n break;\n // At this point we have a funded transaction, but there may be change outputs that are each costing as change input,\n // resulting in pointless churn of change outputs.\n // And remove change inputs that funded only a single change output (along with that output)...\n const changeInputs = [...r.allocatedChangeInputs];\n while (changeInputs.length > 1 && r.changeOutputs.length > 1) {\n const lastOutput = r.changeOutputs.slice(-1)[0];\n const i = changeInputs.findIndex(ci => ci.satoshis <= lastOutput.satoshis);\n if (i < 0)\n break;\n r.changeOutputs.pop();\n changeInputs.splice(i, 1);\n }\n // and try again...\n }\n };\n /**\n * Add funding to achieve a non-negative feeExcess value, if 
necessary.\n */\n await fundTransaction();\n if (feeExcess() < 0 && vgcpr.hasMaxPossibleOutput !== undefined) {\n // Reduce the fixed output with satoshis of maxPossibleSatoshis to what will just fund the transaction...\n if (fixedOutputs[vgcpr.hasMaxPossibleOutput].satoshis !== exports.maxPossibleSatoshis)\n throw new WERR_errors_1.WERR_INTERNAL();\n fixedOutputs[vgcpr.hasMaxPossibleOutput].satoshis += feeExcess();\n r.maxPossibleSatoshisAdjustment = {\n fixedOutputIndex: vgcpr.hasMaxPossibleOutput,\n satoshis: fixedOutputs[vgcpr.hasMaxPossibleOutput].satoshis\n };\n }\n /**\n * Trigger an account funding event if we don't have enough to cover this transaction.\n */\n if (feeExcess() < 0) {\n await releaseAllocatedChangeInputs();\n throw new WERR_errors_1.WERR_INSUFFICIENT_FUNDS(spending() + feeTarget(), -feeExcessNow);\n }\n /**\n * If needed, seek funding to avoid overspending on fees without a change output to recapture it.\n */\n if (r.changeOutputs.length === 0 && feeExcessNow > 0) {\n await releaseAllocatedChangeInputs();\n throw new WERR_errors_1.WERR_INSUFFICIENT_FUNDS(spending() + feeTarget(), params.changeFirstSatoshis);\n }\n /**\n * Distribute the excess fees across the changeOutputs added.\n */\n while (r.changeOutputs.length > 0 && feeExcessNow > 0) {\n if (r.changeOutputs.length === 1) {\n r.changeOutputs[0].satoshis += feeExcessNow;\n feeExcessNow = 0;\n }\n else if (r.changeOutputs[0].satoshis < params.changeInitialSatoshis) {\n const sats = Math.min(feeExcessNow, params.changeInitialSatoshis - r.changeOutputs[0].satoshis);\n feeExcessNow -= sats;\n r.changeOutputs[0].satoshis += sats;\n }\n else {\n // Distribute a random percentage between 25% and 50% but at least one satoshi\n const sats = Math.max(1, Math.floor((rand(2500, 5000) / 10000) * feeExcessNow));\n feeExcessNow -= sats;\n const index = rand(0, r.changeOutputs.length - 1);\n r.changeOutputs[index].satoshis += sats;\n }\n }\n r.size = size();\n ((r.fee = fee()), (r.satsPerKb = 
satsPerKb));\n const { ok, log } = validateGenerateChangeSdkResult(params, r);\n if (!ok) {\n throw new WERR_errors_1.WERR_INTERNAL(`generateChangeSdk error: ${log}`);\n }\n if (r.allocatedChangeInputs.length > 4 && r.changeOutputs.length > 4) {\n console.log('generateChangeSdk_Capture_too_many_ins_and_outs');\n logGenerateChangeSdkParams(params);\n }\n return r;\n }\n catch (eu) {\n const e = WalletError_1.WalletError.fromUnknown(eu);\n if (e.code === 'WERR_INSUFFICIENT_FUNDS')\n throw eu;\n // Capture the params in cloud run log which has a 100k text length limit per line.\n // logGenerateChangeSdkParams(params, eu)\n throw eu;\n }\n}\nfunction validateGenerateChangeSdkResult(params, r) {\n let ok = true;\n let log = '';\n const sumIn = params.fixedInputs.reduce((a, e) => a + e.satoshis, 0) + r.allocatedChangeInputs.reduce((a, e) => a + e.satoshis, 0);\n const sumOut = params.fixedOutputs.reduce((a, e) => a + e.satoshis, 0) + r.changeOutputs.reduce((a, e) => a + e.satoshis, 0);\n if (r.fee && Number.isInteger(r.fee) && r.fee < 0) {\n log += `basic fee error ${r.fee};`;\n ok = false;\n }\n const feePaid = sumIn - sumOut;\n if (feePaid !== r.fee) {\n log += `exact fee error ${feePaid} !== ${r.fee};`;\n ok = false;\n }\n const feeRequired = Math.ceil(((r.size || 0) / 1000) * (r.satsPerKb || 0));\n if (feeRequired !== r.fee) {\n log += `required fee error ${feeRequired} !== ${r.fee};`;\n ok = false;\n }\n return { ok, log };\n}\nfunction logGenerateChangeSdkParams(params, eu) {\n let s = JSON.stringify(params);\n console.log(`generateChangeSdk params length ${s.length}${eu ? 
` error: ${eu}` : ''}`);\n let i = -1;\n const maxlen = 99900;\n for (;;) {\n i++;\n console.log(`generateChangeSdk params ${i} XXX${s.slice(0, maxlen)}XXX`);\n s = s.slice(maxlen);\n if (!s || i > 100)\n break;\n }\n}\nfunction validateGenerateChangeSdkParams(params) {\n if (!Array.isArray(params.fixedInputs))\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('fixedInputs', 'an array of objects');\n const r = {};\n params.fixedInputs.forEach((x, i) => {\n (0, validationHelpers_1.validateSatoshis)(x.satoshis, `fixedInputs[${i}].satoshis`);\n (0, validationHelpers_1.validateInteger)(x.unlockingScriptLength, `fixedInputs[${i}].unlockingScriptLength`, undefined, 0);\n });\n if (!Array.isArray(params.fixedOutputs))\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('fixedOutputs', 'an array of objects');\n params.fixedOutputs.forEach((x, i) => {\n (0, validationHelpers_1.validateSatoshis)(x.satoshis, `fixedOutputs[${i}].satoshis`);\n (0, validationHelpers_1.validateInteger)(x.lockingScriptLength, `fixedOutputs[${i}].lockingScriptLength`, undefined, 0);\n if (x.satoshis === exports.maxPossibleSatoshis) {\n if (r.hasMaxPossibleOutput !== undefined)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER(`fixedOutputs[${i}].satoshis`, `valid satoshis amount. 
Only one 'maxPossibleSatoshis' output allowed.`);\n r.hasMaxPossibleOutput = i;\n }\n });\n params.feeModel = (0, StorageProvider_1.validateStorageFeeModel)(params.feeModel);\n if (params.feeModel.model !== 'sat/kb')\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('feeModel.model', `'sat/kb'`);\n (0, validationHelpers_1.validateOptionalInteger)(params.targetNetCount, `targetNetCount`);\n (0, validationHelpers_1.validateSatoshis)(params.changeFirstSatoshis, 'changeFirstSatoshis', 1);\n (0, validationHelpers_1.validateSatoshis)(params.changeInitialSatoshis, 'changeInitialSatoshis', 1);\n (0, validationHelpers_1.validateInteger)(params.changeLockingScriptLength, `changeLockingScriptLength`);\n (0, validationHelpers_1.validateInteger)(params.changeUnlockingScriptLength, `changeUnlockingScriptLength`);\n return r;\n}\nfunction generateChangeSdkMakeStorage(availableChange) {\n const change = availableChange.map(c => ({\n ...c,\n spendable: true\n }));\n change.sort((a, b) => a.satoshis < b.satoshis\n ? -1\n : a.satoshis > b.satoshis\n ? 1\n : a.outputId < b.outputId\n ? -1\n : a.outputId > b.outputId\n ? 
1\n : 0);\n let log = '';\n for (const c of change)\n log += `change ${c.satoshis} ${c.outputId}\\n`;\n const getLog = () => log;\n const allocate = (c) => {\n log += ` -> ${c.satoshis} sats, id ${c.outputId}\\n`;\n c.spendable = false;\n return c;\n };\n const allocateChangeInput = async (targetSatoshis, exactSatoshis) => {\n log += `allocate target ${targetSatoshis} exact ${exactSatoshis}`;\n if (exactSatoshis !== undefined) {\n const exact = change.find(c => c.spendable && c.satoshis === exactSatoshis);\n if (exact)\n return allocate(exact);\n }\n const over = change.find(c => c.spendable && c.satoshis >= targetSatoshis);\n if (over)\n return allocate(over);\n let under = undefined;\n for (let i = change.length - 1; i >= 0; i--) {\n if (change[i].spendable) {\n under = change[i];\n break;\n }\n }\n if (under)\n return allocate(under);\n log += `\\n`;\n return undefined;\n };\n const releaseChangeInput = async (outputId) => {\n log += `release id ${outputId}\\n`;\n const c = change.find(x => x.outputId === outputId);\n if (!c)\n throw new WERR_errors_1.WERR_INTERNAL(`unknown outputId ${outputId}`);\n if (c.spendable)\n throw new WERR_errors_1.WERR_INTERNAL(`release of spendable outputId ${outputId}`);\n c.spendable = true;\n };\n return { allocateChangeInput, releaseChangeInput, getLog };\n}\n//# sourceMappingURL=generateChange.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/generateChange.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.maxPossibleSatoshis = void 0;\nexports.generateChangeSdk = generateChangeSdk;\nexports.validateGenerateChangeSdkResult = validateGenerateChangeSdkResult;\nexports.validateGenerateChangeSdkParams = validateGenerateChangeSdkParams;\nexports.generateChangeSdkMakeStorage = generateChangeSdkMakeStorage;\nconst index_client_1 = __webpack_require__(/*! 
../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nconst utils_1 = __webpack_require__(/*! ./utils */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/utils.js\");\n/**\n * An output of this satoshis amount will be adjusted to the largest fundable amount.\n */\nexports.maxPossibleSatoshis = 2099999999999999;\n/**\n * Simplifications:\n * - only support one change type with fixed length scripts.\n * - only support satsPerKb fee model.\n *\n * Confirms for each availbleChange output that it remains available as they are allocated and selects alternate if not.\n *\n * @param params\n * @returns\n */\nasync function generateChangeSdk(params, allocateChangeInput, releaseChangeInput) {\n if (params.noLogging === false)\n logGenerateChangeSdkParams(params);\n const r = {\n allocatedChangeInputs: [],\n changeOutputs: [],\n size: 0,\n fee: 0,\n satsPerKb: 0\n };\n // eslint-disable-next-line no-useless-catch\n try {\n const vgcpr = validateGenerateChangeSdkParams(params);\n const satsPerKb = params.feeModel.value || 0;\n const randomVals = [...(params.randomVals || [])];\n const randomValsUsed = [];\n const nextRandomVal = () => {\n let val = 0;\n if (!randomVals || randomVals.length === 0) {\n val = Math.random();\n }\n else {\n val = randomVals.shift() || 0;\n randomVals.push(val);\n }\n // Capture random sequence used if not supplied\n randomValsUsed.push(val);\n return val;\n };\n /**\n * @returns a random integer betweenn min and max, inclussive.\n */\n const rand = (min, max) => {\n if (max < min)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('max', `less than min (${min}). 
max is (${max})`);\n return Math.floor(nextRandomVal() * (max - min + 1) + min);\n };\n const fixedInputs = params.fixedInputs;\n const fixedOutputs = params.fixedOutputs;\n /**\n * @returns sum of transaction fixedInputs satoshis and fundingInputs satoshis\n */\n const funding = () => {\n return (fixedInputs.reduce((a, e) => a + e.satoshis, 0) + r.allocatedChangeInputs.reduce((a, e) => a + e.satoshis, 0));\n };\n /**\n * @returns sum of transaction fixedOutputs satoshis\n */\n const spending = () => {\n return fixedOutputs.reduce((a, e) => a + e.satoshis, 0);\n };\n /**\n * @returns sum of transaction changeOutputs satoshis\n */\n const change = () => {\n return r.changeOutputs.reduce((a, e) => a + e.satoshis, 0);\n };\n const fee = () => funding() - spending() - change();\n const size = (addedChangeInputs, addedChangeOutputs) => {\n const inputScriptLengths = [\n ...fixedInputs.map(x => x.unlockingScriptLength),\n ...Array(r.allocatedChangeInputs.length + (addedChangeInputs || 0)).fill(params.changeUnlockingScriptLength)\n ];\n const outputScriptLengths = [\n ...fixedOutputs.map(x => x.lockingScriptLength),\n ...Array(r.changeOutputs.length + (addedChangeOutputs || 0)).fill(params.changeLockingScriptLength)\n ];\n const size = (0, utils_1.transactionSize)(inputScriptLengths, outputScriptLengths);\n return size;\n };\n /**\n * @returns the target fee required for the transaction as currently configured under feeModel.\n */\n const feeTarget = (addedChangeInputs, addedChangeOutputs) => {\n const fee = Math.ceil((size(addedChangeInputs, addedChangeOutputs) / 1000) * satsPerKb);\n return fee;\n };\n /**\n * @returns the current excess fee for the transaction as currently configured.\n *\n * This is funding() - spending() - change() - feeTarget()\n *\n * The goal is an excess fee of zero.\n *\n * A positive value is okay if the cost of an additional change output is greater.\n *\n * A negative value means the transaction is under funded, or over spends, and may be 
rejected.\n */\n const feeExcess = (addedChangeInputs, addedChangeOutputs) => {\n const fe = funding() - spending() - change() - feeTarget(addedChangeInputs, addedChangeOutputs);\n if (!addedChangeInputs && !addedChangeOutputs)\n feeExcessNow = fe;\n return fe;\n };\n // The most recent feeExcess()\n let feeExcessNow = 0;\n feeExcess();\n const hasTargetNetCount = params.targetNetCount !== undefined;\n const targetNetCount = params.targetNetCount || 0;\n // current net change in count of change outputs\n const netChangeCount = () => {\n return r.changeOutputs.length - r.allocatedChangeInputs.length;\n };\n const addOutputToBalanceNewInput = () => {\n if (!hasTargetNetCount)\n return false;\n return netChangeCount() - 1 < targetNetCount;\n };\n const releaseAllocatedChangeInputs = async () => {\n while (r.allocatedChangeInputs.length > 0) {\n const i = r.allocatedChangeInputs.pop();\n if (i) {\n await releaseChangeInput(i.outputId);\n }\n }\n feeExcessNow = feeExcess();\n };\n // If we'd like to have more change outputs create them now.\n // They may be removed if it turns out we can't fund them.\n while ((hasTargetNetCount && targetNetCount > netChangeCount()) ||\n (r.changeOutputs.length === 0 && feeExcess() > 0)) {\n r.changeOutputs.push({\n satoshis: r.changeOutputs.length === 0 ? params.changeFirstSatoshis : params.changeInitialSatoshis,\n lockingScriptLength: params.changeLockingScriptLength\n });\n }\n const fundTransaction = async () => {\n let removingOutputs = false;\n const attemptToFundTransaction = async () => {\n if (feeExcess() > 0)\n return true;\n let exactSatoshis = undefined;\n if (!hasTargetNetCount && r.changeOutputs.length === 0) {\n exactSatoshis = -feeExcess(1);\n }\n const ao = addOutputToBalanceNewInput() ? 1 : 0;\n const targetSatoshis = -feeExcess(1, ao) + (ao === 1 ? 
2 * params.changeInitialSatoshis : 0);\n const allocatedChangeInput = await allocateChangeInput(targetSatoshis, exactSatoshis);\n if (!allocatedChangeInput) {\n // Unable to add another funding change input\n return false;\n }\n r.allocatedChangeInputs.push(allocatedChangeInput);\n if (!removingOutputs && feeExcess() > 0) {\n if (ao == 1 || r.changeOutputs.length === 0) {\n r.changeOutputs.push({\n satoshis: Math.min(feeExcess(), r.changeOutputs.length === 0 ? params.changeFirstSatoshis : params.changeInitialSatoshis),\n lockingScriptLength: params.changeLockingScriptLength\n });\n }\n }\n return true;\n };\n for (;;) {\n // This is the starvation loop, drops change outputs one at a time if unable to fund them...\n await releaseAllocatedChangeInputs();\n while (feeExcess() < 0) {\n // This is the funding loop, add one change input at a time...\n const ok = await attemptToFundTransaction();\n if (!ok)\n break;\n }\n // Done if blanced overbalanced or impossible (all funding applied, all change outputs removed).\n if (feeExcess() >= 0 || r.changeOutputs.length === 0)\n break;\n removingOutputs = true;\n while (r.changeOutputs.length > 0 && feeExcess() < 0) {\n r.changeOutputs.pop();\n }\n if (feeExcess() < 0)\n // Not enough available funding even if no change outputs\n break;\n // At this point we have a funded transaction, but there may be change outputs that are each costing as change input,\n // resulting in pointless churn of change outputs.\n // And remove change inputs that funded only a single change output (along with that output)...\n const changeInputs = [...r.allocatedChangeInputs];\n while (changeInputs.length > 1 && r.changeOutputs.length > 1) {\n const lastOutput = r.changeOutputs.slice(-1)[0];\n const i = changeInputs.findIndex(ci => ci.satoshis <= lastOutput.satoshis);\n if (i < 0)\n break;\n r.changeOutputs.pop();\n changeInputs.splice(i, 1);\n }\n // and try again...\n }\n };\n /**\n * Add funding to achieve a non-negative feeExcess value, if 
necessary.\n */\n await fundTransaction();\n if (feeExcess() < 0 && vgcpr.hasMaxPossibleOutput !== undefined) {\n // Reduce the fixed output with satoshis of maxPossibleSatoshis to what will just fund the transaction...\n if (fixedOutputs[vgcpr.hasMaxPossibleOutput].satoshis !== exports.maxPossibleSatoshis)\n throw new index_client_1.sdk.WERR_INTERNAL();\n fixedOutputs[vgcpr.hasMaxPossibleOutput].satoshis += feeExcess();\n r.maxPossibleSatoshisAdjustment = {\n fixedOutputIndex: vgcpr.hasMaxPossibleOutput,\n satoshis: fixedOutputs[vgcpr.hasMaxPossibleOutput].satoshis\n };\n }\n /**\n * Trigger an account funding event if we don't have enough to cover this transaction.\n */\n if (feeExcess() < 0) {\n await releaseAllocatedChangeInputs();\n throw new index_client_1.sdk.WERR_INSUFFICIENT_FUNDS(spending() + feeTarget(), -feeExcessNow);\n }\n /**\n * If needed, seek funding to avoid overspending on fees without a change output to recapture it.\n */\n if (r.changeOutputs.length === 0 && feeExcessNow > 0) {\n await releaseAllocatedChangeInputs();\n throw new index_client_1.sdk.WERR_INSUFFICIENT_FUNDS(spending() + feeTarget(), params.changeFirstSatoshis);\n }\n /**\n * Distribute the excess fees across the changeOutputs added.\n */\n while (r.changeOutputs.length > 0 && feeExcessNow > 0) {\n if (r.changeOutputs.length === 1) {\n r.changeOutputs[0].satoshis += feeExcessNow;\n feeExcessNow = 0;\n }\n else if (r.changeOutputs[0].satoshis < params.changeInitialSatoshis) {\n const sats = Math.min(feeExcessNow, params.changeInitialSatoshis - r.changeOutputs[0].satoshis);\n feeExcessNow -= sats;\n r.changeOutputs[0].satoshis += sats;\n }\n else {\n // Distribute a random percentage between 25% and 50% but at least one satoshi\n const sats = Math.max(1, Math.floor((rand(2500, 5000) / 10000) * feeExcessNow));\n feeExcessNow -= sats;\n const index = rand(0, r.changeOutputs.length - 1);\n r.changeOutputs[index].satoshis += sats;\n }\n }\n r.size = size();\n (r.fee = fee()), 
(r.satsPerKb = satsPerKb);\n const { ok, log } = validateGenerateChangeSdkResult(params, r);\n if (!ok) {\n throw new index_client_1.sdk.WERR_INTERNAL(`generateChangeSdk error: ${log}`);\n }\n if (r.allocatedChangeInputs.length > 4 && r.changeOutputs.length > 4) {\n console.log('generateChangeSdk_Capture_too_many_ins_and_outs');\n logGenerateChangeSdkParams(params);\n }\n return r;\n }\n catch (eu) {\n const e = index_client_1.sdk.WalletError.fromUnknown(eu);\n if (e.code === 'WERR_INSUFFICIENT_FUNDS')\n throw eu;\n // Capture the params in cloud run log which has a 100k text length limit per line.\n // logGenerateChangeSdkParams(params, eu)\n throw eu;\n }\n}\nfunction validateGenerateChangeSdkResult(params, r) {\n let ok = true;\n let log = '';\n const sumIn = params.fixedInputs.reduce((a, e) => a + e.satoshis, 0) + r.allocatedChangeInputs.reduce((a, e) => a + e.satoshis, 0);\n const sumOut = params.fixedOutputs.reduce((a, e) => a + e.satoshis, 0) + r.changeOutputs.reduce((a, e) => a + e.satoshis, 0);\n if (r.fee && Number.isInteger(r.fee) && r.fee < 0) {\n log += `basic fee error ${r.fee};`;\n ok = false;\n }\n const feePaid = sumIn - sumOut;\n if (feePaid !== r.fee) {\n log += `exact fee error ${feePaid} !== ${r.fee};`;\n ok = false;\n }\n const feeRequired = Math.ceil(((r.size || 0) / 1000) * (r.satsPerKb || 0));\n if (feeRequired !== r.fee) {\n log += `required fee error ${feeRequired} !== ${r.fee};`;\n ok = false;\n }\n return { ok, log };\n}\nfunction logGenerateChangeSdkParams(params, eu) {\n let s = JSON.stringify(params);\n console.log(`generateChangeSdk params length ${s.length}${eu ? 
` error: ${eu}` : ''}`);\n let i = -1;\n const maxlen = 99900;\n for (;;) {\n i++;\n console.log(`generateChangeSdk params ${i} XXX${s.slice(0, maxlen)}XXX`);\n s = s.slice(maxlen);\n if (!s || i > 100)\n break;\n }\n}\nfunction validateGenerateChangeSdkParams(params) {\n if (!Array.isArray(params.fixedInputs))\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('fixedInputs', 'an array of objects');\n const r = {};\n params.fixedInputs.forEach((x, i) => {\n index_client_1.sdk.validateSatoshis(x.satoshis, `fixedInputs[${i}].satoshis`);\n index_client_1.sdk.validateInteger(x.unlockingScriptLength, `fixedInputs[${i}].unlockingScriptLength`, undefined, 0);\n });\n if (!Array.isArray(params.fixedOutputs))\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('fixedOutputs', 'an array of objects');\n params.fixedOutputs.forEach((x, i) => {\n index_client_1.sdk.validateSatoshis(x.satoshis, `fixedOutputs[${i}].satoshis`);\n index_client_1.sdk.validateInteger(x.lockingScriptLength, `fixedOutputs[${i}].lockingScriptLength`, undefined, 0);\n if (x.satoshis === exports.maxPossibleSatoshis) {\n if (r.hasMaxPossibleOutput !== undefined)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER(`fixedOutputs[${i}].satoshis`, `valid satoshis amount. 
Only one 'maxPossibleSatoshis' output allowed.`);\n r.hasMaxPossibleOutput = i;\n }\n });\n params.feeModel = (0, index_client_1.validateStorageFeeModel)(params.feeModel);\n if (params.feeModel.model !== 'sat/kb')\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('feeModel.model', `'sat/kb'`);\n index_client_1.sdk.validateOptionalInteger(params.targetNetCount, `targetNetCount`);\n index_client_1.sdk.validateSatoshis(params.changeFirstSatoshis, 'changeFirstSatoshis', 1);\n index_client_1.sdk.validateSatoshis(params.changeInitialSatoshis, 'changeInitialSatoshis', 1);\n index_client_1.sdk.validateInteger(params.changeLockingScriptLength, `changeLockingScriptLength`);\n index_client_1.sdk.validateInteger(params.changeUnlockingScriptLength, `changeUnlockingScriptLength`);\n return r;\n}\nfunction generateChangeSdkMakeStorage(availableChange) {\n const change = availableChange.map(c => ({\n ...c,\n spendable: true\n }));\n change.sort((a, b) => a.satoshis < b.satoshis\n ? -1\n : a.satoshis > b.satoshis\n ? 1\n : a.outputId < b.outputId\n ? -1\n : a.outputId > b.outputId\n ? 
1\n : 0);\n let log = '';\n for (const c of change)\n log += `change ${c.satoshis} ${c.outputId}\\n`;\n const getLog = () => log;\n const allocate = (c) => {\n log += ` -> ${c.satoshis} sats, id ${c.outputId}\\n`;\n c.spendable = false;\n return c;\n };\n const allocateChangeInput = async (targetSatoshis, exactSatoshis) => {\n log += `allocate target ${targetSatoshis} exact ${exactSatoshis}`;\n if (exactSatoshis !== undefined) {\n const exact = change.find(c => c.spendable && c.satoshis === exactSatoshis);\n if (exact)\n return allocate(exact);\n }\n const over = change.find(c => c.spendable && c.satoshis >= targetSatoshis);\n if (over)\n return allocate(over);\n let under = undefined;\n for (let i = change.length - 1; i >= 0; i--) {\n if (change[i].spendable) {\n under = change[i];\n break;\n }\n }\n if (under)\n return allocate(under);\n log += `\\n`;\n return undefined;\n };\n const releaseChangeInput = async (outputId) => {\n log += `release id ${outputId}\\n`;\n const c = change.find(x => x.outputId === outputId);\n if (!c)\n throw new index_client_1.sdk.WERR_INTERNAL(`unknown outputId ${outputId}`);\n if (c.spendable)\n throw new index_client_1.sdk.WERR_INTERNAL(`release of spendable outputId ${outputId}`);\n c.spendable = true;\n };\n return { allocateChangeInput, releaseChangeInput, getLog };\n}\n//# sourceMappingURL=generateChange.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/generateChange.js?\n}"); /***/ }), @@ -3707,7 +3542,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.getBeefForTransaction = getBeefForTransaction;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst EntityProvenTx_1 = __webpack_require__(/*! 
../schema/entities/EntityProvenTx */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityProvenTx.js\");\nconst WERR_errors_1 = __webpack_require__(/*! ../../sdk/WERR_errors */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WERR_errors.js\");\nconst utilityHelpers_1 = __webpack_require__(/*! ../../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\n/**\n * Creates a `Beef` to support the validity of a transaction identified by its `txid`.\n *\n * `storage` is used to retrieve proven transactions and their merkle paths,\n * or proven_tx_req record with beef of external inputs (internal inputs meged by recursion).\n * Otherwise external services are used.\n *\n * `options.maxRecursionDepth` can be set to prevent overly deep chained dependencies. Will throw ERR_EXTSVS_ENVELOPE_DEPTH if exceeded.\n *\n * If `trustSelf` is true, a partial `Beef` will be returned where transactions known by `storage` to\n * be valid by verified proof are represented solely by 'txid'.\n *\n * If `knownTxids` is defined, any 'txid' required by the `Beef` that appears in the array is represented solely as a 'known' txid.\n *\n * @param storage the chain on which txid exists.\n * @param txid the transaction hash for which an envelope is requested.\n * @param options\n */\nasync function getBeefForTransaction(storage, txid, options) {\n const beef = \n // deserialize mergeToBeef if it is an array\n Array.isArray(options.mergeToBeef)\n ? 
sdk_1.Beef.fromBinary(options.mergeToBeef)\n : // otherwise if undefined create a new Beef\n options.mergeToBeef || new sdk_1.Beef();\n await mergeBeefForTransactionRecurse(beef, storage, txid, options, 0);\n return beef;\n}\n/**\n * @returns rawTx if txid known to network, if merkle proof available then also proven result is valid.\n */\nasync function getProvenOrRawTxFromServices(storage, txid, options) {\n var _a;\n const services = storage.getServices();\n const por = await EntityProvenTx_1.EntityProvenTx.fromTxid(txid, await storage.getServices());\n if (por.proven && !options.ignoreStorage && !options.ignoreNewProven) {\n por.proven.provenTxId = await storage.insertProvenTx(por.proven.toApi());\n }\n return { proven: (_a = por.proven) === null || _a === void 0 ? void 0 : _a.toApi(), rawTx: por.rawTx };\n}\nasync function mergeBeefForTransactionRecurse(beef, storage, txid, options, recursionDepth) {\n const maxDepth = storage.maxRecursionDepth;\n if (maxDepth && maxDepth <= recursionDepth)\n throw new WERR_errors_1.WERR_INVALID_OPERATION(`Maximum BEEF depth exceeded. Limit is ${storage.maxRecursionDepth}`);\n if (options.knownTxids && options.knownTxids.indexOf(txid) > -1) {\n // This txid is one of the txids the caller claims to already know are valid...\n beef.mergeTxidOnly(txid);\n return beef;\n }\n if (!options.ignoreStorage) {\n // if we can use storage, ask storage if it has the txid\n const requiredLevels = options.minProofLevel === undefined ? 
undefined : options.minProofLevel + recursionDepth;\n const knownBeef = await storage.getValidBeefForTxid(txid, beef, options.trustSelf, options.knownTxids, undefined, requiredLevels);\n if (knownBeef)\n return knownBeef;\n }\n if (options.ignoreServices)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER(`txid ${txid}`, `valid transaction on chain ${storage.chain}`);\n // if storage doesn't know about txid, use services\n // to find it and if it has a proof, remember it.\n const r = await getProvenOrRawTxFromServices(storage, txid, options);\n if (r.proven && options.minProofLevel !== undefined && options.minProofLevel > recursionDepth) {\n // ignore proof at this recursion depth\n r.proven = undefined;\n }\n if (r.proven) {\n // storage has proven this txid,\n // merge both the raw transaction and its merkle path\n beef.mergeRawTx(r.proven.rawTx);\n beef.mergeBump(new EntityProvenTx_1.EntityProvenTx(r.proven).getMerklePath());\n return beef;\n }\n if (!r.rawTx)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER(`txid ${txid}`, `valid transaction on chain ${storage.chain}`);\n // merge the raw transaction and recurse over its inputs.\n beef.mergeRawTx(r.rawTx);\n // recurse inputs\n const tx = (0, utilityHelpers_1.asBsvSdkTx)(r.rawTx);\n for (const input of tx.inputs) {\n const inputTxid = (0, utilityHelpers_1.verifyTruthy)(input.sourceTXID);\n if (!beef.findTxid(inputTxid)) {\n // Only if the txid is not already in the list of beef transactions.\n await mergeBeefForTransactionRecurse(beef, storage, inputTxid, options, recursionDepth + 1);\n }\n }\n return beef;\n}\n//# sourceMappingURL=getBeefForTransaction.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/getBeefForTransaction.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.getBeefForTransaction = getBeefForTransaction;\nconst sdk_1 = __webpack_require__(/*! 
@bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst index_client_1 = __webpack_require__(/*! ../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\n/**\n * Creates a `Beef` to support the validity of a transaction identified by its `txid`.\n *\n * `storage` is used to retrieve proven transactions and their merkle paths,\n * or proven_tx_req record with beef of external inputs (internal inputs meged by recursion).\n * Otherwise external services are used.\n *\n * `options.maxRecursionDepth` can be set to prevent overly deep chained dependencies. Will throw ERR_EXTSVS_ENVELOPE_DEPTH if exceeded.\n *\n * If `trustSelf` is true, a partial `Beef` will be returned where transactions known by `storage` to\n * be valid by verified proof are represented solely by 'txid'.\n *\n * If `knownTxids` is defined, any 'txid' required by the `Beef` that appears in the array is represented solely as a 'known' txid.\n *\n * @param storage the chain on which txid exists.\n * @param txid the transaction hash for which an envelope is requested.\n * @param options\n */\nasync function getBeefForTransaction(storage, txid, options) {\n const beef = \n // deserialize mergeToBeef if it is an array\n Array.isArray(options.mergeToBeef)\n ? 
sdk_1.Beef.fromBinary(options.mergeToBeef)\n : // otherwise if undefined create a new Beef\n options.mergeToBeef || new sdk_1.Beef();\n await mergeBeefForTransactionRecurse(beef, storage, txid, options, 0);\n return beef;\n}\n/**\n * @returns rawTx if txid known to network, if merkle proof available then also proven result is valid.\n */\nasync function getProvenOrRawTxFromServices(storage, txid, options) {\n var _a;\n const services = storage.getServices();\n const por = await index_client_1.EntityProvenTx.fromTxid(txid, await storage.getServices());\n if (por.proven && !options.ignoreStorage && !options.ignoreNewProven) {\n por.proven.provenTxId = await storage.insertProvenTx(por.proven.toApi());\n }\n return { proven: (_a = por.proven) === null || _a === void 0 ? void 0 : _a.toApi(), rawTx: por.rawTx };\n}\nasync function mergeBeefForTransactionRecurse(beef, storage, txid, options, recursionDepth) {\n const maxDepth = storage.maxRecursionDepth;\n if (maxDepth && maxDepth <= recursionDepth)\n throw new index_client_1.sdk.WERR_INVALID_OPERATION(`Maximum BEEF depth exceeded. Limit is ${storage.maxRecursionDepth}`);\n if (options.knownTxids && options.knownTxids.indexOf(txid) > -1) {\n // This txid is one of the txids the caller claims to already know are valid...\n beef.mergeTxidOnly(txid);\n return beef;\n }\n if (!options.ignoreStorage) {\n // if we can use storage, ask storage if it has the txid\n const requiredLevels = options.minProofLevel === undefined ? 
undefined : options.minProofLevel + recursionDepth;\n const knownBeef = await storage.getValidBeefForTxid(txid, beef, options.trustSelf, options.knownTxids, undefined, requiredLevels);\n if (knownBeef)\n return knownBeef;\n }\n if (options.ignoreServices)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER(`txid ${txid}`, `valid transaction on chain ${storage.chain}`);\n // if storage doesn't know about txid, use services\n // to find it and if it has a proof, remember it.\n const r = await getProvenOrRawTxFromServices(storage, txid, options);\n if (r.proven && options.minProofLevel !== undefined && options.minProofLevel > recursionDepth) {\n // ignore proof at this recursion depth\n r.proven = undefined;\n }\n if (r.proven) {\n // storage has proven this txid,\n // merge both the raw transaction and its merkle path\n beef.mergeRawTx(r.proven.rawTx);\n beef.mergeBump(new index_client_1.EntityProvenTx(r.proven).getMerklePath());\n return beef;\n }\n if (!r.rawTx)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER(`txid ${txid}`, `valid transaction on chain ${storage.chain}`);\n // merge the raw transaction and recurse over its inputs.\n beef.mergeRawTx(r.rawTx);\n // recurse inputs\n const tx = (0, index_client_1.asBsvSdkTx)(r.rawTx);\n for (const input of tx.inputs) {\n const inputTxid = (0, index_client_1.verifyTruthy)(input.sourceTXID);\n if (!beef.findTxid(inputTxid)) {\n // Only if the txid is not already in the list of beef transactions.\n await mergeBeefForTransactionRecurse(beef, storage, inputTxid, options, recursionDepth + 1);\n }\n }\n return beef;\n}\n//# sourceMappingURL=getBeefForTransaction.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/getBeefForTransaction.js?\n}"); /***/ }), @@ -3718,7 +3553,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.getSyncChunk = 
getSyncChunk;\nconst utilityHelpers_1 = __webpack_require__(/*! ../../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst WERR_errors_1 = __webpack_require__(/*! ../../sdk/WERR_errors */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WERR_errors.js\");\n/**\n * Gets the next sync chunk of updated data from un-remoted storage (could be using a remote DB connection).\n * @param storage\n * @param args\n * @returns\n */\nasync function getSyncChunk(storage, args) {\n const r = {\n fromStorageIdentityKey: args.fromStorageIdentityKey,\n toStorageIdentityKey: args.toStorageIdentityKey,\n userIdentityKey: args.identityKey\n };\n let itemCount = args.maxItems;\n let roughSize = args.maxRoughSize;\n let i = 0;\n let done = false;\n const user = (0, utilityHelpers_1.verifyTruthy)(await storage.findUserByIdentityKey(args.identityKey));\n if (!args.since || user.updated_at > new Date(args.since))\n r.user = user;\n const chunkers = [\n {\n name: 'provenTx',\n maxDivider: 100,\n preAdd: () => {\n r.provenTxs = [];\n },\n addItem: (i) => {\n r.provenTxs.push(i);\n },\n findItems: async (storage, args) => {\n return await storage.getProvenTxsForUser(args);\n }\n },\n {\n name: 'outputBasket',\n maxDivider: 1,\n preAdd: () => {\n r.outputBaskets = [];\n },\n addItem: (i) => {\n r.outputBaskets.push(i);\n },\n findItems: async (storage, args) => {\n return await storage.findOutputBaskets({\n partial: { userId: args.userId },\n since: args.since,\n paged: args.paged\n });\n }\n },\n {\n name: 'outputTag',\n maxDivider: 1,\n preAdd: () => {\n r.outputTags = [];\n },\n addItem: (i) => {\n r.outputTags.push(i);\n },\n findItems: async (storage, args) => {\n return await storage.findOutputTags({\n partial: { userId: args.userId },\n since: args.since,\n paged: args.paged\n });\n }\n },\n {\n name: 'txLabel',\n maxDivider: 1,\n preAdd: () => {\n r.txLabels = [];\n },\n addItem: (i) => {\n r.txLabels.push(i);\n },\n 
findItems: async (storage, args) => {\n return await storage.findTxLabels({\n partial: { userId: args.userId },\n since: args.since,\n paged: args.paged\n });\n }\n },\n {\n name: 'transaction',\n maxDivider: 25,\n preAdd: () => {\n r.transactions = [];\n },\n addItem: (i) => {\n r.transactions.push(i);\n },\n findItems: async (storage, args) => {\n return await storage.findTransactions({\n partial: { userId: args.userId },\n since: args.since,\n paged: args.paged\n });\n }\n },\n {\n name: 'output',\n maxDivider: 25,\n preAdd: () => {\n r.outputs = [];\n },\n addItem: (i) => {\n r.outputs.push(i);\n },\n findItems: async (storage, args) => {\n return await storage.findOutputs({\n partial: { userId: args.userId },\n since: args.since,\n paged: args.paged\n });\n }\n },\n {\n name: 'txLabelMap',\n maxDivider: 1,\n preAdd: () => {\n r.txLabelMaps = [];\n },\n addItem: (i) => {\n r.txLabelMaps.push(i);\n },\n findItems: async (storage, args) => {\n return await storage.getTxLabelMapsForUser(args);\n }\n },\n {\n name: 'outputTagMap',\n maxDivider: 1,\n preAdd: () => {\n r.outputTagMaps = [];\n },\n addItem: (i) => {\n r.outputTagMaps.push(i);\n },\n findItems: async (storage, args) => {\n return await storage.getOutputTagMapsForUser(args);\n }\n },\n {\n name: 'certificate',\n maxDivider: 25,\n preAdd: () => {\n r.certificates = [];\n },\n addItem: (i) => {\n r.certificates.push(i);\n },\n findItems: async (storage, args) => {\n return await storage.findCertificates({\n partial: { userId: args.userId },\n since: args.since,\n paged: args.paged\n });\n }\n },\n {\n name: 'certificateField',\n maxDivider: 25,\n preAdd: () => {\n r.certificateFields = [];\n },\n addItem: (i) => {\n r.certificateFields.push(i);\n },\n findItems: async (storage, args) => {\n return await storage.findCertificateFields({\n partial: { userId: args.userId },\n since: args.since,\n paged: args.paged\n });\n }\n },\n {\n name: 'commission',\n maxDivider: 25,\n preAdd: () => {\n r.commissions = 
[];\n },\n addItem: (i) => {\n r.commissions.push(i);\n },\n findItems: async (storage, args) => {\n return await storage.findCommissions({\n partial: { userId: args.userId },\n since: args.since,\n paged: args.paged\n });\n }\n },\n {\n name: 'provenTxReq',\n maxDivider: 100,\n preAdd: () => {\n r.provenTxReqs = [];\n },\n addItem: (i) => {\n r.provenTxReqs.push(i);\n },\n findItems: async (storage, args) => {\n return await storage.getProvenTxReqsForUser(args);\n }\n }\n ];\n const addItems = async (a) => {\n if (i >= args.offsets.length) {\n done = true;\n return;\n }\n let { offset, name: oname } = args.offsets[i++];\n if (a.name !== oname)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('offsets', `in dependency order. '${a.name}' expected, found ${oname}.`);\n let preAddCalled = false;\n for (; !done;) {\n const limit = Math.min(itemCount, Math.max(10, args.maxItems / a.maxDivider));\n if (limit <= 0)\n break;\n const items = await a.findItems(storage, {\n userId: user.userId,\n since: args.since,\n paged: { limit, offset }\n });\n checkEntityValues(items);\n if (!preAddCalled) {\n a.preAdd();\n preAddCalled = true;\n }\n if (items.length === 0)\n break;\n for (const item of items) {\n offset++;\n a.addItem(item);\n itemCount--;\n roughSize -= JSON.stringify(item).length;\n if (itemCount <= 0 || roughSize < 0) {\n done = true;\n break;\n }\n }\n }\n };\n for (; !done;) {\n for (const c of chunkers) {\n await addItems(c);\n }\n }\n return r;\n}\nfunction checkIsDate(v) {\n if (!(v instanceof Date))\n throw new WERR_errors_1.WERR_INVALID_OPERATION('bad date');\n}\nfunction checkEntityValues(es) {\n for (const e of es) {\n checkIsDate(e['created_at']);\n checkIsDate(e['updated_at']);\n for (const key of Object.keys(e))\n if (e[key] === null)\n throw new WERR_errors_1.WERR_INVALID_OPERATION();\n }\n}\n//# sourceMappingURL=getSyncChunk.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/getSyncChunk.js?\n}"); 
+eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.getSyncChunk = getSyncChunk;\nconst index_client_1 = __webpack_require__(/*! ../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\n/**\n * Gets the next sync chunk of updated data from un-remoted storage (could be using a remote DB connection).\n * @param storage\n * @param args\n * @returns\n */\nasync function getSyncChunk(storage, args) {\n const r = {\n fromStorageIdentityKey: args.fromStorageIdentityKey,\n toStorageIdentityKey: args.toStorageIdentityKey,\n userIdentityKey: args.identityKey\n };\n let itemCount = args.maxItems;\n let roughSize = args.maxRoughSize;\n let i = 0;\n let done = false;\n const user = (0, index_client_1.verifyTruthy)(await storage.findUserByIdentityKey(args.identityKey));\n if (!args.since || user.updated_at > new Date(args.since))\n r.user = user;\n const chunkers = [\n {\n name: 'provenTx',\n maxDivider: 100,\n preAdd: () => {\n r.provenTxs = [];\n },\n addItem: (i) => {\n r.provenTxs.push(i);\n },\n findItems: async (storage, args) => {\n return await storage.getProvenTxsForUser(args);\n }\n },\n {\n name: 'outputBasket',\n maxDivider: 1,\n preAdd: () => {\n r.outputBaskets = [];\n },\n addItem: (i) => {\n r.outputBaskets.push(i);\n },\n findItems: async (storage, args) => {\n return await storage.findOutputBaskets({\n partial: { userId: args.userId },\n since: args.since,\n paged: args.paged\n });\n }\n },\n {\n name: 'outputTag',\n maxDivider: 1,\n preAdd: () => {\n r.outputTags = [];\n },\n addItem: (i) => {\n r.outputTags.push(i);\n },\n findItems: async (storage, args) => {\n return await storage.findOutputTags({\n partial: { userId: args.userId },\n since: args.since,\n paged: args.paged\n });\n }\n },\n {\n name: 'txLabel',\n maxDivider: 1,\n preAdd: () => {\n r.txLabels = [];\n },\n addItem: (i) => {\n r.txLabels.push(i);\n },\n findItems: async (storage, args) => {\n return await 
storage.findTxLabels({\n partial: { userId: args.userId },\n since: args.since,\n paged: args.paged\n });\n }\n },\n {\n name: 'transaction',\n maxDivider: 25,\n preAdd: () => {\n r.transactions = [];\n },\n addItem: (i) => {\n r.transactions.push(i);\n },\n findItems: async (storage, args) => {\n return await storage.findTransactions({\n partial: { userId: args.userId },\n since: args.since,\n paged: args.paged\n });\n }\n },\n {\n name: 'output',\n maxDivider: 25,\n preAdd: () => {\n r.outputs = [];\n },\n addItem: (i) => {\n r.outputs.push(i);\n },\n findItems: async (storage, args) => {\n return await storage.findOutputs({\n partial: { userId: args.userId },\n since: args.since,\n paged: args.paged\n });\n }\n },\n {\n name: 'txLabelMap',\n maxDivider: 1,\n preAdd: () => {\n r.txLabelMaps = [];\n },\n addItem: (i) => {\n r.txLabelMaps.push(i);\n },\n findItems: async (storage, args) => {\n return await storage.getTxLabelMapsForUser(args);\n }\n },\n {\n name: 'outputTagMap',\n maxDivider: 1,\n preAdd: () => {\n r.outputTagMaps = [];\n },\n addItem: (i) => {\n r.outputTagMaps.push(i);\n },\n findItems: async (storage, args) => {\n return await storage.getOutputTagMapsForUser(args);\n }\n },\n {\n name: 'certificate',\n maxDivider: 25,\n preAdd: () => {\n r.certificates = [];\n },\n addItem: (i) => {\n r.certificates.push(i);\n },\n findItems: async (storage, args) => {\n return await storage.findCertificates({\n partial: { userId: args.userId },\n since: args.since,\n paged: args.paged\n });\n }\n },\n {\n name: 'certificateField',\n maxDivider: 25,\n preAdd: () => {\n r.certificateFields = [];\n },\n addItem: (i) => {\n r.certificateFields.push(i);\n },\n findItems: async (storage, args) => {\n return await storage.findCertificateFields({\n partial: { userId: args.userId },\n since: args.since,\n paged: args.paged\n });\n }\n },\n {\n name: 'commission',\n maxDivider: 25,\n preAdd: () => {\n r.commissions = [];\n },\n addItem: (i) => {\n 
r.commissions.push(i);\n },\n findItems: async (storage, args) => {\n return await storage.findCommissions({\n partial: { userId: args.userId },\n since: args.since,\n paged: args.paged\n });\n }\n },\n {\n name: 'provenTxReq',\n maxDivider: 100,\n preAdd: () => {\n r.provenTxReqs = [];\n },\n addItem: (i) => {\n r.provenTxReqs.push(i);\n },\n findItems: async (storage, args) => {\n return await storage.getProvenTxReqsForUser(args);\n }\n }\n ];\n const addItems = async (a) => {\n if (i >= args.offsets.length) {\n done = true;\n return;\n }\n let { offset, name: oname } = args.offsets[i++];\n if (a.name !== oname)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('offsets', `in dependency order. '${a.name}' expected, found ${oname}.`);\n let preAddCalled = false;\n for (; !done;) {\n const limit = Math.min(itemCount, Math.max(10, args.maxItems / a.maxDivider));\n if (limit <= 0)\n break;\n const items = await a.findItems(storage, {\n userId: user.userId,\n since: args.since,\n paged: { limit, offset }\n });\n checkEntityValues(items);\n if (!preAddCalled) {\n a.preAdd();\n preAddCalled = true;\n }\n if (items.length === 0)\n break;\n for (const item of items) {\n offset++;\n a.addItem(item);\n itemCount--;\n roughSize -= JSON.stringify(item).length;\n if (itemCount <= 0 || roughSize < 0) {\n done = true;\n break;\n }\n }\n }\n };\n for (; !done;) {\n for (const c of chunkers) {\n await addItems(c);\n }\n }\n return r;\n}\nfunction checkIsDate(v) {\n if (!(v instanceof Date))\n throw new index_client_1.sdk.WERR_INVALID_OPERATION('bad date');\n}\nfunction checkEntityValues(es) {\n for (const e of es) {\n checkIsDate(e['created_at']);\n checkIsDate(e['updated_at']);\n for (const key of Object.keys(e))\n if (e[key] === null)\n throw new index_client_1.sdk.WERR_INVALID_OPERATION();\n }\n}\n//# sourceMappingURL=getSyncChunk.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/getSyncChunk.js?\n}"); /***/ }), @@ -3729,7 
+3564,18 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.internalizeAction = internalizeAction;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst processAction_1 = __webpack_require__(/*! ./processAction */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/processAction.js\");\nconst validationHelpers_1 = __webpack_require__(/*! ../../sdk/validationHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/validationHelpers.js\");\nconst WERR_errors_1 = __webpack_require__(/*! ../../sdk/WERR_errors */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WERR_errors.js\");\nconst utilityHelpers_1 = __webpack_require__(/*! ../../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst EntityProvenTxReq_1 = __webpack_require__(/*! ../schema/entities/EntityProvenTxReq */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityProvenTxReq.js\");\n/**\n * Internalize Action allows a wallet to take ownership of outputs in a pre-existing transaction.\n * The transaction may, or may not already be known to both the storage and user.\n *\n * Two types of outputs are handled: \"wallet payments\" and \"basket insertions\".\n *\n * A \"basket insertion\" output is considered a custom output and has no effect on the wallet's \"balance\".\n *\n * A \"wallet payment\" adds an outputs value to the wallet's change \"balance\". 
These outputs are assigned to the \"default\" basket.\n *\n * Processing starts with simple validation and then checks for a pre-existing transaction.\n * If the transaction is already known to the user, then the outputs are reviewed against the existing outputs treatment,\n * and merge rules are added to the arguments passed to the storage layer.\n * The existing transaction must be in the 'unproven' or 'completed' status. Any other status is an error.\n *\n * When the transaction already exists, the description is updated. The isOutgoing sense is not changed.\n *\n * \"basket insertion\" Merge Rules:\n * 1. The \"default\" basket may not be specified as the insertion basket.\n * 2. A change output in the \"default\" basket may not be target of an insertion into a different basket.\n * 3. These baskets do not affect the wallet's balance and are typed \"custom\".\n *\n * \"wallet payment\" Merge Rules:\n * 1. Targetting an existing change \"default\" basket output results in a no-op. No error. No alterations made.\n * 2. Targetting a previously \"custom\" non-change output converts it into a change output. 
This alters the transaction's `satoshis`, and the wallet balance.\n */\nasync function internalizeAction(storage, auth, args) {\n const ctx = new InternalizeActionContext(storage, auth, args);\n await ctx.asyncSetup();\n if (ctx.isMerge)\n await ctx.mergedInternalize();\n else\n await ctx.newInternalize();\n return ctx.r;\n}\nclass InternalizeActionContext {\n constructor(storage, auth, args) {\n this.storage = storage;\n this.auth = auth;\n this.args = args;\n this.vargs = (0, validationHelpers_1.validateInternalizeActionArgs)(args);\n this.userId = auth.userId;\n this.r = {\n accepted: true,\n isMerge: false,\n txid: '',\n satoshis: 0\n };\n this.ab = new sdk_1.Beef();\n this.tx = new sdk_1.Transaction();\n this.changeBasket = {};\n this.baskets = {};\n this.basketInsertions = [];\n this.walletPayments = [];\n this.eos = [];\n }\n get isMerge() {\n return this.r.isMerge;\n }\n set isMerge(v) {\n this.r.isMerge = v;\n }\n get txid() {\n return this.r.txid;\n }\n set txid(v) {\n this.r.txid = v;\n }\n get satoshis() {\n return this.r.satoshis;\n }\n set satoshis(v) {\n this.r.satoshis = v;\n }\n async getBasket(basketName) {\n let b = this.baskets[basketName];\n if (b)\n return b;\n b = await this.storage.findOrInsertOutputBasket(this.userId, basketName);\n this.baskets[basketName] = b;\n return b;\n }\n async asyncSetup() {\n ;\n ({ ab: this.ab, tx: this.tx, txid: this.txid } = await this.validateAtomicBeef(this.args.tx));\n for (const o of this.args.outputs) {\n if (o.outputIndex < 0 || o.outputIndex >= this.tx.outputs.length)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('outputIndex', `a valid output index in range 0 to ${this.tx.outputs.length - 1}`);\n const txo = this.tx.outputs[o.outputIndex];\n switch (o.protocol) {\n case 'basket insertion':\n {\n if (!o.insertionRemittance || o.paymentRemittance)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('basket insertion', 'valid insertionRemittance and no paymentRemittance');\n this.basketInsertions.push({\n 
...o.insertionRemittance,\n txo,\n vout: o.outputIndex\n });\n }\n break;\n case 'wallet payment':\n {\n if (o.insertionRemittance || !o.paymentRemittance)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('wallet payment', 'valid paymentRemittance and no insertionRemittance');\n this.walletPayments.push({\n ...o.paymentRemittance,\n txo,\n vout: o.outputIndex,\n ignore: false\n });\n }\n break;\n default:\n throw new WERR_errors_1.WERR_INTERNAL(`unexpected protocol ${o.protocol}`);\n }\n }\n this.changeBasket = (0, utilityHelpers_1.verifyOne)(await this.storage.findOutputBaskets({\n partial: { userId: this.userId, name: 'default' }\n }));\n this.baskets = {};\n this.etx = (0, utilityHelpers_1.verifyOneOrNone)(await this.storage.findTransactions({\n partial: { userId: this.userId, txid: this.txid }\n }));\n if (this.etx && !(this.etx.status == 'completed' || this.etx.status === 'unproven' || this.etx.status === 'nosend'))\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('tx', `target transaction of internalizeAction has invalid status ${this.etx.status}.`);\n this.isMerge = !!this.etx;\n if (this.isMerge) {\n this.eos = await this.storage.findOutputs({\n partial: { userId: this.userId, txid: this.txid }\n }); // It is possible for a transaction to have no outputs, or less outputs in storage than in the transaction itself.\n for (const eo of this.eos) {\n const bi = this.basketInsertions.find(b => b.vout === eo.vout);\n const wp = this.walletPayments.find(b => b.vout === eo.vout);\n if (bi && wp)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('outputs', `unique outputIndex values`);\n if (bi)\n bi.eo = eo;\n if (wp)\n wp.eo = eo;\n }\n }\n for (const basket of this.basketInsertions) {\n if (this.isMerge && basket.eo) {\n // merging with an existing user output\n if (basket.eo.basketId === this.changeBasket.basketId) {\n // converting a change output to a user basket custom output\n this.satoshis -= basket.txo.satoshis;\n }\n }\n }\n for (const payment of 
this.walletPayments) {\n if (this.isMerge) {\n if (payment.eo) {\n // merging with an existing user output\n if (payment.eo.basketId === this.changeBasket.basketId) {\n // ignore attempts to internalize an existing change output.\n payment.ignore = true;\n }\n else {\n // converting an existing non-change output to change... increases net satoshis\n this.satoshis += payment.txo.satoshis;\n }\n }\n else {\n // adding a previously untracked output of an existing transaction as change... increase net satoshis\n this.satoshis += payment.txo.satoshis;\n }\n }\n else {\n // If there are no existing outputs, all incoming wallet payment outputs add to net satoshis\n this.satoshis += payment.txo.satoshis;\n }\n }\n }\n async validateAtomicBeef(atomicBeef) {\n const ab = sdk_1.Beef.fromBinary(atomicBeef);\n const txValid = await ab.verify(await this.storage.getServices().getChainTracker(), false);\n if (!txValid || !ab.atomicTxid)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('tx', 'valid AtomicBEEF');\n const txid = ab.atomicTxid;\n const btx = ab.findTxid(txid);\n if (!btx)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('tx', `valid AtomicBEEF with newest txid of ${txid}`);\n const tx = btx.tx;\n /*\n for (const i of tx.inputs) {\n if (!i.sourceTXID)\n throw new WERR_INTERNAL('beef Transactions must have sourceTXIDs')\n if (!i.sourceTransaction) {\n const btx = ab.findTxid(i.sourceTXID)\n if (!btx)\n throw new WERR_INVALID_PARAMETER('tx', `valid AtomicBEEF and contain input transaction with txid ${i.sourceTXID}`);\n i.sourceTransaction = btx.tx\n }\n }\n */\n return { ab, tx, txid };\n }\n async findOrInsertTargetTransaction(satoshis, status) {\n const now = new Date();\n const newTx = {\n created_at: now,\n updated_at: now,\n transactionId: 0,\n status,\n satoshis,\n version: this.tx.version,\n lockTime: this.tx.lockTime,\n reference: (0, utilityHelpers_1.randomBytesBase64)(7),\n userId: this.userId,\n isOutgoing: false,\n description: this.args.description,\n 
inputBEEF: undefined,\n txid: this.txid,\n rawTx: undefined\n };\n const tr = await this.storage.findOrInsertTransaction(newTx);\n if (!tr.isNew) {\n if (!this.isMerge)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('tx', `target transaction of internalizeAction is undergoing active changes.`);\n await this.storage.updateTransaction(tr.tx.transactionId, {\n satoshis: tr.tx.satoshis + satoshis\n });\n }\n return tr.tx;\n }\n async mergedInternalize() {\n const transactionId = this.etx.transactionId;\n await this.addLabels(transactionId);\n for (const payment of this.walletPayments) {\n if (payment.eo && !payment.ignore)\n await this.mergeWalletPaymentForOutput(transactionId, payment);\n else if (!payment.ignore)\n await this.storeNewWalletPaymentForOutput(transactionId, payment);\n }\n for (const basket of this.basketInsertions) {\n if (basket.eo)\n await this.mergeBasketInsertionForOutput(transactionId, basket);\n else\n await this.storeNewBasketInsertionForOutput(transactionId, basket);\n }\n }\n async newInternalize() {\n this.etx = await this.findOrInsertTargetTransaction(this.satoshis, 'unproven');\n const transactionId = this.etx.transactionId;\n // transaction record for user is new, but the txid may not be new to storage\n // make sure storage pursues getting a proof for it.\n const newReq = EntityProvenTxReq_1.EntityProvenTxReq.fromTxid(this.txid, this.tx.toBinary(), this.args.tx);\n // this status is only relevant if the transaction is new to storage.\n newReq.status = 'unsent';\n // this history and notify will be merged into an existing req if it exists.\n newReq.addHistoryNote({ what: 'internalizeAction', userId: this.userId });\n newReq.addNotifyTransactionId(transactionId);\n const pr = await this.storage.getProvenOrReq(this.txid, newReq.toApi());\n if (pr.isNew) {\n // This storage doesn't know about this txid yet.\n // TODO Can we immediately prove this txid?\n // TODO Do full validation on the transaction?\n // Attempt to broadcast it to the 
network, throwing an error if it fails.\n const { swr, ndr } = await (0, processAction_1.shareReqsWithWorld)(this.storage, this.userId, [this.txid], false);\n if (ndr[0].status !== 'success') {\n this.r.sendWithResults = swr;\n this.r.notDelayedResults = ndr;\n // abort the internalize action, WERR_REVIEW_ACTIONS exception will be thrown\n return;\n }\n }\n await this.addLabels(transactionId);\n for (const payment of this.walletPayments) {\n await this.storeNewWalletPaymentForOutput(transactionId, payment);\n }\n for (const basket of this.basketInsertions) {\n await this.storeNewBasketInsertionForOutput(transactionId, basket);\n }\n }\n async addLabels(transactionId) {\n for (const label of this.vargs.labels) {\n const txLabel = await this.storage.findOrInsertTxLabel(this.userId, label);\n await this.storage.findOrInsertTxLabelMap((0, utilityHelpers_1.verifyId)(transactionId), (0, utilityHelpers_1.verifyId)(txLabel.txLabelId));\n }\n }\n async addBasketTags(basket, outputId) {\n for (const tag of basket.tags || []) {\n await this.storage.tagOutput({ outputId, userId: this.userId }, tag);\n }\n }\n async storeNewWalletPaymentForOutput(transactionId, payment) {\n const now = new Date();\n const txOut = {\n created_at: now,\n updated_at: now,\n outputId: 0,\n transactionId,\n userId: this.userId,\n spendable: true,\n lockingScript: payment.txo.lockingScript.toBinary(),\n vout: payment.vout,\n basketId: this.changeBasket.basketId,\n satoshis: payment.txo.satoshis,\n txid: this.txid,\n senderIdentityKey: payment.senderIdentityKey,\n type: 'P2PKH',\n providedBy: 'storage',\n purpose: 'change',\n derivationPrefix: payment.derivationPrefix,\n derivationSuffix: payment.derivationSuffix,\n change: true,\n spentBy: undefined,\n customInstructions: undefined,\n outputDescription: '',\n spendingDescription: undefined\n };\n txOut.outputId = await this.storage.insertOutput(txOut);\n payment.eo = txOut;\n }\n async mergeWalletPaymentForOutput(transactionId, payment) {\n const 
outputId = payment.eo.outputId;\n const update = {\n basketId: this.changeBasket.basketId,\n type: 'P2PKH',\n customInstructions: undefined,\n change: true,\n providedBy: 'storage',\n purpose: 'change',\n senderIdentityKey: payment.senderIdentityKey,\n derivationPrefix: payment.derivationPrefix,\n derivationSuffix: payment.derivationSuffix\n };\n await this.storage.updateOutput(outputId, update);\n payment.eo = { ...payment.eo, ...update };\n }\n async mergeBasketInsertionForOutput(transactionId, basket) {\n const outputId = basket.eo.outputId;\n const update = {\n basketId: (await this.getBasket(basket.basket)).basketId,\n type: 'custom',\n customInstructions: basket.customInstructions,\n change: false,\n providedBy: 'you',\n purpose: '',\n senderIdentityKey: undefined,\n derivationPrefix: undefined,\n derivationSuffix: undefined\n };\n await this.storage.updateOutput(outputId, update);\n basket.eo = { ...basket.eo, ...update };\n }\n async storeNewBasketInsertionForOutput(transactionId, basket) {\n const now = new Date();\n const txOut = {\n created_at: now,\n updated_at: now,\n outputId: 0,\n transactionId,\n userId: this.userId,\n spendable: true,\n lockingScript: basket.txo.lockingScript.toBinary(),\n vout: basket.vout,\n basketId: (await this.getBasket(basket.basket)).basketId,\n satoshis: basket.txo.satoshis,\n txid: this.txid,\n type: 'custom',\n customInstructions: basket.customInstructions,\n change: false,\n spentBy: undefined,\n outputDescription: '',\n spendingDescription: undefined,\n providedBy: 'you',\n purpose: '',\n senderIdentityKey: undefined,\n derivationPrefix: undefined,\n derivationSuffix: undefined\n };\n txOut.outputId = await this.storage.insertOutput(txOut);\n await this.addBasketTags(basket, txOut.outputId);\n basket.eo = txOut;\n }\n}\n//# sourceMappingURL=internalizeAction.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/internalizeAction.js?\n}"); +eval("{\n/* eslint-disable 
@typescript-eslint/no-unused-vars */\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.internalizeAction = internalizeAction;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst index_client_1 = __webpack_require__(/*! ../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nconst processAction_1 = __webpack_require__(/*! ./processAction */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/processAction.js\");\n/**\n * Internalize Action allows a wallet to take ownership of outputs in a pre-existing transaction.\n * The transaction may, or may not already be known to both the storage and user.\n *\n * Two types of outputs are handled: \"wallet payments\" and \"basket insertions\".\n *\n * A \"basket insertion\" output is considered a custom output and has no effect on the wallet's \"balance\".\n *\n * A \"wallet payment\" adds an outputs value to the wallet's change \"balance\". These outputs are assigned to the \"default\" basket.\n *\n * Processing starts with simple validation and then checks for a pre-existing transaction.\n * If the transaction is already known to the user, then the outputs are reviewed against the existing outputs treatment,\n * and merge rules are added to the arguments passed to the storage layer.\n * The existing transaction must be in the 'unproven' or 'completed' status. Any other status is an error.\n *\n * When the transaction already exists, the description is updated. The isOutgoing sense is not changed.\n *\n * \"basket insertion\" Merge Rules:\n * 1. The \"default\" basket may not be specified as the insertion basket.\n * 2. A change output in the \"default\" basket may not be target of an insertion into a different basket.\n * 3. These baskets do not affect the wallet's balance and are typed \"custom\".\n *\n * \"wallet payment\" Merge Rules:\n * 1. 
Targetting an existing change \"default\" basket output results in a no-op. No error. No alterations made.\n * 2. Targetting a previously \"custom\" non-change output converts it into a change output. This alters the transaction's `satoshis`, and the wallet balance.\n */\nasync function internalizeAction(storage, auth, args) {\n const ctx = new InternalizeActionContext(storage, auth, args);\n await ctx.asyncSetup();\n if (ctx.isMerge)\n await ctx.mergedInternalize();\n else\n await ctx.newInternalize();\n return ctx.r;\n}\nclass InternalizeActionContext {\n constructor(storage, auth, args) {\n this.storage = storage;\n this.auth = auth;\n this.args = args;\n this.vargs = index_client_1.sdk.validateInternalizeActionArgs(args);\n this.userId = auth.userId;\n this.r = {\n accepted: true,\n isMerge: false,\n txid: '',\n satoshis: 0\n };\n this.ab = new sdk_1.Beef();\n this.tx = new sdk_1.Transaction();\n this.changeBasket = {};\n this.baskets = {};\n this.basketInsertions = [];\n this.walletPayments = [];\n this.eos = [];\n }\n get isMerge() {\n return this.r.isMerge;\n }\n set isMerge(v) {\n this.r.isMerge = v;\n }\n get txid() {\n return this.r.txid;\n }\n set txid(v) {\n this.r.txid = v;\n }\n get satoshis() {\n return this.r.satoshis;\n }\n set satoshis(v) {\n this.r.satoshis = v;\n }\n async getBasket(basketName) {\n let b = this.baskets[basketName];\n if (b)\n return b;\n b = await this.storage.findOrInsertOutputBasket(this.userId, basketName);\n this.baskets[basketName] = b;\n return b;\n }\n async asyncSetup() {\n ;\n ({ ab: this.ab, tx: this.tx, txid: this.txid } = await this.validateAtomicBeef(this.args.tx));\n for (const o of this.args.outputs) {\n if (o.outputIndex < 0 || o.outputIndex >= this.tx.outputs.length)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('outputIndex', `a valid output index in range 0 to ${this.tx.outputs.length - 1}`);\n const txo = this.tx.outputs[o.outputIndex];\n switch (o.protocol) {\n case 'basket insertion':\n {\n if 
(!o.insertionRemittance || o.paymentRemittance)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('basket insertion', 'valid insertionRemittance and no paymentRemittance');\n this.basketInsertions.push({\n ...o.insertionRemittance,\n txo,\n vout: o.outputIndex\n });\n }\n break;\n case 'wallet payment':\n {\n if (o.insertionRemittance || !o.paymentRemittance)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('wallet payment', 'valid paymentRemittance and no insertionRemittance');\n this.walletPayments.push({\n ...o.paymentRemittance,\n txo,\n vout: o.outputIndex,\n ignore: false\n });\n }\n break;\n default:\n throw new index_client_1.sdk.WERR_INTERNAL(`unexpected protocol ${o.protocol}`);\n }\n }\n this.changeBasket = (0, index_client_1.verifyOne)(await this.storage.findOutputBaskets({\n partial: { userId: this.userId, name: 'default' }\n }));\n this.baskets = {};\n this.etx = (0, index_client_1.verifyOneOrNone)(await this.storage.findTransactions({\n partial: { userId: this.userId, txid: this.txid }\n }));\n if (this.etx && !(this.etx.status == 'completed' || this.etx.status === 'unproven' || this.etx.status === 'nosend'))\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('tx', `target transaction of internalizeAction has invalid status ${this.etx.status}.`);\n this.isMerge = !!this.etx;\n if (this.isMerge) {\n this.eos = await this.storage.findOutputs({\n partial: { userId: this.userId, txid: this.txid }\n }); // It is possible for a transaction to have no outputs, or less outputs in storage than in the transaction itself.\n for (const eo of this.eos) {\n const bi = this.basketInsertions.find(b => b.vout === eo.vout);\n const wp = this.walletPayments.find(b => b.vout === eo.vout);\n if (bi && wp)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('outputs', `unique outputIndex values`);\n if (bi)\n bi.eo = eo;\n if (wp)\n wp.eo = eo;\n }\n }\n for (const basket of this.basketInsertions) {\n if (this.isMerge && basket.eo) {\n // merging with an 
existing user output\n if (basket.eo.basketId === this.changeBasket.basketId) {\n // converting a change output to a user basket custom output\n this.satoshis -= basket.txo.satoshis;\n }\n }\n }\n for (const payment of this.walletPayments) {\n if (this.isMerge) {\n if (payment.eo) {\n // merging with an existing user output\n if (payment.eo.basketId === this.changeBasket.basketId) {\n // ignore attempts to internalize an existing change output.\n payment.ignore = true;\n }\n else {\n // converting an existing non-change output to change... increases net satoshis\n this.satoshis += payment.txo.satoshis;\n }\n }\n else {\n // adding a previously untracked output of an existing transaction as change... increase net satoshis\n this.satoshis += payment.txo.satoshis;\n }\n }\n else {\n // If there are no existing outputs, all incoming wallet payment outputs add to net satoshis\n this.satoshis += payment.txo.satoshis;\n }\n }\n }\n async validateAtomicBeef(atomicBeef) {\n const ab = sdk_1.Beef.fromBinary(atomicBeef);\n const txValid = await ab.verify(await this.storage.getServices().getChainTracker(), false);\n if (!txValid || !ab.atomicTxid)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('tx', 'valid AtomicBEEF');\n const txid = ab.atomicTxid;\n const btx = ab.findTxid(txid);\n if (!btx)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('tx', `valid AtomicBEEF with newest txid of ${txid}`);\n const tx = btx.tx;\n /*\n for (const i of tx.inputs) {\n if (!i.sourceTXID)\n throw new sdk.WERR_INTERNAL('beef Transactions must have sourceTXIDs')\n if (!i.sourceTransaction) {\n const btx = ab.findTxid(i.sourceTXID)\n if (!btx)\n throw new sdk.WERR_INVALID_PARAMETER('tx', `valid AtomicBEEF and contain input transaction with txid ${i.sourceTXID}`);\n i.sourceTransaction = btx.tx\n }\n }\n */\n return { ab, tx, txid };\n }\n async findOrInsertTargetTransaction(satoshis, status) {\n const now = new Date();\n const newTx = {\n created_at: now,\n updated_at: now,\n 
transactionId: 0,\n status,\n satoshis,\n version: this.tx.version,\n lockTime: this.tx.lockTime,\n reference: (0, index_client_1.randomBytesBase64)(7),\n userId: this.userId,\n isOutgoing: false,\n description: this.args.description,\n inputBEEF: undefined,\n txid: this.txid,\n rawTx: undefined\n };\n const tr = await this.storage.findOrInsertTransaction(newTx);\n if (!tr.isNew) {\n if (!this.isMerge)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('tx', `target transaction of internalizeAction is undergoing active changes.`);\n await this.storage.updateTransaction(tr.tx.transactionId, {\n satoshis: tr.tx.satoshis + satoshis\n });\n }\n return tr.tx;\n }\n async mergedInternalize() {\n const transactionId = this.etx.transactionId;\n await this.addLabels(transactionId);\n for (const payment of this.walletPayments) {\n if (payment.eo && !payment.ignore)\n await this.mergeWalletPaymentForOutput(transactionId, payment);\n else if (!payment.ignore)\n await this.storeNewWalletPaymentForOutput(transactionId, payment);\n }\n for (const basket of this.basketInsertions) {\n if (basket.eo)\n await this.mergeBasketInsertionForOutput(transactionId, basket);\n else\n await this.storeNewBasketInsertionForOutput(transactionId, basket);\n }\n }\n async newInternalize() {\n this.etx = await this.findOrInsertTargetTransaction(this.satoshis, 'unproven');\n const transactionId = this.etx.transactionId;\n // transaction record for user is new, but the txid may not be new to storage\n // make sure storage pursues getting a proof for it.\n const newReq = index_client_1.EntityProvenTxReq.fromTxid(this.txid, this.tx.toBinary(), this.args.tx);\n // this status is only relevant if the transaction is new to storage.\n newReq.status = 'unsent';\n // this history and notify will be merged into an existing req if it exists.\n newReq.addHistoryNote({ what: 'internalizeAction', userId: this.userId });\n newReq.addNotifyTransactionId(transactionId);\n const pr = await 
this.storage.getProvenOrReq(this.txid, newReq.toApi());\n if (pr.isNew) {\n // This storage doesn't know about this txid yet.\n // TODO Can we immediately prove this txid?\n // TODO Do full validation on the transaction?\n // Attempt to broadcast it to the network, throwing an error if it fails.\n const { swr, ndr } = await (0, processAction_1.shareReqsWithWorld)(this.storage, this.userId, [this.txid], false);\n if (ndr[0].status !== 'success') {\n this.r.sendWithResults = swr;\n this.r.notDelayedResults = ndr;\n // abort the internalize action, WERR_REVIEW_ACTIONS exception will be thrown\n return;\n }\n }\n await this.addLabels(transactionId);\n for (const payment of this.walletPayments) {\n await this.storeNewWalletPaymentForOutput(transactionId, payment);\n }\n for (const basket of this.basketInsertions) {\n await this.storeNewBasketInsertionForOutput(transactionId, basket);\n }\n }\n async addLabels(transactionId) {\n for (const label of this.vargs.labels) {\n const txLabel = await this.storage.findOrInsertTxLabel(this.userId, label);\n await this.storage.findOrInsertTxLabelMap((0, index_client_1.verifyId)(transactionId), (0, index_client_1.verifyId)(txLabel.txLabelId));\n }\n }\n async addBasketTags(basket, outputId) {\n for (const tag of basket.tags || []) {\n await this.storage.tagOutput({ outputId, userId: this.userId }, tag);\n }\n }\n async storeNewWalletPaymentForOutput(transactionId, payment) {\n const now = new Date();\n const txOut = {\n created_at: now,\n updated_at: now,\n outputId: 0,\n transactionId,\n userId: this.userId,\n spendable: true,\n lockingScript: payment.txo.lockingScript.toBinary(),\n vout: payment.vout,\n basketId: this.changeBasket.basketId,\n satoshis: payment.txo.satoshis,\n txid: this.txid,\n senderIdentityKey: payment.senderIdentityKey,\n type: 'P2PKH',\n providedBy: 'storage',\n purpose: 'change',\n derivationPrefix: payment.derivationPrefix,\n derivationSuffix: payment.derivationSuffix,\n change: true,\n spentBy: undefined,\n 
customInstructions: undefined,\n outputDescription: '',\n spendingDescription: undefined\n };\n txOut.outputId = await this.storage.insertOutput(txOut);\n payment.eo = txOut;\n }\n async mergeWalletPaymentForOutput(transactionId, payment) {\n const outputId = payment.eo.outputId;\n const update = {\n basketId: this.changeBasket.basketId,\n type: 'P2PKH',\n customInstructions: undefined,\n change: true,\n providedBy: 'storage',\n purpose: 'change',\n senderIdentityKey: payment.senderIdentityKey,\n derivationPrefix: payment.derivationPrefix,\n derivationSuffix: payment.derivationSuffix\n };\n await this.storage.updateOutput(outputId, update);\n payment.eo = { ...payment.eo, ...update };\n }\n async mergeBasketInsertionForOutput(transactionId, basket) {\n const outputId = basket.eo.outputId;\n const update = {\n basketId: (await this.getBasket(basket.basket)).basketId,\n type: 'custom',\n customInstructions: basket.customInstructions,\n change: false,\n providedBy: 'you',\n purpose: '',\n senderIdentityKey: undefined,\n derivationPrefix: undefined,\n derivationSuffix: undefined\n };\n await this.storage.updateOutput(outputId, update);\n basket.eo = { ...basket.eo, ...update };\n }\n async storeNewBasketInsertionForOutput(transactionId, basket) {\n const now = new Date();\n const txOut = {\n created_at: now,\n updated_at: now,\n outputId: 0,\n transactionId,\n userId: this.userId,\n spendable: true,\n lockingScript: basket.txo.lockingScript.toBinary(),\n vout: basket.vout,\n basketId: (await this.getBasket(basket.basket)).basketId,\n satoshis: basket.txo.satoshis,\n txid: this.txid,\n type: 'custom',\n customInstructions: basket.customInstructions,\n change: false,\n spentBy: undefined,\n outputDescription: '',\n spendingDescription: undefined,\n providedBy: 'you',\n purpose: '',\n senderIdentityKey: undefined,\n derivationPrefix: undefined,\n derivationSuffix: undefined\n };\n txOut.outputId = await this.storage.insertOutput(txOut);\n await this.addBasketTags(basket, 
txOut.outputId);\n basket.eo = txOut;\n }\n}\n//# sourceMappingURL=internalizeAction.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/internalizeAction.js?\n}"); + +/***/ }), + +/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/listActionsIdb.js": +/*!*******************************************************************************************!*\ + !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/listActionsIdb.js ***! + \*******************************************************************************************/ +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.listActionsIdb = listActionsIdb;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst index_client_1 = __webpack_require__(/*! ../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nconst sdk_2 = __webpack_require__(/*! ../../sdk */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/index.js\");\nconst ListActionsSpecOp_1 = __webpack_require__(/*! ./ListActionsSpecOp */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/ListActionsSpecOp.js\");\nasync function listActionsIdb(storage, auth, vargs) {\n const limit = vargs.limit;\n const offset = vargs.offset;\n const r = {\n totalActions: 0,\n actions: []\n };\n let specOp = undefined;\n let specOpLabels = [];\n let labels = [];\n for (const label of vargs.labels) {\n if ((0, sdk_2.isListActionsSpecOp)(label)) {\n specOp = (0, ListActionsSpecOp_1.getLabelToSpecOp)()[label];\n }\n else {\n labels.push(label);\n }\n }\n if ((specOp === null || specOp === void 0 ? 
void 0 : specOp.labelsToIntercept) !== undefined) {\n const intercept = specOp.labelsToIntercept;\n const labels2 = labels;\n labels = [];\n if (intercept.length === 0) {\n specOpLabels = labels2;\n }\n for (const label of labels2) {\n if (intercept.indexOf(label) >= 0) {\n specOpLabels.push(label);\n }\n else {\n labels.push(label);\n }\n }\n }\n let labelIds = [];\n if (labels.length > 0) {\n await storage.filterTxLabels({ partial: { userId: auth.userId, isDeleted: false } }, tl => {\n if (labels.includes(tl.label)) {\n labelIds.push(tl.txLabelId);\n }\n });\n }\n const isQueryModeAll = vargs.labelQueryMode === 'all';\n if (isQueryModeAll && labelIds.length < labels.length)\n // all the required labels don't exist, impossible to satisfy.\n return r;\n if (!isQueryModeAll && labelIds.length === 0 && labels.length > 0)\n // any and only non-existing labels, impossible to satisfy.\n return r;\n const stati = (specOp === null || specOp === void 0 ? void 0 : specOp.setStatusFilter)\n ? specOp.setStatusFilter()\n : ['completed', 'unprocessed', 'sending', 'unproven', 'unsigned', 'nosend', 'nonfinal'];\n const noLabels = labelIds.length === 0;\n const txs = await storage.findTransactions({\n partial: { userId: auth.userId },\n status: stati,\n paged: { limit: vargs.limit, offset: vargs.offset },\n noRawTx: true\n }, labelIds, isQueryModeAll);\n if (txs.length === vargs.limit) {\n r.totalActions = await storage.countTransactions({ partial: { userId: auth.userId }, status: stati }, labelIds, isQueryModeAll);\n }\n else {\n r.totalActions = txs.length;\n }\n if (specOp === null || specOp === void 0 ? 
void 0 : specOp.postProcess) {\n await specOp.postProcess(storage, auth, vargs, specOpLabels, txs);\n }\n for (const tx of txs) {\n const wtx = {\n txid: tx.txid || '',\n satoshis: tx.satoshis || 0,\n status: tx.status,\n isOutgoing: !!tx.isOutgoing,\n description: tx.description || '',\n version: tx.version || 0,\n lockTime: tx.lockTime || 0\n };\n r.actions.push(wtx);\n }\n if (vargs.includeLabels || vargs.includeInputs || vargs.includeOutputs) {\n await Promise.all(txs.map(async (tx, i) => {\n var _a, _b, _c;\n //let i = -1\n //for (const tx of txs) {\n // i++\n const action = r.actions[i];\n if (vargs.includeLabels) {\n action.labels = (await storage.getLabelsForTransactionId(tx.transactionId)).map(l => l.label);\n }\n if (vargs.includeOutputs) {\n const outputs = await storage.findOutputs({\n partial: { transactionId: tx.transactionId },\n noScript: !vargs.includeOutputLockingScripts\n });\n action.outputs = [];\n for (const o of outputs) {\n await storage.extendOutput(o, true, true);\n const wo = {\n satoshis: o.satoshis || 0,\n spendable: !!o.spendable,\n tags: ((_a = o.tags) === null || _a === void 0 ? void 0 : _a.map(t => t.tag)) || [],\n outputIndex: Number(o.vout),\n outputDescription: o.outputDescription || '',\n basket: ((_b = o.basket) === null || _b === void 0 ? void 0 : _b.name) || ''\n };\n if (vargs.includeOutputLockingScripts)\n wo.lockingScript = (0, index_client_1.asString)(o.lockingScript || []);\n action.outputs.push(wo);\n }\n }\n if (vargs.includeInputs) {\n const inputs = await storage.findOutputs({\n partial: { spentBy: tx.transactionId },\n noScript: !vargs.includeInputSourceLockingScripts\n });\n action.inputs = [];\n if (inputs.length > 0) {\n const rawTx = await storage.getRawTxOfKnownValidTransaction(tx.txid);\n let bsvTx = undefined;\n if (rawTx) {\n bsvTx = sdk_1.Transaction.fromBinary(rawTx);\n }\n for (const o of inputs) {\n await storage.extendOutput(o, true, true);\n const input = bsvTx === null || bsvTx === void 0 ? 
void 0 : bsvTx.inputs.find(v => v.sourceTXID === o.txid && v.sourceOutputIndex === o.vout);\n const wo = {\n sourceOutpoint: `${o.txid}.${o.vout}`,\n sourceSatoshis: o.satoshis || 0,\n inputDescription: o.outputDescription || '',\n sequenceNumber: (input === null || input === void 0 ? void 0 : input.sequence) || 0\n };\n action.inputs.push(wo);\n if (vargs.includeInputSourceLockingScripts) {\n wo.sourceLockingScript = (0, index_client_1.asString)(o.lockingScript || []);\n }\n if (vargs.includeInputUnlockingScripts) {\n wo.unlockingScript = (_c = input === null || input === void 0 ? void 0 : input.unlockingScript) === null || _c === void 0 ? void 0 : _c.toHex();\n }\n }\n }\n }\n //}\n }));\n }\n return r;\n}\n//# sourceMappingURL=listActionsIdb.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/listActionsIdb.js?\n}"); /***/ }), @@ -3744,6 +3590,17 @@ /***/ }), +/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/listOutputsIdb.js": +/*!*******************************************************************************************!*\ + !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/listOutputsIdb.js ***! + \*******************************************************************************************/ +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.listOutputsIdb = listOutputsIdb;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst index_client_1 = __webpack_require__(/*! ../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nconst ListOutputsSpecOp_1 = __webpack_require__(/*! 
./ListOutputsSpecOp */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/ListOutputsSpecOp.js\");\nasync function listOutputsIdb(storage, auth, vargs, originator) {\n const userId = (0, index_client_1.verifyId)(auth.userId);\n const limit = vargs.limit;\n const offset = vargs.offset;\n const r = {\n totalOutputs: 0,\n outputs: []\n };\n /*\n ListOutputsArgs {\n basket: BasketStringUnder300Bytes\n \n tags?: OutputTagStringUnder300Bytes[]\n tagQueryMode?: 'all' | 'any' // default any\n \n limit?: PositiveIntegerDefault10Max10000\n offset?: PositiveIntegerOrZero\n }\n */\n let specOp = undefined;\n let basketId = undefined;\n const basketsById = {};\n if (vargs.basket) {\n let b = vargs.basket;\n specOp = (0, ListOutputsSpecOp_1.getBasketToSpecOp)()[b];\n b = specOp ? (specOp.useBasket ? specOp.useBasket : '') : b;\n if (b) {\n const baskets = await storage.findOutputBaskets({\n partial: { userId, name: b }\n });\n if (baskets.length !== 1) {\n // If basket does not exist, result is no outputs.\n return r;\n }\n const basket = baskets[0];\n basketId = basket.basketId;\n basketsById[basketId] = basket;\n }\n }\n let tags = [...vargs.tags];\n const specOpTags = [];\n if (specOp && specOp.tagsParamsCount) {\n specOpTags.push(...tags.splice(0, Math.min(tags.length, specOp.tagsParamsCount)));\n }\n if (specOp && specOp.tagsToIntercept) {\n // Pull out tags used by current specOp\n const ts = tags;\n tags = [];\n for (const t of ts) {\n if (specOp.tagsToIntercept.length === 0 || specOp.tagsToIntercept.indexOf(t) >= 0) {\n specOpTags.push(t);\n if (t === 'all') {\n basketId = undefined;\n }\n }\n else {\n tags.push(t);\n }\n }\n }\n if (specOp && specOp.resultFromTags) {\n const r = await specOp.resultFromTags(storage, auth, vargs, specOpTags);\n return r;\n }\n let tagIds = [];\n if (tags && tags.length > 0) {\n await storage.filterOutputTags({ partial: { userId, isDeleted: false } }, ot => {\n if (tags.includes(ot.tag)) {\n tagIds.push(ot.outputTagId);\n 
}\n });\n }\n const isQueryModeAll = vargs.tagQueryMode === 'all';\n if (isQueryModeAll && tagIds.length < tags.length)\n // all the required tags don't exist, impossible to satisfy.\n return r;\n if (!isQueryModeAll && tagIds.length === 0 && tags.length > 0)\n // any and only non-existing labels, impossible to satisfy.\n return r;\n const noTags = tagIds.length === 0;\n const includeSpent = false;\n const stati = ['completed', 'unproven', 'nosend'];\n const args = {\n partial: {\n userId,\n basketId,\n spendable: !includeSpent ? true : undefined\n },\n txStatus: stati,\n noScript: true\n };\n if (!specOp || !specOp.ignoreLimit)\n args.paged = { limit, offset };\n let outputs = await storage.findOutputs(args, tagIds, isQueryModeAll);\n if (outputs.length === vargs.limit) {\n args.paged = undefined;\n r.totalOutputs = await storage.countOutputs(args, tagIds, isQueryModeAll);\n }\n else {\n r.totalOutputs = outputs.length;\n }\n if (specOp) {\n if (specOp.filterOutputs)\n outputs = await specOp.filterOutputs(storage, auth, vargs, specOpTags, outputs);\n if (specOp.resultFromOutputs) {\n const r = await specOp.resultFromOutputs(storage, auth, vargs, specOpTags, outputs);\n return r;\n }\n }\n /*\n ListOutputsArgs {\n include?: 'locking scripts' | 'entire transactions'\n includeCustomInstructions?: BooleanDefaultFalse\n includeTags?: BooleanDefaultFalse\n includeLabels?: BooleanDefaultFalse\n }\n \n ListOutputsResult {\n totalOutputs: PositiveIntegerOrZero\n BEEF?: BEEF\n outputs: Array\n }\n \n WalletOutput {\n satoshis: SatoshiValue\n spendable: boolean\n outpoint: OutpointString\n \n customInstructions?: string\n lockingScript?: HexString\n tags?: OutputTagStringUnder300Bytes[]\n labels?: LabelStringUnder300Bytes[]\n }\n */\n const labelsByTxid = {};\n const beef = new sdk_1.Beef();\n for (const o of outputs) {\n const wo = {\n satoshis: Number(o.satoshis),\n spendable: !!o.spendable,\n outpoint: `${o.txid}.${o.vout}`\n };\n r.outputs.push(wo);\n //if 
(vargs.includeBasket && o.basketId) {\n // if (!basketsById[o.basketId]) {\n // basketsById[o.basketId] = verifyTruthy(await dsk.findOutputBasketId(o.basketId!, trx))\n // }\n // wo.basket = basketsById[o.basketId].name\n //}\n if (vargs.includeCustomInstructions && o.customInstructions)\n wo.customInstructions = o.customInstructions;\n if (vargs.includeLabels && o.txid) {\n if (labelsByTxid[o.txid] === undefined) {\n labelsByTxid[o.txid] = (await storage.getLabelsForTransactionId(o.transactionId)).map(l => l.label);\n }\n wo.labels = labelsByTxid[o.txid];\n }\n if (vargs.includeTags) {\n wo.tags = (await storage.getTagsForOutputId(o.outputId)).map(t => t.tag);\n }\n if (vargs.includeLockingScripts) {\n await storage.validateOutputScript(o);\n if (o.lockingScript)\n wo.lockingScript = (0, index_client_1.asString)(o.lockingScript);\n }\n if (vargs.includeTransactions && !beef.findTxid(o.txid)) {\n await storage.getValidBeefForKnownTxid(o.txid, beef, undefined, vargs.knownTxids);\n }\n }\n if (vargs.includeTransactions) {\n r.BEEF = beef.toBinary();\n }\n return r;\n}\n//# sourceMappingURL=listOutputsIdb.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/listOutputsIdb.js?\n}"); + +/***/ }), + /***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/processAction.js": /*!******************************************************************************************!*\ !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/processAction.js ***! @@ -3751,7 +3608,29 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.processAction = processAction;\nexports.shareReqsWithWorld = shareReqsWithWorld;\n// eslint-disable-next-line @typescript-eslint/no-unused-vars\nconst sdk_1 = __webpack_require__(/*! 
@bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst aggregateResults_1 = __webpack_require__(/*! ../../utility/aggregateResults */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/aggregateResults.js\");\nconst stampLog_1 = __webpack_require__(/*! ../../utility/stampLog */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/stampLog.js\");\nconst utilityHelpers_1 = __webpack_require__(/*! ../../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst EntityProvenTxReq_1 = __webpack_require__(/*! ../schema/entities/EntityProvenTxReq */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityProvenTxReq.js\");\nconst WERR_errors_1 = __webpack_require__(/*! ../../sdk/WERR_errors */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WERR_errors.js\");\nconst parseTxScriptOffsets_1 = __webpack_require__(/*! ../../utility/parseTxScriptOffsets */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/parseTxScriptOffsets.js\");\nconst utilityHelpers_noBuffer_1 = __webpack_require__(/*! ../../utility/utilityHelpers.noBuffer */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.noBuffer.js\");\nasync function processAction(storage, auth, args) {\n (0, stampLog_1.stampLog)(args.log, `start storage processActionSdk`);\n const userId = (0, utilityHelpers_1.verifyId)(auth.userId);\n const r = {\n sendWithResults: undefined\n };\n let req;\n const txidsOfReqsToShareWithWorld = [...args.sendWith];\n if (args.isNewTx) {\n const vargs = await validateCommitNewTxToStorageArgs(storage, userId, args);\n ({ req, log: args.log } = await commitNewTxToStorage(storage, userId, vargs));\n if (!req)\n throw new WERR_errors_1.WERR_INTERNAL();\n // Add the new txid to sendWith unless there are no others to send and the noSend option is set.\n if (args.isNoSend && !args.isSendWith)\n (0, stampLog_1.stampLog)(args.log, `... 
storage processActionSdk newTx committed noSend`);\n else {\n txidsOfReqsToShareWithWorld.push(req.txid);\n (0, stampLog_1.stampLog)(args.log, `... storage processActionSdk newTx committed sendWith ${req.txid}`);\n }\n }\n const { swr, ndr } = await shareReqsWithWorld(storage, userId, txidsOfReqsToShareWithWorld, args.isDelayed);\n r.sendWithResults = swr;\n r.notDelayedResults = ndr;\n (0, stampLog_1.stampLog)(args.log, `end storage processActionSdk`);\n return r;\n}\n/**\n * Verifies that all the txids are known reqs with ready-to-share status.\n * Assigns a batch identifier and updates all the provenTxReqs.\n * If not isDelayed, triggers an initial attempt to broadcast the batch and returns the results.\n *\n * @param storage\n * @param userId\n * @param txids\n * @param isDelayed\n */\nasync function shareReqsWithWorld(storage, userId, txids, isDelayed) {\n let swr = [];\n let ndr = undefined;\n if (txids.length < 1)\n return { swr, ndr };\n // Collect what we know about these sendWith transaction txids from storage.\n const r = await storage.getReqsAndBeefToShareWithWorld(txids, []);\n const readyToSendReqs = [];\n for (const getReq of r.details) {\n let status = 'failed';\n if (getReq.status === 'alreadySent')\n status = 'unproven';\n else if (getReq.status === 'readyToSend') {\n status = 'sending';\n readyToSendReqs.push(new EntityProvenTxReq_1.EntityProvenTxReq(getReq.req));\n }\n swr.push({\n txid: getReq.txid,\n status\n });\n }\n // Filter original txids down to reqIds that are available and need sending\n const readyToSendReqIds = readyToSendReqs.map(r => r.id);\n const transactionIds = readyToSendReqs.map(r => r.notify.transactionIds || []).flat();\n // If there are reqs to send, verify that we have a valid aggregate beef for them.\n // If isDelayed, this (or a different beef) will have to be rebuilt at the time of sending.\n if (readyToSendReqs.length > 0) {\n const beefIsValid = await r.beef.verify(await storage.getServices().getChainTracker());\n if 
(!beefIsValid) {\n console.log(`VERIFY FALSE BEEF: ${r.beef.toLogString()}`);\n throw new WERR_errors_1.WERR_INTERNAL(`merged Beef failed validation.`);\n }\n }\n // Set req batch property for the reqs being sent\n // If delayed, also bump status to 'unsent' and we're done here\n const batch = txids.length > 1 ? (0, utilityHelpers_1.randomBytesBase64)(16) : undefined;\n if (isDelayed) {\n // Just bump the req status to 'unsent' to enable background sending...\n if (readyToSendReqIds.length > 0) {\n await storage.transaction(async (trx) => {\n await storage.updateProvenTxReq(readyToSendReqIds, { status: 'unsent', batch }, trx);\n await storage.updateTransaction(transactionIds, { status: 'sending' }, trx);\n });\n }\n return { swr, ndr };\n }\n if (readyToSendReqIds.length < 1) {\n return { swr, ndr };\n }\n if (batch) {\n // Keep batch values in sync...\n for (const req of readyToSendReqs)\n req.batch = batch;\n await storage.updateProvenTxReq(readyToSendReqIds, { batch });\n }\n //\n // Handle the NON-DELAYED-SEND-NOW case\n //\n const prtn = await storage.attemptToPostReqsToNetwork(readyToSendReqs);\n const { swr: swrRes, rar } = await (0, aggregateResults_1.aggregateActionResults)(storage, swr, prtn);\n return { swr: swrRes, ndr: rar };\n}\nasync function validateCommitNewTxToStorageArgs(storage, userId, params) {\n if (!params.reference || !params.txid || !params.rawTx)\n throw new WERR_errors_1.WERR_INVALID_OPERATION('One or more expected params are undefined.');\n let tx;\n try {\n tx = sdk_1.Transaction.fromBinary(params.rawTx);\n }\n catch (e) {\n throw new WERR_errors_1.WERR_INVALID_OPERATION('Parsing serialized transaction failed.');\n }\n if (params.txid !== tx.id('hex'))\n throw new WERR_errors_1.WERR_INVALID_OPERATION(`Hash of serialized transaction doesn't match expected txid`);\n if (!(await storage.getServices()).nLockTimeIsFinal(tx)) {\n throw new WERR_errors_1.WERR_INVALID_OPERATION(`This transaction is not final.\n Ensure that the transaction 
meets the rules for being a finalized\n which can be found at https://wiki.bitcoinsv.io/index.php/NLocktime_and_nSequence`);\n }\n const txScriptOffsets = (0, parseTxScriptOffsets_1.parseTxScriptOffsets)(params.rawTx);\n const transaction = (0, utilityHelpers_1.verifyOne)(await storage.findTransactions({\n partial: { userId, reference: params.reference }\n }));\n if (!transaction.isOutgoing)\n throw new WERR_errors_1.WERR_INVALID_OPERATION('isOutgoing is not true');\n if (!transaction.inputBEEF)\n throw new WERR_errors_1.WERR_INVALID_OPERATION();\n const beef = sdk_1.Beef.fromBinary((0, utilityHelpers_noBuffer_1.asArray)(transaction.inputBEEF));\n // TODO: Could check beef validates transaction inputs...\n // Transaction must have unsigned or unprocessed status\n if (transaction.status !== 'unsigned' && transaction.status !== 'unprocessed')\n throw new WERR_errors_1.WERR_INVALID_OPERATION(`invalid transaction status ${transaction.status}`);\n const transactionId = (0, utilityHelpers_1.verifyId)(transaction.transactionId);\n const outputOutputs = await storage.findOutputs({\n partial: { userId, transactionId }\n });\n const inputOutputs = await storage.findOutputs({\n partial: { userId, spentBy: transactionId }\n });\n const commission = (0, utilityHelpers_1.verifyOneOrNone)(await storage.findCommissions({ partial: { transactionId, userId } }));\n if (storage.commissionSatoshis > 0) {\n // A commission is required...\n if (!commission)\n throw new WERR_errors_1.WERR_INTERNAL();\n const commissionValid = tx.outputs.some(x => x.satoshis === commission.satoshis && x.lockingScript.toHex() === (0, utilityHelpers_noBuffer_1.asString)(commission.lockingScript));\n if (!commissionValid)\n throw new WERR_errors_1.WERR_INVALID_OPERATION('Transaction did not include an output to cover service fee.');\n }\n const req = EntityProvenTxReq_1.EntityProvenTxReq.fromTxid(params.txid, params.rawTx, transaction.inputBEEF);\n req.addNotifyTransactionId(transactionId);\n // 
\"Processing\" a transaction is the final step of creating a new one.\n // If it is to be sent to the network directly (prior to return from processAction),\n // then there is status pre-send and post-send.\n // Otherwise there is no post-send status.\n // Note that isSendWith trumps isNoSend, e.g. isNoSend && !isSendWith\n //\n // Determine what status the req and transaction should have pre- at the end of processing.\n // Pre-Status (to newReq/newTx) Post-Status (to all sent reqs/txs)\n // req tx req tx\n // isNoSend noSend noSend\n // !isNoSend && isDelayed unsent unprocessed\n // !isNoSend && !isDelayed unprocessed unprocessed sending/unmined sending/unproven This is the only case that sends immediately.\n let postStatus = undefined;\n let status;\n if (params.isNoSend && !params.isSendWith)\n status = { req: 'nosend', tx: 'nosend' };\n else if (!params.isNoSend && params.isDelayed)\n status = { req: 'unsent', tx: 'unprocessed' };\n else if (!params.isNoSend && !params.isDelayed) {\n status = { req: 'unprocessed', tx: 'unprocessed' };\n postStatus = { req: 'unmined', tx: 'unproven' };\n }\n else\n throw new WERR_errors_1.WERR_INTERNAL('logic error');\n req.status = status.req;\n const vargs = {\n reference: params.reference,\n txid: params.txid,\n rawTx: params.rawTx,\n isSendWith: !!params.sendWith && params.sendWith.length > 0,\n isDelayed: params.isDelayed,\n isNoSend: params.isNoSend,\n // Properties with values added during validation.\n tx,\n txScriptOffsets,\n transactionId,\n transaction,\n inputOutputs,\n outputOutputs,\n commission,\n beef,\n req,\n outputUpdates: [],\n // update txid, status in transactions table and drop rawTransaction value\n transactionUpdate: {\n txid: params.txid,\n rawTx: undefined,\n inputBEEF: undefined,\n status: status.tx\n },\n postStatus\n };\n // update outputs with txid, script offsets and lengths, drop long output scripts from outputs table\n // outputs spendable will be updated for change to true and all others to 
!!o.tracked when tx has been broadcast\n // MAX_OUTPUTSCRIPT_LENGTH is limit for scripts left in outputs table\n for (const o of vargs.outputOutputs) {\n const vout = (0, utilityHelpers_1.verifyInteger)(o.vout);\n const offset = vargs.txScriptOffsets.outputs[vout];\n const rawTxScript = (0, utilityHelpers_noBuffer_1.asString)(vargs.rawTx.slice(offset.offset, offset.offset + offset.length));\n if (o.lockingScript && rawTxScript !== (0, utilityHelpers_noBuffer_1.asString)(o.lockingScript))\n throw new WERR_errors_1.WERR_INVALID_OPERATION(`rawTx output locking script for vout ${vout} not equal to expected output script.`);\n if (tx.outputs[vout].lockingScript.toHex() !== rawTxScript)\n throw new WERR_errors_1.WERR_INVALID_OPERATION(`parsed transaction output locking script for vout ${vout} not equal to expected output script.`);\n const update = {\n txid: vargs.txid,\n spendable: true, // spendability is gated by transaction status. Remains true until the output is spent.\n scriptLength: offset.length,\n scriptOffset: offset.offset\n };\n if (offset.length > (await storage.getSettings()).maxOutputScript)\n // Remove long lockingScript data from outputs table, will be read from rawTx in proven_tx or proven_tx_reqs tables.\n update.lockingScript = undefined;\n vargs.outputUpdates.push({ id: o.outputId, update });\n }\n return vargs;\n}\nasync function commitNewTxToStorage(storage, userId, vargs) {\n let log = vargs.log;\n log = (0, stampLog_1.stampLog)(log, `start storage commitNewTxToStorage`);\n let req;\n await storage.transaction(async (trx) => {\n log = (0, stampLog_1.stampLog)(log, `... storage commitNewTxToStorage storage transaction start`);\n // Create initial 'nosend' proven_tx_req record to store signed, valid rawTx and input beef\n req = await vargs.req.insertOrMerge(storage, trx);\n log = (0, stampLog_1.stampLog)(log, `... 
storage commitNewTxToStorage req inserted`);\n for (const ou of vargs.outputUpdates) {\n await storage.updateOutput(ou.id, ou.update, trx);\n }\n log = (0, stampLog_1.stampLog)(log, `... storage commitNewTxToStorage outputs updated`);\n await storage.updateTransaction(vargs.transactionId, vargs.transactionUpdate, trx);\n log = (0, stampLog_1.stampLog)(log, `... storage commitNewTxToStorage storage transaction end`);\n });\n log = (0, stampLog_1.stampLog)(log, `... storage commitNewTxToStorage storage transaction await done`);\n const r = {\n req: (0, utilityHelpers_1.verifyTruthy)(req),\n log\n };\n log = (0, stampLog_1.stampLog)(log, `end storage commitNewTxToStorage`);\n return r;\n}\n//# sourceMappingURL=processAction.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/processAction.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.processAction = processAction;\nexports.shareReqsWithWorld = shareReqsWithWorld;\n// eslint-disable-next-line @typescript-eslint/no-unused-vars\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst index_client_1 = __webpack_require__(/*! ../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nconst aggregateResults_1 = __webpack_require__(/*! 
../../utility/aggregateResults */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/aggregateResults.js\");\nasync function processAction(storage, auth, args) {\n (0, index_client_1.stampLog)(args.log, `start storage processActionSdk`);\n const userId = (0, index_client_1.verifyId)(auth.userId);\n const r = {\n sendWithResults: undefined\n };\n let req;\n const txidsOfReqsToShareWithWorld = [...args.sendWith];\n if (args.isNewTx) {\n const vargs = await validateCommitNewTxToStorageArgs(storage, userId, args);\n ({ req, log: args.log } = await commitNewTxToStorage(storage, userId, vargs));\n if (!req)\n throw new index_client_1.sdk.WERR_INTERNAL();\n // Add the new txid to sendWith unless there are no others to send and the noSend option is set.\n if (args.isNoSend && !args.isSendWith)\n (0, index_client_1.stampLog)(args.log, `... storage processActionSdk newTx committed noSend`);\n else {\n txidsOfReqsToShareWithWorld.push(req.txid);\n (0, index_client_1.stampLog)(args.log, `... 
storage processActionSdk newTx committed sendWith ${req.txid}`);\n }\n }\n const { swr, ndr } = await shareReqsWithWorld(storage, userId, txidsOfReqsToShareWithWorld, args.isDelayed);\n r.sendWithResults = swr;\n r.notDelayedResults = ndr;\n (0, index_client_1.stampLog)(args.log, `end storage processActionSdk`);\n return r;\n}\n/**\n * Verifies that all the txids are known reqs with ready-to-share status.\n * Assigns a batch identifier and updates all the provenTxReqs.\n * If not isDelayed, triggers an initial attempt to broadcast the batch and returns the results.\n *\n * @param storage\n * @param userId\n * @param txids\n * @param isDelayed\n */\nasync function shareReqsWithWorld(storage, userId, txids, isDelayed) {\n let swr = [];\n let ndr = undefined;\n if (txids.length < 1)\n return { swr, ndr };\n // Collect what we know about these sendWith transaction txids from storage.\n const r = await storage.getReqsAndBeefToShareWithWorld(txids, []);\n const readyToSendReqs = [];\n for (const getReq of r.details) {\n let status = 'failed';\n if (getReq.status === 'alreadySent')\n status = 'unproven';\n else if (getReq.status === 'readyToSend') {\n status = 'sending';\n readyToSendReqs.push(new index_client_1.EntityProvenTxReq(getReq.req));\n }\n swr.push({\n txid: getReq.txid,\n status\n });\n }\n // Filter original txids down to reqIds that are available and need sending\n const readyToSendReqIds = readyToSendReqs.map(r => r.id);\n const transactionIds = readyToSendReqs.map(r => r.notify.transactionIds || []).flat();\n // If there are reqs to send, verify that we have a valid aggregate beef for them.\n // If isDelayed, this (or a different beef) will have to be rebuilt at the time of sending.\n if (readyToSendReqs.length > 0) {\n const beefIsValid = await r.beef.verify(await storage.getServices().getChainTracker());\n if (!beefIsValid) {\n console.log(`VERIFY FALSE BEEF: ${r.beef.toLogString()}`);\n throw new index_client_1.sdk.WERR_INTERNAL(`merged Beef failed 
validation.`);\n }\n }\n // Set req batch property for the reqs being sent\n // If delayed, also bump status to 'unsent' and we're done here\n const batch = txids.length > 1 ? (0, index_client_1.randomBytesBase64)(16) : undefined;\n if (isDelayed) {\n // Just bump the req status to 'unsent' to enable background sending...\n if (readyToSendReqIds.length > 0) {\n await storage.transaction(async (trx) => {\n await storage.updateProvenTxReq(readyToSendReqIds, { status: 'unsent', batch }, trx);\n await storage.updateTransaction(transactionIds, { status: 'sending' }, trx);\n });\n }\n return { swr, ndr };\n }\n if (readyToSendReqIds.length < 1) {\n return { swr, ndr };\n }\n if (batch) {\n // Keep batch values in sync...\n for (const req of readyToSendReqs)\n req.batch = batch;\n await storage.updateProvenTxReq(readyToSendReqIds, { batch });\n }\n //\n // Handle the NON-DELAYED-SEND-NOW case\n //\n const prtn = await storage.attemptToPostReqsToNetwork(readyToSendReqs);\n const { swr: swrRes, rar } = await (0, aggregateResults_1.aggregateActionResults)(storage, swr, prtn);\n return { swr: swrRes, ndr: rar };\n}\nasync function validateCommitNewTxToStorageArgs(storage, userId, params) {\n if (!params.reference || !params.txid || !params.rawTx)\n throw new index_client_1.sdk.WERR_INVALID_OPERATION('One or more expected params are undefined.');\n let tx;\n try {\n tx = sdk_1.Transaction.fromBinary(params.rawTx);\n }\n catch (e) {\n throw new index_client_1.sdk.WERR_INVALID_OPERATION('Parsing serialized transaction failed.');\n }\n if (params.txid !== tx.id('hex'))\n throw new index_client_1.sdk.WERR_INVALID_OPERATION(`Hash of serialized transaction doesn't match expected txid`);\n if (!(await storage.getServices()).nLockTimeIsFinal(tx)) {\n throw new index_client_1.sdk.WERR_INVALID_OPERATION(`This transaction is not final.\n Ensure that the transaction meets the rules for being a finalized\n which can be found at 
https://wiki.bitcoinsv.io/index.php/NLocktime_and_nSequence`);\n }\n const txScriptOffsets = (0, index_client_1.parseTxScriptOffsets)(params.rawTx);\n const transaction = (0, index_client_1.verifyOne)(await storage.findTransactions({\n partial: { userId, reference: params.reference }\n }));\n if (!transaction.isOutgoing)\n throw new index_client_1.sdk.WERR_INVALID_OPERATION('isOutgoing is not true');\n if (!transaction.inputBEEF)\n throw new index_client_1.sdk.WERR_INVALID_OPERATION();\n const beef = sdk_1.Beef.fromBinary((0, index_client_1.asArray)(transaction.inputBEEF));\n // TODO: Could check beef validates transaction inputs...\n // Transaction must have unsigned or unprocessed status\n if (transaction.status !== 'unsigned' && transaction.status !== 'unprocessed')\n throw new index_client_1.sdk.WERR_INVALID_OPERATION(`invalid transaction status ${transaction.status}`);\n const transactionId = (0, index_client_1.verifyId)(transaction.transactionId);\n const outputOutputs = await storage.findOutputs({\n partial: { userId, transactionId }\n });\n const inputOutputs = await storage.findOutputs({\n partial: { userId, spentBy: transactionId }\n });\n const commission = (0, index_client_1.verifyOneOrNone)(await storage.findCommissions({ partial: { transactionId, userId } }));\n if (storage.commissionSatoshis > 0) {\n // A commission is required...\n if (!commission)\n throw new index_client_1.sdk.WERR_INTERNAL();\n const commissionValid = tx.outputs.some(x => x.satoshis === commission.satoshis && x.lockingScript.toHex() === (0, index_client_1.asString)(commission.lockingScript));\n if (!commissionValid)\n throw new index_client_1.sdk.WERR_INVALID_OPERATION('Transaction did not include an output to cover service fee.');\n }\n const req = index_client_1.EntityProvenTxReq.fromTxid(params.txid, params.rawTx, transaction.inputBEEF);\n req.addNotifyTransactionId(transactionId);\n // \"Processing\" a transaction is the final step of creating a new one.\n // If it is to be 
sent to the network directly (prior to return from processAction),\n // then there is status pre-send and post-send.\n // Otherwise there is no post-send status.\n // Note that isSendWith trumps isNoSend, e.g. isNoSend && !isSendWith\n //\n // Determine what status the req and transaction should have pre- at the end of processing.\n // Pre-Status (to newReq/newTx) Post-Status (to all sent reqs/txs)\n // req tx req tx\n // isNoSend noSend noSend\n // !isNoSend && isDelayed unsent unprocessed\n // !isNoSend && !isDelayed unprocessed unprocessed sending/unmined sending/unproven This is the only case that sends immediately.\n let postStatus = undefined;\n let status;\n if (params.isNoSend && !params.isSendWith)\n status = { req: 'nosend', tx: 'nosend' };\n else if (!params.isNoSend && params.isDelayed)\n status = { req: 'unsent', tx: 'unprocessed' };\n else if (!params.isNoSend && !params.isDelayed) {\n status = { req: 'unprocessed', tx: 'unprocessed' };\n postStatus = { req: 'unmined', tx: 'unproven' };\n }\n else\n throw new index_client_1.sdk.WERR_INTERNAL('logic error');\n req.status = status.req;\n const vargs = {\n reference: params.reference,\n txid: params.txid,\n rawTx: params.rawTx,\n isSendWith: !!params.sendWith && params.sendWith.length > 0,\n isDelayed: params.isDelayed,\n isNoSend: params.isNoSend,\n // Properties with values added during validation.\n tx,\n txScriptOffsets,\n transactionId,\n transaction,\n inputOutputs,\n outputOutputs,\n commission,\n beef,\n req,\n outputUpdates: [],\n // update txid, status in transactions table and drop rawTransaction value\n transactionUpdate: {\n txid: params.txid,\n rawTx: undefined,\n inputBEEF: undefined,\n status: status.tx\n },\n postStatus\n };\n // update outputs with txid, script offsets and lengths, drop long output scripts from outputs table\n // outputs spendable will be updated for change to true and all others to !!o.tracked when tx has been broadcast\n // MAX_OUTPUTSCRIPT_LENGTH is limit for scripts 
left in outputs table\n for (const o of vargs.outputOutputs) {\n const vout = (0, index_client_1.verifyInteger)(o.vout);\n const offset = vargs.txScriptOffsets.outputs[vout];\n const rawTxScript = (0, index_client_1.asString)(vargs.rawTx.slice(offset.offset, offset.offset + offset.length));\n if (o.lockingScript && rawTxScript !== (0, index_client_1.asString)(o.lockingScript))\n throw new index_client_1.sdk.WERR_INVALID_OPERATION(`rawTx output locking script for vout ${vout} not equal to expected output script.`);\n if (tx.outputs[vout].lockingScript.toHex() !== rawTxScript)\n throw new index_client_1.sdk.WERR_INVALID_OPERATION(`parsed transaction output locking script for vout ${vout} not equal to expected output script.`);\n const update = {\n txid: vargs.txid,\n spendable: true, // spendability is gated by transaction status. Remains true until the output is spent.\n scriptLength: offset.length,\n scriptOffset: offset.offset\n };\n if (offset.length > (await storage.getSettings()).maxOutputScript)\n // Remove long lockingScript data from outputs table, will be read from rawTx in proven_tx or proven_tx_reqs tables.\n update.lockingScript = undefined;\n vargs.outputUpdates.push({ id: o.outputId, update });\n }\n return vargs;\n}\nasync function commitNewTxToStorage(storage, userId, vargs) {\n let log = vargs.log;\n log = (0, index_client_1.stampLog)(log, `start storage commitNewTxToStorage`);\n let req;\n await storage.transaction(async (trx) => {\n log = (0, index_client_1.stampLog)(log, `... storage commitNewTxToStorage storage transaction start`);\n // Create initial 'nosend' proven_tx_req record to store signed, valid rawTx and input beef\n req = await vargs.req.insertOrMerge(storage, trx);\n log = (0, index_client_1.stampLog)(log, `... storage commitNewTxToStorage req inserted`);\n for (const ou of vargs.outputUpdates) {\n await storage.updateOutput(ou.id, ou.update, trx);\n }\n log = (0, index_client_1.stampLog)(log, `... 
storage commitNewTxToStorage outputs updated`);\n await storage.updateTransaction(vargs.transactionId, vargs.transactionUpdate, trx);\n log = (0, index_client_1.stampLog)(log, `... storage commitNewTxToStorage storage transaction end`);\n });\n log = (0, index_client_1.stampLog)(log, `... storage commitNewTxToStorage storage transaction await done`);\n const r = {\n req: (0, index_client_1.verifyTruthy)(req),\n log\n };\n log = (0, index_client_1.stampLog)(log, `end storage commitNewTxToStorage`);\n return r;\n}\n//# sourceMappingURL=processAction.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/processAction.js?\n}"); + +/***/ }), + +/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/purgeDataIdb.js": +/*!*****************************************************************************************!*\ + !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/purgeDataIdb.js ***! + \*****************************************************************************************/ +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.purgeDataIdb = purgeDataIdb;\nasync function purgeDataIdb(storage, params, trx) {\n const r = { count: 0, log: '' };\n // TODO: implement purgeDataIdb\n return r;\n}\n//# sourceMappingURL=purgeDataIdb.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/purgeDataIdb.js?\n}"); + +/***/ }), + +/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/reviewStatusIdb.js": +/*!********************************************************************************************!*\ + !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/reviewStatusIdb.js ***! 
+ \********************************************************************************************/ +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.reviewStatusIdb = reviewStatusIdb;\n/**\n * Looks for unpropagated state:\n *\n * 1. set transactions to 'failed' if not already failed and provenTxReq with matching txid has status of 'invalid'.\n * 2. sets transactions to 'completed' if provenTx with matching txid exists and current provenTxId is null.\n * 3. sets outputs to spendable true, spentBy undefined if spentBy is a transaction with status 'failed'.\n *\n * @param storage\n * @param args\n * @returns\n */\nasync function reviewStatusIdb(storage, args) {\n const r = { log: '' };\n // 1. set transactions to 'failed' if not already failed and provenTxReq with matching txid has status of 'invalid'.\n const invalidTxids = [];\n await storage.filterProvenTxReqs({ partial: { status: 'invalid' } }, txReq => {\n invalidTxids.push(txReq.txid);\n });\n for (const txid of invalidTxids) {\n const txs = await storage.findTransactions({ partial: { txid } });\n for (const tx of txs) {\n if (tx.status !== 'failed') {\n r.log += `transaction ${tx.transactionId} updated to status of 'failed' was ${tx.status}\\n`;\n await storage.updateTransactionStatus('failed', tx.transactionId);\n }\n }\n }\n // 2. sets transactions to 'completed' if provenTx with matching txid exists and current provenTxId is null.\n // 3. 
sets outputs to spendable true, spentBy undefined if spentBy is a transaction with status 'failed'.\n return r;\n}\n//# sourceMappingURL=reviewStatusIdb.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/reviewStatusIdb.js?\n}"); /***/ }), @@ -3762,7 +3641,18 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.varUintSize = varUintSize;\nexports.transactionInputSize = transactionInputSize;\nexports.transactionOutputSize = transactionOutputSize;\nexports.transactionSize = transactionSize;\nconst WERR_errors_1 = __webpack_require__(/*! ../../sdk/WERR_errors */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WERR_errors.js\");\n/**\n * Returns the byte size required to encode number as Bitcoin VarUint\n * @publicbody\n */\nfunction varUintSize(val) {\n if (val < 0)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('varUint', 'non-negative');\n return val <= 0xfc ? 1 : val <= 0xffff ? 3 : val <= 0xffffffff ? 
5 : 9;\n}\n/**\n * @param scriptSize byte length of input script\n * @returns serialized byte length a transaction input\n */\nfunction transactionInputSize(scriptSize) {\n return (32 + // txid\n 4 + // vout\n varUintSize(scriptSize) + // script length, this is already in bytes\n scriptSize + // script\n 4); // sequence number\n}\n/**\n * @param scriptSize byte length of output script\n * @returns serialized byte length a transaction output\n */\nfunction transactionOutputSize(scriptSize) {\n return (varUintSize(scriptSize) + // output script length, from script encoded as hex string\n scriptSize + // output script\n 8); // output amount (satoshis)\n}\n/**\n * Compute the serialized binary transaction size in bytes\n * given the number of inputs and outputs,\n * and the size of each script.\n * @param inputs array of input script lengths, in bytes\n * @param outputs array of output script lengths, in bytes\n * @returns total transaction size in bytes\n */\nfunction transactionSize(inputs, outputs) {\n return (4 + // Version\n varUintSize(inputs.length) + // Number of inputs\n inputs.reduce((a, e) => a + transactionInputSize(e), 0) + // all inputs\n varUintSize(outputs.length) + // Number of outputs\n outputs.reduce((a, e) => a + transactionOutputSize(e), 0) + // all outputs\n 4); // lock time\n}\n//# sourceMappingURL=utils.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/utils.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.varUintSize = varUintSize;\nexports.transactionInputSize = transactionInputSize;\nexports.transactionOutputSize = transactionOutputSize;\nexports.transactionSize = transactionSize;\nconst index_client_1 = __webpack_require__(/*! 
../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\n/**\n * Returns the byte size required to encode number as Bitcoin VarUint\n * @publicbody\n */\nfunction varUintSize(val) {\n if (val < 0)\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('varUint', 'non-negative');\n return val <= 0xfc ? 1 : val <= 0xffff ? 3 : val <= 0xffffffff ? 5 : 9;\n}\n/**\n * @param scriptSize byte length of input script\n * @returns serialized byte length a transaction input\n */\nfunction transactionInputSize(scriptSize) {\n return (32 + // txid\n 4 + // vout\n varUintSize(scriptSize) + // script length, this is already in bytes\n scriptSize + // script\n 4); // sequence number\n}\n/**\n * @param scriptSize byte length of output script\n * @returns serialized byte length a transaction output\n */\nfunction transactionOutputSize(scriptSize) {\n return (varUintSize(scriptSize) + // output script length, from script encoded as hex string\n scriptSize + // output script\n 8); // output amount (satoshis)\n}\n/**\n * Compute the serialized binary transaction size in bytes\n * given the number of inputs and outputs,\n * and the size of each script.\n * @param inputs array of input script lengths, in bytes\n * @param outputs array of output script lengths, in bytes\n * @returns total transaction size in bytes\n */\nfunction transactionSize(inputs, outputs) {\n return (4 + // Version\n varUintSize(inputs.length) + // Number of inputs\n inputs.reduce((a, e) => a + transactionInputSize(e), 0) + // all inputs\n varUintSize(outputs.length) + // Number of outputs\n outputs.reduce((a, e) => a + transactionOutputSize(e), 0) + // all outputs\n 4); // lock time\n}\n//# sourceMappingURL=utils.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/methods/utils.js?\n}"); + +/***/ }), + +/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/remoting/StorageClient.js": 
+/*!*******************************************************************************************!*\ + !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/remoting/StorageClient.js ***! + \*******************************************************************************************/ +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.StorageClient = void 0;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst index_client_1 = __webpack_require__(/*! ../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\n/**\n * `StorageClient` implements the `WalletStorageProvider` interface which allows it to\n * serve as a BRC-100 wallet's active storage.\n *\n * Internally, it uses JSON-RPC over HTTPS to make requests of a remote server.\n * Typically this server uses the `StorageServer` class to implement the service.\n *\n * The `AuthFetch` component is used to secure and authenticate the requests to the remote server.\n *\n * `AuthFetch` is initialized with a BRC-100 wallet which establishes the identity of\n * the party making requests of the remote service.\n *\n * For details of the API implemented, follow the \"See also\" link for the `WalletStorageProvider` interface.\n */\nclass StorageClient {\n constructor(wallet, endpointUrl) {\n this.nextId = 1;\n this.authClient = new sdk_1.AuthFetch(wallet);\n this.endpointUrl = endpointUrl;\n }\n /**\n * The `StorageClient` implements the `WalletStorageProvider` interface.\n * It does not implement the lower level `StorageProvider` interface.\n *\n * @returns false\n */\n isStorageProvider() {\n return false;\n }\n //////////////////////////////////////////////////////////////////////////////\n // JSON-RPC helper\n //////////////////////////////////////////////////////////////////////////////\n /**\n * Make a 
JSON-RPC call to the remote server.\n * @param method The WalletStorage method name to call.\n * @param params The array of parameters to pass to the method in order.\n */\n async rpcCall(method, params) {\n try {\n const id = this.nextId++;\n const body = {\n jsonrpc: '2.0',\n method,\n params,\n id\n };\n let response;\n try {\n response = await this.authClient.fetch(this.endpointUrl, {\n method: 'POST',\n headers: { 'Content-Type': 'application/json' },\n body: JSON.stringify(body)\n });\n }\n catch (eu) {\n throw eu;\n }\n if (!response.ok) {\n throw new Error(`WalletStorageClient rpcCall: network error ${response.status} ${response.statusText}`);\n }\n const json = await response.json();\n if (json.error) {\n const { code, message, data } = json.error;\n const err = new Error(`RPC Error: ${message}`);\n err.code = code;\n err.data = data;\n throw err;\n }\n return json.result;\n }\n catch (eu) {\n throw eu;\n }\n }\n /**\n * @returns true once storage `TableSettings` have been retreived from remote storage.\n */\n isAvailable() {\n // We'll just say \"yes\" if we have settings\n return !!this.settings;\n }\n /**\n * @returns remote storage `TableSettings` if they have been retreived by `makeAvailable`.\n * @throws WERR_INVALID_OPERATION if `makeAvailable` has not yet been called.\n */\n getSettings() {\n if (!this.settings) {\n throw new index_client_1.sdk.WERR_INVALID_OPERATION('call makeAvailable at least once before getSettings');\n }\n return this.settings;\n }\n /**\n * Must be called prior to making use of storage.\n * Retreives `TableSettings` from remote storage provider.\n * @returns remote storage `TableSettings`\n */\n async makeAvailable() {\n if (!this.settings) {\n this.settings = await this.rpcCall('makeAvailable', []);\n }\n return this.settings;\n }\n //////////////////////////////////////////////////////////////////////////////\n //\n // Implementation of all WalletStorage interface methods\n // They are simple pass-thrus to rpcCall\n //\n // 
IMPORTANT: The parameter ordering must match exactly as in your interface.\n //////////////////////////////////////////////////////////////////////////////\n /**\n * Called to cleanup resources when no further use of this object will occur.\n */\n async destroy() {\n return this.rpcCall('destroy', []);\n }\n /**\n * Requests schema migration to latest.\n * Typically remote storage will ignore this request.\n * @param storageName Unique human readable name for remote storage if it does not yet exist.\n * @param storageIdentityKey Unique identity key for remote storage if it does not yet exist.\n * @returns current schema migration identifier\n */\n async migrate(storageName, storageIdentityKey) {\n return this.rpcCall('migrate', [storageName]);\n }\n /**\n * Remote storage does not offer `Services` to remote clients.\n * @throws WERR_INVALID_OPERATION\n */\n getServices() {\n // Typically, the client would not store or retrieve \"Services\" from a remote server.\n // The \"services\" in local in-memory usage is a no-op or your own approach:\n throw new index_client_1.sdk.WERR_INVALID_OPERATION('getServices() not implemented in remote client. This method typically is not used remotely.');\n }\n /**\n * Ignored. 
Remote storage cannot share `Services` with remote clients.\n */\n setServices(v) {\n // Typically no-op for remote client\n // Because \"services\" are usually local definitions to the Storage.\n }\n /**\n * Storage level processing for wallet `internalizeAction`.\n * Updates internalized outputs in remote storage.\n * Triggers proof validation of containing transaction.\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param args Original wallet `internalizeAction` arguments.\n * @returns `internalizeAction` results\n */\n async internalizeAction(auth, args) {\n return this.rpcCall('internalizeAction', [auth, args]);\n }\n /**\n * Storage level processing for wallet `createAction`.\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param args Validated extension of original wallet `createAction` arguments.\n * @returns `StorageCreateActionResults` supporting additional wallet processing to yield `createAction` results.\n */\n async createAction(auth, args) {\n return this.rpcCall('createAction', [auth, args]);\n }\n /**\n * Storage level processing for wallet `createAction` and `signAction`.\n *\n * Handles remaining storage tasks once a fully signed transaction has been completed. 
This is common to both `createAction` and `signAction`.\n *\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param args `StorageProcessActionArgs` convey completed signed transaction to storage.\n * @returns `StorageProcessActionResults` supporting final wallet processing to yield `createAction` or `signAction` results.\n */\n async processAction(auth, args) {\n return this.rpcCall('processAction', [auth, args]);\n }\n /**\n * Aborts an action by `reference` string.\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param args original wallet `abortAction` args.\n * @returns `abortAction` result.\n */\n async abortAction(auth, args) {\n return this.rpcCall('abortAction', [auth, args]);\n }\n /**\n * Used to both find and initialize a new user by identity key.\n * It is up to the remote storage whether to allow creation of new users by this method.\n * @param identityKey of the user.\n * @returns `TableUser` for the user and whether a new user was created.\n */\n async findOrInsertUser(identityKey) {\n return this.rpcCall('findOrInsertUser', [identityKey]);\n }\n /**\n * Used to both find and insert a `TableSyncState` record for the user to track wallet data replication across storage providers.\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param storageName the name of the remote storage being sync'd\n * @param storageIdentityKey the identity key of the remote storage being sync'd\n * @returns `TableSyncState` and whether a new record was created.\n */\n async findOrInsertSyncStateAuth(auth, storageIdentityKey, storageName) {\n 
const r = await this.rpcCall('findOrInsertSyncStateAuth', [\n auth,\n storageIdentityKey,\n storageName\n ]);\n r.syncState = this.validateEntity(r.syncState, ['when']);\n return r;\n }\n /**\n * Inserts a new certificate with fields and keyring into remote storage.\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param certificate the certificate to insert.\n * @returns record Id of the inserted `TableCertificate` record.\n */\n async insertCertificateAuth(auth, certificate) {\n const r = await this.rpcCall('insertCertificateAuth', [auth, certificate]);\n return r;\n }\n /**\n * Storage level processing for wallet `listActions`.\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param args Validated extension of original wallet `listActions` arguments.\n * @returns `listActions` results.\n */\n async listActions(auth, vargs) {\n const r = await this.rpcCall('listActions', [auth, vargs]);\n return r;\n }\n /**\n * Storage level processing for wallet `listOutputs`.\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param args Validated extension of original wallet `listOutputs` arguments.\n * @returns `listOutputs` results.\n */\n async listOutputs(auth, vargs) {\n const r = await this.rpcCall('listOutputs', [auth, vargs]);\n return r;\n }\n /**\n * Storage level processing for wallet `listCertificates`.\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param args Validated extension of 
original wallet `listCertificates` arguments.\n * @returns `listCertificates` results.\n */\n async listCertificates(auth, vargs) {\n const r = await this.rpcCall('listCertificates', [auth, vargs]);\n return r;\n }\n /**\n * Find user certificates, optionally with fields.\n *\n * This certificate retrieval method supports internal wallet operations.\n * Field values are stored and retrieved encrypted.\n *\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param args `FindCertificatesArgs` determines which certificates to retrieve and whether to include fields.\n * @returns array of certificates matching args.\n */\n async findCertificatesAuth(auth, args) {\n const r = await this.rpcCall('findCertificatesAuth', [auth, args]);\n this.validateEntities(r);\n if (args.includeFields) {\n for (const c of r) {\n if (c.fields)\n this.validateEntities(c.fields);\n }\n }\n return r;\n }\n /**\n * Find output baskets.\n *\n * This retrieval method supports internal wallet operations.\n *\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param args `FindOutputBasketsArgs` determines which baskets to retrieve.\n * @returns array of output baskets matching args.\n */\n async findOutputBasketsAuth(auth, args) {\n const r = await this.rpcCall('findOutputBaskets', [auth, args]);\n this.validateEntities(r);\n return r;\n }\n /**\n * Find outputs.\n *\n * This retrieval method supports internal wallet operations.\n *\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param args `FindOutputsArgs` determines which outputs to retrieve.\n * @returns array of 
outputs matching args.\n */\n async findOutputsAuth(auth, args) {\n const r = await this.rpcCall('findOutputsAuth', [auth, args]);\n this.validateEntities(r);\n return r;\n }\n /**\n * Find requests for transaction proofs.\n *\n * This retrieval method supports internal wallet operations.\n *\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param args `FindProvenTxReqsArgs` determines which proof requests to retrieve.\n * @returns array of proof requests matching args.\n */\n async findProvenTxReqs(args) {\n const r = await this.rpcCall('findProvenTxReqs', [args]);\n this.validateEntities(r);\n return r;\n }\n /**\n * Relinquish a certificate.\n *\n * For storage supporting replication records must be kept of deletions. Therefore certificates are marked as deleted\n * when relinquished, and no longer returned by `listCertificates`, but are still retained by storage.\n *\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param args original wallet `relinquishCertificate` args.\n */\n async relinquishCertificate(auth, args) {\n return this.rpcCall('relinquishCertificate', [auth, args]);\n }\n /**\n * Relinquish an output.\n *\n * Relinquishing an output removes the output from whatever basket was tracking it.\n *\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param args original wallet `relinquishOutput` args.\n */\n async relinquishOutput(auth, args) {\n return this.rpcCall('relinquishOutput', [auth, args]);\n }\n /**\n * Process a \"chunk\" of replication data for the user.\n *\n * The normal data flow is for the active storage to 
push backups as a sequence of data chunks to backup storage providers.\n *\n * @param args a copy of the replication request args that initiated the sequence of data chunks.\n * @param chunk the current data chunk to process.\n * @returns whether processing is done, counts of inserts and udpates, and related progress tracking properties.\n */\n async processSyncChunk(args, chunk) {\n const r = await this.rpcCall('processSyncChunk', [args, chunk]);\n return r;\n }\n /**\n * Request a \"chunk\" of replication data for a specific user and storage provider.\n *\n * The normal data flow is for the active storage to push backups as a sequence of data chunks to backup storage providers.\n * Also supports recovery where non-active storage can attempt to merge available data prior to becoming active.\n *\n * @param args that identify the non-active storage which will receive replication data and constrains the replication process.\n * @returns the next \"chunk\" of replication data\n */\n async getSyncChunk(args) {\n const r = await this.rpcCall('getSyncChunk', [args]);\n if (r.certificateFields)\n r.certificateFields = this.validateEntities(r.certificateFields);\n if (r.certificates)\n r.certificates = this.validateEntities(r.certificates);\n if (r.commissions)\n r.commissions = this.validateEntities(r.commissions);\n if (r.outputBaskets)\n r.outputBaskets = this.validateEntities(r.outputBaskets);\n if (r.outputTagMaps)\n r.outputTagMaps = this.validateEntities(r.outputTagMaps);\n if (r.outputTags)\n r.outputTags = this.validateEntities(r.outputTags);\n if (r.outputs)\n r.outputs = this.validateEntities(r.outputs);\n if (r.provenTxReqs)\n r.provenTxReqs = this.validateEntities(r.provenTxReqs);\n if (r.provenTxs)\n r.provenTxs = this.validateEntities(r.provenTxs);\n if (r.transactions)\n r.transactions = this.validateEntities(r.transactions);\n if (r.txLabelMaps)\n r.txLabelMaps = this.validateEntities(r.txLabelMaps);\n if (r.txLabels)\n r.txLabels = 
this.validateEntities(r.txLabels);\n if (r.user)\n r.user = this.validateEntity(r.user);\n return r;\n }\n /**\n * Handles the data received when a new transaction proof is found in response to an outstanding request for proof data:\n *\n * - Creates a new `TableProvenTx` record.\n * - Notifies all user transaction records of the new status.\n * - Updates the proof request record to 'completed' status which enables delayed deletion.\n *\n * @param args proof request and new transaction proof data\n * @returns results of updates\n */\n async updateProvenTxReqWithNewProvenTx(args) {\n const r = await this.rpcCall('updateProvenTxReqWithNewProvenTx', [args]);\n return r;\n }\n /**\n * Ensures up-to-date wallet data replication to all configured backup storage providers,\n * then promotes one of the configured backups to active,\n * demoting the current active to new backup.\n *\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param newActiveStorageIdentityKey which must be a currently configured backup storage provider.\n */\n async setActive(auth, newActiveStorageIdentityKey) {\n return this.rpcCall('setActive', [auth, newActiveStorageIdentityKey]);\n }\n validateDate(date) {\n let r;\n if (date instanceof Date)\n r = date;\n else\n r = new Date(date);\n return r;\n }\n /**\n * Helper to force uniform behavior across database engines.\n * Use to process all individual records with time stamps retreived from database.\n */\n validateEntity(entity, dateFields) {\n entity.created_at = this.validateDate(entity.created_at);\n entity.updated_at = this.validateDate(entity.updated_at);\n if (dateFields) {\n for (const df of dateFields) {\n if (entity[df])\n entity[df] = this.validateDate(entity[df]);\n }\n }\n for (const key of Object.keys(entity)) {\n const val = entity[key];\n if (val === null) {\n entity[key] = undefined;\n }\n 
else if (val instanceof Uint8Array) {\n entity[key] = Array.from(val);\n }\n }\n return entity;\n }\n /**\n * Helper to force uniform behavior across database engines.\n * Use to process all arrays of records with time stamps retreived from database.\n * @returns input `entities` array with contained values validated.\n */\n validateEntities(entities, dateFields) {\n for (let i = 0; i < entities.length; i++) {\n entities[i] = this.validateEntity(entities[i], dateFields);\n }\n return entities;\n }\n}\nexports.StorageClient = StorageClient;\n//# sourceMappingURL=StorageClient.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/remoting/StorageClient.js?\n}"); /***/ }), @@ -3773,7 +3663,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.StorageClient = void 0;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst WERR_errors_1 = __webpack_require__(/*! 
../../sdk/WERR_errors */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WERR_errors.js\");\n/**\n * `StorageClient` implements the `WalletStorageProvider` interface which allows it to\n * serve as a BRC-100 wallet's active storage.\n *\n * Internally, it uses JSON-RPC over HTTPS to make requests of a remote server.\n * Typically this server uses the `StorageServer` class to implement the service.\n *\n * The `AuthFetch` component is used to secure and authenticate the requests to the remote server.\n *\n * `AuthFetch` is initialized with a BRC-100 wallet which establishes the identity of\n * the party making requests of the remote service.\n *\n * For details of the API implemented, follow the \"See also\" link for the `WalletStorageProvider` interface.\n */\nclass StorageClient {\n constructor(wallet, endpointUrl) {\n this.nextId = 1;\n this.authClient = new sdk_1.AuthFetch(wallet);\n this.endpointUrl = endpointUrl;\n }\n /**\n * The `StorageClient` implements the `WalletStorageProvider` interface.\n * It does not implement the lower level `StorageProvider` interface.\n *\n * @returns false\n */\n isStorageProvider() {\n return false;\n }\n //////////////////////////////////////////////////////////////////////////////\n // JSON-RPC helper\n //////////////////////////////////////////////////////////////////////////////\n /**\n * Make a JSON-RPC call to the remote server.\n * @param method The WalletStorage method name to call.\n * @param params The array of parameters to pass to the method in order.\n */\n async rpcCall(method, params) {\n try {\n const id = this.nextId++;\n const body = {\n jsonrpc: '2.0',\n method,\n params,\n id\n };\n let response;\n try {\n response = await this.authClient.fetch(this.endpointUrl, {\n method: 'POST',\n headers: { 'Content-Type': 'application/json' },\n body: JSON.stringify(body)\n });\n }\n catch (eu) {\n throw eu;\n }\n if (!response.ok) {\n throw new Error(`WalletStorageClient rpcCall: network error 
${response.status} ${response.statusText}`);\n }\n const json = await response.json();\n if (json.error) {\n const { code, message, data } = json.error;\n const err = new Error(`RPC Error: ${message}`);\n err.code = code;\n err.data = data;\n throw err;\n }\n return json.result;\n }\n catch (eu) {\n throw eu;\n }\n }\n /**\n * @returns true once storage `TableSettings` have been retreived from remote storage.\n */\n isAvailable() {\n // We'll just say \"yes\" if we have settings\n return !!this.settings;\n }\n /**\n * @returns remote storage `TableSettings` if they have been retreived by `makeAvailable`.\n * @throws WERR_INVALID_OPERATION if `makeAvailable` has not yet been called.\n */\n getSettings() {\n if (!this.settings) {\n throw new WERR_errors_1.WERR_INVALID_OPERATION('call makeAvailable at least once before getSettings');\n }\n return this.settings;\n }\n /**\n * Must be called prior to making use of storage.\n * Retreives `TableSettings` from remote storage provider.\n * @returns remote storage `TableSettings`\n */\n async makeAvailable() {\n if (!this.settings) {\n this.settings = await this.rpcCall('makeAvailable', []);\n }\n return this.settings;\n }\n //////////////////////////////////////////////////////////////////////////////\n //\n // Implementation of all WalletStorage interface methods\n // They are simple pass-thrus to rpcCall\n //\n // IMPORTANT: The parameter ordering must match exactly as in your interface.\n //////////////////////////////////////////////////////////////////////////////\n /**\n * Called to cleanup resources when no further use of this object will occur.\n */\n async destroy() {\n return this.rpcCall('destroy', []);\n }\n /**\n * Requests schema migration to latest.\n * Typically remote storage will ignore this request.\n * @param storageName Unique human readable name for remote storage if it does not yet exist.\n * @param storageIdentityKey Unique identity key for remote storage if it does not yet exist.\n * @returns 
current schema migration identifier\n */\n async migrate(storageName, storageIdentityKey) {\n return this.rpcCall('migrate', [storageName]);\n }\n /**\n * Remote storage does not offer `Services` to remote clients.\n * @throws WERR_INVALID_OPERATION\n */\n getServices() {\n // Typically, the client would not store or retrieve \"Services\" from a remote server.\n // The \"services\" in local in-memory usage is a no-op or your own approach:\n throw new WERR_errors_1.WERR_INVALID_OPERATION('getServices() not implemented in remote client. This method typically is not used remotely.');\n }\n /**\n * Ignored. Remote storage cannot share `Services` with remote clients.\n */\n setServices(v) {\n // Typically no-op for remote client\n // Because \"services\" are usually local definitions to the Storage.\n }\n /**\n * Storage level processing for wallet `internalizeAction`.\n * Updates internalized outputs in remote storage.\n * Triggers proof validation of containing transaction.\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param args Original wallet `internalizeAction` arguments.\n * @returns `internalizeAction` results\n */\n async internalizeAction(auth, args) {\n return this.rpcCall('internalizeAction', [auth, args]);\n }\n /**\n * Storage level processing for wallet `createAction`.\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param args Validated extension of original wallet `createAction` arguments.\n * @returns `StorageCreateActionResults` supporting additional wallet processing to yield `createAction` results.\n */\n async createAction(auth, args) {\n return this.rpcCall('createAction', [auth, args]);\n }\n /**\n * Storage level processing for wallet `createAction` and 
`signAction`.\n *\n * Handles remaining storage tasks once a fully signed transaction has been completed. This is common to both `createAction` and `signAction`.\n *\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param args `StorageProcessActionArgs` convey completed signed transaction to storage.\n * @returns `StorageProcessActionResults` supporting final wallet processing to yield `createAction` or `signAction` results.\n */\n async processAction(auth, args) {\n return this.rpcCall('processAction', [auth, args]);\n }\n /**\n * Aborts an action by `reference` string.\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param args original wallet `abortAction` args.\n * @returns `abortAction` result.\n */\n async abortAction(auth, args) {\n return this.rpcCall('abortAction', [auth, args]);\n }\n /**\n * Used to both find and initialize a new user by identity key.\n * It is up to the remote storage whether to allow creation of new users by this method.\n * @param identityKey of the user.\n * @returns `TableUser` for the user and whether a new user was created.\n */\n async findOrInsertUser(identityKey) {\n return this.rpcCall('findOrInsertUser', [identityKey]);\n }\n /**\n * Used to both find and insert a `TableSyncState` record for the user to track wallet data replication across storage providers.\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param storageName the name of the remote storage being sync'd\n * @param storageIdentityKey the identity key of the remote storage being sync'd\n * @returns `TableSyncState` and whether a new 
record was created.\n */\n async findOrInsertSyncStateAuth(auth, storageIdentityKey, storageName) {\n const r = await this.rpcCall('findOrInsertSyncStateAuth', [\n auth,\n storageIdentityKey,\n storageName\n ]);\n r.syncState = this.validateEntity(r.syncState, ['when']);\n return r;\n }\n /**\n * Inserts a new certificate with fields and keyring into remote storage.\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param certificate the certificate to insert.\n * @returns record Id of the inserted `TableCertificate` record.\n */\n async insertCertificateAuth(auth, certificate) {\n const r = await this.rpcCall('insertCertificateAuth', [auth, certificate]);\n return r;\n }\n /**\n * Storage level processing for wallet `listActions`.\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param args Validated extension of original wallet `listActions` arguments.\n * @returns `listActions` results.\n */\n async listActions(auth, vargs) {\n const r = await this.rpcCall('listActions', [auth, vargs]);\n return r;\n }\n /**\n * Storage level processing for wallet `listOutputs`.\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param args Validated extension of original wallet `listOutputs` arguments.\n * @returns `listOutputs` results.\n */\n async listOutputs(auth, vargs) {\n const r = await this.rpcCall('listOutputs', [auth, vargs]);\n return r;\n }\n /**\n * Storage level processing for wallet `listCertificates`.\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match 
the `AuthFetch` identity securing the remote conneciton.\n * @param args Validated extension of original wallet `listCertificates` arguments.\n * @returns `listCertificates` results.\n */\n async listCertificates(auth, vargs) {\n const r = await this.rpcCall('listCertificates', [auth, vargs]);\n return r;\n }\n /**\n * Find user certificates, optionally with fields.\n *\n * This certificate retrieval method supports internal wallet operations.\n * Field values are stored and retrieved encrypted.\n *\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param args `FindCertificatesArgs` determines which certificates to retrieve and whether to include fields.\n * @returns array of certificates matching args.\n */\n async findCertificatesAuth(auth, args) {\n const r = await this.rpcCall('findCertificatesAuth', [auth, args]);\n this.validateEntities(r);\n if (args.includeFields) {\n for (const c of r) {\n if (c.fields)\n this.validateEntities(c.fields);\n }\n }\n return r;\n }\n /**\n * Find output baskets.\n *\n * This retrieval method supports internal wallet operations.\n *\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param args `FindOutputBasketsArgs` determines which baskets to retrieve.\n * @returns array of output baskets matching args.\n */\n async findOutputBasketsAuth(auth, args) {\n const r = await this.rpcCall('findOutputBaskets', [auth, args]);\n this.validateEntities(r);\n return r;\n }\n /**\n * Find outputs.\n *\n * This retrieval method supports internal wallet operations.\n *\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote 
conneciton.\n * @param args `FindOutputsArgs` determines which outputs to retrieve.\n * @returns array of outputs matching args.\n */\n async findOutputsAuth(auth, args) {\n const r = await this.rpcCall('findOutputsAuth', [auth, args]);\n this.validateEntities(r);\n return r;\n }\n /**\n * Find requests for transaction proofs.\n *\n * This retrieval method supports internal wallet operations.\n *\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param args `FindProvenTxReqsArgs` determines which proof requests to retrieve.\n * @returns array of proof requests matching args.\n */\n async findProvenTxReqs(args) {\n const r = await this.rpcCall('findProvenTxReqs', [args]);\n this.validateEntities(r);\n return r;\n }\n /**\n * Relinquish a certificate.\n *\n * For storage supporting replication records must be kept of deletions. Therefore certificates are marked as deleted\n * when relinquished, and no longer returned by `listCertificates`, but are still retained by storage.\n *\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param args original wallet `relinquishCertificate` args.\n */\n async relinquishCertificate(auth, args) {\n return this.rpcCall('relinquishCertificate', [auth, args]);\n }\n /**\n * Relinquish an output.\n *\n * Relinquishing an output removes the output from whatever basket was tracking it.\n *\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param args original wallet `relinquishOutput` args.\n */\n async relinquishOutput(auth, args) {\n return this.rpcCall('relinquishOutput', [auth, args]);\n }\n /**\n * Process 
a \"chunk\" of replication data for the user.\n *\n * The normal data flow is for the active storage to push backups as a sequence of data chunks to backup storage providers.\n *\n * @param args a copy of the replication request args that initiated the sequence of data chunks.\n * @param chunk the current data chunk to process.\n * @returns whether processing is done, counts of inserts and udpates, and related progress tracking properties.\n */\n async processSyncChunk(args, chunk) {\n const r = await this.rpcCall('processSyncChunk', [args, chunk]);\n return r;\n }\n /**\n * Request a \"chunk\" of replication data for a specific user and storage provider.\n *\n * The normal data flow is for the active storage to push backups as a sequence of data chunks to backup storage providers.\n * Also supports recovery where non-active storage can attempt to merge available data prior to becoming active.\n *\n * @param args that identify the non-active storage which will receive replication data and constrains the replication process.\n * @returns the next \"chunk\" of replication data\n */\n async getSyncChunk(args) {\n const r = await this.rpcCall('getSyncChunk', [args]);\n if (r.certificateFields)\n r.certificateFields = this.validateEntities(r.certificateFields);\n if (r.certificates)\n r.certificates = this.validateEntities(r.certificates);\n if (r.commissions)\n r.commissions = this.validateEntities(r.commissions);\n if (r.outputBaskets)\n r.outputBaskets = this.validateEntities(r.outputBaskets);\n if (r.outputTagMaps)\n r.outputTagMaps = this.validateEntities(r.outputTagMaps);\n if (r.outputTags)\n r.outputTags = this.validateEntities(r.outputTags);\n if (r.outputs)\n r.outputs = this.validateEntities(r.outputs);\n if (r.provenTxReqs)\n r.provenTxReqs = this.validateEntities(r.provenTxReqs);\n if (r.provenTxs)\n r.provenTxs = this.validateEntities(r.provenTxs);\n if (r.transactions)\n r.transactions = this.validateEntities(r.transactions);\n if (r.txLabelMaps)\n 
r.txLabelMaps = this.validateEntities(r.txLabelMaps);\n if (r.txLabels)\n r.txLabels = this.validateEntities(r.txLabels);\n if (r.user)\n r.user = this.validateEntity(r.user);\n return r;\n }\n /**\n * Handles the data received when a new transaction proof is found in response to an outstanding request for proof data:\n *\n * - Creates a new `TableProvenTx` record.\n * - Notifies all user transaction records of the new status.\n * - Updates the proof request record to 'completed' status which enables delayed deletion.\n *\n * @param args proof request and new transaction proof data\n * @returns results of updates\n */\n async updateProvenTxReqWithNewProvenTx(args) {\n const r = await this.rpcCall('updateProvenTxReqWithNewProvenTx', [args]);\n return r;\n }\n /**\n * Ensures up-to-date wallet data replication to all configured backup storage providers,\n * then promotes one of the configured backups to active,\n * demoting the current active to new backup.\n *\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param newActiveStorageIdentityKey which must be a currently configured backup storage provider.\n */\n async setActive(auth, newActiveStorageIdentityKey) {\n return this.rpcCall('setActive', [auth, newActiveStorageIdentityKey]);\n }\n validateDate(date) {\n let r;\n if (date instanceof Date)\n r = date;\n else\n r = new Date(date);\n return r;\n }\n /**\n * Helper to force uniform behavior across database engines.\n * Use to process all individual records with time stamps retreived from database.\n */\n validateEntity(entity, dateFields) {\n entity.created_at = this.validateDate(entity.created_at);\n entity.updated_at = this.validateDate(entity.updated_at);\n if (dateFields) {\n for (const df of dateFields) {\n if (entity[df])\n entity[df] = this.validateDate(entity[df]);\n }\n }\n for (const key of 
Object.keys(entity)) {\n const val = entity[key];\n if (val === null) {\n entity[key] = undefined;\n }\n else if (val instanceof Uint8Array) {\n entity[key] = Array.from(val);\n }\n }\n return entity;\n }\n /**\n * Helper to force uniform behavior across database engines.\n * Use to process all arrays of records with time stamps retreived from database.\n * @returns input `entities` array with contained values validated.\n */\n validateEntities(entities, dateFields) {\n for (let i = 0; i < entities.length; i++) {\n entities[i] = this.validateEntity(entities[i], dateFields);\n }\n return entities;\n }\n}\nexports.StorageClient = StorageClient;\n//# sourceMappingURL=StorageMobile.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/remoting/StorageMobile.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.StorageClient = void 0;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst index_mobile_1 = __webpack_require__(/*! 
../../index.mobile */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.mobile.js\");\n/**\n * `StorageClient` implements the `WalletStorageProvider` interface which allows it to\n * serve as a BRC-100 wallet's active storage.\n *\n * Internally, it uses JSON-RPC over HTTPS to make requests of a remote server.\n * Typically this server uses the `StorageServer` class to implement the service.\n *\n * The `AuthFetch` component is used to secure and authenticate the requests to the remote server.\n *\n * `AuthFetch` is initialized with a BRC-100 wallet which establishes the identity of\n * the party making requests of the remote service.\n *\n * For details of the API implemented, follow the \"See also\" link for the `WalletStorageProvider` interface.\n */\nclass StorageClient {\n constructor(wallet, endpointUrl) {\n this.nextId = 1;\n this.authClient = new sdk_1.AuthFetch(wallet);\n this.endpointUrl = endpointUrl;\n }\n /**\n * The `StorageClient` implements the `WalletStorageProvider` interface.\n * It does not implement the lower level `StorageProvider` interface.\n *\n * @returns false\n */\n isStorageProvider() {\n return false;\n }\n //////////////////////////////////////////////////////////////////////////////\n // JSON-RPC helper\n //////////////////////////////////////////////////////////////////////////////\n /**\n * Make a JSON-RPC call to the remote server.\n * @param method The WalletStorage method name to call.\n * @param params The array of parameters to pass to the method in order.\n */\n async rpcCall(method, params) {\n try {\n const id = this.nextId++;\n const body = {\n jsonrpc: '2.0',\n method,\n params,\n id\n };\n let response;\n try {\n response = await this.authClient.fetch(this.endpointUrl, {\n method: 'POST',\n headers: { 'Content-Type': 'application/json' },\n body: JSON.stringify(body)\n });\n }\n catch (eu) {\n throw eu;\n }\n if (!response.ok) {\n throw new Error(`WalletStorageClient rpcCall: network error ${response.status} 
${response.statusText}`);\n }\n const json = await response.json();\n if (json.error) {\n const { code, message, data } = json.error;\n const err = new Error(`RPC Error: ${message}`);\n err.code = code;\n err.data = data;\n throw err;\n }\n return json.result;\n }\n catch (eu) {\n throw eu;\n }\n }\n /**\n * @returns true once storage `TableSettings` have been retreived from remote storage.\n */\n isAvailable() {\n // We'll just say \"yes\" if we have settings\n return !!this.settings;\n }\n /**\n * @returns remote storage `TableSettings` if they have been retreived by `makeAvailable`.\n * @throws WERR_INVALID_OPERATION if `makeAvailable` has not yet been called.\n */\n getSettings() {\n if (!this.settings) {\n throw new index_mobile_1.sdk.WERR_INVALID_OPERATION('call makeAvailable at least once before getSettings');\n }\n return this.settings;\n }\n /**\n * Must be called prior to making use of storage.\n * Retreives `TableSettings` from remote storage provider.\n * @returns remote storage `TableSettings`\n */\n async makeAvailable() {\n if (!this.settings) {\n this.settings = await this.rpcCall('makeAvailable', []);\n }\n return this.settings;\n }\n //////////////////////////////////////////////////////////////////////////////\n //\n // Implementation of all WalletStorage interface methods\n // They are simple pass-thrus to rpcCall\n //\n // IMPORTANT: The parameter ordering must match exactly as in your interface.\n //////////////////////////////////////////////////////////////////////////////\n /**\n * Called to cleanup resources when no further use of this object will occur.\n */\n async destroy() {\n return this.rpcCall('destroy', []);\n }\n /**\n * Requests schema migration to latest.\n * Typically remote storage will ignore this request.\n * @param storageName Unique human readable name for remote storage if it does not yet exist.\n * @param storageIdentityKey Unique identity key for remote storage if it does not yet exist.\n * @returns current schema 
migration identifier\n */\n async migrate(storageName, storageIdentityKey) {\n return this.rpcCall('migrate', [storageName]);\n }\n /**\n * Remote storage does not offer `Services` to remote clients.\n * @throws WERR_INVALID_OPERATION\n */\n getServices() {\n // Typically, the client would not store or retrieve \"Services\" from a remote server.\n // The \"services\" in local in-memory usage is a no-op or your own approach:\n throw new index_mobile_1.sdk.WERR_INVALID_OPERATION('getServices() not implemented in remote client. This method typically is not used remotely.');\n }\n /**\n * Ignored. Remote storage cannot share `Services` with remote clients.\n */\n setServices(v) {\n // Typically no-op for remote client\n // Because \"services\" are usually local definitions to the Storage.\n }\n /**\n * Storage level processing for wallet `internalizeAction`.\n * Updates internalized outputs in remote storage.\n * Triggers proof validation of containing transaction.\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param args Original wallet `internalizeAction` arguments.\n * @returns `internalizeAction` results\n */\n async internalizeAction(auth, args) {\n return this.rpcCall('internalizeAction', [auth, args]);\n }\n /**\n * Storage level processing for wallet `createAction`.\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param args Validated extension of original wallet `createAction` arguments.\n * @returns `StorageCreateActionResults` supporting additional wallet processing to yield `createAction` results.\n */\n async createAction(auth, args) {\n return this.rpcCall('createAction', [auth, args]);\n }\n /**\n * Storage level processing for wallet `createAction` and 
`signAction`.\n *\n * Handles remaining storage tasks once a fully signed transaction has been completed. This is common to both `createAction` and `signAction`.\n *\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param args `StorageProcessActionArgs` convey completed signed transaction to storage.\n * @returns `StorageProcessActionResults` supporting final wallet processing to yield `createAction` or `signAction` results.\n */\n async processAction(auth, args) {\n return this.rpcCall('processAction', [auth, args]);\n }\n /**\n * Aborts an action by `reference` string.\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param args original wallet `abortAction` args.\n * @returns `abortAction` result.\n */\n async abortAction(auth, args) {\n return this.rpcCall('abortAction', [auth, args]);\n }\n /**\n * Used to both find and initialize a new user by identity key.\n * It is up to the remote storage whether to allow creation of new users by this method.\n * @param identityKey of the user.\n * @returns `TableUser` for the user and whether a new user was created.\n */\n async findOrInsertUser(identityKey) {\n return this.rpcCall('findOrInsertUser', [identityKey]);\n }\n /**\n * Used to both find and insert a `TableSyncState` record for the user to track wallet data replication across storage providers.\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param storageName the name of the remote storage being sync'd\n * @param storageIdentityKey the identity key of the remote storage being sync'd\n * @returns `TableSyncState` and whether a new 
record was created.\n */\n async findOrInsertSyncStateAuth(auth, storageIdentityKey, storageName) {\n const r = await this.rpcCall('findOrInsertSyncStateAuth', [\n auth,\n storageIdentityKey,\n storageName\n ]);\n r.syncState = this.validateEntity(r.syncState, ['when']);\n return r;\n }\n /**\n * Inserts a new certificate with fields and keyring into remote storage.\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param certificate the certificate to insert.\n * @returns record Id of the inserted `TableCertificate` record.\n */\n async insertCertificateAuth(auth, certificate) {\n const r = await this.rpcCall('insertCertificateAuth', [auth, certificate]);\n return r;\n }\n /**\n * Storage level processing for wallet `listActions`.\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param args Validated extension of original wallet `listActions` arguments.\n * @returns `listActions` results.\n */\n async listActions(auth, vargs) {\n const r = await this.rpcCall('listActions', [auth, vargs]);\n return r;\n }\n /**\n * Storage level processing for wallet `listOutputs`.\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param args Validated extension of original wallet `listOutputs` arguments.\n * @returns `listOutputs` results.\n */\n async listOutputs(auth, vargs) {\n const r = await this.rpcCall('listOutputs', [auth, vargs]);\n return r;\n }\n /**\n * Storage level processing for wallet `listCertificates`.\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match 
the `AuthFetch` identity securing the remote conneciton.\n * @param args Validated extension of original wallet `listCertificates` arguments.\n * @returns `listCertificates` results.\n */\n async listCertificates(auth, vargs) {\n const r = await this.rpcCall('listCertificates', [auth, vargs]);\n return r;\n }\n /**\n * Find user certificates, optionally with fields.\n *\n * This certificate retrieval method supports internal wallet operations.\n * Field values are stored and retrieved encrypted.\n *\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param args `FindCertificatesArgs` determines which certificates to retrieve and whether to include fields.\n * @returns array of certificates matching args.\n */\n async findCertificatesAuth(auth, args) {\n const r = await this.rpcCall('findCertificatesAuth', [auth, args]);\n this.validateEntities(r);\n if (args.includeFields) {\n for (const c of r) {\n if (c.fields)\n this.validateEntities(c.fields);\n }\n }\n return r;\n }\n /**\n * Find output baskets.\n *\n * This retrieval method supports internal wallet operations.\n *\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param args `FindOutputBasketsArgs` determines which baskets to retrieve.\n * @returns array of output baskets matching args.\n */\n async findOutputBasketsAuth(auth, args) {\n const r = await this.rpcCall('findOutputBaskets', [auth, args]);\n this.validateEntities(r);\n return r;\n }\n /**\n * Find outputs.\n *\n * This retrieval method supports internal wallet operations.\n *\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote 
conneciton.\n * @param args `FindOutputsArgs` determines which outputs to retrieve.\n * @returns array of outputs matching args.\n */\n async findOutputsAuth(auth, args) {\n const r = await this.rpcCall('findOutputsAuth', [auth, args]);\n this.validateEntities(r);\n return r;\n }\n /**\n * Find requests for transaction proofs.\n *\n * This retrieval method supports internal wallet operations.\n *\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param args `FindProvenTxReqsArgs` determines which proof requests to retrieve.\n * @returns array of proof requests matching args.\n */\n async findProvenTxReqs(args) {\n const r = await this.rpcCall('findProvenTxReqs', [args]);\n this.validateEntities(r);\n return r;\n }\n /**\n * Relinquish a certificate.\n *\n * For storage supporting replication records must be kept of deletions. Therefore certificates are marked as deleted\n * when relinquished, and no longer returned by `listCertificates`, but are still retained by storage.\n *\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param args original wallet `relinquishCertificate` args.\n */\n async relinquishCertificate(auth, args) {\n return this.rpcCall('relinquishCertificate', [auth, args]);\n }\n /**\n * Relinquish an output.\n *\n * Relinquishing an output removes the output from whatever basket was tracking it.\n *\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param args original wallet `relinquishOutput` args.\n */\n async relinquishOutput(auth, args) {\n return this.rpcCall('relinquishOutput', [auth, args]);\n }\n /**\n * Process 
a \"chunk\" of replication data for the user.\n *\n * The normal data flow is for the active storage to push backups as a sequence of data chunks to backup storage providers.\n *\n * @param args a copy of the replication request args that initiated the sequence of data chunks.\n * @param chunk the current data chunk to process.\n * @returns whether processing is done, counts of inserts and udpates, and related progress tracking properties.\n */\n async processSyncChunk(args, chunk) {\n const r = await this.rpcCall('processSyncChunk', [args, chunk]);\n return r;\n }\n /**\n * Request a \"chunk\" of replication data for a specific user and storage provider.\n *\n * The normal data flow is for the active storage to push backups as a sequence of data chunks to backup storage providers.\n * Also supports recovery where non-active storage can attempt to merge available data prior to becoming active.\n *\n * @param args that identify the non-active storage which will receive replication data and constrains the replication process.\n * @returns the next \"chunk\" of replication data\n */\n async getSyncChunk(args) {\n const r = await this.rpcCall('getSyncChunk', [args]);\n if (r.certificateFields)\n r.certificateFields = this.validateEntities(r.certificateFields);\n if (r.certificates)\n r.certificates = this.validateEntities(r.certificates);\n if (r.commissions)\n r.commissions = this.validateEntities(r.commissions);\n if (r.outputBaskets)\n r.outputBaskets = this.validateEntities(r.outputBaskets);\n if (r.outputTagMaps)\n r.outputTagMaps = this.validateEntities(r.outputTagMaps);\n if (r.outputTags)\n r.outputTags = this.validateEntities(r.outputTags);\n if (r.outputs)\n r.outputs = this.validateEntities(r.outputs);\n if (r.provenTxReqs)\n r.provenTxReqs = this.validateEntities(r.provenTxReqs);\n if (r.provenTxs)\n r.provenTxs = this.validateEntities(r.provenTxs);\n if (r.transactions)\n r.transactions = this.validateEntities(r.transactions);\n if (r.txLabelMaps)\n 
r.txLabelMaps = this.validateEntities(r.txLabelMaps);\n if (r.txLabels)\n r.txLabels = this.validateEntities(r.txLabels);\n if (r.user)\n r.user = this.validateEntity(r.user);\n return r;\n }\n /**\n * Handles the data received when a new transaction proof is found in response to an outstanding request for proof data:\n *\n * - Creates a new `TableProvenTx` record.\n * - Notifies all user transaction records of the new status.\n * - Updates the proof request record to 'completed' status which enables delayed deletion.\n *\n * @param args proof request and new transaction proof data\n * @returns results of updates\n */\n async updateProvenTxReqWithNewProvenTx(args) {\n const r = await this.rpcCall('updateProvenTxReqWithNewProvenTx', [args]);\n return r;\n }\n /**\n * Ensures up-to-date wallet data replication to all configured backup storage providers,\n * then promotes one of the configured backups to active,\n * demoting the current active to new backup.\n *\n * @param auth Identifies client by identity key and the storage identity key of their currently active storage.\n * This must match the `AuthFetch` identity securing the remote conneciton.\n * @param newActiveStorageIdentityKey which must be a currently configured backup storage provider.\n */\n async setActive(auth, newActiveStorageIdentityKey) {\n return this.rpcCall('setActive', [auth, newActiveStorageIdentityKey]);\n }\n validateDate(date) {\n let r;\n if (date instanceof Date)\n r = date;\n else\n r = new Date(date);\n return r;\n }\n /**\n * Helper to force uniform behavior across database engines.\n * Use to process all individual records with time stamps retreived from database.\n */\n validateEntity(entity, dateFields) {\n entity.created_at = this.validateDate(entity.created_at);\n entity.updated_at = this.validateDate(entity.updated_at);\n if (dateFields) {\n for (const df of dateFields) {\n if (entity[df])\n entity[df] = this.validateDate(entity[df]);\n }\n }\n for (const key of 
Object.keys(entity)) {\n const val = entity[key];\n if (val === null) {\n entity[key] = undefined;\n }\n else if (val instanceof Uint8Array) {\n entity[key] = Array.from(val);\n }\n }\n return entity;\n }\n /**\n * Helper to force uniform behavior across database engines.\n * Use to process all arrays of records with time stamps retreived from database.\n * @returns input `entities` array with contained values validated.\n */\n validateEntities(entities, dateFields) {\n for (let i = 0; i < entities.length; i++) {\n entities[i] = this.validateEntity(entities[i], dateFields);\n }\n return entities;\n }\n}\nexports.StorageClient = StorageClient;\n//# sourceMappingURL=StorageMobile.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/remoting/StorageMobile.js?\n}"); /***/ }), @@ -3795,7 +3685,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.EntityCertificate = void 0;\nconst utilityHelpers_1 = __webpack_require__(/*! ../../../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst EntityBase_1 = __webpack_require__(/*! ./EntityBase */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityBase.js\");\nclass EntityCertificate extends EntityBase_1.EntityBase {\n constructor(api) {\n const now = new Date();\n super(api || {\n certificateId: 0,\n created_at: now,\n updated_at: now,\n userId: 0,\n type: '',\n subject: '',\n verifier: undefined,\n serialNumber: '',\n certifier: '',\n revocationOutpoint: '',\n signature: '',\n isDeleted: false\n });\n }\n updateApi() {\n /* nothing needed yet... 
*/\n }\n get certificateId() {\n return this.api.certificateId;\n }\n set certificateId(v) {\n this.api.certificateId = v;\n }\n get created_at() {\n return this.api.created_at;\n }\n set created_at(v) {\n this.api.created_at = v;\n }\n get updated_at() {\n return this.api.updated_at;\n }\n set updated_at(v) {\n this.api.updated_at = v;\n }\n get userId() {\n return this.api.userId;\n }\n set userId(v) {\n this.api.userId = v;\n }\n get type() {\n return this.api.type;\n }\n set type(v) {\n this.api.type = v;\n }\n get subject() {\n return this.api.subject;\n }\n set subject(v) {\n this.api.subject = v;\n }\n get verifier() {\n return this.api.verifier;\n }\n set verifier(v) {\n this.api.verifier = v;\n }\n get serialNumber() {\n return this.api.serialNumber;\n }\n set serialNumber(v) {\n this.api.serialNumber = v;\n }\n get certifier() {\n return this.api.certifier;\n }\n set certifier(v) {\n this.api.certifier = v;\n }\n get revocationOutpoint() {\n return this.api.revocationOutpoint;\n }\n set revocationOutpoint(v) {\n this.api.revocationOutpoint = v;\n }\n get signature() {\n return this.api.signature;\n }\n set signature(v) {\n this.api.signature = v;\n }\n get isDeleted() {\n return this.api.isDeleted;\n }\n set isDeleted(v) {\n this.api.isDeleted = v;\n }\n //get fields() { return this.api.fields }\n //set fields(v: Record | undefined) { this.api.fields = v }\n get id() {\n return this.api.certificateId;\n }\n set id(v) {\n this.api.certificateId = v;\n }\n get entityName() {\n return 'certificate';\n }\n get entityTable() {\n return 'certificates';\n }\n equals(ei, syncMap) {\n if (this.type !== ei.type ||\n this.subject !== ei.subject ||\n this.serialNumber !== ei.serialNumber ||\n this.revocationOutpoint !== ei.revocationOutpoint ||\n this.signature !== ei.signature ||\n this.verifier !== ei.verifier ||\n this.isDeleted !== ei.isDeleted)\n return false;\n return true;\n }\n static async mergeFind(storage, userId, ei, syncMap, trx) {\n const ef = (0, 
utilityHelpers_1.verifyOneOrNone)(await storage.findCertificates({\n partial: {\n serialNumber: ei.serialNumber,\n certifier: ei.certifier,\n userId\n },\n trx\n }));\n return {\n found: !!ef,\n eo: new EntityCertificate(ef || { ...ei }),\n eiId: (0, utilityHelpers_1.verifyId)(ei.certificateId)\n };\n }\n async mergeNew(storage, userId, syncMap, trx) {\n this.userId = userId;\n this.certificateId = 0;\n this.certificateId = await storage.insertCertificate(this.toApi(), trx);\n }\n async mergeExisting(storage, since, ei, syncMap, trx) {\n let wasMerged = false;\n if (ei.updated_at > this.updated_at) {\n this.type = ei.type;\n this.subject = ei.subject;\n this.serialNumber = ei.serialNumber;\n this.revocationOutpoint = ei.revocationOutpoint;\n this.signature = ei.signature;\n this.verifier = ei.verifier;\n this.isDeleted = ei.isDeleted;\n this.updated_at = new Date(Math.max(ei.updated_at.getTime(), this.updated_at.getTime()));\n await storage.updateCertificate(this.id, this.toApi(), trx);\n wasMerged = true;\n }\n return wasMerged;\n }\n}\nexports.EntityCertificate = EntityCertificate;\n//# sourceMappingURL=EntityCertificate.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityCertificate.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.EntityCertificate = void 0;\nconst index_client_1 = __webpack_require__(/*! ../../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nconst _1 = __webpack_require__(/*! . 
*/ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/index.js\");\nclass EntityCertificate extends _1.EntityBase {\n constructor(api) {\n const now = new Date();\n super(api || {\n certificateId: 0,\n created_at: now,\n updated_at: now,\n userId: 0,\n type: '',\n subject: '',\n verifier: undefined,\n serialNumber: '',\n certifier: '',\n revocationOutpoint: '',\n signature: '',\n isDeleted: false\n });\n }\n updateApi() {\n /* nothing needed yet... */\n }\n get certificateId() {\n return this.api.certificateId;\n }\n set certificateId(v) {\n this.api.certificateId = v;\n }\n get created_at() {\n return this.api.created_at;\n }\n set created_at(v) {\n this.api.created_at = v;\n }\n get updated_at() {\n return this.api.updated_at;\n }\n set updated_at(v) {\n this.api.updated_at = v;\n }\n get userId() {\n return this.api.userId;\n }\n set userId(v) {\n this.api.userId = v;\n }\n get type() {\n return this.api.type;\n }\n set type(v) {\n this.api.type = v;\n }\n get subject() {\n return this.api.subject;\n }\n set subject(v) {\n this.api.subject = v;\n }\n get verifier() {\n return this.api.verifier;\n }\n set verifier(v) {\n this.api.verifier = v;\n }\n get serialNumber() {\n return this.api.serialNumber;\n }\n set serialNumber(v) {\n this.api.serialNumber = v;\n }\n get certifier() {\n return this.api.certifier;\n }\n set certifier(v) {\n this.api.certifier = v;\n }\n get revocationOutpoint() {\n return this.api.revocationOutpoint;\n }\n set revocationOutpoint(v) {\n this.api.revocationOutpoint = v;\n }\n get signature() {\n return this.api.signature;\n }\n set signature(v) {\n this.api.signature = v;\n }\n get isDeleted() {\n return this.api.isDeleted;\n }\n set isDeleted(v) {\n this.api.isDeleted = v;\n }\n //get fields() { return this.api.fields }\n //set fields(v: Record | undefined) { this.api.fields = v }\n get id() {\n return this.api.certificateId;\n }\n set id(v) {\n this.api.certificateId = v;\n }\n get entityName() {\n return 
'certificate';\n }\n get entityTable() {\n return 'certificates';\n }\n equals(ei, syncMap) {\n if (this.type !== ei.type ||\n this.subject !== ei.subject ||\n this.serialNumber !== ei.serialNumber ||\n this.revocationOutpoint !== ei.revocationOutpoint ||\n this.signature !== ei.signature ||\n this.verifier !== ei.verifier ||\n this.isDeleted !== ei.isDeleted)\n return false;\n return true;\n }\n static async mergeFind(storage, userId, ei, syncMap, trx) {\n const ef = (0, index_client_1.verifyOneOrNone)(await storage.findCertificates({\n partial: {\n serialNumber: ei.serialNumber,\n certifier: ei.certifier,\n userId\n },\n trx\n }));\n return {\n found: !!ef,\n eo: new EntityCertificate(ef || { ...ei }),\n eiId: (0, index_client_1.verifyId)(ei.certificateId)\n };\n }\n async mergeNew(storage, userId, syncMap, trx) {\n this.userId = userId;\n this.certificateId = 0;\n this.certificateId = await storage.insertCertificate(this.toApi(), trx);\n }\n async mergeExisting(storage, since, ei, syncMap, trx) {\n let wasMerged = false;\n if (ei.updated_at > this.updated_at) {\n this.type = ei.type;\n this.subject = ei.subject;\n this.serialNumber = ei.serialNumber;\n this.revocationOutpoint = ei.revocationOutpoint;\n this.signature = ei.signature;\n this.verifier = ei.verifier;\n this.isDeleted = ei.isDeleted;\n this.updated_at = new Date(Math.max(ei.updated_at.getTime(), this.updated_at.getTime()));\n await storage.updateCertificate(this.id, this.toApi(), trx);\n wasMerged = true;\n }\n return wasMerged;\n }\n}\nexports.EntityCertificate = EntityCertificate;\n//# sourceMappingURL=EntityCertificate.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityCertificate.js?\n}"); /***/ }), @@ -3806,7 +3696,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.EntityCertificateField = void 0;\nconst 
WERR_errors_1 = __webpack_require__(/*! ../../../sdk/WERR_errors */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WERR_errors.js\");\nconst utilityHelpers_1 = __webpack_require__(/*! ../../../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst EntityBase_1 = __webpack_require__(/*! ./EntityBase */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityBase.js\");\nclass EntityCertificateField extends EntityBase_1.EntityBase {\n constructor(api) {\n const now = new Date();\n super(api || {\n created_at: now,\n updated_at: now,\n userId: 0,\n certificateId: 0,\n fieldName: '',\n fieldValue: '',\n masterKey: ''\n });\n }\n updateApi() {\n /* nothing needed yet... */\n }\n get userId() {\n return this.api.userId;\n }\n set userId(v) {\n this.api.userId = v;\n }\n get certificateId() {\n return this.api.certificateId;\n }\n set certificateId(v) {\n this.api.certificateId = v;\n }\n get created_at() {\n return this.api.created_at;\n }\n set created_at(v) {\n this.api.created_at = v;\n }\n get updated_at() {\n return this.api.updated_at;\n }\n set updated_at(v) {\n this.api.updated_at = v;\n }\n get fieldName() {\n return this.api.fieldName;\n }\n set fieldName(v) {\n this.api.fieldName = v;\n }\n get fieldValue() {\n return this.api.fieldValue;\n }\n set fieldValue(v) {\n this.api.fieldValue = v;\n }\n get masterKey() {\n return this.api.masterKey;\n }\n set masterKey(v) {\n this.api.masterKey = v;\n }\n get id() {\n throw new WERR_errors_1.WERR_INVALID_OPERATION('entity has no \"id\" value');\n }\n get entityName() {\n return 'certificateField';\n }\n get entityTable() {\n return 'certificate_fields';\n }\n equals(ei, syncMap) {\n if (this.certificateId !== (syncMap ? 
syncMap.certificate.idMap[ei.certificateId] : ei.certificateId) ||\n this.fieldName !== ei.fieldName ||\n this.fieldValue !== ei.fieldValue ||\n this.masterKey !== ei.masterKey)\n return false;\n return true;\n }\n static async mergeFind(storage, userId, ei, syncMap, trx) {\n const certificateId = syncMap.certificate.idMap[ei.certificateId];\n const ef = (0, utilityHelpers_1.verifyOneOrNone)(await storage.findCertificateFields({\n partial: { certificateId, userId, fieldName: ei.fieldName },\n trx\n }));\n return {\n found: !!ef,\n eo: new EntityCertificateField(ef || { ...ei }),\n eiId: -1\n };\n }\n async mergeNew(storage, userId, syncMap, trx) {\n this.certificateId = syncMap.certificate.idMap[this.certificateId];\n this.userId = userId;\n await storage.insertCertificateField(this.toApi(), trx);\n }\n async mergeExisting(storage, since, ei, syncMap, trx) {\n let wasMerged = false;\n if (ei.updated_at > this.updated_at) {\n this.fieldValue = ei.fieldValue;\n this.masterKey = ei.masterKey;\n this.updated_at = new Date(Math.max(ei.updated_at.getTime(), this.updated_at.getTime()));\n await storage.updateCertificateField(this.certificateId, this.fieldName, this.toApi(), trx);\n wasMerged = true;\n }\n return wasMerged;\n }\n}\nexports.EntityCertificateField = EntityCertificateField;\n//# sourceMappingURL=EntityCertificateField.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityCertificateField.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.EntityCertificateField = void 0;\nconst index_client_1 = __webpack_require__(/*! ../../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nconst _1 = __webpack_require__(/*! . 
*/ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/index.js\");\nclass EntityCertificateField extends _1.EntityBase {\n constructor(api) {\n const now = new Date();\n super(api || {\n created_at: now,\n updated_at: now,\n userId: 0,\n certificateId: 0,\n fieldName: '',\n fieldValue: '',\n masterKey: ''\n });\n }\n updateApi() {\n /* nothing needed yet... */\n }\n get userId() {\n return this.api.userId;\n }\n set userId(v) {\n this.api.userId = v;\n }\n get certificateId() {\n return this.api.certificateId;\n }\n set certificateId(v) {\n this.api.certificateId = v;\n }\n get created_at() {\n return this.api.created_at;\n }\n set created_at(v) {\n this.api.created_at = v;\n }\n get updated_at() {\n return this.api.updated_at;\n }\n set updated_at(v) {\n this.api.updated_at = v;\n }\n get fieldName() {\n return this.api.fieldName;\n }\n set fieldName(v) {\n this.api.fieldName = v;\n }\n get fieldValue() {\n return this.api.fieldValue;\n }\n set fieldValue(v) {\n this.api.fieldValue = v;\n }\n get masterKey() {\n return this.api.masterKey;\n }\n set masterKey(v) {\n this.api.masterKey = v;\n }\n get id() {\n throw new index_client_1.sdk.WERR_INVALID_OPERATION('entity has no \"id\" value');\n }\n get entityName() {\n return 'certificateField';\n }\n get entityTable() {\n return 'certificate_fields';\n }\n equals(ei, syncMap) {\n if (this.certificateId !== (syncMap ? 
syncMap.certificate.idMap[ei.certificateId] : ei.certificateId) ||\n this.fieldName !== ei.fieldName ||\n this.fieldValue !== ei.fieldValue ||\n this.masterKey !== ei.masterKey)\n return false;\n return true;\n }\n static async mergeFind(storage, userId, ei, syncMap, trx) {\n const certificateId = syncMap.certificate.idMap[ei.certificateId];\n const ef = (0, index_client_1.verifyOneOrNone)(await storage.findCertificateFields({\n partial: { certificateId, userId, fieldName: ei.fieldName },\n trx\n }));\n return {\n found: !!ef,\n eo: new EntityCertificateField(ef || { ...ei }),\n eiId: -1\n };\n }\n async mergeNew(storage, userId, syncMap, trx) {\n this.certificateId = syncMap.certificate.idMap[this.certificateId];\n this.userId = userId;\n await storage.insertCertificateField(this.toApi(), trx);\n }\n async mergeExisting(storage, since, ei, syncMap, trx) {\n let wasMerged = false;\n if (ei.updated_at > this.updated_at) {\n this.fieldValue = ei.fieldValue;\n this.masterKey = ei.masterKey;\n this.updated_at = new Date(Math.max(ei.updated_at.getTime(), this.updated_at.getTime()));\n await storage.updateCertificateField(this.certificateId, this.fieldName, this.toApi(), trx);\n wasMerged = true;\n }\n return wasMerged;\n }\n}\nexports.EntityCertificateField = EntityCertificateField;\n//# sourceMappingURL=EntityCertificateField.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityCertificateField.js?\n}"); /***/ }), @@ -3817,7 +3707,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.EntityCommission = void 0;\nconst utilityHelpers_1 = __webpack_require__(/*! ../../../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst EntityBase_1 = __webpack_require__(/*! 
./EntityBase */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityBase.js\");\nclass EntityCommission extends EntityBase_1.EntityBase {\n constructor(api) {\n const now = new Date();\n super(api || {\n commissionId: 0,\n created_at: now,\n updated_at: now,\n transactionId: 0,\n userId: 0,\n isRedeemed: false,\n keyOffset: '',\n lockingScript: [],\n satoshis: 0\n });\n }\n updateApi() {\n /* nothing needed yet... */\n }\n get commissionId() {\n return this.api.commissionId;\n }\n set commissionId(v) {\n this.api.commissionId = v;\n }\n get created_at() {\n return this.api.created_at;\n }\n set created_at(v) {\n this.api.created_at = v;\n }\n get updated_at() {\n return this.api.updated_at;\n }\n set updated_at(v) {\n this.api.updated_at = v;\n }\n get transactionId() {\n return this.api.transactionId;\n }\n set transactionId(v) {\n this.api.transactionId = v;\n }\n get userId() {\n return this.api.userId;\n }\n set userId(v) {\n this.api.userId = v;\n }\n get isRedeemed() {\n return this.api.isRedeemed;\n }\n set isRedeemed(v) {\n this.api.isRedeemed = v;\n }\n get keyOffset() {\n return this.api.keyOffset;\n }\n set keyOffset(v) {\n this.api.keyOffset = v;\n }\n get lockingScript() {\n return this.api.lockingScript;\n }\n set lockingScript(v) {\n this.api.lockingScript = v;\n }\n get satoshis() {\n return this.api.satoshis;\n }\n set satoshis(v) {\n this.api.satoshis = v;\n }\n get id() {\n return this.api.commissionId;\n }\n set id(v) {\n this.api.commissionId = v;\n }\n get entityName() {\n return 'commission';\n }\n get entityTable() {\n return 'commissions';\n }\n equals(ei, syncMap) {\n if (this.isRedeemed !== ei.isRedeemed ||\n this.transactionId !== (syncMap ? 
syncMap.transaction.idMap[ei.transactionId] : ei.transactionId) ||\n this.keyOffset !== ei.keyOffset ||\n !(0, utilityHelpers_1.arraysEqual)(this.lockingScript, ei.lockingScript) ||\n this.satoshis !== ei.satoshis)\n return false;\n return true;\n }\n static async mergeFind(storage, userId, ei, syncMap, trx) {\n const transactionId = syncMap.transaction.idMap[ei.transactionId];\n const ef = (0, utilityHelpers_1.verifyOneOrNone)(await storage.findCommissions({ partial: { transactionId, userId }, trx }));\n return {\n found: !!ef,\n eo: new EntityCommission(ef || { ...ei }),\n eiId: (0, utilityHelpers_1.verifyId)(ei.commissionId)\n };\n }\n async mergeNew(storage, userId, syncMap, trx) {\n if (this.transactionId)\n this.transactionId = syncMap.transaction.idMap[this.transactionId];\n this.userId = userId;\n this.commissionId = 0;\n this.commissionId = await storage.insertCommission(this.toApi(), trx);\n }\n async mergeExisting(storage, since, ei, syncMap, trx) {\n let wasMerged = false;\n if (ei.updated_at > this.updated_at) {\n this.isRedeemed = ei.isRedeemed;\n this.updated_at = new Date(Math.max(ei.updated_at.getTime(), this.updated_at.getTime()));\n await storage.updateCommission(this.id, this.toApi(), trx);\n wasMerged = true;\n }\n return wasMerged;\n }\n}\nexports.EntityCommission = EntityCommission;\n//# sourceMappingURL=EntityCommission.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityCommission.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.EntityCommission = void 0;\nconst index_client_1 = __webpack_require__(/*! ../../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nconst _1 = __webpack_require__(/*! . 
*/ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/index.js\");\nclass EntityCommission extends _1.EntityBase {\n constructor(api) {\n const now = new Date();\n super(api || {\n commissionId: 0,\n created_at: now,\n updated_at: now,\n transactionId: 0,\n userId: 0,\n isRedeemed: false,\n keyOffset: '',\n lockingScript: [],\n satoshis: 0\n });\n }\n updateApi() {\n /* nothing needed yet... */\n }\n get commissionId() {\n return this.api.commissionId;\n }\n set commissionId(v) {\n this.api.commissionId = v;\n }\n get created_at() {\n return this.api.created_at;\n }\n set created_at(v) {\n this.api.created_at = v;\n }\n get updated_at() {\n return this.api.updated_at;\n }\n set updated_at(v) {\n this.api.updated_at = v;\n }\n get transactionId() {\n return this.api.transactionId;\n }\n set transactionId(v) {\n this.api.transactionId = v;\n }\n get userId() {\n return this.api.userId;\n }\n set userId(v) {\n this.api.userId = v;\n }\n get isRedeemed() {\n return this.api.isRedeemed;\n }\n set isRedeemed(v) {\n this.api.isRedeemed = v;\n }\n get keyOffset() {\n return this.api.keyOffset;\n }\n set keyOffset(v) {\n this.api.keyOffset = v;\n }\n get lockingScript() {\n return this.api.lockingScript;\n }\n set lockingScript(v) {\n this.api.lockingScript = v;\n }\n get satoshis() {\n return this.api.satoshis;\n }\n set satoshis(v) {\n this.api.satoshis = v;\n }\n get id() {\n return this.api.commissionId;\n }\n set id(v) {\n this.api.commissionId = v;\n }\n get entityName() {\n return 'commission';\n }\n get entityTable() {\n return 'commissions';\n }\n equals(ei, syncMap) {\n if (this.isRedeemed !== ei.isRedeemed ||\n this.transactionId !== (syncMap ? 
syncMap.transaction.idMap[ei.transactionId] : ei.transactionId) ||\n this.keyOffset !== ei.keyOffset ||\n !(0, index_client_1.arraysEqual)(this.lockingScript, ei.lockingScript) ||\n this.satoshis !== ei.satoshis)\n return false;\n return true;\n }\n static async mergeFind(storage, userId, ei, syncMap, trx) {\n const transactionId = syncMap.transaction.idMap[ei.transactionId];\n const ef = (0, index_client_1.verifyOneOrNone)(await storage.findCommissions({ partial: { transactionId, userId }, trx }));\n return {\n found: !!ef,\n eo: new EntityCommission(ef || { ...ei }),\n eiId: (0, index_client_1.verifyId)(ei.commissionId)\n };\n }\n async mergeNew(storage, userId, syncMap, trx) {\n if (this.transactionId)\n this.transactionId = syncMap.transaction.idMap[this.transactionId];\n this.userId = userId;\n this.commissionId = 0;\n this.commissionId = await storage.insertCommission(this.toApi(), trx);\n }\n async mergeExisting(storage, since, ei, syncMap, trx) {\n let wasMerged = false;\n if (ei.updated_at > this.updated_at) {\n this.isRedeemed = ei.isRedeemed;\n this.updated_at = new Date(Math.max(ei.updated_at.getTime(), this.updated_at.getTime()));\n await storage.updateCommission(this.id, this.toApi(), trx);\n wasMerged = true;\n }\n return wasMerged;\n }\n}\nexports.EntityCommission = EntityCommission;\n//# sourceMappingURL=EntityCommission.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityCommission.js?\n}"); /***/ }), @@ -3828,7 +3718,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.EntityOutput = void 0;\nconst utilityHelpers_1 = __webpack_require__(/*! ../../../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst EntityBase_1 = __webpack_require__(/*! 
./EntityBase */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityBase.js\");\nclass EntityOutput extends EntityBase_1.EntityBase {\n constructor(api) {\n const now = new Date();\n super(api || {\n outputId: 0,\n created_at: now,\n updated_at: now,\n userId: 0,\n transactionId: 0,\n spendable: false,\n change: false,\n satoshis: 0,\n outputDescription: '',\n vout: 0,\n type: '',\n providedBy: 'you',\n purpose: '',\n txid: undefined,\n basketId: undefined,\n spentBy: undefined,\n derivationPrefix: undefined,\n derivationSuffix: undefined,\n senderIdentityKey: undefined,\n customInstructions: undefined,\n spendingDescription: undefined,\n scriptLength: undefined,\n scriptOffset: undefined,\n lockingScript: undefined\n });\n }\n updateApi() {\n /* nothing needed yet... */\n }\n get outputId() {\n return this.api.outputId;\n }\n set outputId(v) {\n this.api.outputId = v;\n }\n get created_at() {\n return this.api.created_at;\n }\n set created_at(v) {\n this.api.created_at = v;\n }\n get updated_at() {\n return this.api.updated_at;\n }\n set updated_at(v) {\n this.api.updated_at = v;\n }\n get userId() {\n return this.api.userId;\n }\n set userId(v) {\n this.api.userId = v;\n }\n get transactionId() {\n return this.api.transactionId;\n }\n set transactionId(v) {\n this.api.transactionId = v;\n }\n get basketId() {\n return this.api.basketId;\n }\n set basketId(v) {\n this.api.basketId = v;\n }\n get spentBy() {\n return this.api.spentBy;\n }\n set spentBy(v) {\n this.api.spentBy = v;\n }\n get vout() {\n return this.api.vout;\n }\n set vout(v) {\n this.api.vout = v;\n }\n get satoshis() {\n return this.api.satoshis;\n }\n set satoshis(v) {\n this.api.satoshis = v;\n }\n get outputDescription() {\n return this.api.outputDescription;\n }\n set outputDescription(v) {\n this.api.outputDescription = v;\n }\n get spendable() {\n return this.api.spendable;\n }\n set spendable(v) {\n this.api.spendable = v;\n }\n get change() {\n return 
this.api.change;\n }\n set change(v) {\n this.api.change = v;\n }\n get txid() {\n return this.api.txid;\n }\n set txid(v) {\n this.api.txid = v;\n }\n get type() {\n return this.api.type;\n }\n set type(v) {\n this.api.type = v;\n }\n get providedBy() {\n return this.api.providedBy;\n }\n set providedBy(v) {\n this.api.providedBy = v;\n }\n get purpose() {\n return this.api.purpose;\n }\n set purpose(v) {\n this.api.purpose = v;\n }\n get spendingDescription() {\n return this.api.spendingDescription;\n }\n set spendingDescription(v) {\n this.api.spendingDescription = v;\n }\n get derivationPrefix() {\n return this.api.derivationPrefix;\n }\n set derivationPrefix(v) {\n this.api.derivationPrefix = v;\n }\n get derivationSuffix() {\n return this.api.derivationSuffix;\n }\n set derivationSuffix(v) {\n this.api.derivationSuffix = v;\n }\n get senderIdentityKey() {\n return this.api.senderIdentityKey;\n }\n set senderIdentityKey(v) {\n this.api.senderIdentityKey = v;\n }\n get customInstructions() {\n return this.api.customInstructions;\n }\n set customInstructions(v) {\n this.api.customInstructions = v;\n }\n get lockingScript() {\n return this.api.lockingScript;\n }\n set lockingScript(v) {\n this.api.lockingScript = v;\n }\n get scriptLength() {\n return this.api.scriptLength;\n }\n set scriptLength(v) {\n this.api.scriptLength = v;\n }\n get scriptOffset() {\n return this.api.scriptOffset;\n }\n set scriptOffset(v) {\n this.api.scriptOffset = v;\n }\n get id() {\n return this.api.outputId;\n }\n set id(v) {\n this.api.outputId = v;\n }\n get entityName() {\n return 'output';\n }\n get entityTable() {\n return 'outputs';\n }\n equals(ei, syncMap) {\n if (this.transactionId !== (syncMap ? syncMap.transaction.idMap[ei.transactionId] : ei.transactionId) ||\n this.basketId !== (syncMap && ei.basketId ? syncMap.outputBasket.idMap[ei.basketId] : ei.basketId) ||\n this.spentBy !== (syncMap && ei.spentBy ? 
syncMap.transaction.idMap[ei.spentBy] : ei.spentBy) ||\n this.vout !== ei.vout ||\n this.satoshis !== ei.satoshis ||\n this.spendable !== ei.spendable ||\n this.change !== ei.change ||\n this.txid !== ei.txid ||\n this.type !== ei.type ||\n this.providedBy !== ei.providedBy ||\n this.purpose !== ei.purpose ||\n this.outputDescription !== ei.outputDescription ||\n this.spendingDescription !== ei.spendingDescription ||\n this.derivationPrefix !== ei.derivationPrefix ||\n this.derivationSuffix !== ei.derivationSuffix ||\n this.senderIdentityKey !== ei.senderIdentityKey ||\n this.customInstructions !== ei.customInstructions ||\n !(0, utilityHelpers_1.optionalArraysEqual)(this.lockingScript, ei.lockingScript) ||\n this.scriptLength !== ei.scriptLength ||\n this.scriptOffset !== ei.scriptOffset)\n return false;\n return true;\n }\n static async mergeFind(storage, userId, ei, syncMap, trx) {\n const transactionId = syncMap.transaction.idMap[ei.transactionId];\n const basketId = ei.basketId ? syncMap.outputBasket.idMap[ei.basketId] : null;\n const ef = (0, utilityHelpers_1.verifyOneOrNone)(await storage.findOutputs({\n partial: { userId, transactionId, vout: ei.vout },\n trx\n }));\n return {\n found: !!ef,\n eo: new EntityOutput(ef || { ...ei }),\n eiId: (0, utilityHelpers_1.verifyId)(ei.outputId)\n };\n }\n async mergeNew(storage, userId, syncMap, trx) {\n this.userId = userId;\n this.basketId = this.basketId ? syncMap.outputBasket.idMap[this.basketId] : undefined;\n this.transactionId = syncMap.transaction.idMap[this.transactionId];\n this.spentBy = this.spentBy ? syncMap.transaction.idMap[this.spentBy] : undefined;\n this.outputId = 0;\n this.outputId = await storage.insertOutput(this.toApi(), trx);\n }\n async mergeExisting(storage, since, ei, syncMap, trx) {\n let wasMerged = false;\n if (ei.updated_at > this.updated_at) {\n this.spentBy = ei.spentBy ? 
syncMap.transaction.idMap[ei.spentBy] : undefined;\n this.spendable = ei.spendable;\n this.change = ei.change;\n this.type = ei.type;\n this.providedBy = ei.providedBy;\n this.purpose = ei.purpose;\n this.outputDescription = ei.outputDescription;\n this.spendingDescription = ei.spendingDescription;\n this.senderIdentityKey = ei.senderIdentityKey;\n this.customInstructions = ei.customInstructions;\n this.scriptLength = ei.scriptLength;\n this.scriptOffset = ei.scriptOffset;\n this.lockingScript = ei.lockingScript;\n this.updated_at = new Date(Math.max(ei.updated_at.getTime(), this.updated_at.getTime()));\n await storage.updateOutput(this.id, this.toApi(), trx);\n wasMerged = true;\n }\n return wasMerged;\n }\n}\nexports.EntityOutput = EntityOutput;\n//# sourceMappingURL=EntityOutput.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityOutput.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.EntityOutput = void 0;\nconst index_client_1 = __webpack_require__(/*! ../../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nconst _1 = __webpack_require__(/*! . */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/index.js\");\nclass EntityOutput extends _1.EntityBase {\n constructor(api) {\n const now = new Date();\n super(api || {\n outputId: 0,\n created_at: now,\n updated_at: now,\n userId: 0,\n transactionId: 0,\n spendable: false,\n change: false,\n satoshis: 0,\n outputDescription: '',\n vout: 0,\n type: '',\n providedBy: 'you',\n purpose: '',\n txid: undefined,\n basketId: undefined,\n spentBy: undefined,\n derivationPrefix: undefined,\n derivationSuffix: undefined,\n senderIdentityKey: undefined,\n customInstructions: undefined,\n spendingDescription: undefined,\n scriptLength: undefined,\n scriptOffset: undefined,\n lockingScript: undefined\n });\n }\n updateApi() {\n /* nothing needed yet... 
*/\n }\n get outputId() {\n return this.api.outputId;\n }\n set outputId(v) {\n this.api.outputId = v;\n }\n get created_at() {\n return this.api.created_at;\n }\n set created_at(v) {\n this.api.created_at = v;\n }\n get updated_at() {\n return this.api.updated_at;\n }\n set updated_at(v) {\n this.api.updated_at = v;\n }\n get userId() {\n return this.api.userId;\n }\n set userId(v) {\n this.api.userId = v;\n }\n get transactionId() {\n return this.api.transactionId;\n }\n set transactionId(v) {\n this.api.transactionId = v;\n }\n get basketId() {\n return this.api.basketId;\n }\n set basketId(v) {\n this.api.basketId = v;\n }\n get spentBy() {\n return this.api.spentBy;\n }\n set spentBy(v) {\n this.api.spentBy = v;\n }\n get vout() {\n return this.api.vout;\n }\n set vout(v) {\n this.api.vout = v;\n }\n get satoshis() {\n return this.api.satoshis;\n }\n set satoshis(v) {\n this.api.satoshis = v;\n }\n get outputDescription() {\n return this.api.outputDescription;\n }\n set outputDescription(v) {\n this.api.outputDescription = v;\n }\n get spendable() {\n return this.api.spendable;\n }\n set spendable(v) {\n this.api.spendable = v;\n }\n get change() {\n return this.api.change;\n }\n set change(v) {\n this.api.change = v;\n }\n get txid() {\n return this.api.txid;\n }\n set txid(v) {\n this.api.txid = v;\n }\n get type() {\n return this.api.type;\n }\n set type(v) {\n this.api.type = v;\n }\n get providedBy() {\n return this.api.providedBy;\n }\n set providedBy(v) {\n this.api.providedBy = v;\n }\n get purpose() {\n return this.api.purpose;\n }\n set purpose(v) {\n this.api.purpose = v;\n }\n get spendingDescription() {\n return this.api.spendingDescription;\n }\n set spendingDescription(v) {\n this.api.spendingDescription = v;\n }\n get derivationPrefix() {\n return this.api.derivationPrefix;\n }\n set derivationPrefix(v) {\n this.api.derivationPrefix = v;\n }\n get derivationSuffix() {\n return this.api.derivationSuffix;\n }\n set derivationSuffix(v) {\n 
this.api.derivationSuffix = v;\n }\n get senderIdentityKey() {\n return this.api.senderIdentityKey;\n }\n set senderIdentityKey(v) {\n this.api.senderIdentityKey = v;\n }\n get customInstructions() {\n return this.api.customInstructions;\n }\n set customInstructions(v) {\n this.api.customInstructions = v;\n }\n get lockingScript() {\n return this.api.lockingScript;\n }\n set lockingScript(v) {\n this.api.lockingScript = v;\n }\n get scriptLength() {\n return this.api.scriptLength;\n }\n set scriptLength(v) {\n this.api.scriptLength = v;\n }\n get scriptOffset() {\n return this.api.scriptOffset;\n }\n set scriptOffset(v) {\n this.api.scriptOffset = v;\n }\n get id() {\n return this.api.outputId;\n }\n set id(v) {\n this.api.outputId = v;\n }\n get entityName() {\n return 'output';\n }\n get entityTable() {\n return 'outputs';\n }\n equals(ei, syncMap) {\n if (this.transactionId !== (syncMap ? syncMap.transaction.idMap[ei.transactionId] : ei.transactionId) ||\n this.basketId !== (syncMap && ei.basketId ? syncMap.outputBasket.idMap[ei.basketId] : ei.basketId) ||\n this.spentBy !== (syncMap && ei.spentBy ? 
syncMap.transaction.idMap[ei.spentBy] : ei.spentBy) ||\n this.vout !== ei.vout ||\n this.satoshis !== ei.satoshis ||\n this.spendable !== ei.spendable ||\n this.change !== ei.change ||\n this.txid !== ei.txid ||\n this.type !== ei.type ||\n this.providedBy !== ei.providedBy ||\n this.purpose !== ei.purpose ||\n this.outputDescription !== ei.outputDescription ||\n this.spendingDescription !== ei.spendingDescription ||\n this.derivationPrefix !== ei.derivationPrefix ||\n this.derivationSuffix !== ei.derivationSuffix ||\n this.senderIdentityKey !== ei.senderIdentityKey ||\n this.customInstructions !== ei.customInstructions ||\n !(0, index_client_1.optionalArraysEqual)(this.lockingScript, ei.lockingScript) ||\n this.scriptLength !== ei.scriptLength ||\n this.scriptOffset !== ei.scriptOffset)\n return false;\n return true;\n }\n static async mergeFind(storage, userId, ei, syncMap, trx) {\n const transactionId = syncMap.transaction.idMap[ei.transactionId];\n const basketId = ei.basketId ? syncMap.outputBasket.idMap[ei.basketId] : null;\n const ef = (0, index_client_1.verifyOneOrNone)(await storage.findOutputs({\n partial: { userId, transactionId, vout: ei.vout },\n trx\n }));\n return {\n found: !!ef,\n eo: new EntityOutput(ef || { ...ei }),\n eiId: (0, index_client_1.verifyId)(ei.outputId)\n };\n }\n async mergeNew(storage, userId, syncMap, trx) {\n this.userId = userId;\n this.basketId = this.basketId ? syncMap.outputBasket.idMap[this.basketId] : undefined;\n this.transactionId = syncMap.transaction.idMap[this.transactionId];\n this.spentBy = this.spentBy ? syncMap.transaction.idMap[this.spentBy] : undefined;\n this.outputId = 0;\n this.outputId = await storage.insertOutput(this.toApi(), trx);\n }\n async mergeExisting(storage, since, ei, syncMap, trx) {\n let wasMerged = false;\n if (ei.updated_at > this.updated_at) {\n this.spentBy = ei.spentBy ? 
syncMap.transaction.idMap[ei.spentBy] : undefined;\n this.spendable = ei.spendable;\n this.change = ei.change;\n this.type = ei.type;\n this.providedBy = ei.providedBy;\n this.purpose = ei.purpose;\n this.outputDescription = ei.outputDescription;\n this.spendingDescription = ei.spendingDescription;\n this.senderIdentityKey = ei.senderIdentityKey;\n this.customInstructions = ei.customInstructions;\n this.scriptLength = ei.scriptLength;\n this.scriptOffset = ei.scriptOffset;\n this.lockingScript = ei.lockingScript;\n this.updated_at = new Date(Math.max(ei.updated_at.getTime(), this.updated_at.getTime()));\n await storage.updateOutput(this.id, this.toApi(), trx);\n wasMerged = true;\n }\n return wasMerged;\n }\n}\nexports.EntityOutput = EntityOutput;\n//# sourceMappingURL=EntityOutput.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityOutput.js?\n}"); /***/ }), @@ -3839,7 +3729,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.EntityOutputBasket = void 0;\nconst utilityHelpers_1 = __webpack_require__(/*! ../../../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst EntityBase_1 = __webpack_require__(/*! 
./EntityBase */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityBase.js\");\nclass EntityOutputBasket extends EntityBase_1.EntityBase {\n constructor(api) {\n const now = new Date();\n super(api || {\n basketId: 0,\n created_at: now,\n updated_at: now,\n userId: 0,\n name: '',\n numberOfDesiredUTXOs: 0,\n minimumDesiredUTXOValue: 0,\n isDeleted: false\n });\n }\n get basketId() {\n return this.api.basketId;\n }\n set basketId(v) {\n this.api.basketId = v;\n }\n get created_at() {\n return this.api.created_at;\n }\n set created_at(v) {\n this.api.created_at = v;\n }\n get updated_at() {\n return this.api.updated_at;\n }\n set updated_at(v) {\n this.api.updated_at = v;\n }\n get userId() {\n return this.api.userId;\n }\n set userId(v) {\n this.api.userId = v;\n }\n get name() {\n return this.api.name;\n }\n set name(v) {\n this.api.name = v;\n }\n get numberOfDesiredUTXOs() {\n return this.api.numberOfDesiredUTXOs;\n }\n set numberOfDesiredUTXOs(v) {\n this.api.numberOfDesiredUTXOs = v;\n }\n get minimumDesiredUTXOValue() {\n return this.api.minimumDesiredUTXOValue;\n }\n set minimumDesiredUTXOValue(v) {\n this.api.minimumDesiredUTXOValue = v;\n }\n get isDeleted() {\n return this.api.isDeleted;\n }\n set isDeleted(v) {\n this.api.isDeleted = v;\n }\n get id() {\n return this.api.basketId;\n }\n set id(v) {\n this.api.basketId = v;\n }\n get entityName() {\n return 'outputBasket';\n }\n get entityTable() {\n return 'output_baskets';\n }\n updateApi() {\n /* nothing needed yet... 
*/\n }\n equals(ei, syncMap) {\n const eo = this.api;\n if (eo.name != ei.name ||\n eo.numberOfDesiredUTXOs != ei.numberOfDesiredUTXOs ||\n eo.minimumDesiredUTXOValue != ei.minimumDesiredUTXOValue)\n return false;\n if (syncMap) {\n if (eo.basketId !== syncMap.outputBasket.idMap[(0, utilityHelpers_1.verifyId)(ei.basketId)])\n return false;\n }\n else {\n if (eo.basketId !== ei.basketId || eo.userId !== ei.userId)\n return false;\n }\n return true;\n }\n static async mergeFind(storage, userId, ei, syncMap, trx) {\n const ef = (0, utilityHelpers_1.verifyOneOrNone)(await storage.findOutputBaskets({\n partial: { name: ei.name, userId },\n trx\n }));\n return {\n found: !!ef,\n eo: new EntityOutputBasket(ef || { ...ei }),\n eiId: (0, utilityHelpers_1.verifyId)(ei.basketId)\n };\n }\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n async mergeNew(storage, userId, syncMap, trx) {\n this.userId = userId;\n this.name || (this.name = 'default');\n this.basketId = 0;\n this.basketId = await storage.insertOutputBasket(this.toApi(), trx);\n }\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n async mergeExisting(storage, since, ei, syncMap, trx) {\n let wasMerged = false;\n if (ei.updated_at > this.updated_at) {\n // basket name is its identity, should not change\n this.minimumDesiredUTXOValue = ei.minimumDesiredUTXOValue;\n this.numberOfDesiredUTXOs = ei.numberOfDesiredUTXOs;\n this.isDeleted = ei.isDeleted;\n this.updated_at = new Date(Math.max(ei.updated_at.getTime(), this.updated_at.getTime()));\n await storage.updateOutputBasket(this.id, this.toApi(), trx);\n wasMerged = true;\n }\n return wasMerged;\n }\n}\nexports.EntityOutputBasket = EntityOutputBasket;\n//# sourceMappingURL=EntityOutputBasket.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityOutputBasket.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.EntityOutputBasket = void 
0;\nconst index_client_1 = __webpack_require__(/*! ../../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nconst _1 = __webpack_require__(/*! . */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/index.js\");\nclass EntityOutputBasket extends _1.EntityBase {\n constructor(api) {\n const now = new Date();\n super(api || {\n basketId: 0,\n created_at: now,\n updated_at: now,\n userId: 0,\n name: '',\n numberOfDesiredUTXOs: 0,\n minimumDesiredUTXOValue: 0,\n isDeleted: false\n });\n }\n get basketId() {\n return this.api.basketId;\n }\n set basketId(v) {\n this.api.basketId = v;\n }\n get created_at() {\n return this.api.created_at;\n }\n set created_at(v) {\n this.api.created_at = v;\n }\n get updated_at() {\n return this.api.updated_at;\n }\n set updated_at(v) {\n this.api.updated_at = v;\n }\n get userId() {\n return this.api.userId;\n }\n set userId(v) {\n this.api.userId = v;\n }\n get name() {\n return this.api.name;\n }\n set name(v) {\n this.api.name = v;\n }\n get numberOfDesiredUTXOs() {\n return this.api.numberOfDesiredUTXOs;\n }\n set numberOfDesiredUTXOs(v) {\n this.api.numberOfDesiredUTXOs = v;\n }\n get minimumDesiredUTXOValue() {\n return this.api.minimumDesiredUTXOValue;\n }\n set minimumDesiredUTXOValue(v) {\n this.api.minimumDesiredUTXOValue = v;\n }\n get isDeleted() {\n return this.api.isDeleted;\n }\n set isDeleted(v) {\n this.api.isDeleted = v;\n }\n get id() {\n return this.api.basketId;\n }\n set id(v) {\n this.api.basketId = v;\n }\n get entityName() {\n return 'outputBasket';\n }\n get entityTable() {\n return 'output_baskets';\n }\n updateApi() {\n /* nothing needed yet... 
*/\n }\n equals(ei, syncMap) {\n const eo = this.api;\n if (eo.name != ei.name ||\n eo.numberOfDesiredUTXOs != ei.numberOfDesiredUTXOs ||\n eo.minimumDesiredUTXOValue != ei.minimumDesiredUTXOValue)\n return false;\n if (syncMap) {\n if (eo.basketId !== syncMap.outputBasket.idMap[(0, index_client_1.verifyId)(ei.basketId)])\n return false;\n }\n else {\n if (eo.basketId !== ei.basketId || eo.userId !== ei.userId)\n return false;\n }\n return true;\n }\n static async mergeFind(storage, userId, ei, syncMap, trx) {\n const ef = (0, index_client_1.verifyOneOrNone)(await storage.findOutputBaskets({\n partial: { name: ei.name, userId },\n trx\n }));\n return {\n found: !!ef,\n eo: new EntityOutputBasket(ef || { ...ei }),\n eiId: (0, index_client_1.verifyId)(ei.basketId)\n };\n }\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n async mergeNew(storage, userId, syncMap, trx) {\n this.userId = userId;\n this.name || (this.name = 'default');\n this.basketId = 0;\n this.basketId = await storage.insertOutputBasket(this.toApi(), trx);\n }\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n async mergeExisting(storage, since, ei, syncMap, trx) {\n let wasMerged = false;\n if (ei.updated_at > this.updated_at) {\n // basket name is its identity, should not change\n this.minimumDesiredUTXOValue = ei.minimumDesiredUTXOValue;\n this.numberOfDesiredUTXOs = ei.numberOfDesiredUTXOs;\n this.isDeleted = ei.isDeleted;\n this.updated_at = new Date(Math.max(ei.updated_at.getTime(), this.updated_at.getTime()));\n await storage.updateOutputBasket(this.id, this.toApi(), trx);\n wasMerged = true;\n }\n return wasMerged;\n }\n}\nexports.EntityOutputBasket = EntityOutputBasket;\n//# sourceMappingURL=EntityOutputBasket.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityOutputBasket.js?\n}"); /***/ }), @@ -3850,7 +3740,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; 
-eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.EntityOutputTag = void 0;\nconst utilityHelpers_1 = __webpack_require__(/*! ../../../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst EntityBase_1 = __webpack_require__(/*! ./EntityBase */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityBase.js\");\nclass EntityOutputTag extends EntityBase_1.EntityBase {\n constructor(api) {\n const now = new Date();\n super(api || {\n outputTagId: 0,\n created_at: now,\n updated_at: now,\n tag: '',\n userId: 0,\n isDeleted: false\n });\n }\n updateApi() {\n /* nothing needed yet... */\n }\n get outputTagId() {\n return this.api.outputTagId;\n }\n set outputTagId(v) {\n this.api.outputTagId = v;\n }\n get created_at() {\n return this.api.created_at;\n }\n set created_at(v) {\n this.api.created_at = v;\n }\n get updated_at() {\n return this.api.updated_at;\n }\n set updated_at(v) {\n this.api.updated_at = v;\n }\n get tag() {\n return this.api.tag;\n }\n set tag(v) {\n this.api.tag = v;\n }\n get userId() {\n return this.api.userId;\n }\n set userId(v) {\n this.api.userId = v;\n }\n get isDeleted() {\n return this.api.isDeleted;\n }\n set isDeleted(v) {\n this.api.isDeleted = v;\n }\n get id() {\n return this.api.outputTagId;\n }\n set id(v) {\n this.api.outputTagId = v;\n }\n get entityName() {\n return 'outputTag';\n }\n get entityTable() {\n return 'output_tags';\n }\n equals(ei, syncMap) {\n const eo = this.toApi();\n if (eo.tag != ei.tag || eo.isDeleted != ei.isDeleted)\n return false;\n if (!syncMap) {\n if (eo.userId !== ei.userId)\n return false;\n }\n return true;\n }\n static async mergeFind(storage, userId, ei, syncMap, trx) {\n const ef = (0, utilityHelpers_1.verifyOneOrNone)(await storage.findOutputTags({ partial: { tag: ei.tag, userId }, trx }));\n return {\n found: !!ef,\n eo: new EntityOutputTag(ef || { ...ei }),\n eiId: (0, 
utilityHelpers_1.verifyId)(ei.outputTagId)\n };\n }\n async mergeNew(storage, userId, syncMap, trx) {\n this.userId = userId;\n this.outputTagId = 0;\n this.outputTagId = await storage.insertOutputTag(this.toApi(), trx);\n }\n async mergeExisting(storage, since, ei, syncMap, trx) {\n let wasMerged = false;\n if (ei.updated_at > this.updated_at) {\n this.isDeleted = ei.isDeleted;\n this.updated_at = new Date(Math.max(ei.updated_at.getTime(), this.updated_at.getTime()));\n await storage.updateOutputTag(this.id, this.toApi(), trx);\n wasMerged = true;\n }\n return wasMerged;\n }\n}\nexports.EntityOutputTag = EntityOutputTag;\n//# sourceMappingURL=EntityOutputTag.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityOutputTag.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.EntityOutputTag = void 0;\nconst index_client_1 = __webpack_require__(/*! ../../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nconst _1 = __webpack_require__(/*! . */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/index.js\");\nclass EntityOutputTag extends _1.EntityBase {\n constructor(api) {\n const now = new Date();\n super(api || {\n outputTagId: 0,\n created_at: now,\n updated_at: now,\n tag: '',\n userId: 0,\n isDeleted: false\n });\n }\n updateApi() {\n /* nothing needed yet... 
*/\n }\n get outputTagId() {\n return this.api.outputTagId;\n }\n set outputTagId(v) {\n this.api.outputTagId = v;\n }\n get created_at() {\n return this.api.created_at;\n }\n set created_at(v) {\n this.api.created_at = v;\n }\n get updated_at() {\n return this.api.updated_at;\n }\n set updated_at(v) {\n this.api.updated_at = v;\n }\n get tag() {\n return this.api.tag;\n }\n set tag(v) {\n this.api.tag = v;\n }\n get userId() {\n return this.api.userId;\n }\n set userId(v) {\n this.api.userId = v;\n }\n get isDeleted() {\n return this.api.isDeleted;\n }\n set isDeleted(v) {\n this.api.isDeleted = v;\n }\n get id() {\n return this.api.outputTagId;\n }\n set id(v) {\n this.api.outputTagId = v;\n }\n get entityName() {\n return 'outputTag';\n }\n get entityTable() {\n return 'output_tags';\n }\n equals(ei, syncMap) {\n const eo = this.toApi();\n if (eo.tag != ei.tag || eo.isDeleted != ei.isDeleted)\n return false;\n if (!syncMap) {\n if (eo.userId !== ei.userId)\n return false;\n }\n return true;\n }\n static async mergeFind(storage, userId, ei, syncMap, trx) {\n const ef = (0, index_client_1.verifyOneOrNone)(await storage.findOutputTags({ partial: { tag: ei.tag, userId }, trx }));\n return {\n found: !!ef,\n eo: new EntityOutputTag(ef || { ...ei }),\n eiId: (0, index_client_1.verifyId)(ei.outputTagId)\n };\n }\n async mergeNew(storage, userId, syncMap, trx) {\n this.userId = userId;\n this.outputTagId = 0;\n this.outputTagId = await storage.insertOutputTag(this.toApi(), trx);\n }\n async mergeExisting(storage, since, ei, syncMap, trx) {\n let wasMerged = false;\n if (ei.updated_at > this.updated_at) {\n this.isDeleted = ei.isDeleted;\n this.updated_at = new Date(Math.max(ei.updated_at.getTime(), this.updated_at.getTime()));\n await storage.updateOutputTag(this.id, this.toApi(), trx);\n wasMerged = true;\n }\n return wasMerged;\n }\n}\nexports.EntityOutputTag = EntityOutputTag;\n//# sourceMappingURL=EntityOutputTag.js.map\n\n//# 
sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityOutputTag.js?\n}"); /***/ }), @@ -3861,7 +3751,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.EntityOutputTagMap = void 0;\nconst WERR_errors_1 = __webpack_require__(/*! ../../../sdk/WERR_errors */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WERR_errors.js\");\nconst utilityHelpers_1 = __webpack_require__(/*! ../../../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst EntityBase_1 = __webpack_require__(/*! ./EntityBase */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityBase.js\");\nclass EntityOutputTagMap extends EntityBase_1.EntityBase {\n constructor(api) {\n const now = new Date();\n super(api || {\n created_at: now,\n updated_at: now,\n outputId: 0,\n outputTagId: 0,\n isDeleted: false\n });\n }\n updateApi() {\n /* nothing needed yet... */\n }\n get outputTagId() {\n return this.api.outputTagId;\n }\n set outputTagId(v) {\n this.api.outputTagId = v;\n }\n get outputId() {\n return this.api.outputId;\n }\n set outputId(v) {\n this.api.outputId = v;\n }\n get created_at() {\n return this.api.created_at;\n }\n set created_at(v) {\n this.api.created_at = v;\n }\n get updated_at() {\n return this.api.updated_at;\n }\n set updated_at(v) {\n this.api.updated_at = v;\n }\n get isDeleted() {\n return this.api.isDeleted;\n }\n set isDeleted(v) {\n this.api.isDeleted = v;\n }\n get id() {\n throw new WERR_errors_1.WERR_INVALID_OPERATION('entity has no \"id\" value');\n }\n get entityName() {\n return 'outputTagMap';\n }\n get entityTable() {\n return 'output_tags_map';\n }\n equals(ei, syncMap) {\n const eo = this.toApi();\n if (eo.outputId !== (syncMap ? 
syncMap.output.idMap[(0, utilityHelpers_1.verifyId)(ei.outputId)] : ei.outputId) ||\n eo.outputTagId !== (syncMap ? syncMap.outputTag.idMap[(0, utilityHelpers_1.verifyId)(ei.outputTagId)] : ei.outputTagId) ||\n eo.isDeleted !== ei.isDeleted)\n return false;\n return true;\n }\n static async mergeFind(storage, userId, ei, syncMap, trx) {\n const outputId = syncMap.output.idMap[ei.outputId];\n const outputTagId = syncMap.outputTag.idMap[ei.outputTagId];\n const ef = (0, utilityHelpers_1.verifyOneOrNone)(await storage.findOutputTagMaps({\n partial: { outputId, outputTagId },\n trx\n }));\n return {\n found: !!ef,\n eo: new EntityOutputTagMap(ef || { ...ei }),\n eiId: -1\n };\n }\n async mergeNew(storage, userId, syncMap, trx) {\n this.outputId = syncMap.output.idMap[this.outputId];\n this.outputTagId = syncMap.outputTag.idMap[this.outputTagId];\n await storage.insertOutputTagMap(this.toApi(), trx);\n }\n async mergeExisting(storage, since, ei, syncMap, trx) {\n let wasMerged = false;\n if (ei.updated_at > this.updated_at) {\n this.isDeleted = ei.isDeleted;\n this.updated_at = new Date(Math.max(ei.updated_at.getTime(), this.updated_at.getTime()));\n await storage.updateOutputTagMap(this.outputId, this.outputTagId, this.toApi(), trx);\n wasMerged = true;\n }\n return wasMerged;\n }\n}\nexports.EntityOutputTagMap = EntityOutputTagMap;\n//# sourceMappingURL=EntityOutputTagMap.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityOutputTagMap.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.EntityOutputTagMap = void 0;\nconst index_client_1 = __webpack_require__(/*! ../../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nconst _1 = __webpack_require__(/*! . 
*/ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/index.js\");\nclass EntityOutputTagMap extends _1.EntityBase {\n constructor(api) {\n const now = new Date();\n super(api || {\n created_at: now,\n updated_at: now,\n outputId: 0,\n outputTagId: 0,\n isDeleted: false\n });\n }\n updateApi() {\n /* nothing needed yet... */\n }\n get outputTagId() {\n return this.api.outputTagId;\n }\n set outputTagId(v) {\n this.api.outputTagId = v;\n }\n get outputId() {\n return this.api.outputId;\n }\n set outputId(v) {\n this.api.outputId = v;\n }\n get created_at() {\n return this.api.created_at;\n }\n set created_at(v) {\n this.api.created_at = v;\n }\n get updated_at() {\n return this.api.updated_at;\n }\n set updated_at(v) {\n this.api.updated_at = v;\n }\n get isDeleted() {\n return this.api.isDeleted;\n }\n set isDeleted(v) {\n this.api.isDeleted = v;\n }\n get id() {\n throw new index_client_1.sdk.WERR_INVALID_OPERATION('entity has no \"id\" value');\n }\n get entityName() {\n return 'outputTagMap';\n }\n get entityTable() {\n return 'output_tags_map';\n }\n equals(ei, syncMap) {\n const eo = this.toApi();\n if (eo.outputId !== (syncMap ? syncMap.output.idMap[(0, index_client_1.verifyId)(ei.outputId)] : ei.outputId) ||\n eo.outputTagId !== (syncMap ? 
syncMap.outputTag.idMap[(0, index_client_1.verifyId)(ei.outputTagId)] : ei.outputTagId) ||\n eo.isDeleted !== ei.isDeleted)\n return false;\n return true;\n }\n static async mergeFind(storage, userId, ei, syncMap, trx) {\n const outputId = syncMap.output.idMap[ei.outputId];\n const outputTagId = syncMap.outputTag.idMap[ei.outputTagId];\n const ef = (0, index_client_1.verifyOneOrNone)(await storage.findOutputTagMaps({\n partial: { outputId, outputTagId },\n trx\n }));\n return {\n found: !!ef,\n eo: new EntityOutputTagMap(ef || { ...ei }),\n eiId: -1\n };\n }\n async mergeNew(storage, userId, syncMap, trx) {\n this.outputId = syncMap.output.idMap[this.outputId];\n this.outputTagId = syncMap.outputTag.idMap[this.outputTagId];\n await storage.insertOutputTagMap(this.toApi(), trx);\n }\n async mergeExisting(storage, since, ei, syncMap, trx) {\n let wasMerged = false;\n if (ei.updated_at > this.updated_at) {\n this.isDeleted = ei.isDeleted;\n this.updated_at = new Date(Math.max(ei.updated_at.getTime(), this.updated_at.getTime()));\n await storage.updateOutputTagMap(this.outputId, this.outputTagId, this.toApi(), trx);\n wasMerged = true;\n }\n return wasMerged;\n }\n}\nexports.EntityOutputTagMap = EntityOutputTagMap;\n//# sourceMappingURL=EntityOutputTagMap.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityOutputTagMap.js?\n}"); /***/ }), @@ -3872,7 +3762,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.EntityProvenTx = void 0;\nconst utilityHelpers_1 = __webpack_require__(/*! ../../../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst EntityBase_1 = __webpack_require__(/*! ./EntityBase */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityBase.js\");\nconst sdk_1 = __webpack_require__(/*! 
@bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst WERR_errors_1 = __webpack_require__(/*! ../../../sdk/WERR_errors */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WERR_errors.js\");\nconst WalletError_1 = __webpack_require__(/*! ../../../sdk/WalletError */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WalletError.js\");\nclass EntityProvenTx extends EntityBase_1.EntityBase {\n /**\n * Given a txid and optionally its rawTx, create a new ProvenTx object.\n *\n * rawTx is fetched if not provided.\n *\n * Only succeeds (proven is not undefined) if a proof is confirmed for rawTx,\n * and hash of rawTx is confirmed to match txid\n *\n * The returned ProvenTx and ProvenTxReq objects have not been added to the storage database,\n * this is optional and can be done by the caller if appropriate.\n *\n * @param txid\n * @param services\n * @param rawTx\n * @returns\n */\n static async fromTxid(txid, services, rawTx) {\n var _a;\n const r = { proven: undefined, rawTx };\n const chain = services.chain;\n if (!r.rawTx) {\n const gr = await services.getRawTx(txid);\n if (!(gr === null || gr === void 0 ? void 0 : gr.rawTx))\n // Failing to find anything...\n return r;\n r.rawTx = gr.rawTx;\n }\n const gmpr = await services.getMerklePath(txid);\n if (gmpr.merklePath && gmpr.header) {\n const index = (_a = gmpr.merklePath.path[0].find(l => l.hash === txid)) === null || _a === void 0 ? 
void 0 : _a.offset;\n if (index !== undefined) {\n const api = {\n created_at: new Date(),\n updated_at: new Date(),\n provenTxId: 0,\n txid,\n height: gmpr.header.height,\n index,\n merklePath: gmpr.merklePath.toBinary(),\n rawTx: r.rawTx,\n blockHash: gmpr.header.hash,\n merkleRoot: gmpr.header.merkleRoot\n };\n r.proven = new EntityProvenTx(api);\n }\n }\n return r;\n }\n constructor(api) {\n const now = new Date();\n super(api || {\n provenTxId: 0,\n created_at: now,\n updated_at: now,\n txid: '',\n height: 0,\n index: 0,\n merklePath: [],\n rawTx: [],\n blockHash: '',\n merkleRoot: ''\n });\n }\n updateApi() {\n /* nothing needed yet... */\n }\n /**\n * @returns desirialized `MerklePath` object, value is cached.\n */\n getMerklePath() {\n if (!this._mp)\n this._mp = sdk_1.MerklePath.fromBinary(this.api.merklePath);\n return this._mp;\n }\n get provenTxId() {\n return this.api.provenTxId;\n }\n set provenTxId(v) {\n this.api.provenTxId = v;\n }\n get created_at() {\n return this.api.created_at;\n }\n set created_at(v) {\n this.api.created_at = v;\n }\n get updated_at() {\n return this.api.updated_at;\n }\n set updated_at(v) {\n this.api.updated_at = v;\n }\n get txid() {\n return this.api.txid;\n }\n set txid(v) {\n this.api.txid = v;\n }\n get height() {\n return this.api.height;\n }\n set height(v) {\n this.api.height = v;\n }\n get index() {\n return this.api.index;\n }\n set index(v) {\n this.api.index = v;\n }\n get merklePath() {\n return this.api.merklePath;\n }\n set merklePath(v) {\n this.api.merklePath = v;\n }\n get rawTx() {\n return this.api.rawTx;\n }\n set rawTx(v) {\n this.api.rawTx = v;\n }\n get blockHash() {\n return this.api.blockHash;\n }\n set blockHash(v) {\n this.api.blockHash = v;\n }\n get merkleRoot() {\n return this.api.merkleRoot;\n }\n set merkleRoot(v) {\n this.api.merkleRoot = v;\n }\n get id() {\n return this.api.provenTxId;\n }\n set id(v) {\n this.api.provenTxId = v;\n }\n get entityName() {\n return 'provenTx';\n }\n get 
entityTable() {\n return 'proven_txs';\n }\n equals(ei, syncMap) {\n const eo = this.toApi();\n if (eo.txid != ei.txid ||\n eo.height != ei.height ||\n eo.index != ei.index ||\n !(0, utilityHelpers_1.arraysEqual)(eo.merklePath, ei.merklePath) ||\n !(0, utilityHelpers_1.arraysEqual)(eo.rawTx, ei.rawTx) ||\n eo.blockHash !== ei.blockHash ||\n eo.merkleRoot !== ei.merkleRoot\n // equality does not depend on timestamps.\n // || eo.created_at !== ei.created_at\n // || eo.updated_at !== ei.updated_at\n )\n return false;\n if (syncMap) {\n if (eo.provenTxId !== syncMap.provenTx.idMap[ei.provenTxId])\n return false;\n }\n else {\n if (eo.provenTxId !== ei.provenTxId)\n return false;\n }\n return true;\n }\n static async mergeFind(storage, userId, ei, syncMap, trx) {\n const ef = (0, utilityHelpers_1.verifyOneOrNone)(await storage.findProvenTxs({ partial: { txid: ei.txid }, trx }));\n return {\n found: !!ef,\n eo: new EntityProvenTx(ef || { ...ei }),\n eiId: (0, utilityHelpers_1.verifyId)(ei.provenTxId)\n };\n }\n async mergeNew(storage, userId, syncMap, trx) {\n this.provenTxId = 0;\n // TODO: Since these records are a shared resource, the record must be validated before accepting it...\n this.provenTxId = await storage.insertProvenTx(this.toApi(), trx);\n }\n async mergeExisting(storage, since, ei, syncMap, trx) {\n // ProvenTxs are never updated.\n return false;\n }\n /**\n * Try to create a new ProvenTx from a ProvenTxReq and GetMerkleProofResultApi\n *\n * Otherwise it returns undefined and updates req.status to either 'unknown', 'invalid', or 'unconfirmed'\n *\n * @param req\n * @param gmpResult\n * @returns\n */\n static async fromReq(req, gmpResult, countsAsAttempt) {\n if (!req.txid)\n throw new WERR_errors_1.WERR_MISSING_PARAMETER('req.txid');\n if (!req.rawTx)\n throw new WERR_errors_1.WERR_MISSING_PARAMETER('req.rawTx');\n if (!req.rawTx)\n throw new WERR_errors_1.WERR_INTERNAL('rawTx must be valid');\n for (const note of gmpResult.notes || []) {\n 
req.addHistoryNote(note, true);\n }\n if (!gmpResult.name && !gmpResult.merklePath && !gmpResult.error) {\n // Most likely offline or now services configured.\n // Does not count as a proof attempt.\n return undefined;\n }\n if (!gmpResult.merklePath) {\n if (req.created_at) {\n const ageInMsecs = Date.now() - req.created_at.getTime();\n const ageInMinutes = Math.ceil(ageInMsecs < 1 ? 0 : ageInMsecs / (1000 * 60));\n if (req.attempts > EntityProvenTx.getProofAttemptsLimit && ageInMinutes > EntityProvenTx.getProofMinutes) {\n // Start the process of setting transactions to 'failed'\n const limit = EntityProvenTx.getProofAttemptsLimit;\n const { attempts } = req;\n req.addHistoryNote({ what: 'getMerklePathGiveUp', attempts, limit, ageInMinutes }, true);\n req.notified = false;\n req.status = 'invalid';\n }\n }\n return undefined;\n }\n if (countsAsAttempt)\n req.attempts++;\n const merklePaths = Array.isArray(gmpResult.merklePath) ? gmpResult.merklePath : [gmpResult.merklePath];\n for (const proof of merklePaths) {\n try {\n const now = new Date();\n const leaf = proof.path[0].find(leaf => leaf.txid === true && leaf.hash === req.txid);\n if (!leaf) {\n req.addHistoryNote({ what: 'getMerklePathTxidNotFound' }, true);\n throw new WERR_errors_1.WERR_INTERNAL('merkle path does not contain leaf for txid');\n }\n const proven = new EntityProvenTx({\n created_at: now,\n updated_at: now,\n provenTxId: 0,\n txid: req.txid,\n height: proof.blockHeight,\n index: leaf.offset,\n merklePath: proof.toBinary(),\n rawTx: req.rawTx,\n merkleRoot: gmpResult.header.merkleRoot,\n blockHash: gmpResult.header.hash\n });\n return proven;\n }\n catch (eu) {\n const { code, description } = WalletError_1.WalletError.fromUnknown(eu);\n const { attempts } = req;\n req.addHistoryNote({ what: 'getMerklePathProvenError', attempts, code, description }, true);\n }\n }\n }\n}\nexports.EntityProvenTx = EntityProvenTx;\n/**\n * How high attempts can go before status is forced to invalid\n 
*/\nEntityProvenTx.getProofAttemptsLimit = 8;\n/**\n * How many hours we have to try for a poof\n */\nEntityProvenTx.getProofMinutes = 60;\n//# sourceMappingURL=EntityProvenTx.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityProvenTx.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.EntityProvenTx = void 0;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst index_client_1 = __webpack_require__(/*! ../../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nconst _1 = __webpack_require__(/*! . */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/index.js\");\nclass EntityProvenTx extends _1.EntityBase {\n /**\n * Given a txid and optionally its rawTx, create a new ProvenTx object.\n *\n * rawTx is fetched if not provided.\n *\n * Only succeeds (proven is not undefined) if a proof is confirmed for rawTx,\n * and hash of rawTx is confirmed to match txid\n *\n * The returned ProvenTx and ProvenTxReq objects have not been added to the storage database,\n * this is optional and can be done by the caller if appropriate.\n *\n * @param txid\n * @param services\n * @param rawTx\n * @returns\n */\n static async fromTxid(txid, services, rawTx) {\n var _a;\n const r = { proven: undefined, rawTx };\n const chain = services.chain;\n if (!r.rawTx) {\n const gr = await services.getRawTx(txid);\n if (!(gr === null || gr === void 0 ? void 0 : gr.rawTx))\n // Failing to find anything...\n return r;\n r.rawTx = gr.rawTx;\n }\n const gmpr = await services.getMerklePath(txid);\n if (gmpr.merklePath && gmpr.header) {\n const index = (_a = gmpr.merklePath.path[0].find(l => l.hash === txid)) === null || _a === void 0 ? 
void 0 : _a.offset;\n if (index !== undefined) {\n const api = {\n created_at: new Date(),\n updated_at: new Date(),\n provenTxId: 0,\n txid,\n height: gmpr.header.height,\n index,\n merklePath: gmpr.merklePath.toBinary(),\n rawTx: r.rawTx,\n blockHash: gmpr.header.hash,\n merkleRoot: gmpr.header.merkleRoot\n };\n r.proven = new EntityProvenTx(api);\n }\n }\n return r;\n }\n constructor(api) {\n const now = new Date();\n super(api || {\n provenTxId: 0,\n created_at: now,\n updated_at: now,\n txid: '',\n height: 0,\n index: 0,\n merklePath: [],\n rawTx: [],\n blockHash: '',\n merkleRoot: ''\n });\n }\n updateApi() {\n /* nothing needed yet... */\n }\n /**\n * @returns desirialized `MerklePath` object, value is cached.\n */\n getMerklePath() {\n if (!this._mp)\n this._mp = sdk_1.MerklePath.fromBinary(this.api.merklePath);\n return this._mp;\n }\n get provenTxId() {\n return this.api.provenTxId;\n }\n set provenTxId(v) {\n this.api.provenTxId = v;\n }\n get created_at() {\n return this.api.created_at;\n }\n set created_at(v) {\n this.api.created_at = v;\n }\n get updated_at() {\n return this.api.updated_at;\n }\n set updated_at(v) {\n this.api.updated_at = v;\n }\n get txid() {\n return this.api.txid;\n }\n set txid(v) {\n this.api.txid = v;\n }\n get height() {\n return this.api.height;\n }\n set height(v) {\n this.api.height = v;\n }\n get index() {\n return this.api.index;\n }\n set index(v) {\n this.api.index = v;\n }\n get merklePath() {\n return this.api.merklePath;\n }\n set merklePath(v) {\n this.api.merklePath = v;\n }\n get rawTx() {\n return this.api.rawTx;\n }\n set rawTx(v) {\n this.api.rawTx = v;\n }\n get blockHash() {\n return this.api.blockHash;\n }\n set blockHash(v) {\n this.api.blockHash = v;\n }\n get merkleRoot() {\n return this.api.merkleRoot;\n }\n set merkleRoot(v) {\n this.api.merkleRoot = v;\n }\n get id() {\n return this.api.provenTxId;\n }\n set id(v) {\n this.api.provenTxId = v;\n }\n get entityName() {\n return 'provenTx';\n }\n get 
entityTable() {\n return 'proven_txs';\n }\n equals(ei, syncMap) {\n const eo = this.toApi();\n if (eo.txid != ei.txid ||\n eo.height != ei.height ||\n eo.index != ei.index ||\n !(0, index_client_1.arraysEqual)(eo.merklePath, ei.merklePath) ||\n !(0, index_client_1.arraysEqual)(eo.rawTx, ei.rawTx) ||\n eo.blockHash !== ei.blockHash ||\n eo.merkleRoot !== ei.merkleRoot\n // equality does not depend on timestamps.\n // || eo.created_at !== ei.created_at\n // || eo.updated_at !== ei.updated_at\n )\n return false;\n if (syncMap) {\n if (eo.provenTxId !== syncMap.provenTx.idMap[ei.provenTxId])\n return false;\n }\n else {\n if (eo.provenTxId !== ei.provenTxId)\n return false;\n }\n return true;\n }\n static async mergeFind(storage, userId, ei, syncMap, trx) {\n const ef = (0, index_client_1.verifyOneOrNone)(await storage.findProvenTxs({ partial: { txid: ei.txid }, trx }));\n return {\n found: !!ef,\n eo: new EntityProvenTx(ef || { ...ei }),\n eiId: (0, index_client_1.verifyId)(ei.provenTxId)\n };\n }\n async mergeNew(storage, userId, syncMap, trx) {\n this.provenTxId = 0;\n // TODO: Since these records are a shared resource, the record must be validated before accepting it...\n this.provenTxId = await storage.insertProvenTx(this.toApi(), trx);\n }\n async mergeExisting(storage, since, ei, syncMap, trx) {\n // ProvenTxs are never updated.\n return false;\n }\n /**\n * Try to create a new ProvenTx from a ProvenTxReq and GetMerkleProofResultApi\n *\n * Otherwise it returns undefined and updates req.status to either 'unknown', 'invalid', or 'unconfirmed'\n *\n * @param req\n * @param gmpResult\n * @returns\n */\n static async fromReq(req, gmpResult, countsAsAttempt) {\n if (!req.txid)\n throw new index_client_1.sdk.WERR_MISSING_PARAMETER('req.txid');\n if (!req.rawTx)\n throw new index_client_1.sdk.WERR_MISSING_PARAMETER('req.rawTx');\n if (!req.rawTx)\n throw new index_client_1.sdk.WERR_INTERNAL('rawTx must be valid');\n for (const note of gmpResult.notes || []) {\n 
req.addHistoryNote(note, true);\n }\n if (!gmpResult.name && !gmpResult.merklePath && !gmpResult.error) {\n // Most likely offline or now services configured.\n // Does not count as a proof attempt.\n return undefined;\n }\n if (!gmpResult.merklePath) {\n if (req.created_at) {\n const ageInMsecs = Date.now() - req.created_at.getTime();\n const ageInMinutes = Math.ceil(ageInMsecs < 1 ? 0 : ageInMsecs / (1000 * 60));\n if (req.attempts > EntityProvenTx.getProofAttemptsLimit && ageInMinutes > EntityProvenTx.getProofMinutes) {\n // Start the process of setting transactions to 'failed'\n const limit = EntityProvenTx.getProofAttemptsLimit;\n const { attempts } = req;\n req.addHistoryNote({ what: 'getMerklePathGiveUp', attempts, limit, ageInMinutes }, true);\n req.notified = false;\n req.status = 'invalid';\n }\n }\n return undefined;\n }\n if (countsAsAttempt)\n req.attempts++;\n const merklePaths = Array.isArray(gmpResult.merklePath) ? gmpResult.merklePath : [gmpResult.merklePath];\n for (const proof of merklePaths) {\n try {\n const now = new Date();\n const leaf = proof.path[0].find(leaf => leaf.txid === true && leaf.hash === req.txid);\n if (!leaf) {\n req.addHistoryNote({ what: 'getMerklePathTxidNotFound' }, true);\n throw new index_client_1.sdk.WERR_INTERNAL('merkle path does not contain leaf for txid');\n }\n const proven = new EntityProvenTx({\n created_at: now,\n updated_at: now,\n provenTxId: 0,\n txid: req.txid,\n height: proof.blockHeight,\n index: leaf.offset,\n merklePath: proof.toBinary(),\n rawTx: req.rawTx,\n merkleRoot: gmpResult.header.merkleRoot,\n blockHash: gmpResult.header.hash\n });\n return proven;\n }\n catch (eu) {\n const { code, description } = index_client_1.sdk.WalletError.fromUnknown(eu);\n const { attempts } = req;\n req.addHistoryNote({ what: 'getMerklePathProvenError', attempts, code, description }, true);\n }\n }\n }\n}\nexports.EntityProvenTx = EntityProvenTx;\n/**\n * How high attempts can go before status is forced to invalid\n 
*/\nEntityProvenTx.getProofAttemptsLimit = 8;\n/**\n * How many hours we have to try for a poof\n */\nEntityProvenTx.getProofMinutes = 60;\n//# sourceMappingURL=EntityProvenTx.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityProvenTx.js?\n}"); /***/ }), @@ -3883,7 +3773,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.EntityProvenTxReq = void 0;\nconst types_1 = __webpack_require__(/*! ../../../sdk/types */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/types.js\");\nconst WERR_errors_1 = __webpack_require__(/*! ../../../sdk/WERR_errors */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WERR_errors.js\");\nconst utilityHelpers_1 = __webpack_require__(/*! ../../../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst EntityBase_1 = __webpack_require__(/*! 
./EntityBase */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityBase.js\");\nclass EntityProvenTxReq extends EntityBase_1.EntityBase {\n static async fromStorageTxid(storage, txid, trx) {\n const reqApi = (0, utilityHelpers_1.verifyOneOrNone)(await storage.findProvenTxReqs({ partial: { txid }, trx }));\n if (!reqApi)\n return undefined;\n return new EntityProvenTxReq(reqApi);\n }\n static async fromStorageId(storage, id, trx) {\n const reqApi = (0, utilityHelpers_1.verifyOneOrNone)(await storage.findProvenTxReqs({ partial: { provenTxReqId: id }, trx }));\n if (!reqApi)\n throw new WERR_errors_1.WERR_INTERNAL(`proven_tx_reqs with id ${id} is missing.`);\n return new EntityProvenTxReq(reqApi);\n }\n static fromTxid(txid, rawTx, inputBEEF) {\n const now = new Date();\n return new EntityProvenTxReq({\n provenTxReqId: 0,\n created_at: now,\n updated_at: now,\n txid,\n inputBEEF,\n rawTx,\n status: 'unknown',\n history: '{}',\n notify: '{}',\n attempts: 0,\n notified: false\n });\n }\n packApiHistory() {\n this.api.history = JSON.stringify(this.history);\n }\n packApiNotify() {\n this.api.notify = JSON.stringify(this.notify);\n }\n unpackApiHistory() {\n this.history = JSON.parse(this.api.history);\n }\n unpackApiNotify() {\n this.notify = JSON.parse(this.api.notify);\n }\n get apiHistory() {\n this.packApiHistory();\n return this.api.history;\n }\n get apiNotify() {\n this.packApiNotify();\n return this.api.notify;\n }\n set apiHistory(v) {\n this.api.history = v;\n this.unpackApiHistory();\n }\n set apiNotify(v) {\n this.api.notify = v;\n this.unpackApiNotify();\n }\n updateApi() {\n this.packApiHistory();\n this.packApiNotify();\n }\n unpackApi() {\n this.unpackApiHistory();\n this.unpackApiNotify();\n if (this.notify.transactionIds) {\n // Cleanup null values and duplicates.\n const transactionIds = [];\n for (const id of this.notify.transactionIds) {\n if (Number.isInteger(id) && !transactionIds.some(txid => txid === id))\n 
transactionIds.push(id);\n }\n this.notify.transactionIds = transactionIds;\n }\n }\n async refreshFromStorage(storage, trx) {\n const newApi = (0, utilityHelpers_1.verifyOne)(await storage.findProvenTxReqs({ partial: { provenTxReqId: this.id }, trx }));\n this.api = newApi;\n this.unpackApi();\n }\n constructor(api) {\n const now = new Date();\n super(api || {\n provenTxReqId: 0,\n created_at: now,\n updated_at: now,\n txid: '',\n rawTx: [],\n history: '',\n notify: '',\n attempts: 0,\n status: 'unknown',\n notified: false\n });\n this.history = {};\n this.notify = {};\n this.unpackApi();\n }\n /**\n * Returns history to only what followed since date.\n */\n historySince(since) {\n const fh = { notes: [] };\n const filter = since.toISOString();\n const notes = this.history.notes;\n if (notes && fh.notes) {\n for (const note of notes)\n if (note.when && note.when > filter)\n fh.notes.push(note);\n }\n return fh;\n }\n historyPretty(since, indent = 0) {\n const h = since ? this.historySince(since) : { ...this.history };\n if (!h.notes)\n return '';\n const whenLimit = since ? 
since.toISOString() : undefined;\n let log = '';\n for (const note of h.notes) {\n if (whenLimit && note.when && note.when < whenLimit)\n continue;\n log += this.prettyNote(note) + '\\n';\n }\n return log;\n }\n prettyNote(note) {\n let log = `${note.when}: ${note.what}`;\n for (const [key, val] of Object.entries(note)) {\n if (key !== 'when' && key !== 'what') {\n if (typeof val === 'string')\n log += ' ' + key + ':`' + val + '`';\n else\n log += ' ' + key + ':' + val;\n }\n }\n return log;\n }\n getHistorySummary() {\n const summary = {\n setToCompleted: false,\n setToUnmined: false,\n setToCallback: false,\n setToDoubleSpend: false,\n setToSending: false,\n setToUnconfirmed: false\n };\n const h = this.history;\n if (h.notes) {\n for (const note of h.notes) {\n this.parseHistoryNote(note, summary);\n }\n }\n return summary;\n }\n parseHistoryNote(note, summary) {\n const c = summary || {\n setToCompleted: false,\n setToUnmined: false,\n setToCallback: false,\n setToDoubleSpend: false,\n setToSending: false,\n setToUnconfirmed: false\n };\n let n = this.prettyNote(note);\n try {\n switch (note.what) {\n case 'status':\n {\n const status = note.status_now;\n switch (status) {\n case 'completed':\n c.setToCompleted = true;\n break;\n case 'unmined':\n c.setToUnmined = true;\n break;\n case 'callback':\n c.setToCallback = true;\n break;\n case 'doubleSpend':\n c.setToDoubleSpend = true;\n break;\n case 'sending':\n c.setToSending = true;\n break;\n case 'unconfirmed':\n c.setToUnconfirmed = true;\n break;\n default:\n break;\n }\n }\n break;\n default:\n break;\n }\n }\n catch (_a) {\n /** */\n }\n return n;\n }\n addNotifyTransactionId(id) {\n if (!Number.isInteger(id))\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('id', 'integer');\n const s = new Set(this.notify.transactionIds || []);\n s.add(id);\n this.notify.transactionIds = [...s].sort((a, b) => (a > b ? 1 : a < b ? 
-1 : 0));\n this.notified = false;\n }\n /**\n * Adds a note to history.\n * Notes with identical property values to an existing note are ignored.\n * @param note Note to add\n * @param noDupes if true, only newest note with same `what` value is retained.\n */\n addHistoryNote(note, noDupes) {\n if (!this.history.notes)\n this.history.notes = [];\n if (!note.when)\n note.when = new Date().toISOString();\n if (noDupes) {\n // Remove any existing notes with same 'what' value and either no 'when' or an earlier 'when'\n this.history.notes = this.history.notes.filter(n => n.what !== note.what || (n.when && n.when > note.when));\n }\n let addNote = true;\n for (const n of this.history.notes) {\n let isEqual = true;\n for (const [k, v] of Object.entries(n)) {\n if (v !== note[k]) {\n isEqual = false;\n break;\n }\n }\n if (isEqual)\n addNote = false;\n if (!addNote)\n break;\n }\n if (addNote) {\n this.history.notes.push(note);\n const k = (n) => {\n return `${n.when} ${n.what}`;\n };\n this.history.notes.sort((a, b) => (k(a) < k(b) ? -1 : k(a) > k(b) ? 
1 : 0));\n }\n }\n /**\n * Updates database record with current state of this EntityUser\n \n * @param storage\n * @param trx\n */\n async updateStorage(storage, trx) {\n this.updated_at = new Date();\n this.updateApi();\n if (this.id === 0) {\n await storage.insertProvenTxReq(this.api);\n }\n const update = { ...this.api };\n await storage.updateProvenTxReq(this.id, update, trx);\n }\n /**\n * Update storage with changes to non-static properties:\n * updated_at\n * provenTxId\n * status\n * history\n * notify\n * notified\n * attempts\n * batch\n *\n * @param storage\n * @param trx\n */\n async updateStorageDynamicProperties(storage, trx) {\n this.updated_at = new Date();\n this.updateApi();\n const update = {\n updated_at: this.api.updated_at,\n provenTxId: this.api.provenTxId,\n status: this.api.status,\n history: this.api.history,\n notify: this.api.notify,\n notified: this.api.notified,\n attempts: this.api.attempts,\n batch: this.api.batch\n };\n if (storage.isStorageProvider()) {\n const sp = storage;\n await sp.updateProvenTxReqDynamics(this.id, update, trx);\n }\n else {\n const wsm = storage;\n await wsm.runAsStorageProvider(async (sp) => {\n await sp.updateProvenTxReqDynamics(this.id, update, trx);\n });\n }\n }\n async insertOrMerge(storage, trx) {\n const req = await storage.transaction(async (trx) => {\n let reqApi0 = this.toApi();\n const { req: reqApi1, isNew } = await storage.findOrInsertProvenTxReq(reqApi0, trx);\n if (isNew) {\n return new EntityProvenTxReq(reqApi1);\n }\n else {\n const req = new EntityProvenTxReq(reqApi1);\n req.mergeNotifyTransactionIds(reqApi0);\n req.mergeHistory(reqApi0, undefined, true);\n await req.updateStorage(storage, trx);\n return req;\n }\n }, trx);\n return req;\n }\n /**\n * See `ProvenTxReqStatusApi`\n */\n get status() {\n return this.api.status;\n }\n set status(v) {\n if (v !== this.api.status) {\n this.addHistoryNote({ what: 'status', status_was: this.api.status, status_now: v });\n this.api.status = v;\n }\n 
}\n get provenTxReqId() {\n return this.api.provenTxReqId;\n }\n set provenTxReqId(v) {\n this.api.provenTxReqId = v;\n }\n get created_at() {\n return this.api.created_at;\n }\n set created_at(v) {\n this.api.created_at = v;\n }\n get updated_at() {\n return this.api.updated_at;\n }\n set updated_at(v) {\n this.api.updated_at = v;\n }\n get txid() {\n return this.api.txid;\n }\n set txid(v) {\n this.api.txid = v;\n }\n get inputBEEF() {\n return this.api.inputBEEF;\n }\n set inputBEEF(v) {\n this.api.inputBEEF = v;\n }\n get rawTx() {\n return this.api.rawTx;\n }\n set rawTx(v) {\n this.api.rawTx = v;\n }\n get attempts() {\n return this.api.attempts;\n }\n set attempts(v) {\n this.api.attempts = v;\n }\n get provenTxId() {\n return this.api.provenTxId;\n }\n set provenTxId(v) {\n this.api.provenTxId = v;\n }\n get notified() {\n return this.api.notified;\n }\n set notified(v) {\n this.api.notified = v;\n }\n get batch() {\n return this.api.batch;\n }\n set batch(v) {\n this.api.batch = v;\n }\n get id() {\n return this.api.provenTxReqId;\n }\n set id(v) {\n this.api.provenTxReqId = v;\n }\n get entityName() {\n return 'provenTxReq';\n }\n get entityTable() {\n return 'proven_tx_reqs';\n }\n /**\n * 'convergent' equality must satisfy (A sync B) equals (B sync A)\n */\n equals(ei, syncMap) {\n const eo = this.toApi();\n if (eo.txid != ei.txid ||\n !(0, utilityHelpers_1.arraysEqual)(eo.rawTx, ei.rawTx) ||\n (!eo.inputBEEF && ei.inputBEEF) ||\n (eo.inputBEEF && !ei.inputBEEF) ||\n (eo.inputBEEF && ei.inputBEEF && !(0, utilityHelpers_1.arraysEqual)(eo.inputBEEF, ei.inputBEEF)) ||\n eo.batch != ei.batch)\n return false;\n if (syncMap) {\n if (\n // attempts doesn't matter for convergent equality\n // history doesn't matter for convergent equality\n // only local transactionIds matter, that cared about this txid in sorted order\n eo.provenTxReqId !== syncMap.provenTxReq.idMap[(0, utilityHelpers_1.verifyId)(ei.provenTxReqId)] ||\n (!eo.provenTxId && ei.provenTxId) ||\n 
(eo.provenTxId && !ei.provenTxId) ||\n (ei.provenTxId && eo.provenTxId !== syncMap.provenTx.idMap[ei.provenTxId])\n // || eo.created_at !== minDate(ei.created_at, eo.created_at)\n // || eo.updated_at !== maxDate(ei.updated_at, eo.updated_at)\n )\n return false;\n }\n else {\n if (eo.attempts != ei.attempts ||\n eo.history != ei.history ||\n eo.notify != ei.notify ||\n eo.provenTxReqId !== ei.provenTxReqId ||\n eo.provenTxId !== ei.provenTxId\n // || eo.created_at !== ei.created_at\n // || eo.updated_at !== ei.updated_at\n )\n return false;\n }\n return true;\n }\n static async mergeFind(storage, userId, ei, syncMap, trx) {\n const ef = (0, utilityHelpers_1.verifyOneOrNone)(await storage.findProvenTxReqs({ partial: { txid: ei.txid }, trx }));\n return {\n found: !!ef,\n eo: new EntityProvenTxReq(ef || { ...ei }),\n eiId: (0, utilityHelpers_1.verifyId)(ei.provenTxReqId)\n };\n }\n mapNotifyTransactionIds(syncMap) {\n // Map external notification transaction ids to local ids\n const externalIds = this.notify.transactionIds || [];\n this.notify.transactionIds = [];\n for (const transactionId of externalIds) {\n const localTxId = syncMap.transaction.idMap[transactionId];\n if (localTxId) {\n this.addNotifyTransactionId(localTxId);\n }\n }\n }\n mergeNotifyTransactionIds(ei, syncMap) {\n var _a;\n // Map external notification transaction ids to local ids and merge them if they exist.\n const eie = new EntityProvenTxReq(ei);\n if (eie.notify.transactionIds) {\n (_a = this.notify).transactionIds || (_a.transactionIds = []);\n for (const transactionId of eie.notify.transactionIds) {\n const localTxId = syncMap ? 
syncMap.transaction.idMap[transactionId] : transactionId;\n if (localTxId) {\n this.addNotifyTransactionId(localTxId);\n }\n }\n }\n }\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n mergeHistory(ei, syncMap, noDupes) {\n const eie = new EntityProvenTxReq(ei);\n if (eie.history.notes) {\n for (const note of eie.history.notes) {\n this.addHistoryNote(note);\n }\n }\n }\n static isTerminalStatus(status) {\n return types_1.ProvenTxReqTerminalStatus.some(s => s === status);\n }\n async mergeNew(storage, userId, syncMap, trx) {\n if (this.provenTxId)\n this.provenTxId = syncMap.provenTx.idMap[this.provenTxId];\n this.mapNotifyTransactionIds(syncMap);\n this.provenTxReqId = 0;\n this.provenTxReqId = await storage.insertProvenTxReq(this.toApi(), trx);\n }\n /**\n * When merging `ProvenTxReq`, care is taken to avoid short-cirtuiting notification: `status` must not transition to `completed` without\n * passing through `notifying`. Thus a full convergent merge passes through these sequence steps:\n * 1. Remote storage completes before local storage.\n * 2. The remotely completed req and ProvenTx sync to local storage.\n * 3. The local storage transitions to `notifying`, after merging the remote attempts and history.\n * 4. The local storage notifies, transitioning to `completed`.\n * 5. Having been updated, the local req, but not ProvenTx sync to remote storage, but do not merge because the earlier `completed` wins.\n * 6. 
Convergent equality is achieved (completing work - history and attempts are equal)\n *\n * On terminal failure: `doubleSpend` trumps `invalid` as it contains more data.\n */\n async mergeExisting(storage, since, ei, syncMap, trx) {\n if (!this.batch && ei.batch)\n this.batch = ei.batch;\n else if (this.batch && ei.batch && this.batch !== ei.batch)\n throw new WERR_errors_1.WERR_INTERNAL('ProvenTxReq merge batch not equal.');\n this.mergeHistory(ei, syncMap, true);\n this.mergeNotifyTransactionIds(ei, syncMap);\n this.updated_at = new Date(Math.max(ei.updated_at.getTime(), this.updated_at.getTime()));\n await storage.updateProvenTxReq(this.id, this.toApi(), trx);\n return false;\n }\n}\nexports.EntityProvenTxReq = EntityProvenTxReq;\n//# sourceMappingURL=EntityProvenTxReq.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityProvenTxReq.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.EntityProvenTxReq = void 0;\nconst index_client_1 = __webpack_require__(/*! ../../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nconst _1 = __webpack_require__(/*! . 
*/ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/index.js\");\nclass EntityProvenTxReq extends _1.EntityBase {\n static async fromStorageTxid(storage, txid, trx) {\n const reqApi = (0, index_client_1.verifyOneOrNone)(await storage.findProvenTxReqs({ partial: { txid }, trx }));\n if (!reqApi)\n return undefined;\n return new EntityProvenTxReq(reqApi);\n }\n static async fromStorageId(storage, id, trx) {\n const reqApi = (0, index_client_1.verifyOneOrNone)(await storage.findProvenTxReqs({ partial: { provenTxReqId: id }, trx }));\n if (!reqApi)\n throw new index_client_1.sdk.WERR_INTERNAL(`proven_tx_reqs with id ${id} is missing.`);\n return new EntityProvenTxReq(reqApi);\n }\n static fromTxid(txid, rawTx, inputBEEF) {\n const now = new Date();\n return new EntityProvenTxReq({\n provenTxReqId: 0,\n created_at: now,\n updated_at: now,\n txid,\n inputBEEF,\n rawTx,\n status: 'unknown',\n history: '{}',\n notify: '{}',\n attempts: 0,\n notified: false\n });\n }\n packApiHistory() {\n this.api.history = JSON.stringify(this.history);\n }\n packApiNotify() {\n this.api.notify = JSON.stringify(this.notify);\n }\n unpackApiHistory() {\n this.history = JSON.parse(this.api.history);\n }\n unpackApiNotify() {\n this.notify = JSON.parse(this.api.notify);\n }\n get apiHistory() {\n this.packApiHistory();\n return this.api.history;\n }\n get apiNotify() {\n this.packApiNotify();\n return this.api.notify;\n }\n set apiHistory(v) {\n this.api.history = v;\n this.unpackApiHistory();\n }\n set apiNotify(v) {\n this.api.notify = v;\n this.unpackApiNotify();\n }\n updateApi() {\n this.packApiHistory();\n this.packApiNotify();\n }\n unpackApi() {\n this.unpackApiHistory();\n this.unpackApiNotify();\n if (this.notify.transactionIds) {\n // Cleanup null values and duplicates.\n const transactionIds = [];\n for (const id of this.notify.transactionIds) {\n if (Number.isInteger(id) && !transactionIds.some(txid => txid === id))\n transactionIds.push(id);\n }\n 
this.notify.transactionIds = transactionIds;\n }\n }\n async refreshFromStorage(storage, trx) {\n const newApi = (0, index_client_1.verifyOne)(await storage.findProvenTxReqs({ partial: { provenTxReqId: this.id }, trx }));\n this.api = newApi;\n this.unpackApi();\n }\n constructor(api) {\n const now = new Date();\n super(api || {\n provenTxReqId: 0,\n created_at: now,\n updated_at: now,\n txid: '',\n rawTx: [],\n history: '',\n notify: '',\n attempts: 0,\n status: 'unknown',\n notified: false\n });\n this.history = {};\n this.notify = {};\n this.unpackApi();\n }\n /**\n * Returns history to only what followed since date.\n */\n historySince(since) {\n const fh = { notes: [] };\n const filter = since.toISOString();\n const notes = this.history.notes;\n if (notes && fh.notes) {\n for (const note of notes)\n if (note.when && note.when > filter)\n fh.notes.push(note);\n }\n return fh;\n }\n historyPretty(since, indent = 0) {\n const h = since ? this.historySince(since) : { ...this.history };\n if (!h.notes)\n return '';\n const whenLimit = since ? 
since.toISOString() : undefined;\n let log = '';\n for (const note of h.notes) {\n if (whenLimit && note.when && note.when < whenLimit)\n continue;\n log += this.prettyNote(note) + '\\n';\n }\n return log;\n }\n prettyNote(note) {\n let log = `${note.when}: ${note.what}`;\n for (const [key, val] of Object.entries(note)) {\n if (key !== 'when' && key !== 'what') {\n if (typeof val === 'string')\n log += ' ' + key + ':`' + val + '`';\n else\n log += ' ' + key + ':' + val;\n }\n }\n return log;\n }\n getHistorySummary() {\n const summary = {\n setToCompleted: false,\n setToUnmined: false,\n setToCallback: false,\n setToDoubleSpend: false,\n setToSending: false,\n setToUnconfirmed: false\n };\n const h = this.history;\n if (h.notes) {\n for (const note of h.notes) {\n this.parseHistoryNote(note, summary);\n }\n }\n return summary;\n }\n parseHistoryNote(note, summary) {\n const c = summary || {\n setToCompleted: false,\n setToUnmined: false,\n setToCallback: false,\n setToDoubleSpend: false,\n setToSending: false,\n setToUnconfirmed: false\n };\n let n = this.prettyNote(note);\n try {\n switch (note.what) {\n case 'status':\n {\n const status = note.status_now;\n switch (status) {\n case 'completed':\n c.setToCompleted = true;\n break;\n case 'unmined':\n c.setToUnmined = true;\n break;\n case 'callback':\n c.setToCallback = true;\n break;\n case 'doubleSpend':\n c.setToDoubleSpend = true;\n break;\n case 'sending':\n c.setToSending = true;\n break;\n case 'unconfirmed':\n c.setToUnconfirmed = true;\n break;\n default:\n break;\n }\n }\n break;\n default:\n break;\n }\n }\n catch (_a) {\n /** */\n }\n return n;\n }\n addNotifyTransactionId(id) {\n if (!Number.isInteger(id))\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('id', 'integer');\n const s = new Set(this.notify.transactionIds || []);\n s.add(id);\n this.notify.transactionIds = [...s].sort((a, b) => (a > b ? 1 : a < b ? 
-1 : 0));\n this.notified = false;\n }\n /**\n * Adds a note to history.\n * Notes with identical property values to an existing note are ignored.\n * @param note Note to add\n * @param noDupes if true, only newest note with same `what` value is retained.\n */\n addHistoryNote(note, noDupes) {\n if (!this.history.notes)\n this.history.notes = [];\n if (!note.when)\n note.when = new Date().toISOString();\n if (noDupes) {\n // Remove any existing notes with same 'what' value and either no 'when' or an earlier 'when'\n this.history.notes = this.history.notes.filter(n => n.what !== note.what || (n.when && n.when > note.when));\n }\n let addNote = true;\n for (const n of this.history.notes) {\n let isEqual = true;\n for (const [k, v] of Object.entries(n)) {\n if (v !== note[k]) {\n isEqual = false;\n break;\n }\n }\n if (isEqual)\n addNote = false;\n if (!addNote)\n break;\n }\n if (addNote) {\n this.history.notes.push(note);\n const k = (n) => {\n return `${n.when} ${n.what}`;\n };\n this.history.notes.sort((a, b) => (k(a) < k(b) ? -1 : k(a) > k(b) ? 
1 : 0));\n }\n }\n /**\n * Updates database record with current state of this EntityUser\n \n * @param storage\n * @param trx\n */\n async updateStorage(storage, trx) {\n this.updated_at = new Date();\n this.updateApi();\n if (this.id === 0) {\n await storage.insertProvenTxReq(this.api);\n }\n const update = { ...this.api };\n await storage.updateProvenTxReq(this.id, update, trx);\n }\n /**\n * Update storage with changes to non-static properties:\n * updated_at\n * provenTxId\n * status\n * history\n * notify\n * notified\n * attempts\n * batch\n *\n * @param storage\n * @param trx\n */\n async updateStorageDynamicProperties(storage, trx) {\n this.updated_at = new Date();\n this.updateApi();\n const update = {\n updated_at: this.api.updated_at,\n provenTxId: this.api.provenTxId,\n status: this.api.status,\n history: this.api.history,\n notify: this.api.notify,\n notified: this.api.notified,\n attempts: this.api.attempts,\n batch: this.api.batch\n };\n if (storage.isStorageProvider()) {\n const sp = storage;\n await sp.updateProvenTxReqDynamics(this.id, update, trx);\n }\n else {\n const wsm = storage;\n await wsm.runAsStorageProvider(async (sp) => {\n await sp.updateProvenTxReqDynamics(this.id, update, trx);\n });\n }\n }\n async insertOrMerge(storage, trx) {\n const req = await storage.transaction(async (trx) => {\n let reqApi0 = this.toApi();\n const { req: reqApi1, isNew } = await storage.findOrInsertProvenTxReq(reqApi0, trx);\n if (isNew) {\n return new EntityProvenTxReq(reqApi1);\n }\n else {\n const req = new EntityProvenTxReq(reqApi1);\n req.mergeNotifyTransactionIds(reqApi0);\n req.mergeHistory(reqApi0, undefined, true);\n await req.updateStorage(storage, trx);\n return req;\n }\n }, trx);\n return req;\n }\n /**\n * See `ProvenTxReqStatusApi`\n */\n get status() {\n return this.api.status;\n }\n set status(v) {\n if (v !== this.api.status) {\n this.addHistoryNote({ what: 'status', status_was: this.api.status, status_now: v });\n this.api.status = v;\n }\n 
}\n get provenTxReqId() {\n return this.api.provenTxReqId;\n }\n set provenTxReqId(v) {\n this.api.provenTxReqId = v;\n }\n get created_at() {\n return this.api.created_at;\n }\n set created_at(v) {\n this.api.created_at = v;\n }\n get updated_at() {\n return this.api.updated_at;\n }\n set updated_at(v) {\n this.api.updated_at = v;\n }\n get txid() {\n return this.api.txid;\n }\n set txid(v) {\n this.api.txid = v;\n }\n get inputBEEF() {\n return this.api.inputBEEF;\n }\n set inputBEEF(v) {\n this.api.inputBEEF = v;\n }\n get rawTx() {\n return this.api.rawTx;\n }\n set rawTx(v) {\n this.api.rawTx = v;\n }\n get attempts() {\n return this.api.attempts;\n }\n set attempts(v) {\n this.api.attempts = v;\n }\n get provenTxId() {\n return this.api.provenTxId;\n }\n set provenTxId(v) {\n this.api.provenTxId = v;\n }\n get notified() {\n return this.api.notified;\n }\n set notified(v) {\n this.api.notified = v;\n }\n get batch() {\n return this.api.batch;\n }\n set batch(v) {\n this.api.batch = v;\n }\n get id() {\n return this.api.provenTxReqId;\n }\n set id(v) {\n this.api.provenTxReqId = v;\n }\n get entityName() {\n return 'provenTxReq';\n }\n get entityTable() {\n return 'proven_tx_reqs';\n }\n /**\n * 'convergent' equality must satisfy (A sync B) equals (B sync A)\n */\n equals(ei, syncMap) {\n const eo = this.toApi();\n if (eo.txid != ei.txid ||\n !(0, index_client_1.arraysEqual)(eo.rawTx, ei.rawTx) ||\n (!eo.inputBEEF && ei.inputBEEF) ||\n (eo.inputBEEF && !ei.inputBEEF) ||\n (eo.inputBEEF && ei.inputBEEF && !(0, index_client_1.arraysEqual)(eo.inputBEEF, ei.inputBEEF)) ||\n eo.batch != ei.batch)\n return false;\n if (syncMap) {\n if (\n // attempts doesn't matter for convergent equality\n // history doesn't matter for convergent equality\n // only local transactionIds matter, that cared about this txid in sorted order\n eo.provenTxReqId !== syncMap.provenTxReq.idMap[(0, index_client_1.verifyId)(ei.provenTxReqId)] ||\n (!eo.provenTxId && ei.provenTxId) ||\n 
(eo.provenTxId && !ei.provenTxId) ||\n (ei.provenTxId && eo.provenTxId !== syncMap.provenTx.idMap[ei.provenTxId])\n // || eo.created_at !== minDate(ei.created_at, eo.created_at)\n // || eo.updated_at !== maxDate(ei.updated_at, eo.updated_at)\n )\n return false;\n }\n else {\n if (eo.attempts != ei.attempts ||\n eo.history != ei.history ||\n eo.notify != ei.notify ||\n eo.provenTxReqId !== ei.provenTxReqId ||\n eo.provenTxId !== ei.provenTxId\n // || eo.created_at !== ei.created_at\n // || eo.updated_at !== ei.updated_at\n )\n return false;\n }\n return true;\n }\n static async mergeFind(storage, userId, ei, syncMap, trx) {\n const ef = (0, index_client_1.verifyOneOrNone)(await storage.findProvenTxReqs({ partial: { txid: ei.txid }, trx }));\n return {\n found: !!ef,\n eo: new EntityProvenTxReq(ef || { ...ei }),\n eiId: (0, index_client_1.verifyId)(ei.provenTxReqId)\n };\n }\n mapNotifyTransactionIds(syncMap) {\n // Map external notification transaction ids to local ids\n const externalIds = this.notify.transactionIds || [];\n this.notify.transactionIds = [];\n for (const transactionId of externalIds) {\n const localTxId = syncMap.transaction.idMap[transactionId];\n if (localTxId) {\n this.addNotifyTransactionId(localTxId);\n }\n }\n }\n mergeNotifyTransactionIds(ei, syncMap) {\n var _a;\n // Map external notification transaction ids to local ids and merge them if they exist.\n const eie = new EntityProvenTxReq(ei);\n if (eie.notify.transactionIds) {\n (_a = this.notify).transactionIds || (_a.transactionIds = []);\n for (const transactionId of eie.notify.transactionIds) {\n const localTxId = syncMap ? 
syncMap.transaction.idMap[transactionId] : transactionId;\n if (localTxId) {\n this.addNotifyTransactionId(localTxId);\n }\n }\n }\n }\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n mergeHistory(ei, syncMap, noDupes) {\n const eie = new EntityProvenTxReq(ei);\n if (eie.history.notes) {\n for (const note of eie.history.notes) {\n this.addHistoryNote(note);\n }\n }\n }\n static isTerminalStatus(status) {\n return index_client_1.sdk.ProvenTxReqTerminalStatus.some(s => s === status);\n }\n async mergeNew(storage, userId, syncMap, trx) {\n if (this.provenTxId)\n this.provenTxId = syncMap.provenTx.idMap[this.provenTxId];\n this.mapNotifyTransactionIds(syncMap);\n this.provenTxReqId = 0;\n this.provenTxReqId = await storage.insertProvenTxReq(this.toApi(), trx);\n }\n /**\n * When merging `ProvenTxReq`, care is taken to avoid short-cirtuiting notification: `status` must not transition to `completed` without\n * passing through `notifying`. Thus a full convergent merge passes through these sequence steps:\n * 1. Remote storage completes before local storage.\n * 2. The remotely completed req and ProvenTx sync to local storage.\n * 3. The local storage transitions to `notifying`, after merging the remote attempts and history.\n * 4. The local storage notifies, transitioning to `completed`.\n * 5. Having been updated, the local req, but not ProvenTx sync to remote storage, but do not merge because the earlier `completed` wins.\n * 6. 
Convergent equality is achieved (completing work - history and attempts are equal)\n *\n * On terminal failure: `doubleSpend` trumps `invalid` as it contains more data.\n */\n async mergeExisting(storage, since, ei, syncMap, trx) {\n if (!this.batch && ei.batch)\n this.batch = ei.batch;\n else if (this.batch && ei.batch && this.batch !== ei.batch)\n throw new index_client_1.sdk.WERR_INTERNAL('ProvenTxReq merge batch not equal.');\n this.mergeHistory(ei, syncMap, true);\n this.mergeNotifyTransactionIds(ei, syncMap);\n this.updated_at = new Date(Math.max(ei.updated_at.getTime(), this.updated_at.getTime()));\n await storage.updateProvenTxReq(this.id, this.toApi(), trx);\n return false;\n }\n}\nexports.EntityProvenTxReq = EntityProvenTxReq;\n//# sourceMappingURL=EntityProvenTxReq.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityProvenTxReq.js?\n}"); /***/ }), @@ -3894,7 +3784,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.EntitySyncState = void 0;\nconst WERR_errors_1 = __webpack_require__(/*! ../../../sdk/WERR_errors */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WERR_errors.js\");\nconst utilityHelpers_1 = __webpack_require__(/*! ../../../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst EntityBase_1 = __webpack_require__(/*! ./EntityBase */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityBase.js\");\nconst EntityCertificate_1 = __webpack_require__(/*! ./EntityCertificate */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityCertificate.js\");\nconst EntityCertificateField_1 = __webpack_require__(/*! 
./EntityCertificateField */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityCertificateField.js\");\nconst EntityCommission_1 = __webpack_require__(/*! ./EntityCommission */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityCommission.js\");\nconst EntityOutput_1 = __webpack_require__(/*! ./EntityOutput */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityOutput.js\");\nconst EntityOutputBasket_1 = __webpack_require__(/*! ./EntityOutputBasket */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityOutputBasket.js\");\nconst EntityOutputTag_1 = __webpack_require__(/*! ./EntityOutputTag */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityOutputTag.js\");\nconst EntityOutputTagMap_1 = __webpack_require__(/*! ./EntityOutputTagMap */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityOutputTagMap.js\");\nconst EntityProvenTx_1 = __webpack_require__(/*! ./EntityProvenTx */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityProvenTx.js\");\nconst EntityProvenTxReq_1 = __webpack_require__(/*! ./EntityProvenTxReq */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityProvenTxReq.js\");\nconst EntityTransaction_1 = __webpack_require__(/*! ./EntityTransaction */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityTransaction.js\");\nconst EntityTxLabel_1 = __webpack_require__(/*! ./EntityTxLabel */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityTxLabel.js\");\nconst EntityTxLabelMap_1 = __webpack_require__(/*! ./EntityTxLabelMap */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityTxLabelMap.js\");\nconst EntityUser_1 = __webpack_require__(/*! 
./EntityUser */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityUser.js\");\nconst MergeEntity_1 = __webpack_require__(/*! ./MergeEntity */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/MergeEntity.js\");\nclass EntitySyncState extends EntityBase_1.EntityBase {\n constructor(api) {\n const now = new Date();\n super(api || {\n syncStateId: 0,\n created_at: now,\n updated_at: now,\n userId: 0,\n storageIdentityKey: '',\n storageName: '',\n init: false,\n refNum: '',\n status: 'unknown',\n when: undefined,\n errorLocal: undefined,\n errorOther: undefined,\n satoshis: undefined,\n syncMap: JSON.stringify((0, EntityBase_1.createSyncMap)())\n });\n this.errorLocal = this.api.errorLocal ? JSON.parse(this.api.errorLocal) : undefined;\n this.errorOther = this.api.errorOther ? JSON.parse(this.api.errorOther) : undefined;\n this.syncMap = JSON.parse(this.api.syncMap);\n this.validateSyncMap(this.syncMap);\n }\n validateSyncMap(sm) {\n for (const key of Object.keys(sm)) {\n const esm = sm[key];\n if (typeof esm.maxUpdated_at === 'string')\n esm.maxUpdated_at = new Date(esm.maxUpdated_at);\n }\n }\n static async fromStorage(storage, userIdentityKey, remoteSettings) {\n const { user } = (0, utilityHelpers_1.verifyTruthy)(await storage.findOrInsertUser(userIdentityKey));\n let { syncState: api } = (0, utilityHelpers_1.verifyTruthy)(await storage.findOrInsertSyncStateAuth({ userId: user.userId, identityKey: userIdentityKey }, remoteSettings.storageIdentityKey, remoteSettings.storageName));\n if (!api.syncMap || api.syncMap === '{}')\n api.syncMap = JSON.stringify((0, EntityBase_1.createSyncMap)());\n const ss = new EntitySyncState(api);\n return ss;\n }\n /**\n * Handles both insert and update based on id value: zero indicates insert.\n * @param storage\n * @param notSyncMap if not new and true, excludes updating syncMap in storage.\n * @param trx\n */\n async updateStorage(storage, notSyncMap, trx) {\n 
this.updated_at = new Date();\n this.updateApi(notSyncMap && this.id > 0);\n if (this.id === 0) {\n await storage.insertSyncState(this.api);\n }\n else {\n const update = { ...this.api };\n if (notSyncMap)\n delete update.syncMap;\n delete update.created_at;\n await storage.updateSyncState((0, utilityHelpers_1.verifyId)(this.id), update, trx);\n }\n }\n updateApi(notSyncMap) {\n this.api.errorLocal = this.apiErrorLocal;\n this.api.errorOther = this.apiErrorOther;\n if (!notSyncMap)\n this.api.syncMap = this.apiSyncMap;\n }\n // Pass through api properties\n set created_at(v) {\n this.api.created_at = v;\n }\n get created_at() {\n return this.api.created_at;\n }\n set updated_at(v) {\n this.api.updated_at = v;\n }\n get updated_at() {\n return this.api.updated_at;\n }\n set userId(v) {\n this.api.userId = v;\n }\n get userId() {\n return this.api.userId;\n }\n set storageIdentityKey(v) {\n this.api.storageIdentityKey = v;\n }\n get storageIdentityKey() {\n return this.api.storageIdentityKey;\n }\n set storageName(v) {\n this.api.storageName = v;\n }\n get storageName() {\n return this.api.storageName;\n }\n set init(v) {\n this.api.init = v;\n }\n get init() {\n return this.api.init;\n }\n set refNum(v) {\n this.api.refNum = v;\n }\n get refNum() {\n return this.api.refNum;\n }\n set status(v) {\n this.api.status = v;\n }\n get status() {\n return this.api.status;\n }\n set when(v) {\n this.api.when = v;\n }\n get when() {\n return this.api.when;\n }\n set satoshis(v) {\n this.api.satoshis = v;\n }\n get satoshis() {\n return this.api.satoshis;\n }\n get apiErrorLocal() {\n return this.errorToString(this.errorLocal);\n }\n get apiErrorOther() {\n return this.errorToString(this.errorOther);\n }\n get apiSyncMap() {\n return JSON.stringify(this.syncMap);\n }\n get id() {\n return this.api.syncStateId;\n }\n set id(id) {\n this.api.syncStateId = id;\n }\n get entityName() {\n return 'syncState';\n }\n get entityTable() {\n return 'sync_states';\n }\n static 
mergeIdMap(fromMap, toMap) {\n for (const [key, value] of Object.entries(fromMap)) {\n const fromValue = fromMap[key];\n const toValue = toMap[key];\n if (toValue !== undefined && toValue !== fromValue)\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('syncMap', `an unmapped id or the same mapped id. ${key} maps to ${toValue} not equal to ${fromValue}`);\n if (toValue === undefined)\n toMap[key] = value;\n }\n }\n /**\n * Merge additions to the syncMap\n * @param iSyncMap\n */\n mergeSyncMap(iSyncMap) {\n EntitySyncState.mergeIdMap(iSyncMap.provenTx.idMap, this.syncMap.provenTx.idMap);\n EntitySyncState.mergeIdMap(iSyncMap.outputBasket.idMap, this.syncMap.outputBasket.idMap);\n EntitySyncState.mergeIdMap(iSyncMap.transaction.idMap, this.syncMap.transaction.idMap);\n EntitySyncState.mergeIdMap(iSyncMap.provenTxReq.idMap, this.syncMap.provenTxReq.idMap);\n EntitySyncState.mergeIdMap(iSyncMap.txLabel.idMap, this.syncMap.txLabel.idMap);\n EntitySyncState.mergeIdMap(iSyncMap.output.idMap, this.syncMap.output.idMap);\n EntitySyncState.mergeIdMap(iSyncMap.outputTag.idMap, this.syncMap.outputTag.idMap);\n EntitySyncState.mergeIdMap(iSyncMap.certificate.idMap, this.syncMap.certificate.idMap);\n EntitySyncState.mergeIdMap(iSyncMap.commission.idMap, this.syncMap.commission.idMap);\n }\n /**\n * Eliminate any properties besides code and description\n */\n errorToString(e) {\n if (!e)\n return undefined;\n const es = {\n code: e.code,\n description: e.description,\n stack: e.stack\n };\n return JSON.stringify(es);\n }\n equals(ei, syncMap) {\n return false;\n }\n async mergeNew(storage, userId, syncMap, trx) { }\n async mergeExisting(storage, since, ei, syncMap, trx) {\n return false;\n }\n makeRequestSyncChunkArgs(forIdentityKey, forStorageIdentityKey, maxRoughSize, maxItems) {\n const a = {\n identityKey: forIdentityKey,\n maxRoughSize: maxRoughSize || 10000000,\n maxItems: maxItems || 1000,\n offsets: [],\n since: this.when,\n fromStorageIdentityKey: 
this.storageIdentityKey,\n toStorageIdentityKey: forStorageIdentityKey\n };\n for (const ess of [\n this.syncMap.provenTx,\n this.syncMap.outputBasket,\n this.syncMap.outputTag,\n this.syncMap.txLabel,\n this.syncMap.transaction,\n this.syncMap.output,\n this.syncMap.txLabelMap,\n this.syncMap.outputTagMap,\n this.syncMap.certificate,\n this.syncMap.certificateField,\n this.syncMap.commission,\n this.syncMap.provenTxReq\n ]) {\n if (!ess || !ess.entityName)\n debugger;\n a.offsets.push({ name: ess.entityName, offset: ess.count });\n }\n return a;\n }\n static syncChunkSummary(c) {\n let log = '';\n log += `SYNC CHUNK SUMMARY\n from storage: ${c.fromStorageIdentityKey}\n to storage: ${c.toStorageIdentityKey}\n for user: ${c.userIdentityKey}\n`;\n if (c.user)\n log += ` USER activeStorage ${c.user.activeStorage}\\n`;\n if (!!c.provenTxs) {\n log += ` PROVEN_TXS\\n`;\n for (const r of c.provenTxs) {\n log += ` ${r.provenTxId} ${r.txid}\\n`;\n }\n }\n if (!!c.provenTxReqs) {\n log += ` PROVEN_TX_REQS\\n`;\n for (const r of c.provenTxReqs) {\n log += ` ${r.provenTxReqId} ${r.txid} ${r.status} ${r.provenTxId || ''}\\n`;\n }\n }\n if (!!c.transactions) {\n log += ` TRANSACTIONS\\n`;\n for (const r of c.transactions) {\n log += ` ${r.transactionId} ${r.txid} ${r.status} ${r.provenTxId || ''} sats:${r.satoshis}\\n`;\n }\n }\n if (!!c.outputs) {\n log += ` OUTPUTS\\n`;\n for (const r of c.outputs) {\n log += ` ${r.outputId} ${r.txid}.${r.vout} ${r.transactionId} ${r.spendable ? 
'spendable' : ''} sats:${r.satoshis}\\n`;\n }\n }\n return log;\n }\n async processSyncChunk(writer, args, chunk) {\n var _a;\n const mes = [\n new MergeEntity_1.MergeEntity(chunk.provenTxs, EntityProvenTx_1.EntityProvenTx.mergeFind, this.syncMap.provenTx),\n new MergeEntity_1.MergeEntity(chunk.outputBaskets, EntityOutputBasket_1.EntityOutputBasket.mergeFind, this.syncMap.outputBasket),\n new MergeEntity_1.MergeEntity(chunk.outputTags, EntityOutputTag_1.EntityOutputTag.mergeFind, this.syncMap.outputTag),\n new MergeEntity_1.MergeEntity(chunk.txLabels, EntityTxLabel_1.EntityTxLabel.mergeFind, this.syncMap.txLabel),\n new MergeEntity_1.MergeEntity(chunk.transactions, EntityTransaction_1.EntityTransaction.mergeFind, this.syncMap.transaction),\n new MergeEntity_1.MergeEntity(chunk.outputs, EntityOutput_1.EntityOutput.mergeFind, this.syncMap.output),\n new MergeEntity_1.MergeEntity(chunk.txLabelMaps, EntityTxLabelMap_1.EntityTxLabelMap.mergeFind, this.syncMap.txLabelMap),\n new MergeEntity_1.MergeEntity(chunk.outputTagMaps, EntityOutputTagMap_1.EntityOutputTagMap.mergeFind, this.syncMap.outputTagMap),\n new MergeEntity_1.MergeEntity(chunk.certificates, EntityCertificate_1.EntityCertificate.mergeFind, this.syncMap.certificate),\n new MergeEntity_1.MergeEntity(chunk.certificateFields, EntityCertificateField_1.EntityCertificateField.mergeFind, this.syncMap.certificateField),\n new MergeEntity_1.MergeEntity(chunk.commissions, EntityCommission_1.EntityCommission.mergeFind, this.syncMap.commission),\n new MergeEntity_1.MergeEntity(chunk.provenTxReqs, EntityProvenTxReq_1.EntityProvenTxReq.mergeFind, this.syncMap.provenTxReq)\n ];\n let updates = 0;\n let inserts = 0;\n let maxUpdated_at = undefined;\n let done = true;\n // Merge User\n if (chunk.user) {\n const ei = chunk.user;\n const { found, eo } = await EntityUser_1.EntityUser.mergeFind(writer, this.userId, ei);\n if (found) {\n if (await eo.mergeExisting(writer, args.since, ei)) {\n maxUpdated_at = (0, 
utilityHelpers_1.maxDate)(maxUpdated_at, ei.updated_at);\n updates++;\n }\n }\n }\n // Merge everything else...\n for (const me of mes) {\n const r = await me.merge(args.since, writer, this.userId, this.syncMap);\n // The counts become the offsets for the next chunk.\n me.esm.count += ((_a = me.stateArray) === null || _a === void 0 ? void 0 : _a.length) || 0;\n updates += r.updates;\n inserts += r.inserts;\n maxUpdated_at = (0, utilityHelpers_1.maxDate)(maxUpdated_at, me.esm.maxUpdated_at);\n // If any entity type either did not report results or if there were at least one, then we aren't done.\n if (me.stateArray === undefined || me.stateArray.length > 0)\n done = false;\n //if (me.stateArray !== undefined && me.stateArray.length > 0)\n // console.log(`merged ${me.stateArray?.length} ${me.esm.entityName} ${r.inserts} inserted, ${r.updates} updated`);\n }\n if (done) {\n // Next batch starts further in the future with offsets of zero.\n this.when = maxUpdated_at;\n for (const me of mes)\n me.esm.count = 0;\n }\n await this.updateStorage(writer, false);\n return { done, maxUpdated_at, updates, inserts };\n }\n}\nexports.EntitySyncState = EntitySyncState;\n//# sourceMappingURL=EntitySyncState.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntitySyncState.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.EntitySyncState = void 0;\nconst _1 = __webpack_require__(/*! . */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/index.js\");\nconst index_client_1 = __webpack_require__(/*! 
../../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nclass EntitySyncState extends _1.EntityBase {\n constructor(api) {\n const now = new Date();\n super(api || {\n syncStateId: 0,\n created_at: now,\n updated_at: now,\n userId: 0,\n storageIdentityKey: '',\n storageName: '',\n init: false,\n refNum: '',\n status: 'unknown',\n when: undefined,\n errorLocal: undefined,\n errorOther: undefined,\n satoshis: undefined,\n syncMap: JSON.stringify((0, _1.createSyncMap)())\n });\n this.errorLocal = this.api.errorLocal ? JSON.parse(this.api.errorLocal) : undefined;\n this.errorOther = this.api.errorOther ? JSON.parse(this.api.errorOther) : undefined;\n this.syncMap = JSON.parse(this.api.syncMap);\n this.validateSyncMap(this.syncMap);\n }\n validateSyncMap(sm) {\n for (const key of Object.keys(sm)) {\n const esm = sm[key];\n if (typeof esm.maxUpdated_at === 'string')\n esm.maxUpdated_at = new Date(esm.maxUpdated_at);\n }\n }\n static async fromStorage(storage, userIdentityKey, remoteSettings) {\n const { user } = (0, index_client_1.verifyTruthy)(await storage.findOrInsertUser(userIdentityKey));\n let { syncState: api } = (0, index_client_1.verifyTruthy)(await storage.findOrInsertSyncStateAuth({ userId: user.userId, identityKey: userIdentityKey }, remoteSettings.storageIdentityKey, remoteSettings.storageName));\n if (!api.syncMap || api.syncMap === '{}')\n api.syncMap = JSON.stringify((0, _1.createSyncMap)());\n const ss = new EntitySyncState(api);\n return ss;\n }\n /**\n * Handles both insert and update based on id value: zero indicates insert.\n * @param storage\n * @param notSyncMap if not new and true, excludes updating syncMap in storage.\n * @param trx\n */\n async updateStorage(storage, notSyncMap, trx) {\n this.updated_at = new Date();\n this.updateApi(notSyncMap && this.id > 0);\n if (this.id === 0) {\n await storage.insertSyncState(this.api);\n }\n else {\n const update = { ...this.api };\n if (notSyncMap)\n delete 
update.syncMap;\n delete update.created_at;\n await storage.updateSyncState((0, index_client_1.verifyId)(this.id), update, trx);\n }\n }\n updateApi(notSyncMap) {\n this.api.errorLocal = this.apiErrorLocal;\n this.api.errorOther = this.apiErrorOther;\n if (!notSyncMap)\n this.api.syncMap = this.apiSyncMap;\n }\n // Pass through api properties\n set created_at(v) {\n this.api.created_at = v;\n }\n get created_at() {\n return this.api.created_at;\n }\n set updated_at(v) {\n this.api.updated_at = v;\n }\n get updated_at() {\n return this.api.updated_at;\n }\n set userId(v) {\n this.api.userId = v;\n }\n get userId() {\n return this.api.userId;\n }\n set storageIdentityKey(v) {\n this.api.storageIdentityKey = v;\n }\n get storageIdentityKey() {\n return this.api.storageIdentityKey;\n }\n set storageName(v) {\n this.api.storageName = v;\n }\n get storageName() {\n return this.api.storageName;\n }\n set init(v) {\n this.api.init = v;\n }\n get init() {\n return this.api.init;\n }\n set refNum(v) {\n this.api.refNum = v;\n }\n get refNum() {\n return this.api.refNum;\n }\n set status(v) {\n this.api.status = v;\n }\n get status() {\n return this.api.status;\n }\n set when(v) {\n this.api.when = v;\n }\n get when() {\n return this.api.when;\n }\n set satoshis(v) {\n this.api.satoshis = v;\n }\n get satoshis() {\n return this.api.satoshis;\n }\n get apiErrorLocal() {\n return this.errorToString(this.errorLocal);\n }\n get apiErrorOther() {\n return this.errorToString(this.errorOther);\n }\n get apiSyncMap() {\n return JSON.stringify(this.syncMap);\n }\n get id() {\n return this.api.syncStateId;\n }\n set id(id) {\n this.api.syncStateId = id;\n }\n get entityName() {\n return 'syncState';\n }\n get entityTable() {\n return 'sync_states';\n }\n static mergeIdMap(fromMap, toMap) {\n for (const [key, value] of Object.entries(fromMap)) {\n const fromValue = fromMap[key];\n const toValue = toMap[key];\n if (toValue !== undefined && toValue !== fromValue)\n throw new 
index_client_1.sdk.WERR_INVALID_PARAMETER('syncMap', `an unmapped id or the same mapped id. ${key} maps to ${toValue} not equal to ${fromValue}`);\n if (toValue === undefined)\n toMap[key] = value;\n }\n }\n /**\n * Merge additions to the syncMap\n * @param iSyncMap\n */\n mergeSyncMap(iSyncMap) {\n EntitySyncState.mergeIdMap(iSyncMap.provenTx.idMap, this.syncMap.provenTx.idMap);\n EntitySyncState.mergeIdMap(iSyncMap.outputBasket.idMap, this.syncMap.outputBasket.idMap);\n EntitySyncState.mergeIdMap(iSyncMap.transaction.idMap, this.syncMap.transaction.idMap);\n EntitySyncState.mergeIdMap(iSyncMap.provenTxReq.idMap, this.syncMap.provenTxReq.idMap);\n EntitySyncState.mergeIdMap(iSyncMap.txLabel.idMap, this.syncMap.txLabel.idMap);\n EntitySyncState.mergeIdMap(iSyncMap.output.idMap, this.syncMap.output.idMap);\n EntitySyncState.mergeIdMap(iSyncMap.outputTag.idMap, this.syncMap.outputTag.idMap);\n EntitySyncState.mergeIdMap(iSyncMap.certificate.idMap, this.syncMap.certificate.idMap);\n EntitySyncState.mergeIdMap(iSyncMap.commission.idMap, this.syncMap.commission.idMap);\n }\n /**\n * Eliminate any properties besides code and description\n */\n errorToString(e) {\n if (!e)\n return undefined;\n const es = {\n code: e.code,\n description: e.description,\n stack: e.stack\n };\n return JSON.stringify(es);\n }\n equals(ei, syncMap) {\n return false;\n }\n async mergeNew(storage, userId, syncMap, trx) { }\n async mergeExisting(storage, since, ei, syncMap, trx) {\n return false;\n }\n makeRequestSyncChunkArgs(forIdentityKey, forStorageIdentityKey, maxRoughSize, maxItems) {\n const a = {\n identityKey: forIdentityKey,\n maxRoughSize: maxRoughSize || 10000000,\n maxItems: maxItems || 1000,\n offsets: [],\n since: this.when,\n fromStorageIdentityKey: this.storageIdentityKey,\n toStorageIdentityKey: forStorageIdentityKey\n };\n for (const ess of [\n this.syncMap.provenTx,\n this.syncMap.outputBasket,\n this.syncMap.outputTag,\n this.syncMap.txLabel,\n this.syncMap.transaction,\n 
this.syncMap.output,\n this.syncMap.txLabelMap,\n this.syncMap.outputTagMap,\n this.syncMap.certificate,\n this.syncMap.certificateField,\n this.syncMap.commission,\n this.syncMap.provenTxReq\n ]) {\n if (!ess || !ess.entityName)\n debugger;\n a.offsets.push({ name: ess.entityName, offset: ess.count });\n }\n return a;\n }\n static syncChunkSummary(c) {\n let log = '';\n log += `SYNC CHUNK SUMMARY\n from storage: ${c.fromStorageIdentityKey}\n to storage: ${c.toStorageIdentityKey}\n for user: ${c.userIdentityKey}\n`;\n if (c.user)\n log += ` USER activeStorage ${c.user.activeStorage}\\n`;\n if (!!c.provenTxs) {\n log += ` PROVEN_TXS\\n`;\n for (const r of c.provenTxs) {\n log += ` ${r.provenTxId} ${r.txid}\\n`;\n }\n }\n if (!!c.provenTxReqs) {\n log += ` PROVEN_TX_REQS\\n`;\n for (const r of c.provenTxReqs) {\n log += ` ${r.provenTxReqId} ${r.txid} ${r.status} ${r.provenTxId || ''}\\n`;\n }\n }\n if (!!c.transactions) {\n log += ` TRANSACTIONS\\n`;\n for (const r of c.transactions) {\n log += ` ${r.transactionId} ${r.txid} ${r.status} ${r.provenTxId || ''} sats:${r.satoshis}\\n`;\n }\n }\n if (!!c.outputs) {\n log += ` OUTPUTS\\n`;\n for (const r of c.outputs) {\n log += ` ${r.outputId} ${r.txid}.${r.vout} ${r.transactionId} ${r.spendable ? 
'spendable' : ''} sats:${r.satoshis}\\n`;\n }\n }\n return log;\n }\n async processSyncChunk(writer, args, chunk) {\n var _a;\n const mes = [\n new _1.MergeEntity(chunk.provenTxs, _1.EntityProvenTx.mergeFind, this.syncMap.provenTx),\n new _1.MergeEntity(chunk.outputBaskets, _1.EntityOutputBasket.mergeFind, this.syncMap.outputBasket),\n new _1.MergeEntity(chunk.outputTags, _1.EntityOutputTag.mergeFind, this.syncMap.outputTag),\n new _1.MergeEntity(chunk.txLabels, _1.EntityTxLabel.mergeFind, this.syncMap.txLabel),\n new _1.MergeEntity(chunk.transactions, _1.EntityTransaction.mergeFind, this.syncMap.transaction),\n new _1.MergeEntity(chunk.outputs, _1.EntityOutput.mergeFind, this.syncMap.output),\n new _1.MergeEntity(chunk.txLabelMaps, _1.EntityTxLabelMap.mergeFind, this.syncMap.txLabelMap),\n new _1.MergeEntity(chunk.outputTagMaps, _1.EntityOutputTagMap.mergeFind, this.syncMap.outputTagMap),\n new _1.MergeEntity(chunk.certificates, _1.EntityCertificate.mergeFind, this.syncMap.certificate),\n new _1.MergeEntity(chunk.certificateFields, _1.EntityCertificateField.mergeFind, this.syncMap.certificateField),\n new _1.MergeEntity(chunk.commissions, _1.EntityCommission.mergeFind, this.syncMap.commission),\n new _1.MergeEntity(chunk.provenTxReqs, _1.EntityProvenTxReq.mergeFind, this.syncMap.provenTxReq)\n ];\n let updates = 0;\n let inserts = 0;\n let maxUpdated_at = undefined;\n let done = true;\n // Merge User\n if (chunk.user) {\n const ei = chunk.user;\n const { found, eo } = await _1.EntityUser.mergeFind(writer, this.userId, ei);\n if (found) {\n if (await eo.mergeExisting(writer, args.since, ei)) {\n maxUpdated_at = (0, index_client_1.maxDate)(maxUpdated_at, ei.updated_at);\n updates++;\n }\n }\n }\n // Merge everything else...\n for (const me of mes) {\n const r = await me.merge(args.since, writer, this.userId, this.syncMap);\n // The counts become the offsets for the next chunk.\n me.esm.count += ((_a = me.stateArray) === null || _a === void 0 ? 
void 0 : _a.length) || 0;\n updates += r.updates;\n inserts += r.inserts;\n maxUpdated_at = (0, index_client_1.maxDate)(maxUpdated_at, me.esm.maxUpdated_at);\n // If any entity type either did not report results or if there were at least one, then we aren't done.\n if (me.stateArray === undefined || me.stateArray.length > 0)\n done = false;\n //if (me.stateArray !== undefined && me.stateArray.length > 0)\n // console.log(`merged ${me.stateArray?.length} ${me.esm.entityName} ${r.inserts} inserted, ${r.updates} updated`);\n }\n if (done) {\n // Next batch starts further in the future with offsets of zero.\n this.when = maxUpdated_at;\n for (const me of mes)\n me.esm.count = 0;\n }\n await this.updateStorage(writer, false);\n return { done, maxUpdated_at, updates, inserts };\n }\n}\nexports.EntitySyncState = EntitySyncState;\n//# sourceMappingURL=EntitySyncState.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntitySyncState.js?\n}"); /***/ }), @@ -3905,7 +3795,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.EntityTransaction = void 0;\nconst utilityHelpers_1 = __webpack_require__(/*! ../../../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst EntityBase_1 = __webpack_require__(/*! ./EntityBase */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityBase.js\");\nconst EntityProvenTx_1 = __webpack_require__(/*! ./EntityProvenTx */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityProvenTx.js\");\nconst sdk_1 = __webpack_require__(/*! 
@bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nclass EntityTransaction extends EntityBase_1.EntityBase {\n /**\n * @returns @bsv/sdk Transaction object from parsed rawTx.\n * If rawTx is undefined, returns undefined.\n */\n getBsvTx() {\n if (!this.rawTx)\n return undefined;\n return sdk_1.Transaction.fromBinary(this.rawTx);\n }\n /**\n * @returns array of @bsv/sdk TransactionInput objects from parsed rawTx.\n * If rawTx is undefined, an empty array is returned.\n */\n getBsvTxIns() {\n const tx = this.getBsvTx();\n if (!tx)\n return [];\n return tx.inputs;\n }\n /**\n * Returns an array of \"known\" inputs to this transaction which belong to the same userId.\n * Uses both spentBy and rawTx inputs (if available) to locate inputs from among user's outputs.\n * Not all transaction inputs correspond to prior storage outputs.\n */\n async getInputs(storage, trx) {\n const inputs = await storage.findOutputs({\n partial: { userId: this.userId, spentBy: this.id },\n trx\n });\n // Merge \"inputs\" by spentBy and userId\n for (const input of this.getBsvTxIns()) {\n //console.log(`getInputs of ${this.id}: ${input.txid()} ${input.txOutNum}`)\n const pso = (0, utilityHelpers_1.verifyOneOrNone)(await storage.findOutputs({\n partial: {\n userId: this.userId,\n txid: input.sourceTXID,\n vout: input.sourceOutputIndex\n },\n trx\n }));\n if (pso && !inputs.some(i => i.outputId === pso.outputId))\n inputs.push(pso);\n }\n return inputs;\n }\n constructor(api) {\n const now = new Date();\n super(api || {\n transactionId: 0,\n created_at: now,\n updated_at: now,\n userId: 0,\n txid: '',\n status: 'unprocessed',\n reference: '',\n satoshis: 0,\n description: '',\n isOutgoing: false,\n rawTx: undefined,\n inputBEEF: undefined\n });\n }\n updateApi() {\n /* nothing needed yet... 
*/\n }\n get transactionId() {\n return this.api.transactionId;\n }\n set transactionId(v) {\n this.api.transactionId = v;\n }\n get created_at() {\n return this.api.created_at;\n }\n set created_at(v) {\n this.api.created_at = v;\n }\n get updated_at() {\n return this.api.updated_at;\n }\n set updated_at(v) {\n this.api.updated_at = v;\n }\n get version() {\n return this.api.version;\n }\n set version(v) {\n this.api.version = v;\n }\n get lockTime() {\n return this.api.lockTime;\n }\n set lockTime(v) {\n this.api.lockTime = v;\n }\n get isOutgoing() {\n return this.api.isOutgoing;\n }\n set isOutgoing(v) {\n this.api.isOutgoing = v;\n }\n get status() {\n return this.api.status;\n }\n set status(v) {\n this.api.status = v;\n }\n get userId() {\n return this.api.userId;\n }\n set userId(v) {\n this.api.userId = v;\n }\n get provenTxId() {\n return this.api.provenTxId;\n }\n set provenTxId(v) {\n this.api.provenTxId = v;\n }\n get satoshis() {\n return this.api.satoshis;\n }\n set satoshis(v) {\n this.api.satoshis = v;\n }\n get txid() {\n return this.api.txid;\n }\n set txid(v) {\n this.api.txid = v;\n }\n get reference() {\n return this.api.reference;\n }\n set reference(v) {\n this.api.reference = v;\n }\n get inputBEEF() {\n return this.api.inputBEEF;\n }\n set inputBEEF(v) {\n this.api.inputBEEF = v;\n }\n get description() {\n return this.api.description;\n }\n set description(v) {\n this.api.description = v;\n }\n get rawTx() {\n return this.api.rawTx;\n }\n set rawTx(v) {\n this.api.rawTx = v;\n }\n // Extended (computed / dependent entity) Properties\n //get labels() { return this.api.labels }\n //set labels(v: string[] | undefined) { this.api.labels = v }\n get id() {\n return this.api.transactionId;\n }\n set id(v) {\n this.api.transactionId = v;\n }\n get entityName() {\n return 'transaction';\n }\n get entityTable() {\n return 'transactions';\n }\n equals(ei, syncMap) {\n const eo = this.toApi();\n // Properties that are never updated\n if 
(eo.transactionId !== (syncMap ? syncMap.transaction.idMap[(0, utilityHelpers_1.verifyId)(ei.transactionId)] : ei.transactionId) ||\n eo.reference !== ei.reference)\n return false;\n if (eo.version !== ei.version ||\n eo.lockTime !== ei.lockTime ||\n eo.isOutgoing !== ei.isOutgoing ||\n eo.status !== ei.status ||\n eo.satoshis !== ei.satoshis ||\n eo.txid !== ei.txid ||\n eo.description !== ei.description ||\n !(0, utilityHelpers_1.optionalArraysEqual)(eo.rawTx, ei.rawTx) ||\n !(0, utilityHelpers_1.optionalArraysEqual)(eo.inputBEEF, ei.inputBEEF))\n return false;\n if (!eo.provenTxId !== !ei.provenTxId ||\n (ei.provenTxId && eo.provenTxId !== (syncMap ? syncMap.provenTx.idMap[(0, utilityHelpers_1.verifyId)(ei.provenTxId)] : ei.provenTxId)))\n return false;\n return true;\n }\n static async mergeFind(storage, userId, ei, syncMap, trx) {\n const ef = (0, utilityHelpers_1.verifyOneOrNone)(await storage.findTransactions({\n partial: { reference: ei.reference, userId },\n trx\n }));\n return {\n found: !!ef,\n eo: new EntityTransaction(ef || { ...ei }),\n eiId: (0, utilityHelpers_1.verifyId)(ei.transactionId)\n };\n }\n async mergeNew(storage, userId, syncMap, trx) {\n if (this.provenTxId)\n this.provenTxId = syncMap.provenTx.idMap[this.provenTxId];\n this.userId = userId;\n this.transactionId = 0;\n this.transactionId = await storage.insertTransaction(this.toApi(), trx);\n }\n async mergeExisting(storage, since, ei, syncMap, trx) {\n let wasMerged = false;\n if (ei.updated_at > this.updated_at) {\n // Properties that are never updated:\n // transactionId\n // userId\n // reference\n // Merged properties\n this.version = ei.version;\n this.lockTime = ei.lockTime;\n this.isOutgoing = ei.isOutgoing;\n this.status = ei.status;\n this.provenTxId = ei.provenTxId ? 
syncMap.provenTx.idMap[ei.provenTxId] : undefined;\n this.satoshis = ei.satoshis;\n this.txid = ei.txid;\n this.description = ei.description;\n this.rawTx = ei.rawTx;\n this.inputBEEF = ei.inputBEEF;\n this.updated_at = new Date(Math.max(ei.updated_at.getTime(), this.updated_at.getTime()));\n await storage.updateTransaction(this.id, this.toApi(), trx);\n wasMerged = true;\n }\n return wasMerged;\n }\n async getProvenTx(storage, trx) {\n if (!this.provenTxId)\n return undefined;\n const p = (0, utilityHelpers_1.verifyOneOrNone)(await storage.findProvenTxs({\n partial: { provenTxId: this.provenTxId },\n trx\n }));\n if (!p)\n return undefined;\n return new EntityProvenTx_1.EntityProvenTx(p);\n }\n}\nexports.EntityTransaction = EntityTransaction;\n//# sourceMappingURL=EntityTransaction.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityTransaction.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.EntityTransaction = void 0;\n/* eslint-disable @typescript-eslint/no-unused-vars */\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst index_client_1 = __webpack_require__(/*! ../../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nconst _1 = __webpack_require__(/*! . 
*/ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/index.js\");\nclass EntityTransaction extends _1.EntityBase {\n /**\n * @returns @bsv/sdk Transaction object from parsed rawTx.\n * If rawTx is undefined, returns undefined.\n */\n getBsvTx() {\n if (!this.rawTx)\n return undefined;\n return sdk_1.Transaction.fromBinary(this.rawTx);\n }\n /**\n * @returns array of @bsv/sdk TransactionInput objects from parsed rawTx.\n * If rawTx is undefined, an empty array is returned.\n */\n getBsvTxIns() {\n const tx = this.getBsvTx();\n if (!tx)\n return [];\n return tx.inputs;\n }\n /**\n * Returns an array of \"known\" inputs to this transaction which belong to the same userId.\n * Uses both spentBy and rawTx inputs (if available) to locate inputs from among user's outputs.\n * Not all transaction inputs correspond to prior storage outputs.\n */\n async getInputs(storage, trx) {\n const inputs = await storage.findOutputs({\n partial: { userId: this.userId, spentBy: this.id },\n trx\n });\n // Merge \"inputs\" by spentBy and userId\n for (const input of this.getBsvTxIns()) {\n //console.log(`getInputs of ${this.id}: ${input.txid()} ${input.txOutNum}`)\n const pso = (0, index_client_1.verifyOneOrNone)(await storage.findOutputs({\n partial: {\n userId: this.userId,\n txid: input.sourceTXID,\n vout: input.sourceOutputIndex\n },\n trx\n }));\n if (pso && !inputs.some(i => i.outputId === pso.outputId))\n inputs.push(pso);\n }\n return inputs;\n }\n constructor(api) {\n const now = new Date();\n super(api || {\n transactionId: 0,\n created_at: now,\n updated_at: now,\n userId: 0,\n txid: '',\n status: 'unprocessed',\n reference: '',\n satoshis: 0,\n description: '',\n isOutgoing: false,\n rawTx: undefined,\n inputBEEF: undefined\n });\n }\n updateApi() {\n /* nothing needed yet... 
*/\n }\n get transactionId() {\n return this.api.transactionId;\n }\n set transactionId(v) {\n this.api.transactionId = v;\n }\n get created_at() {\n return this.api.created_at;\n }\n set created_at(v) {\n this.api.created_at = v;\n }\n get updated_at() {\n return this.api.updated_at;\n }\n set updated_at(v) {\n this.api.updated_at = v;\n }\n get version() {\n return this.api.version;\n }\n set version(v) {\n this.api.version = v;\n }\n get lockTime() {\n return this.api.lockTime;\n }\n set lockTime(v) {\n this.api.lockTime = v;\n }\n get isOutgoing() {\n return this.api.isOutgoing;\n }\n set isOutgoing(v) {\n this.api.isOutgoing = v;\n }\n get status() {\n return this.api.status;\n }\n set status(v) {\n this.api.status = v;\n }\n get userId() {\n return this.api.userId;\n }\n set userId(v) {\n this.api.userId = v;\n }\n get provenTxId() {\n return this.api.provenTxId;\n }\n set provenTxId(v) {\n this.api.provenTxId = v;\n }\n get satoshis() {\n return this.api.satoshis;\n }\n set satoshis(v) {\n this.api.satoshis = v;\n }\n get txid() {\n return this.api.txid;\n }\n set txid(v) {\n this.api.txid = v;\n }\n get reference() {\n return this.api.reference;\n }\n set reference(v) {\n this.api.reference = v;\n }\n get inputBEEF() {\n return this.api.inputBEEF;\n }\n set inputBEEF(v) {\n this.api.inputBEEF = v;\n }\n get description() {\n return this.api.description;\n }\n set description(v) {\n this.api.description = v;\n }\n get rawTx() {\n return this.api.rawTx;\n }\n set rawTx(v) {\n this.api.rawTx = v;\n }\n // Extended (computed / dependent entity) Properties\n //get labels() { return this.api.labels }\n //set labels(v: string[] | undefined) { this.api.labels = v }\n get id() {\n return this.api.transactionId;\n }\n set id(v) {\n this.api.transactionId = v;\n }\n get entityName() {\n return 'transaction';\n }\n get entityTable() {\n return 'transactions';\n }\n equals(ei, syncMap) {\n const eo = this.toApi();\n // Properties that are never updated\n if 
(eo.transactionId !== (syncMap ? syncMap.transaction.idMap[(0, index_client_1.verifyId)(ei.transactionId)] : ei.transactionId) ||\n eo.reference !== ei.reference)\n return false;\n if (eo.version !== ei.version ||\n eo.lockTime !== ei.lockTime ||\n eo.isOutgoing !== ei.isOutgoing ||\n eo.status !== ei.status ||\n eo.satoshis !== ei.satoshis ||\n eo.txid !== ei.txid ||\n eo.description !== ei.description ||\n !(0, index_client_1.optionalArraysEqual)(eo.rawTx, ei.rawTx) ||\n !(0, index_client_1.optionalArraysEqual)(eo.inputBEEF, ei.inputBEEF))\n return false;\n if (!eo.provenTxId !== !ei.provenTxId ||\n (ei.provenTxId && eo.provenTxId !== (syncMap ? syncMap.provenTx.idMap[(0, index_client_1.verifyId)(ei.provenTxId)] : ei.provenTxId)))\n return false;\n return true;\n }\n static async mergeFind(storage, userId, ei, syncMap, trx) {\n const ef = (0, index_client_1.verifyOneOrNone)(await storage.findTransactions({\n partial: { reference: ei.reference, userId },\n trx\n }));\n return {\n found: !!ef,\n eo: new EntityTransaction(ef || { ...ei }),\n eiId: (0, index_client_1.verifyId)(ei.transactionId)\n };\n }\n async mergeNew(storage, userId, syncMap, trx) {\n if (this.provenTxId)\n this.provenTxId = syncMap.provenTx.idMap[this.provenTxId];\n this.userId = userId;\n this.transactionId = 0;\n this.transactionId = await storage.insertTransaction(this.toApi(), trx);\n }\n async mergeExisting(storage, since, ei, syncMap, trx) {\n let wasMerged = false;\n if (ei.updated_at > this.updated_at) {\n // Properties that are never updated:\n // transactionId\n // userId\n // reference\n // Merged properties\n this.version = ei.version;\n this.lockTime = ei.lockTime;\n this.isOutgoing = ei.isOutgoing;\n this.status = ei.status;\n this.provenTxId = ei.provenTxId ? 
syncMap.provenTx.idMap[ei.provenTxId] : undefined;\n this.satoshis = ei.satoshis;\n this.txid = ei.txid;\n this.description = ei.description;\n this.rawTx = ei.rawTx;\n this.inputBEEF = ei.inputBEEF;\n this.updated_at = new Date(Math.max(ei.updated_at.getTime(), this.updated_at.getTime()));\n await storage.updateTransaction(this.id, this.toApi(), trx);\n wasMerged = true;\n }\n return wasMerged;\n }\n async getProvenTx(storage, trx) {\n if (!this.provenTxId)\n return undefined;\n const p = (0, index_client_1.verifyOneOrNone)(await storage.findProvenTxs({\n partial: { provenTxId: this.provenTxId },\n trx\n }));\n if (!p)\n return undefined;\n return new _1.EntityProvenTx(p);\n }\n}\nexports.EntityTransaction = EntityTransaction;\n//# sourceMappingURL=EntityTransaction.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityTransaction.js?\n}"); /***/ }), @@ -3916,7 +3806,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.EntityTxLabel = void 0;\nconst utilityHelpers_1 = __webpack_require__(/*! ../../../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst EntityBase_1 = __webpack_require__(/*! ./EntityBase */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityBase.js\");\nclass EntityTxLabel extends EntityBase_1.EntityBase {\n constructor(api) {\n const now = new Date();\n super(api || {\n txLabelId: 0,\n created_at: now,\n updated_at: now,\n label: '',\n userId: 0,\n isDeleted: false\n });\n }\n updateApi() {\n /* nothing needed yet... 
*/\n }\n get txLabelId() {\n return this.api.txLabelId;\n }\n set txLabelId(v) {\n this.api.txLabelId = v;\n }\n get created_at() {\n return this.api.created_at;\n }\n set created_at(v) {\n this.api.created_at = v;\n }\n get updated_at() {\n return this.api.updated_at;\n }\n set updated_at(v) {\n this.api.updated_at = v;\n }\n get label() {\n return this.api.label;\n }\n set label(v) {\n this.api.label = v;\n }\n get userId() {\n return this.api.userId;\n }\n set userId(v) {\n this.api.userId = v;\n }\n get isDeleted() {\n return this.api.isDeleted;\n }\n set isDeleted(v) {\n this.api.isDeleted = v;\n }\n get id() {\n return this.api.txLabelId;\n }\n set id(v) {\n this.api.txLabelId = v;\n }\n get entityName() {\n return 'txLabel';\n }\n get entityTable() {\n return 'tx_labels';\n }\n equals(ei, syncMap) {\n const eo = this.toApi();\n if (eo.label != ei.label || eo.isDeleted != ei.isDeleted)\n return false;\n if (!syncMap) {\n if (eo.userId !== ei.userId)\n return false;\n }\n return true;\n }\n static async mergeFind(storage, userId, ei, syncMap, trx) {\n const ef = (0, utilityHelpers_1.verifyOneOrNone)(await storage.findTxLabels({ partial: { label: ei.label, userId }, trx }));\n return {\n found: !!ef,\n eo: new EntityTxLabel(ef || { ...ei }),\n eiId: (0, utilityHelpers_1.verifyId)(ei.txLabelId)\n };\n }\n async mergeNew(storage, userId, syncMap, trx) {\n this.userId = userId;\n this.txLabelId = 0;\n this.txLabelId = await storage.insertTxLabel(this.toApi(), trx);\n }\n async mergeExisting(storage, since, ei, syncMap, trx) {\n let wasMerged = false;\n if (ei.updated_at > this.updated_at) {\n this.isDeleted = ei.isDeleted;\n this.updated_at = new Date(Math.max(ei.updated_at.getTime(), this.updated_at.getTime()));\n await storage.updateTxLabel(this.id, this.toApi(), trx);\n wasMerged = true;\n }\n return wasMerged;\n }\n}\nexports.EntityTxLabel = EntityTxLabel;\n//# sourceMappingURL=EntityTxLabel.js.map\n\n//# 
sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityTxLabel.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.EntityTxLabel = void 0;\nconst index_client_1 = __webpack_require__(/*! ../../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nconst _1 = __webpack_require__(/*! . */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/index.js\");\nclass EntityTxLabel extends _1.EntityBase {\n constructor(api) {\n const now = new Date();\n super(api || {\n txLabelId: 0,\n created_at: now,\n updated_at: now,\n label: '',\n userId: 0,\n isDeleted: false\n });\n }\n updateApi() {\n /* nothing needed yet... */\n }\n get txLabelId() {\n return this.api.txLabelId;\n }\n set txLabelId(v) {\n this.api.txLabelId = v;\n }\n get created_at() {\n return this.api.created_at;\n }\n set created_at(v) {\n this.api.created_at = v;\n }\n get updated_at() {\n return this.api.updated_at;\n }\n set updated_at(v) {\n this.api.updated_at = v;\n }\n get label() {\n return this.api.label;\n }\n set label(v) {\n this.api.label = v;\n }\n get userId() {\n return this.api.userId;\n }\n set userId(v) {\n this.api.userId = v;\n }\n get isDeleted() {\n return this.api.isDeleted;\n }\n set isDeleted(v) {\n this.api.isDeleted = v;\n }\n get id() {\n return this.api.txLabelId;\n }\n set id(v) {\n this.api.txLabelId = v;\n }\n get entityName() {\n return 'txLabel';\n }\n get entityTable() {\n return 'tx_labels';\n }\n equals(ei, syncMap) {\n const eo = this.toApi();\n if (eo.label != ei.label || eo.isDeleted != ei.isDeleted)\n return false;\n if (!syncMap) {\n if (eo.userId !== ei.userId)\n return false;\n }\n return true;\n }\n static async mergeFind(storage, userId, ei, syncMap, trx) {\n const ef = (0, index_client_1.verifyOneOrNone)(await storage.findTxLabels({ partial: { label: ei.label, userId }, trx }));\n return {\n found: !!ef,\n eo: new 
EntityTxLabel(ef || { ...ei }),\n eiId: (0, index_client_1.verifyId)(ei.txLabelId)\n };\n }\n async mergeNew(storage, userId, syncMap, trx) {\n this.userId = userId;\n this.txLabelId = 0;\n this.txLabelId = await storage.insertTxLabel(this.toApi(), trx);\n }\n async mergeExisting(storage, since, ei, syncMap, trx) {\n let wasMerged = false;\n if (ei.updated_at > this.updated_at) {\n this.isDeleted = ei.isDeleted;\n this.updated_at = new Date(Math.max(ei.updated_at.getTime(), this.updated_at.getTime()));\n await storage.updateTxLabel(this.id, this.toApi(), trx);\n wasMerged = true;\n }\n return wasMerged;\n }\n}\nexports.EntityTxLabel = EntityTxLabel;\n//# sourceMappingURL=EntityTxLabel.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityTxLabel.js?\n}"); /***/ }), @@ -3927,7 +3817,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.EntityTxLabelMap = void 0;\nconst utilityHelpers_1 = __webpack_require__(/*! ../../../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst EntityBase_1 = __webpack_require__(/*! ./EntityBase */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityBase.js\");\nconst WERR_errors_1 = __webpack_require__(/*! ../../../sdk/WERR_errors */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WERR_errors.js\");\nclass EntityTxLabelMap extends EntityBase_1.EntityBase {\n constructor(api) {\n const now = new Date();\n super(api || {\n created_at: now,\n updated_at: now,\n transactionId: 0,\n txLabelId: 0,\n isDeleted: false\n });\n }\n updateApi() {\n /* nothing needed yet... 
*/\n }\n get txLabelId() {\n return this.api.txLabelId;\n }\n set txLabelId(v) {\n this.api.txLabelId = v;\n }\n get transactionId() {\n return this.api.transactionId;\n }\n set transactionId(v) {\n this.api.transactionId = v;\n }\n get created_at() {\n return this.api.created_at;\n }\n set created_at(v) {\n this.api.created_at = v;\n }\n get updated_at() {\n return this.api.updated_at;\n }\n set updated_at(v) {\n this.api.updated_at = v;\n }\n get isDeleted() {\n return this.api.isDeleted;\n }\n set isDeleted(v) {\n this.api.isDeleted = v;\n }\n get id() {\n throw new WERR_errors_1.WERR_INVALID_OPERATION('entity has no \"id\" value');\n } // entity does not have its own id.\n get entityName() {\n return 'txLabelMap';\n }\n get entityTable() {\n return 'tx_labels_map';\n }\n equals(ei, syncMap) {\n const eo = this.toApi();\n if (eo.transactionId !== (syncMap ? syncMap.transaction.idMap[(0, utilityHelpers_1.verifyId)(ei.transactionId)] : ei.transactionId) ||\n eo.txLabelId !== (syncMap ? 
syncMap.txLabel.idMap[(0, utilityHelpers_1.verifyId)(ei.txLabelId)] : ei.txLabelId) ||\n eo.isDeleted !== ei.isDeleted)\n return false;\n return true;\n }\n static async mergeFind(storage, userId, ei, syncMap, trx) {\n const transactionId = syncMap.transaction.idMap[ei.transactionId];\n const txLabelId = syncMap.txLabel.idMap[ei.txLabelId];\n const ef = (0, utilityHelpers_1.verifyOneOrNone)(await storage.findTxLabelMaps({\n partial: { transactionId, txLabelId },\n trx\n }));\n return {\n found: !!ef,\n eo: new EntityTxLabelMap(ef || { ...ei }),\n eiId: -1\n };\n }\n async mergeNew(storage, userId, syncMap, trx) {\n this.transactionId = syncMap.transaction.idMap[this.transactionId];\n this.txLabelId = syncMap.txLabel.idMap[this.txLabelId];\n await storage.insertTxLabelMap(this.toApi(), trx);\n }\n async mergeExisting(storage, since, ei, syncMap, trx) {\n let wasMerged = false;\n if (ei.updated_at > this.updated_at) {\n this.isDeleted = ei.isDeleted;\n this.updated_at = new Date(Math.max(ei.updated_at.getTime(), this.updated_at.getTime()));\n await storage.updateTxLabelMap(this.transactionId, this.txLabelId, this.toApi(), trx);\n wasMerged = true;\n }\n return wasMerged;\n }\n}\nexports.EntityTxLabelMap = EntityTxLabelMap;\n//# sourceMappingURL=EntityTxLabelMap.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityTxLabelMap.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.EntityTxLabelMap = void 0;\nconst index_client_1 = __webpack_require__(/*! ../../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nconst _1 = __webpack_require__(/*! . 
*/ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/index.js\");\nclass EntityTxLabelMap extends _1.EntityBase {\n constructor(api) {\n const now = new Date();\n super(api || {\n created_at: now,\n updated_at: now,\n transactionId: 0,\n txLabelId: 0,\n isDeleted: false\n });\n }\n updateApi() {\n /* nothing needed yet... */\n }\n get txLabelId() {\n return this.api.txLabelId;\n }\n set txLabelId(v) {\n this.api.txLabelId = v;\n }\n get transactionId() {\n return this.api.transactionId;\n }\n set transactionId(v) {\n this.api.transactionId = v;\n }\n get created_at() {\n return this.api.created_at;\n }\n set created_at(v) {\n this.api.created_at = v;\n }\n get updated_at() {\n return this.api.updated_at;\n }\n set updated_at(v) {\n this.api.updated_at = v;\n }\n get isDeleted() {\n return this.api.isDeleted;\n }\n set isDeleted(v) {\n this.api.isDeleted = v;\n }\n get id() {\n throw new index_client_1.sdk.WERR_INVALID_OPERATION('entity has no \"id\" value');\n } // entity does not have its own id.\n get entityName() {\n return 'txLabelMap';\n }\n get entityTable() {\n return 'tx_labels_map';\n }\n equals(ei, syncMap) {\n const eo = this.toApi();\n if (eo.transactionId !== (syncMap ? syncMap.transaction.idMap[(0, index_client_1.verifyId)(ei.transactionId)] : ei.transactionId) ||\n eo.txLabelId !== (syncMap ? 
syncMap.txLabel.idMap[(0, index_client_1.verifyId)(ei.txLabelId)] : ei.txLabelId) ||\n eo.isDeleted !== ei.isDeleted)\n return false;\n return true;\n }\n static async mergeFind(storage, userId, ei, syncMap, trx) {\n const transactionId = syncMap.transaction.idMap[ei.transactionId];\n const txLabelId = syncMap.txLabel.idMap[ei.txLabelId];\n const ef = (0, index_client_1.verifyOneOrNone)(await storage.findTxLabelMaps({\n partial: { transactionId, txLabelId },\n trx\n }));\n return {\n found: !!ef,\n eo: new EntityTxLabelMap(ef || { ...ei }),\n eiId: -1\n };\n }\n async mergeNew(storage, userId, syncMap, trx) {\n this.transactionId = syncMap.transaction.idMap[this.transactionId];\n this.txLabelId = syncMap.txLabel.idMap[this.txLabelId];\n await storage.insertTxLabelMap(this.toApi(), trx);\n }\n async mergeExisting(storage, since, ei, syncMap, trx) {\n let wasMerged = false;\n if (ei.updated_at > this.updated_at) {\n this.isDeleted = ei.isDeleted;\n this.updated_at = new Date(Math.max(ei.updated_at.getTime(), this.updated_at.getTime()));\n await storage.updateTxLabelMap(this.transactionId, this.txLabelId, this.toApi(), trx);\n wasMerged = true;\n }\n return wasMerged;\n }\n}\nexports.EntityTxLabelMap = EntityTxLabelMap;\n//# sourceMappingURL=EntityTxLabelMap.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityTxLabelMap.js?\n}"); /***/ }), @@ -3938,7 +3828,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.EntityUser = void 0;\nconst utilityHelpers_1 = __webpack_require__(/*! ../../../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst EntityBase_1 = __webpack_require__(/*! 
./EntityBase */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityBase.js\");\nconst WERR_errors_1 = __webpack_require__(/*! ../../../sdk/WERR_errors */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WERR_errors.js\");\nclass EntityUser extends EntityBase_1.EntityBase {\n constructor(api) {\n const now = new Date();\n super(api || {\n userId: 0,\n created_at: now,\n updated_at: now,\n identityKey: '',\n activeStorage: ''\n });\n }\n updateApi() {\n /* nothing needed yet... */\n }\n get userId() {\n return this.api.userId;\n }\n set userId(v) {\n this.api.userId = v;\n }\n get created_at() {\n return this.api.created_at;\n }\n set created_at(v) {\n this.api.created_at = v;\n }\n get updated_at() {\n return this.api.updated_at;\n }\n set updated_at(v) {\n this.api.updated_at = v;\n }\n get identityKey() {\n return this.api.identityKey;\n }\n set identityKey(v) {\n this.api.identityKey = v;\n }\n get activeStorage() {\n return this.api.activeStorage;\n }\n set activeStorage(v) {\n this.api.activeStorage = v;\n }\n get id() {\n return this.api.userId;\n }\n set id(v) {\n this.api.userId = v;\n }\n get entityName() {\n return 'user';\n }\n get entityTable() {\n return 'users';\n }\n equals(ei, syncMap) {\n const eo = this.toApi();\n if (eo.identityKey != ei.identityKey || eo.activeStorage != ei.activeStorage)\n return false;\n if (!syncMap) {\n /** */\n }\n return true;\n }\n static async mergeFind(storage, userId, ei, trx) {\n const ef = (0, utilityHelpers_1.verifyOneOrNone)(await storage.findUsers({ partial: { identityKey: ei.identityKey }, trx }));\n if (ef && ef.userId != userId)\n throw new WERR_errors_1.WERR_INTERNAL('logic error, userIds don not match.');\n return {\n found: !!ef,\n eo: new EntityUser(ef || { ...ei }),\n eiId: (0, utilityHelpers_1.verifyId)(ei.userId)\n };\n }\n async mergeNew(storage, userId, syncMap, trx) {\n throw new WERR_errors_1.WERR_INTERNAL('a sync chunk merge must never create a new user');\n }\n 
async mergeExisting(storage, since, ei, syncMap, trx) {\n let wasMerged = false;\n // The condition on activeStorage here is critical as a new user record may have just been created\n // in a backup store to which a backup is being pushed.\n if (ei.updated_at > this.updated_at || (this.activeStorage === undefined && ei.activeStorage !== undefined)) {\n this.activeStorage = ei.activeStorage;\n this.updated_at = new Date(Math.max(ei.updated_at.getTime(), this.updated_at.getTime()));\n await storage.updateUser(this.id, this.toApi(), trx);\n wasMerged = true;\n }\n return wasMerged;\n }\n}\nexports.EntityUser = EntityUser;\n//# sourceMappingURL=EntityUser.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityUser.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.EntityUser = void 0;\nconst index_client_1 = __webpack_require__(/*! ../../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nconst _1 = __webpack_require__(/*! . */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/index.js\");\nclass EntityUser extends _1.EntityBase {\n constructor(api) {\n const now = new Date();\n super(api || {\n userId: 0,\n created_at: now,\n updated_at: now,\n identityKey: '',\n activeStorage: ''\n });\n }\n updateApi() {\n /* nothing needed yet... 
*/\n }\n get userId() {\n return this.api.userId;\n }\n set userId(v) {\n this.api.userId = v;\n }\n get created_at() {\n return this.api.created_at;\n }\n set created_at(v) {\n this.api.created_at = v;\n }\n get updated_at() {\n return this.api.updated_at;\n }\n set updated_at(v) {\n this.api.updated_at = v;\n }\n get identityKey() {\n return this.api.identityKey;\n }\n set identityKey(v) {\n this.api.identityKey = v;\n }\n get activeStorage() {\n return this.api.activeStorage;\n }\n set activeStorage(v) {\n this.api.activeStorage = v;\n }\n get id() {\n return this.api.userId;\n }\n set id(v) {\n this.api.userId = v;\n }\n get entityName() {\n return 'user';\n }\n get entityTable() {\n return 'users';\n }\n equals(ei, syncMap) {\n const eo = this.toApi();\n if (eo.identityKey != ei.identityKey || eo.activeStorage != ei.activeStorage)\n return false;\n if (!syncMap) {\n /** */\n }\n return true;\n }\n static async mergeFind(storage, userId, ei, trx) {\n const ef = (0, index_client_1.verifyOneOrNone)(await storage.findUsers({ partial: { identityKey: ei.identityKey }, trx }));\n if (ef && ef.userId != userId)\n throw new index_client_1.sdk.WERR_INTERNAL('logic error, userIds don not match.');\n return {\n found: !!ef,\n eo: new EntityUser(ef || { ...ei }),\n eiId: (0, index_client_1.verifyId)(ei.userId)\n };\n }\n async mergeNew(storage, userId, syncMap, trx) {\n throw new index_client_1.sdk.WERR_INTERNAL('a sync chunk merge must never create a new user');\n }\n async mergeExisting(storage, since, ei, syncMap, trx) {\n let wasMerged = false;\n // The condition on activeStorage here is critical as a new user record may have just been created\n // in a backup store to which a backup is being pushed.\n if (ei.updated_at > this.updated_at || (this.activeStorage === undefined && ei.activeStorage !== undefined)) {\n this.activeStorage = ei.activeStorage;\n this.updated_at = new Date(Math.max(ei.updated_at.getTime(), this.updated_at.getTime()));\n await 
storage.updateUser(this.id, this.toApi(), trx);\n wasMerged = true;\n }\n return wasMerged;\n }\n}\nexports.EntityUser = EntityUser;\n//# sourceMappingURL=EntityUser.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/EntityUser.js?\n}"); /***/ }), @@ -3949,7 +3839,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.MergeEntity = void 0;\nconst utilityHelpers_1 = __webpack_require__(/*! ../../../utility/utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nconst WERR_errors_1 = __webpack_require__(/*! ../../../sdk/WERR_errors */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WERR_errors.js\");\n/**\n * @param API one of the storage table interfaces.\n * @param DE the corresponding entity class\n */\nclass MergeEntity {\n constructor(stateArray, find, \n /** id map for primary id of API and DE object. */\n esm) {\n this.stateArray = stateArray;\n this.find = find;\n this.esm = esm;\n this.idMap = esm.idMap;\n }\n updateSyncMap(map, inId, outId) {\n const i = (0, utilityHelpers_1.verifyId)(inId);\n const o = (0, utilityHelpers_1.verifyId)(outId);\n if (map[i] === undefined) {\n map[i] = o;\n }\n else if (map[i] !== o)\n throw new WERR_errors_1.WERR_INTERNAL(`updateSyncMap map[${inId}] can't override ${map[i]} with ${o}`);\n }\n /**\n * @param since date of current sync chunk\n */\n async merge(since, storage, userId, syncMap, trx) {\n let inserts = 0, updates = 0;\n if (!this.stateArray)\n return { inserts, updates };\n for (const ei of this.stateArray) {\n this.esm.maxUpdated_at = (0, utilityHelpers_1.maxDate)(this.esm.maxUpdated_at, ei.updated_at);\n /**\n * TODO:\n * Switch to using syncMap. 
If the ei id is in the map its an existing merge, else its a new merge.\n */\n try {\n const { found, eo, eiId } = await this.find(storage, userId, ei, syncMap, trx);\n if (found) {\n if (await eo.mergeExisting(storage, since, ei, syncMap, trx)) {\n updates++;\n }\n }\n else {\n await eo.mergeNew(storage, userId, syncMap, trx);\n inserts++;\n }\n if (eiId > -1)\n this.updateSyncMap(this.idMap, eiId, eo.id);\n }\n catch (eu) {\n throw eu;\n }\n }\n return { inserts, updates };\n }\n}\nexports.MergeEntity = MergeEntity;\n//# sourceMappingURL=MergeEntity.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/MergeEntity.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.MergeEntity = void 0;\nconst index_client_1 = __webpack_require__(/*! ../../../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\n/**\n * @param API one of the storage table interfaces.\n * @param DE the corresponding entity class\n */\nclass MergeEntity {\n constructor(stateArray, find, \n /** id map for primary id of API and DE object. */\n esm) {\n this.stateArray = stateArray;\n this.find = find;\n this.esm = esm;\n this.idMap = esm.idMap;\n }\n updateSyncMap(map, inId, outId) {\n const i = (0, index_client_1.verifyId)(inId);\n const o = (0, index_client_1.verifyId)(outId);\n if (map[i] === undefined) {\n map[i] = o;\n }\n else if (map[i] !== o)\n throw new index_client_1.sdk.WERR_INTERNAL(`updateSyncMap map[${inId}] can't override ${map[i]} with ${o}`);\n }\n /**\n * @param since date of current sync chunk\n */\n async merge(since, storage, userId, syncMap, trx) {\n let inserts = 0, updates = 0;\n if (!this.stateArray)\n return { inserts, updates };\n for (const ei of this.stateArray) {\n this.esm.maxUpdated_at = (0, index_client_1.maxDate)(this.esm.maxUpdated_at, ei.updated_at);\n /**\n * TODO:\n * Switch to using syncMap. 
If the ei id is in the map its an existing merge, else its a new merge.\n */\n try {\n const { found, eo, eiId } = await this.find(storage, userId, ei, syncMap, trx);\n if (found) {\n if (await eo.mergeExisting(storage, since, ei, syncMap, trx)) {\n updates++;\n }\n }\n else {\n await eo.mergeNew(storage, userId, syncMap, trx);\n inserts++;\n }\n if (eiId > -1)\n this.updateSyncMap(this.idMap, eiId, eo.id);\n }\n catch (eu) {\n throw eu;\n }\n }\n return { inserts, updates };\n }\n}\nexports.MergeEntity = MergeEntity;\n//# sourceMappingURL=MergeEntity.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/storage/schema/entities/MergeEntity.js?\n}"); /***/ }), @@ -4151,17 +4041,6 @@ /***/ }), -/***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/ReaderUint8Array.js": -/*!*************************************************************************************!*\ - !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/ReaderUint8Array.js ***! - \*************************************************************************************/ -/***/ ((__unused_webpack_module, exports, __webpack_require__) => { - -"use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.ReaderUint8Array = void 0;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst utilityHelpers_noBuffer_1 = __webpack_require__(/*! 
./utilityHelpers.noBuffer */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.noBuffer.js\");\nclass ReaderUint8Array {\n static makeReader(bin, pos = 0) {\n if (bin instanceof Uint8Array) {\n return new ReaderUint8Array(bin, pos);\n }\n if (Array.isArray(bin)) {\n return new sdk_1.Utils.Reader(bin, pos);\n }\n throw new Error('ReaderUint8Array.makeReader: bin must be Uint8Array or number[]');\n }\n constructor(bin = new Uint8Array(0), pos = 0) {\n this.bin = bin;\n this.pos = pos;\n this.length = bin.length;\n }\n eof() {\n return this.pos >= this.length;\n }\n read(len = this.length) {\n const start = this.pos;\n const end = this.pos + len;\n this.pos = end;\n return this.bin.slice(start, end);\n }\n readReverse(len = this.length) {\n const buf2 = new Uint8Array(len);\n for (let i = 0; i < len; i++) {\n buf2[i] = this.bin[this.pos + len - 1 - i];\n }\n this.pos += len;\n return buf2;\n }\n readUInt8() {\n const val = this.bin[this.pos];\n this.pos += 1;\n return val;\n }\n readInt8() {\n const val = this.bin[this.pos];\n this.pos += 1;\n // If the sign bit is set, convert to negative value\n return (val & 0x80) !== 0 ? val - 0x100 : val;\n }\n readUInt16BE() {\n const val = (this.bin[this.pos] << 8) | this.bin[this.pos + 1];\n this.pos += 2;\n return val;\n }\n readInt16BE() {\n const val = this.readUInt16BE();\n // If the sign bit is set, convert to negative value\n return (val & 0x8000) !== 0 ? val - 0x10000 : val;\n }\n readUInt16LE() {\n const val = this.bin[this.pos] | (this.bin[this.pos + 1] << 8);\n this.pos += 2;\n return val;\n }\n readInt16LE() {\n const val = this.readUInt16LE();\n // If the sign bit is set, convert to negative value\n const x = (val & 0x8000) !== 0 ? 
val - 0x10000 : val;\n return x;\n }\n readUInt32BE() {\n const val = this.bin[this.pos] * 0x1000000 + // Shift the first byte by 24 bits\n ((this.bin[this.pos + 1] << 16) | // Shift the second byte by 16 bits\n (this.bin[this.pos + 2] << 8) | // Shift the third byte by 8 bits\n this.bin[this.pos + 3]); // The fourth byte\n this.pos += 4;\n return val;\n }\n readInt32BE() {\n const val = this.readUInt32BE();\n // If the sign bit is set, convert to negative value\n return (val & 0x80000000) !== 0 ? val - 0x100000000 : val;\n }\n readUInt32LE() {\n const val = (this.bin[this.pos] |\n (this.bin[this.pos + 1] << 8) |\n (this.bin[this.pos + 2] << 16) |\n (this.bin[this.pos + 3] << 24)) >>>\n 0;\n this.pos += 4;\n return val;\n }\n readInt32LE() {\n const val = this.readUInt32LE();\n // Explicitly check if the sign bit is set and then convert to a negative value\n return (val & 0x80000000) !== 0 ? val - 0x100000000 : val;\n }\n readUInt64BEBn() {\n const bin = (0, utilityHelpers_noBuffer_1.asArray)(this.bin.slice(this.pos, this.pos + 8));\n const bn = new sdk_1.BigNumber(bin);\n this.pos = this.pos + 8;\n return bn;\n }\n readUInt64LEBn() {\n const bin = (0, utilityHelpers_noBuffer_1.asArray)(this.readReverse(8));\n const bn = new sdk_1.BigNumber(bin);\n return bn;\n }\n readInt64LEBn() {\n const OverflowInt64 = new sdk_1.BigNumber(2).pow(new sdk_1.BigNumber(63));\n const OverflowUint64 = new sdk_1.BigNumber(2).pow(new sdk_1.BigNumber(64));\n const bin = (0, utilityHelpers_noBuffer_1.asArray)(this.readReverse(8));\n let bn = new sdk_1.BigNumber(bin);\n if (bn.gte(OverflowInt64)) {\n bn = bn.sub(OverflowUint64); // Adjust for negative numbers\n }\n return bn;\n }\n readVarIntNum(signed = true) {\n const first = this.readUInt8();\n let bn;\n switch (first) {\n case 0xfd:\n return this.readUInt16LE();\n case 0xfe:\n return this.readUInt32LE();\n case 0xff:\n bn = signed ? 
this.readInt64LEBn() : this.readUInt64LEBn();\n if (bn.lte(new sdk_1.BigNumber(2).pow(new sdk_1.BigNumber(53)))) {\n return bn.toNumber();\n }\n else {\n throw new Error('number too large to retain precision - use readVarIntBn');\n }\n default:\n return first;\n }\n }\n readVarInt() {\n const first = this.bin[this.pos];\n switch (first) {\n case 0xfd:\n return this.read(1 + 2);\n case 0xfe:\n return this.read(1 + 4);\n case 0xff:\n return this.read(1 + 8);\n default:\n return this.read(1);\n }\n }\n readVarIntBn() {\n const first = this.readUInt8();\n switch (first) {\n case 0xfd:\n return new sdk_1.BigNumber(this.readUInt16LE());\n case 0xfe:\n return new sdk_1.BigNumber(this.readUInt32LE());\n case 0xff:\n return this.readUInt64LEBn();\n default:\n return new sdk_1.BigNumber(first);\n }\n }\n}\nexports.ReaderUint8Array = ReaderUint8Array;\n//# sourceMappingURL=ReaderUint8Array.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/ReaderUint8Array.js?\n}"); - -/***/ }), - /***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/ScriptTemplateBRC29.js": /*!****************************************************************************************!*\ !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/ScriptTemplateBRC29.js ***! @@ -4169,7 +4048,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.ScriptTemplateBRC29 = exports.brc29ProtocolID = void 0;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst utilityHelpers_1 = __webpack_require__(/*! 
./utilityHelpers */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js\");\nexports.brc29ProtocolID = [2, '3241645161d8'];\n/**\n * Simple Authenticated BSV P2PKH Payment Protocol\n * https://brc.dev/29\n */\nclass ScriptTemplateBRC29 {\n constructor(params) {\n this.params = params;\n /**\n * P2PKH unlock estimateLength is a constant\n */\n this.unlockLength = 108;\n this.p2pkh = new sdk_1.P2PKH();\n (0, utilityHelpers_1.verifyTruthy)(params.derivationPrefix);\n (0, utilityHelpers_1.verifyTruthy)(params.derivationSuffix);\n }\n getKeyID() {\n return `${this.params.derivationPrefix} ${this.params.derivationSuffix}`;\n }\n getKeyDeriver(privKey) {\n if (typeof privKey === 'string')\n privKey = sdk_1.PrivateKey.fromHex(privKey);\n if (!this.params.keyDeriver || this.params.keyDeriver.rootKey.toHex() !== privKey.toHex())\n return new sdk_1.CachedKeyDeriver(privKey);\n return this.params.keyDeriver;\n }\n lock(lockerPrivKey, unlockerPubKey) {\n const address = this.getKeyDeriver(lockerPrivKey)\n .derivePublicKey(exports.brc29ProtocolID, this.getKeyID(), unlockerPubKey, false)\n .toAddress();\n const r = this.p2pkh.lock(address);\n return r;\n }\n unlock(unlockerPrivKey, lockerPubKey, sourceSatoshis, lockingScript) {\n const derivedPrivateKey = this.getKeyDeriver(unlockerPrivKey)\n .derivePrivateKey(exports.brc29ProtocolID, this.getKeyID(), lockerPubKey)\n .toHex();\n const r = this.p2pkh.unlock((0, utilityHelpers_1.asBsvSdkPrivateKey)(derivedPrivateKey), 'all', false, sourceSatoshis, lockingScript);\n return r;\n }\n}\nexports.ScriptTemplateBRC29 = ScriptTemplateBRC29;\n//# sourceMappingURL=ScriptTemplateBRC29.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/ScriptTemplateBRC29.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.ScriptTemplateBRC29 = exports.brc29ProtocolID = void 0;\nconst index_client_1 = __webpack_require__(/*! 
./index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/index.client.js\");\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nexports.brc29ProtocolID = [2, '3241645161d8'];\n/**\n * Simple Authenticated BSV P2PKH Payment Protocol\n * https://brc.dev/29\n */\nclass ScriptTemplateBRC29 {\n constructor(params) {\n this.params = params;\n /**\n * P2PKH unlock estimateLength is a constant\n */\n this.unlockLength = 108;\n this.p2pkh = new sdk_1.P2PKH();\n (0, index_client_1.verifyTruthy)(params.derivationPrefix);\n (0, index_client_1.verifyTruthy)(params.derivationSuffix);\n }\n getKeyID() {\n return `${this.params.derivationPrefix} ${this.params.derivationSuffix}`;\n }\n getKeyDeriver(privKey) {\n if (typeof privKey === 'string')\n privKey = sdk_1.PrivateKey.fromHex(privKey);\n if (!this.params.keyDeriver || this.params.keyDeriver.rootKey.toHex() !== privKey.toHex())\n return new sdk_1.CachedKeyDeriver(privKey);\n return this.params.keyDeriver;\n }\n lock(lockerPrivKey, unlockerPubKey) {\n const address = this.getKeyDeriver(lockerPrivKey)\n .derivePublicKey(exports.brc29ProtocolID, this.getKeyID(), unlockerPubKey, false)\n .toAddress();\n const r = this.p2pkh.lock(address);\n return r;\n }\n unlock(unlockerPrivKey, lockerPubKey, sourceSatoshis, lockingScript) {\n const derivedPrivateKey = this.getKeyDeriver(unlockerPrivKey)\n .derivePrivateKey(exports.brc29ProtocolID, this.getKeyID(), lockerPubKey)\n .toHex();\n const r = this.p2pkh.unlock((0, index_client_1.asBsvSdkPrivateKey)(derivedPrivateKey), 'all', false, sourceSatoshis, lockingScript);\n return r;\n }\n}\nexports.ScriptTemplateBRC29 = ScriptTemplateBRC29;\n//# sourceMappingURL=ScriptTemplateBRC29.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/ScriptTemplateBRC29.js?\n}"); /***/ }), @@ -4239,6 +4118,17 @@ /***/ }), +/***/ 
"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.buffer.js": +/*!******************************************************************************************!*\ + !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.buffer.js ***! + \******************************************************************************************/ +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.asBuffer = asBuffer;\nexports.asString = asString;\nexports.asArray = asArray;\n/**\n * Coerce a value to Buffer if currently encoded as a string or\n * @param val Buffer or string or number[]. If string, encoding param applies. If number[], Buffer.from constructor is used.\n * @param encoding defaults to 'hex'. Only applies to val of type string\n * @returns input val if it is a Buffer or new Buffer from string val\n * @publicbody\n */\nfunction asBuffer(val, encoding) {\n let b;\n if (Buffer.isBuffer(val))\n b = val;\n else if (typeof val === 'string')\n b = Buffer.from(val, encoding !== null && encoding !== void 0 ? encoding : 'hex');\n else\n b = Buffer.from(val);\n return b;\n}\n/**\n * Coerce a value to an encoded string if currently a Buffer or number[]\n * @param val Buffer or string or number[]. If string, encoding param applies. If number[], Buffer.from constructor is used.\n * @param encoding defaults to 'hex'\n * @returns input val if it is a string; or if number[], first converted to Buffer then as Buffer; if Buffer encoded using `encoding`\n * @publicbody\n */\nfunction asString(val, encoding) {\n if (Array.isArray(val))\n val = Buffer.from(val);\n return Buffer.isBuffer(val) ? val.toString(encoding !== null && encoding !== void 0 ? 
encoding : 'hex') : val;\n}\nfunction asArray(val, encoding) {\n let a;\n if (Array.isArray(val))\n a = val;\n else if (Buffer.isBuffer(val))\n a = Array.from(val);\n else\n a = Array.from(Buffer.from(val, encoding || 'hex'));\n return a;\n}\n//# sourceMappingURL=utilityHelpers.buffer.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.buffer.js?\n}"); + +/***/ }), + /***/ "./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js": /*!***********************************************************************************!*\ !*** ./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js ***! @@ -4246,7 +4136,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.logger = void 0;\nexports.getIdentityKey = getIdentityKey;\nexports.toWalletNetwork = toWalletNetwork;\nexports.makeAtomicBeef = makeAtomicBeef;\nexports.asBsvSdkTx = asBsvSdkTx;\nexports.asBsvSdkScript = asBsvSdkScript;\nexports.asBsvSdkPrivateKey = asBsvSdkPrivateKey;\nexports.asBsvSdkPublickKey = asBsvSdkPublickKey;\nexports.verifyTruthy = verifyTruthy;\nexports.verifyHexString = verifyHexString;\nexports.verifyOptionalHexString = verifyOptionalHexString;\nexports.verifyNumber = verifyNumber;\nexports.verifyInteger = verifyInteger;\nexports.verifyId = verifyId;\nexports.verifyOneOrNone = verifyOneOrNone;\nexports.verifyOne = verifyOne;\nexports.wait = wait;\nexports.randomBytes = randomBytes;\nexports.randomBytesHex = randomBytesHex;\nexports.randomBytesBase64 = randomBytesBase64;\nexports.validateSecondsSinceEpoch = validateSecondsSinceEpoch;\nexports.arraysEqual = arraysEqual;\nexports.optionalArraysEqual = optionalArraysEqual;\nexports.maxDate = maxDate;\nexports.sha256Hash = sha256Hash;\nexports.doubleSha256LE = doubleSha256LE;\nexports.doubleSha256BE = doubleSha256BE;\nconst sdk_1 = 
__webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst utilityHelpers_noBuffer_1 = __webpack_require__(/*! ./utilityHelpers.noBuffer */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.noBuffer.js\");\nconst WERR_errors_1 = __webpack_require__(/*! ../sdk/WERR_errors */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/sdk/WERR_errors.js\");\nasync function getIdentityKey(wallet) {\n return (await wallet.getPublicKey({ identityKey: true })).publicKey;\n}\nfunction toWalletNetwork(chain) {\n return chain === 'main' ? 'mainnet' : 'testnet';\n}\nfunction makeAtomicBeef(tx, beef) {\n if (Array.isArray(beef))\n beef = sdk_1.Beef.fromBinary(beef);\n beef.mergeTransaction(tx);\n return beef.toBinaryAtomic(tx.id('hex'));\n}\n/**\n * Coerce a bsv transaction encoded as a hex string, serialized array, or Transaction to Transaction\n * If tx is already a Transaction, just return it.\n * @publicbody\n */\nfunction asBsvSdkTx(tx) {\n if (Array.isArray(tx)) {\n tx = sdk_1.Transaction.fromBinary(tx);\n }\n else if (typeof tx === 'string') {\n tx = sdk_1.Transaction.fromHex(tx);\n }\n return tx;\n}\n/**\n * Coerce a bsv script encoded as a hex string, serialized array, or Script to Script\n * If script is already a Script, just return it.\n * @publicbody\n */\nfunction asBsvSdkScript(script) {\n if (Array.isArray(script)) {\n script = sdk_1.Script.fromBinary(script);\n }\n else if (typeof script === 'string') {\n script = sdk_1.Script.fromHex(script);\n }\n return script;\n}\n/**\n * @param privKey bitcoin private key in 32 byte hex string form\n * @returns @bsv/sdk PrivateKey\n */\nfunction asBsvSdkPrivateKey(privKey) {\n return sdk_1.PrivateKey.fromString(privKey, 'hex');\n}\n/**\n * @param pubKey bitcoin public key in standard compressed key hex string form\n * @returns @bsv/sdk PublicKey\n */\nfunction asBsvSdkPublickKey(pubKey) {\n return sdk_1.PublicKey.fromString(pubKey);\n}\n/**\n * Helper function.\n *\n * 
Verifies that a possibly optional value has a value.\n */\nfunction verifyTruthy(v, description) {\n if (!v)\n throw new WERR_errors_1.WERR_INTERNAL(description !== null && description !== void 0 ? description : 'A truthy value is required.');\n return v;\n}\n/**\n * Helper function.\n *\n * Verifies that a hex string is trimmed and lower case.\n */\nfunction verifyHexString(v) {\n if (typeof v !== 'string')\n throw new WERR_errors_1.WERR_INTERNAL('A string is required.');\n v = v.trim().toLowerCase();\n return v;\n}\n/**\n * Helper function.\n *\n * Verifies that an optional or null hex string is undefined or a trimmed lowercase string.\n */\nfunction verifyOptionalHexString(v) {\n if (!v)\n return undefined;\n return verifyHexString(v);\n}\n/**\n * Helper function.\n *\n * Verifies that an optional or null number has a numeric value.\n */\nfunction verifyNumber(v) {\n if (typeof v !== 'number')\n throw new WERR_errors_1.WERR_INTERNAL('A number is required.');\n return v;\n}\n/**\n * Helper function.\n *\n * Verifies that an optional or null number has a numeric value.\n */\nfunction verifyInteger(v) {\n if (typeof v !== 'number' || !Number.isInteger(v))\n throw new WERR_errors_1.WERR_INTERNAL('An integer is required.');\n return v;\n}\n/**\n * Helper function.\n *\n * Verifies that a database record identifier is an integer greater than zero.\n */\nfunction verifyId(id) {\n id = verifyInteger(id);\n if (id < 1)\n throw new WERR_errors_1.WERR_INTERNAL(`id must be valid integer greater than zero.`);\n return id;\n}\n/**\n * Helper function.\n *\n * @throws WERR_BAD_REQUEST if results has length greater than one.\n *\n * @returns results[0] or undefined if length is zero.\n */\nfunction verifyOneOrNone(results) {\n if (results.length > 1)\n throw new WERR_errors_1.WERR_BAD_REQUEST('Result must be unique.');\n return results[0];\n}\n/**\n * Helper function.\n *\n * @throws WERR_BAD_REQUEST if results has length other than one.\n *\n * @returns results[0].\n 
*/\nfunction verifyOne(results, errorDescrition) {\n if (results.length !== 1)\n throw new WERR_errors_1.WERR_BAD_REQUEST(errorDescrition !== null && errorDescrition !== void 0 ? errorDescrition : 'Result must exist and be unique.');\n return results[0];\n}\n/**\n * Returns an await'able Promise that resolves in the given number of msecs.\n * @param msecs number of milliseconds to wait before resolving the promise.\n * Must be greater than zero and less than 2 minutes (120,000 msecs)\n * @publicbody\n */\nfunction wait(msecs) {\n const MIN_WAIT = 0;\n const MAX_WAIT = 2 * 60 * 1000; // maximum allowed wait in ms (2 minutes)\n if (typeof msecs !== 'number' || !Number.isFinite(msecs) || isNaN(msecs) || msecs < MIN_WAIT || msecs > MAX_WAIT) {\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('msecs', `a number between ${MIN_WAIT} and ${MAX_WAIT} msecs, not ${msecs}.`);\n }\n return new Promise(resolve => setTimeout(resolve, msecs));\n}\n/**\n * @returns count cryptographically secure random bytes as array of bytes\n */\nfunction randomBytes(count) {\n return (0, sdk_1.Random)(count);\n}\n/**\n * @returns count cryptographically secure random bytes as hex encoded string\n */\nfunction randomBytesHex(count) {\n return sdk_1.Utils.toHex((0, sdk_1.Random)(count));\n}\n/**\n * @returns count cryptographically secure random bytes as base64 encoded string\n */\nfunction randomBytesBase64(count) {\n return sdk_1.Utils.toBase64((0, sdk_1.Random)(count));\n}\nfunction validateSecondsSinceEpoch(time) {\n const date = new Date(time * 1000);\n if (date.getTime() / 1000 !== time || time < 1600000000 || time > 100000000000) {\n throw new WERR_errors_1.WERR_INVALID_PARAMETER('time', `valid \"since epoch\" unix time`);\n }\n return date;\n}\n/**\n * Compares lengths and direct equality of values.\n * @param arr1\n * @param arr2\n * @returns\n */\nfunction arraysEqual(arr1, arr2) {\n if (arr1.length !== arr2.length)\n return false;\n for (let i = 0; i < arr1.length; i++) {\n if (arr1[i] 
!== arr2[i])\n return false;\n }\n return true;\n}\nfunction optionalArraysEqual(arr1, arr2) {\n if (!arr1 && !arr2)\n return true;\n if (!arr1 || !arr2)\n return false;\n return arraysEqual(arr1, arr2);\n}\nfunction maxDate(d1, d2) {\n if (d1 && d2) {\n if (d1 > d2)\n return d1;\n return d2;\n }\n if (d1)\n return d1;\n if (d2)\n return d2;\n return undefined;\n}\n/**\n * Calculate the SHA256 hash of an array of bytes\n * @returns sha256 hash of buffer contents.\n * @publicbody\n */\nfunction sha256Hash(data) {\n if (!Array.isArray(data)) {\n data = (0, utilityHelpers_noBuffer_1.asArray)(data);\n }\n const first = new sdk_1.Hash.SHA256().update(data).digest();\n return first;\n}\n/**\n * Calculate the SHA256 hash of the SHA256 hash of an array of bytes.\n * @param data an array of bytes\n * @returns double sha256 hash of data, byte 0 of hash first.\n * @publicbody\n */\nfunction doubleSha256LE(data) {\n if (!Array.isArray(data)) {\n data = (0, utilityHelpers_noBuffer_1.asArray)(data);\n }\n const first = new sdk_1.Hash.SHA256().update(data).digest();\n const second = new sdk_1.Hash.SHA256().update(first).digest();\n return second;\n}\n/**\n * Calculate the SHA256 hash of the SHA256 hash of an array of bytes.\n * @param data is an array of bytes.\n * @returns reversed (big-endian) double sha256 hash of data, byte 31 of hash first.\n * @publicbody\n */\nfunction doubleSha256BE(data) {\n return doubleSha256LE(data).reverse();\n}\n/**\n * Logging function to handle logging based on running in jest \"single test\" mode,\n *\n * @param {string} message - The main message to log.\n * @param {...any} optionalParams - Additional parameters to log (optional).\n */\nconst logger = (message, ...optionalParams) => {\n const isSingleTest = process.argv.some(arg => arg === '--testNamePattern' || arg === '-t');\n if (isSingleTest) {\n console.log(message, ...optionalParams);\n }\n};\nexports.logger = logger;\n//# sourceMappingURL=utilityHelpers.js.map\n\n//# 
sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.getIdentityKey = getIdentityKey;\nexports.toWalletNetwork = toWalletNetwork;\nexports.makeAtomicBeef = makeAtomicBeef;\nexports.asBsvSdkTx = asBsvSdkTx;\nexports.asBsvSdkScript = asBsvSdkScript;\nexports.asBsvSdkPrivateKey = asBsvSdkPrivateKey;\nexports.asBsvSdkPublickKey = asBsvSdkPublickKey;\nexports.verifyTruthy = verifyTruthy;\nexports.verifyHexString = verifyHexString;\nexports.verifyOptionalHexString = verifyOptionalHexString;\nexports.verifyNumber = verifyNumber;\nexports.verifyInteger = verifyInteger;\nexports.verifyId = verifyId;\nexports.verifyOneOrNone = verifyOneOrNone;\nexports.verifyOne = verifyOne;\nexports.wait = wait;\nexports.randomBytes = randomBytes;\nexports.randomBytesHex = randomBytesHex;\nexports.randomBytesBase64 = randomBytesBase64;\nexports.validateSecondsSinceEpoch = validateSecondsSinceEpoch;\nexports.arraysEqual = arraysEqual;\nexports.optionalArraysEqual = optionalArraysEqual;\nexports.maxDate = maxDate;\nexports.sha256Hash = sha256Hash;\nexports.doubleSha256LE = doubleSha256LE;\nexports.doubleSha256BE = doubleSha256BE;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\nconst index_client_1 = __webpack_require__(/*! ../index.client */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.client.js\");\nasync function getIdentityKey(wallet) {\n return (await wallet.getPublicKey({ identityKey: true })).publicKey;\n}\nfunction toWalletNetwork(chain) {\n return chain === 'main' ? 
'mainnet' : 'testnet';\n}\nfunction makeAtomicBeef(tx, beef) {\n if (Array.isArray(beef))\n beef = sdk_1.Beef.fromBinary(beef);\n beef.mergeTransaction(tx);\n return beef.toBinaryAtomic(tx.id('hex'));\n}\n/**\n * Coerce a bsv transaction encoded as a hex string, serialized array, or Transaction to Transaction\n * If tx is already a Transaction, just return it.\n * @publicbody\n */\nfunction asBsvSdkTx(tx) {\n if (Array.isArray(tx)) {\n tx = sdk_1.Transaction.fromBinary(tx);\n }\n else if (typeof tx === 'string') {\n tx = sdk_1.Transaction.fromHex(tx);\n }\n return tx;\n}\n/**\n * Coerce a bsv script encoded as a hex string, serialized array, or Script to Script\n * If script is already a Script, just return it.\n * @publicbody\n */\nfunction asBsvSdkScript(script) {\n if (Array.isArray(script)) {\n script = sdk_1.Script.fromBinary(script);\n }\n else if (typeof script === 'string') {\n script = sdk_1.Script.fromHex(script);\n }\n return script;\n}\n/**\n * @param privKey bitcoin private key in 32 byte hex string form\n * @returns @bsv/sdk PrivateKey\n */\nfunction asBsvSdkPrivateKey(privKey) {\n return sdk_1.PrivateKey.fromString(privKey, 'hex');\n}\n/**\n * @param pubKey bitcoin public key in standard compressed key hex string form\n * @returns @bsv/sdk PublicKey\n */\nfunction asBsvSdkPublickKey(pubKey) {\n return sdk_1.PublicKey.fromString(pubKey);\n}\n/**\n * Helper function.\n *\n * Verifies that a possibly optional value has a value.\n */\nfunction verifyTruthy(v, description) {\n if (!v)\n throw new index_client_1.sdk.WERR_INTERNAL(description !== null && description !== void 0 ? 
description : 'A truthy value is required.');\n return v;\n}\n/**\n * Helper function.\n *\n * Verifies that a hex string is trimmed and lower case.\n */\nfunction verifyHexString(v) {\n if (typeof v !== 'string')\n throw new index_client_1.sdk.WERR_INTERNAL('A string is required.');\n v = v.trim().toLowerCase();\n return v;\n}\n/**\n * Helper function.\n *\n * Verifies that an optional or null hex string is undefined or a trimmed lowercase string.\n */\nfunction verifyOptionalHexString(v) {\n if (!v)\n return undefined;\n return verifyHexString(v);\n}\n/**\n * Helper function.\n *\n * Verifies that an optional or null number has a numeric value.\n */\nfunction verifyNumber(v) {\n if (typeof v !== 'number')\n throw new index_client_1.sdk.WERR_INTERNAL('A number is required.');\n return v;\n}\n/**\n * Helper function.\n *\n * Verifies that an optional or null number has a numeric value.\n */\nfunction verifyInteger(v) {\n if (typeof v !== 'number' || !Number.isInteger(v))\n throw new index_client_1.sdk.WERR_INTERNAL('An integer is required.');\n return v;\n}\n/**\n * Helper function.\n *\n * Verifies that a database record identifier is an integer greater than zero.\n */\nfunction verifyId(id) {\n id = verifyInteger(id);\n if (id < 1)\n throw new index_client_1.sdk.WERR_INTERNAL(`id must be valid integer greater than zero.`);\n return id;\n}\n/**\n * Helper function.\n *\n * @throws WERR_BAD_REQUEST if results has length greater than one.\n *\n * @returns results[0] or undefined if length is zero.\n */\nfunction verifyOneOrNone(results) {\n if (results.length > 1)\n throw new index_client_1.sdk.WERR_BAD_REQUEST('Result must be unique.');\n return results[0];\n}\n/**\n * Helper function.\n *\n * @throws WERR_BAD_REQUEST if results has length other than one.\n *\n * @returns results[0].\n */\nfunction verifyOne(results, errorDescrition) {\n if (results.length !== 1)\n throw new index_client_1.sdk.WERR_BAD_REQUEST(errorDescrition !== null && errorDescrition !== void 0 
? errorDescrition : 'Result must exist and be unique.');\n return results[0];\n}\n/**\n * Returns an await'able Promise that resolves in the given number of msecs.\n * @publicbody\n */\nfunction wait(msecs) {\n return new Promise(resolve => setTimeout(resolve, msecs));\n}\n/**\n * @returns count cryptographically secure random bytes as array of bytes\n */\nfunction randomBytes(count) {\n return (0, sdk_1.Random)(count);\n}\n/**\n * @returns count cryptographically secure random bytes as hex encoded string\n */\nfunction randomBytesHex(count) {\n return sdk_1.Utils.toHex((0, sdk_1.Random)(count));\n}\n/**\n * @returns count cryptographically secure random bytes as base64 encoded string\n */\nfunction randomBytesBase64(count) {\n return sdk_1.Utils.toBase64((0, sdk_1.Random)(count));\n}\nfunction validateSecondsSinceEpoch(time) {\n const date = new Date(time * 1000);\n if (date.getTime() / 1000 !== time || time < 1600000000 || time > 100000000000) {\n throw new index_client_1.sdk.WERR_INVALID_PARAMETER('time', `valid \"since epoch\" unix time`);\n }\n return date;\n}\n/**\n * Compares lengths and direct equality of values.\n * @param arr1\n * @param arr2\n * @returns\n */\nfunction arraysEqual(arr1, arr2) {\n if (arr1.length !== arr2.length)\n return false;\n for (let i = 0; i < arr1.length; i++) {\n if (arr1[i] !== arr2[i])\n return false;\n }\n return true;\n}\nfunction optionalArraysEqual(arr1, arr2) {\n if (!arr1 && !arr2)\n return true;\n if (!arr1 || !arr2)\n return false;\n return arraysEqual(arr1, arr2);\n}\nfunction maxDate(d1, d2) {\n if (d1 && d2) {\n if (d1 > d2)\n return d1;\n return d2;\n }\n if (d1)\n return d1;\n if (d2)\n return d2;\n return undefined;\n}\n/**\n * Calculate the SHA256 hash of an array of bytes\n * @returns sha256 hash of buffer contents.\n * @publicbody\n */\nfunction sha256Hash(data) {\n const first = new sdk_1.Hash.SHA256().update(data).digest();\n return first;\n}\n/**\n * Calculate the SHA256 hash of the SHA256 hash of an array 
of bytes.\n * @param data an array of bytes\n * @returns double sha256 hash of data, byte 0 of hash first.\n * @publicbody\n */\nfunction doubleSha256LE(data) {\n const first = new sdk_1.Hash.SHA256().update(data).digest();\n const second = new sdk_1.Hash.SHA256().update(first).digest();\n return second;\n}\n/**\n * Calculate the SHA256 hash of the SHA256 hash of an array of bytes.\n * @param data is an array of bytes.\n * @returns reversed (big-endian) double sha256 hash of data, byte 31 of hash first.\n * @publicbody\n */\nfunction doubleSha256BE(data) {\n return doubleSha256LE(data).reverse();\n}\n//# sourceMappingURL=utilityHelpers.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.js?\n}"); /***/ }), @@ -4257,7 +4147,7 @@ /***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.asString = asString;\nexports.asArray = asArray;\nexports.asUint8Array = asUint8Array;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\n/**\n * Convert a value to an encoded string if currently an encoded string or number[] or Uint8Array.\n * @param val string or number[] or Uint8Array. If string, encoding must be hex. If number[], each value must be 0..255.\n * @param enc optional encoding type if val is string, defaults to 'hex'. Can be 'hex', 'utf8', or 'base64'.\n * @param returnEnc optional encoding type for returned string if different from `enc`, defaults to 'hex'. Can be 'hex', 'utf8', or 'base64'.\n * @returns hex encoded string representation of val.\n * @publicbody\n */\nfunction asString(val, enc, returnEnc) {\n enc || (enc = 'hex');\n returnEnc || (returnEnc = enc);\n if (typeof val === 'string') {\n if (enc === returnEnc)\n return val;\n val = asUint8Array(val, enc);\n }\n let v = Array.isArray(val) ? 
val : Array.from(val);\n switch (returnEnc) {\n case 'utf8':\n return sdk_1.Utils.toUTF8(v);\n case 'base64':\n return sdk_1.Utils.toBase64(v);\n }\n return sdk_1.Utils.toHex(v);\n}\n/**\n * Convert a value to number[] if currently an encoded string or number[] or Uint8Array.\n * @param val string or number[] or Uint8Array. If string, encoding must be hex. If number[], each value must be 0..255.\n * @param enc optional encoding type if val is string, defaults to 'hex'. Can be 'hex', 'utf8', or 'base64'.\n * @returns number[] array of byte values representation of val.\n * @publicbody\n */\nfunction asArray(val, enc) {\n if (Array.isArray(val))\n return val;\n if (typeof val !== 'string')\n return Array.from(val);\n enc || (enc = 'hex');\n let a = sdk_1.Utils.toArray(val, enc);\n return a;\n}\n/**\n * Convert a value to Uint8Array if currently an encoded string or number[] or Uint8Array.\n * @param val string or number[] or Uint8Array. If string, encoding must be hex. If number[], each value must be 0..255.\n * @param enc optional encoding type if val is string, defaults to 'hex'. Can be 'hex', 'utf8', or 'base64'.\n * @returns Uint8Array representation of val.\n * @publicbody\n */\nfunction asUint8Array(val, enc) {\n if (Array.isArray(val))\n return Uint8Array.from(val);\n if (typeof val !== 'string')\n return val;\n enc || (enc = 'hex');\n let a = sdk_1.Utils.toArray(val, enc);\n return Uint8Array.from(a);\n}\n//# sourceMappingURL=utilityHelpers.noBuffer.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.noBuffer.js?\n}"); +eval("{\nObject.defineProperty(exports, \"__esModule\", ({ value: true }));\nexports.asString = asString;\nexports.asArray = asArray;\nconst sdk_1 = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/cjs/mod.js\");\n/**\n * Coerce a value to a hex encoded string if currently a hex encoded string or number[]\n * @param val string or number[]. 
If string, encoding must be hex. If number[], each value must be 0..255.\n * @returns input val if it is a string; or if number[], converts byte values to hex\n * @publicbody\n */\nfunction asString(val) {\n if (typeof val === 'string')\n return val;\n return sdk_1.Utils.toHex(val);\n}\nfunction asArray(val) {\n if (Array.isArray(val))\n return val;\n let a = sdk_1.Utils.toArray(val, 'hex');\n return a;\n}\n//# sourceMappingURL=utilityHelpers.noBuffer.js.map\n\n//# sourceURL=webpack://$/./node_modules/@bsv/wallet-toolbox-mobile/out/src/utility/utilityHelpers.noBuffer.js?\n}"); /***/ }), @@ -4312,7 +4202,7 @@ /***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => { "use strict"; -eval("{__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ AuthenticationManager: () => (/* binding */ AuthenticationManager)\n/* harmony export */ });\n/* harmony import */ var _bsv_sdk__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/esm/mod.js\");\n/* harmony import */ var _bsv_wallet_toolbox_mobile__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! @bsv/wallet-toolbox-mobile */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.mobile.js\");\n/* harmony import */ var _bsv_wallet_toolbox_mobile__WEBPACK_IMPORTED_MODULE_1___default = /*#__PURE__*/__webpack_require__.n(_bsv_wallet_toolbox_mobile__WEBPACK_IMPORTED_MODULE_1__);\n/* harmony import */ var _config__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./config */ \"./src/config.ts\");\n/* harmony import */ var _logger__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./logger */ \"./src/logger.ts\");\nfunction _typeof(o) { \"@babel/helpers - typeof\"; return _typeof = \"function\" == typeof Symbol && \"symbol\" == typeof Symbol.iterator ? 
function (o) { return typeof o; } : function (o) { return o && \"function\" == typeof Symbol && o.constructor === Symbol && o !== Symbol.prototype ? \"symbol\" : typeof o; }, _typeof(o); }\nfunction _regenerator() { /*! regenerator-runtime -- Copyright (c) 2014-present, Facebook, Inc. -- license (MIT): https://github.com/babel/babel/blob/main/packages/babel-helpers/LICENSE */ var e, t, r = \"function\" == typeof Symbol ? Symbol : {}, n = r.iterator || \"@@iterator\", o = r.toStringTag || \"@@toStringTag\"; function i(r, n, o, i) { var c = n && n.prototype instanceof Generator ? n : Generator, u = Object.create(c.prototype); return _regeneratorDefine2(u, \"_invoke\", function (r, n, o) { var i, c, u, f = 0, p = o || [], y = !1, G = { p: 0, n: 0, v: e, a: d, f: d.bind(e, 4), d: function d(t, r) { return i = t, c = 0, u = e, G.n = r, a; } }; function d(r, n) { for (c = r, u = n, t = 0; !y && f && !o && t < p.length; t++) { var o, i = p[t], d = G.p, l = i[2]; r > 3 ? (o = l === n) && (u = i[(c = i[4]) ? 5 : (c = 3, 3)], i[4] = i[5] = e) : i[0] <= d && ((o = r < 2 && d < i[1]) ? (c = 0, G.v = n, G.n = i[1]) : d < l && (o = r < 3 || i[0] > n || n > l) && (i[4] = r, i[5] = n, G.n = l, c = 0)); } if (o || r > 1) return a; throw y = !0, n; } return function (o, p, l) { if (f > 1) throw TypeError(\"Generator is already running\"); for (y && 1 === p && d(p, l), c = p, u = l; (t = c < 2 ? e : u) || !y;) { i || (c ? c < 3 ? (c > 1 && (G.n = -1), d(c, u)) : G.n = u : G.v = u); try { if (f = 2, i) { if (c || (o = \"next\"), t = i[o]) { if (!(t = t.call(i, u))) throw TypeError(\"iterator result is not an object\"); if (!t.done) return t; u = t.value, c < 2 && (c = 0); } else 1 === c && (t = i[\"return\"]) && t.call(i), c < 2 && (u = TypeError(\"The iterator does not provide a '\" + o + \"' method\"), c = 1); i = e; } else if ((t = (y = G.n < 0) ? 
u : r.call(n, G)) !== a) break; } catch (t) { i = e, c = 1, u = t; } finally { f = 1; } } return { value: t, done: y }; }; }(r, o, i), !0), u; } var a = {}; function Generator() {} function GeneratorFunction() {} function GeneratorFunctionPrototype() {} t = Object.getPrototypeOf; var c = [][n] ? t(t([][n]())) : (_regeneratorDefine2(t = {}, n, function () { return this; }), t), u = GeneratorFunctionPrototype.prototype = Generator.prototype = Object.create(c); function f(e) { return Object.setPrototypeOf ? Object.setPrototypeOf(e, GeneratorFunctionPrototype) : (e.__proto__ = GeneratorFunctionPrototype, _regeneratorDefine2(e, o, \"GeneratorFunction\")), e.prototype = Object.create(u), e; } return GeneratorFunction.prototype = GeneratorFunctionPrototype, _regeneratorDefine2(u, \"constructor\", GeneratorFunctionPrototype), _regeneratorDefine2(GeneratorFunctionPrototype, \"constructor\", GeneratorFunction), GeneratorFunction.displayName = \"GeneratorFunction\", _regeneratorDefine2(GeneratorFunctionPrototype, o, \"GeneratorFunction\"), _regeneratorDefine2(u), _regeneratorDefine2(u, o, \"Generator\"), _regeneratorDefine2(u, n, function () { return this; }), _regeneratorDefine2(u, \"toString\", function () { return \"[object Generator]\"; }), (_regenerator = function _regenerator() { return { w: i, m: f }; })(); }\nfunction _regeneratorDefine2(e, r, n, t) { var i = Object.defineProperty; try { i({}, \"\", {}); } catch (e) { i = 0; } _regeneratorDefine2 = function _regeneratorDefine(e, r, n, t) { function o(r, n) { _regeneratorDefine2(e, r, function (e) { return this._invoke(r, n, e); }); } r ? i ? i(e, r, { value: n, enumerable: !t, configurable: !t, writable: !t }) : e[r] = n : (o(\"next\", 0), o(\"throw\", 1), o(\"return\", 2)); }, _regeneratorDefine2(e, r, n, t); }\nfunction asyncGeneratorStep(n, t, e, r, o, a, c) { try { var i = n[a](c), u = i.value; } catch (n) { return void e(n); } i.done ? 
t(u) : Promise.resolve(u).then(r, o); }\nfunction _asyncToGenerator(n) { return function () { var t = this, e = arguments; return new Promise(function (r, o) { var a = n.apply(t, e); function _next(n) { asyncGeneratorStep(a, r, o, _next, _throw, \"next\", n); } function _throw(n) { asyncGeneratorStep(a, r, o, _next, _throw, \"throw\", n); } _next(void 0); }); }; }\nfunction _classCallCheck(a, n) { if (!(a instanceof n)) throw new TypeError(\"Cannot call a class as a function\"); }\nfunction _defineProperties(e, r) { for (var t = 0; t < r.length; t++) { var o = r[t]; o.enumerable = o.enumerable || !1, o.configurable = !0, \"value\" in o && (o.writable = !0), Object.defineProperty(e, _toPropertyKey(o.key), o); } }\nfunction _createClass(e, r, t) { return r && _defineProperties(e.prototype, r), t && _defineProperties(e, t), Object.defineProperty(e, \"prototype\", { writable: !1 }), e; }\nfunction _defineProperty(e, r, t) { return (r = _toPropertyKey(r)) in e ? Object.defineProperty(e, r, { value: t, enumerable: !0, configurable: !0, writable: !0 }) : e[r] = t, e; }\nfunction _toPropertyKey(t) { var i = _toPrimitive(t, \"string\"); return \"symbol\" == _typeof(i) ? i : i + \"\"; }\nfunction _toPrimitive(t, r) { if (\"object\" != _typeof(t) || !t) return t; var e = t[Symbol.toPrimitive]; if (void 0 !== e) { var i = e.call(t, r || \"default\"); if (\"object\" != _typeof(i)) return i; throw new TypeError(\"@@toPrimitive must return a primitive value.\"); } return (\"string\" === r ? 
String : Number)(t); }\n\n\n\n\nvar AuthenticationManager = /*#__PURE__*/function () {\n // Key promise resolver/rejecter\n\n // Password retriver handlers\n\n function AuthenticationManager(eventManager) {\n _classCallCheck(this, AuthenticationManager);\n // Authentication properties\n _defineProperty(this, \"adminOriginator\", _config__WEBPACK_IMPORTED_MODULE_2__.ADMIN_ORIGINATOR);\n _defineProperty(this, \"selectedStorageUrl\", _config__WEBPACK_IMPORTED_MODULE_2__.DEFAULT_STORAGE_URL);\n _defineProperty(this, \"selectedNetwork\", _config__WEBPACK_IMPORTED_MODULE_2__.DEFAULT_CHAIN);\n _defineProperty(this, \"selectedWabUrl\", _config__WEBPACK_IMPORTED_MODULE_2__.DEFAULT_WAB_URL);\n this.eventManager = eventManager;\n // Register authmanager listeners\n this.eventManager.listen('setAdminOriginator', this);\n this.eventManager.listen('setSelectedStorageUrl', this);\n this.eventManager.listen('setSelectedNetwork', this);\n this.eventManager.listen('setSelectedWabUrl', this);\n this.eventManager.listen('setRecoveryKeySaver', this);\n this.eventManager.listen('setPasswordRetriver', this);\n this.eventManager.listen('initialize', this);\n this.eventManager.listen('loadSnapshot', this);\n this.eventManager.listen('startAuth', this);\n this.eventManager.listen('restartAuth', this);\n }\n return _createClass(AuthenticationManager, [{\n key: \"setAdminOriginator\",\n value: function setAdminOriginator(origin) {\n this.adminOriginator = origin;\n this.checkAndReadyEvent();\n return this.adminOriginator;\n }\n }, {\n key: \"setSelectedStorageUrl\",\n value: function setSelectedStorageUrl(url) {\n this.selectedStorageUrl = url;\n this.checkAndReadyEvent();\n return this.selectedStorageUrl;\n }\n }, {\n key: \"setSelectedNetwork\",\n value: function setSelectedNetwork(network) {\n this.selectedNetwork = network;\n this.checkAndReadyEvent();\n return this.selectedNetwork;\n }\n }, {\n key: \"setSelectedWabUrl\",\n value: function setSelectedWabUrl(url) {\n this.selectedWabUrl = 
url;\n this.checkAndReadyEvent();\n return this.selectedWabUrl;\n }\n }, {\n key: \"setRecoveryKeySaver\",\n value: function setRecoveryKeySaver() {\n var _this = this;\n this.recoveryKeySaver = function (key) {\n return new Promise(function (resolve, reject) {\n var keyAsStr = _bsv_sdk__WEBPACK_IMPORTED_MODULE_0__.Utils.toBase64(key);\n\n // When main thread saves key, resolve\n _this.keySaverResolver = resolve;\n // When main thread reject key, reject\n _this.keySaverRejecter = reject;\n\n // TODO:Send key as string to main thread\n // setRecoveryKey(keyAsStr)\n // setOpen(true)\n });\n };\n this.checkAndReadyEvent();\n return true;\n }\n }, {\n key: \"setPasswordRetriver\",\n value: function setPasswordRetriver() {\n var _this2 = this;\n this.passwordRetriever = function (reason, testFn) {\n return new Promise(function (resolvePromise, rejectPromise) {\n // TODO: Send password_reason event to main thread\n // Actions to perform\n // setReason(reason)\n\n // When main thread test password, call this method\n _this2.passwordTestFn = testFn;\n // When main thread resolves password, call this resolve method\n _this2.passwordResolver = resolvePromise;\n // When main thread rejects password, call this reject method\n _this2.passwordRejecter = rejectPromise;\n\n // TODO: Send password_retriving event to main thread\n // Actions to perform\n // setOpen(true)\n // manageFocus()\n });\n };\n this.checkAndReadyEvent();\n return true;\n }\n }, {\n key: \"isReady\",\n value: function isReady() {\n // Check if all property are defined\n console.log(this.adminOriginator, this.selectedStorageUrl, this.selectedNetwork, this.setSelectedWabUrl, this.recoveryKeySaver, this.passwordRetriever);\n if (!this.adminOriginator || !this.selectedStorageUrl || !this.selectedNetwork || !this.setSelectedWabUrl || !this.recoveryKeySaver || !this.passwordRetriever) {\n return false;\n }\n return true;\n }\n }, {\n key: \"checkAndReadyEvent\",\n value: function checkAndReadyEvent() {\n if 
(this.isReady()) {\n this.eventManager.send('authenticationReady');\n }\n }\n }, {\n key: \"initialize\",\n value: function initialize() {\n var _this3 = this;\n if (this.isReady()) {\n // Create network service based on selected network\n var networkPreset = this.selectedNetwork === 'main' ? 'mainnet' : 'testnet';\n\n // Create a LookupResolver instance\n var resolver = new _bsv_sdk__WEBPACK_IMPORTED_MODULE_0__.LookupResolver({\n networkPreset: networkPreset\n });\n\n // Create a broadcaster with proper network settings\n var broadcaster = new _bsv_sdk__WEBPACK_IMPORTED_MODULE_0__.SHIPBroadcaster(['tm_users'], {\n networkPreset: networkPreset\n });\n\n // Create a WAB Client with proper URL\n var wabClient = new _bsv_wallet_toolbox_mobile__WEBPACK_IMPORTED_MODULE_1__.WABClient(this.selectedWabUrl);\n\n // Create a phone interactor\n var phoneInteractor = new _bsv_wallet_toolbox_mobile__WEBPACK_IMPORTED_MODULE_1__.TwilioPhoneInteractor();\n this.authManager = new _bsv_wallet_toolbox_mobile__WEBPACK_IMPORTED_MODULE_1__.WalletAuthenticationManager(this.adminOriginator, function (primaryKey, privilegedKeyManager) {\n return _this3.buildWallet(primaryKey, privilegedKeyManager);\n }, new _bsv_wallet_toolbox_mobile__WEBPACK_IMPORTED_MODULE_1__.OverlayUMPTokenInteractor(resolver, broadcaster), this.recoveryKeySaver, this.passwordRetriever,\n // Type assertions needed due to interface mismatch between our WABClient and the expected SDK client\n wabClient, phoneInteractor);\n console.log(this.authManager);\n\n // Load snapshot\n return true;\n } else {\n _logger__WEBPACK_IMPORTED_MODULE_3__.Logger.log('Missing authentication properties');\n return false;\n }\n }\n }, {\n key: \"buildWallet\",\n value: function () {\n var _buildWallet = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee(primaryKey, privilegedKeyManager) {\n var chain, keyDeriver, storageManager, signer, services, client, _t;\n return _regenerator().w(function (_context) {\n while (1) switch 
(_context.p = _context.n) {\n case 0:\n _context.p = 0;\n // const newManagers = {} as any;\n chain = this.selectedNetwork;\n keyDeriver = new _bsv_sdk__WEBPACK_IMPORTED_MODULE_0__.KeyDeriver(new _bsv_sdk__WEBPACK_IMPORTED_MODULE_0__.PrivateKey(primaryKey));\n storageManager = new _bsv_wallet_toolbox_mobile__WEBPACK_IMPORTED_MODULE_1__.WalletStorageManager(keyDeriver.identityKey);\n signer = new _bsv_wallet_toolbox_mobile__WEBPACK_IMPORTED_MODULE_1__.WalletSigner(chain, keyDeriver, storageManager);\n services = new _bsv_wallet_toolbox_mobile__WEBPACK_IMPORTED_MODULE_1__.Services(chain);\n this.wallet = new _bsv_wallet_toolbox_mobile__WEBPACK_IMPORTED_MODULE_1__.Wallet(signer, services, undefined, privilegedKeyManager);\n\n // Use user-selected storage provider\n client = new _bsv_wallet_toolbox_mobile__WEBPACK_IMPORTED_MODULE_1__.StorageClient(this.wallet, this.selectedStorageUrl);\n _context.n = 1;\n return client.makeAvailable();\n case 1:\n _context.n = 2;\n return storageManager.addWalletStorageProvider(client);\n case 2:\n _context.n = 4;\n break;\n case 3:\n _context.p = 3;\n _t = _context.v;\n _logger__WEBPACK_IMPORTED_MODULE_3__.Logger.log('Error building wallet:', _t);\n return _context.a(2, null);\n case 4:\n return _context.a(2);\n }\n }, _callee, this, [[0, 3]]);\n }));\n function buildWallet(_x, _x2) {\n return _buildWallet.apply(this, arguments);\n }\n return buildWallet;\n }()\n }, {\n key: \"loadSnapshot\",\n value: function () {\n var _loadSnapshot = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee2(snap) {\n var _t2;\n return _regenerator().w(function (_context2) {\n while (1) switch (_context2.p = _context2.n) {\n case 0:\n console.log('snap', snap, _typeof(snap));\n _context2.p = 1;\n _context2.n = 2;\n return this.authManager.loadSnapshot(snap);\n case 2:\n _context2.n = 3;\n return this.authManager.waitForAuthentication({});\n case 3:\n return _context2.a(2, true);\n case 4:\n _context2.p = 4;\n _t2 = _context2.v;\n 
_logger__WEBPACK_IMPORTED_MODULE_3__.Logger.log('Wallet snapshot load failed:', _t2);\n return _context2.a(2, _t2);\n }\n }, _callee2, this, [[1, 4]]);\n }));\n function loadSnapshot(_x3) {\n return _loadSnapshot.apply(this, arguments);\n }\n return loadSnapshot;\n }()\n }, {\n key: \"startAuth\",\n value: function () {\n var _startAuth = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee3(phoneNumber) {\n var _t3;\n return _regenerator().w(function (_context3) {\n while (1) switch (_context3.p = _context3.n) {\n case 0:\n _context3.p = 0;\n _context3.n = 1;\n return this.authManager.startAuth({\n phoneNumber: phoneNumber\n });\n case 1:\n return _context3.a(2, _context3.v);\n case 2:\n _context3.p = 2;\n _t3 = _context3.v;\n return _context3.a(2, false);\n }\n }, _callee3, this, [[0, 2]]);\n }));\n function startAuth(_x4) {\n return _startAuth.apply(this, arguments);\n }\n return startAuth;\n }()\n }, {\n key: \"restartAuth\",\n value: function () {\n var _restartAuth = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee4(phoneNumber) {\n return _regenerator().w(function (_context4) {\n while (1) switch (_context4.n) {\n case 0:\n return _context4.a(2, this.startAuth(phoneNumber));\n }\n }, _callee4, this);\n }));\n function restartAuth(_x5) {\n return _restartAuth.apply(this, arguments);\n }\n return restartAuth;\n }()\n }, {\n key: \"completeAuth\",\n value: function () {\n var _completeAuth = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee5(_ref) {\n var phoneNumber, otp, _t4;\n return _regenerator().w(function (_context5) {\n while (1) switch (_context5.p = _context5.n) {\n case 0:\n phoneNumber = _ref.phoneNumber, otp = _ref.otp;\n _context5.p = 1;\n _context5.n = 2;\n return this.authManager.completeAuth({\n phoneNumber: phoneNumber,\n otp: otp\n });\n case 2:\n return _context5.a(2, _context5.v);\n case 3:\n _context5.p = 3;\n _t4 = _context5.v;\n return _context5.a(2, false);\n }\n }, _callee5, this, [[1, 3]]);\n 
}));\n function completeAuth(_x6) {\n return _completeAuth.apply(this, arguments);\n }\n return completeAuth;\n }()\n }]);\n}();\n\n//# sourceURL=webpack://$/./src/authenticationManager.ts?\n}"); +eval("{__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ AuthenticationManager: () => (/* binding */ AuthenticationManager)\n/* harmony export */ });\n/* harmony import */ var _bsv_sdk__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! @bsv/sdk */ \"./node_modules/@bsv/sdk/dist/esm/mod.js\");\n/* harmony import */ var _bsv_wallet_toolbox_mobile__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! @bsv/wallet-toolbox-mobile */ \"./node_modules/@bsv/wallet-toolbox-mobile/out/src/index.mobile.js\");\n/* harmony import */ var _bsv_wallet_toolbox_mobile__WEBPACK_IMPORTED_MODULE_1___default = /*#__PURE__*/__webpack_require__.n(_bsv_wallet_toolbox_mobile__WEBPACK_IMPORTED_MODULE_1__);\n/* harmony import */ var _config__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./config */ \"./src/config.ts\");\n/* harmony import */ var _logger__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./logger */ \"./src/logger.ts\");\nfunction _typeof(o) { \"@babel/helpers - typeof\"; return _typeof = \"function\" == typeof Symbol && \"symbol\" == typeof Symbol.iterator ? function (o) { return typeof o; } : function (o) { return o && \"function\" == typeof Symbol && o.constructor === Symbol && o !== Symbol.prototype ? \"symbol\" : typeof o; }, _typeof(o); }\nfunction _regenerator() { /*! regenerator-runtime -- Copyright (c) 2014-present, Facebook, Inc. -- license (MIT): https://github.com/babel/babel/blob/main/packages/babel-helpers/LICENSE */ var e, t, r = \"function\" == typeof Symbol ? Symbol : {}, n = r.iterator || \"@@iterator\", o = r.toStringTag || \"@@toStringTag\"; function i(r, n, o, i) { var c = n && n.prototype instanceof Generator ? 
n : Generator, u = Object.create(c.prototype); return _regeneratorDefine2(u, \"_invoke\", function (r, n, o) { var i, c, u, f = 0, p = o || [], y = !1, G = { p: 0, n: 0, v: e, a: d, f: d.bind(e, 4), d: function d(t, r) { return i = t, c = 0, u = e, G.n = r, a; } }; function d(r, n) { for (c = r, u = n, t = 0; !y && f && !o && t < p.length; t++) { var o, i = p[t], d = G.p, l = i[2]; r > 3 ? (o = l === n) && (u = i[(c = i[4]) ? 5 : (c = 3, 3)], i[4] = i[5] = e) : i[0] <= d && ((o = r < 2 && d < i[1]) ? (c = 0, G.v = n, G.n = i[1]) : d < l && (o = r < 3 || i[0] > n || n > l) && (i[4] = r, i[5] = n, G.n = l, c = 0)); } if (o || r > 1) return a; throw y = !0, n; } return function (o, p, l) { if (f > 1) throw TypeError(\"Generator is already running\"); for (y && 1 === p && d(p, l), c = p, u = l; (t = c < 2 ? e : u) || !y;) { i || (c ? c < 3 ? (c > 1 && (G.n = -1), d(c, u)) : G.n = u : G.v = u); try { if (f = 2, i) { if (c || (o = \"next\"), t = i[o]) { if (!(t = t.call(i, u))) throw TypeError(\"iterator result is not an object\"); if (!t.done) return t; u = t.value, c < 2 && (c = 0); } else 1 === c && (t = i[\"return\"]) && t.call(i), c < 2 && (u = TypeError(\"The iterator does not provide a '\" + o + \"' method\"), c = 1); i = e; } else if ((t = (y = G.n < 0) ? u : r.call(n, G)) !== a) break; } catch (t) { i = e, c = 1, u = t; } finally { f = 1; } } return { value: t, done: y }; }; }(r, o, i), !0), u; } var a = {}; function Generator() {} function GeneratorFunction() {} function GeneratorFunctionPrototype() {} t = Object.getPrototypeOf; var c = [][n] ? t(t([][n]())) : (_regeneratorDefine2(t = {}, n, function () { return this; }), t), u = GeneratorFunctionPrototype.prototype = Generator.prototype = Object.create(c); function f(e) { return Object.setPrototypeOf ? 
Object.setPrototypeOf(e, GeneratorFunctionPrototype) : (e.__proto__ = GeneratorFunctionPrototype, _regeneratorDefine2(e, o, \"GeneratorFunction\")), e.prototype = Object.create(u), e; } return GeneratorFunction.prototype = GeneratorFunctionPrototype, _regeneratorDefine2(u, \"constructor\", GeneratorFunctionPrototype), _regeneratorDefine2(GeneratorFunctionPrototype, \"constructor\", GeneratorFunction), GeneratorFunction.displayName = \"GeneratorFunction\", _regeneratorDefine2(GeneratorFunctionPrototype, o, \"GeneratorFunction\"), _regeneratorDefine2(u), _regeneratorDefine2(u, o, \"Generator\"), _regeneratorDefine2(u, n, function () { return this; }), _regeneratorDefine2(u, \"toString\", function () { return \"[object Generator]\"; }), (_regenerator = function _regenerator() { return { w: i, m: f }; })(); }\nfunction _regeneratorDefine2(e, r, n, t) { var i = Object.defineProperty; try { i({}, \"\", {}); } catch (e) { i = 0; } _regeneratorDefine2 = function _regeneratorDefine(e, r, n, t) { function o(r, n) { _regeneratorDefine2(e, r, function (e) { return this._invoke(r, n, e); }); } r ? i ? i(e, r, { value: n, enumerable: !t, configurable: !t, writable: !t }) : e[r] = n : (o(\"next\", 0), o(\"throw\", 1), o(\"return\", 2)); }, _regeneratorDefine2(e, r, n, t); }\nfunction asyncGeneratorStep(n, t, e, r, o, a, c) { try { var i = n[a](c), u = i.value; } catch (n) { return void e(n); } i.done ? 
t(u) : Promise.resolve(u).then(r, o); }\nfunction _asyncToGenerator(n) { return function () { var t = this, e = arguments; return new Promise(function (r, o) { var a = n.apply(t, e); function _next(n) { asyncGeneratorStep(a, r, o, _next, _throw, \"next\", n); } function _throw(n) { asyncGeneratorStep(a, r, o, _next, _throw, \"throw\", n); } _next(void 0); }); }; }\nfunction _classCallCheck(a, n) { if (!(a instanceof n)) throw new TypeError(\"Cannot call a class as a function\"); }\nfunction _defineProperties(e, r) { for (var t = 0; t < r.length; t++) { var o = r[t]; o.enumerable = o.enumerable || !1, o.configurable = !0, \"value\" in o && (o.writable = !0), Object.defineProperty(e, _toPropertyKey(o.key), o); } }\nfunction _createClass(e, r, t) { return r && _defineProperties(e.prototype, r), t && _defineProperties(e, t), Object.defineProperty(e, \"prototype\", { writable: !1 }), e; }\nfunction _defineProperty(e, r, t) { return (r = _toPropertyKey(r)) in e ? Object.defineProperty(e, r, { value: t, enumerable: !0, configurable: !0, writable: !0 }) : e[r] = t, e; }\nfunction _toPropertyKey(t) { var i = _toPrimitive(t, \"string\"); return \"symbol\" == _typeof(i) ? i : i + \"\"; }\nfunction _toPrimitive(t, r) { if (\"object\" != _typeof(t) || !t) return t; var e = t[Symbol.toPrimitive]; if (void 0 !== e) { var i = e.call(t, r || \"default\"); if (\"object\" != _typeof(i)) return i; throw new TypeError(\"@@toPrimitive must return a primitive value.\"); } return (\"string\" === r ? 
String : Number)(t); }\n\n\n\n\nvar AuthenticationManager = /*#__PURE__*/function () {\n // Key promise resolver/rejecter\n\n // Password retriver handlers\n\n function AuthenticationManager(eventManager) {\n _classCallCheck(this, AuthenticationManager);\n // Authentication properties\n _defineProperty(this, \"adminOriginator\", _config__WEBPACK_IMPORTED_MODULE_2__.ADMIN_ORIGINATOR);\n _defineProperty(this, \"selectedStorageUrl\", _config__WEBPACK_IMPORTED_MODULE_2__.DEFAULT_STORAGE_URL);\n _defineProperty(this, \"selectedNetwork\", _config__WEBPACK_IMPORTED_MODULE_2__.DEFAULT_CHAIN);\n _defineProperty(this, \"selectedWabUrl\", _config__WEBPACK_IMPORTED_MODULE_2__.DEFAULT_WAB_URL);\n this.eventManager = eventManager;\n // Register authmanager listeners\n this.eventManager.listen('setAdminOriginator', this);\n this.eventManager.listen('setSelectedStorageUrl', this);\n this.eventManager.listen('setSelectedNetwork', this);\n this.eventManager.listen('setSelectedWabUrl', this);\n this.eventManager.listen('setRecoveryKeySaver', this);\n this.eventManager.listen('setPasswordRetriver', this);\n this.eventManager.listen('initialize', this);\n this.eventManager.listen('loadSnapshot', this);\n this.eventManager.listen('startAuth', this);\n this.eventManager.listen('restartAuth', this);\n }\n return _createClass(AuthenticationManager, [{\n key: \"setAdminOriginator\",\n value: function setAdminOriginator(origin) {\n this.adminOriginator = origin;\n this.checkAndReadyEvent();\n return this.adminOriginator;\n }\n }, {\n key: \"setSelectedStorageUrl\",\n value: function setSelectedStorageUrl(url) {\n this.selectedStorageUrl = url;\n this.checkAndReadyEvent();\n return this.selectedStorageUrl;\n }\n }, {\n key: \"setSelectedNetwork\",\n value: function setSelectedNetwork(network) {\n this.selectedNetwork = network;\n this.checkAndReadyEvent();\n return this.selectedNetwork;\n }\n }, {\n key: \"setSelectedWabUrl\",\n value: function setSelectedWabUrl(url) {\n this.selectedWabUrl = 
url;\n this.checkAndReadyEvent();\n return this.selectedWabUrl;\n }\n }, {\n key: \"setRecoveryKeySaver\",\n value: function setRecoveryKeySaver() {\n var _this = this;\n this.recoveryKeySaver = function (key) {\n return new Promise(function (resolve, reject) {\n var keyAsStr = _bsv_sdk__WEBPACK_IMPORTED_MODULE_0__.Utils.toBase64(key);\n\n // When main thread saves key, resolve\n _this.keySaverResolver = resolve;\n // When main thread reject key, reject\n _this.keySaverRejecter = reject;\n\n // TODO:Send key as string to main thread\n // setRecoveryKey(keyAsStr)\n // setOpen(true)\n });\n };\n this.checkAndReadyEvent();\n return true;\n }\n }, {\n key: \"setPasswordRetriver\",\n value: function setPasswordRetriver() {\n var _this2 = this;\n this.passwordRetriever = function (reason, testFn) {\n return new Promise(function (resolvePromise, rejectPromise) {\n // TODO: Send password_reason event to main thread\n // Actions to perform\n // setReason(reason)\n\n // When main thread test password, call this method\n _this2.passwordTestFn = testFn;\n // When main thread resolves password, call this resolve method\n _this2.passwordResolver = resolvePromise;\n // When main thread rejects password, call this reject method\n _this2.passwordRejecter = rejectPromise;\n\n // TODO: Send password_retriving event to main thread\n // Actions to perform\n // setOpen(true)\n // manageFocus()\n });\n };\n this.checkAndReadyEvent();\n return true;\n }\n }, {\n key: \"isReady\",\n value: function isReady() {\n // Check if all property are defined\n console.log(this.adminOriginator, this.selectedStorageUrl, this.selectedNetwork, this.selectedWabUrl, this.recoveryKeySaver, this.passwordRetriever);\n if (!this.adminOriginator || !this.selectedStorageUrl || !this.selectedNetwork || !this.selectedWabUrl || !this.recoveryKeySaver || !this.passwordRetriever) {\n return false;\n }\n return true;\n }\n }, {\n key: \"checkAndReadyEvent\",\n value: function checkAndReadyEvent() {\n if 
(this.isReady()) {\n this.eventManager.send('authenticationReady');\n }\n }\n }, {\n key: \"initialize\",\n value: function initialize() {\n var _this3 = this;\n if (this.isReady()) {\n // Create network service based on selected network\n var networkPreset = this.selectedNetwork === 'main' ? 'mainnet' : 'testnet';\n\n // Create a LookupResolver instance\n var resolver = new _bsv_sdk__WEBPACK_IMPORTED_MODULE_0__.LookupResolver({\n networkPreset: networkPreset\n });\n\n // Create a broadcaster with proper network settings\n var broadcaster = new _bsv_sdk__WEBPACK_IMPORTED_MODULE_0__.SHIPBroadcaster(['tm_users'], {\n networkPreset: networkPreset\n });\n\n // Create a WAB Client with proper URL\n var wabClient = new _bsv_wallet_toolbox_mobile__WEBPACK_IMPORTED_MODULE_1__.WABClient(this.selectedWabUrl);\n\n // Create a phone interactor\n var phoneInteractor = new _bsv_wallet_toolbox_mobile__WEBPACK_IMPORTED_MODULE_1__.TwilioPhoneInteractor();\n this.authManager = new _bsv_wallet_toolbox_mobile__WEBPACK_IMPORTED_MODULE_1__.WalletAuthenticationManager(this.adminOriginator, function (primaryKey, privilegedKeyManager) {\n return _this3.buildWallet(primaryKey, privilegedKeyManager);\n }, new _bsv_wallet_toolbox_mobile__WEBPACK_IMPORTED_MODULE_1__.OverlayUMPTokenInteractor(resolver, broadcaster), this.recoveryKeySaver, this.passwordRetriever,\n // Type assertions needed due to interface mismatch between our WABClient and the expected SDK client\n wabClient, phoneInteractor);\n console.log(this.authManager);\n\n // Load snapshot\n return true;\n } else {\n _logger__WEBPACK_IMPORTED_MODULE_3__.Logger.log('Missing authentication properties');\n return false;\n }\n }\n }, {\n key: \"buildWallet\",\n value: function () {\n var _buildWallet = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee(primaryKey, privilegedKeyManager) {\n var chain, keyDeriver, storageManager, signer, services, client, _t;\n return _regenerator().w(function (_context) {\n while (1) switch 
(_context.p = _context.n) {\n case 0:\n _context.p = 0;\n // const newManagers = {} as any;\n chain = this.selectedNetwork;\n keyDeriver = new _bsv_sdk__WEBPACK_IMPORTED_MODULE_0__.KeyDeriver(new _bsv_sdk__WEBPACK_IMPORTED_MODULE_0__.PrivateKey(primaryKey));\n storageManager = new _bsv_wallet_toolbox_mobile__WEBPACK_IMPORTED_MODULE_1__.WalletStorageManager(keyDeriver.identityKey);\n signer = new _bsv_wallet_toolbox_mobile__WEBPACK_IMPORTED_MODULE_1__.WalletSigner(chain, keyDeriver, storageManager);\n services = new _bsv_wallet_toolbox_mobile__WEBPACK_IMPORTED_MODULE_1__.Services(chain);\n this.wallet = new _bsv_wallet_toolbox_mobile__WEBPACK_IMPORTED_MODULE_1__.Wallet(signer, services, undefined, privilegedKeyManager);\n\n // Use user-selected storage provider\n client = new _bsv_wallet_toolbox_mobile__WEBPACK_IMPORTED_MODULE_1__.StorageClient(this.wallet, this.selectedStorageUrl);\n _context.n = 1;\n return client.makeAvailable();\n case 1:\n _context.n = 2;\n return storageManager.addWalletStorageProvider(client);\n case 2:\n _context.n = 4;\n break;\n case 3:\n _context.p = 3;\n _t = _context.v;\n _logger__WEBPACK_IMPORTED_MODULE_3__.Logger.log('Error building wallet:', _t);\n return _context.a(2, null);\n case 4:\n return _context.a(2);\n }\n }, _callee, this, [[0, 3]]);\n }));\n function buildWallet(_x, _x2) {\n return _buildWallet.apply(this, arguments);\n }\n return buildWallet;\n }()\n }, {\n key: \"loadSnapshot\",\n value: function () {\n var _loadSnapshot = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee2(snap) {\n var _t2;\n return _regenerator().w(function (_context2) {\n while (1) switch (_context2.p = _context2.n) {\n case 0:\n console.log('snap', snap, _typeof(snap));\n _context2.p = 1;\n _context2.n = 2;\n return this.authManager.loadSnapshot(snap);\n case 2:\n _context2.n = 3;\n return this.authManager.waitForAuthentication({});\n case 3:\n return _context2.a(2, true);\n case 4:\n _context2.p = 4;\n _t2 = _context2.v;\n 
_logger__WEBPACK_IMPORTED_MODULE_3__.Logger.log('Wallet snapshot load failed:', _t2);\n return _context2.a(2, _t2);\n }\n }, _callee2, this, [[1, 4]]);\n }));\n function loadSnapshot(_x3) {\n return _loadSnapshot.apply(this, arguments);\n }\n return loadSnapshot;\n }()\n }, {\n key: \"startAuth\",\n value: function () {\n var _startAuth = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee3(phoneNumber) {\n var _t3;\n return _regenerator().w(function (_context3) {\n while (1) switch (_context3.p = _context3.n) {\n case 0:\n _context3.p = 0;\n _context3.n = 1;\n return this.authManager.startAuth({\n phoneNumber: phoneNumber\n });\n case 1:\n return _context3.a(2, _context3.v);\n case 2:\n _context3.p = 2;\n _t3 = _context3.v;\n return _context3.a(2, false);\n }\n }, _callee3, this, [[0, 2]]);\n }));\n function startAuth(_x4) {\n return _startAuth.apply(this, arguments);\n }\n return startAuth;\n }()\n }, {\n key: \"restartAuth\",\n value: function () {\n var _restartAuth = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee4(phoneNumber) {\n return _regenerator().w(function (_context4) {\n while (1) switch (_context4.n) {\n case 0:\n return _context4.a(2, this.startAuth(phoneNumber));\n }\n }, _callee4, this);\n }));\n function restartAuth(_x5) {\n return _restartAuth.apply(this, arguments);\n }\n return restartAuth;\n }()\n }, {\n key: \"completeAuth\",\n value: function () {\n var _completeAuth = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee5(_ref) {\n var phoneNumber, otp, _t4;\n return _regenerator().w(function (_context5) {\n while (1) switch (_context5.p = _context5.n) {\n case 0:\n phoneNumber = _ref.phoneNumber, otp = _ref.otp;\n _context5.p = 1;\n _context5.n = 2;\n return this.authManager.completeAuth({\n phoneNumber: phoneNumber,\n otp: otp\n });\n case 2:\n return _context5.a(2, _context5.v);\n case 3:\n _context5.p = 3;\n _t4 = _context5.v;\n return _context5.a(2, false);\n }\n }, _callee5, this, [[1, 3]]);\n 
}));\n function completeAuth(_x6) {\n return _completeAuth.apply(this, arguments);\n }\n return completeAuth;\n }()\n }]);\n}();\n\n//# sourceURL=webpack://$/./src/authenticationManager.ts?\n}"); /***/ }), @@ -4334,7 +4224,7 @@ /***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => { "use strict"; -eval("{__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ EventManager: () => (/* binding */ EventManager)\n/* harmony export */ });\n/* harmony import */ var _logger__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./logger */ \"./src/logger.ts\");\nfunction _typeof(o) { \"@babel/helpers - typeof\"; return _typeof = \"function\" == typeof Symbol && \"symbol\" == typeof Symbol.iterator ? function (o) { return typeof o; } : function (o) { return o && \"function\" == typeof Symbol && o.constructor === Symbol && o !== Symbol.prototype ? \"symbol\" : typeof o; }, _typeof(o); }\nfunction _classCallCheck(a, n) { if (!(a instanceof n)) throw new TypeError(\"Cannot call a class as a function\"); }\nfunction _defineProperties(e, r) { for (var t = 0; t < r.length; t++) { var o = r[t]; o.enumerable = o.enumerable || !1, o.configurable = !0, \"value\" in o && (o.writable = !0), Object.defineProperty(e, _toPropertyKey(o.key), o); } }\nfunction _createClass(e, r, t) { return r && _defineProperties(e.prototype, r), t && _defineProperties(e, t), Object.defineProperty(e, \"prototype\", { writable: !1 }), e; }\nfunction _defineProperty(e, r, t) { return (r = _toPropertyKey(r)) in e ? Object.defineProperty(e, r, { value: t, enumerable: !0, configurable: !0, writable: !0 }) : e[r] = t, e; }\nfunction _toPropertyKey(t) { var i = _toPrimitive(t, \"string\"); return \"symbol\" == _typeof(i) ? 
i : i + \"\"; }\nfunction _toPrimitive(t, r) { if (\"object\" != _typeof(t) || !t) return t; var e = t[Symbol.toPrimitive]; if (void 0 !== e) { var i = e.call(t, r || \"default\"); if (\"object\" != _typeof(i)) return i; throw new TypeError(\"@@toPrimitive must return a primitive value.\"); } return (\"string\" === r ? String : Number)(t); }\n\nvar EventManager = /*#__PURE__*/function () {\n function EventManager() {\n _classCallCheck(this, EventManager);\n _defineProperty(this, \"listeners\", {});\n // Listen to EventManager events\n this.listen('clearLog', this);\n }\n\n /**\n * Clears the log.\n */\n return _createClass(EventManager, [{\n key: \"clearLog\",\n value: function clearLog() {\n _logger__WEBPACK_IMPORTED_MODULE_0__.Logger.clear();\n }\n\n /**\n * Process an event on all registered listeners.\n * @param method Method name that must match the key on the listener object.\n * @param params Optional parameters to be passed to the listener.\n */\n }, {\n key: \"process\",\n value: function process(method) {\n var _this = this;\n var params = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n if (this.listeners[method]) {\n this.listeners[method].forEach(function (listener) {\n var fn = listener[method];\n if (typeof fn === 'function') {\n _logger__WEBPACK_IMPORTED_MODULE_0__.Logger.log('Start process', {\n method: method,\n params: params\n });\n var result = fn.call(listener, params);\n _logger__WEBPACK_IMPORTED_MODULE_0__.Logger.log('Result process', {\n result: result\n });\n _this.send(\"\".concat(method, \".callback\"), result);\n } else {\n _logger__WEBPACK_IMPORTED_MODULE_0__.Logger.log('Error process', {\n method: method\n });\n }\n });\n } else {\n _logger__WEBPACK_IMPORTED_MODULE_0__.Logger.log('Event not found', {\n method: method\n });\n }\n }\n\n /**\n * Process an event received by the EventManager.\n * @param data event data from event source\n */\n }, {\n key: \"receive\",\n value: function receive(data) {\n _logger__WEBPACK_IMPORTED_MODULE_0__.Logger.log('Receive event', data);\n this.process(data.name, data.params);\n }\n\n /**\n * Send an event to the main thread of the WebView.\n * The event name is modified by appending \".callback\".\n * The event is sent as a JSON string.\n * @param event event name\n * @param results event results\n */\n }, {\n key: \"send\",\n value: function send(event) {\n var _window$ReactNativeWe;\n var results = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : null;\n var payload = {\n name: \"\".concat(event),\n results: results\n };\n _logger__WEBPACK_IMPORTED_MODULE_0__.Logger.log('Sending event', payload);\n (_window$ReactNativeWe = window.ReactNativeWebView) === null || _window$ReactNativeWe === void 0 || _window$ReactNativeWe.postMessage(JSON.stringify(payload));\n }\n\n /**\n * Registers a listener for the specified event.\n * @param event event name\n * @param object listener object\n */\n }, {\n key: \"listen\",\n value: function listen(event, object) {\n if (!this.listeners[event]) {\n this.listeners[event] = [];\n }\n this.listeners[event].push(object);\n }\n }]);\n}();\n\n//# sourceURL=webpack://$/./src/eventManager.ts?\n}"); +eval("{__webpack_require__.r(__webpack_exports__);\n/* harmony export */ __webpack_require__.d(__webpack_exports__, {\n/* harmony export */ EventManager: () => (/* binding */ EventManager)\n/* harmony export */ });\n/* harmony import */ var _logger__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./logger */ \"./src/logger.ts\");\nfunction _typeof(o) { \"@babel/helpers - typeof\"; return _typeof = \"function\" == typeof Symbol && \"symbol\" == typeof Symbol.iterator ? function (o) { return typeof o; } : function (o) { return o && \"function\" == typeof Symbol && o.constructor === Symbol && o !== Symbol.prototype ? \"symbol\" : typeof o; }, _typeof(o); }\nfunction _classCallCheck(a, n) { if (!(a instanceof n)) throw new TypeError(\"Cannot call a class as a function\"); }\nfunction _defineProperties(e, r) { for (var t = 0; t < r.length; t++) { var o = r[t]; o.enumerable = o.enumerable || !1, o.configurable = !0, \"value\" in o && (o.writable = !0), Object.defineProperty(e, _toPropertyKey(o.key), o); } }\nfunction _createClass(e, r, t) { return r && _defineProperties(e.prototype, r), t && _defineProperties(e, t), Object.defineProperty(e, \"prototype\", { writable: !1 }), e; }\nfunction _defineProperty(e, r, t) { return (r = _toPropertyKey(r)) in e ? 
Object.defineProperty(e, r, { value: t, enumerable: !0, configurable: !0, writable: !0 }) : e[r] = t, e; }\nfunction _toPropertyKey(t) { var i = _toPrimitive(t, \"string\"); return \"symbol\" == _typeof(i) ? i : i + \"\"; }\nfunction _toPrimitive(t, r) { if (\"object\" != _typeof(t) || !t) return t; var e = t[Symbol.toPrimitive]; if (void 0 !== e) { var i = e.call(t, r || \"default\"); if (\"object\" != _typeof(i)) return i; throw new TypeError(\"@@toPrimitive must return a primitive value.\"); } return (\"string\" === r ? String : Number)(t); }\n\nvar EventManager = /*#__PURE__*/function () {\n function EventManager() {\n _classCallCheck(this, EventManager);\n _defineProperty(this, \"listenners\", {});\n // Listen to EventManager events\n this.listen('clearLog', this);\n }\n\n /**\n * Clears the log.\n */\n return _createClass(EventManager, [{\n key: \"clearLog\",\n value: function clearLog() {\n _logger__WEBPACK_IMPORTED_MODULE_0__.Logger.clear();\n }\n\n /**\n * Process an event on all registered listeners.\n * @param method Method name that must match the key on the listener object.\n * @param params Optional parameters to be passed to the listener.\n */\n }, {\n key: \"process\",\n value: function process(method) {\n var _this = this;\n var params = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n if (this.listenners[method]) {\n this.listenners[method].forEach(function (listener) {\n var fn = listener[method];\n if (typeof fn === 'function') {\n _logger__WEBPACK_IMPORTED_MODULE_0__.Logger.log('Start process', {\n method: method,\n params: params\n });\n var result = fn.call(listener, params);\n _logger__WEBPACK_IMPORTED_MODULE_0__.Logger.log('Result process', {\n result: result\n });\n _this.send(\"\".concat(method, \".callback\"), result);\n } else {\n _logger__WEBPACK_IMPORTED_MODULE_0__.Logger.log('Error process', {\n method: method\n });\n }\n });\n } else {\n _logger__WEBPACK_IMPORTED_MODULE_0__.Logger.log('Event not found', {\n method: method\n });\n }\n }\n\n /**\n * Process an event received by the EventManager.\n * @param data event data from event source\n */\n }, {\n key: \"receive\",\n value: function receive(data) {\n _logger__WEBPACK_IMPORTED_MODULE_0__.Logger.log('Receive event', data);\n this.process(data.name, data.params);\n }\n\n /**\n * Send an event to the main thread of the WebView.\n * The event name is modified by appending \".callback\".\n * The event is sent as a JSON string.\n * @param event event name\n * @param results event results\n */\n }, {\n key: \"send\",\n value: function send(event) {\n var _window$ReactNativeWe;\n var results = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : null;\n var payload = {\n name: \"\".concat(event),\n results: results\n };\n _logger__WEBPACK_IMPORTED_MODULE_0__.Logger.log('Sending event', payload);\n (_window$ReactNativeWe = window.ReactNativeWebView) === null || _window$ReactNativeWe === void 0 || _window$ReactNativeWe.postMessage(JSON.stringify(payload));\n }\n\n /**\n * Registers a listener for the specified event.\n * @param event event name\n * @param object listener object\n */\n }, {\n key: \"listen\",\n value: function listen(event, object) {\n if (!this.listenners[event]) {\n this.listenners[event] = [];\n }\n this.listenners[event].push(object);\n }\n }]);\n}();\n\n//# sourceURL=webpack://$/./src/eventManager.ts?\n}"); /***/ }), @@ -4384,7 +4274,7 @@ /************************************************************************/ /******/ // The module cache /******/ var __webpack_module_cache__ = {}; -/******/ +/******/ /******/ // The require function /******/ function __webpack_require__(moduleId) { /******/ // Check if module is in cache @@ -4398,14 +4288,14 @@ /******/ // no module.loaded needed /******/ exports: {} /******/ }; -/******/ +/******/ /******/ // Execute the module function /******/ __webpack_modules__[moduleId].call(module.exports, module, module.exports, __webpack_require__); -/******/ +/******/ /******/ // Return the exports of the module /******/ return module.exports; /******/ } -/******/ +/******/ /************************************************************************/ /******/ /* webpack/runtime/compat get default export */ /******/ (() => { @@ -4418,7 +4308,7 @@ /******/ return getter; /******/ }; /******/ })(); -/******/ +/******/ /******/ /* webpack/runtime/define property getters */ /******/ (() => { /******/ // define getter functions for harmony exports @@ -4430,12 +4320,12 @@ /******/ } /******/ }; /******/ })(); -/******/ +/******/ /******/ /* webpack/runtime/hasOwnProperty shorthand */ /******/ (() => { /******/ __webpack_require__.o = (obj, prop) => 
(Object.prototype.hasOwnProperty.call(obj, prop)) /******/ })(); -/******/ +/******/ /******/ /* webpack/runtime/make namespace object */ /******/ (() => { /******/ // define __esModule on exports @@ -4446,14 +4336,14 @@ /******/ Object.defineProperty(exports, '__esModule', { value: true }); /******/ }; /******/ })(); -/******/ +/******/ /************************************************************************/ -/******/ +/******/ /******/ // startup /******/ // Load entry module and return exports /******/ // This entry module can't be inlined because the eval devtool is used. /******/ var __webpack_exports__ = __webpack_require__("./src/index.ts"); -/******/ +/******/ /******/ return __webpack_exports__; /******/ })() ; diff --git a/wallet/src/authenticationManager.ts b/wallet/src/authenticationManager.ts index 52de12d..d094840 100644 --- a/wallet/src/authenticationManager.ts +++ b/wallet/src/authenticationManager.ts @@ -34,11 +34,16 @@ export class AuthenticationManager { this.eventManager.listen('setSelectedNetwork', this); this.eventManager.listen('setSelectedWabUrl', this); this.eventManager.listen('setRecoveryKeySaver', this); - this.eventManager.listen('setPasswordRetriver', this); + this.eventManager.listen('setPasswordRetriever', this); this.eventManager.listen('initialize', this); this.eventManager.listen('loadSnapshot', this); this.eventManager.listen('startAuth', this); this.eventManager.listen('restartAuth', this); + this.eventManager.listen('testPassword', this); + this.eventManager.listen('testPasswordResolve', this); + this.eventManager.listen('testPasswordReject', this); + this.eventManager.listen('recoveryKeyResolve', this); + this.eventManager.listen('recoveryKeyReject', this); } setAdminOriginator(origin: string) { @@ -75,9 +80,8 @@ export class AuthenticationManager { // When main thread reject key, reject this.keySaverRejecter = reject; - // TODO:Send key as string to main thread - // setRecoveryKey(keyAsStr) - // setOpen(true) + // Send key 
as string to main thread + this.eventManager.send('recoveryKey.completed', keyAsStr); }) }; @@ -86,13 +90,9 @@ export class AuthenticationManager { return true; } - setPasswordRetriver() { + setPasswordRetriever() { this.passwordRetriever = (reason: string, testFn: (passwordCandidate: string) => boolean) => { return new Promise((resolvePromise: Function, rejectPromise: Function) => { - // TODO: Send password_reason event to main thread - // Actions to perform - // setReason(reason) - // When main thread test password, call this method this.passwordTestFn = testFn; // When main thread resolves password, call this resolve method @@ -100,10 +100,7 @@ export class AuthenticationManager { // When main thread rejects password, call this reject method this.passwordRejecter = rejectPromise; - // TODO: Send password_retriving event to main thread - // Actions to perform - // setOpen(true) - // manageFocus() + this.eventManager.send('passwordRetriever.completed', reason) }) }; @@ -117,13 +114,13 @@ export class AuthenticationManager { this.adminOriginator, this.selectedStorageUrl, this.selectedNetwork, - this.setSelectedWabUrl, + this.selectedWabUrl, this.recoveryKeySaver, this.passwordRetriever ) if (!this.adminOriginator || !this.selectedStorageUrl || !this.selectedNetwork || - !this.setSelectedWabUrl || !this.recoveryKeySaver || !this.passwordRetriever) { + !this.selectedWabUrl || !this.recoveryKeySaver || !this.passwordRetriever) { return false } @@ -261,4 +258,28 @@ export class AuthenticationManager { return false; } } + + testPassword(password: string) { + this.passwordTestFn(password).then((result: boolean) => { + this.eventManager.send('testPassword.completed', result); + }).catch(() => { + this.eventManager.send('testPassword.completed', false); + }); + } + + testPasswordResolve() { + this.passwordResolver(); + } + + testPasswordReject(error: string) { + this.passwordRejecter(new Error(error)); + } + + recoveryKeyResolve(arg: boolean) { + 
this.keySaverResolver(arg); + } + + recoveryKeyReject(error: string) { + this.keySaverRejecter(new Error(error)); + } }