From f1c10fee0b214dabc90a94eac6d24350511fe35a Mon Sep 17 00:00:00 2001 From: Tobias Merkle Date: Thu, 14 Aug 2025 16:53:57 -0400 Subject: [PATCH 01/70] implement basename option 1 --- hyperdrive/packages/app-store/public-ui/src/App.tsx | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/hyperdrive/packages/app-store/public-ui/src/App.tsx b/hyperdrive/packages/app-store/public-ui/src/App.tsx index 8518b3a6f..6725be893 100644 --- a/hyperdrive/packages/app-store/public-ui/src/App.tsx +++ b/hyperdrive/packages/app-store/public-ui/src/App.tsx @@ -10,9 +10,17 @@ const BASE_URL = import.meta.env.BASE_URL; if (window.our) window.our.process = BASE_URL?.replace("/", ""); function App() { + const getBasename = () => { + const path = window.location.pathname; + if (path.startsWith('/main:app-store:sys/public')) { + return '/main:app-store:sys/public'; + } + return '/'; + }; + return (
- + } /> From bad63a6f63263461469532d8e5cd2a8f428333c1 Mon Sep 17 00:00:00 2001 From: hosted-fornet Date: Fri, 22 Aug 2025 13:43:27 -0700 Subject: [PATCH 02/70] change multicall to initialize first --- Cargo.lock | 23 +++++- .../packages/app-store/ui/src/abis/helpers.ts | 15 +++- .../app-store/ui/src/pages/PublishPage.tsx | 7 +- .../app-store/ui/src/utils/predictTBA.ts | 78 +++++++++++++++++++ hyperdrive/packages/file-explorer/Cargo.toml | 1 + .../file-explorer/explorer/Cargo.toml | 3 + .../src/register-ui/src/abis/helpers.ts | 15 +++- .../src/register-ui/src/pages/MintCustom.tsx | 14 +++- .../register-ui/src/pages/MintDotOsName.tsx | 11 ++- .../src/register-ui/src/utils/predictTBA.ts | 78 +++++++++++++++++++ 10 files changed, 231 insertions(+), 14 deletions(-) create mode 100644 hyperdrive/packages/app-store/ui/src/utils/predictTBA.ts create mode 100644 hyperdrive/src/register-ui/src/utils/predictTBA.ts diff --git a/Cargo.lock b/Cargo.lock index 12af32c87..406ddf5e9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1323,6 +1323,22 @@ dependencies = [ "serde", ] +[[package]] +name = "caller-utils" +version = "0.1.0" +dependencies = [ + "anyhow", + "futures", + "futures-util", + "hyperware_app_common", + "once_cell", + "process_macros", + "serde", + "serde_json", + "uuid 1.17.0", + "wit-bindgen 0.41.0", +] + [[package]] name = "camino" version = "1.1.10" @@ -2484,6 +2500,7 @@ name = "explorer" version = "0.1.0" dependencies = [ "anyhow", + "caller-utils", "hyperprocess_macro", "hyperware_app_common", "md5", @@ -3291,7 +3308,7 @@ dependencies = [ [[package]] name = "hyperdrive" -version = "1.6.0" +version = "1.6.1" dependencies = [ "aes-gcm", "alloy", @@ -3346,7 +3363,7 @@ dependencies = [ [[package]] name = "hyperdrive_lib" -version = "1.6.0" +version = "1.6.1" dependencies = [ "lib", ] @@ -3972,7 +3989,7 @@ checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" [[package]] name = "lib" -version = "1.6.0" +version = "1.6.1" dependencies = [ 
"alloy", "anyhow", diff --git a/hyperdrive/packages/app-store/ui/src/abis/helpers.ts b/hyperdrive/packages/app-store/ui/src/abis/helpers.ts index 99e22dde3..cab6a56b6 100644 --- a/hyperdrive/packages/app-store/ui/src/abis/helpers.ts +++ b/hyperdrive/packages/app-store/ui/src/abis/helpers.ts @@ -1,7 +1,7 @@ import { multicallAbi, hypermapAbi, mechAbi, HYPERMAP, MULTICALL, HYPER_ACCOUNT_UPGRADABLE_IMPL } from "./"; import { encodeFunctionData, encodePacked, stringToHex } from "viem"; -export function encodeMulticalls(metadataUri: string, metadataHash: string) { +export function encodeMulticalls(metadataUri: string, metadataHash: string, tbaAddress?: `0x${string}`) { const metadataHashCall = encodeFunctionData({ abi: hypermapAbi, functionName: 'note', @@ -20,11 +20,22 @@ export function encodeMulticalls(metadataUri: string, metadataHash: string) { ] }) - const calls = [ + // Add initialize call if TBA address is provided + const initializeCall = tbaAddress ? encodeFunctionData({ + abi: [{"inputs":[],"name":"initialize","outputs":[],"stateMutability":"nonpayable","type":"function"}], + functionName: 'initialize', + args: [] + }) : null; + + const baseCalls = [ { target: HYPERMAP, callData: metadataHashCall }, { target: HYPERMAP, callData: metadataUriCall }, ]; + const calls = initializeCall && tbaAddress ? 
+ [{ target: tbaAddress, callData: initializeCall }, ...baseCalls] : + baseCalls; + const multicall = encodeFunctionData({ abi: multicallAbi, functionName: 'aggregate', diff --git a/hyperdrive/packages/app-store/ui/src/pages/PublishPage.tsx b/hyperdrive/packages/app-store/ui/src/pages/PublishPage.tsx index a5e0a981f..aab73bffb 100644 --- a/hyperdrive/packages/app-store/ui/src/pages/PublishPage.tsx +++ b/hyperdrive/packages/app-store/ui/src/pages/PublishPage.tsx @@ -5,6 +5,7 @@ import { ConnectButton, useConnectModal } from '@rainbow-me/rainbowkit'; import { keccak256, toBytes } from 'viem'; import { mechAbi, HYPERMAP, encodeIntoMintCall, encodeMulticalls, hypermapAbi, MULTICALL } from "../abis"; import { hyperhash } from '../utils/hyperhash'; +import { predictTBAAddress } from '../utils/predictTBA'; import useAppsStore from "../store"; import { PackageSelector } from "../components"; import { Tooltip } from '../components/Tooltip'; @@ -229,7 +230,9 @@ export default function PublishPage() { metadata = keccak256(toBytes(metadataText)); } - const multicall = encodeMulticalls(metadataUrl, metadata); + // When creating a new package, predict the TBA address that will be created + const predictedTBA = !isUpdate ? predictTBAAddress(currentTBA || HYPERMAP, packageName, publicClient?.chain?.id || 8453) : undefined; + const multicall = encodeMulticalls(metadataUrl, metadata, predictedTBA); const args = isUpdate ? multicall : encodeIntoMintCall(multicall, address, packageName); writeContract({ @@ -444,4 +447,4 @@ export default function PublishPage() { )}
); -} \ No newline at end of file +} diff --git a/hyperdrive/packages/app-store/ui/src/utils/predictTBA.ts b/hyperdrive/packages/app-store/ui/src/utils/predictTBA.ts new file mode 100644 index 000000000..46d775277 --- /dev/null +++ b/hyperdrive/packages/app-store/ui/src/utils/predictTBA.ts @@ -0,0 +1,78 @@ +import { encodePacked, keccak256, getAddress } from 'viem'; +import { hyperhash } from './hyperhash'; + +const ERC6551_REGISTRY = '0x000000006551c19487814612e58FE06813775758' as const; + +export function predictTBAAddress( + hypermapAddr: `0x${string}`, + label: string, + chainId: number = 8453 // Base chain ID +): `0x${string}` { + // Calculate the namehash for the label + const namehash = hyperhash(label); + + // First compute the proxy address + const proxyAddr = computeProxyAddress(hypermapAddr, hypermapAddr, namehash); + + // Implementation bytecode hash for ERC6551 v3 + const ACCOUNT_IMPLEMENTATION_BYTECODE_HASH = keccak256(encodePacked(['string'], ['erc6551:v3:account'])); + + // Create the init code for ERC6551 account + const initCode = encodePacked( + ['bytes10', 'address', 'bytes32', 'uint256', 'address', 'uint256'], + [ + '0x' + '00'.repeat(10), // 10 bytes of zeros for ERC6551 v3 + proxyAddr, // implementation (proxy) + namehash, // salt + BigInt(chainId), // chainId + hypermapAddr, // tokenContract + BigInt(namehash) // tokenId (using namehash as tokenId) + ] + ); + + // Compute init code hash for ERC6551 account + const initCodeHash = keccak256( + encodePacked( + ['bytes', 'bytes32'], + [initCode, ACCOUNT_IMPLEMENTATION_BYTECODE_HASH] + ) + ); + + // Compute the TBA address using CREATE2 + const hash = keccak256( + encodePacked( + ['bytes1', 'address', 'bytes32'], + ['0xff', ERC6551_REGISTRY, initCodeHash] + ) + ); + + return getAddress(`0x${hash.slice(-40)}`) as `0x${string}`; +} + +function computeProxyAddress( + deployer: `0x${string}`, + hypermapAddr: `0x${string}`, + salt: `0x${string}` +): `0x${string}` { + // HyperAccountProxy creation code 
with constructor argument + const PROXY_CREATION_CODE = '0x60a0604052348015600e575f5ffd5b5060405161051d38038061051d833981016040819052602b91603b565b6001600160a01b03166080526066565b5f60208284031215604a575f5ffd5b81516001600160a01b0381168114605f575f5ffd5b9392505050565b6080516104a061007d5f395f607a01526104a05ff3fe608060405260043610610021575f3560e01c8063d1f578941461003257610028565b3661002857005b610030610045565b005b610030610040366004610383565b610057565b610055610050610132565b610169565b565b7f7d0893b5fe6077fb4cf083ec3487b8eece7e03b4ab6e888f7a8a1758010f8c007f00000000000000000000000000000000000000000000000000000000000000006001600160a01b031633146100df57805460ff16156100bf576100ba610045565b6100df565b60405163572190d160e01b81523360048201526024015b60405180910390fd5b805460ff16156101015760405162dc149f60e41b815260040160405180910390fd5b5f61010a610132565b6001600160a01b03160361012d57805460ff1916600117815561012d8383610187565b505050565b5f6101647f360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc546001600160a01b031690565b905090565b365f5f375f5f365f845af43d5f5f3e808015610183573d5ff35b3d5ffd5b610190826101e0565b6040516001600160a01b038316907fbc7cd75a20ee27fd9adebab32041f755214dbc6bffa90cc0225b39da2e5c2d3b905f90a28051156101d45761012d8282610256565b6101dc6102c8565b5050565b806001600160a01b03163b5f0361021557604051634c9c8ce360e01b81526001600160a01b03821660048201526024016100d6565b7f360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc80546001600160a01b0319166001600160a01b0392909216919091179055565b60605f5f846001600160a01b0316846040516102729190610454565b5f60405180830381855af49150503d805f81146102aa576040519150601f19603f3d011682016040523d82523d5f602084013e6102af565b606091505b50915091506102bf8583836102e7565b95945050505050565b34156100555760405163b398979f60e01b815260040160405180910390fd5b6060826102fc576102f782610346565b61033f565b815115801561031357506001600160a01b0384163b155b1561033c57604051639996b31560e01b81526001600160a01b03851660048201526024016100d6565b50805b9392505050565b8051156103565
780518082602001fd5b60405163d6bda27560e01b815260040160405180910390fd5b634e487b7160e01b5f52604160045260245ffd5b5f5f60408385031215610394575f5ffd5b82356001600160a01b03811681146103aa575f5ffd5b9150602083013567ffffffffffffffff8111156103c5575f5ffd5b8301601f810185136103d5575f5ffd5b803567ffffffffffffffff8111156103ef576103ef61036f565b604051601f8201601f19908116603f0116810167ffffffffffffffff8111828210171561041e5761041e61036f565b604052818152828201602001871015610435575f5ffd5b816020840160208301375f602083830101528093505050509250929050565b5f82518060208501845e5f92019182525091905056fea264697066735822122014ea9f00459f92ee24210cb4740336c93386a9e0bc81a68800b4e50d397e23ab64736f6c634300081c0033'; + + const proxyCreationCodeHash = keccak256( + encodePacked( + ['bytes', 'bytes'], + [ + PROXY_CREATION_CODE, + encodePacked(['address'], [hypermapAddr]) + ] + ) + ); + + const hash = keccak256( + encodePacked( + ['bytes1', 'address', 'bytes32', 'bytes32'], + ['0xff', deployer, salt, proxyCreationCodeHash] + ) + ); + + return getAddress(`0x${hash.slice(-40)}`) as `0x${string}`; +} diff --git a/hyperdrive/packages/file-explorer/Cargo.toml b/hyperdrive/packages/file-explorer/Cargo.toml index 299a6ba1d..4aa010ccf 100644 --- a/hyperdrive/packages/file-explorer/Cargo.toml +++ b/hyperdrive/packages/file-explorer/Cargo.toml @@ -6,5 +6,6 @@ panic = "abort" [workspace] members = [ "explorer", + "target/caller-utils", ] resolver = "2" diff --git a/hyperdrive/packages/file-explorer/explorer/Cargo.toml b/hyperdrive/packages/file-explorer/explorer/Cargo.toml index 3b36f1446..0218351a2 100644 --- a/hyperdrive/packages/file-explorer/explorer/Cargo.toml +++ b/hyperdrive/packages/file-explorer/explorer/Cargo.toml @@ -7,6 +7,9 @@ serde_urlencoded = "0.7" tracing = "0.1.37" wit-bindgen = "0.42.1" +[dependencies.caller-utils] +path = "../target/caller-utils" + [dependencies.hyperprocess_macro] git = "https://github.com/hyperware-ai/hyperprocess-macro" rev = "9836e2a" diff --git 
a/hyperdrive/src/register-ui/src/abis/helpers.ts b/hyperdrive/src/register-ui/src/abis/helpers.ts index b4765ed46..a471f081b 100644 --- a/hyperdrive/src/register-ui/src/abis/helpers.ts +++ b/hyperdrive/src/register-ui/src/abis/helpers.ts @@ -19,6 +19,7 @@ export const generateNetworkingKeys = async ({ setTcpPort, setRouters, reset, + tbaAddress, }: { direct: boolean, label: string, @@ -29,6 +30,7 @@ export const generateNetworkingKeys = async ({ setTcpPort: (tcpPort: number) => void; setRouters: (routers: string[]) => void; reset: boolean; + tbaAddress?: `0x${string}`; }) => { const { networking_key, @@ -106,7 +108,14 @@ export const generateNetworkingKeys = async ({ )] }); - const calls = direct ? [ + // Add initialize call if TBA address is provided + const initializeCall = tbaAddress ? encodeFunctionData({ + abi: [{"inputs":[],"name":"initialize","outputs":[],"stateMutability":"nonpayable","type":"function"}], + functionName: 'initialize', + args: [] + }) : null; + + const baseCalls = direct ? [ { target: HYPERMAP, callData: netkeycall }, { target: HYPERMAP, callData: ws_port_call }, { target: HYPERMAP, callData: tcp_port_call }, @@ -116,6 +125,10 @@ export const generateNetworkingKeys = async ({ { target: HYPERMAP, callData: router_call }, ]; + const calls = initializeCall && tbaAddress ? 
+ [{ target: tbaAddress, callData: initializeCall }, ...baseCalls] : + baseCalls; + const multicalls = encodeFunctionData({ abi: multicallAbi, functionName: 'aggregate', diff --git a/hyperdrive/src/register-ui/src/pages/MintCustom.tsx b/hyperdrive/src/register-ui/src/pages/MintCustom.tsx index 535cb4b9f..54baacff9 100644 --- a/hyperdrive/src/register-ui/src/pages/MintCustom.tsx +++ b/hyperdrive/src/register-ui/src/pages/MintCustom.tsx @@ -7,9 +7,10 @@ import DirectNodeCheckbox from "../components/DirectCheckbox"; import { useAccount, useWaitForTransactionReceipt, useSendTransaction } from "wagmi"; import { useConnectModal, useAddRecentTransaction } from "@rainbow-me/rainbowkit" -import { tbaMintAbi, generateNetworkingKeys, HYPER_ACCOUNT_IMPL } from "../abis"; +import { tbaMintAbi, generateNetworkingKeys, HYPER_ACCOUNT_IMPL, HYPERMAP } from "../abis"; import { encodePacked, encodeFunctionData, stringToHex } from "viem"; import BackButton from "../components/BackButton"; +import { predictTBAAddress } from "../utils/predictTBA"; interface MintCustomNameProps extends PageProps { } function MintCustom({ @@ -65,6 +66,13 @@ function MintCustom({ return } + const name = formData.get('name') as string + const tbaAddr = formData.get('tba') as `0x${string}` || HYPERMAP; + const fullLabel = `${name}.${tbaAddr === HYPERMAP ? 
'' : tbaAddr}`; + + // Predict the TBA address that will be created + const predictedTBA = predictTBAAddress(tbaAddr, name); + const initCall = await generateNetworkingKeys({ direct, our_address: address, @@ -75,14 +83,14 @@ function MintCustom({ setTcpPort, setRouters, reset: false, + tbaAddress: predictedTBA, }); setHnsName(formData.get('full-hns-name') as string) - const name = formData.get('name') as string - console.log("full hns name", formData.get('full-hns-name')) console.log("name", name) + console.log("predicted TBA", predictedTBA) const data = encodeFunctionData({ abi: tbaMintAbi, diff --git a/hyperdrive/src/register-ui/src/pages/MintDotOsName.tsx b/hyperdrive/src/register-ui/src/pages/MintDotOsName.tsx index 2ccf86eda..77cf5cb55 100644 --- a/hyperdrive/src/register-ui/src/pages/MintDotOsName.tsx +++ b/hyperdrive/src/register-ui/src/pages/MintDotOsName.tsx @@ -8,6 +8,7 @@ import { useConnectModal, useAddRecentTransaction } from "@rainbow-me/rainbowkit import { generateNetworkingKeys, HYPER_ACCOUNT_IMPL, DOTOS, tbaMintAbi } from "../abis"; import { createPublicClient, encodePacked, http, stringToHex, BaseError, ContractFunctionRevertedError } from "viem"; import { base } from 'viem/chains' +import { predictTBAAddress } from "../utils/predictTBA"; interface RegisterOsNameProps extends PageProps { } @@ -60,6 +61,12 @@ function MintDotOsName({ setHasMinted(true); + // strip .os suffix + const name = hnsName.replace(/\.os$/, ''); + + // Predict the TBA address that will be created + const predictedTBA = predictTBAAddress(DOTOS, name, base.id); + const initCall = await generateNetworkingKeys({ direct, our_address: address, @@ -70,11 +77,9 @@ function MintDotOsName({ setTcpPort, setRouters, reset: false, + tbaAddress: predictedTBA, }); - // strip .os suffix - const name = hnsName.replace(/\.os$/, ''); - const publicClient = createPublicClient({ chain: base, transport: http(), diff --git a/hyperdrive/src/register-ui/src/utils/predictTBA.ts 
b/hyperdrive/src/register-ui/src/utils/predictTBA.ts new file mode 100644 index 000000000..46d775277 --- /dev/null +++ b/hyperdrive/src/register-ui/src/utils/predictTBA.ts @@ -0,0 +1,78 @@ +import { encodePacked, keccak256, getAddress } from 'viem'; +import { hyperhash } from './hyperhash'; + +const ERC6551_REGISTRY = '0x000000006551c19487814612e58FE06813775758' as const; + +export function predictTBAAddress( + hypermapAddr: `0x${string}`, + label: string, + chainId: number = 8453 // Base chain ID +): `0x${string}` { + // Calculate the namehash for the label + const namehash = hyperhash(label); + + // First compute the proxy address + const proxyAddr = computeProxyAddress(hypermapAddr, hypermapAddr, namehash); + + // Implementation bytecode hash for ERC6551 v3 + const ACCOUNT_IMPLEMENTATION_BYTECODE_HASH = keccak256(encodePacked(['string'], ['erc6551:v3:account'])); + + // Create the init code for ERC6551 account + const initCode = encodePacked( + ['bytes10', 'address', 'bytes32', 'uint256', 'address', 'uint256'], + [ + '0x' + '00'.repeat(10), // 10 bytes of zeros for ERC6551 v3 + proxyAddr, // implementation (proxy) + namehash, // salt + BigInt(chainId), // chainId + hypermapAddr, // tokenContract + BigInt(namehash) // tokenId (using namehash as tokenId) + ] + ); + + // Compute init code hash for ERC6551 account + const initCodeHash = keccak256( + encodePacked( + ['bytes', 'bytes32'], + [initCode, ACCOUNT_IMPLEMENTATION_BYTECODE_HASH] + ) + ); + + // Compute the TBA address using CREATE2 + const hash = keccak256( + encodePacked( + ['bytes1', 'address', 'bytes32'], + ['0xff', ERC6551_REGISTRY, initCodeHash] + ) + ); + + return getAddress(`0x${hash.slice(-40)}`) as `0x${string}`; +} + +function computeProxyAddress( + deployer: `0x${string}`, + hypermapAddr: `0x${string}`, + salt: `0x${string}` +): `0x${string}` { + // HyperAccountProxy creation code with constructor argument + const PROXY_CREATION_CODE = 
'0x60a0604052348015600e575f5ffd5b5060405161051d38038061051d833981016040819052602b91603b565b6001600160a01b03166080526066565b5f60208284031215604a575f5ffd5b81516001600160a01b0381168114605f575f5ffd5b9392505050565b6080516104a061007d5f395f607a01526104a05ff3fe608060405260043610610021575f3560e01c8063d1f578941461003257610028565b3661002857005b610030610045565b005b610030610040366004610383565b610057565b610055610050610132565b610169565b565b7f7d0893b5fe6077fb4cf083ec3487b8eece7e03b4ab6e888f7a8a1758010f8c007f00000000000000000000000000000000000000000000000000000000000000006001600160a01b031633146100df57805460ff16156100bf576100ba610045565b6100df565b60405163572190d160e01b81523360048201526024015b60405180910390fd5b805460ff16156101015760405162dc149f60e41b815260040160405180910390fd5b5f61010a610132565b6001600160a01b03160361012d57805460ff1916600117815561012d8383610187565b505050565b5f6101647f360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc546001600160a01b031690565b905090565b365f5f375f5f365f845af43d5f5f3e808015610183573d5ff35b3d5ffd5b610190826101e0565b6040516001600160a01b038316907fbc7cd75a20ee27fd9adebab32041f755214dbc6bffa90cc0225b39da2e5c2d3b905f90a28051156101d45761012d8282610256565b6101dc6102c8565b5050565b806001600160a01b03163b5f0361021557604051634c9c8ce360e01b81526001600160a01b03821660048201526024016100d6565b7f360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc80546001600160a01b0319166001600160a01b0392909216919091179055565b60605f5f846001600160a01b0316846040516102729190610454565b5f60405180830381855af49150503d805f81146102aa576040519150601f19603f3d011682016040523d82523d5f602084013e6102af565b606091505b50915091506102bf8583836102e7565b95945050505050565b34156100555760405163b398979f60e01b815260040160405180910390fd5b6060826102fc576102f782610346565b61033f565b815115801561031357506001600160a01b0384163b155b1561033c57604051639996b31560e01b81526001600160a01b03851660048201526024016100d6565b50805b9392505050565b8051156103565780518082602001fd5b60405163d6bda27560e01b815260040160405
180910390fd5b634e487b7160e01b5f52604160045260245ffd5b5f5f60408385031215610394575f5ffd5b82356001600160a01b03811681146103aa575f5ffd5b9150602083013567ffffffffffffffff8111156103c5575f5ffd5b8301601f810185136103d5575f5ffd5b803567ffffffffffffffff8111156103ef576103ef61036f565b604051601f8201601f19908116603f0116810167ffffffffffffffff8111828210171561041e5761041e61036f565b604052818152828201602001871015610435575f5ffd5b816020840160208301375f602083830101528093505050509250929050565b5f82518060208501845e5f92019182525091905056fea264697066735822122014ea9f00459f92ee24210cb4740336c93386a9e0bc81a68800b4e50d397e23ab64736f6c634300081c0033'; + + const proxyCreationCodeHash = keccak256( + encodePacked( + ['bytes', 'bytes'], + [ + PROXY_CREATION_CODE, + encodePacked(['address'], [hypermapAddr]) + ] + ) + ); + + const hash = keccak256( + encodePacked( + ['bytes1', 'address', 'bytes32', 'bytes32'], + ['0xff', deployer, salt, proxyCreationCodeHash] + ) + ); + + return getAddress(`0x${hash.slice(-40)}`) as `0x${string}`; +} From c0b74a04ba4c235eae4c6ff7794b7b86ea107444 Mon Sep 17 00:00:00 2001 From: hosted-fornet Date: Mon, 8 Sep 2025 20:49:32 -0700 Subject: [PATCH 03/70] notifications: remove old endpoints --- hyperdrive/src/notifications.rs | 81 +++++++++++++++++++++++++++++---- 1 file changed, 71 insertions(+), 10 deletions(-) diff --git a/hyperdrive/src/notifications.rs b/hyperdrive/src/notifications.rs index 34ded85ff..6010b74c8 100644 --- a/hyperdrive/src/notifications.rs +++ b/hyperdrive/src/notifications.rs @@ -516,10 +516,12 @@ async fn handle_request( // Clone what we need for the async task let state_clone = state.clone(); let send_to_terminal_clone = send_to_terminal.clone(); + let send_to_state_clone = send_to_state.clone(); + let our_node_clone = our_node.to_string(); // Start the queue processor let handle = tokio::spawn(async move { - process_notification_queue(&send_to_terminal_clone, &state_clone).await; + process_notification_queue(&our_node_clone, &send_to_terminal_clone, 
&send_to_state_clone, &state_clone).await; }); state_guard.queue_processor_handle = Some(handle); @@ -783,7 +785,9 @@ async fn save_subscriptions_to_state( } async fn process_notification_queue( + our_node: &str, send_to_terminal: &PrintSender, + send_to_state: &MessageSender, state: &Arc>, ) { loop { @@ -819,7 +823,7 @@ async fn process_notification_queue( // Send the notification if let Err(e) = - send_notification_to_all(send_to_terminal, state, notification).await + send_notification_to_all(our_node, send_to_terminal, send_to_state, state, notification).await { Printout::new( 0, @@ -883,7 +887,9 @@ async fn process_notification_queue( } async fn send_notification_to_all( + our_node: &str, send_to_terminal: &PrintSender, + send_to_state: &MessageSender, state: &Arc>, notification: QueuedNotification, ) -> Result<(), NotificationsError> { @@ -927,6 +933,7 @@ async fn send_notification_to_all( // Send to all subscriptions let mut send_errors = Vec::new(); let mut send_count = 0; + let mut invalid_endpoints = Vec::new(); for subscription in &state_guard.subscriptions { // Create subscription info for web-push @@ -1007,17 +1014,72 @@ async fn send_notification_to_all( send_count += 1; } Err(e) => { + let error_str = format!("{:?}", e); + + // Check if this is an EndpointNotValid error + if error_str.contains("EndpointNotValid") || error_str.contains("410") { + Printout::new( + 0, + NOTIFICATIONS_PROCESS_ID.clone(), + format!( + "notifications: Endpoint invalid, will remove: {}", + subscription.endpoint + ), + ) + .send(send_to_terminal) + .await; + invalid_endpoints.push(subscription.endpoint.clone()); + } else { + Printout::new( + 0, + NOTIFICATIONS_PROCESS_ID.clone(), + format!( + "notifications: Failed to send to {}: {:?}", + subscription.endpoint, e + ), + ) + .send(send_to_terminal) + .await; + } + send_errors.push(format!("Failed to send to endpoint: {:?}", e)); + } + } + } + + // Drop the read guard before attempting to write + drop(state_guard); + + // 
Remove invalid endpoints if any were found + if !invalid_endpoints.is_empty() { + let mut state_guard = state.write().await; + let initial_count = state_guard.subscriptions.len(); + + for endpoint in &invalid_endpoints { + state_guard.subscriptions.retain(|s| &s.endpoint != endpoint); + } + + if state_guard.subscriptions.len() < initial_count { + Printout::new( + 2, + NOTIFICATIONS_PROCESS_ID.clone(), + format!( + "notifications: Removed {} invalid endpoints, {} subscriptions remaining", + initial_count - state_guard.subscriptions.len(), + state_guard.subscriptions.len() + ), + ) + .send(send_to_terminal) + .await; + + // Save the updated subscriptions to state + if let Err(e) = save_subscriptions_to_state(our_node, send_to_state, &state_guard.subscriptions).await { Printout::new( 0, NOTIFICATIONS_PROCESS_ID.clone(), - format!( - "notifications: Failed to send to {}: {:?}", - subscription.endpoint, e - ), + format!("notifications: Failed to save updated subscriptions: {:?}", e), ) .send(send_to_terminal) .await; - send_errors.push(format!("Failed to send to endpoint: {:?}", e)); } } } @@ -1026,9 +1088,8 @@ async fn send_notification_to_all( 2, NOTIFICATIONS_PROCESS_ID.clone(), format!( - "notifications: Sent to {}/{} devices", - send_count, - state_guard.subscriptions.len() + "notifications: Sent to {} devices successfully", + send_count ), ) .send(send_to_terminal) From cc5ad39235ea2bb441c18ed27a8843d402ad2a99 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 9 Sep 2025 03:49:57 +0000 Subject: [PATCH 04/70] Format Rust code using rustfmt --- hyperdrive/src/notifications.rs | 37 ++++++++++++++++++++++++--------- 1 file changed, 27 insertions(+), 10 deletions(-) diff --git a/hyperdrive/src/notifications.rs b/hyperdrive/src/notifications.rs index 6010b74c8..c45dfc703 100644 --- a/hyperdrive/src/notifications.rs +++ b/hyperdrive/src/notifications.rs @@ -521,7 +521,13 @@ async fn handle_request( // 
Start the queue processor let handle = tokio::spawn(async move { - process_notification_queue(&our_node_clone, &send_to_terminal_clone, &send_to_state_clone, &state_clone).await; + process_notification_queue( + &our_node_clone, + &send_to_terminal_clone, + &send_to_state_clone, + &state_clone, + ) + .await; }); state_guard.queue_processor_handle = Some(handle); @@ -822,8 +828,14 @@ async fn process_notification_queue( .await; // Send the notification - if let Err(e) = - send_notification_to_all(our_node, send_to_terminal, send_to_state, state, notification).await + if let Err(e) = send_notification_to_all( + our_node, + send_to_terminal, + send_to_state, + state, + notification, + ) + .await { Printout::new( 0, @@ -1055,7 +1067,9 @@ async fn send_notification_to_all( let initial_count = state_guard.subscriptions.len(); for endpoint in &invalid_endpoints { - state_guard.subscriptions.retain(|s| &s.endpoint != endpoint); + state_guard + .subscriptions + .retain(|s| &s.endpoint != endpoint); } if state_guard.subscriptions.len() < initial_count { @@ -1072,11 +1086,17 @@ async fn send_notification_to_all( .await; // Save the updated subscriptions to state - if let Err(e) = save_subscriptions_to_state(our_node, send_to_state, &state_guard.subscriptions).await { + if let Err(e) = + save_subscriptions_to_state(our_node, send_to_state, &state_guard.subscriptions) + .await + { Printout::new( 0, NOTIFICATIONS_PROCESS_ID.clone(), - format!("notifications: Failed to save updated subscriptions: {:?}", e), + format!( + "notifications: Failed to save updated subscriptions: {:?}", + e + ), ) .send(send_to_terminal) .await; @@ -1087,10 +1107,7 @@ async fn send_notification_to_all( Printout::new( 2, NOTIFICATIONS_PROCESS_ID.clone(), - format!( - "notifications: Sent to {} devices successfully", - send_count - ), + format!("notifications: Sent to {} devices successfully", send_count), ) .send(send_to_terminal) .await; From 4400b448399e4f967109c475e61a35bc318cbf82 Mon Sep 17 00:00:00 
2001 From: hosted-fornet Date: Tue, 9 Sep 2025 07:51:32 -0700 Subject: [PATCH 05/70] bump to v1.7.1 --- Cargo.lock | 6 +++--- Cargo.toml | 2 +- hyperdrive/Cargo.toml | 2 +- lib/Cargo.toml | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2988e3bf0..2f78e1cef 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3546,7 +3546,7 @@ dependencies = [ [[package]] name = "hyperdrive" -version = "1.7.0" +version = "1.7.1" dependencies = [ "aes-gcm", "alloy", @@ -3603,7 +3603,7 @@ dependencies = [ [[package]] name = "hyperdrive_lib" -version = "1.7.0" +version = "1.7.1" dependencies = [ "lib", ] @@ -4380,7 +4380,7 @@ checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" [[package]] name = "lib" -version = "1.7.0" +version = "1.7.1" dependencies = [ "alloy", "anyhow", diff --git a/Cargo.toml b/Cargo.toml index b19740ec4..4ef04985a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "hyperdrive_lib" authors = ["Sybil Technologies AG"] -version = "1.7.0" +version = "1.7.1" edition = "2021" description = "A general-purpose sovereign cloud computing platform" homepage = "https://hyperware.ai" diff --git a/hyperdrive/Cargo.toml b/hyperdrive/Cargo.toml index 44321e69d..0a92005b4 100644 --- a/hyperdrive/Cargo.toml +++ b/hyperdrive/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "hyperdrive" authors = ["Sybil Technologies AG"] -version = "1.7.0" +version = "1.7.1" edition = "2021" description = "A general-purpose sovereign cloud computing platform" homepage = "https://hyperware.ai" diff --git a/lib/Cargo.toml b/lib/Cargo.toml index a3e17ac41..8290887c6 100644 --- a/lib/Cargo.toml +++ b/lib/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "lib" authors = ["Sybil Technologies AG"] -version = "1.7.0" +version = "1.7.1" edition = "2021" description = "A general-purpose sovereign cloud computing platform" homepage = "https://hyperware.ai" From 8c392bb72a2cedbc7e5231bcd9f79e354c55a8f3 Mon Sep 17 00:00:00 2001 
From: hosted-fornet Date: Tue, 9 Sep 2025 07:53:06 -0700 Subject: [PATCH 06/70] spider: add initial phase 2 work --- hyperdrive/packages/spider/spider/src/lib.rs | 354 +++++++++++++++++- .../src/tool_providers/build_container.rs | 88 +++++ .../spider/spider/src/tool_providers/mod.rs | 1 + .../packages/spider/spider/src/types.rs | 13 + 4 files changed, 455 insertions(+), 1 deletion(-) create mode 100644 hyperdrive/packages/spider/spider/src/tool_providers/build_container.rs diff --git a/hyperdrive/packages/spider/spider/src/lib.rs b/hyperdrive/packages/spider/spider/src/lib.rs index 8ed1d71f0..99f74ae7c 100644 --- a/hyperdrive/packages/spider/spider/src/lib.rs +++ b/hyperdrive/packages/spider/spider/src/lib.rs @@ -44,7 +44,7 @@ use utils::{ }; mod tool_providers; -use tool_providers::{hypergrid::HypergridToolProvider, ToolProvider}; +use tool_providers::{hypergrid::HypergridToolProvider, build_container::BuildContainerToolProvider, ToolProvider}; const ICON: &str = include_str!("./icon"); @@ -192,6 +192,36 @@ impl SpiderState { } } + // Register Build Container tool provider + let build_container_provider = BuildContainerToolProvider::new("build_container".to_string()); + + // Get initial tools from the provider (just init_build_container initially) + let build_container_tools = build_container_provider.get_tools(self); + + // Register the provider + self.tool_provider_registry + .register(Box::new(build_container_provider)); + + // Create the build container MCP server + let build_container_server = McpServer { + id: "build_container".to_string(), + name: "Build Container".to_string(), + transport: types::TransportConfig { + transport_type: "build_container".to_string(), + command: None, + args: None, + url: None, + hypergrid_token: None, + hypergrid_client_id: None, + hypergrid_node: None, + }, + tools: build_container_tools, + connected: true, + }; + + self.mcp_servers.push(build_container_server); + println!("Spider: Build Container MCP server initialized"); + 
// Create an admin Spider key for the GUI with a random suffix for security // Check if admin key already exists (look for keys with admin permission and the GUI name) let existing_admin_key = self @@ -2178,6 +2208,24 @@ impl SpiderState { }) .unwrap()) } + "build_container" => { + // Handle build container tools + match tool_name { + "init_build_container" => { + self.handle_init_build_container(parameters).await + } + "start_package" => { + self.handle_start_package(parameters).await + } + "persist" => { + self.handle_persist(parameters).await + } + "done_build_container" => { + self.handle_done_build_container(parameters).await + } + _ => Err(format!("Unknown build container tool: {}", tool_name)), + } + } _ => Err(format!( "Unsupported transport type: {}", server.transport.transport_type @@ -2333,4 +2381,308 @@ impl SpiderState { Ok(response_text) } + + // Build Container Tool Handlers + async fn handle_init_build_container(&mut self, parameters: &Value) -> Result { + let project_uuid = parameters + .get("project_uuid") + .and_then(|v| v.as_str()) + .ok_or_else(|| "Missing project_uuid parameter".to_string())?; + + let project_name = parameters + .get("project_name") + .and_then(|v| v.as_str()); + + let initial_zip = parameters + .get("initial_zip") + .and_then(|v| v.as_str()); + + let metadata = parameters.get("metadata"); + + // Get constructor URL from environment or use default + let constructor_url = std::env::var("SPIDER_CONSTRUCTOR_URL") + .unwrap_or_else(|_| "http://localhost:8081/init-build-container".to_string()); + + // Prepare request body + let mut request_body = serde_json::json!({ + "uuid": project_uuid + }); + + if let Some(name) = project_name { + request_body["name"] = serde_json::json!(name); + } + + if let Some(zip) = initial_zip { + request_body["initial_zip"] = serde_json::json!(zip); + } + + if let Some(meta) = metadata { + request_body["metadata"] = meta.clone(); + } + + // Make HTTP request to constructor + let response = 
hyperware_process_lib::hyperapp::http::send_request( + hyperware_process_lib::hyperapp::http::Method::POST, + constructor_url.parse().map_err(|e| format!("Invalid URL: {}", e))?, + None, + 5000, + request_body.to_string().into_bytes(), + ) + .await + .map_err(|e| format!("Failed to init build container: {:?}", e))?; + + if response.status().as_u16() >= 400 { + let error_text = String::from_utf8_lossy(response.body()); + return Err(format!("Constructor error (status {}): {}", response.status().as_u16(), error_text)); + } + + // Parse response + let response_text = String::from_utf8(response.body().to_vec()) + .map_err(|e| format!("Invalid UTF-8 response: {}", e))?; + + let response_json: Value = serde_json::from_str(&response_text) + .map_err(|e| format!("Failed to parse constructor response: {}", e))?; + + let ws_uri = response_json + .get("ws_uri") + .and_then(|v| v.as_str()) + .ok_or_else(|| "Missing ws_uri in response".to_string())?; + + let api_key = response_json + .get("api_key") + .and_then(|v| v.as_str()) + .ok_or_else(|| "Missing api_key in response".to_string())?; + + // Connect to the build container's ws-mcp server + let channel_id = self.connect_to_build_container_ws(ws_uri, api_key).await?; + + // Store the build container connection + self.build_container_connection = Some(BuildContainerConnection { + project_uuid: project_uuid.to_string(), + ws_uri: ws_uri.to_string(), + api_key: api_key.to_string(), + channel_id, + connected: true, + tools: Vec::new(), + }); + + // Update the build container server to show all tools + if let Some(server) = self.mcp_servers.iter_mut().find(|s| s.id == "build_container") { + let provider = BuildContainerToolProvider::new("build_container".to_string()); + server.tools = provider.get_tools(self); + } + + Ok(serde_json::json!({ + "content": [{ + "type": "text", + "text": format!("✅ Build container initialized successfully!\n- Project UUID: {}\n- WebSocket URI: {}\n- Container is ready for use", project_uuid, ws_uri) + 
}] + })) + } + + async fn connect_to_build_container_ws(&mut self, ws_uri: &str, api_key: &str) -> Result { + // Allocate a channel ID + let channel_id = self.next_channel_id; + self.next_channel_id += 1; + + // Parse the WebSocket URI + let parsed_url = hyperware_process_lib::hyperapp::http::Url::parse(ws_uri) + .map_err(|e| format!("Invalid WebSocket URI: {}", e))?; + + // Connect to the WebSocket + hyperware_process_lib::hyperapp::websocket::connect( + channel_id, + parsed_url.clone(), + None, + Vec::new(), + ) + .await + .map_err(|e| format!("Failed to connect to build container WebSocket: {:?}", e))?; + + // Send authentication message + let auth_message = serde_json::json!({ + "method": "spider/authorization", + "params": { + "api_key": api_key + }, + "id": format!("auth_{}", channel_id) + }); + + hyperware_process_lib::hyperapp::websocket::send_text( + channel_id, + auth_message.to_string(), + ) + .await + .map_err(|e| format!("Failed to send auth message: {:?}", e))?; + + // Wait for auth response (simplified - in production would handle properly) + let _ = hyperware_process_lib::hyperapp::sleep(500).await; + + // Initialize MCP connection + let init_request = JsonRpcRequest { + jsonrpc: "2.0".to_string(), + method: "initialize".to_string(), + params: Some(serde_json::to_value(McpInitializeParams { + protocol_version: "2024-11-05".to_string(), + client_info: McpClientInfo { + name: "Spider".to_string(), + version: "1.0.0".to_string(), + }, + capabilities: McpCapabilities {}, + }).unwrap()), + id: format!("init_{}", channel_id), + }; + + hyperware_process_lib::hyperapp::websocket::send_text( + channel_id, + serde_json::to_string(&init_request).unwrap(), + ) + .await + .map_err(|e| format!("Failed to send initialize request: {:?}", e))?; + + Ok(channel_id) + } + + async fn handle_start_package(&mut self, parameters: &Value) -> Result { + let package_dir = parameters + .get("package_dir") + .and_then(|v| v.as_str()) + .ok_or_else(|| "Missing package_dir 
parameter".to_string())?; + + let conn = self.build_container_connection + .as_ref() + .ok_or_else(|| "No build container connection. Call init_build_container first.".to_string())?; + + // Send start_package request to ws-mcp + let request = serde_json::json!({ + "jsonrpc": "2.0", + "method": "spider/start_package", + "params": { + "package_dir": package_dir + }, + "id": format!("start_package_{}", conn.channel_id) + }); + + hyperware_process_lib::hyperapp::websocket::send_text( + conn.channel_id, + request.to_string(), + ) + .await + .map_err(|e| format!("Failed to send start_package request: {:?}", e))?; + + // Wait for response and handle the package deployment + // This would receive the zipped package from ws-mcp and deploy it + // Implementation would follow kit start-package logic + + Ok(serde_json::json!({ + "content": [{ + "type": "text", + "text": format!("✅ Package deployed successfully from: {}", package_dir) + }] + })) + } + + async fn handle_persist(&mut self, parameters: &Value) -> Result { + let directories = parameters + .get("directories") + .and_then(|v| v.as_array()) + .ok_or_else(|| "Missing directories parameter".to_string())?; + + let conn = self.build_container_connection + .as_ref() + .ok_or_else(|| "No build container connection. 
Call init_build_container first.".to_string())?; + + let dir_strings: Vec = directories + .iter() + .filter_map(|v| v.as_str().map(String::from)) + .collect(); + + // Send persist request to ws-mcp + let request = serde_json::json!({ + "jsonrpc": "2.0", + "method": "spider/persist", + "params": { + "directories": dir_strings + }, + "id": format!("persist_{}", conn.channel_id) + }); + + hyperware_process_lib::hyperapp::websocket::send_text( + conn.channel_id, + request.to_string(), + ) + .await + .map_err(|e| format!("Failed to send persist request: {:?}", e))?; + + // Wait for response with zipped directories + // Save them appropriately + + Ok(serde_json::json!({ + "content": [{ + "type": "text", + "text": format!("✅ Persisted {} directories successfully", dir_strings.len()) + }] + })) + } + + async fn handle_done_build_container(&mut self, parameters: &Value) -> Result { + let project_uuid = parameters + .get("project_uuid") + .and_then(|v| v.as_str()) + .ok_or_else(|| "Missing project_uuid parameter".to_string())?; + + let metadata = parameters.get("metadata"); + + // Close WebSocket connection if exists + if let Some(conn) = &self.build_container_connection { + hyperware_process_lib::hyperapp::websocket::close(conn.channel_id, 1000, "Done".to_string()) + .await + .ok(); // Ignore errors on close + } + + // Get constructor URL from environment or use default + let constructor_url = std::env::var("SPIDER_CONSTRUCTOR_URL") + .unwrap_or_else(|_| "http://localhost:8081/done-build-container".to_string()); + + // Prepare request body + let mut request_body = serde_json::json!({ + "uuid": project_uuid + }); + + if let Some(meta) = metadata { + request_body["metadata"] = meta.clone(); + } + + // Make HTTP request to constructor + let response = hyperware_process_lib::hyperapp::http::send_request( + hyperware_process_lib::hyperapp::http::Method::POST, + constructor_url.parse().map_err(|e| format!("Invalid URL: {}", e))?, + None, + 5000, + 
request_body.to_string().into_bytes(), + ) + .await + .map_err(|e| format!("Failed to done build container: {:?}", e))?; + + if response.status().as_u16() >= 400 { + let error_text = String::from_utf8_lossy(response.body()); + return Err(format!("Constructor error (status {}): {}", response.status().as_u16(), error_text)); + } + + // Clear the build container connection + self.build_container_connection = None; + + // Update the build container server to show only init tool + if let Some(server) = self.mcp_servers.iter_mut().find(|s| s.id == "build_container") { + let provider = BuildContainerToolProvider::new("build_container".to_string()); + server.tools = provider.get_tools(self); + } + + Ok(serde_json::json!({ + "content": [{ + "type": "text", + "text": format!("✅ Build container for project {} has been torn down successfully", project_uuid) + }] + })) + } } diff --git a/hyperdrive/packages/spider/spider/src/tool_providers/build_container.rs b/hyperdrive/packages/spider/spider/src/tool_providers/build_container.rs new file mode 100644 index 000000000..b2a898e14 --- /dev/null +++ b/hyperdrive/packages/spider/spider/src/tool_providers/build_container.rs @@ -0,0 +1,88 @@ +use crate::tool_providers::ToolProvider; +use crate::types::{SpiderState, Tool}; +use serde_json::Value; + +pub struct BuildContainerToolProvider { + provider_id: String, +} + +impl BuildContainerToolProvider { + pub fn new(provider_id: String) -> Self { + Self { provider_id } + } + + fn create_init_build_container_tool(&self) -> Tool { + Tool { + name: "init_build_container".to_string(), + description: "Initialize a build container for remote compilation. 
Returns WebSocket URI and API key for authentication.".to_string(), + parameters: r#"{"type":"object","required":["project_uuid"],"properties":{"project_uuid":{"type":"string","description":"UUID of the project"},"project_name":{"type":"string","description":"Optional name of the project"},"initial_zip":{"type":"string","description":"Optional base64-encoded zipped directory to extract in $HOME"},"metadata":{"type":"object","description":"Additional metadata for the build container"}}}"#.to_string(), + input_schema_json: Some(r#"{"type":"object","required":["project_uuid"],"properties":{"project_uuid":{"type":"string","description":"UUID of the project"},"project_name":{"type":"string","description":"Optional name of the project"},"initial_zip":{"type":"string","description":"Optional base64-encoded zipped directory to extract in $HOME"},"metadata":{"type":"object","description":"Additional metadata for the build container"}}}"#.to_string()), + } + } + + fn create_start_package_tool(&self) -> Tool { + Tool { + name: "start_package".to_string(), + description: "Deploy a built package to the Hyperware node. 
Package must be previously built with 'kit build'.".to_string(), + parameters: r#"{"type":"object","required":["package_dir"],"properties":{"package_dir":{"type":"string","description":"Path to the package directory containing the built pkg/ folder"}}}"#.to_string(), + input_schema_json: Some(r#"{"type":"object","required":["package_dir"],"properties":{"package_dir":{"type":"string","description":"Path to the package directory containing the built pkg/ folder"}}}"#.to_string()), + } + } + + fn create_persist_tool(&self) -> Tool { + Tool { + name: "persist".to_string(), + description: "Persist directories from the build container by zipping and saving them.".to_string(), + parameters: r#"{"type":"object","required":["directories"],"properties":{"directories":{"type":"array","items":{"type":"string"},"description":"List of directory paths to persist"}}}"#.to_string(), + input_schema_json: Some(r#"{"type":"object","required":["directories"],"properties":{"directories":{"type":"array","items":{"type":"string"},"description":"List of directory paths to persist"}}}"#.to_string()), + } + } + + fn create_done_build_container_tool(&self) -> Tool { + Tool { + name: "done_build_container".to_string(), + description: "Notify that work with the build container is complete and it can be torn down.".to_string(), + parameters: r#"{"type":"object","required":["project_uuid"],"properties":{"project_uuid":{"type":"string","description":"UUID of the project"},"metadata":{"type":"object","description":"Additional metadata about project completion"}}}"#.to_string(), + input_schema_json: Some(r#"{"type":"object","required":["project_uuid"],"properties":{"project_uuid":{"type":"string","description":"UUID of the project"},"metadata":{"type":"object","description":"Additional metadata about project completion"}}}"#.to_string()), + } + } +} + +impl ToolProvider for BuildContainerToolProvider { + fn get_tools(&self, state: &SpiderState) -> Vec { + let mut tools = 
vec![self.create_init_build_container_tool()]; + + // Only show other tools if we have an active build container connection + if state.build_container_connection.is_some() { + tools.push(self.create_start_package_tool()); + tools.push(self.create_persist_tool()); + tools.push(self.create_done_build_container_tool()); + } + + tools + } + + fn should_include_tool(&self, tool_name: &str, state: &SpiderState) -> bool { + match tool_name { + "init_build_container" => true, + "start_package" | "persist" | "done_build_container" => { + state.build_container_connection.is_some() + } + _ => false, + } + } + + fn execute_tool( + &self, + _tool_name: &str, + _parameters: &Value, + _state: &mut SpiderState, + ) -> Result { + // Execution is handled by the main Spider implementation + Err("Tool execution should be handled by the main Spider implementation".to_string()) + } + + fn get_provider_id(&self) -> &str { + &self.provider_id + } +} diff --git a/hyperdrive/packages/spider/spider/src/tool_providers/mod.rs b/hyperdrive/packages/spider/spider/src/tool_providers/mod.rs index 51a1ed305..4ff338e70 100644 --- a/hyperdrive/packages/spider/spider/src/tool_providers/mod.rs +++ b/hyperdrive/packages/spider/spider/src/tool_providers/mod.rs @@ -1,3 +1,4 @@ +pub mod build_container; pub mod hypergrid; use crate::types::{SpiderState, Tool}; diff --git a/hyperdrive/packages/spider/spider/src/types.rs b/hyperdrive/packages/spider/spider/src/types.rs index b8e507c1d..9b28a0243 100644 --- a/hyperdrive/packages/spider/spider/src/types.rs +++ b/hyperdrive/packages/spider/spider/src/types.rs @@ -34,6 +34,8 @@ pub struct SpiderState { pub show_trial_key_notification: bool, // Flag to show trial key notification popup #[serde(skip)] pub tool_provider_registry: ToolProviderRegistry, // Registry for modular tool providers + #[serde(skip)] + pub build_container_connection: Option, // Active build container connection } #[derive(Clone, Debug)] @@ -563,3 +565,14 @@ pub(crate) struct 
OAuthRefreshRequest { #[serde(rename = "refreshToken")] pub(crate) refresh_token: String, } + +// Build Container types +#[derive(Clone, Debug)] +pub(crate) struct BuildContainerConnection { + pub(crate) project_uuid: String, + pub(crate) ws_uri: String, + pub(crate) api_key: String, + pub(crate) channel_id: u32, + pub(crate) connected: bool, + pub(crate) tools: Vec, +} From fdeff550cabf02f1b8b7df3d84e1e50a7888aa40 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 9 Sep 2025 14:53:32 +0000 Subject: [PATCH 07/70] Format Rust code using rustfmt --- hyperdrive/packages/spider/spider/src/lib.rs | 137 ++++++++++--------- 1 file changed, 75 insertions(+), 62 deletions(-) diff --git a/hyperdrive/packages/spider/spider/src/lib.rs b/hyperdrive/packages/spider/spider/src/lib.rs index 99f74ae7c..8adcf7880 100644 --- a/hyperdrive/packages/spider/spider/src/lib.rs +++ b/hyperdrive/packages/spider/spider/src/lib.rs @@ -44,7 +44,9 @@ use utils::{ }; mod tool_providers; -use tool_providers::{hypergrid::HypergridToolProvider, build_container::BuildContainerToolProvider, ToolProvider}; +use tool_providers::{ + build_container::BuildContainerToolProvider, hypergrid::HypergridToolProvider, ToolProvider, +}; const ICON: &str = include_str!("./icon"); @@ -193,7 +195,8 @@ impl SpiderState { } // Register Build Container tool provider - let build_container_provider = BuildContainerToolProvider::new("build_container".to_string()); + let build_container_provider = + BuildContainerToolProvider::new("build_container".to_string()); // Get initial tools from the provider (just init_build_container initially) let build_container_tools = build_container_provider.get_tools(self); @@ -2211,18 +2214,10 @@ impl SpiderState { "build_container" => { // Handle build container tools match tool_name { - "init_build_container" => { - self.handle_init_build_container(parameters).await - } - "start_package" => { - 
self.handle_start_package(parameters).await - } - "persist" => { - self.handle_persist(parameters).await - } - "done_build_container" => { - self.handle_done_build_container(parameters).await - } + "init_build_container" => self.handle_init_build_container(parameters).await, + "start_package" => self.handle_start_package(parameters).await, + "persist" => self.handle_persist(parameters).await, + "done_build_container" => self.handle_done_build_container(parameters).await, _ => Err(format!("Unknown build container tool: {}", tool_name)), } } @@ -2389,13 +2384,9 @@ impl SpiderState { .and_then(|v| v.as_str()) .ok_or_else(|| "Missing project_uuid parameter".to_string())?; - let project_name = parameters - .get("project_name") - .and_then(|v| v.as_str()); + let project_name = parameters.get("project_name").and_then(|v| v.as_str()); - let initial_zip = parameters - .get("initial_zip") - .and_then(|v| v.as_str()); + let initial_zip = parameters.get("initial_zip").and_then(|v| v.as_str()); let metadata = parameters.get("metadata"); @@ -2423,7 +2414,9 @@ impl SpiderState { // Make HTTP request to constructor let response = hyperware_process_lib::hyperapp::http::send_request( hyperware_process_lib::hyperapp::http::Method::POST, - constructor_url.parse().map_err(|e| format!("Invalid URL: {}", e))?, + constructor_url + .parse() + .map_err(|e| format!("Invalid URL: {}", e))?, None, 5000, request_body.to_string().into_bytes(), @@ -2433,7 +2426,11 @@ impl SpiderState { if response.status().as_u16() >= 400 { let error_text = String::from_utf8_lossy(response.body()); - return Err(format!("Constructor error (status {}): {}", response.status().as_u16(), error_text)); + return Err(format!( + "Constructor error (status {}): {}", + response.status().as_u16(), + error_text + )); } // Parse response @@ -2467,7 +2464,11 @@ impl SpiderState { }); // Update the build container server to show all tools - if let Some(server) = self.mcp_servers.iter_mut().find(|s| s.id == "build_container") { + 
if let Some(server) = self + .mcp_servers + .iter_mut() + .find(|s| s.id == "build_container") + { let provider = BuildContainerToolProvider::new("build_container".to_string()); server.tools = provider.get_tools(self); } @@ -2480,7 +2481,11 @@ impl SpiderState { })) } - async fn connect_to_build_container_ws(&mut self, ws_uri: &str, api_key: &str) -> Result { + async fn connect_to_build_container_ws( + &mut self, + ws_uri: &str, + api_key: &str, + ) -> Result { // Allocate a channel ID let channel_id = self.next_channel_id; self.next_channel_id += 1; @@ -2508,12 +2513,9 @@ impl SpiderState { "id": format!("auth_{}", channel_id) }); - hyperware_process_lib::hyperapp::websocket::send_text( - channel_id, - auth_message.to_string(), - ) - .await - .map_err(|e| format!("Failed to send auth message: {:?}", e))?; + hyperware_process_lib::hyperapp::websocket::send_text(channel_id, auth_message.to_string()) + .await + .map_err(|e| format!("Failed to send auth message: {:?}", e))?; // Wait for auth response (simplified - in production would handle properly) let _ = hyperware_process_lib::hyperapp::sleep(500).await; @@ -2522,14 +2524,17 @@ impl SpiderState { let init_request = JsonRpcRequest { jsonrpc: "2.0".to_string(), method: "initialize".to_string(), - params: Some(serde_json::to_value(McpInitializeParams { - protocol_version: "2024-11-05".to_string(), - client_info: McpClientInfo { - name: "Spider".to_string(), - version: "1.0.0".to_string(), - }, - capabilities: McpCapabilities {}, - }).unwrap()), + params: Some( + serde_json::to_value(McpInitializeParams { + protocol_version: "2024-11-05".to_string(), + client_info: McpClientInfo { + name: "Spider".to_string(), + version: "1.0.0".to_string(), + }, + capabilities: McpCapabilities {}, + }) + .unwrap(), + ), id: format!("init_{}", channel_id), }; @@ -2549,9 +2554,9 @@ impl SpiderState { .and_then(|v| v.as_str()) .ok_or_else(|| "Missing package_dir parameter".to_string())?; - let conn = self.build_container_connection - 
.as_ref() - .ok_or_else(|| "No build container connection. Call init_build_container first.".to_string())?; + let conn = self.build_container_connection.as_ref().ok_or_else(|| { + "No build container connection. Call init_build_container first.".to_string() + })?; // Send start_package request to ws-mcp let request = serde_json::json!({ @@ -2563,12 +2568,9 @@ impl SpiderState { "id": format!("start_package_{}", conn.channel_id) }); - hyperware_process_lib::hyperapp::websocket::send_text( - conn.channel_id, - request.to_string(), - ) - .await - .map_err(|e| format!("Failed to send start_package request: {:?}", e))?; + hyperware_process_lib::hyperapp::websocket::send_text(conn.channel_id, request.to_string()) + .await + .map_err(|e| format!("Failed to send start_package request: {:?}", e))?; // Wait for response and handle the package deployment // This would receive the zipped package from ws-mcp and deploy it @@ -2588,9 +2590,9 @@ impl SpiderState { .and_then(|v| v.as_array()) .ok_or_else(|| "Missing directories parameter".to_string())?; - let conn = self.build_container_connection - .as_ref() - .ok_or_else(|| "No build container connection. Call init_build_container first.".to_string())?; + let conn = self.build_container_connection.as_ref().ok_or_else(|| { + "No build container connection. 
Call init_build_container first.".to_string() + })?; let dir_strings: Vec = directories .iter() @@ -2607,12 +2609,9 @@ impl SpiderState { "id": format!("persist_{}", conn.channel_id) }); - hyperware_process_lib::hyperapp::websocket::send_text( - conn.channel_id, - request.to_string(), - ) - .await - .map_err(|e| format!("Failed to send persist request: {:?}", e))?; + hyperware_process_lib::hyperapp::websocket::send_text(conn.channel_id, request.to_string()) + .await + .map_err(|e| format!("Failed to send persist request: {:?}", e))?; // Wait for response with zipped directories // Save them appropriately @@ -2635,9 +2634,13 @@ impl SpiderState { // Close WebSocket connection if exists if let Some(conn) = &self.build_container_connection { - hyperware_process_lib::hyperapp::websocket::close(conn.channel_id, 1000, "Done".to_string()) - .await - .ok(); // Ignore errors on close + hyperware_process_lib::hyperapp::websocket::close( + conn.channel_id, + 1000, + "Done".to_string(), + ) + .await + .ok(); // Ignore errors on close } // Get constructor URL from environment or use default @@ -2656,7 +2659,9 @@ impl SpiderState { // Make HTTP request to constructor let response = hyperware_process_lib::hyperapp::http::send_request( hyperware_process_lib::hyperapp::http::Method::POST, - constructor_url.parse().map_err(|e| format!("Invalid URL: {}", e))?, + constructor_url + .parse() + .map_err(|e| format!("Invalid URL: {}", e))?, None, 5000, request_body.to_string().into_bytes(), @@ -2666,14 +2671,22 @@ impl SpiderState { if response.status().as_u16() >= 400 { let error_text = String::from_utf8_lossy(response.body()); - return Err(format!("Constructor error (status {}): {}", response.status().as_u16(), error_text)); + return Err(format!( + "Constructor error (status {}): {}", + response.status().as_u16(), + error_text + )); } // Clear the build container connection self.build_container_connection = None; // Update the build container server to show only init tool - if let 
Some(server) = self.mcp_servers.iter_mut().find(|s| s.id == "build_container") { + if let Some(server) = self + .mcp_servers + .iter_mut() + .find(|s| s.id == "build_container") + { let provider = BuildContainerToolProvider::new("build_container".to_string()); server.tools = provider.get_tools(self); } From cf1eb30c63ff7ff9102839bcb94144e50b11eeb3 Mon Sep 17 00:00:00 2001 From: hosted-fornet Date: Wed, 10 Sep 2025 10:45:14 -0700 Subject: [PATCH 08/70] remove accidental build artifact --- hyperdrive/packages/file-explorer/Cargo.toml | 1 - hyperdrive/packages/file-explorer/explorer/Cargo.toml | 3 --- 2 files changed, 4 deletions(-) diff --git a/hyperdrive/packages/file-explorer/Cargo.toml b/hyperdrive/packages/file-explorer/Cargo.toml index 4aa010ccf..299a6ba1d 100644 --- a/hyperdrive/packages/file-explorer/Cargo.toml +++ b/hyperdrive/packages/file-explorer/Cargo.toml @@ -6,6 +6,5 @@ panic = "abort" [workspace] members = [ "explorer", - "target/caller-utils", ] resolver = "2" diff --git a/hyperdrive/packages/file-explorer/explorer/Cargo.toml b/hyperdrive/packages/file-explorer/explorer/Cargo.toml index 0218351a2..3b36f1446 100644 --- a/hyperdrive/packages/file-explorer/explorer/Cargo.toml +++ b/hyperdrive/packages/file-explorer/explorer/Cargo.toml @@ -7,9 +7,6 @@ serde_urlencoded = "0.7" tracing = "0.1.37" wit-bindgen = "0.42.1" -[dependencies.caller-utils] -path = "../target/caller-utils" - [dependencies.hyperprocess_macro] git = "https://github.com/hyperware-ai/hyperprocess-macro" rev = "9836e2a" From 808ca6beede74d6bd203721694b7677013455790 Mon Sep 17 00:00:00 2001 From: Pax G Date: Thu, 11 Sep 2025 04:00:40 -0700 Subject: [PATCH 09/70] fixed computeProxyAddress --- .../app-store/ui/src/utils/predictTBA.ts | 28 +++++++++++-------- 1 file changed, 16 insertions(+), 12 deletions(-) diff --git a/hyperdrive/packages/app-store/ui/src/utils/predictTBA.ts b/hyperdrive/packages/app-store/ui/src/utils/predictTBA.ts index 46d775277..e98866f84 100644 --- 
a/hyperdrive/packages/app-store/ui/src/utils/predictTBA.ts +++ b/hyperdrive/packages/app-store/ui/src/utils/predictTBA.ts @@ -1,4 +1,4 @@ -import { encodePacked, keccak256, getAddress } from 'viem'; +import { encodePacked, keccak256, getAddress, encodeAbiParameters } from 'viem'; import { hyperhash } from './hyperhash'; const ERC6551_REGISTRY = '0x000000006551c19487814612e58FE06813775758' as const; @@ -55,24 +55,28 @@ function computeProxyAddress( salt: `0x${string}` ): `0x${string}` { // HyperAccountProxy creation code with constructor argument - const PROXY_CREATION_CODE = '0x60a0604052348015600e575f5ffd5b5060405161051d38038061051d833981016040819052602b91603b565b6001600160a01b03166080526066565b5f60208284031215604a575f5ffd5b81516001600160a01b0381168114605f575f5ffd5b9392505050565b6080516104a061007d5f395f607a01526104a05ff3fe608060405260043610610021575f3560e01c8063d1f578941461003257610028565b3661002857005b610030610045565b005b610030610040366004610383565b610057565b610055610050610132565b610169565b565b7f7d0893b5fe6077fb4cf083ec3487b8eece7e03b4ab6e888f7a8a1758010f8c007f00000000000000000000000000000000000000000000000000000000000000006001600160a01b031633146100df57805460ff16156100bf576100ba610045565b6100df565b60405163572190d160e01b81523360048201526024015b60405180910390fd5b805460ff16156101015760405162dc149f60e41b815260040160405180910390fd5b5f61010a610132565b6001600160a01b03160361012d57805460ff1916600117815561012d8383610187565b505050565b5f6101647f360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc546001600160a01b031690565b905090565b365f5f375f5f365f845af43d5f5f3e808015610183573d5ff35b3d5ffd5b610190826101e0565b6040516001600160a01b038316907fbc7cd75a20ee27fd9adebab32041f755214dbc6bffa90cc0225b39da2e5c2d3b905f90a28051156101d45761012d8282610256565b6101dc6102c8565b5050565b806001600160a01b03163b5f0361021557604051634c9c8ce360e01b81526001600160a01b03821660048201526024016100d6565b7f360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc80546001600160a01b0319166001600
160a01b0392909216919091179055565b60605f5f846001600160a01b0316846040516102729190610454565b5f60405180830381855af49150503d805f81146102aa576040519150601f19603f3d011682016040523d82523d5f602084013e6102af565b606091505b50915091506102bf8583836102e7565b95945050505050565b34156100555760405163b398979f60e01b815260040160405180910390fd5b6060826102fc576102f782610346565b61033f565b815115801561031357506001600160a01b0384163b155b1561033c57604051639996b31560e01b81526001600160a01b03851660048201526024016100d6565b50805b9392505050565b8051156103565780518082602001fd5b60405163d6bda27560e01b815260040160405180910390fd5b634e487b7160e01b5f52604160045260245ffd5b5f5f60408385031215610394575f5ffd5b82356001600160a01b03811681146103aa575f5ffd5b9150602083013567ffffffffffffffff8111156103c5575f5ffd5b8301601f810185136103d5575f5ffd5b803567ffffffffffffffff8111156103ef576103ef61036f565b604051601f8201601f19908116603f0116810167ffffffffffffffff8111828210171561041e5761041e61036f565b604052818152828201602001871015610435575f5ffd5b816020840160208301375f602083830101528093505050509250929050565b5f82518060208501845e5f92019182525091905056fea264697066735822122014ea9f00459f92ee24210cb4740336c93386a9e0bc81a68800b4e50d397e23ab64736f6c634300081c0033'; + const PROXY_CREATION_CODE = 
'0x60a0604052348015600e575f5ffd5b5060405161051d38038061051d833981016040819052602b91603b565b6001600160a01b03166080526066565b5f60208284031215604a575f5ffd5b81516001600160a01b0381168114605f575f5ffd5b9392505050565b6080516104a061007d5f395f607a01526104a05ff3fe608060405260043610610021575f3560e01c8063d1f578941461003257610028565b3661002857005b610030610045565b005b610030610040366004610383565b610057565b610055610050610132565b610169565b565b7f7d0893b5fe6077fb4cf083ec3487b8eece7e03b4ab6e888f7a8a1758010f8c007f00000000000000000000000000000000000000000000000000000000000000006001600160a01b031633146100df57805460ff16156100bf576100ba610045565b6100df565b60405163572190d160e01b81523360048201526024015b60405180910390fd5b805460ff16156101015760405162dc149f60e41b815260040160405180910390fd5b5f61010a610132565b6001600160a01b03160361012d57805460ff1916600117815561012d8383610187565b505050565b5f6101647f360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc546001600160a01b031690565b905090565b365f5f375f5f365f845af43d5f5f3e808015610183573d5ff35b3d5ffd5b610190826101e0565b6040516001600160a01b038316907fbc7cd75a20ee27fd9adebab32041f755214dbc6bffa90cc0225b39da2e5c2d3b905f90a28051156101d45761012d8282610256565b6101dc6102c8565b5050565b806001600160a01b03163b5f0361021557604051634c9c8ce360e01b81526001600160a01b03821660048201526024016100d6565b7f360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc80546001600160a01b0319166001600160a01b0392909216919091179055565b60605f5f846001600160a01b0316846040516102729190610454565b5f60405180830381855af49150503d805f81146102aa576040519150601f19603f3d011682016040523d82523d5f602084013e6102af565b606091505b50915091506102bf8583836102e7565b95945050505050565b34156100555760405163b398979f60e01b815260040160405180910390fd5b6060826102fc576102f782610346565b61033f565b815115801561031357506001600160a01b0384163b155b1561033c57604051639996b31560e01b81526001600160a01b03851660048201526024016100d6565b50805b9392505050565b8051156103565780518082602001fd5b60405163d6bda27560e01b815260040160405
180910390fd5b634e487b7160e01b5f52604160045260245ffd5b5f5f60408385031215610394575f5ffd5b82356001600160a01b03811681146103aa575f5ffd5b9150602083013567ffffffffffffffff8111156103c5575f5ffd5b8301601f810185136103d5575f5ffd5b803567ffffffffffffffff8111156103ef576103ef61036f565b604051601f8201601f19908116603f0116810167ffffffffffffffff8111828210171561041e5761041e61036f565b604052818152828201602001871015610435575f5ffd5b816020840160208301375f602083830101528093505050509250929050565b5f82518060208501845e5f92019182525091905056fea26469706673582212205c8437c90a52b26afb62a6e21b8baa0d106dcc547054521f0074dea229fd630f64736f6c634300081c0033'; - const proxyCreationCodeHash = keccak256( - encodePacked( - ['bytes', 'bytes'], - [ - PROXY_CREATION_CODE, - encodePacked(['address'], [hypermapAddr]) - ] - ) + const encodedInit = encodePacked( + ['bytes', 'bytes'], + [ + PROXY_CREATION_CODE, + encodeAbiParameters( + [{ type: 'address' }], + [getAddress(hypermapAddr)] + ) + ] + ); + + const creationHash = keccak256( + encodedInit ); const hash = keccak256( encodePacked( ['bytes1', 'address', 'bytes32', 'bytes32'], - ['0xff', deployer, salt, proxyCreationCodeHash] + ['0xff', deployer, salt, creationHash] ) ); - return getAddress(`0x${hash.slice(-40)}`) as `0x${string}`; } From dfd63f9b38e70a12b66ea163f1f3072de70f5254 Mon Sep 17 00:00:00 2001 From: Pax G Date: Thu, 11 Sep 2025 04:04:33 -0700 Subject: [PATCH 10/70] restored original var names --- .../app-store/ui/src/utils/predictTBA.ts | 26 +++++++++---------- 1 file changed, 12 insertions(+), 14 deletions(-) diff --git a/hyperdrive/packages/app-store/ui/src/utils/predictTBA.ts b/hyperdrive/packages/app-store/ui/src/utils/predictTBA.ts index e98866f84..6dc88ee6a 100644 --- a/hyperdrive/packages/app-store/ui/src/utils/predictTBA.ts +++ b/hyperdrive/packages/app-store/ui/src/utils/predictTBA.ts @@ -57,25 +57,23 @@ function computeProxyAddress( // HyperAccountProxy creation code with constructor argument const PROXY_CREATION_CODE = 
'0x60a0604052348015600e575f5ffd5b5060405161051d38038061051d833981016040819052602b91603b565b6001600160a01b03166080526066565b5f60208284031215604a575f5ffd5b81516001600160a01b0381168114605f575f5ffd5b9392505050565b6080516104a061007d5f395f607a01526104a05ff3fe608060405260043610610021575f3560e01c8063d1f578941461003257610028565b3661002857005b610030610045565b005b610030610040366004610383565b610057565b610055610050610132565b610169565b565b7f7d0893b5fe6077fb4cf083ec3487b8eece7e03b4ab6e888f7a8a1758010f8c007f00000000000000000000000000000000000000000000000000000000000000006001600160a01b031633146100df57805460ff16156100bf576100ba610045565b6100df565b60405163572190d160e01b81523360048201526024015b60405180910390fd5b805460ff16156101015760405162dc149f60e41b815260040160405180910390fd5b5f61010a610132565b6001600160a01b03160361012d57805460ff1916600117815561012d8383610187565b505050565b5f6101647f360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc546001600160a01b031690565b905090565b365f5f375f5f365f845af43d5f5f3e808015610183573d5ff35b3d5ffd5b610190826101e0565b6040516001600160a01b038316907fbc7cd75a20ee27fd9adebab32041f755214dbc6bffa90cc0225b39da2e5c2d3b905f90a28051156101d45761012d8282610256565b6101dc6102c8565b5050565b806001600160a01b03163b5f0361021557604051634c9c8ce360e01b81526001600160a01b03821660048201526024016100d6565b7f360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc80546001600160a01b0319166001600160a01b0392909216919091179055565b60605f5f846001600160a01b0316846040516102729190610454565b5f60405180830381855af49150503d805f81146102aa576040519150601f19603f3d011682016040523d82523d5f602084013e6102af565b606091505b50915091506102bf8583836102e7565b95945050505050565b34156100555760405163b398979f60e01b815260040160405180910390fd5b6060826102fc576102f782610346565b61033f565b815115801561031357506001600160a01b0384163b155b1561033c57604051639996b31560e01b81526001600160a01b03851660048201526024016100d6565b50805b9392505050565b8051156103565780518082602001fd5b60405163d6bda27560e01b815260040160405
180910390fd5b634e487b7160e01b5f52604160045260245ffd5b5f5f60408385031215610394575f5ffd5b82356001600160a01b03811681146103aa575f5ffd5b9150602083013567ffffffffffffffff8111156103c5575f5ffd5b8301601f810185136103d5575f5ffd5b803567ffffffffffffffff8111156103ef576103ef61036f565b604051601f8201601f19908116603f0116810167ffffffffffffffff8111828210171561041e5761041e61036f565b604052818152828201602001871015610435575f5ffd5b816020840160208301375f602083830101528093505050509250929050565b5f82518060208501845e5f92019182525091905056fea26469706673582212205c8437c90a52b26afb62a6e21b8baa0d106dcc547054521f0074dea229fd630f64736f6c634300081c0033'; - const encodedInit = encodePacked( - ['bytes', 'bytes'], - [ - PROXY_CREATION_CODE, - encodeAbiParameters( - [{ type: 'address' }], - [getAddress(hypermapAddr)] - ) - ] - ); - - const creationHash = keccak256( - encodedInit + const proxyCreationCodeHash = keccak256( + encodePacked( + ['bytes', 'bytes'], + [ + PROXY_CREATION_CODE, + encodeAbiParameters( + [{ type: 'address' }], + [getAddress(hypermapAddr)] + ) + ] + ) ); const hash = keccak256( encodePacked( ['bytes1', 'address', 'bytes32', 'bytes32'], - ['0xff', deployer, salt, creationHash] + ['0xff', deployer, salt, proxyCreationCodeHash] ) ); return getAddress(`0x${hash.slice(-40)}`) as `0x${string}`; From b6e57b36347bebf837ec24f0d49d246d823ebc6f Mon Sep 17 00:00:00 2001 From: Pax G Date: Thu, 11 Sep 2025 12:22:40 -0700 Subject: [PATCH 11/70] fixed predict tba address --- .../app-store/ui/src/utils/predictTBA.ts | 80 ++++++++++------- .../src/register-ui/src/utils/predictTBA.ts | 88 +++++++++++-------- 2 files changed, 99 insertions(+), 69 deletions(-) diff --git a/hyperdrive/packages/app-store/ui/src/utils/predictTBA.ts b/hyperdrive/packages/app-store/ui/src/utils/predictTBA.ts index 6dc88ee6a..c849db538 100644 --- a/hyperdrive/packages/app-store/ui/src/utils/predictTBA.ts +++ b/hyperdrive/packages/app-store/ui/src/utils/predictTBA.ts @@ -1,59 +1,73 @@ -import { encodePacked, keccak256, getAddress, 
encodeAbiParameters } from 'viem'; +import { encodePacked, keccak256, getAddress, encodeAbiParameters, type Address, type Hex } from 'viem'; import { hyperhash } from './hyperhash'; const ERC6551_REGISTRY = '0x000000006551c19487814612e58FE06813775758' as const; export function predictTBAAddress( - hypermapAddr: `0x${string}`, + hypermapAddr: Address, label: string, chainId: number = 8453 // Base chain ID -): `0x${string}` { +): Address { // Calculate the namehash for the label const namehash = hyperhash(label); // First compute the proxy address const proxyAddr = computeProxyAddress(hypermapAddr, hypermapAddr, namehash); + console.log("proxyAddr", proxyAddr); + return computeAccount(proxyAddr, namehash, BigInt(chainId), hypermapAddr, BigInt(namehash)); +} - // Implementation bytecode hash for ERC6551 v3 - const ACCOUNT_IMPLEMENTATION_BYTECODE_HASH = keccak256(encodePacked(['string'], ['erc6551:v3:account'])); +function computeAccount( + implementation: Address, + salt: bigint | `0x${string}`, + chainId: bigint, + tokenContract: Address, + tokenId: bigint +): Address { + // ERC-1167 minimal proxy bytecode components + const fullHeader = "0x3d60ad80600a3d3981f3363d3d373d3d3d363d73" as Hex; + const footer = "0x5af43d82803e903d91602b57fd5bf3" as Hex; - // Create the init code for ERC6551 account - const initCode = encodePacked( - ['bytes10', 'address', 'bytes32', 'uint256', 'address', 'uint256'], + const bytecode = encodePacked( + ["bytes", "address", "bytes"], + [fullHeader, implementation, footer] + ); + + // Encode the constructor arguments (salt, chainId, tokenContract, tokenId) + const constructorArgs = encodeAbiParameters( [ - '0x' + '00'.repeat(10), // 10 bytes of zeros for ERC6551 v3 - proxyAddr, // implementation (proxy) - namehash, // salt - BigInt(chainId), // chainId - hypermapAddr, // tokenContract - BigInt(namehash) // tokenId (using namehash as tokenId) - ] + { type: "bytes32" }, + { type: "uint256" }, + { type: "address" }, + { type: "uint256" }, + ], + 
[salt as `0x${string}`, chainId, tokenContract, tokenId] ); - // Compute init code hash for ERC6551 account - const initCodeHash = keccak256( - encodePacked( - ['bytes', 'bytes32'], - [initCode, ACCOUNT_IMPLEMENTATION_BYTECODE_HASH] - ) + // Combine bytecode with constructor arguments to match the exact memory layout + const initCode = encodePacked( + ["bytes", "bytes"], + [bytecode, constructorArgs] ); - // Compute the TBA address using CREATE2 - const hash = keccak256( + // CREATE2 formula + const create2Hash = keccak256( encodePacked( - ['bytes1', 'address', 'bytes32'], - ['0xff', ERC6551_REGISTRY, initCodeHash] + ["bytes1", "address", "bytes32", "bytes32"], + ["0xff" as Hex, ERC6551_REGISTRY, salt as `0x${string}`, keccak256(initCode)] ) ); - return getAddress(`0x${hash.slice(-40)}`) as `0x${string}`; + return getAddress(`0x${create2Hash.slice(-40)}`); + } + function computeProxyAddress( - deployer: `0x${string}`, - hypermapAddr: `0x${string}`, - salt: `0x${string}` -): `0x${string}` { + deployer: Address, + hypermapAddr: Address, + salt: string +): Address { // HyperAccountProxy creation code with constructor argument const PROXY_CREATION_CODE = 
'0x60a0604052348015600e575f5ffd5b5060405161051d38038061051d833981016040819052602b91603b565b6001600160a01b03166080526066565b5f60208284031215604a575f5ffd5b81516001600160a01b0381168114605f575f5ffd5b9392505050565b6080516104a061007d5f395f607a01526104a05ff3fe608060405260043610610021575f3560e01c8063d1f578941461003257610028565b3661002857005b610030610045565b005b610030610040366004610383565b610057565b610055610050610132565b610169565b565b7f7d0893b5fe6077fb4cf083ec3487b8eece7e03b4ab6e888f7a8a1758010f8c007f00000000000000000000000000000000000000000000000000000000000000006001600160a01b031633146100df57805460ff16156100bf576100ba610045565b6100df565b60405163572190d160e01b81523360048201526024015b60405180910390fd5b805460ff16156101015760405162dc149f60e41b815260040160405180910390fd5b5f61010a610132565b6001600160a01b03160361012d57805460ff1916600117815561012d8383610187565b505050565b5f6101647f360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc546001600160a01b031690565b905090565b365f5f375f5f365f845af43d5f5f3e808015610183573d5ff35b3d5ffd5b610190826101e0565b6040516001600160a01b038316907fbc7cd75a20ee27fd9adebab32041f755214dbc6bffa90cc0225b39da2e5c2d3b905f90a28051156101d45761012d8282610256565b6101dc6102c8565b5050565b806001600160a01b03163b5f0361021557604051634c9c8ce360e01b81526001600160a01b03821660048201526024016100d6565b7f360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc80546001600160a01b0319166001600160a01b0392909216919091179055565b60605f5f846001600160a01b0316846040516102729190610454565b5f60405180830381855af49150503d805f81146102aa576040519150601f19603f3d011682016040523d82523d5f602084013e6102af565b606091505b50915091506102bf8583836102e7565b95945050505050565b34156100555760405163b398979f60e01b815260040160405180910390fd5b6060826102fc576102f782610346565b61033f565b815115801561031357506001600160a01b0384163b155b1561033c57604051639996b31560e01b81526001600160a01b03851660048201526024016100d6565b50805b9392505050565b8051156103565780518082602001fd5b60405163d6bda27560e01b815260040160405
180910390fd5b634e487b7160e01b5f52604160045260245ffd5b5f5f60408385031215610394575f5ffd5b82356001600160a01b03811681146103aa575f5ffd5b9150602083013567ffffffffffffffff8111156103c5575f5ffd5b8301601f810185136103d5575f5ffd5b803567ffffffffffffffff8111156103ef576103ef61036f565b604051601f8201601f19908116603f0116810167ffffffffffffffff8111828210171561041e5761041e61036f565b604052818152828201602001871015610435575f5ffd5b816020840160208301375f602083830101528093505050509250929050565b5f82518060208501845e5f92019182525091905056fea26469706673582212205c8437c90a52b26afb62a6e21b8baa0d106dcc547054521f0074dea229fd630f64736f6c634300081c0033'; @@ -73,8 +87,8 @@ function computeProxyAddress( const hash = keccak256( encodePacked( ['bytes1', 'address', 'bytes32', 'bytes32'], - ['0xff', deployer, salt, proxyCreationCodeHash] + ['0xff', deployer, salt as `0x${string}`, proxyCreationCodeHash] ) ); - return getAddress(`0x${hash.slice(-40)}`) as `0x${string}`; -} + return getAddress(`0x${hash.slice(-40)}`) as Address; +} \ No newline at end of file diff --git a/hyperdrive/src/register-ui/src/utils/predictTBA.ts b/hyperdrive/src/register-ui/src/utils/predictTBA.ts index 46d775277..c849db538 100644 --- a/hyperdrive/src/register-ui/src/utils/predictTBA.ts +++ b/hyperdrive/src/register-ui/src/utils/predictTBA.ts @@ -1,68 +1,85 @@ -import { encodePacked, keccak256, getAddress } from 'viem'; +import { encodePacked, keccak256, getAddress, encodeAbiParameters, type Address, type Hex } from 'viem'; import { hyperhash } from './hyperhash'; const ERC6551_REGISTRY = '0x000000006551c19487814612e58FE06813775758' as const; export function predictTBAAddress( - hypermapAddr: `0x${string}`, + hypermapAddr: Address, label: string, chainId: number = 8453 // Base chain ID -): `0x${string}` { +): Address { // Calculate the namehash for the label const namehash = hyperhash(label); // First compute the proxy address const proxyAddr = computeProxyAddress(hypermapAddr, hypermapAddr, namehash); + console.log("proxyAddr", 
proxyAddr); + return computeAccount(proxyAddr, namehash, BigInt(chainId), hypermapAddr, BigInt(namehash)); +} - // Implementation bytecode hash for ERC6551 v3 - const ACCOUNT_IMPLEMENTATION_BYTECODE_HASH = keccak256(encodePacked(['string'], ['erc6551:v3:account'])); +function computeAccount( + implementation: Address, + salt: bigint | `0x${string}`, + chainId: bigint, + tokenContract: Address, + tokenId: bigint +): Address { + // ERC-1167 minimal proxy bytecode components + const fullHeader = "0x3d60ad80600a3d3981f3363d3d373d3d3d363d73" as Hex; + const footer = "0x5af43d82803e903d91602b57fd5bf3" as Hex; - // Create the init code for ERC6551 account - const initCode = encodePacked( - ['bytes10', 'address', 'bytes32', 'uint256', 'address', 'uint256'], + const bytecode = encodePacked( + ["bytes", "address", "bytes"], + [fullHeader, implementation, footer] + ); + + // Encode the constructor arguments (salt, chainId, tokenContract, tokenId) + const constructorArgs = encodeAbiParameters( [ - '0x' + '00'.repeat(10), // 10 bytes of zeros for ERC6551 v3 - proxyAddr, // implementation (proxy) - namehash, // salt - BigInt(chainId), // chainId - hypermapAddr, // tokenContract - BigInt(namehash) // tokenId (using namehash as tokenId) - ] + { type: "bytes32" }, + { type: "uint256" }, + { type: "address" }, + { type: "uint256" }, + ], + [salt as `0x${string}`, chainId, tokenContract, tokenId] ); - // Compute init code hash for ERC6551 account - const initCodeHash = keccak256( - encodePacked( - ['bytes', 'bytes32'], - [initCode, ACCOUNT_IMPLEMENTATION_BYTECODE_HASH] - ) + // Combine bytecode with constructor arguments to match the exact memory layout + const initCode = encodePacked( + ["bytes", "bytes"], + [bytecode, constructorArgs] ); - // Compute the TBA address using CREATE2 - const hash = keccak256( + // CREATE2 formula + const create2Hash = keccak256( encodePacked( - ['bytes1', 'address', 'bytes32'], - ['0xff', ERC6551_REGISTRY, initCodeHash] + ["bytes1", "address", 
"bytes32", "bytes32"], + ["0xff" as Hex, ERC6551_REGISTRY, salt as `0x${string}`, keccak256(initCode)] ) ); - return getAddress(`0x${hash.slice(-40)}`) as `0x${string}`; + return getAddress(`0x${create2Hash.slice(-40)}`); + } + function computeProxyAddress( - deployer: `0x${string}`, - hypermapAddr: `0x${string}`, - salt: `0x${string}` -): `0x${string}` { + deployer: Address, + hypermapAddr: Address, + salt: string +): Address { // HyperAccountProxy creation code with constructor argument - const PROXY_CREATION_CODE = '0x60a0604052348015600e575f5ffd5b5060405161051d38038061051d833981016040819052602b91603b565b6001600160a01b03166080526066565b5f60208284031215604a575f5ffd5b81516001600160a01b0381168114605f575f5ffd5b9392505050565b6080516104a061007d5f395f607a01526104a05ff3fe608060405260043610610021575f3560e01c8063d1f578941461003257610028565b3661002857005b610030610045565b005b610030610040366004610383565b610057565b610055610050610132565b610169565b565b7f7d0893b5fe6077fb4cf083ec3487b8eece7e03b4ab6e888f7a8a1758010f8c007f00000000000000000000000000000000000000000000000000000000000000006001600160a01b031633146100df57805460ff16156100bf576100ba610045565b6100df565b60405163572190d160e01b81523360048201526024015b60405180910390fd5b805460ff16156101015760405162dc149f60e41b815260040160405180910390fd5b5f61010a610132565b6001600160a01b03160361012d57805460ff1916600117815561012d8383610187565b505050565b5f6101647f360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc546001600160a01b031690565b905090565b365f5f375f5f365f845af43d5f5f3e808015610183573d5ff35b3d5ffd5b610190826101e0565b6040516001600160a01b038316907fbc7cd75a20ee27fd9adebab32041f755214dbc6bffa90cc0225b39da2e5c2d3b905f90a28051156101d45761012d8282610256565b6101dc6102c8565b5050565b806001600160a01b03163b5f0361021557604051634c9c8ce360e01b81526001600160a01b03821660048201526024016100d6565b7f360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc80546001600160a01b0319166001600160a01b0392909216919091179055565b60605f5f846001600160a01b0
316846040516102729190610454565b5f60405180830381855af49150503d805f81146102aa576040519150601f19603f3d011682016040523d82523d5f602084013e6102af565b606091505b50915091506102bf8583836102e7565b95945050505050565b34156100555760405163b398979f60e01b815260040160405180910390fd5b6060826102fc576102f782610346565b61033f565b815115801561031357506001600160a01b0384163b155b1561033c57604051639996b31560e01b81526001600160a01b03851660048201526024016100d6565b50805b9392505050565b8051156103565780518082602001fd5b60405163d6bda27560e01b815260040160405180910390fd5b634e487b7160e01b5f52604160045260245ffd5b5f5f60408385031215610394575f5ffd5b82356001600160a01b03811681146103aa575f5ffd5b9150602083013567ffffffffffffffff8111156103c5575f5ffd5b8301601f810185136103d5575f5ffd5b803567ffffffffffffffff8111156103ef576103ef61036f565b604051601f8201601f19908116603f0116810167ffffffffffffffff8111828210171561041e5761041e61036f565b604052818152828201602001871015610435575f5ffd5b816020840160208301375f602083830101528093505050509250929050565b5f82518060208501845e5f92019182525091905056fea264697066735822122014ea9f00459f92ee24210cb4740336c93386a9e0bc81a68800b4e50d397e23ab64736f6c634300081c0033'; + const PROXY_CREATION_CODE = 
'0x60a0604052348015600e575f5ffd5b5060405161051d38038061051d833981016040819052602b91603b565b6001600160a01b03166080526066565b5f60208284031215604a575f5ffd5b81516001600160a01b0381168114605f575f5ffd5b9392505050565b6080516104a061007d5f395f607a01526104a05ff3fe608060405260043610610021575f3560e01c8063d1f578941461003257610028565b3661002857005b610030610045565b005b610030610040366004610383565b610057565b610055610050610132565b610169565b565b7f7d0893b5fe6077fb4cf083ec3487b8eece7e03b4ab6e888f7a8a1758010f8c007f00000000000000000000000000000000000000000000000000000000000000006001600160a01b031633146100df57805460ff16156100bf576100ba610045565b6100df565b60405163572190d160e01b81523360048201526024015b60405180910390fd5b805460ff16156101015760405162dc149f60e41b815260040160405180910390fd5b5f61010a610132565b6001600160a01b03160361012d57805460ff1916600117815561012d8383610187565b505050565b5f6101647f360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc546001600160a01b031690565b905090565b365f5f375f5f365f845af43d5f5f3e808015610183573d5ff35b3d5ffd5b610190826101e0565b6040516001600160a01b038316907fbc7cd75a20ee27fd9adebab32041f755214dbc6bffa90cc0225b39da2e5c2d3b905f90a28051156101d45761012d8282610256565b6101dc6102c8565b5050565b806001600160a01b03163b5f0361021557604051634c9c8ce360e01b81526001600160a01b03821660048201526024016100d6565b7f360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc80546001600160a01b0319166001600160a01b0392909216919091179055565b60605f5f846001600160a01b0316846040516102729190610454565b5f60405180830381855af49150503d805f81146102aa576040519150601f19603f3d011682016040523d82523d5f602084013e6102af565b606091505b50915091506102bf8583836102e7565b95945050505050565b34156100555760405163b398979f60e01b815260040160405180910390fd5b6060826102fc576102f782610346565b61033f565b815115801561031357506001600160a01b0384163b155b1561033c57604051639996b31560e01b81526001600160a01b03851660048201526024016100d6565b50805b9392505050565b8051156103565780518082602001fd5b60405163d6bda27560e01b815260040160405
180910390fd5b634e487b7160e01b5f52604160045260245ffd5b5f5f60408385031215610394575f5ffd5b82356001600160a01b03811681146103aa575f5ffd5b9150602083013567ffffffffffffffff8111156103c5575f5ffd5b8301601f810185136103d5575f5ffd5b803567ffffffffffffffff8111156103ef576103ef61036f565b604051601f8201601f19908116603f0116810167ffffffffffffffff8111828210171561041e5761041e61036f565b604052818152828201602001871015610435575f5ffd5b816020840160208301375f602083830101528093505050509250929050565b5f82518060208501845e5f92019182525091905056fea26469706673582212205c8437c90a52b26afb62a6e21b8baa0d106dcc547054521f0074dea229fd630f64736f6c634300081c0033'; const proxyCreationCodeHash = keccak256( encodePacked( ['bytes', 'bytes'], [ PROXY_CREATION_CODE, - encodePacked(['address'], [hypermapAddr]) + encodeAbiParameters( + [{ type: 'address' }], + [getAddress(hypermapAddr)] + ) ] ) ); @@ -70,9 +87,8 @@ function computeProxyAddress( const hash = keccak256( encodePacked( ['bytes1', 'address', 'bytes32', 'bytes32'], - ['0xff', deployer, salt, proxyCreationCodeHash] + ['0xff', deployer, salt as `0x${string}`, proxyCreationCodeHash] ) ); - - return getAddress(`0x${hash.slice(-40)}`) as `0x${string}`; -} + return getAddress(`0x${hash.slice(-40)}`) as Address; +} \ No newline at end of file From cbf77b7c4bd97c40ebf88d1046983269b68b5b4b Mon Sep 17 00:00:00 2001 From: hosted-fornet Date: Fri, 12 Sep 2025 23:32:56 -0700 Subject: [PATCH 12/70] spider: get phase 2 working well enough for demo: https://files.yael.solutions/250912/IMG_1395.mp4 --- hyperdrive/packages/spider/spider/Cargo.toml | 11 +- hyperdrive/packages/spider/spider/src/lib.rs | 1487 ++++++++++------- .../spider/spider/src/provider/anthropic.rs | 250 ++- .../spider/spider/src/provider/mod.rs | 4 +- .../src/tool_providers/build_container.rs | 1342 ++++++++++++++- .../spider/src/tool_providers/hypergrid.rs | 405 ++++- .../spider/spider/src/tool_providers/mod.rs | 61 +- .../packages/spider/spider/src/types.rs | 114 +- 
hyperdrive/packages/spider/ui/src/App.css | 8 + .../spider/ui/src/components/Chat.tsx | 59 +- .../spider/ui/src/components/Settings.tsx | 79 +- .../packages/spider/ui/src/store/spider.ts | 4 + .../packages/spider/ui/src/utils/api.ts | 2 + 13 files changed, 3067 insertions(+), 759 deletions(-) diff --git a/hyperdrive/packages/spider/spider/Cargo.toml b/hyperdrive/packages/spider/spider/Cargo.toml index c76145793..26b377352 100644 --- a/hyperdrive/packages/spider/spider/Cargo.toml +++ b/hyperdrive/packages/spider/spider/Cargo.toml @@ -15,21 +15,25 @@ version = "0.4" [dependencies.hyperprocess_macro] git = "https://github.com/hyperware-ai/hyperprocess-macro" -rev = "ed99c19" +rev = "66884c0" [dependencies.hyperware-anthropic-sdk] git = "https://github.com/hyperware-ai/hyperware-anthropic-sdk" -rev = "363630c" +rev = "c0cbd5e" [dependencies.hyperware_process_lib] features = ["hyperapp"] git = "https://github.com/hyperware-ai/process_lib" -rev = "753dac3" +rev = "4beff93" [dependencies.serde] features = ["derive"] version = "1.0" +[dependencies.spider_dev_caller_utils] +optional = true +path = "../target/spider-dev-caller-utils" + [dependencies.uuid] features = [ "v4", @@ -38,6 +42,7 @@ features = [ version = "1.4.1" [features] +caller-utils = ["spider_dev_caller_utils"] simulation-mode = [] [lib] diff --git a/hyperdrive/packages/spider/spider/src/lib.rs b/hyperdrive/packages/spider/spider/src/lib.rs index 8adcf7880..27216616f 100644 --- a/hyperdrive/packages/spider/spider/src/lib.rs +++ b/hyperdrive/packages/spider/spider/src/lib.rs @@ -17,7 +17,7 @@ use hyperware_process_lib::{ our, println, Address, LazyLoadBlob, ProcessId, }; #[cfg(not(feature = "simulation-mode"))] -use spider_caller_utils::anthropic_api_key_manager::request_api_key_remote_rpc; +use spider_dev_caller_utils::anthropic_api_key_manager::request_api_key_remote_rpc; mod provider; use provider::create_llm_provider; @@ -26,15 +26,16 @@ mod types; use types::{ AddMcpServerRequest, ApiKey, ApiKeyInfo, 
ChatClient, ChatRequest, ChatResponse, ConfigResponse, ConnectMcpServerRequest, Conversation, ConversationMetadata, CreateSpiderKeyRequest, - DisconnectMcpServerRequest, GetConfigRequest, GetConversationRequest, HypergridConnection, - HypergridMessage, HypergridMessageType, JsonRpcNotification, JsonRpcRequest, - ListApiKeysRequest, ListConversationsRequest, ListMcpServersRequest, ListSpiderKeysRequest, - McpCapabilities, McpClientInfo, McpInitializeParams, McpRequestType, McpServer, - McpServerDetails, McpToolCallParams, McpToolInfo, Message, OAuthExchangeRequest, - OAuthRefreshRequest, OAuthTokenResponse, PendingMcpRequest, ProcessRequest, ProcessResponse, - RemoveApiKeyRequest, RemoveMcpServerRequest, RevokeSpiderKeyRequest, SetApiKeyRequest, - SpiderApiKey, SpiderState, Tool, ToolCall, ToolExecutionResult, ToolResult, TrialNotification, - UpdateConfigRequest, WsClientMessage, WsConnection, WsServerMessage, + DisconnectMcpServerRequest, ErrorResponse, GetConfigRequest, GetConversationRequest, + HypergridConnection, HypergridMessage, HypergridMessageType, JsonRpcNotification, + JsonRpcRequest, ListApiKeysRequest, ListConversationsRequest, ListMcpServersRequest, + ListSpiderKeysRequest, McpCapabilities, McpClientInfo, McpInitializeParams, McpRequestType, + McpServer, McpServerDetails, McpToolCallParams, McpToolInfo, Message, OAuthCodeExchangeRequest, + OAuthExchangeRequest, OAuthRefreshRequest, OAuthRefreshTokenRequest, OAuthTokenResponse, + PendingMcpRequest, ProcessRequest, ProcessResponse, RemoveApiKeyRequest, + RemoveMcpServerRequest, RevokeSpiderKeyRequest, SetApiKeyRequest, SpiderApiKey, SpiderState, + Tool, ToolCall, ToolExecutionResult, ToolResponseContent, ToolResponseContentItem, ToolResult, + TrialNotification, UpdateConfigRequest, WsClientMessage, WsConnection, WsServerMessage, }; mod utils; @@ -45,7 +46,9 @@ use utils::{ mod tool_providers; use tool_providers::{ - build_container::BuildContainerToolProvider, hypergrid::HypergridToolProvider, 
ToolProvider, + build_container::{BuildContainerExt, BuildContainerToolProvider}, + hypergrid::{HypergridExt, HypergridToolProvider}, + ToolProvider, }; const ICON: &str = include_str!("./icon"); @@ -80,7 +83,7 @@ const HYPERGRID: &str = "operator:hypergrid:ware.hypr"; } ], save_config = hyperware_process_lib::hyperapp::SaveOptions::OnDiff, - wit_world = "spider-dot-os-v0" + wit_world = "spider-sys-v0" )] impl SpiderState { #[init] @@ -88,31 +91,90 @@ impl SpiderState { add_to_homepage("Spider", Some(ICON), Some("/"), None); self.default_llm_provider = "anthropic".to_string(); - self.max_tokens = 4096; + self.max_tokens = 32_000; self.temperature = 1.0; + // Only set if empty (preserves existing value from deserialized state) self.next_channel_id = 1000; // Start channel IDs at 1000 let our_node = our().node.clone(); println!("Spider MCP client initialized on node: {}", our_node); - // Register Hypergrid tool provider if not already registered + // Register Build Container tool provider + let build_container_provider = BuildContainerToolProvider::new(); + + // Always register the provider (even if server exists) + self.tool_provider_registry + .register(Box::new(build_container_provider)); + + // Check if build container server exists + let has_build_container = self + .mcp_servers + .iter() + .any(|s| s.transport.transport_type == "build_container"); + + if !has_build_container { + // Create new build container server + let build_container_provider = BuildContainerToolProvider::new(); + let build_container_tools = build_container_provider.get_tools(self); + + let build_container_server = McpServer { + id: "build_container".to_string(), + name: "Build Container".to_string(), + transport: types::TransportConfig { + transport_type: "build_container".to_string(), + command: None, + args: None, + url: None, + hypergrid_token: None, + hypergrid_client_id: None, + hypergrid_node: None, + }, + tools: build_container_tools, + connected: true, // Always mark as connected + 
}; + + self.mcp_servers.push(build_container_server); + println!("Spider: Build Container MCP server initialized"); + } else { + // Server exists, refresh its tools from the provider + println!("Spider: Refreshing Build Container tools on startup"); + + // Get fresh tools from provider + let build_container_provider = BuildContainerToolProvider::new(); + let fresh_tools = build_container_provider.get_tools(self); + + // Update the existing server's tools + if let Some(server) = self + .mcp_servers + .iter_mut() + .find(|s| s.id == "build_container") + { + server.tools = fresh_tools; + println!( + "Spider: Build Container tools refreshed with {} tools", + server.tools.len() + ); + } + } + + // Register Hypergrid tool provider + let hypergrid_provider = HypergridToolProvider::new("hypergrid_default".to_string()); + + // Always register the provider (even if server exists) + self.tool_provider_registry + .register(Box::new(hypergrid_provider)); + + // Check if hypergrid server exists let has_hypergrid = self .mcp_servers .iter() .any(|s| s.transport.transport_type == "hypergrid"); - // Only create the hypergrid MCP server if none exists if !has_hypergrid { - // Register the Hypergrid tool provider + // Create new hypergrid server let hypergrid_provider = HypergridToolProvider::new("hypergrid_default".to_string()); - - // Get ALL tools from the provider (not filtered) let hypergrid_tools = hypergrid_provider.get_tools(self); - // Register the provider for later use - self.tool_provider_registry - .register(Box::new(hypergrid_provider)); - let hypergrid_server = McpServer { id: "hypergrid_default".to_string(), name: "Hypergrid".to_string(), @@ -120,9 +182,7 @@ impl SpiderState { transport_type: "hypergrid".to_string(), command: None, args: None, - url: Some( - "http://localhost:8080/operator:hypergrid:ware.hypr/shim/mcp".to_string(), - ), + url: Some(format!("http://localhost:8080/{HYPERGRID}/shim/mcp")), hypergrid_token: None, hypergrid_client_id: None, hypergrid_node: 
None, @@ -134,7 +194,24 @@ impl SpiderState { self.mcp_servers.push(hypergrid_server); println!("Spider: Hypergrid MCP server initialized (unconfigured)"); } else { - println!("Spider: Hypergrid MCP server already exists, skipping initialization"); + println!("Spider: Refreshing Hypergrid tools on startup"); + + // Get fresh tools from provider + let hypergrid_provider = HypergridToolProvider::new("hypergrid_default".to_string()); + let fresh_tools = hypergrid_provider.get_tools(self); + + // Update the existing server's tools + if let Some(server) = self + .mcp_servers + .iter_mut() + .find(|s| s.id == "hypergrid_default") + { + server.tools = fresh_tools; + println!( + "Spider: Hypergrid tools refreshed with {} tools", + server.tools.len() + ); + } // Restore hypergrid connections for configured servers for server in self.mcp_servers.iter() { @@ -194,37 +271,6 @@ impl SpiderState { } } - // Register Build Container tool provider - let build_container_provider = - BuildContainerToolProvider::new("build_container".to_string()); - - // Get initial tools from the provider (just init_build_container initially) - let build_container_tools = build_container_provider.get_tools(self); - - // Register the provider - self.tool_provider_registry - .register(Box::new(build_container_provider)); - - // Create the build container MCP server - let build_container_server = McpServer { - id: "build_container".to_string(), - name: "Build Container".to_string(), - transport: types::TransportConfig { - transport_type: "build_container".to_string(), - command: None, - args: None, - url: None, - hypergrid_token: None, - hypergrid_client_id: None, - hypergrid_node: None, - }, - tools: build_container_tools, - connected: true, - }; - - self.mcp_servers.push(build_container_server); - println!("Spider: Build Container MCP server initialized"); - // Create an admin Spider key for the GUI with a random suffix for security // Check if admin key already exists (look for keys with admin 
permission and the GUI name) let existing_admin_key = self @@ -266,7 +312,7 @@ impl SpiderState { println!("Auto-reconnecting to MCP server: {}", server_id); // Retry logic with exponential backoff - let max_retries = 3; + let max_retries = 10; let mut retry_delay_ms = 1000u64; // Start with 1 second let mut success = false; @@ -394,6 +440,9 @@ impl SpiderState { }, ); + // Clean up disconnected Build Container MCP connections + self.cleanup_disconnected_build_containers(); + // Send auth success response let response = WsServerMessage::AuthSuccess { message: "Authenticated successfully".to_string(), @@ -579,6 +628,10 @@ impl SpiderState { // Parse the message as JSON let message_str = String::from_utf8(message_bytes).unwrap_or_default(); + println!( + "Spider: Received WebSocket message on channel {}: {}", + channel_id, message_str + ); if let Ok(json_msg) = serde_json::from_str::(&message_str) { self.handle_mcp_message(channel_id, json_msg); } else { @@ -604,6 +657,10 @@ impl SpiderState { server.connected = false; println!("Spider: MCP server {} disconnected", server.name); } + + // Also remove any ws_mcp server that was created for this connection + let ws_mcp_server_id = format!("ws_mcp_{}", channel_id); + self.mcp_servers.retain(|s| s.id != ws_mcp_server_id); } // Clean up any pending requests for this connection @@ -1092,6 +1149,8 @@ impl SpiderState { default_llm_provider: self.default_llm_provider.clone(), max_tokens: self.max_tokens, temperature: self.temperature, + build_container_ws_uri: self.build_container_ws_uri.clone(), + build_container_api_key: self.build_container_api_key.clone(), }) } @@ -1114,6 +1173,46 @@ impl SpiderState { self.temperature = temp; } + // Track if build container settings changed + let mut build_container_changed = false; + + if let Some(uri) = request.build_container_ws_uri { + if self.build_container_ws_uri != uri { + self.build_container_ws_uri = uri; + build_container_changed = true; + } + } + + if let Some(key) = 
request.build_container_api_key { + if self.build_container_api_key != key { + self.build_container_api_key = key; + build_container_changed = true; + } + } + + // If build container settings changed, update the tools list + if build_container_changed { + // Try multiple tool names since the provider has tools with hyphens + let provider = self + .tool_provider_registry + .find_provider_for_tool("init-build-container", self) + .or_else(|| { + self.tool_provider_registry + .find_provider_for_tool("load-project", self) + }); + + if let Some(provider) = provider { + let updated_tools = provider.get_tools(self); + if let Some(server) = self + .mcp_servers + .iter_mut() + .find(|s| s.id == "build_container") + { + server.tools = updated_tools; + } + } + } + Ok("Configuration updated".to_string()) } @@ -1205,14 +1304,14 @@ impl SpiderState { let state = parts.get(1).unwrap_or(&"").to_string(); // Prepare the request body - let body = serde_json::json!({ - "code": code, - "state": state, - "grant_type": "authorization_code", - "client_id": "9d1c250a-e61b-44d9-88ed-5944d1962f5e", - "redirect_uri": "https://console.anthropic.com/oauth/code/callback", - "code_verifier": req.verifier - }); + let body = OAuthCodeExchangeRequest { + code, + state, + grant_type: "authorization_code".to_string(), + client_id: "9d1c250a-e61b-44d9-88ed-5944d1962f5e".to_string(), + redirect_uri: "https://console.anthropic.com/oauth/code/callback".to_string(), + code_verifier: req.verifier, + }; // Prepare headers let mut headers = std::collections::HashMap::new(); @@ -1222,7 +1321,9 @@ impl SpiderState { let url = url::Url::parse("https://console.anthropic.com/v1/oauth/token") .map_err(|e| format!("Invalid URL: {}", e))?; - let body_bytes = body.to_string().into_bytes(); + let body_bytes = serde_json::to_string(&body) + .map_err(|e| format!("Failed to serialize request: {}", e))? 
+ .into_bytes(); let response = send_request_await_response(Method::POST, url, Some(headers), 30000, body_bytes) .await @@ -1258,11 +1359,11 @@ impl SpiderState { use hyperware_process_lib::http::Method; // Prepare the request body - let body = serde_json::json!({ - "grant_type": "refresh_token", - "refresh_token": req.refresh_token, - "client_id": "9d1c250a-e61b-44d9-88ed-5944d1962f5e" - }); + let body = OAuthRefreshTokenRequest { + grant_type: "refresh_token".to_string(), + refresh_token: req.refresh_token, + client_id: "9d1c250a-e61b-44d9-88ed-5944d1962f5e".to_string(), + }; // Prepare headers let mut headers = std::collections::HashMap::new(); @@ -1272,7 +1373,9 @@ impl SpiderState { let url = url::Url::parse("https://console.anthropic.com/v1/oauth/token") .map_err(|e| format!("Invalid URL: {}", e))?; - let body_bytes = body.to_string().into_bytes(); + let body_bytes = serde_json::to_string(&body) + .map_err(|e| format!("Failed to serialize request: {}", e))? + .into_bytes(); let response = send_request_await_response(Method::POST, url, Some(headers), 30000, body_bytes) .await @@ -1330,6 +1433,62 @@ impl SpiderState { .any(|k| k.key == key && k.permissions.contains(&permission.to_string())) } + fn cleanup_disconnected_build_containers(&mut self) { + // Find all ws_mcp_* servers that are disconnected + let disconnected_server_ids: Vec = self + .mcp_servers + .iter() + .filter(|s| { + // Only cleanup ws_mcp_* servers (Build Container connections) + s.id.starts_with("ws_mcp_") && !s.connected + }) + .map(|s| s.id.clone()) + .collect(); + + if !disconnected_server_ids.is_empty() { + println!( + "Spider: Cleaning up {} disconnected Build Container MCP connections", + disconnected_server_ids.len() + ); + + for server_id in disconnected_server_ids { + // Extract channel_id from server_id (format: "ws_mcp_{channel_id}") + if let Some(channel_str) = server_id.strip_prefix("ws_mcp_") { + if let Ok(old_channel_id) = channel_str.parse::() { + // Remove from ws_connections 
if it exists + if self.ws_connections.remove(&old_channel_id).is_some() { + println!( + "Spider: Removed ws_connection for channel {}", + old_channel_id + ); + } + + // Clean up any pending MCP requests for this server + let requests_to_remove: Vec = self + .pending_mcp_requests + .iter() + .filter(|(_, req)| req.server_id == server_id) + .map(|(id, _)| id.clone()) + .collect(); + + for req_id in requests_to_remove { + self.pending_mcp_requests.remove(&req_id); + self.tool_responses.remove(&req_id); + } + } + } + + // Remove the server from mcp_servers list + self.mcp_servers.retain(|s| s.id != server_id); + println!("Spider: Removed Build Container MCP server {}", server_id); + } + + println!("Spider: Build Container cleanup complete"); + } else { + println!("Spider: No disconnected Build Container MCP connections to clean up"); + } + } + // Streaming version of chat for WebSocket clients async fn process_chat_request_with_streaming( &mut self, @@ -1470,22 +1629,6 @@ impl SpiderState { } }; - // Collect available tools from connected MCP servers - let available_tools: Vec = if let Some(ref mcp_server_ids) = request.mcp_servers { - self.mcp_servers - .iter() - .filter(|s| s.connected && mcp_server_ids.contains(&s.id)) - .flat_map(|s| s.tools.clone()) - .collect() - } else { - // Use all connected servers if none specified - self.mcp_servers - .iter() - .filter(|s| s.connected) - .flat_map(|s| s.tools.clone()) - .collect() - }; - // Start the agentic loop - runs indefinitely until the agent stops making tool calls let mut working_messages = request.messages.clone(); let mut iteration_count = 0; @@ -1493,6 +1636,29 @@ impl SpiderState { let response = loop { iteration_count += 1; + // Collect available tools from connected MCP servers - refreshed each iteration + // This ensures newly available tools (e.g., after load-project) are immediately available + let available_tools: Vec = if let Some(ref mcp_server_ids) = request.mcp_servers { + self.mcp_servers + .iter() + 
.filter(|s| { + s.connected && ( + mcp_server_ids.contains(&s.id) || + // If build_container is selected, also include ws_mcp_* servers + (mcp_server_ids.contains(&"build_container".to_string()) && s.id.starts_with("ws_mcp_")) + ) + }) + .flat_map(|s| s.tools.clone()) + .collect() + } else { + // Use all connected servers if none specified + self.mcp_servers + .iter() + .filter(|s| s.connected) + .flat_map(|s| s.tools.clone()) + .collect() + }; + // Check for cancellation if let Some(ch_id) = channel_id { if let Some(cancel_flag) = self.active_chat_cancellation.get(&ch_id) { @@ -1562,12 +1728,20 @@ impl SpiderState { }; // Check if the response contains tool calls + println!("[DEBUG] LLM response received:"); + println!("[DEBUG] - content: {}", llm_response.content); + println!( + "[DEBUG] - has tool_calls_json: {}", + llm_response.tool_calls_json.is_some() + ); + if let Some(ref tool_calls_json) = llm_response.tool_calls_json { // The agent wants to use tools - execute them println!( "Spider: Iteration {} - Agent requested tool calls", iteration_count ); + println!("[DEBUG] Tool calls JSON: {}", tool_calls_json); // Send streaming update for tool calls if let Some(ch_id) = channel_id { @@ -1630,27 +1804,91 @@ impl SpiderState { // Continue the loop - the agent will decide what to do next continue; } else { - // No tool calls - the agent has decided to provide a final response - // Break the loop and return this response + // No tool calls - check if the agent is actually done println!( - "Spider: Iteration {} - Agent provided final response (no tool calls)", + "Spider: Iteration {} - No tool calls, checking if agent is done", iteration_count ); - // Send the final assistant message to the client - if let Some(ch_id) = channel_id { - let msg_update = WsServerMessage::Message { - message: llm_response.clone(), + // Check if response is just a "." - if so, continue immediately + let completion_status = if llm_response.content.trim() == "." 
{ + println!("[DEBUG] Response is just '.', treating as continue"); + "continue".to_string() + } else if llm_provider == "anthropic" { + // Use the same API key that was used for the main request + use crate::provider::AnthropicProvider; + + // The api_key variable already contains the correct key for this conversation + let is_oauth = is_oauth_token(&api_key); + let anthropic_provider = AnthropicProvider::new(api_key.clone(), is_oauth); + + anthropic_provider + .check_tool_loop_completion(&llm_response.content) + .await + } else { + // For non-Anthropic providers, assume done + "done".to_string() + }; + + if completion_status == "continue" { + println!( + "[DEBUG] Agent indicated it wants to continue, sending continue message" + ); + + // Add the assistant's response to messages + working_messages.push(llm_response.clone()); + + // Send the assistant message to the client (but skip if it's just ".") + if let Some(ch_id) = channel_id { + if llm_response.content.trim() != "." { + let msg_update = WsServerMessage::Message { + message: llm_response.clone(), + }; + let json = serde_json::to_string(&msg_update).unwrap(); + send_ws_push( + ch_id, + WsMessageType::Text, + LazyLoadBlob::new(Some("application/json"), json), + ); + } + } + + // Add a continue message and loop + let continue_message = Message { + role: "user".to_string(), + content: "continue".to_string(), + tool_calls_json: None, + tool_results_json: None, + timestamp: Utc::now().timestamp() as u64, }; - let json = serde_json::to_string(&msg_update).unwrap(); - send_ws_push( - ch_id, - WsMessageType::Text, - LazyLoadBlob::new(Some("application/json"), json), + working_messages.push(continue_message); + + // Continue the loop + continue; + } else { + // Agent is done (or error/failed to parse) + println!( + "Spider: Iteration {} - Agent provided final response (completion check: {})", + iteration_count, completion_status ); - } - break llm_response; + // Send the final assistant message to the client (but 
skip if it's just ".") + if let Some(ch_id) = channel_id { + if llm_response.content.trim() != "." { + let msg_update = WsServerMessage::Message { + message: llm_response.clone(), + }; + let json = serde_json::to_string(&msg_update).unwrap(); + send_ws_push( + ch_id, + WsMessageType::Text, + LazyLoadBlob::new(Some("application/json"), json), + ); + } + } + + break llm_response; + } } }; @@ -1721,6 +1959,11 @@ impl SpiderState { } fn handle_mcp_message(&mut self, channel_id: u32, message: Value) { + println!( + "Spider: handle_mcp_message received on channel {}: {:?}", + channel_id, message + ); + // Find the connection for this channel let conn = match self.ws_connections.get(&channel_id) { Some(c) => c.clone(), @@ -1735,7 +1978,37 @@ impl SpiderState { // Check if this is a response to a pending request if let Some(id) = message.get("id").and_then(|v| v.as_str()) { + println!("Spider: Message has id: {}", id); + + // Check if this is a spider/* method response (not in pending_mcp_requests) + // These are direct responses to spider/* methods like load-project, auth, etc. 
+ if id.starts_with("load-project-") + || id.starts_with("start-package-") + || id.starts_with("persist") + || id.starts_with("auth_") + { + println!("Spider: Handling spider/* method response with id: {}", id); + // Store the response for the waiting execute_*_impl method + let result = if let Some(result_value) = message.get("result") { + result_value.clone() + } else if let Some(error) = message.get("error") { + serde_json::to_value(ErrorResponse { + error: error.clone(), + }) + .unwrap_or_else(|_| Value::Null) + } else { + serde_json::to_value(ErrorResponse { + error: Value::String("Invalid response format".to_string()), + }) + .unwrap_or_else(|_| Value::Null) + }; + self.tool_responses.insert(id.to_string(), result); + println!("Spider: Stored response for id {} in tool_responses", id); + return; + } + if let Some(pending) = self.pending_mcp_requests.remove(id) { + println!("Spider: Found pending request for id: {}", id); match pending.request_type { McpRequestType::Initialize => { self.handle_initialize_response(channel_id, &conn, &message); @@ -1747,6 +2020,8 @@ impl SpiderState { self.handle_tool_call_response(&pending, &message); } } + } else { + println!("Spider: No pending request found for id: {}", id); } } @@ -1878,10 +2153,88 @@ impl SpiderState { ws_conn.tools = tools.clone(); } - // Update server with tools and mark as connected - if let Some(server) = self.mcp_servers.iter_mut().find(|s| s.id == conn.server_id) { - server.tools = tools; - server.connected = true; + // For build container connections, we need special handling + if conn.server_id == "build_container_self_hosted" + || conn.server_id.starts_with("build_container_") + { + // Create or update a separate ws-mcp server entry for the remote tools + let ws_mcp_server_id = format!("ws_mcp_{}", channel_id); + + // Check if this ws-mcp server already exists + if let Some(server) = self + .mcp_servers + .iter_mut() + .find(|s| s.id == ws_mcp_server_id) + { + server.tools = tools; + 
server.connected = true; + println!( + "Spider: Updated ws-mcp server {} with {} tools", + ws_mcp_server_id, + server.tools.len() + ); + } else { + // Create a new MCP server entry for ws-mcp tools + let ws_mcp_server = McpServer { + id: ws_mcp_server_id.clone(), + name: "Build Container MCP".to_string(), + transport: crate::types::TransportConfig { + transport_type: "websocket".to_string(), + command: None, + args: None, + url: Some(self.build_container_ws_uri.clone()), + hypergrid_token: None, + hypergrid_client_id: None, + hypergrid_node: None, + }, + tools, + connected: true, + }; + self.mcp_servers.push(ws_mcp_server); + println!( + "Spider: Created new ws-mcp server {} with {} tools", + ws_mcp_server_id, tool_count + ); + } + + // Make sure the build_container server retains its native tools + // by refreshing them from the tool provider + if let Some(provider) = self + .tool_provider_registry + .find_provider_for_tool("load-project", self) + { + let native_tools = provider.get_tools(self); + if let Some(server) = self + .mcp_servers + .iter_mut() + .find(|s| s.id == "build_container") + { + server.tools = native_tools; + server.connected = true; + println!( + "Spider: Refreshed build_container server with {} native tools", + server.tools.len() + ); + } + } + } else { + // For non-build-container connections, update normally + if let Some(server) = + self.mcp_servers.iter_mut().find(|s| s.id == conn.server_id) + { + server.tools = tools; + server.connected = true; + println!( + "Spider: Updated MCP server {} with {} tools", + conn.server_id, + server.tools.len() + ); + } else { + println!( + "Spider: Warning - could not find MCP server with id {}", + conn.server_id + ); + } } } } else if let Some(error) = message.get("error") { @@ -1902,13 +2255,15 @@ impl SpiderState { let result = if let Some(result_value) = message.get("result") { result_value.clone() } else if let Some(error) = message.get("error") { - serde_json::json!({ - "error": error + 
serde_json::to_value(ErrorResponse { + error: error.clone(), }) + .unwrap_or_else(|_| Value::Null) } else { - serde_json::json!({ - "error": "Invalid MCP response format" + serde_json::to_value(ErrorResponse { + error: Value::String("Invalid MCP response format".to_string()), }) + .unwrap_or_else(|_| Value::Null) }; self.tool_responses @@ -1922,6 +2277,113 @@ impl SpiderState { parameters: &Value, conversation_id: Option, ) -> Result { + println!( + "[DEBUG] execute_mcp_tool called with server_id: {}, tool_name: {}", + server_id, tool_name + ); + println!("[DEBUG] parameters: {}", parameters); + println!( + "Spider: Available MCP servers: {:?}", + self.mcp_servers + .iter() + .map(|s| (&s.id, s.connected)) + .collect::>() + ); + + // Special handling for ws_mcp servers (build container WebSocket connections) + if server_id.starts_with("ws_mcp_") { + // Extract channel_id from server_id (format: "ws_mcp_{channel_id}") + let channel_id = server_id + .strip_prefix("ws_mcp_") + .and_then(|s| s.parse::().ok()) + .ok_or_else(|| format!("Invalid ws_mcp server id: {}", server_id))?; + + println!( + "Spider: Looking for WebSocket connection with channel_id {} for server {}", + channel_id, server_id + ); + println!( + "Spider: Available ws_connections: {:?}", + self.ws_connections.keys().collect::>() + ); + + // Verify the connection exists + if !self.ws_connections.contains_key(&channel_id) { + return Err(format!( + "No WebSocket connection found for server {}", + server_id + )); + } + + // Execute via WebSocket using MCP protocol + let request_id = format!("tool_{}_{}", channel_id, Uuid::new_v4()); + let tool_request = JsonRpcRequest { + jsonrpc: "2.0".to_string(), + method: "tools/call".to_string(), + params: Some( + serde_json::to_value(McpToolCallParams { + name: tool_name.to_string(), + arguments: parameters.clone(), + }) + .unwrap(), + ), + id: request_id.clone(), + }; + + // Store pending request + self.pending_mcp_requests.insert( + request_id.clone(), + 
PendingMcpRequest { + request_id: request_id.clone(), + conversation_id, + server_id: server_id.to_string(), + request_type: McpRequestType::ToolCall { + tool_name: tool_name.to_string(), + }, + }, + ); + + // Send the request + let request_json = serde_json::to_string(&tool_request).unwrap(); + let blob = LazyLoadBlob::new(Some("application/json"), request_json.into_bytes()); + send_ws_client_push(channel_id, WsMessageType::Text, blob); + + // Wait for response + let start = std::time::Instant::now(); + let timeout = std::time::Duration::from_secs(30); + + loop { + if start.elapsed() > timeout { + self.pending_mcp_requests.remove(&request_id); + return Err(format!("Tool call timed out: {}", tool_name)); + } + + if let Some(result) = self.tool_responses.remove(&request_id) { + // Parse the MCP result format + if let Some(content) = result.get("content") { + return Ok(serde_json::to_value(ToolExecutionResult { + result: content.clone(), + success: true, + }) + .unwrap()); + } else if let Some(error) = result.get("error") { + return Err(format!("Tool execution failed: {}", error)); + } else { + // Fallback: return the raw result wrapped in ToolExecutionResult + return Ok(serde_json::to_value(ToolExecutionResult { + result: result, + success: true, + }) + .unwrap()); + } + } + + // Sleep briefly before checking again + let _ = hyperware_process_lib::hyperapp::sleep(100).await; + } + } + + // Regular MCP server handling let server = self .mcp_servers .iter() @@ -1938,179 +2400,221 @@ impl SpiderState { // Execute the tool based on transport type match server.transport.transport_type.as_str() { "hypergrid" => { - // Map old tool names to new ones for backward compatibility - let normalized_tool_name = match tool_name { - "authorize" => "hypergrid_authorize", - "search-registry" => "hypergrid_search", - "call-provider" => "hypergrid_call", - name => name, - }; - - // Handle the different hypergrid tools - match normalized_tool_name { - "hypergrid_authorize" => { - 
println!( - "Spider: hypergrid_authorize called for server_id: {}", - server_id - ); - println!(" Parameters received: {:?}", parameters); - - // Update hypergrid credentials - let new_url = parameters - .get("url") - .and_then(|v| v.as_str()) - .ok_or_else(|| "Missing url parameter".to_string())?; - let new_token = parameters - .get("token") - .and_then(|v| v.as_str()) - .ok_or_else(|| "Missing token parameter".to_string())?; - let new_client_id = parameters - .get("client_id") - .and_then(|v| v.as_str()) - .ok_or_else(|| "Missing client_id parameter".to_string())?; - let new_node = parameters - .get("node") - .and_then(|v| v.as_str()) - .ok_or_else(|| "Missing node parameter".to_string())?; - - println!("Spider: Authorizing hypergrid with:"); - println!(" - URL: {}", new_url); - println!(" - Token: {}...", &new_token[..new_token.len().min(20)]); - println!(" - Client ID: {}", new_client_id); - println!(" - Node: {}", new_node); - - // Test new connection - println!("Spider: Testing hypergrid connection..."); - self.test_hypergrid_connection(new_url, new_token, new_client_id) - .await?; - println!("Spider: Connection test successful!"); - - // Create or update the hypergrid connection - let hypergrid_conn = HypergridConnection { - server_id: server_id.to_string(), - url: new_url.to_string(), - token: new_token.to_string(), - client_id: new_client_id.to_string(), - node: new_node.to_string(), - last_retry: Instant::now(), - retry_count: 0, - connected: true, - }; - - self.hypergrid_connections - .insert(server_id.to_string(), hypergrid_conn); - println!("Spider: Stored hypergrid connection in memory"); + // Use the hypergrid tool provider + if let Some(provider) = self + .tool_provider_registry + .find_provider_for_tool(tool_name, self) + { + let command = provider.prepare_execution(tool_name, parameters, self)?; + self.execute_tool_command(command, conversation_id).await + } else { + // Map old tool names to new ones for backward compatibility + let 
normalized_tool_name = match tool_name { + "authorize" => "hypergrid_authorize", + "search-registry" => "hypergrid_search", + "call-provider" => "hypergrid_call", + name => name, + }; - // Update transport config - if let Some(server) = - self.mcp_servers.iter_mut().find(|s| s.id == server_id) - { - println!("Spider: Updating server '{}' transport config", server.name); - server.transport.url = Some(new_url.to_string()); - server.transport.hypergrid_token = Some(new_token.to_string()); - server.transport.hypergrid_client_id = Some(new_client_id.to_string()); - server.transport.hypergrid_node = Some(new_node.to_string()); - println!("Spider: Server transport config updated successfully"); - println!("Spider: State should auto-save due to SaveOptions::OnDiff"); - } else { - println!( - "Spider: WARNING - Could not find server with id: {}", - server_id - ); - } + // Try with normalized name + if let Some(provider) = self + .tool_provider_registry + .find_provider_for_tool(normalized_tool_name, self) + { + let command = + provider.prepare_execution(normalized_tool_name, parameters, self)?; + self.execute_tool_command(command, conversation_id).await + } else { + // Fall back to old implementation for backward compatibility + match normalized_tool_name { + "hypergrid_authorize" => { + println!( + "Spider: hypergrid_authorize called for server_id: {}", + server_id + ); + println!(" Parameters received: {:?}", parameters); + + // Update hypergrid credentials + let new_url = parameters + .get("url") + .and_then(|v| v.as_str()) + .ok_or_else(|| "Missing url parameter".to_string())?; + let new_token = parameters + .get("token") + .and_then(|v| v.as_str()) + .ok_or_else(|| "Missing token parameter".to_string())?; + let new_client_id = parameters + .get("client_id") + .and_then(|v| v.as_str()) + .ok_or_else(|| "Missing client_id parameter".to_string())?; + let new_node = parameters + .get("node") + .and_then(|v| v.as_str()) + .ok_or_else(|| "Missing node 
parameter".to_string())?; + + println!("Spider: Authorizing hypergrid with:"); + println!(" - URL: {}", new_url); + println!(" - Token: {}...", &new_token[..new_token.len().min(20)]); + println!(" - Client ID: {}", new_client_id); + println!(" - Node: {}", new_node); + + // Test new connection + println!("Spider: Testing hypergrid connection..."); + self.test_hypergrid_connection(new_url, new_token, new_client_id) + .await?; + println!("Spider: Connection test successful!"); + + // Create or update the hypergrid connection + let hypergrid_conn = HypergridConnection { + server_id: server_id.to_string(), + url: new_url.to_string(), + token: new_token.to_string(), + client_id: new_client_id.to_string(), + node: new_node.to_string(), + last_retry: Instant::now(), + retry_count: 0, + connected: true, + }; + + self.hypergrid_connections + .insert(server_id.to_string(), hypergrid_conn); + println!("Spider: Stored hypergrid connection in memory"); + + // Update transport config + if let Some(server) = + self.mcp_servers.iter_mut().find(|s| s.id == server_id) + { + println!( + "Spider: Updating server '{}' transport config", + server.name + ); + server.transport.url = Some(new_url.to_string()); + server.transport.hypergrid_token = Some(new_token.to_string()); + server.transport.hypergrid_client_id = + Some(new_client_id.to_string()); + server.transport.hypergrid_node = Some(new_node.to_string()); + println!( + "Spider: Server transport config updated successfully" + ); + println!( + "Spider: State should auto-save due to SaveOptions::OnDiff" + ); + } else { + println!( + "Spider: WARNING - Could not find server with id: {}", + server_id + ); + } - Ok(serde_json::json!({ - "content": [{ - "type": "text", - "text": format!("✅ Successfully authorized! 
Hypergrid is now configured with:\n- Node: {}\n- Client ID: {}\n- URL: {}", new_node, new_client_id, new_url) - }] - })) - } - "hypergrid_search" => { - // Check if configured - let hypergrid_conn = self.hypergrid_connections.get(server_id) + Ok(serde_json::to_value(ToolResponseContent { + content: vec![ToolResponseContentItem { + content_type: "text".to_string(), + text: format!("✅ Successfully authorized! Hypergrid is now configured with:\n- Node: {}\n- Client ID: {}\n- URL: {}", new_node, new_client_id, new_url), + }], + }) + .map_err(|e| format!("Failed to serialize response: {}", e))?) + } + "hypergrid_search" => { + // Check if configured + let hypergrid_conn = self.hypergrid_connections.get(server_id) .ok_or_else(|| "Hypergrid not configured. Please use hypergrid_authorize first with your credentials.".to_string())?; - let query = parameters - .get("query") - .and_then(|v| v.as_str()) - .ok_or_else(|| "Missing query parameter".to_string())?; - - let response = self - .call_hypergrid_api( - &hypergrid_conn.url, - &hypergrid_conn.token, - &hypergrid_conn.client_id, - &HypergridMessage { - request: HypergridMessageType::SearchRegistry( - query.to_string(), - ), - }, - ) - .await?; - - Ok(serde_json::json!({ - "content": [{ - "type": "text", - "text": response - }] - })) - } - "hypergrid_call" => { - // Check if configured - let hypergrid_conn = self.hypergrid_connections.get(server_id) + let query = parameters + .get("query") + .and_then(|v| v.as_str()) + .ok_or_else(|| "Missing query parameter".to_string())?; + + let response = self + .call_hypergrid_api( + &hypergrid_conn.url, + &hypergrid_conn.token, + &hypergrid_conn.client_id, + &HypergridMessage { + request: HypergridMessageType::SearchRegistry( + query.to_string(), + ), + }, + ) + .await?; + + Ok(serde_json::to_value(ToolResponseContent { + content: vec![ToolResponseContentItem { + content_type: "text".to_string(), + text: response, + }], + }) + .map_err(|e| format!("Failed to serialize response: {}", 
e))?) + } + "hypergrid_call" => { + // Check if configured + let hypergrid_conn = self.hypergrid_connections.get(server_id) .ok_or_else(|| "Hypergrid not configured. Please use hypergrid_authorize first with your credentials.".to_string())?; - let provider_id = parameters - .get("providerId") - .and_then(|v| v.as_str()) - .ok_or_else(|| "Missing providerId parameter".to_string())?; - let provider_name = parameters - .get("providerName") - .and_then(|v| v.as_str()) - .ok_or_else(|| "Missing providerName parameter".to_string())?; - // Support both "callArgs" (old) and "arguments" (new) parameter names - let call_args = parameters - .get("arguments") - .or_else(|| parameters.get("callArgs")) - .and_then(|v| v.as_array()) - .ok_or_else(|| "Missing arguments parameter".to_string())?; - - // Convert callArgs to Vec<(String, String)> - let mut arguments = Vec::new(); - for arg in call_args { - if let Some(pair) = arg.as_array() { - if pair.len() == 2 { - if let (Some(key), Some(val)) = - (pair[0].as_str(), pair[1].as_str()) - { - arguments.push((key.to_string(), val.to_string())); + let provider_id = parameters + .get("providerId") + .and_then(|v| v.as_str()) + .ok_or_else(|| "Missing providerId parameter".to_string())?; + let provider_name = parameters + .get("providerName") + .and_then(|v| v.as_str()) + .ok_or_else(|| "Missing providerName parameter".to_string())?; + // Support both "callArgs" (old) and "arguments" (new) parameter names + let call_args = parameters + .get("arguments") + .or_else(|| parameters.get("callArgs")) + .and_then(|v| v.as_array()) + .ok_or_else(|| "Missing arguments parameter".to_string())?; + + // Convert callArgs to Vec<(String, String)> + let mut arguments = Vec::new(); + for arg in call_args { + if let Some(pair) = arg.as_array() { + if pair.len() == 2 { + if let (Some(key), Some(val)) = + (pair[0].as_str(), pair[1].as_str()) + { + arguments.push((key.to_string(), val.to_string())); + } + } } } + + let response = self + .call_hypergrid_api( 
+ &hypergrid_conn.url, + &hypergrid_conn.token, + &hypergrid_conn.client_id, + &HypergridMessage { + request: HypergridMessageType::CallProvider { + provider_id: provider_id.to_string(), + provider_name: provider_name.to_string(), + arguments, + }, + }, + ) + .await?; + + Ok(serde_json::to_value(ToolResponseContent { + content: vec![ToolResponseContentItem { + content_type: "text".to_string(), + text: response, + }], + }) + .map_err(|e| format!("Failed to serialize response: {}", e))?) } + _ => Err(format!("Unknown hypergrid tool: {}", tool_name)), } - - let response = self - .call_hypergrid_api( - &hypergrid_conn.url, - &hypergrid_conn.token, - &hypergrid_conn.client_id, - &HypergridMessage { - request: HypergridMessageType::CallProvider { - provider_id: provider_id.to_string(), - provider_name: provider_name.to_string(), - arguments, - }, - }, - ) - .await?; - - Ok(serde_json::json!({ - "content": [{ - "type": "text", - "text": response - }] - })) } - _ => Err(format!("Unknown hypergrid tool: {}", tool_name)), + } + } + "build_container" => { + // Native build container tools are handled by the tool provider + if let Some(provider) = self + .tool_provider_registry + .find_provider_for_tool(tool_name, self) + { + let command = provider.prepare_execution(tool_name, parameters, self)?; + self.execute_tool_command(command, conversation_id).await + } else { + Err(format!("Unknown build container tool: {}", tool_name)) } } "stdio" | "websocket" => { @@ -2173,14 +2677,20 @@ impl SpiderState { if let Some(response) = self.tool_responses.remove(&request_id) { self.pending_mcp_requests.remove(&request_id); + println!("[DEBUG] Tool response received:"); + println!("[DEBUG] - response: {}", response); + // Parse the MCP result if let Some(content) = response.get("content") { - return Ok(serde_json::to_value(ToolExecutionResult { + let result = serde_json::to_value(ToolExecutionResult { result: content.clone(), success: true, }) - .unwrap()); + .unwrap(); + println!("[DEBUG] 
- returning content result: {}", result); + return Ok(result); } else { + println!("[DEBUG] - returning full response: {}", response); return Ok(response); } } @@ -2203,7 +2713,7 @@ impl SpiderState { // Execute via HTTP // This is a placeholder - actual implementation would make HTTP requests Ok(serde_json::to_value(ToolExecutionResult { - result: serde_json::json!(format!( + result: Value::String(format!( "HTTP execution of {} with params: {}", tool_name, parameters )), @@ -2211,16 +2721,6 @@ impl SpiderState { }) .unwrap()) } - "build_container" => { - // Handle build container tools - match tool_name { - "init_build_container" => self.handle_init_build_container(parameters).await, - "start_package" => self.handle_start_package(parameters).await, - "persist" => self.handle_persist(parameters).await, - "done_build_container" => self.handle_done_build_container(parameters).await, - _ => Err(format!("Unknown build container tool: {}", tool_name)), - } - } _ => Err(format!( "Unsupported transport type: {}", server.transport.transport_type @@ -2233,12 +2733,20 @@ impl SpiderState { tool_calls_json: &str, conversation_id: Option, ) -> Result, String> { + println!("[DEBUG] process_tool_calls called"); + println!("[DEBUG] tool_calls_json: {}", tool_calls_json); + let tool_calls: Vec = serde_json::from_str(tool_calls_json) .map_err(|e| format!("Failed to parse tool calls: {}", e))?; + println!("[DEBUG] Parsed {} tool calls", tool_calls.len()); let mut results = Vec::new(); for tool_call in tool_calls { + println!("[DEBUG] Processing tool call:"); + println!("[DEBUG] - id: {}", tool_call.id); + println!("[DEBUG] - tool_name: {}", tool_call.tool_name); + println!("[DEBUG] - parameters: {}", tool_call.parameters); // Find which MCP server has this tool and get its ID let server_id = self .mcp_servers @@ -2247,6 +2755,7 @@ impl SpiderState { .map(|s| s.id.clone()); let result = if let Some(server_id) = server_id { + println!("[DEBUG] Found tool in server: {}", server_id); 
let params: Value = serde_json::from_str(&tool_call.parameters) .unwrap_or(Value::Object(serde_json::Map::new())); match self @@ -2258,14 +2767,24 @@ impl SpiderState { ) .await { - Ok(res) => res.to_string(), - Err(e) => format!(r#"{{"error":"{}"}}"#, e), + Ok(res) => { + let result_str = res.to_string(); + println!("[DEBUG] Tool execution successful: {}", result_str); + result_str + } + Err(e) => { + let error_str = format!(r#"{{"error":"{}"}}"#, e); + println!("[DEBUG] Tool execution error: {}", error_str); + error_str + } } } else { - format!( + let error_str = format!( r#"{{"error":"Tool {} not found in any connected MCP server"}}"#, tool_call.tool_name - ) + ); + println!("[DEBUG] {}", error_str); + error_str }; results.push(ToolResult { @@ -2274,6 +2793,7 @@ impl SpiderState { }); } + println!("[DEBUG] Returning {} tool results", results.len()); Ok(results) } @@ -2377,325 +2897,74 @@ impl SpiderState { Ok(response_text) } - // Build Container Tool Handlers - async fn handle_init_build_container(&mut self, parameters: &Value) -> Result { - let project_uuid = parameters - .get("project_uuid") - .and_then(|v| v.as_str()) - .ok_or_else(|| "Missing project_uuid parameter".to_string())?; - - let project_name = parameters.get("project_name").and_then(|v| v.as_str()); - - let initial_zip = parameters.get("initial_zip").and_then(|v| v.as_str()); - - let metadata = parameters.get("metadata"); - - // Get constructor URL from environment or use default - let constructor_url = std::env::var("SPIDER_CONSTRUCTOR_URL") - .unwrap_or_else(|_| "http://localhost:8081/init-build-container".to_string()); - - // Prepare request body - let mut request_body = serde_json::json!({ - "uuid": project_uuid - }); - - if let Some(name) = project_name { - request_body["name"] = serde_json::json!(name); - } - - if let Some(zip) = initial_zip { - request_body["initial_zip"] = serde_json::json!(zip); - } - - if let Some(meta) = metadata { - request_body["metadata"] = meta.clone(); - } - - // 
Make HTTP request to constructor - let response = hyperware_process_lib::hyperapp::http::send_request( - hyperware_process_lib::hyperapp::http::Method::POST, - constructor_url - .parse() - .map_err(|e| format!("Invalid URL: {}", e))?, - None, - 5000, - request_body.to_string().into_bytes(), - ) - .await - .map_err(|e| format!("Failed to init build container: {:?}", e))?; - - if response.status().as_u16() >= 400 { - let error_text = String::from_utf8_lossy(response.body()); - return Err(format!( - "Constructor error (status {}): {}", - response.status().as_u16(), - error_text - )); - } - - // Parse response - let response_text = String::from_utf8(response.body().to_vec()) - .map_err(|e| format!("Invalid UTF-8 response: {}", e))?; - - let response_json: Value = serde_json::from_str(&response_text) - .map_err(|e| format!("Failed to parse constructor response: {}", e))?; - - let ws_uri = response_json - .get("ws_uri") - .and_then(|v| v.as_str()) - .ok_or_else(|| "Missing ws_uri in response".to_string())?; - - let api_key = response_json - .get("api_key") - .and_then(|v| v.as_str()) - .ok_or_else(|| "Missing api_key in response".to_string())?; - - // Connect to the build container's ws-mcp server - let channel_id = self.connect_to_build_container_ws(ws_uri, api_key).await?; - - // Store the build container connection - self.build_container_connection = Some(BuildContainerConnection { - project_uuid: project_uuid.to_string(), - ws_uri: ws_uri.to_string(), - api_key: api_key.to_string(), - channel_id, - connected: true, - tools: Vec::new(), - }); - - // Update the build container server to show all tools - if let Some(server) = self - .mcp_servers - .iter_mut() - .find(|s| s.id == "build_container") - { - let provider = BuildContainerToolProvider::new("build_container".to_string()); - server.tools = provider.get_tools(self); - } - - Ok(serde_json::json!({ - "content": [{ - "type": "text", - "text": format!("✅ Build container initialized successfully!\n- Project UUID: 
{}\n- WebSocket URI: {}\n- Container is ready for use", project_uuid, ws_uri) - }] - })) - } - - async fn connect_to_build_container_ws( + // Execute tool commands returned by tool providers + async fn execute_tool_command( &mut self, - ws_uri: &str, - api_key: &str, - ) -> Result { - // Allocate a channel ID - let channel_id = self.next_channel_id; - self.next_channel_id += 1; - - // Parse the WebSocket URI - let parsed_url = hyperware_process_lib::hyperapp::http::Url::parse(ws_uri) - .map_err(|e| format!("Invalid WebSocket URI: {}", e))?; - - // Connect to the WebSocket - hyperware_process_lib::hyperapp::websocket::connect( - channel_id, - parsed_url.clone(), - None, - Vec::new(), - ) - .await - .map_err(|e| format!("Failed to connect to build container WebSocket: {:?}", e))?; - - // Send authentication message - let auth_message = serde_json::json!({ - "method": "spider/authorization", - "params": { - "api_key": api_key - }, - "id": format!("auth_{}", channel_id) - }); - - hyperware_process_lib::hyperapp::websocket::send_text(channel_id, auth_message.to_string()) - .await - .map_err(|e| format!("Failed to send auth message: {:?}", e))?; - - // Wait for auth response (simplified - in production would handle properly) - let _ = hyperware_process_lib::hyperapp::sleep(500).await; - - // Initialize MCP connection - let init_request = JsonRpcRequest { - jsonrpc: "2.0".to_string(), - method: "initialize".to_string(), - params: Some( - serde_json::to_value(McpInitializeParams { - protocol_version: "2024-11-05".to_string(), - client_info: McpClientInfo { - name: "Spider".to_string(), - version: "1.0.0".to_string(), - }, - capabilities: McpCapabilities {}, - }) - .unwrap(), - ), - id: format!("init_{}", channel_id), - }; - - hyperware_process_lib::hyperapp::websocket::send_text( - channel_id, - serde_json::to_string(&init_request).unwrap(), - ) - .await - .map_err(|e| format!("Failed to send initialize request: {:?}", e))?; - - Ok(channel_id) - } - - async fn 
handle_start_package(&mut self, parameters: &Value) -> Result { - let package_dir = parameters - .get("package_dir") - .and_then(|v| v.as_str()) - .ok_or_else(|| "Missing package_dir parameter".to_string())?; - - let conn = self.build_container_connection.as_ref().ok_or_else(|| { - "No build container connection. Call init_build_container first.".to_string() - })?; - - // Send start_package request to ws-mcp - let request = serde_json::json!({ - "jsonrpc": "2.0", - "method": "spider/start_package", - "params": { - "package_dir": package_dir - }, - "id": format!("start_package_{}", conn.channel_id) - }); - - hyperware_process_lib::hyperapp::websocket::send_text(conn.channel_id, request.to_string()) - .await - .map_err(|e| format!("Failed to send start_package request: {:?}", e))?; - - // Wait for response and handle the package deployment - // This would receive the zipped package from ws-mcp and deploy it - // Implementation would follow kit start-package logic - - Ok(serde_json::json!({ - "content": [{ - "type": "text", - "text": format!("✅ Package deployed successfully from: {}", package_dir) - }] - })) - } - - async fn handle_persist(&mut self, parameters: &Value) -> Result { - let directories = parameters - .get("directories") - .and_then(|v| v.as_array()) - .ok_or_else(|| "Missing directories parameter".to_string())?; - - let conn = self.build_container_connection.as_ref().ok_or_else(|| { - "No build container connection. 
Call init_build_container first.".to_string() - })?; - - let dir_strings: Vec = directories - .iter() - .filter_map(|v| v.as_str().map(String::from)) - .collect(); - - // Send persist request to ws-mcp - let request = serde_json::json!({ - "jsonrpc": "2.0", - "method": "spider/persist", - "params": { - "directories": dir_strings - }, - "id": format!("persist_{}", conn.channel_id) - }); - - hyperware_process_lib::hyperapp::websocket::send_text(conn.channel_id, request.to_string()) - .await - .map_err(|e| format!("Failed to send persist request: {:?}", e))?; - - // Wait for response with zipped directories - // Save them appropriately - - Ok(serde_json::json!({ - "content": [{ - "type": "text", - "text": format!("✅ Persisted {} directories successfully", dir_strings.len()) - }] - })) - } - - async fn handle_done_build_container(&mut self, parameters: &Value) -> Result { - let project_uuid = parameters - .get("project_uuid") - .and_then(|v| v.as_str()) - .ok_or_else(|| "Missing project_uuid parameter".to_string())?; - - let metadata = parameters.get("metadata"); - - // Close WebSocket connection if exists - if let Some(conn) = &self.build_container_connection { - hyperware_process_lib::hyperapp::websocket::close( - conn.channel_id, - 1000, - "Done".to_string(), - ) - .await - .ok(); // Ignore errors on close - } - - // Get constructor URL from environment or use default - let constructor_url = std::env::var("SPIDER_CONSTRUCTOR_URL") - .unwrap_or_else(|_| "http://localhost:8081/done-build-container".to_string()); - - // Prepare request body - let mut request_body = serde_json::json!({ - "uuid": project_uuid - }); - - if let Some(meta) = metadata { - request_body["metadata"] = meta.clone(); - } - - // Make HTTP request to constructor - let response = hyperware_process_lib::hyperapp::http::send_request( - hyperware_process_lib::hyperapp::http::Method::POST, - constructor_url - .parse() - .map_err(|e| format!("Invalid URL: {}", e))?, - None, - 5000, - 
request_body.to_string().into_bytes(), - ) - .await - .map_err(|e| format!("Failed to done build container: {:?}", e))?; - - if response.status().as_u16() >= 400 { - let error_text = String::from_utf8_lossy(response.body()); - return Err(format!( - "Constructor error (status {}): {}", - response.status().as_u16(), - error_text - )); - } - - // Clear the build container connection - self.build_container_connection = None; + command: tool_providers::ToolExecutionCommand, + _conversation_id: Option, + ) -> Result { + use tool_providers::ToolExecutionCommand; - // Update the build container server to show only init tool - if let Some(server) = self - .mcp_servers - .iter_mut() - .find(|s| s.id == "build_container") - { - let provider = BuildContainerToolProvider::new("build_container".to_string()); - server.tools = provider.get_tools(self); + match command { + ToolExecutionCommand::InitBuildContainer { metadata } => { + self.execute_init_build_container_impl(metadata).await + } + ToolExecutionCommand::LoadProject { + project_uuid, + name, + initial_zip, + channel_id, + } => { + self.execute_load_project_impl(project_uuid, name, initial_zip, channel_id) + .await + } + ToolExecutionCommand::StartPackage { + channel_id, + package_dir, + } => { + self.execute_start_package_impl(channel_id, package_dir) + .await + } + ToolExecutionCommand::Persist { + channel_id, + directories, + } => self.execute_persist_impl(channel_id, directories).await, + ToolExecutionCommand::GetProjects => { + // Return the project name to UUID mapping as JSON + Ok(serde_json::to_value(&self.project_name_to_uuids) + .map_err(|e| format!("Failed to serialize project mapping: {}", e))?) 
+ } + ToolExecutionCommand::DoneBuildContainer { + metadata, + channel_id, + } => { + self.execute_done_build_container_impl(metadata, channel_id) + .await + } + ToolExecutionCommand::HypergridAuthorize { + server_id, + url, + token, + client_id, + node, + name, + } => { + self.execute_hypergrid_authorize_impl(server_id, url, token, client_id, node, name) + .await + } + ToolExecutionCommand::HypergridSearch { server_id, query } => { + self.execute_hypergrid_search_impl(server_id, query).await + } + ToolExecutionCommand::HypergridCall { + server_id, + provider_id, + provider_name, + call_args, + } => { + self.execute_hypergrid_call_impl(server_id, provider_id, provider_name, call_args) + .await + } + ToolExecutionCommand::DirectResult(result) => result, } - - Ok(serde_json::json!({ - "content": [{ - "type": "text", - "text": format!("✅ Build container for project {} has been torn down successfully", project_uuid) - }] - })) } } diff --git a/hyperdrive/packages/spider/spider/src/provider/anthropic.rs b/hyperdrive/packages/spider/spider/src/provider/anthropic.rs index adab2dbde..3ecdfc3bf 100644 --- a/hyperdrive/packages/spider/spider/src/provider/anthropic.rs +++ b/hyperdrive/packages/spider/spider/src/provider/anthropic.rs @@ -5,22 +5,86 @@ use chrono::Utc; use serde_json::Value; use hyperware_anthropic_sdk::{ - AnthropicClient, Content, CreateMessageRequest, Message as SdkMessage, ResponseContentBlock, - Role, Tool as SdkTool, ToolChoice, + AnthropicClient, CacheControl, Content, ContentBlock, CreateMessageRequest, + Message as SdkMessage, ResponseContentBlock, Role, SystemPromptBlock, Tool as SdkTool, + ToolChoice, }; +use hyperware_process_lib::println; + use crate::provider::LlmProvider; use crate::types::{Message, Tool, ToolCall, ToolResult}; -pub(crate) struct AnthropicProvider { +pub struct AnthropicProvider { api_key: String, is_oauth: bool, } impl AnthropicProvider { - pub(crate) fn new(api_key: String, is_oauth: bool) -> Self { + pub fn new(api_key: 
String, is_oauth: bool) -> Self { Self { api_key, is_oauth } } + + /// Check if the tool loop is actually done by asking Sonnet 4 + pub async fn check_tool_loop_completion(&self, agent_message: &str) -> String { + // Create a specific prompt to check if the agent is done + let prompt = format!( + r#"The following is a response from an LLM agent to serve a user request, possibly after a tool loop. Respond with `done` (and nothing else) if this message seems to imply the agent is finished replying; `continue` (and nothing else) if it seems to imply the agent is not yet done with serving the request; error and one-sentence explanation else. If the agent is asking for input from the user, you must reply `done`. +""" +{} +""""#, + agent_message + ); + + // Create a message to send to Sonnet 4 + let check_message = Message { + role: "user".to_string(), + content: prompt, + tool_calls_json: None, + tool_results_json: None, + timestamp: Utc::now().timestamp() as u64, + }; + + // Use Sonnet 4 specifically for this check + match self + .complete_with_retry( + &[check_message], + &[], + Some("claude-sonnet-4-20250514"), + 100, + 0.0, + ) + .await + { + Ok(response) => { + let response_text = response.content.trim().to_lowercase(); + + // Parse the response + if response_text == "done" { + "done".to_string() + } else if response_text == "continue" { + "continue".to_string() + } else if response_text.starts_with("error") { + println!( + "[DEBUG] Tool loop completion check error: {}", + response_text + ); + "done".to_string() // Behave like done on error + } else { + // Failed to parse - behave like done but log error + println!( + "[DEBUG] Failed to parse tool loop completion check response: {}", + response_text + ); + "done".to_string() + } + } + Err(e) => { + println!("[DEBUG] Error checking tool loop completion: {}", e); + "done".to_string() // Default to done on error + } + } + } } impl LlmProvider for AnthropicProvider { @@ -48,9 +112,11 @@ impl AnthropicProvider { // 
Transform MCP JSON Schema to Anthropic-compatible format fn transform_mcp_to_anthropic_schema(&self, mcp_schema: &Value) -> Value { // Start with basic structure - let mut anthropic_schema = serde_json::json!({ - "type": "object" - }); + let mut anthropic_schema = Value::Object(serde_json::Map::new()); + anthropic_schema + .as_object_mut() + .unwrap() + .insert("type".to_string(), Value::String("object".to_string())); if let Some(t) = mcp_schema.get("type") { anthropic_schema["type"] = t.clone(); @@ -242,10 +308,11 @@ impl AnthropicProvider { AnthropicClient::new(&self.api_key) }; - // Convert our Message format to SDK Message format + // Convert our Message format to SDK Message format with caching on the final message let mut sdk_messages = Vec::new(); + let messages_count = messages.len(); - for msg in messages { + for (index, msg) in messages.iter().enumerate() { let role = match msg.role.as_str() { "user" => Role::User, "assistant" => Role::Assistant, @@ -253,6 +320,9 @@ impl AnthropicProvider { _ => Role::User, }; + // Check if this is the final message + let is_final_message = index == messages_count - 1; + // Handle different message types let content = if let Some(tool_results_json) = &msg.tool_results_json { // Parse tool results and format them for the SDK @@ -267,77 +337,99 @@ impl AnthropicProvider { result.tool_call_id, result.result )); } - Content::Text(result_text) + + // Add cache control to final message + if is_final_message { + Content::Blocks(vec![ContentBlock::Text { + text: result_text, + cache_control: Some(CacheControl::ephemeral()), + }]) + } else { + Content::Text(result_text) + } } else if let Some(_tool_calls_json) = &msg.tool_calls_json { - // For now, include tool calls as text in the message - // The SDK will handle tool use blocks separately - Content::Text(format!("{}\n[Tool calls pending]", msg.content)) + // Add cache control to final message + if is_final_message { + Content::Blocks(vec![ContentBlock::Text { + text: 
msg.content.clone(), + cache_control: Some(CacheControl::ephemeral()), + }]) + } else { + Content::Text(msg.content.clone()) + } } else { - Content::Text(msg.content.clone()) + // Add cache control to final message + if is_final_message { + Content::Blocks(vec![ContentBlock::Text { + text: msg.content.clone(), + cache_control: Some(CacheControl::ephemeral()), + }]) + } else { + Content::Text(msg.content.clone()) + } }; sdk_messages.push(SdkMessage { role, content }); } - // Convert our Tool format to SDK Tool format - let sdk_tools: Vec = tools - .iter() - .map(|tool| { - // Parse the MCP schema from either inputSchema or parameters - let mcp_schema = if let Some(ref input_schema_json) = tool.input_schema_json { - serde_json::from_str::(input_schema_json) - .unwrap_or_else(|_| serde_json::json!({})) - } else { - serde_json::from_str::(&tool.parameters) - .unwrap_or_else(|_| serde_json::json!({})) - }; - - // Transform MCP schema to Anthropic-compatible format - let anthropic_schema = self.transform_mcp_to_anthropic_schema(&mcp_schema); - - // Debug: Log the transformed schema - println!( - "Spider: Tool {} transformed schema: {}", - tool.name, - serde_json::to_string_pretty(&anthropic_schema) - .unwrap_or_else(|_| "error".to_string()) - ); - - // Extract required fields from the transformed schema - let required = anthropic_schema - .get("required") - .and_then(|r| r.as_array()) - .map(|arr| { - arr.iter() - .filter_map(|v| v.as_str().map(String::from)) - .collect() - }) - .unwrap_or_else(Vec::new); - - SdkTool::new( - tool.name.clone(), - tool.description.clone(), - anthropic_schema["properties"].clone(), - required, - None, - //anthropic_schema.get("type").and_then(|v| v.as_str()).map(|s| s.to_string()), - ) - }) - .collect(); + // Convert our Tool format to SDK Tool format with caching on the last tool + let mut sdk_tools: Vec = Vec::new(); + let tools_count = tools.len(); + + for (index, tool) in tools.iter().enumerate() { + // Parse the MCP schema from either 
inputSchema or parameters + let mcp_schema = if let Some(ref input_schema_json) = tool.input_schema_json { + serde_json::from_str::(input_schema_json) + .unwrap_or_else(|_| Value::Object(serde_json::Map::new())) + } else { + serde_json::from_str::(&tool.parameters) + .unwrap_or_else(|_| Value::Object(serde_json::Map::new())) + }; + + // Transform MCP schema to Anthropic-compatible format + let anthropic_schema = self.transform_mcp_to_anthropic_schema(&mcp_schema); + + // Extract required fields from the transformed schema + let required = anthropic_schema + .get("required") + .and_then(|r| r.as_array()) + .map(|arr| { + arr.iter() + .filter_map(|v| v.as_str().map(String::from)) + .collect() + }) + .unwrap_or_else(Vec::new); + + let mut sdk_tool = SdkTool::new( + tool.name.clone(), + tool.description.clone(), + anthropic_schema["properties"].clone(), + required, + None, + //anthropic_schema.get("type").and_then(|v| v.as_str()).map(|s| s.to_string()), + ); + + // Add cache control to the last tool to cache all tool definitions + if index == tools_count - 1 && tools_count > 0 { + sdk_tool = sdk_tool.with_cache_control(CacheControl::ephemeral()); + } + + sdk_tools.push(sdk_tool); + } // Create the request with the specified model or default let model_id = model.unwrap_or("claude-sonnet-4-20250514"); let mut request = CreateMessageRequest::new(model_id, sdk_messages, max_tokens) .with_temperature(temperature); - // Add system prompt for OAuth tokens + // Add system prompt for OAuth tokens with caching if self.is_oauth { - request = - request.with_system("You are Claude Code, Anthropic's official CLI for Claude."); + request = request.with_system_blocks(vec![SystemPromptBlock::text( + "You are Claude Code, Anthropic's official CLI for Claude.", + ) + .with_cache_control(CacheControl::ephemeral())]); } - println!("Tools: {sdk_tools:?}"); - // Add tools if any if !sdk_tools.is_empty() { request = request @@ -348,16 +440,21 @@ impl AnthropicProvider { } // Send the message 
using the SDK - let response = client - .send_message(request) - .await - .map_err(|e| format!("Failed to send message via SDK: {:?}", e))?; + println!("[DEBUG] Sending request to Anthropic API"); + //println!("[DEBUG] Request: {:?}", request); + let response = client.send_message(request).await.map_err(|e| { + println!("[DEBUG] ERROR: Failed to send message via SDK: {:?}", e); + format!("Failed to send message via SDK: {:?}", e) + })?; + + println!("[DEBUG] Received response from Anthropic API"); + println!("[DEBUG] Raw SDK response: {:?}", response); // Convert SDK response back to our Message format let mut content_text = String::new(); let mut tool_calls: Vec = Vec::new(); - for block in &response.content { + for block in response.content.iter() { match block { ResponseContentBlock::Text { text, .. } => { if !content_text.is_empty() { @@ -376,9 +473,16 @@ impl AnthropicProvider { } } - Ok(Message { + // Replace empty content with "." to avoid Anthropic API issues + let final_content = if content_text.is_empty() { + ".".to_string() + } else { + content_text.clone() + }; + + let final_message = Message { role: "assistant".to_string(), - content: content_text, + content: final_content, tool_calls_json: if tool_calls.is_empty() { None } else { @@ -386,6 +490,8 @@ impl AnthropicProvider { }, tool_results_json: None, timestamp: Utc::now().timestamp() as u64, - }) + }; + + Ok(final_message) } } diff --git a/hyperdrive/packages/spider/spider/src/provider/mod.rs b/hyperdrive/packages/spider/spider/src/provider/mod.rs index 4712fb64e..fba53996b 100644 --- a/hyperdrive/packages/spider/spider/src/provider/mod.rs +++ b/hyperdrive/packages/spider/spider/src/provider/mod.rs @@ -3,8 +3,8 @@ use std::pin::Pin; use crate::types::{Message, Tool}; -mod anthropic; -use anthropic::AnthropicProvider; +pub mod anthropic; +pub use anthropic::AnthropicProvider; pub(crate) trait LlmProvider { fn complete<'a>( diff --git 
a/hyperdrive/packages/spider/spider/src/tool_providers/build_container.rs b/hyperdrive/packages/spider/spider/src/tool_providers/build_container.rs index b2a898e14..ac408dbbd 100644 --- a/hyperdrive/packages/spider/spider/src/tool_providers/build_container.rs +++ b/hyperdrive/packages/spider/spider/src/tool_providers/build_container.rs @@ -1,38 +1,68 @@ -use crate::tool_providers::ToolProvider; -use crate::types::{SpiderState, Tool}; +use crate::tool_providers::{ToolExecutionCommand, ToolProvider}; +use crate::types::{ + BuildContainerRequest, InitializeParams, JsonRpcRequest, LoadProjectParams, McpCapabilities, + McpClientInfo, McpRequestType, PendingMcpRequest, PersistParams, SpiderAuthParams, + SpiderAuthRequest, SpiderState, StartPackageParams, Tool, ToolResponseContent, + ToolResponseContentItem, WsConnection, +}; +use hyperware_process_lib::{ + http::{ + client::{open_ws_connection, send_ws_client_push}, + server::WsMessageType, + }, + hyperapp::sleep, + vfs::open_file, + LazyLoadBlob, Request, +}; use serde_json::Value; +use std::collections::HashMap; +use std::time::Duration; +use uuid::Uuid; pub struct BuildContainerToolProvider { provider_id: String, } +const CONSTRUCTOR_SERVER_URL: &str = "http://localhost:8090"; + impl BuildContainerToolProvider { - pub fn new(provider_id: String) -> Self { - Self { provider_id } + pub fn new() -> Self { + Self { + provider_id: "build_container".to_string(), + } } fn create_init_build_container_tool(&self) -> Tool { Tool { - name: "init_build_container".to_string(), - description: "Initialize a build container for remote compilation. 
Returns WebSocket URI and API key for authentication.".to_string(), - parameters: r#"{"type":"object","required":["project_uuid"],"properties":{"project_uuid":{"type":"string","description":"UUID of the project"},"project_name":{"type":"string","description":"Optional name of the project"},"initial_zip":{"type":"string","description":"Optional base64-encoded zipped directory to extract in $HOME"},"metadata":{"type":"object","description":"Additional metadata for the build container"}}}"#.to_string(), - input_schema_json: Some(r#"{"type":"object","required":["project_uuid"],"properties":{"project_uuid":{"type":"string","description":"UUID of the project"},"project_name":{"type":"string","description":"Optional name of the project"},"initial_zip":{"type":"string","description":"Optional base64-encoded zipped directory to extract in $HOME"},"metadata":{"type":"object","description":"Additional metadata for the build container"}}}"#.to_string()), + name: "init-build-container".to_string(), + description: "Initialize a new build container for remote compilation and development (hosted mode only)".to_string(), + parameters: r#"{"type":"object","properties":{"metadata":{"type":"object","description":"Optional metadata about the project (type, estimated duration, etc.)"}}}"#.to_string(), + input_schema_json: Some(r#"{"type":"object","properties":{"metadata":{"type":"object","description":"Optional metadata about the project (type, estimated duration, etc.)"}}}"#.to_string()), + } + } + + fn create_load_project_tool(&self) -> Tool { + Tool { + name: "load-project".to_string(), + description: "Load a project into the build container. Creates a directory at `~/` which should be used as the working directory for all subsequent file operations and development work. 
A project name is required - if the user doesn't specify one explicitly, create a descriptive name based on their input or the project context.".to_string(), + parameters: r#"{"type":"object","required":["name"],"properties":{"project_uuid":{"type":"string","description":"Optional unique identifier for the project"},"name":{"type":"string","description":"Required project name. If user doesn't specify, create a descriptive name based on their input or project context"},"initial_zip":{"type":"string","description":"Optional VFS path to a zip file to extract in container's $HOME/ directory (e.g., /spider:dev.hypr/projects//backup.zip)"}}}"#.to_string(), + input_schema_json: Some(r#"{"type":"object","required":["name"],"properties":{"project_uuid":{"type":"string","description":"Optional unique identifier for the project"},"name":{"type":"string","description":"Required project name. If user doesn't specify, create a descriptive name based on their input or project context"},"initial_zip":{"type":"string","description":"Optional VFS path to a zip file to extract in container's $HOME/ directory (e.g., /spider:dev.hypr/projects//backup.zip)"}}}"#.to_string()), } } fn create_start_package_tool(&self) -> Tool { Tool { - name: "start_package".to_string(), - description: "Deploy a built package to the Hyperware node. Package must be previously built with 'kit build'.".to_string(), - parameters: r#"{"type":"object","required":["package_dir"],"properties":{"package_dir":{"type":"string","description":"Path to the package directory containing the built pkg/ folder"}}}"#.to_string(), - input_schema_json: Some(r#"{"type":"object","required":["package_dir"],"properties":{"package_dir":{"type":"string","description":"Path to the package directory containing the built pkg/ folder"}}}"#.to_string()), + name: "start-package".to_string(), + description: "Deploy a built package from the build container to the Hyperware node. A package is distinguishable by a pkg/ directory inside of it. 
Do not use this tool on the pkg/ directory, but the directory that contains the pkg/".to_string(), + parameters: r#"{"type":"object","required":["package_dir"],"properties":{"package_dir":{"type":"string","description":"Path to the package directory that was built with 'kit build'"}}}"#.to_string(), + input_schema_json: Some(r#"{"type":"object","required":["package_dir"],"properties":{"package_dir":{"type":"string","description":"Path to the package directory that was built with 'kit build'"}}}"#.to_string()), } } fn create_persist_tool(&self) -> Tool { Tool { name: "persist".to_string(), - description: "Persist directories from the build container by zipping and saving them.".to_string(), + description: "Persist directories from the build container by creating a zip file".to_string(), parameters: r#"{"type":"object","required":["directories"],"properties":{"directories":{"type":"array","items":{"type":"string"},"description":"List of directory paths to persist"}}}"#.to_string(), input_schema_json: Some(r#"{"type":"object","required":["directories"],"properties":{"directories":{"type":"array","items":{"type":"string"},"description":"List of directory paths to persist"}}}"#.to_string()), } @@ -40,49 +70,1297 @@ impl BuildContainerToolProvider { fn create_done_build_container_tool(&self) -> Tool { Tool { - name: "done_build_container".to_string(), - description: "Notify that work with the build container is complete and it can be torn down.".to_string(), - parameters: r#"{"type":"object","required":["project_uuid"],"properties":{"project_uuid":{"type":"string","description":"UUID of the project"},"metadata":{"type":"object","description":"Additional metadata about project completion"}}}"#.to_string(), - input_schema_json: Some(r#"{"type":"object","required":["project_uuid"],"properties":{"project_uuid":{"type":"string","description":"UUID of the project"},"metadata":{"type":"object","description":"Additional metadata about project completion"}}}"#.to_string()), + 
name: "done-build-container".to_string(), + description: "Notify that work with the build container is complete and it can be torn down (hosted mode only)".to_string(), + parameters: r#"{"type":"object","properties":{"metadata":{"type":"object","description":"Optional metadata about completion status"}}}"#.to_string(), + input_schema_json: Some(r#"{"type":"object","properties":{"metadata":{"type":"object","description":"Optional metadata about completion status"}}}"#.to_string()), + } + } + + fn create_get_projects_tool(&self) -> Tool { + Tool { + name: "get-projects".to_string(), + description: "Get a mapping of project names to their associated UUIDs".to_string(), + parameters: r#"{"type":"object","properties":{}}"#.to_string(), + input_schema_json: Some(r#"{"type":"object","properties":{}}"#.to_string()), } } } impl ToolProvider for BuildContainerToolProvider { fn get_tools(&self, state: &SpiderState) -> Vec { - let mut tools = vec![self.create_init_build_container_tool()]; + let mut tools = Vec::new(); + + // Always provide get-projects tool + tools.push(self.create_get_projects_tool()); + + // Check if we're in self-hosted mode + let is_self_hosted = + !state.build_container_ws_uri.is_empty() && !state.build_container_api_key.is_empty(); + + if !is_self_hosted { + // Hosted mode: show init_build_container + tools.push(self.create_init_build_container_tool()); + } - // Only show other tools if we have an active build container connection - if state.build_container_connection.is_some() { + // Check if we have an active build container connection + let has_connection = state.ws_connections.values().any(|conn| { + conn.server_id.starts_with("build_container_") + || conn.server_id == "build_container_self_hosted" + }); + + // Always show load_project in self-hosted mode, or if we have a connection in hosted mode + if is_self_hosted { + tools.push(self.create_load_project_tool()); + } else if has_connection { + tools.push(self.create_load_project_tool()); + } + + // 
Show other tools if we have an active build container connection + if has_connection { tools.push(self.create_start_package_tool()); tools.push(self.create_persist_tool()); - tools.push(self.create_done_build_container_tool()); + + if !is_self_hosted { + // Only show done_build_container in hosted mode + tools.push(self.create_done_build_container_tool()); + } } tools } fn should_include_tool(&self, tool_name: &str, state: &SpiderState) -> bool { + let is_self_hosted = + !state.build_container_ws_uri.is_empty() && !state.build_container_api_key.is_empty(); + let has_connection = state + .ws_connections + .values() + .any(|conn| conn.server_id.starts_with("build_container_")); + match tool_name { - "init_build_container" => true, - "start_package" | "persist" | "done_build_container" => { - state.build_container_connection.is_some() - } + "get-projects" => true, // Always available + "init-build-container" => !is_self_hosted, + "load-project" => is_self_hosted || has_connection, + "start-package" | "persist" => has_connection, + "done-build-container" => !is_self_hosted && has_connection, _ => false, } } - fn execute_tool( + fn prepare_execution( &self, - _tool_name: &str, - _parameters: &Value, - _state: &mut SpiderState, - ) -> Result { - // Execution is handled by the main Spider implementation - Err("Tool execution should be handled by the main Spider implementation".to_string()) + tool_name: &str, + parameters: &Value, + state: &SpiderState, + ) -> Result { + match tool_name { + "get-projects" => Ok(ToolExecutionCommand::GetProjects), + "init-build-container" => { + let metadata = parameters.get("metadata").cloned(); + + Ok(ToolExecutionCommand::InitBuildContainer { metadata }) + } + "load-project" => { + let project_uuid = parameters + .get("project_uuid") + .and_then(|v| v.as_str()) + .map(String::from); + + // Name is now required + let name = parameters + .get("name") + .and_then(|v| v.as_str()) + .map(String::from) + .ok_or_else(|| "Project name is 
required. Please provide a descriptive name for the project.".to_string())?; + + let initial_zip = parameters + .get("initial_zip") + .and_then(|v| v.as_str()) + .map(String::from); + + // Check if we need to establish connection for self-hosted mode + let is_self_hosted = !state.build_container_ws_uri.is_empty() + && !state.build_container_api_key.is_empty(); + let channel_id = if is_self_hosted + && !state.ws_connections.values().any(|conn| { + conn.server_id.starts_with("build_container_") + || conn.server_id == "build_container_self_hosted" + }) { + // Need to establish connection first (this will be handled in execute) + None + } else { + // Find existing build container connection + state + .ws_connections + .iter() + .find(|(_, conn)| { + conn.server_id.starts_with("build_container_") + || conn.server_id == "build_container_self_hosted" + }) + .map(|(id, _)| *id) + }; + + Ok(ToolExecutionCommand::LoadProject { + project_uuid, + name, + initial_zip, + channel_id, + }) + } + "start-package" => { + let package_dir = parameters + .get("package_dir") + .and_then(|v| v.as_str()) + .ok_or_else(|| "Missing package_dir parameter".to_string())? + .to_string(); + + // Find the build container connection + let channel_id = state + .ws_connections + .iter() + .find(|(_, conn)| conn.server_id.starts_with("build_container_")) + .map(|(id, _)| *id) + .ok_or_else(|| { + "No build container connection found. Call init-build-container first." 
+ .to_string() + })?; + + Ok(ToolExecutionCommand::StartPackage { + channel_id, + package_dir, + }) + } + "persist" => { + let directories = parameters + .get("directories") + .and_then(|v| v.as_array()) + .ok_or_else(|| "Missing directories parameter".to_string())?; + + let dir_strings: Vec = directories + .iter() + .filter_map(|v| v.as_str().map(String::from)) + .collect(); + + if dir_strings.is_empty() { + return Err("No valid directories provided".to_string()); + } + + // Find the build container connection + let channel_id = state + .ws_connections + .iter() + .find(|(_, conn)| conn.server_id.starts_with("build_container_")) + .map(|(id, _)| *id) + .ok_or_else(|| { + "No build container connection found. Call init-build-container first." + .to_string() + })?; + + Ok(ToolExecutionCommand::Persist { + channel_id, + directories: dir_strings, + }) + } + "done-build-container" => { + let metadata = parameters.get("metadata").cloned(); + + // Find any active build container connection + let channel_id = state + .ws_connections + .iter() + .find(|(_, conn)| conn.server_id.starts_with("build_container_")) + .map(|(id, _)| *id); + + Ok(ToolExecutionCommand::DoneBuildContainer { + metadata, + channel_id, + }) + } + _ => Err(format!("Unknown build container tool: {}", tool_name)), + } } fn get_provider_id(&self) -> &str { &self.provider_id } } + +// Extension trait for build container operations +pub trait BuildContainerExt { + async fn execute_init_build_container_impl( + &mut self, + metadata: Option, + ) -> Result; + async fn execute_load_project_impl( + &mut self, + project_uuid: Option, + name: String, // Now required + initial_zip: Option, + channel_id: Option, + ) -> Result; + async fn execute_start_package_impl( + &mut self, + channel_id: u32, + package_dir: String, + ) -> Result; + async fn execute_persist_impl( + &mut self, + channel_id: u32, + directories: Vec, + ) -> Result; + async fn execute_done_build_container_impl( + &mut self, + metadata: Option, + 
channel_id: Option, + ) -> Result; + async fn connect_to_self_hosted_container(&mut self) -> Result; + fn request_build_container_tools_list(&mut self, channel_id: u32); + fn send_tools_list_request(&mut self, channel_id: u32); + async fn deploy_package_to_app_store( + &self, + package_name: &str, + publisher: &str, + version_hash: &str, + package_zip: &str, + metadata: Value, + ) -> Result<(), String>; +} + +impl BuildContainerExt for SpiderState { + async fn execute_init_build_container_impl( + &mut self, + metadata: Option, + ) -> Result { + use hyperware_process_lib::http::client::send_request_await_response; + use hyperware_process_lib::http::Method; + + // Use hardcoded constructor URL + let constructor_url = format!("{CONSTRUCTOR_SERVER_URL}/init-build-container"); + + // Prepare request body + let body = BuildContainerRequest { + metadata: metadata.clone(), + }; + + // Make HTTP request to constructor + let mut headers = HashMap::new(); + headers.insert("Content-Type".to_string(), "application/json".to_string()); + + let url = url::Url::parse(&constructor_url) + .map_err(|e| format!("Invalid constructor URL: {}", e))?; + + let response = send_request_await_response( + Method::POST, + url, + Some(headers), + 30000, + serde_json::to_string(&body) + .map_err(|e| format!("Failed to serialize request: {}", e))? 
+ .into_bytes(), + ) + .await + .map_err(|e| format!("Failed to initialize build container: {:?}", e))?; + + if !response.status().is_success() { + let error_text = String::from_utf8_lossy(response.body()); + return Err(format!( + "Constructor error (status {}): {}", + response.status(), + error_text + )); + } + + // Parse response + let response_data: Value = serde_json::from_slice(response.body()) + .map_err(|e| format!("Failed to parse constructor response: {}", e))?; + + let ws_uri = response_data + .get("ws_uri") + .and_then(|v| v.as_str()) + .ok_or_else(|| "Missing ws_uri in response".to_string())?; + + let api_key = response_data + .get("api_key") + .and_then(|v| v.as_str()) + .ok_or_else(|| "Missing api_key in response".to_string())?; + + // Generate a unique project UUID since we don't require it anymore + let project_uuid = Uuid::new_v4().to_string(); + + // Connect to the build container's ws-mcp server + let channel_id = self.next_channel_id; + self.next_channel_id += 1; + + // Open WebSocket connection + open_ws_connection(ws_uri.to_string(), None, channel_id) + .await + .map_err(|e| format!("Failed to open WS connection to {ws_uri}: {e}"))?; + + // Store connection info for the build container + let server_id = format!("build_container_{}", project_uuid); + self.ws_connections.insert( + channel_id, + WsConnection { + server_id: server_id.clone(), + server_name: format!("Build Container {}", project_uuid), + channel_id, + tools: Vec::new(), + initialized: false, + }, + ); + + // Send authentication message + let auth_request = SpiderAuthRequest { + jsonrpc: "2.0".to_string(), + method: "spider/authorization".to_string(), + params: SpiderAuthParams { + api_key: api_key.to_string(), + }, + id: format!("auth_{}", channel_id), + }; + + let blob = LazyLoadBlob::new( + None::, + serde_json::to_string(&auth_request) + .map_err(|e| format!("Failed to serialize auth request: {}", e))? 
+ .into_bytes(), + ); + send_ws_client_push(channel_id, WsMessageType::Text, blob); + + // Update build container tools to show additional tools now that we're connected + if let Some(provider) = self + .tool_provider_registry + .find_provider_for_tool("init-build-container", self) + { + let updated_tools = provider.get_tools(self); + if let Some(server) = self + .mcp_servers + .iter_mut() + .find(|s| s.id == "build_container") + { + server.tools = updated_tools; + } + } + + Ok(serde_json::to_value(ToolResponseContent { + content: vec![ToolResponseContentItem { + content_type: "text".to_string(), + text: format!( + "✅ Build container initialized successfully!\n- WebSocket: {}\n- Ready for remote compilation", + ws_uri + ), + }], + }) + .map_err(|e| format!("Failed to serialize response: {}", e))?) + } + + async fn execute_load_project_impl( + &mut self, + project_uuid: Option, + name: String, // Now required + initial_zip: Option, + mut channel_id: Option, + ) -> Result { + // Check if we need to connect to self-hosted container first + let is_self_hosted = + !self.build_container_ws_uri.is_empty() && !self.build_container_api_key.is_empty(); + + if is_self_hosted && channel_id.is_none() { + // Connect to self-hosted container + channel_id = Some(self.connect_to_self_hosted_container().await?); + } + + let channel_id = + channel_id.ok_or_else(|| "No build container connection available".to_string())?; + + // Generate project UUID if not provided + let project_uuid = project_uuid.unwrap_or_else(|| Uuid::new_v4().to_string()); + + // Handle initial_zip - must be a VFS path if provided + let initial_zip_content = if let Some(zip_path) = &initial_zip { + // Validate it's a proper VFS path + if !zip_path.starts_with('/') { + return Err(format!( + "Invalid VFS path '{}'. VFS paths must start with '/' (e.g., /spider:dev.hypr/projects//backup.zip). 
\ + To load a persisted project, first use 'get-projects' to find available projects, \ + then provide the full VFS path to the backup zip file.", + zip_path + )); + } + + // Load the zip file from VFS + match open_file(zip_path, false, None) { + Ok(file) => { + match file.read() { + Ok(data) => { + if data.is_empty() { + return Err(format!( + "The zip file at '{}' exists but is empty. \ + Please ensure the project was properly persisted with the 'persist' tool.", + zip_path + )); + } + // Encode to base64 for transmission + use base64::{engine::general_purpose, Engine as _}; + Some(general_purpose::STANDARD.encode(&data)) + } + Err(e) => { + return Err(format!( + "Failed to read zip file at '{}': {:?}. \ + Please verify the file exists and you have read permissions. \ + Use 'get-projects' to see available projects.", + zip_path, e + )); + } + } + } + Err(e) => { + // Provide helpful suggestions based on the error + let suggestion = if zip_path.contains("/projects/") { + "Use 'get-projects' to list available projects and their UUIDs, \ + then check the VFS for the correct backup.zip path." + } else { + "Make sure the path follows the format: /spider:dev.hypr/projects//backup.zip" + }; + + return Err(format!( + "Cannot open zip file at '{}': {:?}. \ + {}. 
The file may not exist or the path may be incorrect.", + zip_path, e, suggestion + )); + } + } + } else { + None + }; + + // Update project name to UUID mapping (name is now always present) + self.project_name_to_uuids + .entry(name.clone()) + .or_insert_with(Vec::new) + .push(project_uuid.clone()); + println!( + "Spider: Added project '{}' with UUID {}", + name, project_uuid + ); + + // Send spider/load-project request over WebSocket + let request_id = format!("load-project-{}", Uuid::new_v4()); + println!( + "Spider: Sending load-project request with id: {}", + request_id + ); + let request = JsonRpcRequest { + jsonrpc: "2.0".to_string(), + method: "spider/load-project".to_string(), + params: Some( + serde_json::to_value(LoadProjectParams { + project_uuid: project_uuid.clone(), + name: Some(name.clone()), + initial_zip: initial_zip_content, + }) + .map_err(|e| format!("Failed to serialize params: {}", e))?, + ), + id: request_id.clone(), + }; + + let request_json = serde_json::to_string(&request) + .map_err(|e| format!("Failed to serialize request: {}", e))?; + + println!( + "Spider: Sending request to channel {}: {}", + channel_id, request_json + ); + let blob = LazyLoadBlob::new(None::, request_json.into_bytes()); + send_ws_client_push(channel_id, WsMessageType::Text, blob); + + // Wait for response (with timeout) + let start = std::time::Instant::now(); + let timeout = Duration::from_secs(30); + + println!("Spider: Waiting for response with id: {}", request_id); + loop { + if start.elapsed() > timeout { + println!( + "Spider: Timeout waiting for response, tool_responses keys: {:?}", + self.tool_responses.keys().collect::>() + ); + return Err("Timeout waiting for load-project response".to_string()); + } + + if let Some(response) = self.tool_responses.remove(&request_id) { + println!( + "Spider: Found response for id {}: {:?}", + request_id, response + ); + // Check if response contains an error + if let Some(error) = response.get("error") { + return 
Err(format!("Failed to load project: {}", error)); + } + + // Extract project_uuid from response + let returned_uuid = response + .get("project_uuid") + .and_then(|v| v.as_str()) + .unwrap_or(&project_uuid); + + // After successful load-project, ws-mcp may have new tools available + // Send tools/list and wait for the response to ensure tools are updated + println!("Spider: Requesting updated tools list after successful load-project"); + let tools_request_id = format!("tools_refresh_{}", channel_id); + let tools_request = JsonRpcRequest { + jsonrpc: "2.0".to_string(), + method: "tools/list".to_string(), + params: None, + id: tools_request_id.clone(), + }; + + // Store pending request + if let Some(conn) = self.ws_connections.get(&channel_id) { + self.pending_mcp_requests.insert( + tools_request_id.clone(), + PendingMcpRequest { + request_id: tools_request_id.clone(), + conversation_id: None, + server_id: conn.server_id.clone(), + request_type: McpRequestType::ToolsList, + }, + ); + } + + println!( + "Spider: Sending tools/list request with id: {}", + tools_request_id + ); + let blob = LazyLoadBlob::new( + None::, + serde_json::to_string(&tools_request).unwrap().into_bytes(), + ); + send_ws_client_push(channel_id, WsMessageType::Text, blob); + + // Wait for the tools/list response with a short timeout + let tools_start = std::time::Instant::now(); + let tools_timeout = std::time::Duration::from_secs(5); + + println!("Spider: Waiting for tools/list response after load-project"); + loop { + if tools_start.elapsed() > tools_timeout { + println!( + "Spider: Timeout waiting for tools/list response, continuing anyway" + ); + break; // Don't fail, just continue without updated tools + } + + // Check if the tools have been updated (handle_tools_list_response will update them) + // We just need to wait a bit for the response to be processed + if !self.pending_mcp_requests.contains_key(&tools_request_id) { + println!("Spider: Tools list updated successfully"); + break; + } + 
+ // Sleep briefly before checking again + let _ = sleep(100).await; + } + + return Ok(serde_json::to_value(ToolResponseContent { + content: vec![ToolResponseContentItem { + content_type: "text".to_string(), + text: format!( + "✅ Project loaded successfully!\n- UUID: {}\n- Directory created in container", + returned_uuid + ), + }], + }) + .map_err(|e| format!("Failed to serialize response: {}", e))?); + } + + // Sleep briefly before checking again + sleep(100).await; + } + } + + async fn execute_start_package_impl( + &mut self, + channel_id: u32, + package_dir: String, + ) -> Result { + // Send spider/start-package request over WebSocket + let request_id = format!("start-package-{}", Uuid::new_v4()); + let request = JsonRpcRequest { + jsonrpc: "2.0".to_string(), + method: "spider/start-package".to_string(), + params: Some( + serde_json::to_value(StartPackageParams { + package_dir: package_dir.clone(), + }) + .map_err(|e| format!("Failed to serialize params: {}", e))?, + ), + id: request_id.clone(), + }; + + let request_json = serde_json::to_string(&request) + .map_err(|e| format!("Failed to serialize request: {}", e))?; + + let blob = LazyLoadBlob::new(None::, request_json.into_bytes()); + send_ws_client_push(channel_id, WsMessageType::Text, blob); + + // Wait for response (with timeout) + let start = std::time::Instant::now(); + let timeout = Duration::from_secs(30); + + loop { + if start.elapsed() > timeout { + return Err("Timeout waiting for start-package response".to_string()); + } + + if let Some(response) = self.tool_responses.remove(&request_id) { + // Check if response contains an error + if let Some(error) = response.get("error") { + return Err(format!("Failed to start package: {}", error)); + } + + // Extract package_zip from response + let Some(package_zip) = response.get("package_zip").and_then(|v| v.as_str()) else { + return Err("No package_zip in response".to_string()); + }; + + // Extract metadata fields from response + let package_name = response + 
.get("package_name") + .and_then(|v| v.as_str()) + .ok_or_else(|| "No package_name in response".to_string())?; + + let our_node = hyperware_process_lib::our().node.clone(); + let publisher = response + .get("publisher") + .and_then(|v| v.as_str()) + .unwrap_or(&our_node); + + let version_hash = response + .get("version_hash") + .and_then(|v| v.as_str()) + .ok_or_else(|| "No version_hash in response".to_string())?; + + // Get the full metadata object from response + let metadata = response + .get("metadata") + .ok_or_else(|| "No metadata in response".to_string())?; + + // Deploy the package to the Hyperware node using app-store + match self + .deploy_package_to_app_store( + package_name, + publisher, + version_hash, + package_zip, + metadata.clone(), + ) + .await + { + Ok(_) => { + return Ok(serde_json::to_value(ToolResponseContent { + content: vec![ToolResponseContentItem { + content_type: "text".to_string(), + text: format!( + "✅ Package '{}' from {} deployed and installed successfully!\n- Publisher: {}\n- Version hash: {}", + package_name, + package_dir, + publisher, + &version_hash[..8] // Show first 8 chars of hash + ), + }], + }) + .map_err(|e| format!("Failed to serialize response: {}", e))?); + } + Err(e) => { + return Err(format!("Failed to deploy package: {}", e)); + } + } + } + + // Sleep briefly before checking again + sleep(100).await; + } + } + + async fn execute_persist_impl( + &mut self, + channel_id: u32, + directories: Vec, + ) -> Result { + use hyperware_process_lib::{ + our, + vfs::{create_drive, open_dir, open_file}, + }; + + // Send spider/persist request over WebSocket + let request_id = format!("persist_{}", Uuid::new_v4()); + let request = JsonRpcRequest { + jsonrpc: "2.0".to_string(), + method: "spider/persist".to_string(), + params: Some( + serde_json::to_value(PersistParams { + directories: directories.clone(), + }) + .map_err(|e| format!("Failed to serialize params: {}", e))?, + ), + id: request_id.clone(), + }; + + let request_json = 
serde_json::to_string(&request) + .map_err(|e| format!("Failed to serialize request: {}", e))?; + + let blob = LazyLoadBlob::new(None::, request_json.into_bytes()); + send_ws_client_push(channel_id, WsMessageType::Text, blob); + + // Wait for response (with timeout) + let start = std::time::Instant::now(); + let timeout = Duration::from_secs(30); + + loop { + if start.elapsed() > timeout { + return Err("Timeout waiting for persist response".to_string()); + } + + if let Some(response) = self.tool_responses.remove(&request_id) { + // Check if response contains persisted_zip + if let Some(persisted_zip) = response.get("persisted_zip").and_then(|v| v.as_str()) + { + // Get project_uuid from response or generate one + let project_uuid = response + .get("project_uuid") + .and_then(|v| v.as_str()) + .unwrap_or_else(|| { + // If no project_uuid in response, try to extract from the first directory path + // Assuming directories are like /home/user//... + directories + .first() + .and_then(|dir| { + let parts: Vec<&str> = dir.split('/').collect(); + // Look for a UUID-like string in the path + parts + .iter() + .find(|part| part.len() == 36 && part.contains('-')) + .copied() + }) + .unwrap_or("unknown") + }) + .to_string(); + + // Create projects drive if it doesn't exist + let projects_drive = match create_drive(our().package_id(), "projects", None) { + Ok(drive_path) => drive_path, + Err(e) => { + println!("Warning: Failed to create projects drive: {:?}", e); + // Still return success but without saving to VFS + return Ok(serde_json::to_value(ToolResponseContent { + content: vec![ToolResponseContentItem { + content_type: "text".to_string(), + text: format!( + "✅ Persisted {} directories successfully! 
(Note: Could not save backup to VFS)", + directories.len() + ), + }], + }) + .map_err(|e| format!("Failed to serialize response: {}", e))?); + } + }; + + // Create project-specific directory path + let project_dir = format!("{}/{}", projects_drive, project_uuid); + + // Create the project directory + match open_dir(&project_dir, true, None) { + Ok(_) => { + println!("Spider: Created/opened project directory: {}", project_dir); + } + Err(e) => { + println!( + "Warning: Failed to create project directory {}: {:?}", + project_dir, e + ); + // Still try to continue - maybe we can write files directly + } + } + + // Generate timestamp for the zip file + let timestamp = chrono::Utc::now().format("%Y%m%d-%H%M%S").to_string(); + let zip_filename = format!("{}/backup-{}.zip", project_dir, timestamp); + let manifest_filename = format!("{}/manifest-{}.json", project_dir, timestamp); + + // Decode the base64 zip data + use base64::{engine::general_purpose, Engine as _}; + let zip_bytes = general_purpose::STANDARD + .decode(persisted_zip) + .map_err(|e| format!("Failed to decode base64 zip: {}", e))?; + + // Save the zip file + match open_file(&zip_filename, true, None) { + Ok(file) => { + file.write(&zip_bytes) + .map_err(|e| format!("Failed to write zip file: {:?}", e))?; + println!("Saved project backup to: {}", zip_filename); + } + Err(e) => { + println!("Warning: Failed to save zip file: {:?}", e); + } + } + + // Create and save manifest + let manifest = serde_json::json!({ + "project_uuid": project_uuid, + "timestamp": timestamp, + "directories": directories, + "zip_file": zip_filename, + "size_bytes": zip_bytes.len(), + }); + + let manifest_json = serde_json::to_string_pretty(&manifest) + .map_err(|e| format!("Failed to serialize manifest: {}", e))?; + + match open_file(&manifest_filename, true, None) { + Ok(file) => { + file.write(manifest_json.as_bytes()) + .map_err(|e| format!("Failed to write manifest: {:?}", e))?; + println!("Saved manifest to: {}", 
manifest_filename); + } + Err(e) => { + println!("Warning: Failed to save manifest: {:?}", e); + } + } + + return Ok(serde_json::to_value(ToolResponseContent { + content: vec![ToolResponseContentItem { + content_type: "text".to_string(), + text: format!( + "✅ Persisted {} directories successfully!\n📁 Project: {}\n💾 Backup: {}\n📝 Manifest: {}", + directories.len(), + project_uuid, + zip_filename, + manifest_filename + ), + }], + }) + .map_err(|e| format!("Failed to serialize response: {}", e))?); + } else if let Some(error) = response.get("error") { + return Err(format!("Failed to persist directories: {}", error)); + } else { + return Err("Invalid response from persist operation".to_string()); + } + } + + // Sleep briefly before checking again + let _ = sleep(100).await; + } + } + + async fn execute_done_build_container_impl( + &mut self, + metadata: Option, + channel_id: Option, + ) -> Result { + use hyperware_process_lib::http::client::send_request_await_response; + use hyperware_process_lib::http::Method; + + if let Some(channel_id) = channel_id { + // Get server_id before removing the connection + let server_id = self + .ws_connections + .get(&channel_id) + .map(|conn| conn.server_id.clone()); + + // Send close message + send_ws_client_push(channel_id, WsMessageType::Close, LazyLoadBlob::default()); + + // Remove the connection + self.ws_connections.remove(&channel_id); + + // Clean up any pending requests for this connection + if let Some(sid) = server_id { + self.pending_mcp_requests + .retain(|_, req| req.server_id != sid); + } + } + + // Use hardcoded constructor URL + let constructor_url = format!("{CONSTRUCTOR_SERVER_URL}/done-build-container"); + + // Prepare request body + let body = BuildContainerRequest { + metadata: metadata.clone(), + }; + + // Make HTTP request to constructor + let mut headers = HashMap::new(); + headers.insert("Content-Type".to_string(), "application/json".to_string()); + + let url = url::Url::parse(&constructor_url) + .map_err(|e| 
format!("Invalid constructor URL: {}", e))?; + + let response = send_request_await_response( + Method::POST, + url, + Some(headers), + 30000, + serde_json::to_string(&body) + .map_err(|e| format!("Failed to serialize request: {}", e))? + .into_bytes(), + ) + .await + .map_err(|e| format!("Failed to tear down build container: {:?}", e))?; + + if !response.status().is_success() { + let error_text = String::from_utf8_lossy(response.body()); + return Err(format!( + "Constructor error (status {}): {}", + response.status(), + error_text + )); + } + + // Update build container tools to hide additional tools now that we're disconnected + if let Some(provider) = self + .tool_provider_registry + .find_provider_for_tool("init-build-container", self) + { + let updated_tools = provider.get_tools(self); + if let Some(server) = self + .mcp_servers + .iter_mut() + .find(|s| s.id == "build_container") + { + server.tools = updated_tools; + } + } + + Ok(serde_json::to_value(ToolResponseContent { + content: vec![ToolResponseContentItem { + content_type: "text".to_string(), + text: "✅ Build container has been torn down successfully!".to_string(), + }], + }) + .map_err(|e| format!("Failed to serialize response: {}", e))?) 
+ } + + async fn connect_to_self_hosted_container(&mut self) -> Result { + // Check if we already have a connection to self-hosted container + if let Some((channel_id, _)) = self + .ws_connections + .iter() + .find(|(_, conn)| conn.server_id == "build_container_self_hosted") + { + println!( + "Spider: Reusing existing self-hosted build container connection on channel {}", + channel_id + ); + return Ok(*channel_id); + } + + // Connect to self-hosted container using configured WS URI and API key + let channel_id = self.next_channel_id; + self.next_channel_id += 1; + + println!( + "Spider: Opening new WebSocket connection to self-hosted build container on channel {}", + channel_id + ); + + // Open WebSocket connection + open_ws_connection(self.build_container_ws_uri.clone(), None, channel_id) + .await + .map_err(|e| { + format!( + "Failed to open WS connection to {}: {e}", + self.build_container_ws_uri + ) + })?; + + // Store connection info for the build container + let server_id = "build_container_self_hosted".to_string(); + self.ws_connections.insert( + channel_id, + WsConnection { + server_id: server_id.clone(), + server_name: "Self-Hosted Build Container".to_string(), + channel_id, + tools: Vec::new(), + initialized: false, + }, + ); + + // Send authentication message + let auth_id = format!("auth_{}", channel_id); + let auth_request = SpiderAuthRequest { + jsonrpc: "2.0".to_string(), + method: "spider/authorization".to_string(), + params: SpiderAuthParams { + api_key: self.build_container_api_key.clone(), + }, + id: auth_id.clone(), + }; + + println!("Spider: Sending authorization request with id: {}", auth_id); + let blob = LazyLoadBlob::new( + None::, + serde_json::to_string(&auth_request) + .map_err(|e| format!("Failed to serialize auth request: {}", e))? 
+ .into_bytes(), + ); + send_ws_client_push(channel_id, WsMessageType::Text, blob); + + // Wait for authentication response + let start = std::time::Instant::now(); + let timeout = Duration::from_secs(10); + + println!("Spider: Waiting for authorization response..."); + loop { + if start.elapsed() > timeout { + println!("Spider: Authorization timeout after 10 seconds"); + return Err("Timeout waiting for authorization response".to_string()); + } + + if let Some(response) = self.tool_responses.remove(&auth_id) { + println!("Spider: Got authorization response: {:?}", response); + if response.get("status").and_then(|s| s.as_str()) == Some("authenticated") { + println!("Spider: Successfully authenticated with self-hosted build container"); + break; + } else if let Some(error) = response.get("error") { + return Err(format!("Authorization failed: {}", error)); + } else { + return Err("Invalid authorization response".to_string()); + } + } + + // Sleep briefly before checking again + let _ = sleep(100).await; + } + + // Now send initialize request after successful authentication + println!("Spider: Sending initialize request after successful authentication"); + self.request_build_container_tools_list(channel_id); + + // Update build container tools to show additional tools now that we're connected + if let Some(provider) = self + .tool_provider_registry + .find_provider_for_tool("load-project", self) + { + let updated_tools = provider.get_tools(self); + if let Some(server) = self + .mcp_servers + .iter_mut() + .find(|s| s.id == "build_container") + { + server.tools = updated_tools; + } + } + + Ok(channel_id) + } + + fn request_build_container_tools_list(&mut self, channel_id: u32) { + use crate::types::{McpRequestType, PendingMcpRequest}; + + // First send initialize request + let init_request_id = format!("init_build_container_{}", channel_id); + let init_request = JsonRpcRequest { + jsonrpc: "2.0".to_string(), + method: "initialize".to_string(), + params: Some( + 
serde_json::to_value(InitializeParams { + protocol_version: "2024-11-05".to_string(), + client_info: McpClientInfo { + name: "spider".to_string(), + version: "1.0.0".to_string(), + }, + capabilities: McpCapabilities {}, + }) + .unwrap_or_else(|_| Value::Null), + ), + id: init_request_id.clone(), + }; + + // Store pending request for initialize + if let Some(conn) = self.ws_connections.get(&channel_id) { + self.pending_mcp_requests.insert( + init_request_id.clone(), + PendingMcpRequest { + request_id: init_request_id.clone(), + conversation_id: None, + server_id: conn.server_id.clone(), + request_type: McpRequestType::Initialize, + }, + ); + } + + println!( + "Spider: Sending initialize request with id: {}", + init_request_id + ); + let init_blob = LazyLoadBlob::new( + None::, + serde_json::to_string(&init_request).unwrap().into_bytes(), + ); + send_ws_client_push(channel_id, WsMessageType::Text, init_blob); + + // Note: The actual tools/list request will be sent when we receive the initialize response + // This is handled in handle_initialize_response in lib.rs which calls request_tools_list + } + + fn send_tools_list_request(&mut self, channel_id: u32) { + use crate::types::{McpRequestType, PendingMcpRequest}; + + let request_id = format!("tools_refresh_{}", channel_id); + let tools_request = JsonRpcRequest { + jsonrpc: "2.0".to_string(), + method: "tools/list".to_string(), + params: None, + id: request_id.clone(), + }; + + // Store pending request + if let Some(conn) = self.ws_connections.get(&channel_id) { + self.pending_mcp_requests.insert( + request_id.clone(), + PendingMcpRequest { + request_id: request_id.clone(), + conversation_id: None, + server_id: conn.server_id.clone(), + request_type: McpRequestType::ToolsList, + }, + ); + } + + println!("Spider: Sending tools/list request with id: {}", request_id); + let blob = LazyLoadBlob::new( + None::, + serde_json::to_string(&tools_request).unwrap().into_bytes(), + ); + send_ws_client_push(channel_id, 
WsMessageType::Text, blob); + } + + async fn deploy_package_to_app_store( + &self, + package_name: &str, + publisher: &str, + version_hash: &str, + package_zip: &str, + metadata: Value, + ) -> Result<(), String> { + use base64::Engine; + + println!("Spider: Deploying package {} to app-store", package_name); + + // Decode the base64 package zip + let package_bytes = base64::engine::general_purpose::STANDARD + .decode(package_zip) + .map_err(|e| format!("Failed to decode package zip: {}", e))?; + + // Create NewPackage request + let new_package_request = serde_json::json!({ + "NewPackage": { + "package_id": { + "package_name": package_name, + "publisher_node": publisher, + }, + "mirror": true + } + }); + + // Send NewPackage request to app-store with the zip as blob + let blob = LazyLoadBlob::new(None::, package_bytes); + let request = Request::to(("our", "main", "app-store", "sys")) + .body(serde_json::to_vec(&new_package_request).map_err(|e| e.to_string())?) + .blob(blob) + .expects_response(15); + + let response = request + .send_and_await_response(15) + .map_err(|e| format!("Failed to send new-package request: {:?}", e))? 
+ .map_err(|e| format!("New-package request failed: {:?}", e))?; + + // Parse response + let response_body = String::from_utf8(response.body().to_vec()) + .map_err(|e| format!("Failed to parse response body: {}", e))?; + let response_json: Value = serde_json::from_str(&response_body) + .map_err(|e| format!("Failed to parse response JSON: {}", e))?; + + // Check if NewPackage was successful + if let Some(new_package_response) = response_json.get("NewPackageResponse") { + if new_package_response != &serde_json::Value::String("Success".to_string()) { + return Err(format!("Failed to add package: {:?}", new_package_response)); + } + } else { + return Err(format!( + "Unexpected response from app-store: {:?}", + response_json + )); + } + + println!("Spider: Package added successfully, now installing..."); + + // Parse metadata to create OnchainMetadata + let onchain_metadata = serde_json::json!({ + "name": metadata.get("name").and_then(|v| v.as_str()).unwrap_or(package_name), + "description": metadata.get("description").and_then(|v| v.as_str()).unwrap_or(""), + "image": metadata.get("image").and_then(|v| v.as_str()).unwrap_or(""), + "external_url": metadata.get("external_url").and_then(|v| v.as_str()).unwrap_or(""), + "animation_url": metadata.get("animation_url").and_then(|v| v.as_str()), + "properties": { + "package_name": package_name, + "publisher": publisher, + "current_version": metadata.get("current_version").and_then(|v| v.as_str()).unwrap_or("1.0.0"), + "mirrors": metadata.get("mirrors").and_then(|v| v.as_array()).unwrap_or(&vec![]).clone(), + "code_hashes": metadata.get("code_hashes").and_then(|v| v.as_array()).unwrap_or(&vec![]).clone(), + "license": metadata.get("license").and_then(|v| v.as_str()), + "screenshots": metadata.get("screenshots").and_then(|v| v.as_array()).map(|v| v.clone()), + "wit_version": metadata.get("wit_version").and_then(|v| v.as_u64()).map(|v| v as u32), + "dependencies": metadata.get("dependencies").and_then(|v| v.as_array()).map(|v| 
v.clone()), + "api_includes": metadata.get("api_includes").and_then(|v| v.as_array()).map(|v| v.clone()), + } + }); + + // Create Install request + let install_request = serde_json::json!({ + "Install": { + "package_id": { + "package_name": package_name, + "publisher_node": publisher, + }, + "version_hash": version_hash, + "metadata": onchain_metadata + } + }); + + // Send Install request to app-store + let request = Request::to(("our", "main", "app-store", "sys")) + .body(serde_json::to_vec(&install_request).map_err(|e| e.to_string())?) + .expects_response(15); + + let response = request + .send_and_await_response(15) + .map_err(|e| format!("Failed to send install request: {:?}", e))? + .map_err(|e| format!("Install request failed: {:?}", e))?; + + // Parse response + let response_body = String::from_utf8(response.body().to_vec()) + .map_err(|e| format!("Failed to parse response body: {}", e))?; + let response_json: Value = serde_json::from_str(&response_body) + .map_err(|e| format!("Failed to parse response JSON: {}", e))?; + + // Check if Install was successful + if let Some(install_response) = response_json.get("InstallResponse") { + if install_response == &serde_json::Value::String("Success".to_string()) { + println!("Spider: Package {} installed successfully!", package_name); + Ok(()) + } else { + Err(format!("Failed to install package: {:?}", install_response)) + } + } else { + Err(format!( + "Unexpected response from app-store: {:?}", + response_json + )) + } + } +} diff --git a/hyperdrive/packages/spider/spider/src/tool_providers/hypergrid.rs b/hyperdrive/packages/spider/spider/src/tool_providers/hypergrid.rs index 2c4a65a06..82839468c 100644 --- a/hyperdrive/packages/spider/spider/src/tool_providers/hypergrid.rs +++ b/hyperdrive/packages/spider/spider/src/tool_providers/hypergrid.rs @@ -1,6 +1,10 @@ -use crate::tool_providers::ToolProvider; -use crate::types::{SpiderState, Tool}; +use crate::tool_providers::{ToolExecutionCommand, ToolProvider}; +use 
crate::types::{ + HypergridConnection, HypergridMessage, HypergridMessageType, SpiderState, Tool, + ToolResponseContent, ToolResponseContentItem, +}; use serde_json::Value; +use std::time::Instant; pub struct HypergridToolProvider { server_id: String, @@ -68,18 +72,399 @@ impl ToolProvider for HypergridToolProvider { } } - fn execute_tool( + fn prepare_execution( &self, - _tool_name: &str, - _parameters: &Value, - _state: &mut SpiderState, - ) -> Result { - // This is a placeholder - the actual execution still happens in lib.rs - // The provider is responsible for tool registration and visibility logic only - Err("Tool execution should be handled by the main Spider implementation".to_string()) + tool_name: &str, + parameters: &Value, + state: &SpiderState, + ) -> Result { + match tool_name { + "hypergrid_authorize" => { + let url = parameters + .get("url") + .and_then(|v| v.as_str()) + .ok_or_else(|| "Missing url parameter".to_string())? + .to_string(); + + let token = parameters + .get("token") + .and_then(|v| v.as_str()) + .ok_or_else(|| "Missing token parameter".to_string())? + .to_string(); + + let client_id = parameters + .get("client_id") + .and_then(|v| v.as_str()) + .ok_or_else(|| "Missing client_id parameter".to_string())? + .to_string(); + + let node = parameters + .get("node") + .and_then(|v| v.as_str()) + .ok_or_else(|| "Missing node parameter".to_string())? + .to_string(); + + let name = parameters + .get("name") + .and_then(|v| v.as_str()) + .map(String::from); + + Ok(ToolExecutionCommand::HypergridAuthorize { + server_id: self.server_id.clone(), + url, + token, + client_id, + node, + name, + }) + } + "hypergrid_search" => { + // Check if configured + if !state.hypergrid_connections.contains_key(&self.server_id) { + return Err("Hypergrid not configured. 
Please use hypergrid_authorize first with your credentials.".to_string()); + } + + let query = parameters + .get("query") + .and_then(|v| v.as_str()) + .ok_or_else(|| "Missing query parameter".to_string())? + .to_string(); + + Ok(ToolExecutionCommand::HypergridSearch { + server_id: self.server_id.clone(), + query, + }) + } + "hypergrid_call" => { + // Check if configured + if !state.hypergrid_connections.contains_key(&self.server_id) { + return Err("Hypergrid not configured. Please use hypergrid_authorize first with your credentials.".to_string()); + } + + let provider_id = parameters + .get("providerId") + .and_then(|v| v.as_str()) + .ok_or_else(|| "Missing providerId parameter".to_string())? + .to_string(); + + let provider_name = parameters + .get("providerName") + .and_then(|v| v.as_str()) + .ok_or_else(|| "Missing providerName parameter".to_string())? + .to_string(); + + // Support both "callArgs" (old) and "arguments" (new) parameter names + let call_args = parameters + .get("callArgs") + .or_else(|| parameters.get("arguments")) + .and_then(|v| v.as_array()) + .ok_or_else(|| "Missing callArgs or arguments parameter".to_string())?; + + let args: Vec<(String, String)> = call_args + .iter() + .filter_map(|arg| { + arg.as_array().and_then(|pair| { + if pair.len() == 2 { + let key = pair[0].as_str()?.to_string(); + let value = pair[1].as_str()?.to_string(); + Some((key, value)) + } else { + None + } + }) + }) + .collect(); + + Ok(ToolExecutionCommand::HypergridCall { + server_id: self.server_id.clone(), + provider_id, + provider_name, + call_args: args, + }) + } + _ => Err(format!("Unknown hypergrid tool: {}", tool_name)), + } } fn get_provider_id(&self) -> &str { &self.server_id } } + +// Extension trait for hypergrid operations +pub trait HypergridExt { + async fn execute_hypergrid_authorize_impl( + &mut self, + server_id: String, + url: String, + token: String, + client_id: String, + node: String, + name: Option, + ) -> Result; + async fn 
execute_hypergrid_search_impl( + &mut self, + server_id: String, + query: String, + ) -> Result; + async fn execute_hypergrid_call_impl( + &mut self, + server_id: String, + provider_id: String, + provider_name: String, + call_args: Vec<(String, String)>, + ) -> Result; + async fn test_hypergrid_connection( + &self, + url: &str, + token: &str, + client_id: &str, + ) -> Result<(), String>; + async fn call_hypergrid_api( + &self, + url: &str, + token: &str, + client_id: &str, + message: &HypergridMessage, + ) -> Result; +} + +impl HypergridExt for SpiderState { + async fn execute_hypergrid_authorize_impl( + &mut self, + server_id: String, + url: String, + token: String, + client_id: String, + node: String, + name: Option, + ) -> Result { + println!( + "Spider: hypergrid_authorize called for server_id: {}", + server_id + ); + println!("Spider: Authorizing hypergrid with:"); + println!(" - URL: {}", url); + println!(" - Token: {}...", &token[..token.len().min(20)]); + println!(" - Client ID: {}", client_id); + println!(" - Node: {}", node); + if let Some(ref n) = name { + println!(" - Name: {}", n); + } + + // Test new connection + println!("Spider: Testing hypergrid connection..."); + self.test_hypergrid_connection(&url, &token, &client_id) + .await?; + println!("Spider: Connection test successful!"); + + // Create or update the hypergrid connection + let hypergrid_conn = HypergridConnection { + server_id: server_id.clone(), + url: url.clone(), + token: token.clone(), + client_id: client_id.clone(), + node: node.clone(), + last_retry: Instant::now(), + retry_count: 0, + connected: true, + }; + + self.hypergrid_connections + .insert(server_id.clone(), hypergrid_conn); + println!("Spider: Stored hypergrid connection in memory"); + + // Update transport config + if let Some(server) = self.mcp_servers.iter_mut().find(|s| s.id == server_id) { + println!("Spider: Updating server '{}' transport config", server.name); + server.transport.url = Some(url.clone()); + 
server.transport.hypergrid_token = Some(token.clone()); + server.transport.hypergrid_client_id = Some(client_id.clone()); + server.transport.hypergrid_node = Some(node.clone()); + println!("Spider: Server transport config updated successfully"); + println!("Spider: State should auto-save due to SaveOptions::OnDiff"); + } else { + println!( + "Spider: WARNING - Could not find server with id: {}", + server_id + ); + } + + Ok(serde_json::to_value(ToolResponseContent { + content: vec![ToolResponseContentItem { + content_type: "text".to_string(), + text: format!("✅ Successfully authorized! Hypergrid is now configured with:\n- Node: {}\n- Client ID: {}\n- URL: {}", node, client_id, url), + }], + }) + .map_err(|e| format!("Failed to serialize response: {}", e))?) + } + + async fn execute_hypergrid_search_impl( + &mut self, + server_id: String, + query: String, + ) -> Result { + let hypergrid_conn = self.hypergrid_connections.get(&server_id) + .ok_or_else(|| "Hypergrid not configured. Please use hypergrid_authorize first with your credentials.".to_string())?; + + let response = self + .call_hypergrid_api( + &hypergrid_conn.url, + &hypergrid_conn.token, + &hypergrid_conn.client_id, + &HypergridMessage { + request: HypergridMessageType::SearchRegistry(query), + }, + ) + .await?; + + Ok(serde_json::to_value(ToolResponseContent { + content: vec![ToolResponseContentItem { + content_type: "text".to_string(), + text: response, + }], + }) + .map_err(|e| format!("Failed to serialize response: {}", e))?) + } + + async fn execute_hypergrid_call_impl( + &mut self, + server_id: String, + provider_id: String, + provider_name: String, + call_args: Vec<(String, String)>, + ) -> Result { + let hypergrid_conn = self.hypergrid_connections.get(&server_id) + .ok_or_else(|| "Hypergrid not configured. 
Please use hypergrid_authorize first with your credentials.".to_string())?; + + let response = self + .call_hypergrid_api( + &hypergrid_conn.url, + &hypergrid_conn.token, + &hypergrid_conn.client_id, + &HypergridMessage { + request: HypergridMessageType::CallProvider { + provider_id, + provider_name, + arguments: call_args, + }, + }, + ) + .await?; + + Ok(serde_json::to_value(ToolResponseContent { + content: vec![ToolResponseContentItem { + content_type: "text".to_string(), + text: response, + }], + }) + .map_err(|e| format!("Failed to serialize response: {}", e))?) + } + + async fn test_hypergrid_connection( + &self, + url: &str, + token: &str, + client_id: &str, + ) -> Result<(), String> { + use hyperware_process_lib::http::client::send_request_await_response; + use hyperware_process_lib::http::Method; + use std::collections::HashMap; + + println!( + "Spider: test_hypergrid_connection - Testing connection to {}", + url + ); + + let test_message = HypergridMessage { + request: HypergridMessageType::SearchRegistry("test".to_string()), + }; + + let body = serde_json::to_string(&test_message) + .map_err(|e| format!("Failed to serialize test message: {}", e))?; + + let mut headers = HashMap::new(); + headers.insert("Content-Type".to_string(), "application/json".to_string()); + headers.insert("X-Auth-Token".to_string(), token.to_string()); + headers.insert("X-Client-Id".to_string(), client_id.to_string()); + + let parsed_url = url::Url::parse(url).map_err(|e| format!("Invalid URL: {}", e))?; + + println!("Spider: test_hypergrid_connection - Sending test request..."); + let response = send_request_await_response( + Method::POST, + parsed_url, + Some(headers), + 30000, + body.into_bytes(), + ) + .await + .map_err(|e| { + println!( + "Spider: test_hypergrid_connection - Request failed: {:?}", + e + ); + format!("Connection test failed: {:?}", e) + })?; + + if !response.status().is_success() { + let error_text = String::from_utf8_lossy(response.body()); + println!( + 
"Spider: test_hypergrid_connection - Server returned error: {}", + error_text + ); + return Err(format!( + "Hypergrid server error (status {}): {}", + response.status(), + error_text + )); + } + + println!("Spider: test_hypergrid_connection - Connection test successful!"); + Ok(()) + } + + async fn call_hypergrid_api( + &self, + url: &str, + token: &str, + client_id: &str, + message: &HypergridMessage, + ) -> Result { + use hyperware_process_lib::http::client::send_request_await_response; + use hyperware_process_lib::http::Method; + use std::collections::HashMap; + + let body = serde_json::to_string(message) + .map_err(|e| format!("Failed to serialize message: {}", e))?; + + let mut headers = HashMap::new(); + headers.insert("Content-Type".to_string(), "application/json".to_string()); + headers.insert("X-Auth-Token".to_string(), token.to_string()); + headers.insert("X-Client-Id".to_string(), client_id.to_string()); + + let parsed_url = url::Url::parse(url).map_err(|e| format!("Invalid URL: {}", e))?; + + let response = send_request_await_response( + Method::POST, + parsed_url, + Some(headers), + 30000, + body.into_bytes(), + ) + .await + .map_err(|e| format!("API call failed: {:?}", e))?; + + if !response.status().is_success() { + let error_text = String::from_utf8_lossy(response.body()); + return Err(format!( + "Hypergrid API error (status {}): {}", + response.status(), + error_text + )); + } + + let response_text = String::from_utf8_lossy(response.body()).to_string(); + Ok(response_text) + } +} diff --git a/hyperdrive/packages/spider/spider/src/tool_providers/mod.rs b/hyperdrive/packages/spider/spider/src/tool_providers/mod.rs index 4ff338e70..9710b302d 100644 --- a/hyperdrive/packages/spider/spider/src/tool_providers/mod.rs +++ b/hyperdrive/packages/spider/spider/src/tool_providers/mod.rs @@ -4,17 +4,64 @@ pub mod hypergrid; use crate::types::{SpiderState, Tool}; use serde_json::Value; +pub enum ToolExecutionCommand { + // Build container commands + 
InitBuildContainer { + metadata: Option, + }, + LoadProject { + project_uuid: Option, + name: String, // Now required + initial_zip: Option, + channel_id: Option, + }, + StartPackage { + channel_id: u32, + package_dir: String, + }, + Persist { + channel_id: u32, + directories: Vec, + }, + DoneBuildContainer { + metadata: Option, + channel_id: Option, + }, + GetProjects, + // Hypergrid commands + HypergridAuthorize { + server_id: String, + url: String, + token: String, + client_id: String, + node: String, + name: Option, + }, + HypergridSearch { + server_id: String, + query: String, + }, + HypergridCall { + server_id: String, + provider_id: String, + provider_name: String, + call_args: Vec<(String, String)>, + }, + // Direct result (for synchronous operations) + DirectResult(Result), +} + pub trait ToolProvider: Send + Sync { fn get_tools(&self, state: &SpiderState) -> Vec; fn should_include_tool(&self, tool_name: &str, state: &SpiderState) -> bool; - fn execute_tool( + fn prepare_execution( &self, tool_name: &str, parameters: &Value, - state: &mut SpiderState, - ) -> Result; + state: &SpiderState, + ) -> Result; fn get_provider_id(&self) -> &str; } @@ -53,9 +100,13 @@ impl ToolProviderRegistry { tools } - pub fn find_provider_for_tool(&self, tool_name: &str) -> Option<&dyn ToolProvider> { + pub fn find_provider_for_tool( + &self, + tool_name: &str, + state: &SpiderState, + ) -> Option<&dyn ToolProvider> { for provider in &self.providers { - let tools = provider.get_tools(&SpiderState::default()); + let tools = provider.get_tools(state); if tools.iter().any(|t| t.name == tool_name) { return Some(provider.as_ref()); } diff --git a/hyperdrive/packages/spider/spider/src/types.rs b/hyperdrive/packages/spider/spider/src/types.rs index 9b28a0243..4dd98f117 100644 --- a/hyperdrive/packages/spider/spider/src/types.rs +++ b/hyperdrive/packages/spider/spider/src/types.rs @@ -7,6 +7,14 @@ use serde_json::Value; use crate::tool_providers::ToolProviderRegistry; +fn 
default_empty_string() -> String { + String::new() +} + +fn default_project_mapping() -> HashMap> { + HashMap::new() +} + #[derive(Default, Serialize, Deserialize)] pub struct SpiderState { pub api_keys: Vec<(String, ApiKey)>, @@ -16,6 +24,12 @@ pub struct SpiderState { pub default_llm_provider: String, pub max_tokens: u32, pub temperature: f32, + #[serde(default = "default_empty_string")] + pub build_container_ws_uri: String, + #[serde(default = "default_empty_string")] + pub build_container_api_key: String, + #[serde(default = "default_project_mapping")] + pub project_name_to_uuids: HashMap>, // project name -> list of UUIDs #[serde(skip)] pub ws_connections: HashMap, // channel_id -> connection info #[serde(skip)] @@ -34,8 +48,6 @@ pub struct SpiderState { pub show_trial_key_notification: bool, // Flag to show trial key notification popup #[serde(skip)] pub tool_provider_registry: ToolProviderRegistry, // Registry for modular tool providers - #[serde(skip)] - pub build_container_connection: Option, // Active build container connection } #[derive(Clone, Debug)] @@ -302,6 +314,10 @@ pub(crate) struct UpdateConfigRequest { #[serde(rename = "maxTokens")] pub(crate) max_tokens: Option, pub(crate) temperature: Option, + #[serde(rename = "buildContainerWsUri")] + pub(crate) build_container_ws_uri: Option, + #[serde(rename = "buildContainerApiKey")] + pub(crate) build_container_api_key: Option, #[serde(rename = "authKey")] pub(crate) auth_key: String, } @@ -336,6 +352,10 @@ pub(crate) struct ConfigResponse { #[serde(rename = "maxTokens")] pub(crate) max_tokens: u32, pub(crate) temperature: f32, + #[serde(rename = "buildContainerWsUri")] + pub(crate) build_container_ws_uri: String, + #[serde(rename = "buildContainerApiKey")] + pub(crate) build_container_api_key: String, } #[derive(Serialize, Deserialize, Debug, PartialEq)] @@ -566,13 +586,87 @@ pub(crate) struct OAuthRefreshRequest { pub(crate) refresh_token: String, } -// Build Container types -#[derive(Clone, Debug)] 
-pub(crate) struct BuildContainerConnection { - pub(crate) project_uuid: String, - pub(crate) ws_uri: String, +// Tool response types +#[derive(Serialize, Deserialize, Debug, Clone)] +pub(crate) struct ToolResponseContent { + pub(crate) content: Vec, +} + +#[derive(Serialize, Deserialize, Debug, Clone)] +pub(crate) struct ToolResponseContentItem { + #[serde(rename = "type")] + pub(crate) content_type: String, + pub(crate) text: String, +} + +// Error response types +#[derive(Serialize, Deserialize, Debug, Clone)] +pub(crate) struct ErrorResponse { + pub(crate) error: Value, +} + +// OAuth request types +#[derive(Serialize, Deserialize, Debug)] +pub(crate) struct OAuthCodeExchangeRequest { + pub(crate) code: String, + pub(crate) state: String, + pub(crate) grant_type: String, + pub(crate) client_id: String, + pub(crate) redirect_uri: String, + pub(crate) code_verifier: String, +} + +#[derive(Serialize, Deserialize, Debug)] +pub(crate) struct OAuthRefreshTokenRequest { + pub(crate) grant_type: String, + pub(crate) refresh_token: String, + pub(crate) client_id: String, +} + +// Build container request/response types +#[derive(Serialize, Deserialize, Debug)] +pub(crate) struct BuildContainerRequest { + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) metadata: Option, +} + +#[derive(Serialize, Deserialize, Debug)] +pub(crate) struct SpiderAuthRequest { + pub(crate) jsonrpc: String, + pub(crate) method: String, + pub(crate) params: SpiderAuthParams, + pub(crate) id: String, +} + +#[derive(Serialize, Deserialize, Debug)] +pub(crate) struct SpiderAuthParams { pub(crate) api_key: String, - pub(crate) channel_id: u32, - pub(crate) connected: bool, - pub(crate) tools: Vec, +} + +#[derive(Serialize, Deserialize, Debug)] +pub(crate) struct LoadProjectParams { + pub(crate) project_uuid: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) name: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) initial_zip: Option, +} + 
+#[derive(Serialize, Deserialize, Debug)] +pub(crate) struct StartPackageParams { + pub(crate) package_dir: String, +} + +#[derive(Serialize, Deserialize, Debug)] +pub(crate) struct PersistParams { + pub(crate) directories: Vec, +} + +#[derive(Serialize, Deserialize, Debug)] +pub(crate) struct InitializeParams { + #[serde(rename = "protocolVersion")] + pub(crate) protocol_version: String, + #[serde(rename = "clientInfo")] + pub(crate) client_info: McpClientInfo, + pub(crate) capabilities: McpCapabilities, } diff --git a/hyperdrive/packages/spider/ui/src/App.css b/hyperdrive/packages/spider/ui/src/App.css index 511a85c12..5da5333ed 100644 --- a/hyperdrive/packages/spider/ui/src/App.css +++ b/hyperdrive/packages/spider/ui/src/App.css @@ -309,6 +309,14 @@ html, body, #root { background: rgba(255, 255, 255, 0.08); } +.form-help-text { + display: block; + margin-top: 0.25rem; + font-size: 0.85rem; + color: #888; + font-style: italic; +} + .api-key-form, .spider-key-form, .mcp-server-form, diff --git a/hyperdrive/packages/spider/ui/src/components/Chat.tsx b/hyperdrive/packages/spider/ui/src/components/Chat.tsx index 6c1ad09d4..4fc5f1fed 100644 --- a/hyperdrive/packages/spider/ui/src/components/Chat.tsx +++ b/hyperdrive/packages/spider/ui/src/components/Chat.tsx @@ -20,11 +20,50 @@ function ToolCallModal({ toolCall, toolResult, onClose }: { onClose: () => void; }) { const copyToClipboard = (text: string) => { - navigator.clipboard.writeText(text).then(() => { - // Could add a toast notification here - }).catch(err => { - console.error('Failed to copy:', err); - }); + // Check if clipboard API is available + if (navigator.clipboard && navigator.clipboard.writeText) { + navigator.clipboard.writeText(text).then(() => { + // Could add a toast notification here + }).catch(err => { + console.error('Failed to copy:', err); + // Fallback to legacy method + fallbackCopyToClipboard(text); + }); + } else { + // Use fallback method + fallbackCopyToClipboard(text); + } + }; + + const 
fallbackCopyToClipboard = (text: string) => { + // Create a temporary textarea element + const textarea = document.createElement('textarea'); + textarea.value = text; + textarea.style.position = 'fixed'; + textarea.style.top = '0'; + textarea.style.left = '0'; + textarea.style.width = '2em'; + textarea.style.height = '2em'; + textarea.style.padding = '0'; + textarea.style.border = 'none'; + textarea.style.outline = 'none'; + textarea.style.boxShadow = 'none'; + textarea.style.background = 'transparent'; + + document.body.appendChild(textarea); + textarea.focus(); + textarea.select(); + + try { + const successful = document.execCommand('copy'); + if (!successful) { + console.error('Fallback copy failed'); + } + } catch (err) { + console.error('Fallback copy error:', err); + } + + document.body.removeChild(textarea); }; return ( @@ -38,7 +77,7 @@ function ToolCallModal({ toolCall, toolResult, onClose }: {

Tool Call

- + + {showSelfHosting && ( +
+
+ + setBuildContainerWsUri(e.target.value)} + placeholder="ws://localhost:8091" + /> + + WebSocket URI for your self-hosted build container + +
+ +
+ + setBuildContainerApiKey(e.target.value)} + placeholder="Enter API key" + /> + + API key for authenticating with your self-hosted build container + +
+
+ )} +
+
); -} \ No newline at end of file +} diff --git a/hyperdrive/packages/spider/ui/src/store/spider.ts b/hyperdrive/packages/spider/ui/src/store/spider.ts index 7c15b96a5..b28892063 100644 --- a/hyperdrive/packages/spider/ui/src/store/spider.ts +++ b/hyperdrive/packages/spider/ui/src/store/spider.ts @@ -66,6 +66,8 @@ interface SpiderConfig { defaultLlmProvider: string; maxTokens: number; temperature: number; + buildContainerWsUri: string; + buildContainerApiKey: string; } interface SpiderStore { @@ -163,6 +165,8 @@ export const useSpiderStore = create((set, get) => ({ defaultLlmProvider: 'anthropic', maxTokens: 4096, temperature: 0.7, + buildContainerWsUri: '', + buildContainerApiKey: '', }, isLoading: false, error: null, diff --git a/hyperdrive/packages/spider/ui/src/utils/api.ts b/hyperdrive/packages/spider/ui/src/utils/api.ts index 834aa2fdc..9a848bb5d 100644 --- a/hyperdrive/packages/spider/ui/src/utils/api.ts +++ b/hyperdrive/packages/spider/ui/src/utils/api.ts @@ -158,6 +158,8 @@ export async function updateConfig(config: Partial): Promise Date: Sat, 13 Sep 2025 14:14:23 -0700 Subject: [PATCH 13/70] spider: fix build-from-scratch --- hyperdrive/packages/spider/spider/Cargo.toml | 4 ---- 1 file changed, 4 deletions(-) diff --git a/hyperdrive/packages/spider/spider/Cargo.toml b/hyperdrive/packages/spider/spider/Cargo.toml index 26b377352..fc02635fd 100644 --- a/hyperdrive/packages/spider/spider/Cargo.toml +++ b/hyperdrive/packages/spider/spider/Cargo.toml @@ -30,10 +30,6 @@ rev = "4beff93" features = ["derive"] version = "1.0" -[dependencies.spider_dev_caller_utils] -optional = true -path = "../target/spider-dev-caller-utils" - [dependencies.uuid] features = [ "v4", From a81b9eeb2488c37f1535b9ad4c69a0c3513c5bf0 Mon Sep 17 00:00:00 2001 From: hosted-fornet Date: Sat, 13 Sep 2025 14:17:44 -0700 Subject: [PATCH 14/70] spider: fix build-from-scratch 2 --- hyperdrive/packages/spider/spider/Cargo.toml | 1 - 1 file changed, 1 deletion(-) diff --git 
a/hyperdrive/packages/spider/spider/Cargo.toml b/hyperdrive/packages/spider/spider/Cargo.toml index fc02635fd..d97888cf2 100644 --- a/hyperdrive/packages/spider/spider/Cargo.toml +++ b/hyperdrive/packages/spider/spider/Cargo.toml @@ -38,7 +38,6 @@ features = [ version = "1.4.1" [features] -caller-utils = ["spider_dev_caller_utils"] simulation-mode = [] [lib] From 44a830dd2c5ac2036878a55ef9cc759776ec8f5e Mon Sep 17 00:00:00 2001 From: Tobias Merkle Date: Tue, 16 Sep 2025 14:04:02 -0400 Subject: [PATCH 15/70] bay-width --- .../packages/app-store/ui/src/components/NotificationBay.tsx | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/hyperdrive/packages/app-store/ui/src/components/NotificationBay.tsx b/hyperdrive/packages/app-store/ui/src/components/NotificationBay.tsx index 6295c712c..cc90553da 100644 --- a/hyperdrive/packages/app-store/ui/src/components/NotificationBay.tsx +++ b/hyperdrive/packages/app-store/ui/src/components/NotificationBay.tsx @@ -55,7 +55,8 @@ const NotificationBay: React.FC = () => { {isExpanded && ( -
+
+
{notifications.length === 0 ? (

All clear, no notifications!

) : ( @@ -69,6 +70,7 @@ const NotificationBay: React.FC = () => { )) )}
+
)}
From 4ec31676b36aac89481772068198b5fc5197ef7a Mon Sep 17 00:00:00 2001 From: hosted-fornet Date: Wed, 17 Sep 2025 10:42:42 -0700 Subject: [PATCH 16/70] fix README --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 4d08bce27..7a6188991 100644 --- a/README.md +++ b/README.md @@ -30,7 +30,7 @@ Rust must be between versions 1.81 and 1.85.1. ```bash # Clone the repo. -git clone --recurse-submodules git@github.com:hyperware-ai/hyperware.git +git clone --recurse-submodules git@github.com:hyperware-ai/hyperdrive.git # Install Rust and some `cargo` tools so we can build the runtime and Wasm. From f5a837007896da58261a4d43f3eebc8837c975f3 Mon Sep 17 00:00:00 2001 From: hosted-fornet Date: Thu, 18 Sep 2025 11:46:54 -0700 Subject: [PATCH 17/70] state: make code a little cleaner (let else instead of if let else) --- hyperdrive/src/state.rs | 24 +++++++++++------------- 1 file changed, 11 insertions(+), 13 deletions(-) diff --git a/hyperdrive/src/state.rs b/hyperdrive/src/state.rs index 710886bf9..eb1e4eb30 100644 --- a/hyperdrive/src/state.rs +++ b/hyperdrive/src/state.rs @@ -545,20 +545,18 @@ async fn bootstrap( params: "\"messaging\"".into(), }, serde_json::Value::Object(map) => { - if let Some(process_name) = map.get("process") { - if let Some(params) = map.get("params") { - Capability { - issuer: Address { - node: our_name.to_string(), - process: process_name.as_str().unwrap().parse().unwrap(), - }, - params: params.to_string(), - } - } else { - continue; - } - } else { + let Some(process_name) = map.get("process") else { continue; + }; + let Some(params) = map.get("params") else { + continue; + }; + Capability { + issuer: Address { + node: our_name.to_string(), + process: process_name.as_str().unwrap().parse().unwrap(), + }, + params: params.to_string(), } } _ => { From 0930da31ec40d075758f7e0f28ad695caf8fa8d1 Mon Sep 17 00:00:00 2001 From: hosted-fornet Date: Fri, 19 Sep 2025 13:15:28 -0700 Subject: [PATCH 18/70] 
spider: wait until hypermap-cacher is ready to allow successful fetching of trial api key --- hyperdrive/packages/spider/pkg/manifest.json | 2 + hyperdrive/packages/spider/spider/src/lib.rs | 62 ++++++++++++++++++- hyperdrive/packages/spider/ui/index.html | 2 +- .../packages/spider/ui/package-lock.json | 4 +- hyperdrive/packages/spider/ui/package.json | 2 +- 5 files changed, 66 insertions(+), 6 deletions(-) diff --git a/hyperdrive/packages/spider/pkg/manifest.json b/hyperdrive/packages/spider/pkg/manifest.json index edd1d9d19..61427bad5 100644 --- a/hyperdrive/packages/spider/pkg/manifest.json +++ b/hyperdrive/packages/spider/pkg/manifest.json @@ -8,6 +8,7 @@ "homepage:homepage:sys", "http-client:distro:sys", "http-server:distro:sys", + "hypermap-cacher:hypermap-cacher:sys", "operator:hypergrid:ware.hypr", "timer:distro:sys", "vfs:distro:sys" @@ -16,6 +17,7 @@ "homepage:homepage:sys", "http-client:distro:sys", "http-server:distro:sys", + "hypermap-cacher:hypermap-cacher:sys", "operator:hypergrid:ware.hypr", "timer:distro:sys", "vfs:distro:sys" diff --git a/hyperdrive/packages/spider/spider/src/lib.rs b/hyperdrive/packages/spider/spider/src/lib.rs index 27216616f..34076dc7e 100644 --- a/hyperdrive/packages/spider/spider/src/lib.rs +++ b/hyperdrive/packages/spider/spider/src/lib.rs @@ -14,10 +14,10 @@ use hyperware_process_lib::{ server::{send_ws_push, WsMessageType}, }, hyperapp::source, - our, println, Address, LazyLoadBlob, ProcessId, + our, println, Address, LazyLoadBlob, ProcessId, Request, }; #[cfg(not(feature = "simulation-mode"))] -use spider_dev_caller_utils::anthropic_api_key_manager::request_api_key_remote_rpc; +use spider_caller_utils::anthropic_api_key_manager::request_api_key_remote_rpc; mod provider; use provider::create_llm_provider; @@ -88,6 +88,64 @@ const HYPERGRID: &str = "operator:hypergrid:ware.hypr"; impl SpiderState { #[init] async fn initialize(&mut self) { + // Wait for hypermap-cacher to be ready + let cacher_address = Address::new("our", 
("hypermap-cacher", "hypermap-cacher", "sys")); + let mut attempt = 1; + const RETRY_DELAY_S: u64 = 2; + const TIMEOUT_S: u64 = 15; + + println!("Spider: Waiting for hypermap-cacher to be ready..."); + + loop { + // Create GetStatus request JSON + let cacher_request = r#""GetStatus""#; + + match Request::to(cacher_address.clone()) + .body(cacher_request.as_bytes().to_vec()) + .send_and_await_response(TIMEOUT_S) + { + Ok(Ok(response)) => { + // Try to parse the response as JSON + if let Ok(response_str) = String::from_utf8(response.body().to_vec()) { + // Check if it's IsStarting response + if response_str.contains("IsStarting") || response_str.contains(r#""IsStarting""#) { + println!( + "Spider: hypermap-cacher is still starting (attempt {}). Retrying in {}s...", + attempt, RETRY_DELAY_S + ); + std::thread::sleep(std::time::Duration::from_secs(RETRY_DELAY_S)); + attempt += 1; + continue; + } + // Check if it's GetStatus response + if response_str.contains("GetStatus") || response_str.contains("last_cached_block") { + println!("Spider: hypermap-cacher is ready!"); + break; + } + } + // If we get here, we got some response we don't understand, but cacher is at least responding + println!("Spider: hypermap-cacher responded, proceeding with initialization"); + break; + } + Ok(Err(e)) => { + println!( + "Spider: Error response from hypermap-cacher (attempt {}): {:?}", + attempt, e + ); + std::thread::sleep(std::time::Duration::from_secs(RETRY_DELAY_S)); + attempt += 1; + } + Err(e) => { + println!( + "Spider: Failed to contact hypermap-cacher (attempt {}): {:?}", + attempt, e + ); + std::thread::sleep(std::time::Duration::from_secs(RETRY_DELAY_S)); + attempt += 1; + } + } + } + add_to_homepage("Spider", Some(ICON), Some("/"), None); self.default_llm_provider = "anthropic".to_string(); diff --git a/hyperdrive/packages/spider/ui/index.html b/hyperdrive/packages/spider/ui/index.html index 218e80475..789fba20c 100644 --- a/hyperdrive/packages/spider/ui/index.html +++ 
b/hyperdrive/packages/spider/ui/index.html @@ -8,7 +8,7 @@ - Skeleton App - Hyperware + Spider - Hyperware
diff --git a/hyperdrive/packages/spider/ui/package-lock.json b/hyperdrive/packages/spider/ui/package-lock.json index eee3a35dc..c86af2d7c 100644 --- a/hyperdrive/packages/spider/ui/package-lock.json +++ b/hyperdrive/packages/spider/ui/package-lock.json @@ -1,11 +1,11 @@ { - "name": "skeleton-app-ui", + "name": "spider-ui", "version": "0.1.0", "lockfileVersion": 3, "requires": true, "packages": { "": { - "name": "skeleton-app-ui", + "name": "spider-ui", "version": "0.1.0", "dependencies": { "react": "^18.3.1", diff --git a/hyperdrive/packages/spider/ui/package.json b/hyperdrive/packages/spider/ui/package.json index e8be791f9..f9e0a3f30 100644 --- a/hyperdrive/packages/spider/ui/package.json +++ b/hyperdrive/packages/spider/ui/package.json @@ -1,5 +1,5 @@ { - "name": "skeleton-app-ui", + "name": "spider-ui", "private": true, "version": "0.1.0", "type": "module", From 9956cc70cd6706d00f51cab18cbfed8f4e41eac6 Mon Sep 17 00:00:00 2001 From: hosted-fornet Date: Fri, 19 Sep 2025 13:19:16 -0700 Subject: [PATCH 19/70] spider: wait a little longer --- hyperdrive/packages/spider/spider/src/lib.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/hyperdrive/packages/spider/spider/src/lib.rs b/hyperdrive/packages/spider/spider/src/lib.rs index 34076dc7e..e889e21ae 100644 --- a/hyperdrive/packages/spider/spider/src/lib.rs +++ b/hyperdrive/packages/spider/spider/src/lib.rs @@ -146,6 +146,9 @@ impl SpiderState { } } + // wait an additional 2s to allow hns to get ready + std::thread::sleep(std::time::Duration::from_secs(RETRY_DELAY_S)); + add_to_homepage("Spider", Some(ICON), Some("/"), None); self.default_llm_provider = "anthropic".to_string(); From c1d2c2a1df4e310dc2be0cd957220c81217f8311 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 19 Sep 2025 20:19:48 +0000 Subject: [PATCH 20/70] Format Rust code using rustfmt --- hyperdrive/packages/spider/spider/src/lib.rs | 8 ++++++-- 1 file changed, 6 
insertions(+), 2 deletions(-) diff --git a/hyperdrive/packages/spider/spider/src/lib.rs b/hyperdrive/packages/spider/spider/src/lib.rs index e889e21ae..2933ac2e5 100644 --- a/hyperdrive/packages/spider/spider/src/lib.rs +++ b/hyperdrive/packages/spider/spider/src/lib.rs @@ -108,7 +108,9 @@ impl SpiderState { // Try to parse the response as JSON if let Ok(response_str) = String::from_utf8(response.body().to_vec()) { // Check if it's IsStarting response - if response_str.contains("IsStarting") || response_str.contains(r#""IsStarting""#) { + if response_str.contains("IsStarting") + || response_str.contains(r#""IsStarting""#) + { println!( "Spider: hypermap-cacher is still starting (attempt {}). Retrying in {}s...", attempt, RETRY_DELAY_S @@ -118,7 +120,9 @@ impl SpiderState { continue; } // Check if it's GetStatus response - if response_str.contains("GetStatus") || response_str.contains("last_cached_block") { + if response_str.contains("GetStatus") + || response_str.contains("last_cached_block") + { println!("Spider: hypermap-cacher is ready!"); break; } From 4d2a164036e48b4e798f6bfbb7f39bdd91ae131b Mon Sep 17 00:00:00 2001 From: hosted-fornet Date: Fri, 19 Sep 2025 23:28:51 -0700 Subject: [PATCH 21/70] spider: add hyperware search, get, and call tools --- .../crates/hyperware-parse-wit/.gitignore | 1 + .../crates/hyperware-parse-wit/Cargo.lock | 809 ++++++++++++++++++ .../crates/hyperware-parse-wit/Cargo.toml | 10 + .../hyperware-parse-wit/examples/parse_zip.rs | 38 + .../hyperware-parse-wit/src/hyperware.wit | 224 +++++ .../crates/hyperware-parse-wit/src/lib.rs | 116 +++ hyperdrive/packages/spider/spider/src/lib.rs | 18 + .../spider/src/tool_providers/hyperware.rs | 313 +++++++ .../spider/spider/src/tool_providers/mod.rs | 14 + 9 files changed, 1543 insertions(+) create mode 100644 hyperdrive/packages/spider/crates/hyperware-parse-wit/.gitignore create mode 100644 hyperdrive/packages/spider/crates/hyperware-parse-wit/Cargo.lock create mode 100644 
hyperdrive/packages/spider/crates/hyperware-parse-wit/Cargo.toml create mode 100644 hyperdrive/packages/spider/crates/hyperware-parse-wit/examples/parse_zip.rs create mode 100644 hyperdrive/packages/spider/crates/hyperware-parse-wit/src/hyperware.wit create mode 100644 hyperdrive/packages/spider/crates/hyperware-parse-wit/src/lib.rs create mode 100644 hyperdrive/packages/spider/spider/src/tool_providers/hyperware.rs diff --git a/hyperdrive/packages/spider/crates/hyperware-parse-wit/.gitignore b/hyperdrive/packages/spider/crates/hyperware-parse-wit/.gitignore new file mode 100644 index 000000000..ea8c4bf7f --- /dev/null +++ b/hyperdrive/packages/spider/crates/hyperware-parse-wit/.gitignore @@ -0,0 +1 @@ +/target diff --git a/hyperdrive/packages/spider/crates/hyperware-parse-wit/Cargo.lock b/hyperdrive/packages/spider/crates/hyperware-parse-wit/Cargo.lock new file mode 100644 index 000000000..eaa74afe3 --- /dev/null +++ b/hyperdrive/packages/spider/crates/hyperware-parse-wit/Cargo.lock @@ -0,0 +1,809 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 4 + +[[package]] +name = "adler2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" + +[[package]] +name = "aes" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" +dependencies = [ + "cfg-if", + "cipher", + "cpufeatures", +] + +[[package]] +name = "anyhow" +version = "1.0.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" + +[[package]] +name = "arbitrary" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3d036a3c4ab069c7b410a2ce876bd74808d2d0888a82667669f8e783a898bf1" +dependencies = [ + "derive_arbitrary", +] + +[[package]] +name = "bitflags" +version = "2.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2261d10cca569e4643e526d8dc2e62e433cc8aba21ab764233731f8d369bf394" + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "bumpalo" +version = "3.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" + +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + +[[package]] +name = "bzip2" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49ecfb22d906f800d4fe833b6282cf4dc1c298f5057ca0b5445e5c209735ca47" +dependencies = [ + "bzip2-sys", +] + +[[package]] +name = 
"bzip2-sys" +version = "0.1.13+1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "225bff33b2141874fe80d71e07d6eec4f85c5c216453dd96388240f96e1acc14" +dependencies = [ + "cc", + "pkg-config", +] + +[[package]] +name = "cc" +version = "1.2.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80f41ae168f955c12fb8960b057d70d0ca153fb83182b57d86380443527be7e9" +dependencies = [ + "find-msvc-tools", + "jobserver", + "libc", + "shlex", +] + +[[package]] +name = "cfg-if" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2fd1289c04a9ea8cb22300a459a72a385d7c73d3259e2ed7dcb2af674838cfa9" + +[[package]] +name = "cipher" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" +dependencies = [ + "crypto-common", + "inout", +] + +[[package]] +name = "constant_time_eq" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" + +[[package]] +name = "cpufeatures" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" +dependencies = [ + "libc", +] + +[[package]] +name = "crc" +version = "3.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9710d3b3739c2e349eb44fe848ad0b7c8cb1e42bd87ee49371df2f7acaf3e675" +dependencies = [ + "crc-catalog", +] + +[[package]] +name = "crc-catalog" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" + +[[package]] +name = "crc32fast" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "deflate64" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da692b8d1080ea3045efaab14434d40468c3d8657e42abddfffca87b428f4c1b" + +[[package]] +name = "deranged" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d630bccd429a5bb5a64b5e94f693bfc48c9f8566418fda4c494cc94f911f87cc" +dependencies = [ + "powerfmt", +] + +[[package]] +name = "derive_arbitrary" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e567bd82dcff979e4b03460c307b3cdc9e96fde3d73bed1496d2bc75d9dd62a" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "crypto-common", + "subtle", +] + +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] 
+name = "find-msvc-tools" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ced73b1dacfc750a6db6c0a0c3a3853c8b41997e2e2c563dc90804ae6867959" + +[[package]] +name = "flate2" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a3d7db9596fecd151c5f638c0ee5d5bd487b6e0ea232e5dc96d5250f6f94b1d" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "r-efi", + "wasi", + "wasm-bindgen", +] + +[[package]] +name = "hashbrown" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5419bdc4f6a9207fbeba6d11b604d481addf78ecd10c11ad51e76c2f6482748d" + +[[package]] +name = "hmac" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest", +] + +[[package]] +name = "hyperware-parse-wit" +version = "0.1.0" +dependencies = [ + "anyhow", + "serde_json", + "wit-parser", + "zip", +] + +[[package]] +name = "id-arena" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25a2bc672d1148e28034f176e01fffebb08b35768468cc954630da77a1449005" + +[[package]] +name = "indexmap" +version = "2.11.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b0f83760fb341a774ed326568e19f5a863af4a952def8c39f9ab92fd95b88e5" +dependencies = [ + "equivalent", + 
"hashbrown", + "serde", + "serde_core", +] + +[[package]] +name = "inout" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01" +dependencies = [ + "generic-array", +] + +[[package]] +name = "itoa" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" + +[[package]] +name = "jobserver" +version = "0.1.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33" +dependencies = [ + "getrandom", + "libc", +] + +[[package]] +name = "js-sys" +version = "0.3.80" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "852f13bec5eba4ba9afbeb93fd7c13fe56147f055939ae21c43a29a0ecb2702e" +dependencies = [ + "once_cell", + "wasm-bindgen", +] + +[[package]] +name = "libc" +version = "0.2.175" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a82ae493e598baaea5209805c49bbf2ea7de956d50d7da0da1164f9c6d28543" + +[[package]] +name = "log" +version = "0.4.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432" + +[[package]] +name = "lzma-rs" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "297e814c836ae64db86b36cf2a557ba54368d03f6afcd7d947c266692f71115e" +dependencies = [ + "byteorder", + "crc", +] + +[[package]] +name = "lzma-sys" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5fda04ab3764e6cde78b9974eec4f779acaba7c4e84b36eca3cf77c581b85d27" +dependencies = [ + "cc", + "libc", + "pkg-config", +] + +[[package]] +name = "memchr" +version = "2.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0" + +[[package]] +name = "miniz_oxide" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" +dependencies = [ + "adler2", +] + +[[package]] +name = "num-conv" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + +[[package]] +name = "once_cell" +version = "1.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" + +[[package]] +name = "pbkdf2" +version = "0.12.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8ed6a7761f76e3b9f92dfb0a60a6a6477c61024b775147ff0973a02653abaf2" +dependencies = [ + "digest", + "hmac", +] + +[[package]] +name = "pkg-config" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" + +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + +[[package]] +name = "proc-macro2" +version = "1.0.101" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89ae43fd86e4158d6db51ad8e2b80f313af9cc74f5c0e03ccb87de09998732de" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "r-efi" +version = "5.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" + +[[package]] +name = "ryu" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" + +[[package]] +name = "semver" +version = "1.0.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" + +[[package]] +name = "serde" +version = "1.0.225" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd6c24dee235d0da097043389623fb913daddf92c76e9f5a1db88607a0bcbd1d" +dependencies = [ + "serde_core", +] + +[[package]] +name = "serde_core" +version = "1.0.225" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "659356f9a0cb1e529b24c01e43ad2bdf520ec4ceaf83047b83ddcc2251f96383" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.225" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ea936adf78b1f766949a4977b91d2f5595825bd6ec079aa9543ad2685fc4516" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.145" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c" +dependencies = [ + "itoa", + "memchr", + "ryu", + "serde", + "serde_core", +] + +[[package]] +name = "sha1" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "simd-adler32" +version = "0.3.7" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe" + +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + +[[package]] +name = "syn" +version = "2.0.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "thiserror" +version = "2.0.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3467d614147380f2e4e374161426ff399c91084acd2363eaf549172b3d5e60c0" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "2.0.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c5e1be1c48b9172ee610da68fd9cd2770e7a4056cb3fc98710ee6906f0c7960" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "time" +version = "0.3.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e7d9e3bb61134e77bde20dd4825b97c010155709965fedf0f49bb138e52a9d" +dependencies = [ + "deranged", + "num-conv", + "powerfmt", + "serde", + "time-core", +] + +[[package]] +name = "time-core" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40868e7c1d2f0b8d73e4a8c7f0ff63af4f6d19be117e90bd73eb1d62cf831c6b" + +[[package]] +name = "typenum" +version = "1.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" + +[[package]] +name = "unicode-ident" +version = "1.0.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f63a545481291138910575129486daeaf8ac54aee4387fe7906919f7830c7d9d" + +[[package]] 
+name = "unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" + +[[package]] +name = "version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" + +[[package]] +name = "wasi" +version = "0.14.7+wasi-0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "883478de20367e224c0090af9cf5f9fa85bed63a95c1abf3afc5c083ebc06e8c" +dependencies = [ + "wasip2", +] + +[[package]] +name = "wasip2" +version = "1.0.1+wasi-0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7" +dependencies = [ + "wit-bindgen", +] + +[[package]] +name = "wasm-bindgen" +version = "0.2.103" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab10a69fbd0a177f5f649ad4d8d3305499c42bab9aef2f7ff592d0ec8f833819" +dependencies = [ + "cfg-if", + "once_cell", + "wasm-bindgen-macro", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.103" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bb702423545a6007bbc368fde243ba47ca275e549c8a28617f56f6ba53b1d1c" +dependencies = [ + "bumpalo", + "log", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.103" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc65f4f411d91494355917b605e1480033152658d71f722a90647f56a70c88a0" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.103" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffc003a991398a8ee604a401e194b6b3a39677b3173d6e74495eb51b82e99a32" +dependencies = 
[ + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.103" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "293c37f4efa430ca14db3721dfbe48d8c33308096bd44d80ebaa775ab71ba1cf" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "wasmparser" +version = "0.220.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d07b6a3b550fefa1a914b6d54fc175dd11c3392da11eee604e6ffc759805d25" +dependencies = [ + "bitflags", + "indexmap", + "semver", +] + +[[package]] +name = "wit-bindgen" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" + +[[package]] +name = "wit-parser" +version = "0.220.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae2a7999ed18efe59be8de2db9cb2b7f84d88b27818c79353dfc53131840fe1a" +dependencies = [ + "anyhow", + "id-arena", + "indexmap", + "log", + "semver", + "serde", + "serde_derive", + "serde_json", + "unicode-xid", + "wasmparser", +] + +[[package]] +name = "xz2" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "388c44dc09d76f1536602ead6d325eb532f5c122f17782bd57fb47baeeb767e2" +dependencies = [ + "lzma-sys", +] + +[[package]] +name = "zeroize" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" +dependencies = [ + "zeroize_derive", +] + +[[package]] +name = "zeroize_derive" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zip" +version = "2.4.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "fabe6324e908f85a1c52063ce7aa26b68dcb7eb6dbc83a2d148403c9bc3eba50" +dependencies = [ + "aes", + "arbitrary", + "bzip2", + "constant_time_eq", + "crc32fast", + "crossbeam-utils", + "deflate64", + "displaydoc", + "flate2", + "getrandom", + "hmac", + "indexmap", + "lzma-rs", + "memchr", + "pbkdf2", + "sha1", + "thiserror", + "time", + "xz2", + "zeroize", + "zopfli", + "zstd", +] + +[[package]] +name = "zopfli" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edfc5ee405f504cd4984ecc6f14d02d55cfda60fa4b689434ef4102aae150cd7" +dependencies = [ + "bumpalo", + "crc32fast", + "log", + "simd-adler32", +] + +[[package]] +name = "zstd" +version = "0.13.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a" +dependencies = [ + "zstd-safe", +] + +[[package]] +name = "zstd-safe" +version = "7.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d" +dependencies = [ + "zstd-sys", +] + +[[package]] +name = "zstd-sys" +version = "2.0.16+zstd.1.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e19ebc2adc8f83e43039e79776e3fda8ca919132d68a1fed6a5faca2683748" +dependencies = [ + "cc", + "pkg-config", +] diff --git a/hyperdrive/packages/spider/crates/hyperware-parse-wit/Cargo.toml b/hyperdrive/packages/spider/crates/hyperware-parse-wit/Cargo.toml new file mode 100644 index 000000000..df302c9e3 --- /dev/null +++ b/hyperdrive/packages/spider/crates/hyperware-parse-wit/Cargo.toml @@ -0,0 +1,10 @@ +[package] +name = "hyperware-parse-wit" +version = "0.1.0" +edition = "2021" + +[dependencies] +anyhow = "1.0" +serde_json = "1.0" +wit-parser = { version = "0.220.0", features = ["serde"] } +zip = { version = "2.2", default-features = false } diff --git 
a/hyperdrive/packages/spider/crates/hyperware-parse-wit/examples/parse_zip.rs b/hyperdrive/packages/spider/crates/hyperware-parse-wit/examples/parse_zip.rs new file mode 100644 index 000000000..ef26a2eb5 --- /dev/null +++ b/hyperdrive/packages/spider/crates/hyperware-parse-wit/examples/parse_zip.rs @@ -0,0 +1,38 @@ +use anyhow::Result; +use hyperware_parse_wit::parse_wit_from_zip; +use std::env; +use std::fs; + +fn main() -> Result<()> { + // Get the zip file path from command line arguments + let args: Vec = env::args().collect(); + if args.len() < 2 || args.len() > 3 { + eprintln!( + "Usage: {} [path-to-fallback-wit]", + args[0] + ); + eprintln!(" If no fallback WIT is provided, uses built-in hyperware.wit"); + std::process::exit(1); + } + + let zip_path = &args[1]; + + // Read the zip file into memory + let zip_bytes = fs::read(zip_path).expect(&format!("Failed to read zip file: {}", zip_path)); + + // Optionally read custom fallback WIT + let fallback_wit = if args.len() == 3 { + Some(fs::read(&args[2]).expect(&format!("Failed to read fallback WIT file: {}", args[2]))) + } else { + None + }; + + // Parse WIT from the zip and get JSON + // If no package header is found in the zip, it will use the fallback WIT + let json = parse_wit_from_zip(&zip_bytes, fallback_wit)?; + + // Print the JSON output + println!("{}", json); + + Ok(()) +} diff --git a/hyperdrive/packages/spider/crates/hyperware-parse-wit/src/hyperware.wit b/hyperdrive/packages/spider/crates/hyperware-parse-wit/src/hyperware.wit new file mode 100644 index 000000000..db96383b5 --- /dev/null +++ b/hyperdrive/packages/spider/crates/hyperware-parse-wit/src/hyperware.wit @@ -0,0 +1,224 @@ +package hyperware:process@1.0.0; + +interface standard { + + // ˗ˏˋ ♡ ˎˊ˗ + // System Types + // ˗ˏˋ ♡ ˎˊ˗ + + /// JSON is passed over Wasm boundary as a string. + type json = string; + + /// In types passed from kernel, node-id will be a valid Kimap entry. 
+ type node-id = string; + + /// Context, like a message body, is a protocol-defined serialized byte + /// array. It is used when building a Request to save information that + /// will not be part of a Response, in order to more easily handle + /// ("contextualize") that Response. + type context = list; + + record process-id { + process-name: string, + package-name: string, + publisher-node: node-id, + } + + record package-id { + package-name: string, + publisher-node: node-id, + } + + record address { + node: node-id, + process: process-id, + } + + record lazy-load-blob { + mime: option, + bytes: list, + } + + record request { + // set in order to inherit lazy-load-blob from parent message, and if + // expects-response is none, direct response to source of parent. + // also carries forward certain aspects of parent message in kernel, + // see documentation for formal spec and examples: + // https://docs.rs/hyperware_process_lib/latest/hyperware_process_lib/struct.Request.html + inherit: bool, + // if some, request expects a response in the given number of seconds + expects-response: option, + body: list, + metadata: option, + capabilities: list, + // to grab lazy-load-blob, use get_blob() + } + + record response { + inherit: bool, + body: list, + metadata: option, + capabilities: list, + // to grab lazy-load-blob, use get_blob() + } + + /// A message can be a request or a response. Within a response, there is + /// a result which surfaces any error that happened because of a request. + /// A successful response will contain the context of the request it + /// matches, if any was set. + variant message { + request(request), + response(tuple>), + } + + record capability { + issuer: address, + params: json, + } + + /// On-exit is a setting that determines what happens when a process + /// panics, completes, or otherwise "ends". + /// NOTE: requests will always have expects-response set to false by kernel. 
+ variant on-exit { + none, + restart, + requests(list>>), + } + + /// Send errors come from trying to send a message to another process, + /// either locally or on another node. + /// A message can fail by timing out, or by the node being entirely + /// unreachable (offline or can't be found in PKI). In either case, + /// the message is not delivered and the process that sent it receives + /// that message back along with any assigned context and/or lazy-load-blob, + /// and is free to handle it as it sees fit. + /// In the local case, only timeout errors are possible and also cover the case + /// in which a process is not running or does not exist. + record send-error { + kind: send-error-kind, + target: address, + message: message, + lazy-load-blob: option, + } + + enum send-error-kind { + offline, + timeout, + } + + enum spawn-error { + name-taken, + no-file-at-path, + } + + // ˗ˏˋ ♡ ˎˊ˗ + // System Utils + // ˗ˏˋ ♡ ˎˊ˗ + + /// Prints to the terminal at a given verbosity level. + /// Higher verbosity levels print more information. + /// Level 0 is always printed -- use sparingly. + print-to-terminal: func(verbosity: u8, message: string); + + /// Returns the address of the process. + our: func() -> address; + + // ˗ˏˋ ♡ ˎˊ˗ + // Process Management + // ˗ˏˋ ♡ ˎˊ˗ + + get-on-exit: func() -> on-exit; + + set-on-exit: func(on-exit: on-exit); + + get-state: func() -> option>; + + set-state: func(bytes: list); + + clear-state: func(); + + spawn: func( + // name is optional. if not provided, name will be a random u64. + name: option, + // wasm-path must be located within package's drive + wasm-path: string, + on-exit: on-exit, + // requested capabilities must be owned by the caller + request-capabilities: list, + // granted capabilities will be generated by the child process + // and handed out to the indicated process-id. 
+ grant-capabilities: list>, + public: bool + ) -> result; + + // ˗ˏˋ ♡ ˎˊ˗ + // Capabilities Management + // ˗ˏˋ ♡ ˎˊ˗ + + /// Saves the capabilities to persisted process state. + save-capabilities: func(caps: list); + + /// Deletes the capabilities from persisted process state. + drop-capabilities: func(caps: list); + + /// Gets all capabilities from persisted process state. + our-capabilities: func() -> list; + + // ˗ˏˋ ♡ ˎˊ˗ + // Message I/O + // ˗ˏˋ ♡ ˎˊ˗ + + /// Ingest next message when it arrives along with its source. + /// Almost all long-running processes will call this in a loop. + receive: func() -> + result, tuple>>; + + /// Returns whether or not the current message has a blob. + has-blob: func() -> bool; + + /// Returns the blob of the current message, if any. + get-blob: func() -> option; + + /// Returns the last blob this process received. + last-blob: func() -> option; + + /// Send request to target. + send-request: func( + target: address, + request: request, + context: option, + lazy-load-blob: option + ); + + /// Send requests to targets. + send-requests: func( + requests: list, + option>> + ); + + /// Send response to the request currently being handled. + send-response: func( + response: response, + lazy-load-blob: option + ); + + /// Send a single request, then block (internally) until its response. The + /// type returned is Message but will always contain Response. 
+ send-and-await-response: func( + target: address, + request: request, + lazy-load-blob: option + ) -> result, send-error>; +} + +world lib { + import standard; +} + +world process-v1 { + include lib; + + export init: func(our: string); +} diff --git a/hyperdrive/packages/spider/crates/hyperware-parse-wit/src/lib.rs b/hyperdrive/packages/spider/crates/hyperware-parse-wit/src/lib.rs new file mode 100644 index 000000000..84409dfee --- /dev/null +++ b/hyperdrive/packages/spider/crates/hyperware-parse-wit/src/lib.rs @@ -0,0 +1,116 @@ +use anyhow::{Context, Result}; +use std::io::Read; +use std::path::Path; +use wit_parser::Resolve; +use zip::ZipArchive; + +// Include the default hyperware.wit file +const DEFAULT_HYPERWARE_WIT: &str = include_str!("hyperware.wit"); + +/// Parse WIT files from a zip archive and return JSON representation +/// +/// # Arguments +/// * `zip_bytes` - The bytes of the zip file containing WIT files +/// * `fallback_wit` - Optional WIT content to use when package header is missing. +/// If None, uses the built-in hyperware.wit +pub fn parse_wit_from_zip(zip_bytes: &[u8], fallback_wit: Option>) -> Result { + let resolve = parse_wit_from_zip_to_resolve(zip_bytes, fallback_wit)?; + let json = serde_json::to_string_pretty(&resolve).context("failed to serialize to JSON")?; + Ok(json) +} + +/// Parse WIT files from a zip archive and return parsed Resolve +/// +/// # Arguments +/// * `zip_bytes` - The bytes of the zip file containing WIT files +/// * `fallback_wit` - Optional WIT content to use when package header is missing. 
+/// If None, uses the built-in hyperware.wit +pub fn parse_wit_from_zip_to_resolve( + zip_bytes: &[u8], + fallback_wit: Option>, +) -> Result { + // Open the zip archive from bytes + let cursor = std::io::Cursor::new(zip_bytes); + let mut archive = ZipArchive::new(cursor).context("failed to open zip archive")?; + + let mut resolve = Resolve::default(); + let mut has_package_header = false; + let mut wit_files = Vec::new(); + + // First pass: collect all WIT files and check for package headers + for i in 0..archive.len() { + let mut file = archive.by_index(i).context("failed to access zip entry")?; + let name = file.name().to_string(); + + // Skip directories + if name.ends_with('/') { + continue; + } + + // Only process .wit files + if !name.ends_with(".wit") { + continue; + } + + // Read the file contents + let mut contents = String::new(); + file.read_to_string(&mut contents) + .with_context(|| format!("failed to read file: {}", name))?; + + // Check if this file contains a package header + if contents.contains("package ") && contents.contains("@") { + has_package_header = true; + } + + wit_files.push((name, contents)); + } + + // If no package header found, combine all WIT files with the fallback package header + if !has_package_header && !wit_files.is_empty() { + let fallback_content = match fallback_wit { + Some(bytes) => { + String::from_utf8(bytes).context("fallback WIT content is not valid UTF-8")? 
+ } + None => DEFAULT_HYPERWARE_WIT.to_string(), + }; + + // Combine all WIT files into a single document with the package header + let mut combined_wit = fallback_content; + combined_wit.push_str("\n\n"); + + for (_, contents) in &wit_files { + combined_wit.push_str(&contents); + combined_wit.push_str("\n\n"); + } + + // Parse the combined WIT document + resolve + .push_str(Path::new("combined.wit"), &combined_wit) + .context("failed to parse combined WIT package")?; + } else { + // Parse each file individually if package headers are present + for (name, contents) in wit_files { + let path = Path::new(&name); + resolve + .push_str(path, &contents) + .with_context(|| format!("failed to parse WIT file: {}", name))?; + } + } + + Ok(resolve) +} + +/// Parse WIT files from a zip archive and return serde_json::Value +/// +/// # Arguments +/// * `zip_bytes` - The bytes of the zip file containing WIT files +/// * `fallback_wit` - Optional WIT content to use when package header is missing. +/// If None, uses the built-in hyperware.wit +pub fn parse_wit_from_zip_to_value( + zip_bytes: &[u8], + fallback_wit: Option>, +) -> Result { + let resolve = parse_wit_from_zip_to_resolve(zip_bytes, fallback_wit)?; + let value = serde_json::to_value(&resolve).context("failed to convert to JSON value")?; + Ok(value) +} diff --git a/hyperdrive/packages/spider/spider/src/lib.rs b/hyperdrive/packages/spider/spider/src/lib.rs index 2933ac2e5..1c9023f13 100644 --- a/hyperdrive/packages/spider/spider/src/lib.rs +++ b/hyperdrive/packages/spider/spider/src/lib.rs @@ -48,6 +48,7 @@ mod tool_providers; use tool_providers::{ build_container::{BuildContainerExt, BuildContainerToolProvider}, hypergrid::{HypergridExt, HypergridToolProvider}, + hyperware::HyperwareToolProvider, ToolProvider, }; @@ -229,6 +230,11 @@ impl SpiderState { self.tool_provider_registry .register(Box::new(hypergrid_provider)); + // Register Hyperware tool provider + let hyperware_provider = HyperwareToolProvider::new(); + 
self.tool_provider_registry + .register(Box::new(hyperware_provider)); + // Check if hypergrid server exists let has_hypergrid = self .mcp_servers @@ -3029,6 +3035,18 @@ impl SpiderState { self.execute_hypergrid_call_impl(server_id, provider_id, provider_name, call_args) .await } + ToolExecutionCommand::HyperwareSearchApis { query } => { + tool_providers::hyperware::search_apis(&query).await + } + ToolExecutionCommand::HyperwareGetApi { package_id } => { + tool_providers::hyperware::get_api(&package_id).await + } + ToolExecutionCommand::HyperwareCallApi { + package_id, + method, + args, + timeout, + } => tool_providers::hyperware::call_api(&package_id, &method, &args, timeout).await, ToolExecutionCommand::DirectResult(result) => result, } } diff --git a/hyperdrive/packages/spider/spider/src/tool_providers/hyperware.rs b/hyperdrive/packages/spider/spider/src/tool_providers/hyperware.rs new file mode 100644 index 000000000..c43ac678d --- /dev/null +++ b/hyperdrive/packages/spider/spider/src/tool_providers/hyperware.rs @@ -0,0 +1,313 @@ +use crate::tool_providers::{ToolExecutionCommand, ToolProvider}; +use crate::types::{SpiderState, Tool}; +use hyperware_parse_wit::parse_wit_from_zip_to_resolve; +use hyperware_process_lib::{get_blob, hyperapp::send, ProcessId, ProcessIdParseError, Request}; +use serde_json::{json, Value}; +use wit_parser::Docs; + +pub struct HyperwareToolProvider; + +impl HyperwareToolProvider { + pub fn new() -> Self { + Self + } + + fn create_search_apis_tool(&self) -> Tool { + Tool { + name: "hyperware_search_apis".to_string(), + description: "Search available APIs on Hyperware by querying the app store and filtering based on a search term.".to_string(), + parameters: r#"{"type":"object","required":["query"],"properties":{"query":{"type":"string","description":"Search term to filter available APIs (e.g., 'weather', 'database', 'auth')"}}}"#.to_string(), + input_schema_json: 
Some(r#"{"type":"object","required":["query"],"properties":{"query":{"type":"string","description":"Search term to filter available APIs (e.g., 'weather', 'database', 'auth')"}}}"#.to_string()), + } + } + + fn create_get_api_tool(&self) -> Tool { + Tool { + name: "hyperware_get_api".to_string(), + description: "Get the detailed API documentation for a specific package, including all available types and methods.".to_string(), + parameters: r#"{"type":"object","required":["package_id"],"properties":{"package_id":{"type":"string","description":"The package ID in the format 'package-name:publisher-node' (e.g., 'weather-app-9000:foo.os')"}}}"#.to_string(), + input_schema_json: Some(r#"{"type":"object","required":["package_id"],"properties":{"package_id":{"type":"string","description":"The package ID in the format 'package-name:publisher-node' (e.g., 'weather-app-9000:foo.os')"}}}"#.to_string()), + } + } + + fn create_call_api_tool(&self) -> Tool { + Tool { + name: "hyperware_call_api".to_string(), + description: "Call a specific API method on a Hyperware process to execute functionality.".to_string(), + parameters: r#"{"type":"object","required":["process_id","method","args"],"properties":{"process_id":{"type":"string","description":"The process ID in the format 'process-name:package-name:publisher-node'"},"method":{"type":"string","description":"The method name to call on the package"},"args":{"type":"string","description":"JSON string of arguments to pass to the method"},"timeout":{"type":"number","description":"Optional timeout in seconds (default: 15)"}}}"#.to_string(), + input_schema_json: Some(r#"{"type":"object","required":["package_id","method","args"],"properties":{"package_id":{"type":"string","description":"The package ID in the format 'package_name:publisher_node'"},"method":{"type":"string","description":"The method name to call on the package"},"args":{"type":"string","description":"JSON string of arguments to pass to the 
method"},"timeout":{"type":"number","description":"Optional timeout in seconds (default: 15)"}}}"#.to_string()), + } + } +} + +impl ToolProvider for HyperwareToolProvider { + fn get_tools(&self, _state: &SpiderState) -> Vec { + vec![ + self.create_search_apis_tool(), + self.create_get_api_tool(), + self.create_call_api_tool(), + ] + } + + fn should_include_tool(&self, tool_name: &str, _state: &SpiderState) -> bool { + match tool_name { + "hyperware_search_apis" | "hyperware_get_api" | "hyperware_call_api" => true, + _ => false, + } + } + + fn prepare_execution( + &self, + tool_name: &str, + parameters: &Value, + _state: &SpiderState, + ) -> Result { + match tool_name { + "hyperware_search_apis" => { + let query = parameters + .get("query") + .and_then(|v| v.as_str()) + .ok_or_else(|| "Missing query parameter".to_string())? + .to_string(); + + Ok(ToolExecutionCommand::HyperwareSearchApis { query }) + } + "hyperware_get_api" => { + let package_id = parameters + .get("package_id") + .and_then(|v| v.as_str()) + .ok_or_else(|| "Missing package_id parameter".to_string())? + .to_string(); + + Ok(ToolExecutionCommand::HyperwareGetApi { package_id }) + } + "hyperware_call_api" => { + let package_id = parameters + .get("package_id") + .and_then(|v| v.as_str()) + .ok_or_else(|| "Missing package_id parameter".to_string())? + .to_string(); + + let method = parameters + .get("method") + .and_then(|v| v.as_str()) + .ok_or_else(|| "Missing method parameter".to_string())? + .to_string(); + + let args = parameters + .get("args") + .and_then(|v| v.as_str()) + .ok_or_else(|| "Missing args parameter".to_string())? 
+ .to_string(); + + let timeout = parameters + .get("timeout") + .and_then(|v| v.as_u64()) + .unwrap_or(15); + + Ok(ToolExecutionCommand::HyperwareCallApi { + package_id, + method, + args, + timeout, + }) + } + _ => Err(format!("Unknown tool: {}", tool_name)), + } + } + + fn get_provider_id(&self) -> &str { + "hyperware" + } +} + +// Helper functions for executing Hyperware operations + +pub async fn search_apis(query: &str) -> Result { + // First, get the list of all APIs from app-store + let apis_request = serde_json::to_vec(&json!("Apis")).unwrap(); + let request = Request::to(("our", "main", "app-store", "sys")) + .body(apis_request) + .expects_response(5); + + let apis_response: Value = send(request) + .await + .map_err(|e| format!("Failed to get APIs list: {:?}", e))?; + + //let body = String::from_utf8(response.body().to_vec()) + // .map_err(|e| format!("Failed to parse response body: {:?}", e))?; + + //let apis_response: Value = serde_json::from_str(&body) + // .map_err(|e| format!("Failed to parse JSON response: {:?}", e))?; + + // Extract the APIs list + let apis = apis_response + .get("ApisResponse") + .and_then(|r| r.get("apis")) + .and_then(|a| a.as_array()) + .ok_or_else(|| "Invalid APIs response format".to_string())?; + + // Process each API to get its documentation + let mut results: Vec<(String, Option)> = Vec::new(); + + for api in apis { + let package_name = api + .get("package_name") + .and_then(|n| n.as_str()) + .ok_or_else(|| "Missing package_name".to_string())?; + let publisher_node = api + .get("publisher_node") + .and_then(|n| n.as_str()) + .ok_or_else(|| "Missing publisher_node".to_string())?; + let package_id = format!("{}:{}", package_name, publisher_node); + + // Skip if package doesn't match the query (case-insensitive) + let query_lower = query.to_lowercase(); + if !package_id.to_lowercase().contains(&query_lower) { + continue; + } + + // Try to get the package documentation + match get_api_documentation(&package_id).await { + 
Ok(docs) => { + results.push((package_id, Some(docs))); + } + Err(_) => { + // If we can't get docs, still include the package without docs + results.push((package_id, None)); + } + } + } + + Ok(json!(results)) +} + +pub async fn get_api(package_id: &str) -> Result { + // Split package_id into package_name and publisher_node + let parts: Vec<&str> = package_id.splitn(2, ':').collect(); + if parts.len() != 2 { + return Err(format!( + "Invalid package_id format. Expected 'package_name:publisher_node', got '{}'", + package_id + )); + } + let package_name = parts[0]; + let publisher_node = parts[1]; + + // Request API zip from app-store + let get_api_request = serde_json::to_vec(&json!({ + "GetApi": { + "package_name": package_name, + "publisher_node": publisher_node, + } + })) + .unwrap(); + + let request = Request::to(("our", "main", "app-store", "sys")) + .body(get_api_request) + .expects_response(5); + + let _response = send(request) + .await + .map_err(|e| format!("Failed to get API: {:?}", e))?; + + // Check if we got a blob (zip file) + let blob = get_blob(); + if blob.is_none() { + return Err(format!("No API zip found for package {}", package_id)); + } + + let blob_bytes = blob.ok_or_else(|| "No blob received".to_string())?.bytes; + + // Parse the WIT files from the zip + let resolve = parse_wit_from_zip_to_resolve(&blob_bytes, None) + .map_err(|e| format!("Failed to parse WIT files: {:?}", e))?; + + // Extract type information with documentation + let mut types_with_docs: Vec<(String, Option)> = Vec::new(); + + // Iterate through all packages in the resolve + for (_, package) in resolve.packages.iter() { + // Add interfaces + for (_, iface_id) in &package.interfaces { + let iface = &resolve.interfaces[*iface_id]; + let type_name = iface.name.as_deref().unwrap_or("unnamed_interface"); + let docs = extract_docs(&iface.docs); + types_with_docs.push((type_name.to_string(), docs)); + + // Add functions within the interface + for (func_name, _) in &iface.functions { 
+ let full_name = format!("{}.{}", type_name, func_name); + types_with_docs.push((full_name, None)); // Function-level docs if available + } + } + + // Add worlds + for (_, world_id) in &package.worlds { + let world = &resolve.worlds[*world_id]; + let type_name = world.name.to_string(); + //let type_name = world.name.as_ref().map(|s| s.as_str()).unwrap_or("unnamed_world"); + let docs = extract_docs(&world.docs); + types_with_docs.push((type_name.to_string(), docs)); + } + } + + Ok(json!(types_with_docs)) +} + +pub async fn call_api( + process_id: &str, + method: &str, + args: &str, + timeout: u64, +) -> Result { + let process_id: ProcessId = process_id + .parse() + .map_err(|e: ProcessIdParseError| e.to_string())?; + + // Create request body with method and args + let request_body = serde_json::to_vec(&json!({ + method: serde_json::from_str::(args).unwrap_or_else(|_| json!(args)) + })) + .unwrap(); + + // Send the request to the package + let request = Request::to(("our", process_id)) + .body(request_body) + .expects_response(timeout); + + let response: Value = send(request) + .await + .map_err(|e| format!("Failed to call API: {:?}", e))?; + + Ok(response) + //let body = String::from_utf8(response.body().to_vec()) + // .map_err(|e| format!("Failed to parse response body: {:?}", e))?; + + //// Try to parse as JSON, otherwise return as string + //let result = serde_json::from_str::(&body).unwrap_or_else(|_| json!(body)); + + //Ok(result) +} + +async fn get_api_documentation(package_id: &str) -> Result { + // This is a simplified version that just returns the package_id + // In a full implementation, we would fetch and parse the actual documentation + let parts: Vec<&str> = package_id.splitn(2, ':').collect(); + if parts.len() != 2 { + return Err("Invalid package_id format".to_string()); + } + + // Try to get the API and extract package-level documentation + match get_api(package_id).await { + Ok(_api_data) => { + // For now, just return a basic description + 
Ok(format!("API package: {}", parts[0])) + } + Err(_) => Ok(format!("Package: {}", parts[0])), + } +} + +fn extract_docs(docs: &Docs) -> Option { + docs.contents.clone() +} diff --git a/hyperdrive/packages/spider/spider/src/tool_providers/mod.rs b/hyperdrive/packages/spider/spider/src/tool_providers/mod.rs index 9710b302d..5136ceeb4 100644 --- a/hyperdrive/packages/spider/spider/src/tool_providers/mod.rs +++ b/hyperdrive/packages/spider/spider/src/tool_providers/mod.rs @@ -1,5 +1,6 @@ pub mod build_container; pub mod hypergrid; +pub mod hyperware; use crate::types::{SpiderState, Tool}; use serde_json::Value; @@ -47,6 +48,19 @@ pub enum ToolExecutionCommand { provider_name: String, call_args: Vec<(String, String)>, }, + // Hyperware commands + HyperwareSearchApis { + query: String, + }, + HyperwareGetApi { + package_id: String, + }, + HyperwareCallApi { + package_id: String, + method: String, + args: String, + timeout: u64, + }, // Direct result (for synchronous operations) DirectResult(Result), } From 21c341ff6b064d8b7e45ffebf375724b26bbd441 Mon Sep 17 00:00:00 2001 From: hosted-fornet Date: Mon, 22 Sep 2025 21:16:01 -0700 Subject: [PATCH 22/70] spider: get it working --- .../crates/hyperware-parse-wit/Cargo.toml | 2 +- hyperdrive/packages/spider/pkg/manifest.json | 2 + hyperdrive/packages/spider/spider/Cargo.toml | 7 + hyperdrive/packages/spider/spider/src/lib.rs | 62 +++- .../spider/src/tool_providers/hyperware.rs | 326 +++++++++++++++--- .../spider/spider/src/tool_providers/mod.rs | 2 +- 6 files changed, 346 insertions(+), 55 deletions(-) diff --git a/hyperdrive/packages/spider/crates/hyperware-parse-wit/Cargo.toml b/hyperdrive/packages/spider/crates/hyperware-parse-wit/Cargo.toml index df302c9e3..3a52e2d2d 100644 --- a/hyperdrive/packages/spider/crates/hyperware-parse-wit/Cargo.toml +++ b/hyperdrive/packages/spider/crates/hyperware-parse-wit/Cargo.toml @@ -7,4 +7,4 @@ edition = "2021" anyhow = "1.0" serde_json = "1.0" wit-parser = { version = "0.220.0", 
features = ["serde"] } -zip = { version = "2.2", default-features = false } +zip = { version = "2.2", default-features = false, features = ["deflate"] } diff --git a/hyperdrive/packages/spider/pkg/manifest.json b/hyperdrive/packages/spider/pkg/manifest.json index 61427bad5..3fb3d4fa8 100644 --- a/hyperdrive/packages/spider/pkg/manifest.json +++ b/hyperdrive/packages/spider/pkg/manifest.json @@ -5,6 +5,7 @@ "on_exit": "Restart", "request_networking": true, "request_capabilities": [ + "main:app-store:sys", "homepage:homepage:sys", "http-client:distro:sys", "http-server:distro:sys", @@ -14,6 +15,7 @@ "vfs:distro:sys" ], "grant_capabilities": [ + "main:app-store:sys", "homepage:homepage:sys", "http-client:distro:sys", "http-server:distro:sys", diff --git a/hyperdrive/packages/spider/spider/Cargo.toml b/hyperdrive/packages/spider/spider/Cargo.toml index d97888cf2..5a6107154 100644 --- a/hyperdrive/packages/spider/spider/Cargo.toml +++ b/hyperdrive/packages/spider/spider/Cargo.toml @@ -21,6 +21,9 @@ rev = "66884c0" git = "https://github.com/hyperware-ai/hyperware-anthropic-sdk" rev = "c0cbd5e" +[dependencies.hyperware-parse-wit] +path = "../crates/hyperware-parse-wit" + [dependencies.hyperware_process_lib] features = ["hyperapp"] git = "https://github.com/hyperware-ai/process_lib" @@ -37,6 +40,10 @@ features = [ ] version = "1.4.1" +[dependencies.wit-parser] +features = ["serde"] +version = "0.220.0" + [features] simulation-mode = [] diff --git a/hyperdrive/packages/spider/spider/src/lib.rs b/hyperdrive/packages/spider/spider/src/lib.rs index 1c9023f13..f69dd087a 100644 --- a/hyperdrive/packages/spider/spider/src/lib.rs +++ b/hyperdrive/packages/spider/spider/src/lib.rs @@ -235,6 +235,52 @@ impl SpiderState { self.tool_provider_registry .register(Box::new(hyperware_provider)); + // Check if hyperware server exists + let has_hyperware = self + .mcp_servers + .iter() + .any(|s| s.transport.transport_type == "hyperware"); + if !has_hyperware { + // Create new hyperware 
server + let hyperware_provider = HyperwareToolProvider::new(); + let hyperware_tools = hyperware_provider.get_tools(self); + let hyperware_server = McpServer { + id: "hyperware".to_string(), + name: "Hyperware".to_string(), + transport: types::TransportConfig { + transport_type: "hyperware".to_string(), + command: None, + args: None, + url: None, + hypergrid_token: None, + hypergrid_client_id: None, + hypergrid_node: None, + }, + tools: hyperware_tools, + connected: true, // Always mark as connected + }; + self.mcp_servers.push(hyperware_server); + println!("Spider: Hyperware MCP server initialized"); + } else { + // Server exists, refresh its tools from the provider + println!("Spider: Refreshing Hyperware tools on startup"); + // Get fresh tools from provider + let hyperware_provider = HyperwareToolProvider::new(); + let fresh_tools = hyperware_provider.get_tools(self); + // Update the existing server's tools + if let Some(server) = self + .mcp_servers + .iter_mut() + .find(|s| s.id == "hyperware") + { + server.tools = fresh_tools; + println!( + "Spider: Hyperware tools refreshed with {} tools", + server.tools.len() + ); + } + } + // Check if hypergrid server exists let has_hypergrid = self .mcp_servers @@ -2688,6 +2734,18 @@ impl SpiderState { Err(format!("Unknown build container tool: {}", tool_name)) } } + "hyperware" => { + // Native hyperware tools are handled by the tool provider + if let Some(provider) = self + .tool_provider_registry + .find_provider_for_tool(tool_name, self) + { + let command = provider.prepare_execution(tool_name, parameters, self)?; + self.execute_tool_command(command, conversation_id).await + } else { + Err(format!("Unknown hyperware tool: {}", tool_name)) + } + } "stdio" | "websocket" => { // Find the WebSocket connection for this server let channel_id = self @@ -3042,11 +3100,11 @@ impl SpiderState { tool_providers::hyperware::get_api(&package_id).await } ToolExecutionCommand::HyperwareCallApi { - package_id, + process_id, method, 
args, timeout, - } => tool_providers::hyperware::call_api(&package_id, &method, &args, timeout).await, + } => tool_providers::hyperware::call_api(&process_id, &method, &args, timeout).await, ToolExecutionCommand::DirectResult(result) => result, } } diff --git a/hyperdrive/packages/spider/spider/src/tool_providers/hyperware.rs b/hyperdrive/packages/spider/spider/src/tool_providers/hyperware.rs index c43ac678d..4a498732f 100644 --- a/hyperdrive/packages/spider/spider/src/tool_providers/hyperware.rs +++ b/hyperdrive/packages/spider/spider/src/tool_providers/hyperware.rs @@ -34,8 +34,8 @@ impl HyperwareToolProvider { Tool { name: "hyperware_call_api".to_string(), description: "Call a specific API method on a Hyperware process to execute functionality.".to_string(), - parameters: r#"{"type":"object","required":["process_id","method","args"],"properties":{"process_id":{"type":"string","description":"The process ID in the format 'process-name:package-name:publisher-node'"},"method":{"type":"string","description":"The method name to call on the package"},"args":{"type":"string","description":"JSON string of arguments to pass to the method"},"timeout":{"type":"number","description":"Optional timeout in seconds (default: 15)"}}}"#.to_string(), - input_schema_json: Some(r#"{"type":"object","required":["package_id","method","args"],"properties":{"package_id":{"type":"string","description":"The package ID in the format 'package_name:publisher_node'"},"method":{"type":"string","description":"The method name to call on the package"},"args":{"type":"string","description":"JSON string of arguments to pass to the method"},"timeout":{"type":"number","description":"Optional timeout in seconds (default: 15)"}}}"#.to_string()), + parameters: r#"{"type":"object","required":["process_id","method","args"],"properties":{"process_id":{"type":"string","description":"The process ID in the format 'process-name:package-name:publisher-node'"},"method":{"type":"string","description":"The method 
name to call on the process. By convention UpperCamelCase"},"args":{"type":"string","description":"JSON string of arguments to pass to the method"},"timeout":{"type":"number","description":"Optional timeout in seconds (default: 15)"}}}"#.to_string(), + input_schema_json: Some(r#"{"type":"object","required":["process_id","method","args"],"properties":{"process_id":{"type":"string","description":"The process ID in the format 'process-name:package-name:publisher-node'"},"method":{"type":"string","description":"The method name to call on the process. By convention UpperCamelCase"},"args":{"type":"string","description":"JSON string of arguments to pass to the method"},"timeout":{"type":"number","description":"Optional timeout in seconds (default: 15)"}}}"#.to_string()), } } } @@ -82,8 +82,8 @@ impl ToolProvider for HyperwareToolProvider { Ok(ToolExecutionCommand::HyperwareGetApi { package_id }) } "hyperware_call_api" => { - let package_id = parameters - .get("package_id") + let process_id = parameters + .get("process_id") .and_then(|v| v.as_str()) .ok_or_else(|| "Missing package_id parameter".to_string())? 
.to_string(); @@ -106,7 +106,7 @@ impl ToolProvider for HyperwareToolProvider { .unwrap_or(15); Ok(ToolExecutionCommand::HyperwareCallApi { - package_id, + process_id, method, args, timeout, @@ -134,12 +134,6 @@ pub async fn search_apis(query: &str) -> Result { .await .map_err(|e| format!("Failed to get APIs list: {:?}", e))?; - //let body = String::from_utf8(response.body().to_vec()) - // .map_err(|e| format!("Failed to parse response body: {:?}", e))?; - - //let apis_response: Value = serde_json::from_str(&body) - // .map_err(|e| format!("Failed to parse JSON response: {:?}", e))?; - // Extract the APIs list let apis = apis_response .get("ApisResponse") @@ -168,7 +162,7 @@ pub async fn search_apis(query: &str) -> Result { } // Try to get the package documentation - match get_api_documentation(&package_id).await { + match get_package_documentation(&package_id).await { Ok(docs) => { results.push((package_id, Some(docs))); } @@ -207,7 +201,7 @@ pub async fn get_api(package_id: &str) -> Result { .body(get_api_request) .expects_response(5); - let _response = send(request) + let _response: Value = send(request) .await .map_err(|e| format!("Failed to get API: {:?}", e))?; @@ -223,36 +217,104 @@ pub async fn get_api(package_id: &str) -> Result { let resolve = parse_wit_from_zip_to_resolve(&blob_bytes, None) .map_err(|e| format!("Failed to parse WIT files: {:?}", e))?; - // Extract type information with documentation - let mut types_with_docs: Vec<(String, Option)> = Vec::new(); + // Extract type information with full definitions and documentation + let mut types_with_definitions: Vec = Vec::new(); + let mut seen_types = std::collections::HashSet::new(); // Iterate through all packages in the resolve - for (_, package) in resolve.packages.iter() { - // Add interfaces - for (_, iface_id) in &package.interfaces { + for (_pkg_id, package) in resolve.packages.iter() { + // Process interfaces + for (iface_name, iface_id) in &package.interfaces { let iface = 
&resolve.interfaces[*iface_id]; - let type_name = iface.name.as_deref().unwrap_or("unnamed_interface"); - let docs = extract_docs(&iface.docs); - types_with_docs.push((type_name.to_string(), docs)); - - // Add functions within the interface - for (func_name, _) in &iface.functions { - let full_name = format!("{}.{}", type_name, func_name); - types_with_docs.push((full_name, None)); // Function-level docs if available + + // Skip standard or lib interfaces but include important types + if iface_name == "standard" || iface_name == "lib" { + for (type_name, type_id) in &iface.types { + // Only include certain standard types that are commonly used + if matches!(type_name.as_str(), "address" | "process-id" | "package-id" | + "node-id" | "capability" | "request" | "response" | "message") { + let rust_type_name = to_upper_camel_case(type_name); + if seen_types.insert(rust_type_name.clone()) { + let type_def = &resolve.types[*type_id]; + let docs = extract_docs(&type_def.docs); + let type_schema = type_to_json_schema(type_def, &resolve); + types_with_definitions.push(json!({ + "name": rust_type_name, + "definition": type_schema, + "documentation": docs + })); + } + } + } + continue; + } + + // Keep process name in kebab-case + let process_name = iface_name.clone(); + + // Add types within the interface + for (type_name, type_id) in &iface.types { + let type_name_camel = to_upper_camel_case(type_name); + + // Skip types ending with SignatureHttp or SignatureRemote + if type_name_camel.ends_with("SignatureHttp") || type_name_camel.ends_with("SignatureRemote") { + continue; + } + + if seen_types.insert(format!("{}::{}", process_name, type_name_camel)) { + let type_def = &resolve.types[*type_id]; + let docs = extract_docs(&type_def.docs); + let type_schema = type_to_json_schema(type_def, &resolve); + types_with_definitions.push(json!({ + "name": type_name_camel, + "process_name": process_name, + "definition": type_schema, + "documentation": docs + })); + } } - } - // Add 
worlds - for (_, world_id) in &package.worlds { - let world = &resolve.worlds[*world_id]; - let type_name = world.name.to_string(); - //let type_name = world.name.as_ref().map(|s| s.as_str()).unwrap_or("unnamed_world"); - let docs = extract_docs(&world.docs); - types_with_docs.push((type_name.to_string(), docs)); + // Add functions within the interface with parameter/return type info + for (func_name, func) in &iface.functions { + let func_name_formatted = func_name.clone(); + + if seen_types.insert(format!("{}::{}", process_name, func_name_formatted)) { + let docs = extract_docs(&func.docs); + let params_schema = func.params.iter().map(|(param_name, param_type)| { + json!({ + "name": to_snake_case(param_name), + "type": type_ref_to_json(¶m_type, &resolve) + }) + }).collect::>(); + + let returns_schema = match &func.results { + wit_parser::Results::Named(named) => { + named.iter().map(|(name, type_ref)| { + json!({ + "name": to_snake_case(name), + "type": type_ref_to_json(&type_ref, &resolve) + }) + }).collect::>() + } + wit_parser::Results::Anon(type_ref) => { + vec![json!({"type": type_ref_to_json(&type_ref, &resolve)})] + } + }; + + types_with_definitions.push(json!({ + "name": func_name_formatted, + "process_name": process_name, + "type": "function", + "parameters": params_schema, + "returns": returns_schema, + "documentation": docs + })); + } + } } } - Ok(json!(types_with_docs)) + Ok(json!(types_with_definitions)) } pub async fn call_api( @@ -281,33 +343,195 @@ pub async fn call_api( .map_err(|e| format!("Failed to call API: {:?}", e))?; Ok(response) - //let body = String::from_utf8(response.body().to_vec()) - // .map_err(|e| format!("Failed to parse response body: {:?}", e))?; - - //// Try to parse as JSON, otherwise return as string - //let result = serde_json::from_str::(&body).unwrap_or_else(|_| json!(body)); - - //Ok(result) } -async fn get_api_documentation(package_id: &str) -> Result { - // This is a simplified version that just returns the package_id - 
// In a full implementation, we would fetch and parse the actual documentation +async fn get_package_documentation(package_id: &str) -> Result { + // Split package_id into package_name and publisher_node let parts: Vec<&str> = package_id.splitn(2, ':').collect(); if parts.len() != 2 { return Err("Invalid package_id format".to_string()); } + let package_name = parts[0]; + let publisher_node = parts[1]; - // Try to get the API and extract package-level documentation - match get_api(package_id).await { - Ok(_api_data) => { - // For now, just return a basic description - Ok(format!("API package: {}", parts[0])) + // Request API zip from app-store to get package-level docs + let get_api_request = serde_json::to_vec(&json!({ + "GetApi": { + "package_name": package_name, + "publisher_node": publisher_node, } - Err(_) => Ok(format!("Package: {}", parts[0])), + })) + .unwrap(); + + let request = Request::to(("our", "main", "app-store", "sys")) + .body(get_api_request) + .expects_response(5); + + let _response: Value = send(request) + .await + .map_err(|e| format!("Failed to get API: {:?}", e))?; + + // Check if we got a blob (zip file) + let blob = get_blob(); + if blob.is_none() { + return Err(format!("No API zip found for package {}", package_id)); } + + let blob_bytes = blob.ok_or_else(|| "No blob received".to_string())?.bytes; + + // Parse the WIT files from the zip + let resolve = parse_wit_from_zip_to_resolve(&blob_bytes, None) + .map_err(|e| format!("Failed to parse WIT files: {:?}", e))?; + + // Try to find package-level documentation + // Look through packages for one matching our package name + for (_pkg_id, package) in resolve.packages.iter() { + let pkg_name = &package.name; + if pkg_name.name.contains(package_name) { + // Check if package has documentation + // Note: wit-parser Package doesn't have a docs field directly + // Package docs would typically be in a README or main interface + // For now, return formatted package info + return Ok(format!("Package: {} 
- Provides API interfaces and types", pkg_name.name)); + } + } + + // Fallback to basic description + Ok(format!("API package: {}", package_name)) } fn extract_docs(docs: &Docs) -> Option { docs.contents.clone() } + +// Helper function to convert snake_case to UpperCamelCase +fn to_upper_camel_case(s: &str) -> String { + s.split('-') + .map(|word| { + let mut chars = word.chars(); + match chars.next() { + None => String::new(), + Some(first) => first.to_uppercase().collect::() + chars.as_str(), + } + }) + .collect::() +} + +// Helper function to convert kebab-case to snake_case +fn to_snake_case(s: &str) -> String { + s.replace('-', "_") +} + +// Convert a WIT type definition to a JSON schema representation +fn type_to_json_schema(type_def: &wit_parser::TypeDef, resolve: &wit_parser::Resolve) -> Value { + use wit_parser::TypeDefKind; + + match &type_def.kind { + TypeDefKind::Record(record) => { + let fields = record.fields.iter().map(|field| { + ( + to_snake_case(&field.name), + type_ref_to_json(&field.ty, resolve) + ) + }).collect::>(); + + json!({ + "type": "object", + "properties": fields + }) + } + TypeDefKind::Variant(variant) => { + let cases = variant.cases.iter().map(|case| { + let case_schema = match &case.ty { + Some(ty) => type_ref_to_json(ty, resolve), + None => json!("null") + }; + json!({ + "name": case.name, + "type": case_schema + }) + }).collect::>(); + + json!({ + "type": "variant", + "cases": cases + }) + } + TypeDefKind::Enum(enum_def) => { + let cases = enum_def.cases.iter().map(|case| &case.name).collect::>(); + json!({ + "type": "enum", + "values": cases + }) + } + TypeDefKind::List(ty) => { + json!({ + "type": "array", + "items": type_ref_to_json(ty, resolve) + }) + } + TypeDefKind::Tuple(tuple) => { + let types = tuple.types.iter().map(|ty| type_ref_to_json(ty, resolve)).collect::>(); + json!({ + "type": "tuple", + "items": types + }) + } + TypeDefKind::Option(ty) => { + json!({ + "type": "option", + "value": type_ref_to_json(ty, resolve) + 
}) + } + TypeDefKind::Result(result) => { + json!({ + "type": "result", + "ok": result.ok.as_ref().map(|ty| type_ref_to_json(ty, resolve)), + "err": result.err.as_ref().map(|ty| type_ref_to_json(ty, resolve)) + }) + } + TypeDefKind::Flags(flags) => { + let flag_names = flags.flags.iter().map(|flag| &flag.name).collect::>(); + json!({ + "type": "flags", + "flags": flag_names + }) + } + TypeDefKind::Type(ty) => type_ref_to_json(ty, resolve), + _ => json!("unknown") + } +} + +// Convert a WIT type reference to a JSON representation +fn type_ref_to_json(type_ref: &wit_parser::Type, resolve: &wit_parser::Resolve) -> Value { + use wit_parser::Type; + + match type_ref { + Type::Bool => json!("bool"), + Type::U8 => json!("u8"), + Type::U16 => json!("u16"), + Type::U32 => json!("u32"), + Type::U64 => json!("u64"), + Type::S8 => json!("s8"), + Type::S16 => json!("s16"), + Type::S32 => json!("s32"), + Type::S64 => json!("s64"), + Type::F32 => json!("f32"), + Type::F64 => json!("f64"), + Type::Char => json!("char"), + Type::String => json!("string"), + Type::Id(id) => { + // Look up the referenced type + if let Some(type_def) = resolve.types.get(*id) { + // If it has a name, use the name; otherwise, inline the definition + if let Some(name) = &type_def.name { + json!(to_upper_camel_case(name)) + } else { + type_to_json_schema(type_def, resolve) + } + } else { + json!("unknown") + } + } + } +} diff --git a/hyperdrive/packages/spider/spider/src/tool_providers/mod.rs b/hyperdrive/packages/spider/spider/src/tool_providers/mod.rs index 5136ceeb4..85816d330 100644 --- a/hyperdrive/packages/spider/spider/src/tool_providers/mod.rs +++ b/hyperdrive/packages/spider/spider/src/tool_providers/mod.rs @@ -56,7 +56,7 @@ pub enum ToolExecutionCommand { package_id: String, }, HyperwareCallApi { - package_id: String, + process_id: String, method: String, args: String, timeout: u64, From 7c47ba9b92370f14626f16d57e1583374dc5025a Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" 
<41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 23 Sep 2025 04:16:27 +0000 Subject: [PATCH 23/70] Format Rust code using rustfmt --- hyperdrive/packages/spider/spider/src/lib.rs | 6 +- .../spider/src/tool_providers/hyperware.rs | 103 ++++++++++++------ 2 files changed, 72 insertions(+), 37 deletions(-) diff --git a/hyperdrive/packages/spider/spider/src/lib.rs b/hyperdrive/packages/spider/spider/src/lib.rs index f69dd087a..fe984a202 100644 --- a/hyperdrive/packages/spider/spider/src/lib.rs +++ b/hyperdrive/packages/spider/spider/src/lib.rs @@ -268,11 +268,7 @@ impl SpiderState { let hyperware_provider = HyperwareToolProvider::new(); let fresh_tools = hyperware_provider.get_tools(self); // Update the existing server's tools - if let Some(server) = self - .mcp_servers - .iter_mut() - .find(|s| s.id == "hyperware") - { + if let Some(server) = self.mcp_servers.iter_mut().find(|s| s.id == "hyperware") { server.tools = fresh_tools; println!( "Spider: Hyperware tools refreshed with {} tools", diff --git a/hyperdrive/packages/spider/spider/src/tool_providers/hyperware.rs b/hyperdrive/packages/spider/spider/src/tool_providers/hyperware.rs index 4a498732f..80cbae0a5 100644 --- a/hyperdrive/packages/spider/spider/src/tool_providers/hyperware.rs +++ b/hyperdrive/packages/spider/spider/src/tool_providers/hyperware.rs @@ -231,8 +231,17 @@ pub async fn get_api(package_id: &str) -> Result { if iface_name == "standard" || iface_name == "lib" { for (type_name, type_id) in &iface.types { // Only include certain standard types that are commonly used - if matches!(type_name.as_str(), "address" | "process-id" | "package-id" | - "node-id" | "capability" | "request" | "response" | "message") { + if matches!( + type_name.as_str(), + "address" + | "process-id" + | "package-id" + | "node-id" + | "capability" + | "request" + | "response" + | "message" + ) { let rust_type_name = to_upper_camel_case(type_name); if seen_types.insert(rust_type_name.clone()) { let type_def = 
&resolve.types[*type_id]; @@ -257,7 +266,9 @@ pub async fn get_api(package_id: &str) -> Result { let type_name_camel = to_upper_camel_case(type_name); // Skip types ending with SignatureHttp or SignatureRemote - if type_name_camel.ends_with("SignatureHttp") || type_name_camel.ends_with("SignatureRemote") { + if type_name_camel.ends_with("SignatureHttp") + || type_name_camel.ends_with("SignatureRemote") + { continue; } @@ -280,22 +291,27 @@ pub async fn get_api(package_id: &str) -> Result { if seen_types.insert(format!("{}::{}", process_name, func_name_formatted)) { let docs = extract_docs(&func.docs); - let params_schema = func.params.iter().map(|(param_name, param_type)| { - json!({ - "name": to_snake_case(param_name), - "type": type_ref_to_json(¶m_type, &resolve) + let params_schema = func + .params + .iter() + .map(|(param_name, param_type)| { + json!({ + "name": to_snake_case(param_name), + "type": type_ref_to_json(¶m_type, &resolve) + }) }) - }).collect::>(); + .collect::>(); let returns_schema = match &func.results { - wit_parser::Results::Named(named) => { - named.iter().map(|(name, type_ref)| { + wit_parser::Results::Named(named) => named + .iter() + .map(|(name, type_ref)| { json!({ "name": to_snake_case(name), "type": type_ref_to_json(&type_ref, &resolve) }) - }).collect::>() - } + }) + .collect::>(), wit_parser::Results::Anon(type_ref) => { vec![json!({"type": type_ref_to_json(&type_ref, &resolve)})] } @@ -392,7 +408,10 @@ async fn get_package_documentation(package_id: &str) -> Result { // Note: wit-parser Package doesn't have a docs field directly // Package docs would typically be in a README or main interface // For now, return formatted package info - return Ok(format!("Package: {} - Provides API interfaces and types", pkg_name.name)); + return Ok(format!( + "Package: {} - Provides API interfaces and types", + pkg_name.name + )); } } @@ -428,12 +447,16 @@ fn type_to_json_schema(type_def: &wit_parser::TypeDef, resolve: &wit_parser::Res match 
&type_def.kind { TypeDefKind::Record(record) => { - let fields = record.fields.iter().map(|field| { - ( - to_snake_case(&field.name), - type_ref_to_json(&field.ty, resolve) - ) - }).collect::>(); + let fields = record + .fields + .iter() + .map(|field| { + ( + to_snake_case(&field.name), + type_ref_to_json(&field.ty, resolve), + ) + }) + .collect::>(); json!({ "type": "object", @@ -441,16 +464,20 @@ fn type_to_json_schema(type_def: &wit_parser::TypeDef, resolve: &wit_parser::Res }) } TypeDefKind::Variant(variant) => { - let cases = variant.cases.iter().map(|case| { - let case_schema = match &case.ty { - Some(ty) => type_ref_to_json(ty, resolve), - None => json!("null") - }; - json!({ - "name": case.name, - "type": case_schema + let cases = variant + .cases + .iter() + .map(|case| { + let case_schema = match &case.ty { + Some(ty) => type_ref_to_json(ty, resolve), + None => json!("null"), + }; + json!({ + "name": case.name, + "type": case_schema + }) }) - }).collect::>(); + .collect::>(); json!({ "type": "variant", @@ -458,7 +485,11 @@ fn type_to_json_schema(type_def: &wit_parser::TypeDef, resolve: &wit_parser::Res }) } TypeDefKind::Enum(enum_def) => { - let cases = enum_def.cases.iter().map(|case| &case.name).collect::>(); + let cases = enum_def + .cases + .iter() + .map(|case| &case.name) + .collect::>(); json!({ "type": "enum", "values": cases @@ -471,7 +502,11 @@ fn type_to_json_schema(type_def: &wit_parser::TypeDef, resolve: &wit_parser::Res }) } TypeDefKind::Tuple(tuple) => { - let types = tuple.types.iter().map(|ty| type_ref_to_json(ty, resolve)).collect::>(); + let types = tuple + .types + .iter() + .map(|ty| type_ref_to_json(ty, resolve)) + .collect::>(); json!({ "type": "tuple", "items": types @@ -491,14 +526,18 @@ fn type_to_json_schema(type_def: &wit_parser::TypeDef, resolve: &wit_parser::Res }) } TypeDefKind::Flags(flags) => { - let flag_names = flags.flags.iter().map(|flag| &flag.name).collect::>(); + let flag_names = flags + .flags + .iter() + 
.map(|flag| &flag.name) + .collect::>(); json!({ "type": "flags", "flags": flag_names }) } TypeDefKind::Type(ty) => type_ref_to_json(ty, resolve), - _ => json!("unknown") + _ => json!("unknown"), } } From f495c0aba34b9a12a581f5fd8fffc00a4adbf137 Mon Sep 17 00:00:00 2001 From: Tobias Merkle Date: Wed, 24 Sep 2025 11:15:22 -0400 Subject: [PATCH 24/70] fix #852 --- .../src/components/Home/components/Widget.tsx | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/hyperdrive/packages/homepage/ui/src/components/Home/components/Widget.tsx b/hyperdrive/packages/homepage/ui/src/components/Home/components/Widget.tsx index 44a091c14..cbc531680 100644 --- a/hyperdrive/packages/homepage/ui/src/components/Home/components/Widget.tsx +++ b/hyperdrive/packages/homepage/ui/src/components/Home/components/Widget.tsx @@ -1,4 +1,4 @@ -import React, { useState, useRef } from 'react'; +import React, { useState, useRef, useMemo } from 'react'; import type { HomepageApp } from '../../../types/app.types'; import { usePersistenceStore } from '../../../stores/persistenceStore'; import { Draggable } from './Draggable'; @@ -48,7 +48,7 @@ export const Widget: React.FC = ({ app, index, totalWidgets, childr } }; - const size = settings.size || calculateSize(); + const size = useMemo(() => settings.size || calculateSize(), [settings.size]); // Calculate responsive position based on index const calculatePosition = () => { @@ -67,7 +67,7 @@ export const Widget: React.FC = ({ app, index, totalWidgets, childr } }; - const position = settings.position || calculatePosition(); + const position = useMemo(() => settings.position || calculatePosition(), [settings.position]); // Widgets can either have widget HTML content or be loaded from their app URL const isHtmlWidget = app.widget && app.widget !== 'true' && app.widget.includes('<'); @@ -132,10 +132,13 @@ export const Widget: React.FC = ({ app, index, totalWidgets, childr > - {isExpanded && ( -
-
+ {isExpanded &&
+
{notifications.length === 0 ? (

All clear, no notifications!

) : ( @@ -70,8 +73,7 @@ const NotificationBay: React.FC = () => { )) )}
-
- )} +
}
{modalContent && ( From a9f90d2e9bf2c6b555f1350555e89309577ad584 Mon Sep 17 00:00:00 2001 From: Tobias Merkle Date: Mon, 29 Sep 2025 12:11:53 -0400 Subject: [PATCH 29/70] styling --- .../packages/app-store/ui/src/components/NotificationBay.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/hyperdrive/packages/app-store/ui/src/components/NotificationBay.tsx b/hyperdrive/packages/app-store/ui/src/components/NotificationBay.tsx index a82db0d9d..82b3e5263 100644 --- a/hyperdrive/packages/app-store/ui/src/components/NotificationBay.tsx +++ b/hyperdrive/packages/app-store/ui/src/components/NotificationBay.tsx @@ -41,7 +41,7 @@ const NotificationBay: React.FC = () => { return ( <> -
+
{isExpanded &&
Date: Mon, 29 Sep 2025 20:04:50 -0700 Subject: [PATCH 30/70] http-server: allow header to specify timeout --- hyperdrive/src/http/server.rs | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/hyperdrive/src/http/server.rs b/hyperdrive/src/http/server.rs index ee8d008ea..3c47e5653 100644 --- a/hyperdrive/src/http/server.rs +++ b/hyperdrive/src/http/server.rs @@ -765,6 +765,12 @@ async fn http_handler( } } + let timeout = headers + .get("X-Hyperware-Http-Server-Timeout") + .and_then(|t| t.to_str().ok()) + .and_then(|t| t.parse().ok()) + .unwrap_or_else(|| HTTP_SELF_IMPOSED_TIMEOUT); + // RPC functionality: if path is /rpc:distro:sys/message, // we extract message from base64 encoded bytes in data // and send it to the correct app. @@ -796,7 +802,7 @@ async fn http_handler( rsvp: None, message: Message::Request(Request { inherit: false, - expects_response: Some(HTTP_SELF_IMPOSED_TIMEOUT), + expects_response: Some(timeout.clone()), body: serde_json::to_vec(&HttpServerRequest::Http(IncomingHttpRequest { source_socket_addr: socket_addr.map(|addr| addr.to_string()), method: method.to_string(), @@ -836,7 +842,7 @@ async fn http_handler( message.send(&send_to_loop).await; - let timeout_duration = tokio::time::Duration::from_secs(HTTP_SELF_IMPOSED_TIMEOUT); + let timeout_duration = tokio::time::Duration::from_secs(timeout); let result = tokio::time::timeout(timeout_duration, response_receiver).await; let (http_response, body) = match result { From e00c23bb285458be0ab27c03da124b5e5febda38 Mon Sep 17 00:00:00 2001 From: hosted-fornet Date: Mon, 29 Sep 2025 21:22:56 -0700 Subject: [PATCH 31/70] kernel: dont crash on bad wasm file --- hyperdrive/src/kernel/process.rs | 22 ++++++++++++++++------ 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/hyperdrive/src/kernel/process.rs b/hyperdrive/src/kernel/process.rs index 9bc4b567c..b1a1c27aa 100644 --- a/hyperdrive/src/kernel/process.rs +++ b/hyperdrive/src/kernel/process.rs @@ -127,17 +127,27 @@ 
async fn make_component_v1( home_directory_path: PathBuf, process_state: ProcessState, ) -> anyhow::Result<(ProcessV1, Store, MemoryOutputPipe)> { - let component = - Component::new(&engine, wasm_bytes.to_vec()).expect("make_component: couldn't read file"); + let our_process_id = process_state.metadata.our.process.clone(); + let send_to_terminal = process_state.send_to_terminal.clone(); + + let component = match Component::new(&engine, wasm_bytes.to_vec()) { + Ok(c) => c, + Err(e) => { + t::Printout::new( + 0, + t::KERNEL_PROCESS_ID.clone(), + format!("kernel: process {our_process_id} invalid wasm file: {e:?}"), + ) + .send(&send_to_terminal) + .await; + return Err(e); + } + }; let mut linker = Linker::new(&engine); ProcessV1::add_to_linker(&mut linker, |state: &mut ProcessWasiV1| state).unwrap(); let (table, wasi, wasi_stderr) = make_table_and_wasi(home_directory_path, &process_state).await; wasmtime_wasi::p2::add_to_linker_async(&mut linker).unwrap(); - - let our_process_id = process_state.metadata.our.process.clone(); - let send_to_terminal = process_state.send_to_terminal.clone(); - let mut store = Store::new( &engine, ProcessWasiV1 { From c6a52c32ac74dc1d6157a7cb37f1399d34c076bc Mon Sep 17 00:00:00 2001 From: Hallmane Date: Tue, 30 Sep 2025 12:30:29 +0200 Subject: [PATCH 32/70] fixed gas caps for bundler's gas response --- .eth_providers | 1 + .../src/integrations/erc4337_operations.rs | 3 +- .../src/integrations/gas_optimization.rs | 46 +++++++++++++------ .../hyperwallet/src/integrations/mod.rs | 7 ++- 4 files changed, 38 insertions(+), 19 deletions(-) create mode 100644 .eth_providers diff --git a/.eth_providers b/.eth_providers new file mode 100644 index 000000000..267b8435d --- /dev/null +++ b/.eth_providers @@ -0,0 +1 @@ 
+[{"chain_id":1,"trusted":false,"provider":{"Node":{"hns_update":{"name":"eth-provider.hypr","public_key":"0xb2d90311d4666cc5a5ec6fb827dbbbcb3e884620470c18f5650ee166522a8089","ips":["15.204.59.64"],"ports":{"ws":9005},"routers":[]},"use_as_provider":true}}},{"chain_id":10,"trusted":false,"provider":{"Node":{"hns_update":{"name":"optimism-provider.hypr","public_key":"0xda9c10f1c53ab3bd3069bdfa5e86170b5997aa4fe1f33208c6b3d0b3c06c6c47","ips":["15.204.59.64"],"ports":{"ws":9006},"routers":[]},"use_as_provider":true}}},{"chain_id":8453,"trusted":false,"provider":{"Node":{"hns_update":{"name":"base-provider.hypr","public_key":"0xc30395d7d5d0273faed4bb63c9a277bd33040bf1d82f4199a6d825b0f94527a0","ips":["15.204.59.64"],"ports":{"ws":9007},"routers":[]},"use_as_provider":true}}},{"chain_id":11155111,"trusted":false,"provider":{"Node":{"hns_update":{"name":"sepolia-provider.hypr","public_key":"0x6ae839dfda9e20138afd5db17dabfba7e1b029f57f79fe3f9174c33087890872","ips":["15.204.59.64"],"ports":{"ws":9008},"routers":[]},"use_as_provider":true}}}] \ No newline at end of file diff --git a/hyperdrive/packages/hyperwallet/hyperwallet/src/integrations/erc4337_operations.rs b/hyperdrive/packages/hyperwallet/hyperwallet/src/integrations/erc4337_operations.rs index a09e6dbfa..3735d5ccc 100644 --- a/hyperdrive/packages/hyperwallet/hyperwallet/src/integrations/erc4337_operations.rs +++ b/hyperdrive/packages/hyperwallet/hyperwallet/src/integrations/erc4337_operations.rs @@ -312,8 +312,7 @@ fn estimate_optimized_gas(context: &OperationContext) -> Result ( U256::from(80_000), // ERC20 transfer + TBA overhead U256::from(150_000), // TBA validation is complex - U256::from(47_000), // Minimum for TBA calldata + U256::from(60_000), // Reasonable default for TBA calldata ), Self::Erc721Transfer => (U256::from(100_000), U256::from(60_000), U256::from(40_000)), Self::SimpleExecute => (U256::from(100_000), U256::from(70_000), U256::from(40_000)), @@ -44,7 +44,7 @@ impl OperationType { 
Self::Erc20Transfer => ( U256::from(100_000), // Hard cap for ERC20 U256::from(220_000), // TBAs need high verification gas - U256::from(55_000), // Enough for TBA calldata + U256::from(65_000), // Buffer for TBA calldata (bundler may require more) ), Self::Erc721Transfer => (U256::from(130_000), U256::from(80_000), U256::from(50_000)), _ => (U256::from(250_000), U256::from(150_000), U256::from(70_000)), @@ -109,17 +109,13 @@ pub fn apply_smart_gas_limits( .map(|est| est.min(max_call)) .unwrap_or(opt_call); - // For verification gas: TBAs genuinely need high verification gas - // Trust the estimate but cap at maximum to prevent extreme cases + // For verification gas: TBAs need high verification gas let verification_gas = estimated_verification .map(|est| est.min(max_verif)) .unwrap_or(opt_verif); - // For pre-verification gas: ensure we meet minimum requirements - // Use the maximum of our optimized value and the estimate (bundler's minimum) let pre_verification_gas = estimated_pre_verification - .map(|est| opt_pre.max(est).min(max_pre)) - .unwrap_or(opt_pre); + .unwrap_or_else(|| opt_pre.max(max_pre)); // Fallback should never be reached (we error earlier) (call_gas, verification_gas, pre_verification_gas) } @@ -161,14 +157,14 @@ mod tests { let (call_gas, verif_gas, pre_verif_gas) = apply_smart_gas_limits( Some(U256::from(300_000)), // Inflated estimate Some(U256::from(200_000)), // Reasonable for TBA - Some(U256::from(100_000)), // Inflated estimate + Some(U256::from(100_000)), // Bundler's estimate &op_type, ); - // Should be capped to reasonable values - assert!(call_gas <= U256::from(100_000)); // Capped - assert!(verif_gas <= U256::from(220_000)); // TBAs need more - assert!(pre_verif_gas <= U256::from(100_000)); // Uses max of optimized and estimate + // Should cap call/verification but trust pre-verification + assert_eq!(call_gas, U256::from(100_000)); // Capped at max + assert_eq!(verif_gas, U256::from(200_000)); // Within cap, so unchanged + 
assert_eq!(pre_verif_gas, U256::from(100_000)); // Trust bundler's estimate } #[test] @@ -179,9 +175,29 @@ mod tests { let (call_gas, verif_gas, pre_verif_gas) = apply_smart_gas_limits(None, None, None, &op_type); - // Should use optimized defaults + // Should use optimized defaults for call/verification, max for pre-verification + assert_eq!(call_gas, U256::from(80_000)); + assert_eq!(verif_gas, U256::from(150_000)); + assert_eq!(pre_verif_gas, U256::from(65_000)); // max(60k optimized, 65k max) = 65k + } + + #[test] + fn test_gas_optimization_respects_bundler_pre_verification_gas() { + let op_type = OperationType::Erc20Transfer; + + // Test with the exact scenario from production: bundler requires 58,531 (0xe4a3) + let (call_gas, verif_gas, pre_verif_gas) = apply_smart_gas_limits( + Some(U256::from(80_000)), // Reasonable call gas + Some(U256::from(150_000)), // Reasonable verification + Some(U256::from(58_531)), // Bundler's exact requirement: 0xe4a3 + &op_type, + ); + + // CRITICAL: Pre-verification gas must NOT be capped below bundler's requirement + assert_eq!(pre_verif_gas, U256::from(58_531)); // Must use bundler's exact value + + // Call and verification can still be capped assert_eq!(call_gas, U256::from(80_000)); assert_eq!(verif_gas, U256::from(150_000)); - assert_eq!(pre_verif_gas, U256::from(47_000)); } } diff --git a/hyperdrive/packages/hyperwallet/hyperwallet/src/integrations/mod.rs b/hyperdrive/packages/hyperwallet/hyperwallet/src/integrations/mod.rs index 737367526..3dac00028 100644 --- a/hyperdrive/packages/hyperwallet/hyperwallet/src/integrations/mod.rs +++ b/hyperdrive/packages/hyperwallet/hyperwallet/src/integrations/mod.rs @@ -6,7 +6,10 @@ pub mod erc4337_operations; pub mod gas_optimization; pub mod hypermap; -// Re-export for convenience -pub use erc4337_bundler::*; +// Re-export public APIs pub use erc4337_operations::*; pub use hypermap::*; + +// Internal modules (not re-exported) +// - erc4337_bundler: Internal bundler client 
implementation +// - gas_optimization: Internal gas limit optimization logic From ac90f3fd015372f9737199872c42ab5060f42db1 Mon Sep 17 00:00:00 2001 From: dev Date: Tue, 30 Sep 2025 10:11:21 -0700 Subject: [PATCH 33/70] added minting upgradable custom accounts + multicall initialization --- .../packages/app-store/ui/src/abis/index.ts | 4 + .../app-store/ui/src/pages/PublishPage.tsx | 2 +- .../app-store/ui/src/utils/predictTBA.ts | 12 +- hyperdrive/src/register-ui/src/App.tsx | 2 + .../src/register-ui/src/abis/helpers.ts | 10 +- hyperdrive/src/register-ui/src/abis/index.ts | 8 +- .../src/components/UpgradableCheckbox.tsx | 24 ++ hyperdrive/src/register-ui/src/lib/types.ts | 2 + .../register-ui/src/pages/CommitDotOsName.tsx | 3 - .../src/register-ui/src/pages/MintCustom.tsx | 263 ++++++++++-------- .../register-ui/src/pages/MintDotOsName.tsx | 11 +- .../src/register-ui/src/pages/ResetName.tsx | 4 +- .../src/register-ui/src/utils/predictTBA.ts | 12 +- 13 files changed, 215 insertions(+), 142 deletions(-) create mode 100644 hyperdrive/src/register-ui/src/components/UpgradableCheckbox.tsx diff --git a/hyperdrive/packages/app-store/ui/src/abis/index.ts b/hyperdrive/packages/app-store/ui/src/abis/index.ts index a4b3cdabf..c8708ae88 100644 --- a/hyperdrive/packages/app-store/ui/src/abis/index.ts +++ b/hyperdrive/packages/app-store/ui/src/abis/index.ts @@ -6,6 +6,10 @@ export const HYPERMAP: `0x${string}` = "0x000000000044C6B8Cb4d8f0F889a3E47664EAe export const MULTICALL: `0x${string}` = "0xcA11bde05977b3631167028862bE2a173976CA11"; export const HYPER_ACCOUNT_IMPL: `0x${string}` = "0x0000000000EDAd72076CBe7b9Cfa3751D5a85C97"; export const HYPER_ACCOUNT_UPGRADABLE_IMPL: `0x${string}` = "0x0000000000691b70A051CFAF82F9622E150369f3"; +export const ERC6551_REGISTRY = '0x000000006551c19487814612e58FE06813775758'; + +// HyperAccountProxy creation code +export const PROXY_CREATION_CODE = 
'0x60a0604052348015600e575f5ffd5b50604051610506380380610506833981016040819052602b916044565b6001600160a01b03166080525f805460ff19169055606f565b5f602082840312156053575f5ffd5b81516001600160a01b03811681146068575f5ffd5b9392505050565b6080516104806100865f395f606201526104805ff3fe608060405260043610610021575f3560e01c8063d1f578941461003257610028565b3661002857005b610030610045565b005b610030610040366004610363565b610057565b610055610050610111565b610148565b565b336001600160a01b037f000000000000000000000000000000000000000000000000000000000000000016146100be575f5460ff161561009e57610099610045565b6100be565b60405163572190d160e01b81523360048201526024015b60405180910390fd5b5f5460ff16156100e05760405162dc149f60e41b815260040160405180910390fd5b5f6100e9610111565b6001600160a01b03160361010d575f805460ff1916600117905561010d8282610166565b5050565b5f6101437f360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc546001600160a01b031690565b905090565b365f5f375f5f365f845af43d5f5f3e808015610162573d5ff35b3d5ffd5b61016f826101c0565b6040516001600160a01b038316907fbc7cd75a20ee27fd9adebab32041f755214dbc6bffa90cc0225b39da2e5c2d3b905f90a28051156101b8576101b38282610236565b505050565b61010d6102a8565b806001600160a01b03163b5f036101f557604051634c9c8ce360e01b81526001600160a01b03821660048201526024016100b5565b7f360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc80546001600160a01b0319166001600160a01b0392909216919091179055565b60605f5f846001600160a01b0316846040516102529190610434565b5f60405180830381855af49150503d805f811461028a576040519150601f19603f3d011682016040523d82523d5f602084013e61028f565b606091505b509150915061029f8583836102c7565b95945050505050565b34156100555760405163b398979f60e01b815260040160405180910390fd5b6060826102dc576102d782610326565b61031f565b81511580156102f357506001600160a01b0384163b155b1561031c57604051639996b31560e01b81526001600160a01b03851660048201526024016100b5565b50805b9392505050565b8051156103365780518082602001fd5b60405163d6bda27560e01b815260040160405180910390fd5b634e487b7160e01b5f526041600452602
45ffd5b5f5f60408385031215610374575f5ffd5b82356001600160a01b038116811461038a575f5ffd5b9150602083013567ffffffffffffffff8111156103a5575f5ffd5b8301601f810185136103b5575f5ffd5b803567ffffffffffffffff8111156103cf576103cf61034f565b604051601f8201601f19908116603f0116810167ffffffffffffffff811182821017156103fe576103fe61034f565b604052818152828201602001871015610415575f5ffd5b816020840160208301375f602083830101528093505050509250929050565b5f82518060208501845e5f92019182525091905056fea2646970667358221220117e3d359e3a2bb948f5bdb70d5aeec72a9db4978309bed659036d34fc5d4f6c64736f6c634300081b0033'; export const multicallAbi = parseAbi([ diff --git a/hyperdrive/packages/app-store/ui/src/pages/PublishPage.tsx b/hyperdrive/packages/app-store/ui/src/pages/PublishPage.tsx index 891403cbb..fbd46671e 100644 --- a/hyperdrive/packages/app-store/ui/src/pages/PublishPage.tsx +++ b/hyperdrive/packages/app-store/ui/src/pages/PublishPage.tsx @@ -232,7 +232,7 @@ export default function PublishPage() { // When creating a new package, predict the TBA address that will be created const predictedTBA = !isUpdate ? predictTBAAddress(currentTBA || HYPERMAP, packageName, publicClient?.chain?.id || 8453) : undefined; - const multicall = encodeMulticalls(metadataUrl, metadata, predictedTBA); + const multicall = encodeMulticalls(metadataUrl, metadata, predictedTBA?.predictedAddress); const args = isUpdate ? 
multicall : encodeIntoMintCall(multicall, address, packageName); writeContract({ diff --git a/hyperdrive/packages/app-store/ui/src/utils/predictTBA.ts b/hyperdrive/packages/app-store/ui/src/utils/predictTBA.ts index c849db538..00b030dc0 100644 --- a/hyperdrive/packages/app-store/ui/src/utils/predictTBA.ts +++ b/hyperdrive/packages/app-store/ui/src/utils/predictTBA.ts @@ -1,20 +1,20 @@ import { encodePacked, keccak256, getAddress, encodeAbiParameters, type Address, type Hex } from 'viem'; import { hyperhash } from './hyperhash'; - -const ERC6551_REGISTRY = '0x000000006551c19487814612e58FE06813775758' as const; +import { PROXY_CREATION_CODE, ERC6551_REGISTRY } from "../abis"; export function predictTBAAddress( hypermapAddr: Address, label: string, chainId: number = 8453 // Base chain ID -): Address { +): { predictedAddress: Address, predictedTokenId: BigInt } { // Calculate the namehash for the label const namehash = hyperhash(label); - // First compute the proxy address const proxyAddr = computeProxyAddress(hypermapAddr, hypermapAddr, namehash); console.log("proxyAddr", proxyAddr); - return computeAccount(proxyAddr, namehash, BigInt(chainId), hypermapAddr, BigInt(namehash)); + const predictedTokenId = BigInt(namehash) + const predictedAddress = computeAccount(proxyAddr, namehash, BigInt(chainId), hypermapAddr, predictedTokenId); + return { predictedAddress, predictedTokenId }; } function computeAccount( @@ -68,8 +68,6 @@ function computeProxyAddress( hypermapAddr: Address, salt: string ): Address { - // HyperAccountProxy creation code with constructor argument - const PROXY_CREATION_CODE = 
'0x60a0604052348015600e575f5ffd5b5060405161051d38038061051d833981016040819052602b91603b565b6001600160a01b03166080526066565b5f60208284031215604a575f5ffd5b81516001600160a01b0381168114605f575f5ffd5b9392505050565b6080516104a061007d5f395f607a01526104a05ff3fe608060405260043610610021575f3560e01c8063d1f578941461003257610028565b3661002857005b610030610045565b005b610030610040366004610383565b610057565b610055610050610132565b610169565b565b7f7d0893b5fe6077fb4cf083ec3487b8eece7e03b4ab6e888f7a8a1758010f8c007f00000000000000000000000000000000000000000000000000000000000000006001600160a01b031633146100df57805460ff16156100bf576100ba610045565b6100df565b60405163572190d160e01b81523360048201526024015b60405180910390fd5b805460ff16156101015760405162dc149f60e41b815260040160405180910390fd5b5f61010a610132565b6001600160a01b03160361012d57805460ff1916600117815561012d8383610187565b505050565b5f6101647f360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc546001600160a01b031690565b905090565b365f5f375f5f365f845af43d5f5f3e808015610183573d5ff35b3d5ffd5b610190826101e0565b6040516001600160a01b038316907fbc7cd75a20ee27fd9adebab32041f755214dbc6bffa90cc0225b39da2e5c2d3b905f90a28051156101d45761012d8282610256565b6101dc6102c8565b5050565b806001600160a01b03163b5f0361021557604051634c9c8ce360e01b81526001600160a01b03821660048201526024016100d6565b7f360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc80546001600160a01b0319166001600160a01b0392909216919091179055565b60605f5f846001600160a01b0316846040516102729190610454565b5f60405180830381855af49150503d805f81146102aa576040519150601f19603f3d011682016040523d82523d5f602084013e6102af565b606091505b50915091506102bf8583836102e7565b95945050505050565b34156100555760405163b398979f60e01b815260040160405180910390fd5b6060826102fc576102f782610346565b61033f565b815115801561031357506001600160a01b0384163b155b1561033c57604051639996b31560e01b81526001600160a01b03851660048201526024016100d6565b50805b9392505050565b8051156103565780518082602001fd5b60405163d6bda27560e01b815260040160405
180910390fd5b634e487b7160e01b5f52604160045260245ffd5b5f5f60408385031215610394575f5ffd5b82356001600160a01b03811681146103aa575f5ffd5b9150602083013567ffffffffffffffff8111156103c5575f5ffd5b8301601f810185136103d5575f5ffd5b803567ffffffffffffffff8111156103ef576103ef61036f565b604051601f8201601f19908116603f0116810167ffffffffffffffff8111828210171561041e5761041e61036f565b604052818152828201602001871015610435575f5ffd5b816020840160208301375f602083830101528093505050509250929050565b5f82518060208501845e5f92019182525091905056fea26469706673582212205c8437c90a52b26afb62a6e21b8baa0d106dcc547054521f0074dea229fd630f64736f6c634300081c0033'; const proxyCreationCodeHash = keccak256( encodePacked( diff --git a/hyperdrive/src/register-ui/src/App.tsx b/hyperdrive/src/register-ui/src/App.tsx index a6d54658e..fbce93750 100644 --- a/hyperdrive/src/register-ui/src/App.tsx +++ b/hyperdrive/src/register-ui/src/App.tsx @@ -23,6 +23,7 @@ function App() { const [keyFileName, setKeyFileName] = useState(''); const [reset, setReset] = useState(false); const [direct, setDirect] = useState(false); + const [upgradable, setUpgradable] = useState(false); const [hnsName, setHnsName] = useState(''); const [networkingKey, setNetworkingKey] = useState(''); const [ipAddress, setIpAddress] = useState(0); @@ -81,6 +82,7 @@ function App() { // just pass all the props each time since components won't mind extras // todo, most of these can be removed... 
const props = { + upgradable, setUpgradable, direct, setDirect, key, keyFileName, setKeyFileName, diff --git a/hyperdrive/src/register-ui/src/abis/helpers.ts b/hyperdrive/src/register-ui/src/abis/helpers.ts index a471f081b..2a00f0bd2 100644 --- a/hyperdrive/src/register-ui/src/abis/helpers.ts +++ b/hyperdrive/src/register-ui/src/abis/helpers.ts @@ -2,7 +2,7 @@ import { NetworkingInfo } from "../lib/types"; import { hyperhash } from "../utils/hyperhash"; import { ipToBytes, portToBytes } from "../utils/hns_encoding"; -import { multicallAbi, hypermapAbi, mechAbi, HYPERMAP, MULTICALL } from "./"; +import { multicallAbi, hypermapAbi, mechAbi, HYPERMAP, MULTICALL } from "."; import { encodeFunctionData, encodePacked, stringToHex, bytesToHex } from "viem"; // Function to encode router names into keccak256 hashes @@ -13,6 +13,7 @@ const encodeRouters = (routers: string[]): `0x${string}` => { }; export const generateNetworkingKeys = async ({ + upgradable, direct, setNetworkingKey, setWsPort, @@ -21,6 +22,7 @@ export const generateNetworkingKeys = async ({ reset, tbaAddress, }: { + upgradable: boolean, direct: boolean, label: string, our_address: `0x${string}`, @@ -109,8 +111,8 @@ export const generateNetworkingKeys = async ({ }); // Add initialize call if TBA address is provided - const initializeCall = tbaAddress ? encodeFunctionData({ - abi: [{"inputs":[],"name":"initialize","outputs":[],"stateMutability":"nonpayable","type":"function"}], + const initializeCall = upgradable && tbaAddress ? encodeFunctionData({ + abi: [{ "inputs": [], "name": "initialize", "outputs": [], "stateMutability": "nonpayable", "type": "function" }], functionName: 'initialize', args: [] }) : null; @@ -125,7 +127,7 @@ export const generateNetworkingKeys = async ({ { target: HYPERMAP, callData: router_call }, ]; - const calls = initializeCall && tbaAddress ? + const calls = upgradable && initializeCall && tbaAddress ? 
[{ target: tbaAddress, callData: initializeCall }, ...baseCalls] : baseCalls; diff --git a/hyperdrive/src/register-ui/src/abis/index.ts b/hyperdrive/src/register-ui/src/abis/index.ts index 72f8e3ea2..6f8f73f2b 100644 --- a/hyperdrive/src/register-ui/src/abis/index.ts +++ b/hyperdrive/src/register-ui/src/abis/index.ts @@ -1,12 +1,15 @@ import { parseAbi } from "viem"; -export { generateNetworkingKeys } from "./helpers"; - // move to constants? // also for anvil/base export const HYPERMAP: `0x${string}` = "0x000000000044C6B8Cb4d8f0F889a3E47664EAeda"; export const MULTICALL: `0x${string}` = "0xcA11bde05977b3631167028862bE2a173976CA11"; export const HYPER_ACCOUNT_IMPL: `0x${string}` = "0x0000000000EDAd72076CBe7b9Cfa3751D5a85C97"; +export const HYPER_ACCOUNT_UPGRADABLE_IMPL: `0x${string}` = "0x0000000000691b70A051CFAF82F9622E150369f3"; export const DOTOS: `0x${string}` = "0x763Ae1AB24c4322b8933E58d76d8D9286f6C0162"; +export const ERC6551_REGISTRY = '0x000000006551c19487814612e58FE06813775758'; + +// HyperAccountProxy creation code +export const PROXY_CREATION_CODE = 
'0x60a0604052348015600e575f5ffd5b50604051610506380380610506833981016040819052602b916044565b6001600160a01b03166080525f805460ff19169055606f565b5f602082840312156053575f5ffd5b81516001600160a01b03811681146068575f5ffd5b9392505050565b6080516104806100865f395f606201526104805ff3fe608060405260043610610021575f3560e01c8063d1f578941461003257610028565b3661002857005b610030610045565b005b610030610040366004610363565b610057565b610055610050610111565b610148565b565b336001600160a01b037f000000000000000000000000000000000000000000000000000000000000000016146100be575f5460ff161561009e57610099610045565b6100be565b60405163572190d160e01b81523360048201526024015b60405180910390fd5b5f5460ff16156100e05760405162dc149f60e41b815260040160405180910390fd5b5f6100e9610111565b6001600160a01b03160361010d575f805460ff1916600117905561010d8282610166565b5050565b5f6101437f360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc546001600160a01b031690565b905090565b365f5f375f5f365f845af43d5f5f3e808015610162573d5ff35b3d5ffd5b61016f826101c0565b6040516001600160a01b038316907fbc7cd75a20ee27fd9adebab32041f755214dbc6bffa90cc0225b39da2e5c2d3b905f90a28051156101b8576101b38282610236565b505050565b61010d6102a8565b806001600160a01b03163b5f036101f557604051634c9c8ce360e01b81526001600160a01b03821660048201526024016100b5565b7f360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc80546001600160a01b0319166001600160a01b0392909216919091179055565b60605f5f846001600160a01b0316846040516102529190610434565b5f60405180830381855af49150503d805f811461028a576040519150601f19603f3d011682016040523d82523d5f602084013e61028f565b606091505b509150915061029f8583836102c7565b95945050505050565b34156100555760405163b398979f60e01b815260040160405180910390fd5b6060826102dc576102d782610326565b61031f565b81511580156102f357506001600160a01b0384163b155b1561031c57604051639996b31560e01b81526001600160a01b03851660048201526024016100b5565b50805b9392505050565b8051156103365780518082602001fd5b60405163d6bda27560e01b815260040160405180910390fd5b634e487b7160e01b5f526041600452602
45ffd5b5f5f60408385031215610374575f5ffd5b82356001600160a01b038116811461038a575f5ffd5b9150602083013567ffffffffffffffff8111156103a5575f5ffd5b8301601f810185136103b5575f5ffd5b803567ffffffffffffffff8111156103cf576103cf61034f565b604051601f8201601f19908116603f0116810167ffffffffffffffff811182821017156103fe576103fe61034f565b604052818152828201602001871015610415575f5ffd5b816020840160208301375f602083830101528093505050509250929050565b5f82518060208501845e5f92019182525091905056fea2646970667358221220117e3d359e3a2bb948f5bdb70d5aeec72a9db4978309bed659036d34fc5d4f6c64736f6c634300081b0033'; export const multicallAbi = parseAbi([ `function aggregate(Call[] calls) external payable returns (uint256 blockNumber, bytes[] returnData)`, @@ -14,6 +17,7 @@ export const multicallAbi = parseAbi([ ]); export const hypermapAbi = parseAbi([ + "function leaf(bytes32 parenthash, bytes memory label) public pure returns (bytes32)", "function mint(address, bytes calldata, bytes calldata, bytes calldata, address) external returns (address tba)", "function note(bytes calldata,bytes calldata) external returns (bytes32)", "function get(bytes32 node) external view returns (address tokenBoundAccount, address tokenOwner, bytes memory note)", diff --git a/hyperdrive/src/register-ui/src/components/UpgradableCheckbox.tsx b/hyperdrive/src/register-ui/src/components/UpgradableCheckbox.tsx new file mode 100644 index 000000000..486ebdaa1 --- /dev/null +++ b/hyperdrive/src/register-ui/src/components/UpgradableCheckbox.tsx @@ -0,0 +1,24 @@ +import { FaSquareCheck, FaRegSquare } from "react-icons/fa6"; + +interface UpgradableCheckboxProps { + upgradable: boolean; + setUpgradable: (upgradable: boolean) => void; +} + +export default function UpgradableCheckbox({ upgradable, setUpgradable }: UpgradableCheckboxProps) { + return ( +
+ +
+ Upgradable + Allows operator to upgrade implementation
+
+ ); +} \ No newline at end of file diff --git a/hyperdrive/src/register-ui/src/lib/types.ts b/hyperdrive/src/register-ui/src/lib/types.ts index a47f5f304..68c8a14c1 100644 --- a/hyperdrive/src/register-ui/src/lib/types.ts +++ b/hyperdrive/src/register-ui/src/lib/types.ts @@ -12,6 +12,8 @@ export interface PageProps { setRouters: React.Dispatch>, direct: boolean, setDirect: React.Dispatch>, + upgradable: boolean, + setUpgradable: React.Dispatch>, hnsName: string, setHnsName: React.Dispatch>, key: string, diff --git a/hyperdrive/src/register-ui/src/pages/CommitDotOsName.tsx b/hyperdrive/src/register-ui/src/pages/CommitDotOsName.tsx index df2d06150..5653a0de2 100644 --- a/hyperdrive/src/register-ui/src/pages/CommitDotOsName.tsx +++ b/hyperdrive/src/register-ui/src/pages/CommitDotOsName.tsx @@ -4,10 +4,7 @@ import { toAscii } from "idna-uts46-hx"; import EnterHnsName from "../components/EnterHnsName"; import Loader from "../components/Loader"; import { PageProps } from "../lib/types"; - import DirectNodeCheckbox from "../components/DirectCheckbox"; -import { Tooltip } from "../components/Tooltip"; - import { useAccount, useWaitForTransactionReceipt, useWriteContract } from "wagmi"; import { useConnectModal, useAddRecentTransaction } from "@rainbow-me/rainbowkit" import { dotOsAbi, DOTOS } from "../abis"; diff --git a/hyperdrive/src/register-ui/src/pages/MintCustom.tsx b/hyperdrive/src/register-ui/src/pages/MintCustom.tsx index 54baacff9..0e77600db 100644 --- a/hyperdrive/src/register-ui/src/pages/MintCustom.tsx +++ b/hyperdrive/src/register-ui/src/pages/MintCustom.tsx @@ -2,18 +2,23 @@ import { useState, useEffect, FormEvent, useCallback } from "react"; import { useNavigate } from "react-router-dom"; import Loader from "../components/Loader"; import { PageProps } from "../lib/types"; - import DirectNodeCheckbox from "../components/DirectCheckbox"; - -import { useAccount, useWaitForTransactionReceipt, useSendTransaction } from "wagmi"; -import { useConnectModal, 
useAddRecentTransaction } from "@rainbow-me/rainbowkit" -import { tbaMintAbi, generateNetworkingKeys, HYPER_ACCOUNT_IMPL, HYPERMAP } from "../abis"; +import UpgradableCheckbox from "../components/UpgradableCheckbox"; +import { useAccount, useWaitForTransactionReceipt, useSendTransaction, useConfig } from "wagmi"; +import { readContract } from "wagmi/actions"; +import { useConnectModal, useAddRecentTransaction } from "@rainbow-me/rainbowkit"; +import { tbaMintAbi, HYPER_ACCOUNT_IMPL, HYPER_ACCOUNT_UPGRADABLE_IMPL, HYPERMAP, mechAbi, hypermapAbi } from "../abis"; +import { generateNetworkingKeys } from "../abis/helpers"; import { encodePacked, encodeFunctionData, stringToHex } from "viem"; import BackButton from "../components/BackButton"; import { predictTBAAddress } from "../utils/predictTBA"; +import { hyperhash } from "../utils/hyperhash"; + interface MintCustomNameProps extends PageProps { } function MintCustom({ + upgradable, + setUpgradable, direct, setDirect, hnsName, @@ -24,30 +29,32 @@ function MintCustom({ setTcpPort, setRouters, }: MintCustomNameProps) { - let { address } = useAccount(); - let navigate = useNavigate(); - let { openConnectModal } = useConnectModal(); + const { address } = useAccount(); + const navigate = useNavigate(); + const { openConnectModal } = useConnectModal(); + const config = useConfig(); + const [validationError, setValidationError] = useState(""); const { data: hash, sendTransaction, isPending, isError, error } = useSendTransaction({ mutation: { onSuccess: (data) => { addRecentTransaction({ hash: data, description: `Mint ${hnsName}` }); - } - } + }, + }, + }); + + const { isLoading: isConfirming, isSuccess: isConfirmed } = useWaitForTransactionReceipt({ + hash, }); - const { isLoading: isConfirming, isSuccess: isConfirmed } = - useWaitForTransactionReceipt({ - hash, - }); const addRecentTransaction = useAddRecentTransaction(); - const [triggerNameCheck, setTriggerNameCheck] = useState(false) + const [triggerNameCheck, 
setTriggerNameCheck] = useState(false); useEffect(() => { - document.title = "Mint" - }, []) + document.title = "Mint"; + }, []); - useEffect(() => setTriggerNameCheck(!triggerNameCheck), [address]) + useEffect(() => setTriggerNameCheck(!triggerNameCheck), [address]); useEffect(() => { if (!address) { @@ -55,116 +62,152 @@ function MintCustom({ } }, [address, openConnectModal]); - let handleMint = useCallback(async (e: FormEvent) => { - e.preventDefault() - e.stopPropagation() + useEffect(() => { + if (isConfirmed) { + navigate("/set-password"); + } + }, [isConfirmed, address, navigate]); - const formData = new FormData(e.target as HTMLFormElement) + const handleMint = useCallback( + async (e: FormEvent) => { + e.preventDefault(); + e.stopPropagation(); - if (!address) { - openConnectModal?.() - return - } + const formData = new FormData(e.target as HTMLFormElement); - const name = formData.get('name') as string - const tbaAddr = formData.get('tba') as `0x${string}` || HYPERMAP; - const fullLabel = `${name}.${tbaAddr === HYPERMAP ? 
'' : tbaAddr}`; + if (!address) { + openConnectModal?.(); + return; + } + + const tbaAddr = (formData.get("tba") as `0x${string}`) || HYPERMAP; + const fullHnsName = formData.get("full-hns-name") as string; - // Predict the TBA address that will be created - const predictedTBA = predictTBAAddress(tbaAddr, name); + if (!fullHnsName || !fullHnsName.includes(".")) { + setValidationError("Full HNS name must contain a dot, e.g., foo.bar"); + return; + } - const initCall = await generateNetworkingKeys({ + // Derive name from the first part before the dot + const name = fullHnsName.split(".")[0]; + const rootName = fullHnsName.replace(`${name}.`, ""); + try { + const tokenData = (await readContract(config, { + address: tbaAddr, + abi: mechAbi, + functionName: "token", + })) as readonly [bigint, `0x${string}`, bigint]; + const tokenId = tokenData[2]; + const rootNameHash = hyperhash(rootName); + if (tokenId !== BigInt(rootNameHash)) { + setValidationError(`The name '${rootName}' is not associated with the provided TBA address`); + return; + } + // Predict the TBA address that will be created + const predictedTBA = predictTBAAddress(HYPERMAP, fullHnsName); + console.log("predictedTBA", predictedTBA); + + const initCall = await generateNetworkingKeys({ + upgradable, + direct, + our_address: address, + label: hnsName, + setNetworkingKey, + setIpAddress, + setWsPort, + setTcpPort, + setRouters, + reset: false, + tbaAddress: predictedTBA.predictedAddress, + }); + + setHnsName(fullHnsName); + + const impl = upgradable ? 
HYPER_ACCOUNT_UPGRADABLE_IMPL : HYPER_ACCOUNT_IMPL; + const data = encodeFunctionData({ + abi: tbaMintAbi, + functionName: "mint", + args: [ + address, + encodePacked(["bytes"], [stringToHex(name)]), + initCall, + impl, + ], + }); + + // Send the transaction + sendTransaction({ + to: tbaAddr, + data: data, + gas: 1000000n, + }); + } catch (err) { + console.error("Failed to read contract or send transaction:", err); + setValidationError("Internal error, check console for details"); + } + }, + [ + config, + upgradable, direct, - our_address: address, - label: hnsName, + address, + sendTransaction, setNetworkingKey, setIpAddress, setWsPort, setTcpPort, setRouters, - reset: false, - tbaAddress: predictedTBA, - }); - - setHnsName(formData.get('full-hns-name') as string) - - console.log("full hns name", formData.get('full-hns-name')) - console.log("name", name) - console.log("predicted TBA", predictedTBA) - - const data = encodeFunctionData({ - abi: tbaMintAbi, - functionName: 'mint', - args: [ - address, - encodePacked(["bytes"], [stringToHex(name)]), - initCall, - HYPER_ACCOUNT_IMPL, - ], - }) - - // use data to write to contract -- do NOT use writeContract - // writeContract will NOT generate the correct selector for some reason - // probably THEIR bug.. no abi works - try { - sendTransaction({ - to: formData.get('tba') as `0x${string}`, - data: data, - gas: 1000000n, - }) - } catch (error) { - console.error('Failed to send transaction:', error) - } - }, [direct, address, sendTransaction, setNetworkingKey, setIpAddress, setWsPort, setTcpPort, setRouters, openConnectModal]) - - useEffect(() => { - if (isConfirmed) { - navigate("/set-password"); - } - }, [isConfirmed, address, navigate]); + openConnectModal, + hnsName, + ] + ); return (
- { -
- {isPending || isConfirming ? ( - - ) : ( - <> -

- - Register a name on a different top-level zone -- this will likely fail if that zone's requirements are not met - -

- - - -
- Advanced Options - -
-
- - -
- - )} - {isError && ( -

- Error: {error?.message || 'There was an error minting your name, please try again.'} + + {isPending || isConfirming ? ( + + ) : ( + <> +

+ + Register a name on a different top-level zone — this may fail if that zone's requirements are not met +

- )} - - } + + +
+ Advanced Options + + +
+
+ + +
+ + )} + {validationError && ( +

+ {validationError} +

+ )} + {isError && ( +

+ Error: {error?.message || "There was an error minting your name, please try again."} +

+ )} +
); } -export default MintCustom; +export default MintCustom; \ No newline at end of file diff --git a/hyperdrive/src/register-ui/src/pages/MintDotOsName.tsx b/hyperdrive/src/register-ui/src/pages/MintDotOsName.tsx index 77cf5cb55..7636e76f7 100644 --- a/hyperdrive/src/register-ui/src/pages/MintDotOsName.tsx +++ b/hyperdrive/src/register-ui/src/pages/MintDotOsName.tsx @@ -5,10 +5,10 @@ import { PageProps } from "../lib/types"; import { useAccount, useWaitForTransactionReceipt, useWriteContract } from "wagmi"; import { useConnectModal, useAddRecentTransaction } from "@rainbow-me/rainbowkit" -import { generateNetworkingKeys, HYPER_ACCOUNT_IMPL, DOTOS, tbaMintAbi } from "../abis"; +import { HYPER_ACCOUNT_IMPL, DOTOS, tbaMintAbi } from "../abis"; +import { generateNetworkingKeys } from "../abis/helpers"; import { createPublicClient, encodePacked, http, stringToHex, BaseError, ContractFunctionRevertedError } from "viem"; import { base } from 'viem/chains' -import { predictTBAAddress } from "../utils/predictTBA"; interface RegisterOsNameProps extends PageProps { } @@ -64,10 +64,8 @@ function MintDotOsName({ // strip .os suffix const name = hnsName.replace(/\.os$/, ''); - // Predict the TBA address that will be created - const predictedTBA = predictTBAAddress(DOTOS, name, base.id); - const initCall = await generateNetworkingKeys({ + upgradable: false, direct, our_address: address, label: hnsName, @@ -76,8 +74,7 @@ function MintDotOsName({ setWsPort, setTcpPort, setRouters, - reset: false, - tbaAddress: predictedTBA, + reset: false }); const publicClient = createPublicClient({ diff --git a/hyperdrive/src/register-ui/src/pages/ResetName.tsx b/hyperdrive/src/register-ui/src/pages/ResetName.tsx index b92c18f92..5a2b7be38 100644 --- a/hyperdrive/src/register-ui/src/pages/ResetName.tsx +++ b/hyperdrive/src/register-ui/src/pages/ResetName.tsx @@ -7,7 +7,8 @@ import { import { useNavigate } from "react-router-dom"; import Loader from "../components/Loader"; import { PageProps } 
from "../lib/types"; -import { MULTICALL, generateNetworkingKeys, mechAbi } from "../abis"; +import { MULTICALL, mechAbi } from "../abis"; +import { generateNetworkingKeys } from "../abis/helpers"; import DirectNodeCheckbox from "../components/DirectCheckbox"; import EnterHnsName from "../components/EnterHnsName"; @@ -78,6 +79,7 @@ function ResetHnsName({ try { const data = await generateNetworkingKeys({ + upgradable: false, direct, label: name, our_address: address, diff --git a/hyperdrive/src/register-ui/src/utils/predictTBA.ts b/hyperdrive/src/register-ui/src/utils/predictTBA.ts index c849db538..00b030dc0 100644 --- a/hyperdrive/src/register-ui/src/utils/predictTBA.ts +++ b/hyperdrive/src/register-ui/src/utils/predictTBA.ts @@ -1,20 +1,20 @@ import { encodePacked, keccak256, getAddress, encodeAbiParameters, type Address, type Hex } from 'viem'; import { hyperhash } from './hyperhash'; - -const ERC6551_REGISTRY = '0x000000006551c19487814612e58FE06813775758' as const; +import { PROXY_CREATION_CODE, ERC6551_REGISTRY } from "../abis"; export function predictTBAAddress( hypermapAddr: Address, label: string, chainId: number = 8453 // Base chain ID -): Address { +): { predictedAddress: Address, predictedTokenId: BigInt } { // Calculate the namehash for the label const namehash = hyperhash(label); - // First compute the proxy address const proxyAddr = computeProxyAddress(hypermapAddr, hypermapAddr, namehash); console.log("proxyAddr", proxyAddr); - return computeAccount(proxyAddr, namehash, BigInt(chainId), hypermapAddr, BigInt(namehash)); + const predictedTokenId = BigInt(namehash) + const predictedAddress = computeAccount(proxyAddr, namehash, BigInt(chainId), hypermapAddr, predictedTokenId); + return { predictedAddress, predictedTokenId }; } function computeAccount( @@ -68,8 +68,6 @@ function computeProxyAddress( hypermapAddr: Address, salt: string ): Address { - // HyperAccountProxy creation code with constructor argument - const PROXY_CREATION_CODE = 
'0x60a0604052348015600e575f5ffd5b5060405161051d38038061051d833981016040819052602b91603b565b6001600160a01b03166080526066565b5f60208284031215604a575f5ffd5b81516001600160a01b0381168114605f575f5ffd5b9392505050565b6080516104a061007d5f395f607a01526104a05ff3fe608060405260043610610021575f3560e01c8063d1f578941461003257610028565b3661002857005b610030610045565b005b610030610040366004610383565b610057565b610055610050610132565b610169565b565b7f7d0893b5fe6077fb4cf083ec3487b8eece7e03b4ab6e888f7a8a1758010f8c007f00000000000000000000000000000000000000000000000000000000000000006001600160a01b031633146100df57805460ff16156100bf576100ba610045565b6100df565b60405163572190d160e01b81523360048201526024015b60405180910390fd5b805460ff16156101015760405162dc149f60e41b815260040160405180910390fd5b5f61010a610132565b6001600160a01b03160361012d57805460ff1916600117815561012d8383610187565b505050565b5f6101647f360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc546001600160a01b031690565b905090565b365f5f375f5f365f845af43d5f5f3e808015610183573d5ff35b3d5ffd5b610190826101e0565b6040516001600160a01b038316907fbc7cd75a20ee27fd9adebab32041f755214dbc6bffa90cc0225b39da2e5c2d3b905f90a28051156101d45761012d8282610256565b6101dc6102c8565b5050565b806001600160a01b03163b5f0361021557604051634c9c8ce360e01b81526001600160a01b03821660048201526024016100d6565b7f360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc80546001600160a01b0319166001600160a01b0392909216919091179055565b60605f5f846001600160a01b0316846040516102729190610454565b5f60405180830381855af49150503d805f81146102aa576040519150601f19603f3d011682016040523d82523d5f602084013e6102af565b606091505b50915091506102bf8583836102e7565b95945050505050565b34156100555760405163b398979f60e01b815260040160405180910390fd5b6060826102fc576102f782610346565b61033f565b815115801561031357506001600160a01b0384163b155b1561033c57604051639996b31560e01b81526001600160a01b03851660048201526024016100d6565b50805b9392505050565b8051156103565780518082602001fd5b60405163d6bda27560e01b815260040160405
180910390fd5b634e487b7160e01b5f52604160045260245ffd5b5f5f60408385031215610394575f5ffd5b82356001600160a01b03811681146103aa575f5ffd5b9150602083013567ffffffffffffffff8111156103c5575f5ffd5b8301601f810185136103d5575f5ffd5b803567ffffffffffffffff8111156103ef576103ef61036f565b604051601f8201601f19908116603f0116810167ffffffffffffffff8111828210171561041e5761041e61036f565b604052818152828201602001871015610435575f5ffd5b816020840160208301375f602083830101528093505050509250929050565b5f82518060208501845e5f92019182525091905056fea26469706673582212205c8437c90a52b26afb62a6e21b8baa0d106dcc547054521f0074dea229fd630f64736f6c634300081c0033'; const proxyCreationCodeHash = keccak256( encodePacked( From 9162e4159d61bdd065b4db0930f272675b21065b Mon Sep 17 00:00:00 2001 From: Tobias Merkle Date: Wed, 1 Oct 2025 10:51:08 -0400 Subject: [PATCH 34/70] fix #855 --- .../components/Home/components/OmniButton.tsx | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/hyperdrive/packages/homepage/ui/src/components/Home/components/OmniButton.tsx b/hyperdrive/packages/homepage/ui/src/components/Home/components/OmniButton.tsx index 59b12badd..4a46fd17d 100644 --- a/hyperdrive/packages/homepage/ui/src/components/Home/components/OmniButton.tsx +++ b/hyperdrive/packages/homepage/ui/src/components/Home/components/OmniButton.tsx @@ -1,4 +1,4 @@ -import React, { useEffect, useState, useRef } from 'react'; +import React, { useEffect, useState, useRef, useCallback } from 'react'; import { useNavigationStore } from '../../../stores/navigationStore'; import classNames from 'classnames'; import { usePersistenceStore } from '../../../stores/persistenceStore'; @@ -72,7 +72,7 @@ export const OmniButton: React.FC = () => { }); }; - const handleMouseMove = (e: MouseEvent) => { + const handleMouseMove = useCallback((e: MouseEvent) => { if (isMobile()) return; console.log('omnibutton handleMouseMove', e); e.stopPropagation(); @@ -90,9 +90,9 @@ export const OmniButton: React.FC = () => { 
const newY = Math.max(30, Math.min(window.innerHeight - 30, dragStart.buttonY + deltaY)); setOmnibuttonPosition({ x: newX, y: newY }); } - }; + }, [dragStart, isDragging, setOmnibuttonPosition]); - const handleMouseUp = () => { + const handleMouseUp = useCallback(() => { if (isMobile()) return; console.log('omnibutton handleMouseUp'); if (!isDragging && dragStart) { @@ -101,19 +101,19 @@ export const OmniButton: React.FC = () => { } setDragStart(null); setIsDragging(false); - }; + }, [isDragging, dragStart, isRecentAppsOpen, toggleRecentApps, closeAllOverlays]); // Mouse event listeners useEffect(() => { if (dragStart) { - document.addEventListener('mousemove', handleMouseMove), { passive: false }; - document.addEventListener('mouseup', handleMouseUp), { passive: false }; + document.addEventListener('mousemove', handleMouseMove); + document.addEventListener('mouseup', handleMouseUp); return () => { - document.removeEventListener('mousemove', handleMouseMove), { passive: false }; - document.removeEventListener('mouseup', handleMouseUp), { passive: false }; + document.removeEventListener('mousemove', handleMouseMove); + document.removeEventListener('mouseup', handleMouseUp); }; } - }, [dragStart, isDragging]); + }, [dragStart, isDragging, handleMouseMove, handleMouseUp]); // Handle window resize to keep button in bounds useEffect(() => { From c1a1a8d5c6097a3164eb2fdd41b1e83ccfb508fb Mon Sep 17 00:00:00 2001 From: dev Date: Fri, 3 Oct 2025 07:05:38 -0700 Subject: [PATCH 35/70] fixed unexpected submit bug on clicking back button --- hyperdrive/src/register-ui/src/components/BackButton.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hyperdrive/src/register-ui/src/components/BackButton.tsx b/hyperdrive/src/register-ui/src/components/BackButton.tsx index 92232e1be..6d898a006 100644 --- a/hyperdrive/src/register-ui/src/components/BackButton.tsx +++ b/hyperdrive/src/register-ui/src/components/BackButton.tsx @@ -3,7 +3,7 @@ import classNames from 
"classnames"; export default function BackButton({ mode = "wide", className }: { mode?: "narrow" | "wide", className?: string }) { return ( - -
- Register as a direct node. - If you are unsure, leave unchecked. -
- -
- ); +export default function DirectNodeCheckbox({ direct, setDirect, initiallyChecked }: DNCBProps) { + const getHelpText = () => { + if (initiallyChecked === undefined) { + return "If you are unsure, leave unchecked."; + } + return initiallyChecked + ? "If you are unsure, leave checked." + : "If you are unsure, leave unchecked."; + }; + + return ( +
+ +
+ Register as a direct node. + {getHelpText()} +
+ +
+ ); } \ No newline at end of file diff --git a/hyperdrive/src/register-ui/src/components/RouterTooltip.tsx b/hyperdrive/src/register-ui/src/components/RouterTooltip.tsx new file mode 100644 index 000000000..983b8ba32 --- /dev/null +++ b/hyperdrive/src/register-ui/src/components/RouterTooltip.tsx @@ -0,0 +1,14 @@ +import React from "react"; +import { Tooltip } from "./Tooltip"; + +export const RouterTooltip: React.FC = () => { + return ( + + ); +}; \ No newline at end of file diff --git a/hyperdrive/src/register-ui/src/components/RpcProviderEditor.tsx b/hyperdrive/src/register-ui/src/components/RpcProviderEditor.tsx new file mode 100644 index 000000000..0005f1c5e --- /dev/null +++ b/hyperdrive/src/register-ui/src/components/RpcProviderEditor.tsx @@ -0,0 +1,217 @@ +import { useState } from 'react'; +import classNames from 'classnames'; + +export interface RpcProviderData { + url: string; + auth: { + type: 'Basic' | 'Bearer' | 'Raw' | null; + value: string; + } | null; +} + +interface RpcProviderEditorProps { + providers: RpcProviderData[]; + onChange: (providers: RpcProviderData[]) => void; + label?: string; +} + +export function RpcProviderEditor({ providers, onChange, label }: RpcProviderEditorProps) { + const [showAuthValues, setShowAuthValues] = useState>({}); + + const addProvider = () => { + onChange([...providers, { url: '', auth: null }]); + }; + + const removeProvider = (index: number) => { + onChange(providers.filter((_, i) => i !== index)); + // Clean up the showAuthValues state for this index + const newShowAuthValues = { ...showAuthValues }; + delete newShowAuthValues[index]; + setShowAuthValues(newShowAuthValues); + }; + + const updateProvider = (index: number, updates: Partial) => { + const newProviders = [...providers]; + newProviders[index] = { ...newProviders[index], ...updates }; + onChange(newProviders); + }; + + const updateAuth = (index: number, authType: 'Basic' | 'Bearer' | 'Raw' | null, authValue: string = '') => { + const newProviders = 
[...providers]; + if (authType === null) { + newProviders[index].auth = null; + } else { + newProviders[index].auth = { type: authType, value: authValue }; + } + onChange(newProviders); + }; + + const toggleAuthVisibility = (index: number) => { + setShowAuthValues(prev => ({ + ...prev, + [index]: !prev[index] + })); + }; + + // Validate individual provider + const validateProvider = (provider: RpcProviderData): string | null => { + if (!provider.url.trim()) { + return 'WebSocket URL is required'; + } + if (!provider.url.startsWith('wss://')) { + return 'URL must be a secure WebSocket URL starting with wss://'; + } + if (provider.auth && !provider.auth.value.trim()) { + return 'Auth value is required when auth type is specified'; + } + return null; + }; + + // Get validation errors for all providers + const validationErrors = providers.map(validateProvider); + const hasErrors = validationErrors.some(error => error !== null); + + return ( +
+ {label && ( + + )} + + {providers.map((provider, index) => { + const error = validationErrors[index]; + return ( +
+
+
+ {/* URL Field */} +
+ + updateProvider(index, { url: e.target.value })} + placeholder="wss://base-mainnet.infura.io/ws/v3/YOUR-API-KEY" + className={classNames("input text-sm", { + 'border-red-500 focus:border-red-500': error !== null + })} + /> +
+ + {/* Auth Type Selector */} +
+ + +
+ + {/* Auth Value Field (conditional) */} + {provider.auth && ( +
+ + +
+ updateAuth(index, provider.auth!.type, e.target.value)} + placeholder={ + provider.auth.type === 'Bearer' ? 'your-bearer-token' : + provider.auth.type === 'Basic' ? 'user:pass (base64 encoded)' : + 'custom-header-value' + } + className={classNames("input text-sm", { + 'border-red-500 focus:border-red-500': provider.auth && !provider.auth.value.trim() + })} + style={{ paddingRight: '88px' }} + autoComplete="off" + /> + +
+
+ )} +
+ + {/* Remove Button */} + +
+ + {/* Error message for this specific provider */} + {error && ( + + {error} + + )} +
+ ); + })} + + {/* Add Provider Button */} + + + {/* Overall validation summary */} + {providers.length > 0 && !hasErrors && ( + + {providers.length} provider{providers.length !== 1 ? 's' : ''} to be added + + )} +
+ ); +} \ No newline at end of file diff --git a/hyperdrive/src/register-ui/src/components/SpecifyBaseL2AccessProvidersCheckbox.tsx b/hyperdrive/src/register-ui/src/components/SpecifyBaseL2AccessProvidersCheckbox.tsx new file mode 100644 index 000000000..f44456048 --- /dev/null +++ b/hyperdrive/src/register-ui/src/components/SpecifyBaseL2AccessProvidersCheckbox.tsx @@ -0,0 +1,33 @@ +import { BaseL2AccessProviderTooltip } from "./BaseL2AccessProviderTooltip"; +import { FaSquareCheck, FaRegSquare } from "react-icons/fa6"; + +interface SpecifyBaseL2AccessProvidersProps { + specifyBaseL2AccessProviders: boolean; + setSpecifyBaseL2AccessProviders: (specifyBaseL2AccessProviders: boolean) => void; + initiallyChecked?: boolean; +} + +export default function SpecifyBaseL2AccessProvidersCheckbox({ + specifyBaseL2AccessProviders, + setSpecifyBaseL2AccessProviders, + initiallyChecked = false + }: SpecifyBaseL2AccessProvidersProps) { + return ( +
+ +
+ Add Base L2 access providers. + + If you are unsure, leave {initiallyChecked ? 'checked' : 'unchecked'}. + +
+ +
+ ); +} \ No newline at end of file diff --git a/hyperdrive/src/register-ui/src/components/SpecifyCacheSourcesCheckbox.tsx b/hyperdrive/src/register-ui/src/components/SpecifyCacheSourcesCheckbox.tsx new file mode 100644 index 000000000..fad52290f --- /dev/null +++ b/hyperdrive/src/register-ui/src/components/SpecifyCacheSourcesCheckbox.tsx @@ -0,0 +1,41 @@ + +import { CacheSourceTooltip } from "./CacheSourceTooltip"; +import { FaSquareCheck, FaRegSquare } from "react-icons/fa6"; + +interface SpecifyCacheSourcesCheckboxProps { + specifyCacheSources: boolean; + setSpecifyCacheSources: (specifyCacheSources: boolean) => void; + initiallyChecked?: boolean; +} + +export default function SpecifyCacheSourcesCheckbox({ + specifyCacheSources, + setSpecifyCacheSources, + initiallyChecked + }: SpecifyCacheSourcesCheckboxProps) { + const getHelpText = () => { + if (initiallyChecked === undefined) { + return "If you are unsure, leave unchecked."; + } + return initiallyChecked + ? "If you are unsure, leave checked." + : "If you are unsure, leave unchecked."; + }; + + return ( +
+ +
+ Specify cache sources. + {getHelpText()} +
+ +
+ ); +} \ No newline at end of file diff --git a/hyperdrive/src/register-ui/src/components/SpecifyRoutersCheckbox.tsx b/hyperdrive/src/register-ui/src/components/SpecifyRoutersCheckbox.tsx new file mode 100644 index 000000000..5c9c011c9 --- /dev/null +++ b/hyperdrive/src/register-ui/src/components/SpecifyRoutersCheckbox.tsx @@ -0,0 +1,43 @@ +import { RouterTooltip } from "./RouterTooltip"; +import { FaSquareCheck, FaRegSquare } from "react-icons/fa6"; + +interface SRCBProps { + specifyRouters: boolean; + setSpecifyRouters: (specifyRouters: boolean) => void; + initiallyChecked?: boolean; +} + +export default function SpecifyRoutersCheckbox({ + specifyRouters, + setSpecifyRouters, + initiallyChecked + }: SRCBProps) { + const getHelpText = () => { + if (initiallyChecked === undefined) { + return "If you are unsure, leave unchecked."; + } + return initiallyChecked + ? "If you are unsure, leave checked." + : "If you are unsure, leave unchecked."; + }; + + return ( +
+ +
+ Register as indirect node with non-default routers. + {getHelpText()} +
+ +
+ ); +} \ No newline at end of file diff --git a/hyperdrive/src/register-ui/src/lib/types.ts b/hyperdrive/src/register-ui/src/lib/types.ts index a47f5f304..f3afbbacc 100644 --- a/hyperdrive/src/register-ui/src/lib/types.ts +++ b/hyperdrive/src/register-ui/src/lib/types.ts @@ -43,3 +43,19 @@ export type UnencryptedIdentity = { name: string, allowed_routers: string[] } + +export type InfoResponse = { + name?: string; + allowed_routers?: string[]; + initial_cache_sources: string[]; + initial_base_l2_providers: string[]; +} + +export interface RpcProviderConfig { + url: string; + auth: { + Basic?: string; + Bearer?: string; + Raw?: string; + } | null; +} \ No newline at end of file diff --git a/hyperdrive/src/register-ui/src/pages/CommitDotOsName.tsx b/hyperdrive/src/register-ui/src/pages/CommitDotOsName.tsx index df2d06150..23f7c8bb3 100644 --- a/hyperdrive/src/register-ui/src/pages/CommitDotOsName.tsx +++ b/hyperdrive/src/register-ui/src/pages/CommitDotOsName.tsx @@ -6,7 +6,7 @@ import Loader from "../components/Loader"; import { PageProps } from "../lib/types"; import DirectNodeCheckbox from "../components/DirectCheckbox"; -import { Tooltip } from "../components/Tooltip"; +import SpecifyRoutersCheckbox from "../components/SpecifyRoutersCheckbox"; import { useAccount, useWaitForTransactionReceipt, useWriteContract } from "wagmi"; import { useConnectModal, useAddRecentTransaction } from "@rainbow-me/rainbowkit" @@ -16,16 +16,19 @@ import { base } from 'viem/chains' import BackButton from "../components/BackButton"; interface RegisterOsNameProps extends PageProps { } +// Regex for valid router names (domain format) +const ROUTER_NAME_REGEX = /^[a-z0-9](?:[a-z0-9-]*[a-z0-9])?(?:\.[a-z0-9](?:[a-z0-9-]*[a-z0-9])?)*$/; + function CommitDotOsName({ - direct, - setDirect, - setHnsName, - setNetworkingKey, - setIpAddress, - setWsPort, - setTcpPort, - setRouters, -}: RegisterOsNameProps) { + direct, + setDirect, + setHnsName, + setNetworkingKey, + setIpAddress, + setWsPort, + 
setTcpPort, + setRouters, + }: RegisterOsNameProps) { let { address } = useAccount(); let navigate = useNavigate(); let { openConnectModal } = useConnectModal(); @@ -47,6 +50,70 @@ function CommitDotOsName({ const [nameValidities, setNameValidities] = useState([]) const [triggerNameCheck, setTriggerNameCheck] = useState(false) const [isConfirmed, setIsConfirmed] = useState(false) + const [specifyRouters, setSpecifyRouters] = useState(false) + const [customRouters, setCustomRouters] = useState('') + const [routerValidationErrors, setRouterValidationErrors] = useState([]) + + // Modified setDirect function - no longer clears custom routers + const handleSetDirect = (value: boolean) => { + setDirect(value); + if (value) { + setSpecifyRouters(false); + } + }; + + // Modified setSpecifyRouters function - no longer clears custom routers + const handleSetSpecifyRouters = (value: boolean) => { + setSpecifyRouters(value); + if (value) { + setDirect(false); + } + }; + + // Validate custom routers against the regex + const validateRouters = (routersText: string): string[] => { + if (!routersText.trim()) return []; + + const routers = routersText + .split('\n') + .map(router => router.trim()) + .filter(router => router.length > 0); + + const errors: string[] = []; + routers.forEach((router, index) => { + if (!ROUTER_NAME_REGEX.test(router)) { + errors.push(`Line ${index + 1}: "${router}" is not a valid router name`); + } + }); + + return errors; + }; + + // Handle custom routers change with validation + const handleCustomRoutersChange = (value: string) => { + setCustomRouters(value); + if (specifyRouters && value.trim()) { + const errors = validateRouters(value); + setRouterValidationErrors(errors); + } else { + setRouterValidationErrors([]); + } + }; + + // Add a validation function for custom routers + const getValidCustomRouters = () => { + if (!specifyRouters) return []; + return customRouters + .split('\n') + .map(router => router.trim()) + .filter(router => router.length 
> 0 && ROUTER_NAME_REGEX.test(router)); + }; + + const isCustomRoutersValid = () => { + if (!specifyRouters) return true; // Not required if checkbox is unchecked + const validRouters = getValidCustomRouters(); + return validRouters.length > 0 && routerValidationErrors.length === 0; + }; useEffect(() => { document.title = "Register" @@ -71,6 +138,16 @@ function CommitDotOsName({ } setName(toAscii(name)); console.log("committing to .os name: ", name) + + // Process custom routers only if the checkbox is checked + if (specifyRouters && customRouters.trim()) { + const routersToUse = getValidCustomRouters(); + setRouters(routersToUse); + } else { + // Clear routers in app state if not specifying custom routers + setRouters([]); + } + const commit = keccak256( encodeAbiParameters( parseAbiParameters('bytes memory, address'), @@ -107,7 +184,7 @@ function CommitDotOsName({ throw err; } - }, [name, direct, address, writeContract, setNetworkingKey, setIpAddress, setWsPort, setTcpPort, setRouters, openConnectModal]) + }, [name, specifyRouters, customRouters, direct, address, writeContract, setNetworkingKey, setIpAddress, setWsPort, setTcpPort, setRouters, openConnectModal]) useEffect(() => { if (txConfirmed) { @@ -116,10 +193,15 @@ function CommitDotOsName({ setTimeout(() => { setIsConfirmed(true); setHnsName(`${name}.os`); + + if (specifyRouters && customRouters.trim()) { + const routersToUse = getValidCustomRouters(); + setRouters(routersToUse); + } navigate("/mint-os-name"); }, 16000) } - }, [txConfirmed, address, name, setHnsName, navigate]); + }, [txConfirmed, address, name, setHnsName, navigate, specifyRouters, customRouters, setRouters]); return (
@@ -142,27 +224,67 @@ function CommitDotOsName({

- Advanced Options - + Advanced Network Options +
+ + + {specifyRouters && ( +
+ +