diff --git a/.github/workflows/auto-llms-txt.yml b/.github/workflows/auto-llms-txt.yml new file mode 100644 index 00000000..6073cdf0 --- /dev/null +++ b/.github/workflows/auto-llms-txt.yml @@ -0,0 +1,117 @@ +name: Auto Update LLMs.txt Files + +on: + pull_request: + types: [opened, synchronize, reopened, ready_for_review] + paths: + - 'docs/base-account/**' + - 'docs/base-app/**' + - 'docs/base-chain/**' + - 'docs/cookbook/**' + - 'docs/get-started/**' + - 'docs/learn/**' + - 'docs/mini-apps/**' + - 'docs/onchainkit/**' + +permissions: + contents: write + pull-requests: write + +jobs: + update-llms-txt: + if: ${{ !startsWith(github.head_ref, 'auto-llms/') }} + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Install Cursor CLI + run: | + curl https://cursor.com/install -fsS | bash + echo "$HOME/.cursor/bin" >> $GITHUB_PATH + + - name: Configure git + run: | + git config user.name "Cursor Agent" + git config user.email "cursoragent@cursor.com" + + - name: Update LLMs.txt files + timeout-minutes: 20 + continue-on-error: true + id: agent_run + env: + MODEL: gpt-5 + CURSOR_API_KEY: ${{ secrets.CURSOR_API_KEY }} + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + BRANCH_PREFIX: auto-llms + run: | + cursor-agent -p "You are operating in a GitHub Actions runner to automatically update llms.txt and llms-full.txt files. + + The GitHub CLI is available as \`gh\` and authenticated via \`GH_TOKEN\`. Git is available. You have write access to repository contents and can comment on pull requests, but you must not create or edit PRs directly. 
+ + # Context: + - Repo: ${{ github.repository }} + - Owner: ${{ github.repository_owner }} + - PR Number: ${{ github.event.pull_request.number }} + - Base Ref: ${{ github.base_ref }} + - Head Ref: ${{ github.head_ref }} + - Branch Prefix: ${{ env.BRANCH_PREFIX }} + + # Documentation Structure: + This is a docs repository with the following main directories that each contain llms.txt and llms-full.txt files: + - docs/base-account/ + - docs/base-app/ + - docs/base-chain/ + - docs/cookbook/ + - docs/get-started/ + - docs/learn/ + - docs/mini-apps/ + - docs/onchainkit/ + + # Goal: + Analyze changes in the original PR and determine if they are significant enough to warrant updating the llms.txt and llms-full.txt files for the affected directories. If so, create a separate PR with the updates. + + # Requirements: + 1) Use \`gh pr diff\` to analyze what changed in the original PR + 2) Determine which documentation directories are affected by the changes + 3) Assess if the changes are significant enough to update the llms.txt files (new content, major restructuring, new features, etc.) 
+ 4) If changes are significant: + - Create or update a persistent branch named \`${{ env.BRANCH_PREFIX }}/pr-${{ github.event.pull_request.number }}\` + - Update the relevant llms.txt and llms-full.txt files to reflect the new documentation structure and content + - Push the changes to the branch + - Comment on the original PR with a summary and a compare link to create the PR + 5) If changes are not significant: + - Comment on the original PR explaining that no llms.txt updates are needed + + # File Format Guidelines: + - llms.txt files should be concise, focused summaries with key links + - llms-full.txt files should be comprehensive with all documentation links + - Follow the existing format and style of the current files + - Include proper section headers and descriptions + - Use relative links in the format: https://docs.base.org/[path] + + # Significance Criteria: + Consider changes significant if they involve: + - New documentation files or sections + - Major content restructuring + - New features or API endpoints + - Changes to core concepts or workflows + - New guides or tutorials + + # Deliverables: + - If updates needed: Pushed commits to the persistent branch and a PR comment with compare link + - If no updates needed: A brief comment explaining why no updates are required + - Keep all changes minimal and consistent with existing documentation style + " --force --model "$MODEL" --output-format=text + + - name: Handle timeout or completion + if: always() + run: | + if [ "${{ steps.agent_run.outcome }}" == "failure" ]; then + echo "⏰ Agent timed out after 20 minutes - this is expected for long-running analysis" + else + echo "✅ Agent completed successfully" + fi \ No newline at end of file diff --git a/docs/base-account/guides/advanced-authentication.mdx b/docs/base-account/guides/advanced-authentication.mdx new file mode 100644 index 00000000..54f7d1a1 --- /dev/null +++ b/docs/base-account/guides/advanced-authentication.mdx @@ -0,0 +1,475 @@ +--- 
+title: "Advanced Authentication Patterns" +description: "Implement sophisticated authentication flows with Base Account including multi-factor authentication, session management, and enterprise integrations." +--- + +# Advanced Authentication Patterns + +Base Account provides powerful authentication capabilities beyond basic wallet connection. This guide covers advanced patterns for building secure, enterprise-ready authentication systems. + +## Multi-Factor Authentication (MFA) + +### Passkey + Biometric Authentication + +Combine passkeys with device biometrics for enhanced security: + +```typescript +import { createBaseAccount } from '@base-account/core' + +const account = await createBaseAccount({ + authentication: { + primary: 'passkey', + secondary: 'biometric', + fallback: 'recovery-phrase' + } +}) + +// Authenticate with both factors +const session = await account.authenticate({ + requireBoth: true, + biometricPrompt: 'Confirm your identity' +}) +``` + +### Time-Based One-Time Passwords (TOTP) + +Integrate TOTP for additional security: + +```typescript +import { generateTOTPSecret, verifyTOTP } from '@base-account/totp' + +class TOTPAuthenticator { + async setupTOTP(account: BaseAccount) { + const secret = generateTOTPSecret() + + // Store encrypted secret + await account.secureStorage.set('totp_secret', secret, { + encrypted: true, + requireAuth: true + }) + + return { + secret, + qrCode: this.generateQRCode(secret, account.address) + } + } + + async verifyTOTP(account: BaseAccount, token: string) { + const secret = await account.secureStorage.get('totp_secret') + return verifyTOTP(secret, token) + } +} +``` + +## Session Management + +### Secure Session Tokens + +Implement secure, time-limited sessions: + +```typescript +interface SessionConfig { + duration: number // milliseconds + refreshable: boolean + deviceBinding: boolean + ipRestriction?: string[] +} + +class SessionManager { + async createSession( + account: BaseAccount, + config: SessionConfig + 
): Promise { + const sessionId = crypto.randomUUID() + const expiresAt = Date.now() + config.duration + + const session = { + id: sessionId, + accountAddress: account.address, + expiresAt, + deviceFingerprint: config.deviceBinding ? + await this.getDeviceFingerprint() : null, + allowedIPs: config.ipRestriction + } + + // Store session securely + await this.storeSession(session) + + return this.generateJWT(session) + } + + async refreshSession(token: SessionToken): Promise { + const session = await this.validateAndGetSession(token) + + if (!session.refreshable) { + throw new Error('Session not refreshable') + } + + return this.createSession(session.account, { + duration: session.originalDuration, + refreshable: true, + deviceBinding: session.deviceBinding + }) + } +} +``` + +### Cross-Device Session Sync + +Synchronize sessions across user devices: + +```typescript +class CrossDeviceSessionManager { + async syncSessions(account: BaseAccount) { + // Get all active sessions for account + const sessions = await this.getActiveSessions(account.address) + + // Sync to secure cloud storage + await account.cloudSync.set('active_sessions', sessions, { + encrypted: true, + requireConsent: true + }) + } + + async authorizeNewDevice( + account: BaseAccount, + deviceInfo: DeviceInfo + ): Promise { + // Require approval from existing device + const approval = await this.requestDeviceApproval( + account, + deviceInfo + ) + + if (approval.approved) { + await this.addAuthorizedDevice(account, deviceInfo) + return true + } + + return false + } +} +``` + +## Enterprise Integration + +### SAML SSO Integration + +Integrate with enterprise SAML providers: + +```typescript +import { SAMLProvider } from '@base-account/enterprise' + +class EnterpriseAuth { + async configureSAML(config: SAMLConfig) { + const provider = new SAMLProvider({ + entityId: config.entityId, + ssoUrl: config.ssoUrl, + certificate: config.certificate, + attributeMapping: { + email: 
'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/emailaddress', + name: 'http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name', + department: 'http://schemas.example.com/department' + } + }) + + return provider + } + + async authenticateWithSAML( + samlResponse: string, + account: BaseAccount + ) { + const provider = this.getSAMLProvider() + const assertion = await provider.validateResponse(samlResponse) + + // Link SAML identity to Base Account + await account.linkIdentity('saml', { + nameId: assertion.nameId, + attributes: assertion.attributes, + sessionIndex: assertion.sessionIndex + }) + + return assertion + } +} +``` + +### OAuth 2.0 / OpenID Connect + +Support OAuth providers: + +```typescript +class OAuthIntegration { + async configureOAuth(provider: OAuthProvider) { + return { + clientId: provider.clientId, + redirectUri: provider.redirectUri, + scopes: ['openid', 'profile', 'email'], + pkceEnabled: true + } + } + + async handleOAuthCallback( + code: string, + state: string, + account: BaseAccount + ) { + // Exchange code for tokens + const tokens = await this.exchangeCodeForTokens(code, state) + + // Get user info + const userInfo = await this.getUserInfo(tokens.accessToken) + + // Link to Base Account + await account.linkIdentity('oauth', { + provider: this.providerName, + subject: userInfo.sub, + profile: userInfo + }) + + return { tokens, userInfo } + } +} +``` + +## Advanced Security Features + +### Risk-Based Authentication + +Implement adaptive authentication based on risk factors: + +```typescript +class RiskAssessment { + async assessRisk( + account: BaseAccount, + context: AuthContext + ): Promise { + const factors = { + deviceTrust: await this.assessDeviceTrust(context.device), + locationRisk: await this.assessLocationRisk(context.location), + behaviorPattern: await this.assessBehaviorPattern( + account, + context.behavior + ), + networkRisk: await this.assessNetworkRisk(context.network) + } + + const score = 
this.calculateRiskScore(factors) + + return { + score, + factors, + recommendation: this.getRecommendation(score) + } + } + + getRecommendation(score: number): AuthRecommendation { + if (score < 0.3) return 'ALLOW' + if (score < 0.7) return 'CHALLENGE' + return 'DENY' + } +} +``` + +### Zero-Knowledge Proofs + +Implement privacy-preserving authentication: + +```typescript +import { generateProof, verifyProof } from '@base-account/zk' + +class ZKAuthentication { + async generateIdentityProof( + account: BaseAccount, + claim: IdentityClaim + ): Promise { + const circuit = await this.loadCircuit('identity-verification') + + const proof = await generateProof(circuit, { + privateInputs: { + identity: account.identity, + secret: account.zkSecret + }, + publicInputs: { + claim: claim, + timestamp: Date.now() + } + }) + + return proof + } + + async verifyIdentityProof(proof: ZKProof): Promise { + const circuit = await this.loadCircuit('identity-verification') + return verifyProof(circuit, proof) + } +} +``` + +## Audit and Compliance + +### Authentication Logging + +Comprehensive audit logging: + +```typescript +class AuthenticationAuditor { + async logAuthEvent(event: AuthEvent) { + const auditLog = { + timestamp: new Date().toISOString(), + eventType: event.type, + accountAddress: event.account, + deviceId: event.deviceId, + ipAddress: event.ipAddress, + userAgent: event.userAgent, + success: event.success, + failureReason: event.failureReason, + riskScore: event.riskScore, + metadata: event.metadata + } + + // Store in immutable audit log + await this.auditStorage.append(auditLog) + + // Alert on suspicious activity + if (this.isSuspicious(event)) { + await this.alertSecurityTeam(auditLog) + } + } + + async generateComplianceReport( + startDate: Date, + endDate: Date + ): Promise { + const logs = await this.auditStorage.query({ + timestamp: { $gte: startDate, $lte: endDate } + }) + + return { + totalAuthentications: logs.length, + successfulAuthentications: 
logs.filter(l => l.success).length, + failedAuthentications: logs.filter(l => !l.success).length, + uniqueUsers: new Set(logs.map(l => l.accountAddress)).size, + riskDistribution: this.calculateRiskDistribution(logs), + complianceStatus: this.assessCompliance(logs) + } + } +} +``` + +### GDPR Compliance + +Implement privacy controls: + +```typescript +class PrivacyManager { + async handleDataRequest( + account: BaseAccount, + requestType: 'export' | 'delete' | 'rectify' + ) { + switch (requestType) { + case 'export': + return this.exportUserData(account) + case 'delete': + return this.deleteUserData(account) + case 'rectify': + return this.rectifyUserData(account) + } + } + + async exportUserData(account: BaseAccount) { + const data = { + profile: await account.getProfile(), + authHistory: await this.getAuthHistory(account), + linkedIdentities: await account.getLinkedIdentities(), + preferences: await account.getPreferences() + } + + // Anonymize sensitive data + return this.anonymizeExport(data) + } + + async deleteUserData(account: BaseAccount) { + // Soft delete with retention policy + await account.markForDeletion({ + retentionPeriod: 90, // days + preserveAuditTrail: true + }) + + // Remove from active systems + await this.removeFromActiveSystems(account) + } +} +``` + +## Testing Advanced Authentication + +### Unit Tests + +```typescript +describe('Advanced Authentication', () => { + describe('MFA', () => { + it('should require both passkey and TOTP', async () => { + const account = await createTestAccount() + const mfa = new MultiFactorAuth() + + await mfa.setupTOTP(account) + + const result = await mfa.authenticate(account, { + passkey: validPasskey, + totp: validTOTP + }) + + expect(result.success).toBe(true) + expect(result.factors).toHaveLength(2) + }) + }) + + describe('Session Management', () => { + it('should refresh sessions before expiry', async () => { + const sessionManager = new SessionManager() + const session = await 
sessionManager.createSession(account, { + duration: 3600000, // 1 hour + refreshable: true + }) + + // Fast-forward time + jest.advanceTimersByTime(3000000) // 50 minutes + + const refreshed = await sessionManager.refreshSession(session) + expect(refreshed).toBeDefined() + expect(refreshed.expiresAt).toBeGreaterThan(session.expiresAt) + }) + }) +}) +``` + +## Best Practices + +### Security Guidelines + +1. **Defense in Depth**: Layer multiple authentication factors +2. **Zero Trust**: Verify every request regardless of source +3. **Least Privilege**: Grant minimal necessary permissions +4. **Regular Rotation**: Rotate secrets and tokens regularly + +### Performance Optimization + +1. **Cache Wisely**: Cache non-sensitive authentication data +2. **Async Operations**: Use async patterns for auth flows +3. **Connection Pooling**: Pool database connections +4. **Rate Limiting**: Implement intelligent rate limiting + +### User Experience + +1. **Progressive Enhancement**: Start simple, add complexity gradually +2. **Clear Communication**: Explain security requirements +3. **Fallback Options**: Always provide recovery mechanisms +4. **Responsive Design**: Work across all device types + +## Conclusion + +Advanced authentication patterns enable you to build secure, enterprise-ready applications while maintaining excellent user experience. Start with basic patterns and gradually implement more sophisticated features as your application grows. + +Remember to always prioritize security while keeping the user experience smooth and intuitive. diff --git a/docs/cookbook/build-cross-chain-apps.mdx b/docs/cookbook/build-cross-chain-apps.mdx new file mode 100644 index 00000000..d9fcd473 --- /dev/null +++ b/docs/cookbook/build-cross-chain-apps.mdx @@ -0,0 +1,357 @@ +--- +title: "Build Cross-Chain Applications" +description: "Learn how to build applications that work seamlessly across multiple blockchain networks using Base as your primary chain." 
+--- + +# Build Cross-Chain Applications + +Building applications that work across multiple blockchain networks is essential for reaching users wherever they are. This guide shows you how to create cross-chain applications using Base as your primary chain while integrating with other networks. + +## Overview + +Cross-chain applications allow users to interact with multiple blockchain networks from a single interface. By building on Base, you get the benefits of low fees and fast transactions while maintaining compatibility with the broader Ethereum ecosystem. + +## Key Benefits + +- **Expanded user base**: Reach users across different blockchain networks +- **Enhanced liquidity**: Access assets from multiple chains +- **Improved user experience**: Seamless interactions without manual bridging +- **Future-proof architecture**: Ready for a multi-chain world + +## Architecture Patterns + +### 1. Hub and Spoke Model + +Use Base as your primary hub while connecting to other chains: + +```typescript +// Base as the primary chain +const baseConfig = { + chainId: 8453, + name: 'Base', + contracts: { + multicall: '0x...', + registry: '0x...' + } +} + +// Connected chains +const supportedChains = [ + { chainId: 1, name: 'Ethereum' }, + { chainId: 137, name: 'Polygon' }, + { chainId: 42161, name: 'Arbitrum' } +] +``` + +### 2. 
Multi-Chain State Management + +Synchronize state across chains: + +```typescript +interface CrossChainState { + baseState: BaseChainState + remoteStates: Map + pendingBridges: BridgeTransaction[] +} + +class CrossChainManager { + async syncState(chainId: number) { + const remoteState = await this.fetchRemoteState(chainId) + this.updateState(chainId, remoteState) + } +} +``` + +## Implementation Steps + +### Step 1: Set Up Multi-Chain Configuration + +Configure your application to support multiple networks: + +```typescript +import { createConfig, http } from 'wagmi' +import { base, mainnet, polygon, arbitrum } from 'wagmi/chains' + +const config = createConfig({ + chains: [base, mainnet, polygon, arbitrum], + transports: { + [base.id]: http(), + [mainnet.id]: http(), + [polygon.id]: http(), + [arbitrum.id]: http(), + }, +}) +``` + +### Step 2: Implement Bridge Integration + +Integrate with bridge protocols for asset transfers: + +```typescript +import { LayerZero, Stargate } from '@layerzerolabs/sdk' + +class BridgeService { + async bridgeAssets( + fromChain: number, + toChain: number, + asset: string, + amount: bigint + ) { + // Use LayerZero for messaging + const lzEndpoint = this.getLZEndpoint(fromChain) + + // Bridge assets using Stargate + const bridgeTx = await this.stargate.swap( + fromChain, + toChain, + asset, + amount + ) + + return bridgeTx + } +} +``` + +### Step 3: Create Unified User Interface + +Build a single interface that works across chains: + +```typescript +function CrossChainDashboard() { + const [selectedChain, setSelectedChain] = useState(base.id) + const { data: balances } = useMultiChainBalances() + + return ( +
+ <div> + <ChainSelector selected={selectedChain} onSelect={setSelectedChain} /> + <AssetOverview balances={balances} /></div>
+ ) +} +``` + +## Advanced Features + +### Cross-Chain Messaging + +Implement messaging between chains: + +```solidity +// CrossChainMessenger.sol +contract CrossChainMessenger { + mapping(uint16 => address) public trustedRemotes; + + function sendMessage( + uint16 _dstChainId, + bytes memory _message + ) external payable { + _lzSend( + _dstChainId, + _message, + payable(msg.sender), + address(0x0), + bytes(""), + msg.value + ); + } +} +``` + +### Unified Asset Management + +Create a unified view of assets across chains: + +```typescript +class AssetManager { + async getUnifiedPortfolio(userAddress: string) { + const chains = [base.id, mainnet.id, polygon.id] + + const portfolios = await Promise.all( + chains.map(chainId => + this.getChainPortfolio(chainId, userAddress) + ) + ) + + return this.consolidatePortfolios(portfolios) + } +} +``` + +## Best Practices + +### 1. Gas Optimization + +- Use Base for frequent operations due to low gas costs +- Batch transactions when possible +- Implement gas estimation across chains + +### 2. Security Considerations + +- Validate cross-chain messages +- Implement timeouts for bridge transactions +- Use trusted bridge protocols + +### 3. 
User Experience + +- Show clear bridging status +- Provide gas estimates +- Handle failed transactions gracefully + +## Testing Strategy + +### Unit Tests + +Test individual components: + +```typescript +describe('CrossChainManager', () => { + it('should sync state across chains', async () => { + const manager = new CrossChainManager() + await manager.syncState(polygon.id) + + expect(manager.getState(polygon.id)).toBeDefined() + }) +}) +``` + +### Integration Tests + +Test cross-chain flows: + +```typescript +describe('Bridge Integration', () => { + it('should bridge assets from Base to Ethereum', async () => { + const bridgeService = new BridgeService() + + const tx = await bridgeService.bridgeAssets( + base.id, + mainnet.id, + 'USDC', + parseUnits('100', 6) + ) + + expect(tx).toBeDefined() + }) +}) +``` + +## Monitoring and Analytics + +### Track Cross-Chain Metrics + +```typescript +class CrossChainAnalytics { + trackBridgeTransaction( + fromChain: number, + toChain: number, + asset: string, + amount: bigint + ) { + this.analytics.track('bridge_transaction', { + from_chain: fromChain, + to_chain: toChain, + asset, + amount: amount.toString() + }) + } +} +``` + +### Health Monitoring + +Monitor bridge and RPC health: + +```typescript +class HealthMonitor { + async checkChainHealth(chainId: number) { + try { + const latestBlock = await this.getLatestBlock(chainId) + const blockAge = Date.now() - latestBlock.timestamp * 1000 + + return { + healthy: blockAge < 60000, // 1 minute + blockAge, + chainId + } + } catch (error) { + return { healthy: false, error, chainId } + } + } +} +``` + +## Deployment Considerations + +### Multi-Chain Deployment + +Deploy contracts across supported chains: + +```typescript +// deploy.ts +async function deployMultiChain() { + const chains = [base, polygon, arbitrum] + + for (const chain of chains) { + await deployToChain(chain.id) + } +} +``` + +### Configuration Management + +Manage chain-specific configurations: + +```typescript 
+const chainConfigs = { + [base.id]: { + rpcUrl: process.env.BASE_RPC_URL, + contracts: { /* Base contracts */ } + }, + [polygon.id]: { + rpcUrl: process.env.POLYGON_RPC_URL, + contracts: { /* Polygon contracts */ } + } +} +``` + +## Common Challenges and Solutions + +### 1. Bridge Delays + +**Challenge**: Bridge transactions can take time +**Solution**: Implement optimistic UX with proper status tracking + +### 2. Gas Price Variations + +**Challenge**: Different gas costs across chains +**Solution**: Dynamic gas estimation and user warnings + +### 3. Chain Reliability + +**Challenge**: RPC endpoints can be unreliable +**Solution**: Implement fallback RPC providers + +## Resources + +- [LayerZero Documentation](https://layerzero.gitbook.io/) +- [Stargate Finance](https://stargate.finance/) +- [Chainlink CCIP](https://docs.chain.link/ccip) +- [Hyperlane Protocol](https://docs.hyperlane.xyz/) + +## Next Steps + +1. Choose your bridge protocol +2. Set up multi-chain configuration +3. Implement core bridging functionality +4. Add monitoring and analytics +5. Test thoroughly across all supported chains +6. Deploy and monitor + +Building cross-chain applications opens up new possibilities for user engagement and liquidity access. Start with Base as your primary chain and gradually expand to other networks as your application grows.