diff --git a/.cursorignore b/.cursorignore new file mode 100644 index 000000000..ed0183f7a --- /dev/null +++ b/.cursorignore @@ -0,0 +1,54 @@ + +.git/ +.vscode/ +.DS_Store +*.log +*.lock +*.sqlite +*.db + +# Deno specific +.deno/ +deno.lock +node_modules/* + +# Build output +dist/ +build/ + +# Environment variables +.env +.env.* + +# Temporary files +*.tmp +*.temp + +# IDE specific files +*.swp +*.swo +*.idea/ +*.vscode/ + +# OS generated files +Thumbs.db + +# Large media files +*.mp4 +*.tiff +*.avi +*.flv +*.mov +*.wmv + +# Compressed files +*.zip +*.rar +*.7z +*.gz + +# Dependency directories (if any) + + +# Fresh specific +_fresh/ \ No newline at end of file diff --git a/.cursorrules b/.cursorrules new file mode 100644 index 000000000..591c6cc21 --- /dev/null +++ b/.cursorrules @@ -0,0 +1,1027 @@ +# Adding version control information for better AI context +version_info: + deno: "2.1.4" + fresh: "1.7.3" + project: "BTCStampsExplorer" + +# Emoji and Unicode handling patterns +emoji_handling: + description: "Standardized approach for handling emoji ticks across the application" + flow: + api_layer: + - accepts_both_formats: ["emoji (🧧)", "unicode escape (\\U0001F9E7)"] + - decodes_uri: "decodeURIComponent for URL-encoded ticks" + - passes_through: "No format conversion at API layer" + repository_layer: + - converts_to_unicode: "For DB operations" + - converts_to_emoji: "For API responses" + - helper_functions: + - ensureUnicodeEscape: "Checks and converts to unicode escape if needed" + - convertResponseToEmoji: "Converts DB response ticks back to emoji" + database_layer: + - stores_unicode: "Always stores in unicode escape format" + examples: + emoji: "🧧" + unicode_escape: "\\U0001F9E7" + url_encoded: "%F0%9F%A7%A7" + rules: + - "API routes accept any format" + - "Repository handles all format conversions" + - "Database always stores unicode escape" + - "Responses always return emoji format" + - "No manual conversions in controllers or services" + +# Expanding code style with project-specific patterns +code_style: + framework: "Deno Fresh 2.1.4" + principles: + - write_concise_typescript + - use_functional_programming + - prefer_composition_over_inheritance + - use_descriptive_variable_names: + examples: + - isLoading + - hasError + # Adding SRC20-specific naming patterns based on your codebase + project_specific: + - prefix_src20_components: "SRC20" + - use_descriptive_suffixes: + - Card + - Tab + - Header + - TX + +# Expanding import conventions based on your actual imports +import_conventions: + patterns: + - use_npm_prefix: "for npm packages" + - use_dollar_prefix: "for project modules ($)" + - follow_deno_std: "for standard library" + - use_import_map: "in deno.json" + - prefer_jsr: + description: "Use JSR imports when available" + examples: + - "@std/assert": "jsr:/@std/assert@^1.0.9" + - "@std/async": "jsr:/@std/async@^1.0.5" + - "@std/crypto": "jsr:@std/crypto@^1.0.3" + - "@std/dotenv": "jsr:/@std/dotenv@^0.225.2" + - import_priority: + - jsr: "First choice for standard libraries and packages" + - deno.land/x: "Second choice if not on JSR" + - npm: "Last resort or for specific packages" + project_aliases: + core: + - "$/" + - "$client/" + - "$components/" + - "$islands/" + - "$lib/" + feature_specific: + - "$handlers/" + - "$constants" + - "$types/" + utility: + - "$globals" + +# Adding specific route patterns from your codebase +routing_patterns: + api_versioning: + - v2_endpoints: "/api/v2/[...path].ts" + - internal_endpoints: "/api/internal/" + dynamic_routes: + - address: 
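As a reference point for the emoji_handling flow above, a minimal sketch of the two repository-layer helpers it names, ensureUnicodeEscape and convertResponseToEmoji. The function names and the emoji/unicode-escape formats come from this rules file; the actual repository-layer implementations may differ.

```typescript
// Hypothetical sketch of the repository-layer helpers named above; the real
// implementations may differ.

/** Convert an emoji tick ("🧧") into the unicode-escape form stored in the DB ("\U0001F9E7"). */
export function ensureUnicodeEscape(tick: string): string {
  if (tick.startsWith("\\U")) return tick; // already in unicode-escape form
  return Array.from(tick).map((ch) => {
    const cp = ch.codePointAt(0)!;
    return cp > 0x7f
      ? `\\U${cp.toString(16).toUpperCase().padStart(8, "0")}`
      : ch;
  }).join("");
}

/** Convert a DB tick ("\U0001F9E7") back to emoji ("🧧") for API responses. */
export function convertResponseToEmoji(tick: string): string {
  return tick.replace(
    /\\U([0-9A-Fa-f]{8})/g,
    (_match, hex: string) => String.fromCodePoint(parseInt(hex, 16)),
  );
}
```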
"[address].ts" + - block: "[block_index].ts" + - wildcard: "[...path].ts" + +# Expanding directory structure with actual patterns +directory_structure: + root: "BTCStampsExplorer" + directories: + components: + description: "Server-side rendered components (SSR)" + rules: + - no_client_javascript + - server_rendered_html + patterns: + - group_by_feature + - separate_layout_components + islands: + description: "Client-side interactive components" + rules: + - include_browser_javascript + - can_use_hooks_and_state + examples: + - "BlockInfo.tsx - state and useEffect" + - "SRC20TokenMintingCard.tsx" + - "SRC20DetailsTab.tsx" + patterns: + - group_by_feature: + - "src20/cards/" + - "src20/details/" + routes: + api: + - v2 + - internal + patterns: + - feature_based_grouping + - version_prefixing + +# Development workflow with actual tasks from deno.json +development_workflow: + environments: + development: + command: "deno task dev" + options: "--inspect --allow-all --watch" + excludes: + - "dist/" + - "node_modules/" + - "_fresh/" + - ".git/" + - "coverage/" + - "tmp/" + - "tests/" + production: + command: "deno task start" + prerequisites: "check_version" + testing: + commands: + version: "deno test -A --log-level=debug tests/versioning/" + src20: "deno test --allow-net tests/src20/" + patterns: + - group_by_feature + - use_descriptive_names + +# Adding project-specific type patterns +typescript_patterns: + src20: + - use_typescript_interfaces_for_tokens + - implement_strict_type_checking + compiler_options: + strict: true + noUnusedLocals: true + noUnusedParameters: true + noImplicitReturns: true + exactOptionalPropertyTypes: true + noFallthroughCasesInSwitch: true + +# Component patterns based on your actual components +component_patterns: + rules: + - use_function_declarations + - implement_props_interfaces + - use_preact_children_type + - use_fresh_handlers + naming: + - prefix_feature_components + - use_descriptive_suffixes + layouts: + - HowToLayout + - CollectionLayout + - StampLayout + +# Error handling patterns from your codebase +error_handling: + practices: + - use_fresh_error_boundaries + - implement_proper_logging + - return_http_status_codes + - handle_async_errors_with_trycatch + api_responses: + - use_response_util + - implement_status_codes + - provide_error_messages + +# State management based on your implementation +state_management: + preferences: + - use_preact_signals + - prefer_server_side_state + - use_islands_sparingly + patterns: + - isolate_client_state + - use_fresh_context + - implement_handlers + +# Performance optimizations from your code +performance: + optimizations: + - implement_fresh_streaming + - use_partial_hydration + - optimize_assets + - implement_caching + image_handling: + - use_webp_format + - include_size_data + - implement_lazy_loading + +# Fresh specific features used in your project +fresh_specific: + features: + - use_preact_signals + - configure_typescript + - use_tailwind_support + - use_jsx_runtime + - use_plugin_system + plugins: + - tailwind: + config: "tailwind.config.ts" + content: ["{routes,islands,components}/**/*.{ts,tsx}"] + +# Testing patterns from your test files +testing: + frameworks: + - use_deno_testing + - write_unit_tests + - implement_component_tests + commands: + - "deno task test:version" + - "deno task test:src20" + - "deno task test:src20:watch" + patterns: + - test_api_versioning + - test_src20_transactions + - implement_dredd_tests + +# Code quality standards from your configuration +code_quality: + formatting: + 
line_width: 80 + indent: 2 + use_spaces: true + single_quote: false + commands: + check: "deno task check:fmt" + debug: "deno task check:fmt:debug" + excludes: + - "**/_fresh/**/*" + - "**/server/**/*" + - "**/node_modules/**/*" + - "**/dist/**/*" + - "**/.git/**/*" + - "**/coverage/**/*" + - "**/tmp/**/*" + - "**/.cache/**/*" + - "**/.*/", + - "**/.*" + - "**/build/**/*" + - "**/vendor/**/*" + - "**/*.yml" + - "**/*.yaml" + - "**/*.css" + - "**/*.json" + - "**/*.md" + - "**/*.html" + - "**/static/**/*" + file_types: + include: ["**/*.ts", "**/*.tsx"] + ci: + workflow: "github-actions" + step_name: "Check formatting" + command: "deno task check:fmt" + linting: + rules: + - "fresh" + - "recommended" + exclude: + - "no-explicit-any" + checks: + command: "deno task check" + run: "before commits" + sequence: + - format: "deno task check:fmt" + - lint: "deno task check:lint" + - types: "deno task check:types" + +# Security implementations from your codebase +security: + requirements: + - implement_csrf_protection + - use_fresh_security_headers + - sanitize_user_input + - follow_owasp_guidelines + api_security: + - version_headers + - proper_cors_config + - rate_limiting + +# Documentation patterns +documentation: + api: + - use_openapi_schema + - implement_redocly + - maintain_swagger_docs + validation: + - validate_schema: "deno task validate:schema" + - run_dredd_tests: "deno task dredd" + formats: + - markdown + - yaml + - openapi + +# Environment and configuration +environment: + development: + base_url: "https://dev.bitcoinstamps.xyz" + skip_redis: true + production: + base_url: "https://stampchain.io" + skip_redis: false + variables: + - DENO_ENV + - DEV_BASE_URL + - SKIP_REDIS_CONNECTION + +# SRC20 Optimization Plan +src20_optimization: + overview: + description: "Safe optimization plan for SRC20 data fetching and filtering with focus on API stability" + goals: + - "Maintain backward compatibility for all API routes" + - "Introduce enhanced functionality alongside existing" + - "Optimize data fetching patterns" + - "Support comprehensive filtering" + - "Improve performance monitoring" + + critical_dependencies: + api_routes: + - endpoint: "/api/v2/src20/index" + function: "handleSrc20TransactionsRequest" + notes: "Base SRC20 transaction endpoint - must maintain response format" + + - endpoint: "/api/v2/src20/tick/[tick]/deploy" + function: "handleDeploymentRequest" + notes: "Critical deployment info endpoint" + + - endpoint: "/api/v2/src20/balance/*" + function: "handleSrc20BalanceRequest" + notes: "Balance endpoints - high traffic" + + functions_to_refactor: + queryService: + fetchAndFormatSrc20Data: + strategy: "Parallel versions" + current: "Keep existing implementation untouched" + new_version: + name: "fetchAndFormatSrc20DataV2" + features: + - "Optional data enrichment" + - "Parallel data fetching" + - "Performance monitoring" + - "Fallback safety" + shared_helpers: + - name: "enrichData" + purpose: "Centralized data enrichment logic" + features: + - "Market data enrichment" + - "Mint progress data" + - "Batch processing" + - "Error handling with fallback" + + controller: + fetchSrc20DetailsWithHolders: + strategy: "Gradual migration" + steps: + - "Create V2 version using new query service" + - "Test in parallel with existing" + - "Migrate internal routes first" + - "Monitor performance before full migration" + + testing_strategy: + unit_tests: + queryService: + - name: "fetchAndFormatSrc20DataV2" + cases: + - "Empty response handling" + - "Pagination edge cases" + - "Filter 
combinations" + - "Market data enrichment" + - "Error fallbacks" + - name: "enrichData" + cases: + - "Batch processing limits" + - "Partial data handling" + - "Cache hits/misses" + - "Error recovery" + + integration_tests: + critical_routes: + - endpoint: "/api/v2/src20/index" + cases: + - "Basic pagination" + - "Filter combinations" + - "Sort orders" + - "Response format stability" + + - endpoint: "/api/v2/src20/tick/[tick]/deploy" + cases: + - "Valid deployment data" + - "Non-existent tick" + - "Emoji tick handling" + - "Response structure" + + - endpoint: "/api/v2/src20/balance/*" + cases: + - "Valid balance data" + - "Multiple holder snapshots" + - "Zero balance handling" + - "Response pagination" + + internal_routes: + - endpoint: "/api/internal/src20/trending" + cases: + - "Trending calculation" + - "Time window filtering" + - "Cache validation" + - "Data enrichment" + + performance_tests: + scenarios: + - name: "High load - GET /api/v2/src20/index" + conditions: + - "1000 concurrent users" + - "Mixed filter combinations" + - "Various page sizes" + metrics: + - "Response time < 500ms" + - "Error rate < 0.1%" + - "Memory usage stable" + + - name: "Cache effectiveness" + conditions: + - "Repeated queries" + - "Market data updates" + - "Trending calculations" + metrics: + - "Cache hit rate > 80%" + - "Stale data < 30s" + + - name: "Data enrichment overhead" + conditions: + - "Large result sets" + - "Multiple data sources" + - "Parallel requests" + metrics: + - "Enrichment time < 100ms" + - "Memory overhead < 50MB" + + validation_suites: + response_format: + - "Schema validation" + - "Type consistency" + - "Nullable handling" + - "Empty state handling" + + data_integrity: + - "Market data consistency" + - "Balance calculation accuracy" + - "Mint progress tracking" + - "Holder count validation" + + error_handling: + - "API error responses" + - "Fallback behaviors" + - "Cache invalidation" + - "Recovery procedures" + + optimization_phases: + phase_1_foundation: + title: "Enhanced Query Service" + steps: + - "Add fetchAndFormatSrc20DataV2 alongside existing" + - "Implement enrichData helper" + - "Add performance monitoring" + - "Comprehensive testing suite" + - "Document all changes" + safety_measures: + - "Type safety for all new functions" + - "Error handling with fallbacks" + - "Performance threshold warnings" + - "Response format validation" + testing: + setup: + - "Create test data fixtures" + - "Set up integration test environment" + - "Define performance baselines" + + validation: + - "Unit test new functions" + - "Integration test critical paths" + - "Validate response formats" + - "Measure performance impact" + + phase_2_internal_migration: + title: "Update Internal Routes" + steps: + - "Migrate trending.ts to V2" + - "Update index.tsx to use V2" + - "Add filter support to internal routes" + - "Test thoroughly in staging" + monitoring: + - "Response time comparisons" + - "Error rate tracking" + - "Memory usage patterns" + testing: + setup: + - "Expand test fixtures for internal routes" + - "Create parallel test environments" + - "Set up monitoring dashboards" + + validation: + - "Compare V1 vs V2 responses" + - "Validate data consistency" + - "Monitor performance metrics" + - "Test error scenarios" + + phase_3_optimization: + title: "Performance Optimization" + steps: + - "Implement caching strategy" + - "Optimize batch sizes" + - "Add parallel processing" + - "Enhance error handling" + metrics: + - "Response times" + - "Cache hit rates" + - "Error rates" + - "Memory usage" + 
testing: + setup: + - "Configure load test scenarios" + - "Set up long-running tests" + - "Prepare rollback validation" + + validation: + - "Run load tests" + - "Validate cache behavior" + - "Monitor resource usage" + - "Test recovery procedures" + + deployment_strategy: + staging: + steps: + - "Deploy V2 functions" + - "Run full test suite" + - "Compare performance metrics" + - "Validate response formats" + + validation: + - "Zero deployment errors" + - "All tests passing" + - "Performance within bounds" + - "No type mismatches" + + production: + steps: + - "Deploy with feature flags" + - "Gradual traffic migration" + - "Monitor error rates" + - "Validate response times" + + validation: + - "Error rate < 0.1%" + - "Response time < baseline + 10%" + - "Cache hit rate > 80%" + - "Zero critical errors" + + type_updates: + overview: + description: "Type system consolidation and alignment with OpenAPI schema" + goals: + - "Consolidate overlapping types between globals.d.ts and src20.d.ts" + - "Ensure type definitions match API contract in schema.yml" + - "Improve type safety across the application" + - "Reduce duplication and improve maintainability" + + consolidation_plan: + shared_types: + base_interfaces: + - name: "SRC20Base" + description: "Common properties shared across SRC20 types" + source: "schema.yml#/components/schemas/Src20Detail" + properties: + - "tx_hash: string" + - "block_index: number" + - "tick: string" + - "op: SRC20Operation" + - "amt?: string | bigint" + - "block_time: Date" + + response_types: + - name: "SRC20Response" + description: "Standardized response format for SRC20 endpoints" + source: "schema.yml#/components/schemas/Src20ResponseBody" + properties: + - "last_block: number" + - "data: SRC20Detail" + - "performance?: PerformanceMetrics" + + request_types: + - name: "SRC20RequestParams" + description: "Unified request parameters" + source: "schema.yml components" + properties: + - "tick?: string" + - "op?: SRC20Operation" + - "limit?: number" + - "page?: number" + - "sortBy?: string" + + new_types: + performance_monitoring: + PerformanceMetrics: + description: "Metrics for monitoring and optimization" + properties: + - "duration: number" + - "cacheHit: boolean" + - "dataSize: number" + - "queryTime?: number" + - "enrichmentTime?: number" + + market_data: + MarketMetrics: + description: "Market-related data types" + properties: + - "floor_price: number" + - "market_cap: number" + - "volume_24h: number" + - "holders: number" + + enrichment_options: + EnrichmentConfig: + description: "Configuration for data enrichment" + properties: + - "includeMarketData: boolean" + - "enrichWithProgress: boolean" + - "batchSize?: number" + - "cacheDuration?: number" + + filter_options: + interface: "FilterOptions" + description: "Comprehensive filtering options for SRC20 tokens" + options: + minting: + description: "Filter by minting status and activity" + properties: + - "progress: number - Current mint progress percentage" + - "recent_activity: number - Recent mint transactions count" + + trending_mints: + description: "Filter trending mints by time period" + properties: + - "time_period: '24h' | '7d' - Time window for trending calculation" + - "min_mints: number - Minimum number of mint transactions" + + deploy: + description: "Filter by deployment date" + properties: + - "date_range: DateRange - Start and end dates for deployment" + + supply: + description: "Filter by token supply range" + properties: + - "min: number - Minimum supply amount" + - "max: number - Maximum supply 
amount" + + marketcap: + description: "Filter by market capitalization" + properties: + - "min: number - Minimum market cap in BTC" + - "max: number - Maximum market cap in BTC" + + holders: + description: "Filter by number of holders" + properties: + - "min: number - Minimum number of holders" + - "max: number - Maximum number of holders" + + volume: + description: "Filter by trading volume" + properties: + - "min: number - Minimum volume in BTC" + - "time_period: '24h' | '7d' - Time period for volume calculation" + + price_change: + description: "Filter by price movement" + properties: + - "percentage: number - Price change percentage" + - "time_period: '24h' | '7d' - Time period for price change" + + main_scenarios: + all_tokens_view: + description: "Complete view of all SRC20 tokens" + data_requirements: + - "All DEPLOY transactions" + - "Mint progress for each token" + - "Market data for fully minted tokens" + - "Holders count for each token" + filtering_options: + - "By status (minting/outminted)" + - "By supply range" + - "By market cap range" + - "By holder count range" + - "By volume range" + - "By price change" + sorting_options: + - "ASC/DESC by deploy date" + - "By market cap" + - "By holder count" + - "By volume" + + minting_only_view: + description: "View of actively minting tokens" + data_requirements: + - "DEPLOY transactions where progress < 100%" + - "Current mint progress" + - "Holders count" + - "Recent mint activity" + filtering_options: + - "By supply range" + - "By holder count range" + - "By mint progress range" + - "By mint activity" + sorting_options: + - "By mint progress" + - "By recent activity" + - "By deploy date" + + trending_view: + description: "View of trending tokens" + data_requirements: + - "Recent mint transactions" + - "Mint progress" + - "Top mints percentage" + - "Holders count" + filtering_options: + - "By time period (24h, 7d)" + - "By mint volume" + - "By holder growth" + sorting_options: + - "By mint activity" + - "By holder growth" + + specialized_views: + trending_minting: + description: "Trending tokens currently minting" + data_requirements: + - "Active minting tokens" + - "Recent mint transactions" + - "Mint progress" + - "Top mints percentage" + parameters: + - "Transaction count (default 1000)" + - "Time period" + - "Page/limit" + + top_market_cap: + description: "Top tokens by market capitalization" + data_requirements: + - "Fully minted tokens" + - "Market data (price, mcap, volume)" + - "Holder counts" + parameters: + - "Page/limit" + - "Min market cap" + - "Min volume" + + migrations: + phase_1: + - "Create base interfaces in shared types directory" + - "Update existing types to extend base interfaces" + - "Add JSDoc documentation for all types" + + phase_2: + - "Implement new performance monitoring types" + - "Add market data types with proper validation" + - "Update service layer to use new types" + + phase_3: + - "Consolidate duplicate types across files" + - "Remove deprecated type definitions" + - "Update all imports to use new type paths" + + validation: + typescript: + - "Run type checking with strict mode" + - "Verify no any types in core logic" + - "Ensure proper nullability handling" + + runtime: + - "Add runtime type validation for API responses" + - "Implement schema validation using OpenAPI spec" + - "Add error boundaries for type mismatches" + + documentation: + - "Generate TypeDoc documentation" + - "Update API documentation to reflect type changes" + - "Add examples for common type usage" + + affected_files: + 
services: + - "server/services/src20/queryService.ts" + changes: + - "Add fetchAndFormatSrc20DataV2" + - "Add enrichData helper" + - "Add performance monitoring" + - "Add type definitions" + + - "server/services/src20/marketService.ts" + changes: + - "Add batch market data fetching" + - "Add caching layer" + - "Add performance metrics" + + controller: + - "server/controller/src20Controller.ts" + changes: + - "Add V2 versions of handlers" + - "Implement gradual migration" + - "Add monitoring" + + routes: + internal: + - "routes/src20/index.tsx" + - "routes/api/internal/src20/trending.ts" + changes: + - "Migrate to V2 functions" + - "Add enhanced error handling" + - "Add performance monitoring" + + api_v2: + - "routes/api/v2/src20/*" + notes: "Maintain existing behavior" + changes: + - "Add performance monitoring only" + - "No functional changes in Phase 1" + + backward_compatibility: + guarantees: + - "All existing API response formats maintained" + - "No breaking changes to public endpoints" + - "Fallback to original behavior on error" + - "Performance regression protection" + + monitoring: + - "Response time tracking" + - "Error rate comparison" + - "Memory usage patterns" + - "Cache effectiveness" + + rollback_plan: + triggers: + - "Error rate increase > 0.1%" + - "Response time increase > 100ms" + - "Memory usage spike > 20%" + + steps: + - "Disable V2 functions" + - "Revert to original implementations" + - "Clear caches" + - "Notify monitoring" + + data_fetching_strategy: + overview: + description: "Strategy for optimizing data fetching between backend routes and frontend client-side" + goals: + - "Optimize above-the-fold content loading" + - "Balance server vs client-side data fetching" + - "Maintain responsive UI during data loading" + - "Support efficient pagination for large datasets" + + current_state: + client_side: + - component: "SRC20Section.tsx" + benefits: + - "Real-time updates without full page reload" + - "Smooth UI transitions" + - "Reduced server load for small datasets" + considerations: + - "Initial page load performance" + - "Memory usage for large datasets" + - "SEO implications" + + server_side: + - routes: "api/v2/src20/*" + benefits: + - "Efficient pagination" + - "Better handling of large datasets" + - "Improved SEO" + - "Reduced client-side processing" + considerations: + - "Additional server load" + - "Page reload on data updates" + + optimization_plan: + phase_1_analysis: + tasks: + - "Identify critical above-the-fold content" + - "Measure current performance metrics" + - "Map data dependencies between components" + - "Analyze dataset sizes and update frequencies" + metrics: + - "Time to First Contentful Paint (FCP)" + - "Time to Interactive (TTI)" + - "First Input Delay (FID)" + - "Memory usage patterns" + + phase_2_hybrid_approach: + server_side: + components: + - name: "Initial token list" + strategy: "Pre-render first page of results" + benefits: + - "Faster initial page load" + - "Better SEO" + - "Reduced client-side processing" + + - name: "Market data summaries" + strategy: "Server-side aggregation" + benefits: + - "Reduced API calls" + - "Consistent data presentation" + + client_side: + components: + - name: "Trending tokens" + strategy: "Real-time updates via client" + benefits: + - "Immediate user feedback" + - "Reduced server load" + + - name: "Filter interactions" + strategy: "Client-side filtering with cached data" + benefits: + - "Instant UI response" + - "Reduced API calls" + + phase_3_implementation: + steps: + - "Implement server-side 
rendering for initial data" + - "Add client-side hydration for interactivity" + - "Optimize data caching strategy" + - "Implement progressive loading" + + optimizations: + above_fold: + - "Pre-render critical content" + - "Defer non-essential data loading" + - "Implement placeholder loading states" + + pagination: + - "Virtual scrolling for large lists" + - "Prefetch next page data" + - "Cache previous pages" + + caching: + - "Implement service worker caching" + - "Use memory cache for frequent lookups" + - "Add cache invalidation strategy" + + component_specific_plans: + SRC20Section: + current: "Client-side fetching" + proposed: + initial_load: + - "Server-side render first page" + - "Include critical market data" + - "Pre-calculate trending tokens" + + subsequent_updates: + - "Client-side fetch for filters" + - "Real-time updates for trending" + - "Progressive load for additional pages" + + optimization_targets: + - "Reduce Time to First Meaningful Paint" + - "Minimize Content Layout Shift" + - "Optimize memory usage" + + TokenDetails: + current: "Server-side rendering" + maintain: true + reasons: + - "Complex data aggregation" + - "SEO requirements" + - "Infrequent updates" + + monitoring_and_metrics: + performance: + - "Page load times by component" + - "Server response times" + - "Client-side rendering times" + - "Memory usage patterns" + + user_experience: + - "Time to interactivity" + - "Input responsiveness" + - "Layout stability" + + resource_usage: + - "API call frequency" + - "Cache hit rates" + - "Browser memory usage" + + validation_criteria: + - "Improved or maintained Lighthouse scores" + - "Reduced Time to Interactive" + - "Stable memory usage" + - "Maintained server response times" diff --git a/.dockerignore b/.dockerignore index 40b878db5..ba6a89af4 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1 +1,3 @@ -node_modules/ \ No newline at end of file +node_modules/ +.git +.DS_Store \ No newline at end of file diff --git a/.env.sample b/.env.sample index 324618bf0..96378858a 100644 --- a/.env.sample +++ b/.env.sample @@ -4,7 +4,6 @@ DB_PASSWORD= DB_PORT= DB_NAME=btc_stamps DB_MAX_RETRIES=5 -API_BASE_URL= QUICKNODE_ENDPOINT= QUICKNODE_API_KEY= MINTING_SERVICE_FEE_ADDRESS= @@ -12,4 +11,7 @@ MINTING_SERVICE_FEE_FIXED_SATS= MINTING_SERVICE_FEE_ENABLED= # 1 or 0 IMAGES_SRC_PATH=https://stampchain.io/stamps CACHE=false # for prod put in false need to review cache system -ELASTICACHE_ENDPOINT= \ No newline at end of file +ELASTICACHE_ENDPOINT= +ELASITCACHE_PORT= +CSRF_SECRET_KEY= +OPENSTAMP_API_KEY= \ No newline at end of file diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 1621eb0fd..d680561d9 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -1,45 +1,107 @@ -name: Deploy +name: Code Quality on: push: - branches: main + branches: [dev] pull_request: - branches: main + branches: [main, dev] + types: [opened, synchronize, reopened] jobs: - deploy: - name: Deploy + quality-checks: + name: Quality Checks runs-on: ubuntu-latest + timeout-minutes: 15 # Prevent hanging jobs permissions: - id-token: write # Needed for auth with Deno Deploy - contents: read # Needed to clone the repository + id-token: write # Needed for auth with Deno Deploy + contents: read # Needed to clone the repository + pull-requests: write # Needed for PR comments + + env: + CSRF_SECRET_KEY: "12323" # Placeholder key steps: - - name: Clone repository + - name: Checkout repository uses: actions/checkout@v3 - - name: Install Deno - uses: 
denoland/setup-deno@v1 + # Cache npm dependencies + - name: Setup Node.js + uses: actions/setup-node@v3 with: - deno-version: v1.x + node-version: '20' + cache: 'npm' - - name: Build step - run: deno task build + - name: Install npm dependencies + run: npm ci # More reliable than npm install + + - name: Validate OpenAPI Schema + run: npm run validate:ci + + # Cache Deno dependencies + - name: Setup Deno + uses: denoland/setup-deno@v2 + with: + deno-version: v2.1.4 + + - name: Cache Deno dependencies + uses: actions/cache@v3 + with: + path: | + ~/.deno + ~/.cache/deno + key: ${{ runner.os }}-deno-${{ hashFiles('**/deps.ts') }} + restore-keys: | + ${{ runner.os }}-deno- - # - name: Start Deno app - # run: deno run -A dev.ts & - # env: - # PORT: 8000 + - name: Install reviewdog + uses: reviewdog/action-setup@v1 + with: + reviewdog_version: latest - # - name: Wait for Deno app to be ready - # run: | - # echo "Waiting for Deno app to start..." - # sleep 10 + # Code Quality Checks + - name: Check formatting + id: fmt + run: deno fmt --check + continue-on-error: true # Allow the workflow to continue for reviewdog - # - name: Install Dredd - # run: npm install -g dredd + - name: Report formatting issues + if: always() && steps.fmt.outcome == 'failure' + run: | + deno fmt --check | reviewdog -f=diff \ + -name="deno-fmt" \ + -reporter=github-pr-review \ + -filter-mode=added \ + -fail-level=any + env: + REVIEWDOG_GITHUB_API_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Check linting + id: lint + run: deno task check:lint + continue-on-error: true # Allow the workflow to continue for reviewdog + + - name: Report linting issues + if: always() && steps.lint.outcome == 'failure' + run: | + deno lint 2>&1 | reviewdog \ + -name="deno-lint" \ + -reporter=github-pr-review \ + -filter-mode=added \ + -efm="%f:%l:%c: %m" \ + -fail-level=error + env: + REVIEWDOG_GITHUB_API_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + # Fail the workflow if any checks failed + - name: Check for failures + if: steps.fmt.outcome == 'failure' || steps.lint.outcome == 'failure' + run: exit 1 + + # Build check + - name: Build project + run: deno task build - # - name: Run Dredd tests - # run: dredd - # env: - # DREDD_SERVER: http://localhost:8000 \ No newline at end of file + # Commented out for future implementation + # - name: Type check + # run: deno task check:types + # continue-on-error: true # Optional: allow type checks to fail for now \ No newline at end of file diff --git a/.github/workflows/docker-test.yml b/.github/workflows/docker-test.yml new file mode 100644 index 000000000..e77e96bb0 --- /dev/null +++ b/.github/workflows/docker-test.yml @@ -0,0 +1,58 @@ +name: Docker Build Test + +on: + workflow_run: + workflows: ["Code Quality"] + types: + - completed + branches: + - main + - dev + +jobs: + docker-build-test: + # Only run if the Code Quality workflow succeeded + if: ${{ github.event.workflow_run.conclusion == 'success' }} + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v3 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Build Docker image + uses: docker/build-push-action@v5 + with: + context: . 
+ load: true + tags: stamps-app:test + cache-from: type=gha + cache-to: type=gha,mode=max + + - name: Test container startup + run: | + # Run container in detached mode without port binding + docker run -d \ + --name test-container \ + stamps-app:test # Not binding port for CI, but still exposed in Dockerfile + + # Wait briefly to check for immediate crashes + sleep 5 + + # Check if container is still running + if ! docker ps | grep test-container > /dev/null; then + echo "Container crashed during startup" + docker logs test-container + exit 1 + fi + + echo "Container built and started successfully" + + # Show logs + docker logs test-container + + # Clean up + docker stop test-container + docker rm test-container \ No newline at end of file diff --git a/.gitignore b/.gitignore index 37358fb7b..b9bb5a18d 100644 --- a/.gitignore +++ b/.gitignore @@ -8,3 +8,10 @@ # Fresh build directory _fresh/ node_modules/ + +db.log +reports/ +openapitools.json +package-lock.json +app.log +logs/ diff --git a/.redocly.lint-ignore.yaml b/.redocly.lint-ignore.yaml new file mode 100644 index 000000000..725712bca --- /dev/null +++ b/.redocly.lint-ignore.yaml @@ -0,0 +1,13 @@ +# This file instructs Redocly's linter to ignore the rules contained for specific parts of your API. +# See https://redocly.com/docs/cli/ for more information. +schema.yml: + no-ambiguous-paths: + - '#/paths/~1api~1v2~1src101~1{deploy_hash}~1deploy' + - '#/paths/~1api~1v2~1src101~1{deploy_hash}~1total' + - '#/paths/~1api~1v2~1src101~1balance~1{address}' + - '#/paths/~1api~1v2~1src101~1index~1{deploy_hash}~1{index}' + - '#/paths/~1api~1v2~1stamps~1balance~1{address}' + - '#/paths/~1api~1v2~1stamps~1block~1{block_index}' + - '#/paths/~1api~1v2~1stamps~1ident~1{ident}' + operation-2xx-response: + - '#/paths/~1api~1v2~1error/get/responses' diff --git a/.redocly.yaml b/.redocly.yaml new file mode 100644 index 000000000..6841f782e --- /dev/null +++ b/.redocly.yaml @@ -0,0 +1,19 @@ +extends: + - recommended +apis: + main: + root: schema.yml + rules: + operation-2xx-response: + severity: error + excludePaths: + - '/api/v2/error#get' + operation-4xx-response: warn + no-path-trailing-slash: warn + no-server-example.com: warn + no-empty-servers: warn + operation-operationId-unique: warn + no-invalid-media-type-examples: warn + theme: + openapi: + hideLogo: true \ No newline at end of file diff --git a/.vscode/launch.json b/.vscode/launch.json index 7a836dd6b..fb79a4612 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -1,45 +1,28 @@ -// { -// // Use IntelliSense to learn about possible attributes. -// // Hover to view descriptions of existing attributes. 
-// // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 -// "version": "0.2.0", -// "configurations": [ -// { -// "request": "launch", -// "name": "Launch Program", -// "type": "node", -// "program": "${workspaceFolder}/main.ts", -// "cwd": "${workspaceFolder}", -// "runtimeExecutable": "/snap/bin/deno", -// "runtimeArgs": [ -// "run", -// "--inspect-wait", -// "--allow-all", -// "--allow-read", -// "--allow-env", -// "--watch=static/,routes/,components/,islands/" -// ], -// "attachSimplePort": 9229 -// } -// ] -// } - { "version": "0.2.0", "configurations": [ { - "name": "Deno: Run", - "type": "node", + "name": "Deno Fresh: Debug", "request": "launch", + "type": "node", "cwd": "${workspaceFolder}", "runtimeExecutable": "deno", "runtimeArgs": [ - "run", - "--inspect-brk", - "-A", - "./main.ts" // replace with your main file + "task", + "dev" ], - "attachSimplePort": 9229 + "attachSimplePort": 9229, + "restart": true, + "outputCapture": "std", + "preLaunchTask": "run-local-script", + "postDebugTask": "deno: cleanup-ports", + "env": { + "DENO_ENV": "development", + "DENO_V8_FLAGS": "--max-old-space-size=8192" + }, + "console": "internalConsole", + "internalConsoleOptions": "openOnSessionStart", + "autoAttachChildProcesses": true } ] } diff --git a/.vscode/settings.json b/.vscode/settings.json index 8fe307e65..c202ba5de 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,7 +1,12 @@ { "deno.enable": true, "deno.lint": true, - // "deno.importMap": "./import_map.json", + "deno.importMap": "./deno.json", + "deno.suggest.imports.hosts": { + "https://deno.land": true, + "https://cdn.nest.land": true, + "https://crux.land": true + }, "deno.codeLens.test": true, "editor.defaultFormatter": "denoland.vscode-deno", "editor.formatOnSave": true, @@ -27,8 +32,16 @@ "editor.formatOnSave": true, "editor.tabSize": 2 }, - "cSpell.words": [ - "psbt", - "utxos" - ] + "[plaintext]": { + "editor.formatOnSave": false + }, + "files.associations": { + ".cursorrules": "plaintext" + }, + "[yaml]": { + "editor.defaultFormatter": "esbenp.prettier-vscode" + }, + "[css]": { + "editor.defaultFormatter": "vscode.css-language-features" + } } diff --git a/.vscode/tasks.json b/.vscode/tasks.json new file mode 100644 index 000000000..beb4e91a3 --- /dev/null +++ b/.vscode/tasks.json @@ -0,0 +1,33 @@ +{ + "version": "2.0.0", + "tasks": [ + { + "label": "deno: cleanup-ports", + "type": "shell", + "command": "bash", + "args": [ + "-c", + "pids=$(lsof -ti:9229,8000 -sTCP:LISTEN -c deno 2>/dev/null) && if [ ! 
-z \"$pids\" ]; then echo \"Terminating Deno processes: $pids\" && kill -15 $pids && sleep 2 && kill -0 $pids 2>/dev/null || true; fi || true" + ], + "presentation": { + "reveal": "never", + "panel": "shared", + "echo": false, + "close": true + }, + "problemMatcher": [] + }, + { + "label": "run-local-script", + "type": "shell", + "command": "bash", + "args": ["${workspaceFolder}/scripts/local.sh"], + "presentation": { + "reveal": "always", + "panel": "shared", + "echo": false, + "close": true + } + } + ] +} diff --git a/Dockerfile b/Dockerfile index 4ba99bb69..f97796a6b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,15 +1,61 @@ -# Utilizar la imagen oficial de Deno -FROM denoland/deno:alpine +FROM denoland/deno:alpine-2.1.4 +# Set environment variables +ENV HOME=/app \ + DENO_DIR=/app/.deno \ + DENO_ENV=production \ + NODE_DEBUG=* \ + XDG_CONFIG_HOME=/app/.config \ + XDG_CACHE_HOME=/app/.cache \ + XDG_DATA_HOME=/app/.local/share \ + NPM_CONFIG_CACHE=/app/.npm + +# Install additional tools +RUN apk add --no-cache bash + +# Create necessary directories +RUN mkdir -p /app \ + /app/.deno \ + /app/.npm \ + /app/.config \ + /app/.cache \ + /app/.local/share \ + /app/node_modules/.deno + +# Set up permissions more securely +RUN chown -R deno:deno /app && \ + chmod -R 755 /app && \ + chmod -R 775 /app/.deno /app/.npm /app/node_modules/.deno WORKDIR /app +# Copy files and set permissions +COPY --chown=deno:deno . . + +# Clean any existing caches +RUN rm -rf node_modules/.deno && \ + rm -rf .npm && \ + rm -rf .deno + +# Switch to deno user for build steps +USER deno + +# Build steps with all permissions granted and error handling +RUN deno run --allow-all main.ts build --lock=lock.json --lock-write || (echo "Build failed" && exit 1) -COPY . . +# Cache dependencies with proper error handling +RUN DENO_DIR=/app/.deno \ + NPM_CONFIG_CACHE=/app/.npm \ + deno cache --reload --lock=lock.json main.ts || (echo "Cache failed" && exit 1) +# Verify the build environment +RUN echo "Verifying environment and permissions:" && \ + ls -la /app && \ + ls -la /app/.deno || true && \ + ls -la /app/node_modules/.deno || true && \ + ls -la /app/.npm || true EXPOSE 8000 -RUN deno upgrade -RUN deno run -A dev.ts build -CMD ["deno", "run", "--allow-net", "--allow-read", "--allow-run", "--allow-write", "--allow-env", "main.ts"] +# Add all necessary permissions to the runtime command +CMD ["deno", "run", "--allow-net", "--allow-read", "--allow-run", "--allow-write", "--allow-env", "--allow-sys", "main.ts"] diff --git a/README.md b/README.md index e5fcb4e18..ec3fa578e 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,130 @@ # BITCOIN STAMPS EXPLORER AND API -CHECK ENV VARS AND PLEAS ENSURE THE DB_USER ONLY HAS READ PERMISION +This is the official API and block explorer for +[Bitcoin Stamps](https://stampchain.io/). It provides a comprehensive interface +for exploring Bitcoin Stamps transactions and metadata, working in conjunction +with the [Bitcoin Stamps Indexer](/~https://github.com/stampchain-io/btc_stamps). -This is the API for https://stampchain.io/docs and the (WIP) open source block explorer for Bitcoin Stamps. This is intended to be run concurrently with the Bitcoin Stamps Indexer database /~https://github.com/stampchain-io/btc_stamps +## Features +- Full Bitcoin Stamps block explorer +- API with OpenAPI/Swagger documentation +- Support for SRC-20, SRC-721, and SRC-101 token standards + +## Prerequisites + +1. 
**Install Deno** + > ⚠️ **Required Version**: 2.1.4 + ```sh + curl -fsSL https://deno.land/install.sh | sh + ``` + + Add Deno to your path: + ```sh + echo 'export DENO_INSTALL="$HOME/.deno"' >> ~/.bashrc + echo 'export PATH="$DENO_INSTALL/bin:$PATH"' >> ~/.bashrc + source ~/.bashrc + ``` + +2. **Required Services** + - MySQL/MariaDB (with read-only user access) + - Redis (for caching) + - Bitcoin Stamps Indexer database + +## Installation + +1. **Clone the repository:** + ```sh + git clone /~https://github.com/stampchain-io/bitcoin-stamps-explorer.git + cd bitcoin-stamps-explorer + ``` + +2. **Environment Setup** + ```sh + cp .env.sample .env + # Edit .env with your configuration + ``` + + ⚠️ **IMPORTANT**: Ensure DB_USER has READ-ONLY permissions for security! + +## Development Commands + +```sh +# Start development server with hot reload and debugging +deno task dev + +# Code quality checks (formatting, linting, type checking) +deno task check + +# Update Fresh framework +deno task update + +# Decode SRC-20 transactions +deno task decode +deno task decode_olga + +# Run schema validation +deno task validate:schema +``` + +## Production Deployment + +1. **Build the project:** + ```sh + deno task build + ``` + +2. **Start production server:** + ```sh + deno task start + ``` + +Docker deployment is also supported: + +```sh +docker build -t btc-stamps-explorer:2.1.4 . +docker run -p 8000:8000 btc-stamps-explorer:2.1.4 +``` + +The container uses: + +- Deno 2.1.4 Alpine base image +- Production environment +- Port 8000 +- Required permissions for network, file system, and environment variables + +For development with Docker: + +```sh +# Build with development tag +docker build -t btc-stamps-explorer:dev . + +# Run with mounted volumes for development +docker run -p 8000:8000 \ + --env-file .env \ + -v $(pwd):/app \ + btc-stamps-explorer:dev deno task dev +``` + +## API Documentation + +- OpenAPI/Swagger documentation available at `/docs` +- Schema validation with `deno task validate:schema` + +## Contributing + +1. Fork the repository +2. Create your feature branch +3. Run `deno task check` to ensure code quality +4. Add tests for new features +5. Submit a pull request + +## Additional Resources + +- [Bitcoin Stamps Indexer](/~https://github.com/stampchain-io/btc_stamps) +- [API Documentation](https://stampchain.io/docs) +- [Discussion Board](/~https://github.com/orgs/stampchain-io/discussions) + +## License + +This project is licensed under the [AGPL-3.0 License](LICENSE.md). 
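As a quick sanity check that a local instance and its documented API are up, something like the following can be run with Deno. The default port and the /api/v2/src20/index path are taken from this repository's docs and schema; treat the exact query parameters as assumptions.

```typescript
// Smoke-test sketch: run with `deno run --allow-net --allow-env smoke.ts`.
// Assumes the server is listening on port 8000 (or DEV_BASE_URL is set).
const base = Deno.env.get("DEV_BASE_URL") ?? "http://localhost:8000";

const res = await fetch(`${base}/api/v2/src20/index?limit=5&page=1`);
if (!res.ok) throw new Error(`API returned ${res.status}`);

const body = await res.json();
console.log("last_block:", body.last_block, "rows:", body.data?.length);
```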
diff --git a/TODO.md b/TODO.md deleted file mode 100644 index 985adb7a8..000000000 --- a/TODO.md +++ /dev/null @@ -1,80 +0,0 @@ -# API: - -- [DONE] Review stamps/[id] and cursed/[id] - - add into cursed and stamps query folders - - join issuances to update supply and locked status - - divisibility 🤯 - - Reissuance problem when querying by stamp number or tx_hash -- [DONE] Add retry in handleQuery and connectDb -- [WIP] Blocks endpoint: - - [DONE] create blocks_api file to host the query logic that will use api and - pages in the explorer - - [WIP]related blocks to show previous and next block for a given one - - [DONE]add to block queries to be able to search by hash -- [TODO] Other endpoints: - - [DOING] Migrate all the logic from enpoints to its own file to be reused by - pages and endpoint - - this is done for /stamps and /stamps/balance/[address] -- [DONE] create classes for database functions - will make importing/exporting - easier and faster -- [TODO] /stamps/block and /stamps/balance and /block_count doesnt work -- [TODO] /balance and /cursed pagination - -# EXPLORER: - -- [DONE] Retrieve images from the static/stamps folder -- [DONE] new images are not being updated.... need to restart the container to - be updated, asking for solutions to this... -- [WIP] Balance page -- [WIP] Wallet integration: - - [DONE] Unisat - - [DONE] Leather - - [WIP] OKX -- [TODO] Minting functions -- [TODO] Work on blocks page -- [TODO] Work on index page -- [TODO] Work on stamp page -- [TODO] tons of work... - -# SRC20: - -- GET[tx_hash] return src20 transaction info (fromAddress, toAddress, op, - validity) -- GET[address][tick] return balance for this address and tick - - remove cpid from the result(is noise for them) -- GET[block_index] return all valid tx for src20 in that block -- POST[{ "method": "mint"|"transfer"|"deploy", params: [] }] - - check if is a valid posible tx - - create unsigned transaction - -# OPENAPI - -## stamps - -[x] /stamps/index [x] /stamps/[id] [x] /stamps/ident/[ident] [x] -/stamps/block/[block_index] [x] /stamps/balance/[address] - -## cursed - -[x] /cursed/index [x] /cursed/[id] [x] /cursed/ident [x] -/cursed/block/[block_index] [x] /cursed/balance/[address] - -## issuances - -[x] /issuances/[id] - -## block - -[x] /block/[block_index] [x] /block/related/[block_index] [x] -/block/block_count/[...number] - -## balance - -[x] /balance/address - -## src20 - -[x] /src20/index [] /src20/create [x] /src20/tx/[tx_hash] [x] /src20/tick [x] -/src20/tick/[tick] [x] /src20/tick/[tick]/deploy [x] /src20/block/[block_index] -[x] /src/block/[block_index]/[tick] [x] /src20/balance/[address] [x] -/src20/balance/[address]/[tick] diff --git a/client/hooks/useConfig.ts b/client/hooks/useConfig.ts new file mode 100644 index 000000000..fa67f2e6c --- /dev/null +++ b/client/hooks/useConfig.ts @@ -0,0 +1,30 @@ +import { useEffect, useState } from "preact/hooks"; + +export function useConfig() { + const [config, setConfig] = useState(null); + const [isLoading, setIsLoading] = useState(true); + const [error, setError] = useState(null); + + useEffect(() => { + fetch("/config") + .then((response) => { + if (!response.ok) { + throw new Error(`HTTP error! 
status: ${response.status}`); + } + return response.json(); + }) + .then((data: T) => { + setConfig(data); + setError(null); + }) + .catch((error) => { + console.error("Error loading config:", error); + setError(error.message); + }) + .finally(() => { + setIsLoading(false); + }); + }, []); + + return { config, isLoading, error }; +} diff --git a/client/hooks/useFairmintForm.ts b/client/hooks/useFairmintForm.ts new file mode 100644 index 000000000..7a8a736f1 --- /dev/null +++ b/client/hooks/useFairmintForm.ts @@ -0,0 +1,217 @@ +import { useEffect, useState } from "preact/hooks"; +import { useConfig } from "$client/hooks/useConfig.ts"; +import { useFeePolling } from "$client/hooks/useFeePolling.ts"; +import { + showConnectWalletModal, + walletContext, +} from "$client/wallet/wallet.ts"; +import axiod from "axiod"; +import { decodeBase64 } from "@std/encoding/base64"; +import { encodeHex } from "@std/encoding/hex"; +import { Config } from "$globals"; +import { logger } from "$lib/utils/logger.ts"; +import type { AncestorInfo } from "$types/index.d.ts"; + +interface FairmintFormState { + asset: string; + quantity: string; + fee: number; + BTCPrice: number; + jsonSize: number; + utxoAncestors?: AncestorInfo[]; +} + +export function useFairmintForm(fairminters: any[]) { + const { config, isLoading: configLoading } = useConfig(); + const { fees, loading: feeLoading, fetchFees } = useFeePolling(300000); + const [isLoading, setIsLoading] = useState(configLoading || feeLoading); + + const [formState, setFormState] = useState({ + asset: "", + quantity: "", + fee: 0, + BTCPrice: 0, + jsonSize: 0, + utxoAncestors: [], + }); + + const [isSubmitting, setIsSubmitting] = useState(false); + const [submissionMessage, setSubmissionMessage] = useState< + { + message: string; + txid?: string; + } | null + >(null); + const [apiError, setApiError] = useState(""); + + const { wallet } = walletContext; + const address = wallet.address; + + useEffect(() => { + setIsLoading(configLoading || feeLoading); + }, [configLoading, feeLoading]); + + useEffect(() => { + if (fees && !feeLoading) { + const recommendedFee = Math.round(fees.recommendedFee); + setFormState((prev) => ({ ...prev, fee: recommendedFee })); + } + }, [fees, feeLoading]); + + const handleAssetChange = (e: Event) => { + const selectedAsset = (e.target as HTMLSelectElement).value; + const selectedFairminter = fairminters.find( + (fm) => fm.asset === selectedAsset, + ); + setFormState((prev) => ({ + ...prev, + asset: selectedAsset, + quantity: selectedFairminter + ? 
selectedFairminter.max_mint_per_tx + : prev.quantity, + })); + }; + + const handleInputChange = (e: Event, field: string) => { + const value = (e.target as HTMLInputElement).value; + setFormState((prev) => ({ ...prev, [field]: value })); + }; + + const handleChangeFee = (newFee: number) => { + setFormState((prev) => ({ ...prev, fee: newFee })); + }; + + const handleSubmit = async () => { + if (!walletContext.isConnected) { + showConnectWalletModal.value = true; + return; + } + + if (!config) { + logger.error("ui", { + message: "Configuration not loaded", + context: "useFairmintForm", + }); + setApiError("Configuration not loaded"); + return; + } + + if (!formState.asset || !formState.quantity || formState.fee <= 0) { + logger.warn("ui", { + message: "Invalid form state", + context: "useFairmintForm", + formState, + }); + setApiError("Please fill in all required fields."); + return; + } + + setIsSubmitting(true); + setSubmissionMessage(null); + setApiError(""); + + try { + logger.debug("ui", { + message: "Submitting fairmint request", + context: "useFairmintForm", + payload: { + address, + asset: formState.asset, + quantity: formState.quantity, + fee_per_kb: formState.fee * 1000, + }, + }); + + const response = await axiod.post("/api/v2/fairmint/compose", { + address, + asset: formState.asset, + quantity: formState.quantity, + options: { + fee_per_kb: formState.fee * 1000, + }, + service_fee: config?.MINTING_SERVICE_FEE, + service_fee_address: config?.MINTING_SERVICE_FEE_ADDRESS, + }); + + logger.debug("ui", { + message: "Received API response", + context: "useFairmintForm", + response: response.data, + }); + + const psbtBase64 = response.data?.result?.psbt; + + if (!psbtBase64 || typeof psbtBase64 !== "string") { + throw new Error("Invalid response from server: PSBT not found."); + } + + // Convert the PSBT from Base64 to Hex + const psbtUint8Array = decodeBase64(psbtBase64); + const psbtHexArray = encodeHex(psbtUint8Array); + const psbtHex = new TextDecoder().decode(new Uint8Array(psbtHexArray)); + + logger.debug("ui", { + message: "Processing PSBT", + context: "useFairmintForm", + psbtHex, + }); + + const { signed, txid, error } = await walletContext.signPSBT( + wallet, + psbtHex, + [], + true, + ); + + if (signed) { + logger.info("ui", { + message: "Transaction successfully broadcasted", + context: "useFairmintForm", + txid, + }); + setSubmissionMessage({ message: "Transaction broadcasted.", txid }); + } else { + logger.error("ui", { + message: "Transaction signing failed", + context: "useFairmintForm", + error, + }); + setSubmissionMessage({ message: `Transaction failed: ${error}` }); + } + } catch (error: unknown) { + logger.error("ui", { + message: "Error during submission", + context: "useFairmintForm", + error: error instanceof Error ? error.message : String(error), + }); + setApiError( + error instanceof Error + ? 
error.message + : "An unexpected error occurred.", + ); + } finally { + setIsSubmitting(false); + } + }; + + const feeEstimationParams = { + type: "fairmint", + inputType: "P2WPKH", + outputTypes: ["P2WPKH"], + feeRate: formState.fee, + }; + + return { + formState, + handleAssetChange, + handleInputChange, + handleSubmit, + handleChangeFee, + fetchFees, + isLoading, + isSubmitting, + submissionMessage, + apiError, + feeEstimationParams, + }; +} diff --git a/client/hooks/useFeePolling.ts b/client/hooks/useFeePolling.ts new file mode 100644 index 000000000..ad0a3776e --- /dev/null +++ b/client/hooks/useFeePolling.ts @@ -0,0 +1,61 @@ +import { useCallback, useEffect, useState } from "preact/hooks"; +import { getCurrentBlock, getRecommendedFees } from "$lib/utils/mempool.ts"; + +interface Fees { + economyFee: number; + fastestFee: number; + halfHourFee: number; + hourFee: number; + recommendedFee: number; // Keep this spelling to match previous usage + block: number; + _economyFee: number; + _fastestFee: number; + _halfHourFee: number; + _hourFee: number; +} + +export const useFeePolling = (intervalDuration = 300000) => { + const [fees, setFees] = useState(null); + const [loading, setLoading] = useState(true); + + const fetchFees = useCallback(async () => { + try { + const newFees = await getRecommendedFees(); + const block = await getCurrentBlock(); + + if (newFees && block) { + const allFees: Fees = { + ...newFees, + _economyFee: newFees.economyFee, + _fastestFee: newFees.fastestFee, + _halfHourFee: newFees.halfHourFee, + _hourFee: newFees.hourFee, + economyFee: newFees.economyFee, + fastestFee: newFees.fastestFee, + recommendedFee: newFees.fastestFee, + block: block, + }; + setFees(allFees); + } + setLoading(false); + } catch (error) { + console.error("Error fetching fees:", error); + setLoading(false); + } + }, []); + + useEffect(() => { + fetchFees(); + const intervalId = setInterval(fetchFees, intervalDuration); + + return () => clearInterval(intervalId); + }, [fetchFees, intervalDuration]); + + return { + fees, + loading, + fetchFees, + satsPerVB: fees?.recommendedFee, + satsPerKB: fees?.recommendedFee * 1000, + }; +}; diff --git a/client/hooks/useSRC20Form.ts b/client/hooks/useSRC20Form.ts new file mode 100644 index 000000000..e4bc15efe --- /dev/null +++ b/client/hooks/useSRC20Form.ts @@ -0,0 +1,634 @@ +import { useEffect, useState } from "preact/hooks"; +import { walletContext } from "$client/wallet/wallet.ts"; +import axiod from "axiod"; +import { useConfig } from "$client/hooks/useConfig.ts"; +import { useFeePolling } from "$client/hooks/useFeePolling.ts"; +import { fetchBTCPriceInUSD } from "$lib/utils/balanceUtils.ts"; +import { Config } from "$globals"; +import { logger } from "$lib/utils/logger.ts"; +import { debounce } from "$lib/utils/debounce.ts"; +interface PSBTFees { + estMinerFee: number; + totalDustValue: number; + hasExactFees: boolean; + totalValue: number; + effectiveFeeRate: number; + estimatedSize?: number; + totalVsize?: number; +} + +interface SRC20FormState { + toAddress: string; + token: string; + amt: string; + fee: number; + feeError: string; + BTCPrice: number; + jsonSize: number; + apiError: string; + toAddressError: string; + tokenError: string; + amtError: string; + max: string; + maxError: string; + lim: string; + limError: string; + dec: string; + x: string; + tg: string; + web: string; + email: string; + file: File | null; + psbtFees?: PSBTFees; + maxAmount?: string; +} + +interface TxDetails { + hex: string; + est_tx_size: number; + input_value: number; 
+ total_dust_value: number; + est_miner_fee: number; + fee: number; + change_value: number; + inputsToSign: number[]; + sourceAddress: string; + changeAddress: string; +} + +export class SRC20FormController { + private static prepareTxDebounced = debounce(async ( + params: { + wallet: { address?: string }; + formState: SRC20FormState; + action: string; + trxType: string; + canEstimateFees: (partial: boolean) => boolean; + }, + callbacks: { + setFormState: (fn: (prev: SRC20FormState) => SRC20FormState) => void; + logger: typeof logger; + }, + ) => { + const { wallet, formState, action, trxType } = params; + const { setFormState, logger } = callbacks; + + if (!wallet.address) return; + + try { + const response = await axiod.post("/api/v2/src20/create", { + sourceAddress: wallet.address, + toAddress: action === "transfer" ? formState.toAddress : wallet.address, + satsPerVB: formState.fee, + trxType, + op: action, + tick: formState.token || "TEST", + ...(action === "deploy" && { + max: formState.max || "1000", + lim: formState.lim || "1000", + dec: formState.dec || "18", + ...(formState.x && { x: formState.x }), + ...(formState.tg && { tg: formState.tg }), + ...(formState.web && { web: formState.web }), + ...(formState.email && { email: formState.email }), + }), + ...(["mint", "transfer"].includes(action) && { + amt: formState.amt || "1", + }), + }); + + if (response.data) { + // Log raw response for debugging + logger.debug("stamps", { + message: "Raw PSBT response", + data: { + feeDetails: response.data.feeDetails, + totalOutputValue: response.data.totalOutputValue, + estMinerFee: response.data.estMinerFee, + fullResponse: response.data, + }, + }); + + // Extract fee values from response + const minerFee = Number(response.data.feeDetails?.total) || 0; + const dustValue = Number(response.data.totalOutputValue) || 1683; // Default to standard SRC20 dust + const totalValue = minerFee + dustValue; + + logger.debug("stamps", { + message: "Extracted fee values", + data: { + minerFee, + dustValue, + totalValue, + feeDetails: response.data.feeDetails, + rawResponse: { + totalOutputValue: response.data.totalOutputValue, + feeDetails: response.data.feeDetails, + }, + }, + }); + + setFormState((prev) => { + const newState = { + ...prev, + psbtFees: { + estMinerFee: Number(response.data.est_miner_fee), + totalDustValue: Number(response.data.total_dust_value), + hasExactFees: true, + totalValue: Number(response.data.est_miner_fee) + + Number(response.data.total_dust_value), + effectiveFeeRate: + Number(response.data.feeDetails?.effectiveFeeRate) || 0, + estimatedSize: Number(response.data.est_tx_size), + totalVsize: Number(response.data.feeDetails?.totalVsize), + hex: response.data.hex, + inputsToSign: response.data.inputsToSign, + }, + }; + + logger.debug("stamps", { + message: "Updated form state with fees", + data: { + oldPsbtFees: prev.psbtFees, + newPsbtFees: newState.psbtFees, + rawResponse: response.data, + calculatedTotal: totalValue, + responseTotal: response.data.fee, + }, + }); + + return newState; + }); + } + } catch (error) { + logger.error("stamps", { + message: "Fee calculation failed", + error, + data: { + action, + token: formState.token, + fee: formState.fee, + }, + }); + } + }, 500); + + private static checkTokenExistenceDebounced = debounce(async ( + token: string, + setFormState: (fn: (prev: SRC20FormState) => SRC20FormState) => void, + ) => { + try { + const response = await axiod.get(`/api/v2/src20/tick/${token}/deploy`); + if (response.data && response.data.data) { + 
setFormState((prev) => ({ + ...prev, + tokenError: "This tick already exists.", + })); + } else { + setFormState((prev) => ({ ...prev, tokenError: "" })); + } + } catch (error) { + console.error("Error checking tick existence:", error); + setFormState((prev) => ({ ...prev, tokenError: "" })); + } + }, 800); + + static prepareTx( + ...args: Parameters + ) { + return this.prepareTxDebounced(...args); + } + + static checkTokenExistence( + ...args: Parameters + ) { + return this.checkTokenExistenceDebounced(...args); + } + + static cancelPrepareTx() { + this.prepareTxDebounced.cancel(); + } + + static cancelTokenCheck() { + this.checkTokenExistenceDebounced.cancel(); + } +} + +export function useSRC20Form( + action: string, + trxType: "olga" | "multisig" = "multisig", + initialToken?: string, +) { + logger.debug("ui", { + message: "useSRC20Form initialized", + action, + trxType, + initialToken, + }); + + const { config } = useConfig(); + const { fees, fetchFees } = useFeePolling(300000); // 5 minutes + const [apiError, setApiError] = useState(""); + + const [formState, setFormState] = useState({ + toAddress: "", + token: initialToken || "", + amt: "", + fee: 0, + feeError: "", + BTCPrice: 0, + jsonSize: 0, + apiError: "", + toAddressError: "", + tokenError: "", + amtError: "", + max: "", + maxError: "", + lim: "", + limError: "", + dec: "18", + x: "", + tg: "", + web: "", + email: "", + file: null as File | null, + psbtFees: { + estMinerFee: 0, + totalDustValue: 0, + hasExactFees: false, + totalValue: 0, + est_tx_size: 0, + }, + }); + + const [isSubmitting, setIsSubmitting] = useState(false); + const [submissionMessage, setSubmissionMessage] = useState< + { + message: string; + txid?: string; + } | null + >(null); + + const { wallet } = walletContext; + + useEffect(() => { + if (fees) { + const recommendedFee = Math.round(fees.recommendedFee); + setFormState((prev) => ({ ...prev, fee: recommendedFee })); + } + }, [fees]); + + useEffect(() => { + const fetchPrice = async () => { + const price = await fetchBTCPriceInUSD(); + setFormState((prev) => ({ ...prev, BTCPrice: price })); + }; + fetchPrice(); + }, []); + + function validateFormState( + formState: SRC20FormState, + action: string, + ): { isValid: boolean; error?: string } { + // Basic validations + if (!formState.token) { + return { isValid: false, error: "Token is required" }; + } + if (formState.fee <= 0) { + return { isValid: false, error: "Fee must be greater than 0" }; + } + + // Action-specific validations + switch (action) { + case "deploy": + if (!formState.max || !formState.lim || !formState.dec) { + return { + isValid: false, + error: "Max, limit and decimals are required for deploy", + }; + } + try { + const maxValue = BigInt(formState.max); + const limValue = BigInt(formState.lim); + if (limValue > maxValue) { + return { isValid: false, error: "Limit cannot exceed max supply" }; + } + } catch { + return { isValid: false, error: "Invalid max or limit value" }; + } + break; + + case "transfer": + if (!formState.toAddress) { + return { isValid: false, error: "Recipient address is required" }; + } + if (!formState.amt) { + return { isValid: false, error: "Amount is required" }; + } + break; + + case "mint": + if (!formState.amt) { + return { isValid: false, error: "Amount is required" }; + } + break; + } + + return { isValid: true }; + } + + // Update the effect to use validation + useEffect(() => { + const { isValid, error } = validateFormState(formState, action); + + if (!wallet?.address || isSubmitting) return; + + if 
(!isValid) { + setFormState((prev) => ({ ...prev, apiError: error || "" })); + return; + } + + // Clear any previous errors + setFormState((prev) => ({ ...prev, apiError: "" })); + + // Prepare transaction only if all validations pass + SRC20FormController.prepareTx( + { + wallet, + formState, + action, + trxType, + canEstimateFees: () => isValid, + }, + { + setFormState, + logger, + }, + ); + + return () => { + SRC20FormController.cancelPrepareTx(); + }; + }, [ + wallet?.address, + formState.fee, + formState.token, + formState.amt, + formState.toAddress, + action, + trxType, + isSubmitting, + ]); + + const handleInputChange = (e: Event, field: string) => { + const value = (e.target as HTMLInputElement).value; + let newValue = value; + + if (field === "token") { + newValue = value.toUpperCase().slice(0, 5); + setFormState((prev) => ({ + ...prev, + [field]: newValue, + [`${field}Error`]: "", + })); + + // Only check token existence if we're deploying and have a value + if (action === "deploy" && newValue) { + return SRC20FormController.checkTokenExistence(newValue, setFormState); + } + return; // Add explicit return for token field + } + + if (["lim", "max"].includes(field)) { + newValue = handleIntegerInput(value, field); + } else if (field === "dec") { + newValue = handleDecimalInput(value); + } + + setFormState((prev) => ({ + ...prev, + [field]: newValue, + [`${field}Error`]: "", + })); + + return; // Add explicit return for all other cases + }; + + // Add blur handler + const handleInputBlur = (field: string) => { + if (["max", "lim"].includes(field)) { + try { + const maxValue = BigInt(formState.max || "0"); + const limValue = BigInt(formState.lim || "0"); + + if (maxValue > 0n && limValue > maxValue) { + // Only adjust lim if max is set and lim exceeds it + setFormState((prev) => ({ + ...prev, + lim: maxValue.toString(), + limError: + "Limit Per Mint cannot exceed Max Circulation. 
Adjusted to match Max Circulation.", + })); + } else if (maxValue > 0n && limValue > 0n) { + // Clear any errors if values are valid + setFormState((prev) => ({ + ...prev, + maxError: "", + limError: "", + })); + } + } catch { + // Invalid numbers, handled by handleIntegerInput + } + } + }; + + const handleIntegerInput = (value: string, field: string): string => { + const sanitizedValue = value.replace(/\D/g, ""); + if (sanitizedValue === "") return ""; + + try { + const bigIntValue = BigInt(sanitizedValue); + const maxUint64 = BigInt("18446744073709551615"); // 2^64 - 1 + + if (bigIntValue <= maxUint64) { + return sanitizedValue; + } else { + setFormState((prev) => ({ + ...prev, + [`${field}Error`]: "Value exceeds maximum allowed (2^64 - 1)", + })); + return value; + } + } catch { + return value; + } + }; + + const handleDecimalInput = (value: string): string => { + const sanitizedValue = value.replace(/\D/g, ""); + const numValue = parseInt(sanitizedValue, 10); + if ( + sanitizedValue === "" || + (!isNaN(numValue) && numValue >= 0 && numValue <= 18) + ) { + return sanitizedValue; + } + return formState.dec; + }; + + const handleSubmit = async () => { + try { + setIsSubmitting(true); + setApiError(""); + + // Log submission attempt + logger.debug("stamps", { + message: "Transaction submission started", + data: { + action, + trxType, + satsPerVB: formState.fee, + currentEstimate: formState.psbtFees, + }, + }); + + // Use the stored PSBT if available + if (formState.psbtFees?.hex && formState.psbtFees?.inputsToSign) { + const walletResult = await walletContext.signPSBT( + wallet, + formState.psbtFees.hex, + formState.psbtFees.inputsToSign, + true, + ); + + if (walletResult.signed) { + setSubmissionMessage({ + message: "Transaction broadcasted successfully.", + txid: walletResult.txid, + }); + } else if (walletResult.cancelled) { + setSubmissionMessage({ + message: "Transaction signing cancelled by user.", + }); + } else { + setSubmissionMessage({ + message: `Transaction signing failed: ${walletResult.error}`, + }); + } + + return walletResult; + } else { + // Create new PSBT only if we don't have one + const response = await axiod.post("/api/v2/src20/create", { + sourceAddress: wallet?.address, + toAddress: action === "transfer" + ? 
formState.toAddress + : wallet?.address, + satsPerVB: formState.fee, + trxType, + op: action, + tick: formState.token, + ...(action === "deploy" && { + max: formState.max, + lim: formState.lim, + dec: formState.dec, + ...(formState.x && { x: formState.x }), + ...(formState.tg && { tg: formState.tg }), + ...(formState.web && { web: formState.web }), + ...(formState.email && { email: formState.email }), + }), + ...(["mint", "transfer"].includes(action) && { + amt: formState.amt, + }), + }); + + // Log the PSBT response + logger.debug("stamps", { + message: "Transaction PSBT created", + data: { + estimatedSize: response.data.est_tx_size, + minerFee: response.data.est_miner_fee, + totalValue: response.data.input_value, + changeValue: response.data.change_value, + }, + }); + + if (!response.data?.hex) { + throw new Error("No transaction hex received from server"); + } + + logger.debug("ui", { + message: "Preparing to sign PSBT", + hexLength: response.data.hex.length, + inputsToSignCount: response.data.inputsToSign?.length, + }); + + const walletResult = await walletContext.signPSBT( + wallet, + response.data.hex, + response.data.inputsToSign || [], + true, + ); + + logger.debug("ui", { + message: "Wallet signing completed", + result: walletResult, + }); + + if (walletResult.signed) { + setSubmissionMessage({ + message: "Transaction broadcasted successfully.", + txid: walletResult.txid, + }); + } else if (walletResult.cancelled) { + setSubmissionMessage({ + message: "Transaction signing cancelled by user.", + }); + } else { + setSubmissionMessage({ + message: `Transaction signing failed: ${walletResult.error}`, + }); + } + + return response.data; + } + } catch (error) { + logger.error("ui", { + message: `${action} error occurred`, + error: error instanceof Error ? 
error.message : String(error), + details: error, + }); + + if (error instanceof Error) { + const apiError = (error as any).response?.data?.error; + setApiError( + apiError || error.message || "An unexpected error occurred", + ); + } else { + setApiError("An unexpected error occurred"); + } + } finally { + setIsSubmitting(false); + } + }; + + const handleChangeFee = (newFee: number) => { + setFormState((prev) => ({ ...prev, fee: newFee })); + }; + + return { + formState, + setFormState, + handleChangeFee, + handleInputChange, + handleSubmit, + fetchFees, + config, + isSubmitting, + submissionMessage, + setApiError, + apiError, + handleInputBlur, + }; +} diff --git a/client/hooks/useTransactionForm.ts b/client/hooks/useTransactionForm.ts new file mode 100644 index 000000000..c9269b64d --- /dev/null +++ b/client/hooks/useTransactionForm.ts @@ -0,0 +1,127 @@ +import { useEffect, useState } from "preact/hooks"; +import { useFeePolling } from "$client/hooks/useFeePolling.ts"; +import { fetchBTCPriceInUSD } from "$lib/utils/balanceUtils.ts"; +import { + showConnectWalletModal, + walletContext, +} from "$client/wallet/wallet.ts"; + +interface TransactionFormState { + fee: number; + feeError: string; + BTCPrice: number; + recipientAddress?: string; + addressError?: string; + amount?: string; + amountError?: string; +} + +interface UseTransactionFormProps { + type: "send" | "transfer" | "buy"; + initialFee?: number; +} + +export function useTransactionForm( + { type, initialFee = 0 }: UseTransactionFormProps, +) { + const { fees, loading: feeLoading, fetchFees } = useFeePolling(300000); + const [isSubmitting, setIsSubmitting] = useState(false); + const [error, setError] = useState(null); + const [successMessage, setSuccessMessage] = useState(null); + + const [formState, setFormState] = useState({ + fee: initialFee, + feeError: "", + BTCPrice: 0, + recipientAddress: "", + addressError: "", + amount: "", + amountError: "", + }); + + // Initialize with recommended fee + useEffect(() => { + if (fees?.recommendedFee) { + handleChangeFee(Math.round(fees.recommendedFee)); + } + }, [fees?.recommendedFee]); + + // Fetch BTC price + useEffect(() => { + const fetchPrice = async () => { + const price = await fetchBTCPriceInUSD(); + setFormState((prev) => ({ ...prev, BTCPrice: price })); + }; + fetchPrice(); + }, []); + + const handleChangeFee = (newFee: number) => { + setFormState((prev) => ({ + ...prev, + fee: newFee, + feeError: "", + })); + }; + + const validateForm = () => { + let isValid = true; + const newState = { ...formState }; + + if (formState.fee <= 0) { + newState.feeError = "Fee must be set"; + isValid = false; + } + + if (type !== "buy" && !formState.recipientAddress) { + newState.addressError = "Recipient address is required"; + isValid = false; + } + + if ((type === "send" || type === "transfer") && !formState.amount) { + newState.amountError = "Amount is required"; + isValid = false; + } + + setFormState(newState); + return isValid; + }; + + const handleSubmit = async (submitCallback: () => Promise) => { + if (!walletContext.isConnected) { + showConnectWalletModal.value = true; + return; + } + + setError(null); + setSuccessMessage(null); + + if (!validateForm()) { + return; + } + + setIsSubmitting(true); + + try { + await submitCallback(); + } catch (err) { + console.error("Transaction error:", err); + setError(err instanceof Error ? 
err.message : "An error occurred"); + } finally { + setIsSubmitting(false); + } + }; + + return { + formState, + setFormState, + handleChangeFee, + handleSubmit, + isSubmitting, + error, + setError, + successMessage, + setSuccessMessage, + fetchFees, + isLoading: feeLoading, + }; +} diff --git a/client/hooks/useURLUpdate.ts b/client/hooks/useURLUpdate.ts new file mode 100644 index 000000000..8a78e7cd2 --- /dev/null +++ b/client/hooks/useURLUpdate.ts @@ -0,0 +1,43 @@ +import { useCallback } from "preact/hooks"; +import { + COLLECTION_FILTER_TYPES, + SRC20_FILTER_TYPES, + STAMP_FILTER_TYPES, + WALLET_FILTER_TYPES, +} from "$globals"; + +type FilterTypes = + | STAMP_FILTER_TYPES + | SRC20_FILTER_TYPES + | COLLECTION_FILTER_TYPES + | WALLET_FILTER_TYPES; + +interface URLUpdateParams { + sortBy?: "ASC" | "DESC"; + filterBy?: FilterTypes[]; +} + +export function useURLUpdate() { + const updateURL = useCallback((params: URLUpdateParams) => { + if (typeof self === "undefined") return; + + const url = new URL(self.location.href); + + if (params.sortBy) url.searchParams.set("sortBy", params.sortBy); + + if (params.filterBy !== undefined) { + params.filterBy.length > 0 + ? url.searchParams.set("filterBy", params.filterBy.join(",")) + : url.searchParams.delete("filterBy"); + } + + url.searchParams.set("page", "1"); + + const event = new CustomEvent("fresh-navigate", { + detail: { url: url.toString() }, + }); + self.dispatchEvent(event); + }, []); + + return { updateURL }; +} diff --git a/client/utils/carousel-slider.ts b/client/utils/carousel-slider.ts new file mode 100644 index 000000000..fed10720d --- /dev/null +++ b/client/utils/carousel-slider.ts @@ -0,0 +1,375 @@ +import { Swiper } from "swiper"; +import { Autoplay, Pagination } from "swiper/modules"; +import type { Swiper as SwiperType } from "swiper"; + +type CarouselElement = HTMLElement | null; + +const CAROUSEL_CONFIG = { + // Base dimensions + SLIDES: { + COUNT: { + MOBILE: 3, // Show 3 slides on mobile + DESKTOP: 5, // Show 5 slides on desktop + }, + MAX_WIDTH: 432, // Maximum width of center slide + CONTAINER_WIDTH_RATIO: 0.35, + }, + + // Scale factors for each position + SCALE: { + CENTER: 1.0, // Center slide at 100% + ADJACENT: 0.8, // Adjacent slides at 80% + OUTER: 0.6, // Outer slides at 60% + }, + + // Overlap percentages + OVERLAP: { + ADJACENT: 0.5, // Adjacent slides overlap center by 50% + OUTER: 0.5, // Outer slides overlap adjacent by 60% + }, + + // Visual effects + EFFECTS: { + BLUR: { + CENTER: 0, // No blur on center + ADJACENT: 1, // Slight blur on adjacent + OUTER: 2, // More blur on outer + }, + OPACITY: { + CENTER: "1", // Full opacity for center + ADJACENT: "0.8", // Reduced for adjacent + OUTER: "0.6", // Most reduced for outer + }, + }, + + // Animation settings + ANIMATION: { + SPEED: 600, + AUTOPLAY: 3000, + }, + + DEBUG: { + ENABLED: false, + }, + + BREAKPOINTS: { + MOBILE_LG: 768, // Match mobileLg breakpoint + }, +} as const; + +const calculateDimensions = (containerWidth: number) => { + const baseWidth = Math.min( + CAROUSEL_CONFIG.SLIDES.MAX_WIDTH, + containerWidth * CAROUSEL_CONFIG.SLIDES.CONTAINER_WIDTH_RATIO, + ); + + const adjacentWidth = baseWidth * CAROUSEL_CONFIG.SCALE.ADJACENT; + const outerWidth = baseWidth * CAROUSEL_CONFIG.SCALE.OUTER; + + const adjacentVisible = adjacentWidth * + (1 - CAROUSEL_CONFIG.OVERLAP.ADJACENT); + const outerVisible = outerWidth * (1 - CAROUSEL_CONFIG.OVERLAP.OUTER); + + const totalWidth = baseWidth + + (2 * adjacentVisible) + + (2 * outerVisible); + const centerOffset = 
(containerWidth - totalWidth) / 2; + + const adjacentTranslate = baseWidth * 0.5; + const outerTranslate = baseWidth * 0.5 + adjacentWidth * 0.5; + + return { + baseWidth, + translations: { + adjacent: adjacentTranslate, + outer: outerTranslate, + }, + centerOffset, + }; +}; + +const calculateTranslateX = ( + distance: number, + baseWidth: number, + isMobile: boolean, +): number => { + if (distance === 0) return 0; + + const direction = distance > 0 ? 1 : -1; + if (isMobile) { + return direction * (baseWidth * 0.75); + } + + if (Math.abs(distance) === 1) { + return direction * (baseWidth * 0.5); + } + + if (Math.abs(distance) === 2) { + return direction * (baseWidth * 0.9); + } + + return 0; +}; + +const debug = (message: string, data?: unknown) => { + if (CAROUSEL_CONFIG.DEBUG.ENABLED) { + console.log(`Carousel Debug: ${message}`, data); + } +}; + +const calculateTransforms = ( + swiper: SwiperType, + _containerWidth: number, + isMobile: boolean, + baseWidth: number, + centerX: number, +) => { + return swiper.slides.map((slideEl, i) => { + let distance = i - swiper.activeIndex; + if (distance > swiper.slides.length / 2) distance -= swiper.slides.length; + if (distance < -swiper.slides.length / 2) distance += swiper.slides.length; + + const isCenter = distance === 0; + const isAdjacent = Math.abs(distance) === 1; + const shouldShow = isMobile + ? Math.abs(distance) <= 1 + : Math.abs(distance) <= 2; + + const scale = isCenter + ? CAROUSEL_CONFIG.SCALE.CENTER + : isAdjacent + ? CAROUSEL_CONFIG.SCALE.ADJACENT + : CAROUSEL_CONFIG.SCALE.OUTER; + + const translateX = calculateTranslateX(distance, baseWidth, isMobile); + const finalTranslateX = translateX - (baseWidth / 2); + + return { + el: slideEl, + transform: { + visibility: shouldShow ? "visible" : "hidden", + width: `${baseWidth}px`, + position: "absolute", + left: `${centerX}px`, + transform: `translateX(${finalTranslateX}px) scale(${scale})`, + zIndex: isCenter ? "3" : isAdjacent ? "2" : "1", + opacity: isCenter + ? CAROUSEL_CONFIG.EFFECTS.OPACITY.CENTER + : isAdjacent + ? CAROUSEL_CONFIG.EFFECTS.OPACITY.ADJACENT + : CAROUSEL_CONFIG.EFFECTS.OPACITY.OUTER, + filter: isCenter + ? "none" + : `blur(${ + isAdjacent + ? CAROUSEL_CONFIG.EFFECTS.BLUR.ADJACENT + : CAROUSEL_CONFIG.EFFECTS.BLUR.OUTER + }px)`, + }, + }; + }); +}; + +export default function createCarouselSlider( + el: CarouselElement, +): SwiperType | undefined { + if (!el) return undefined; + + const swiperEl = el.querySelector(".swiper") as HTMLElement; + if (!swiperEl) return undefined; + + const isMobile = + globalThis.innerWidth < CAROUSEL_CONFIG.BREAKPOINTS.MOBILE_LG; + + const swiper = new Swiper(swiperEl, { + modules: [Autoplay, Pagination], + slidesPerView: isMobile + ? CAROUSEL_CONFIG.SLIDES.COUNT.MOBILE + : CAROUSEL_CONFIG.SLIDES.COUNT.DESKTOP, + centeredSlides: true, + loop: true, + speed: CAROUSEL_CONFIG.ANIMATION.SPEED, + watchSlidesProgress: true, + allowTouchMove: true, + virtualTranslate: true, + initialSlide: 1, + autoplay: { + delay: CAROUSEL_CONFIG.ANIMATION.AUTOPLAY, + disableOnInteraction: false, + pauseOnMouseEnter: false, + waitForTransition: false, + enabled: true, + }, + + effect: "custom", + + pagination: { + el: ".swiper-pagination", + clickable: true, + renderBullet: function (_index, className) { + return '
'; + }, + }, + + on: { + beforeInit: function (swiper: SwiperType) { + swiper.params.cssMode = false; + swiper.wrapperEl.style.transform = "translate3d(0, 0, 0)"; + swiper.wrapperEl.style.width = "100%"; + swiper.wrapperEl.style.display = "flex"; + swiper.wrapperEl.style.justifyContent = "center"; + }, + + init: function (swiper: SwiperType) { + swiper.autoplay.start(); + + debug("Carousel Initialized:", { + autoplay: { + running: swiper.autoplay.running, + delay: CAROUSEL_CONFIG.ANIMATION.AUTOPLAY, + }, + slides: { + total: swiper.slides.length, + active: swiper.activeIndex, + }, + }); + }, + + autoplayStart: function (_swiper: SwiperType) { + debug("Autoplay Started", { running: true }); + }, + + autoplayStop: function (_swiper: SwiperType) { + debug("Autoplay Stopped", { running: false }); + }, + + setTranslate: function (swiper: SwiperType) { + swiper.wrapperEl.style.transform = "translate3d(0, 0, 0)"; + }, + + progress: function (swiper: SwiperType) { + const containerWidth = el.offsetWidth; + const isMobile = containerWidth < CAROUSEL_CONFIG.BREAKPOINTS.MOBILE_LG; + const { baseWidth } = calculateDimensions(containerWidth); + const centerX = containerWidth / 2; + + const transforms = calculateTransforms( + swiper, + containerWidth, + isMobile, + baseWidth, + centerX, + ); + + requestAnimationFrame(() => { + transforms.forEach(({ el, transform }) => { + Object.assign(el.style, transform); + }); + }); + }, + + afterInit: function (swiper: SwiperType) { + setTimeout(() => { + swiper.autoplay.start(); + debug("Autoplay Started After Init", { + running: swiper.autoplay.running, + time: Date.now(), + }); + }, 100); + }, + + autoplay: function (swiper: SwiperType) { + const currentIndex = swiper.activeIndex; + const nextIndex = (currentIndex + 1) % swiper.slides.length; + + debug("Autoplay Moving:", { + current: currentIndex, + next: nextIndex, + time: Date.now(), + }); + + requestAnimationFrame(() => { + swiper.slideTo(nextIndex, CAROUSEL_CONFIG.ANIMATION.SPEED); + }); + }, + + slideChange: function (swiper: SwiperType) { + debug("Slide Changed:", { + activeIndex: swiper.activeIndex, + realIndex: swiper.realIndex, + autoplayRunning: swiper.autoplay.running, + time: Date.now(), + }); + const paginationBullets = document.querySelectorAll( + ".swiper-pagination .swiper-pagination-bullet", + ); + + const visibleSlides = isMobile + ? 
CAROUSEL_CONFIG.SLIDES.COUNT.MOBILE + : CAROUSEL_CONFIG.SLIDES.COUNT.DESKTOP; + + paginationBullets.forEach((bullet, index) => { + if (swiper.realIndex >= visibleSlides) { + if ( + index >= visibleSlides + ) { + bullet.style.display = "block"; + } else { + bullet.style.display = "none"; + } + } else { + if (index >= visibleSlides) { + bullet.style.display = "none"; + } else { + bullet.style.display = "block"; + } + } + }); + + if (!swiper.autoplay.running) { + swiper.autoplay.start(); + } + }, + }, + }); + + swiper.autoplay.start(); + + const autoplayMonitor = setInterval(() => { + if (!swiper.autoplay.running) { + debug("Autoplay Monitor - Restarting"); + swiper.autoplay.start(); + } + }, 1000); + + swiper.on("destroy", () => { + clearInterval(autoplayMonitor); + }); + + return swiper; +} +// Let me clearly state the design requirements and then propose a solution: +// Design Requirements: +// Center Image (Active): +// Maximum width/height of 408px and should scale up and down responsively +// Centered in the container/viewport +// Full opacity, no blur +// Scale: 1.0 (100%) +// Adjacent Images (Left and Right of Center): +// Scale: 0.8 (80% of center = ~326px) +// 50% overlapped behind the center image +// Slight blur effect +// Reduced opacity (0.8) +// One on each side of center image +// Outer Images (Furthest Left and Right): +// Scale: 0.6 (60% of center = ~245px) +// 60% overlapped behind adjacent images +// More blur effect +// More reduced opacity (0.6) +// One on each side of adjacent images +// Layout: +// Always show exactly 5 images +// Images should be arranged: Outer -> Adjacent -> Center -> Adjacent -> Outer +// Center image should be truly centered in the container +// Container should respect page margins and max-width diff --git a/client/wallet/leather.ts b/client/wallet/leather.ts new file mode 100644 index 000000000..da45043ef --- /dev/null +++ b/client/wallet/leather.ts @@ -0,0 +1,261 @@ +import { signal } from "@preact/signals"; +import { walletContext } from "./wallet.ts"; +import { SignPSBTResult, Wallet } from "$types/index.d.ts"; +import { logger } from "$lib/utils/logger.ts"; +import { checkWalletAvailability, getGlobalWallets } from "./wallet.ts"; +import { handleWalletError } from "./walletHelper.ts"; +import { getBTCBalanceInfo } from "$lib/utils/balanceUtils.ts"; + +interface LeatherAddress { + symbol: "BTC" | "STX"; + type?: "p2wpkh" | "p2tr"; + address: string; + publicKey: string; + derivationPath?: string; + tweakedPublicKey?: string; +} + +type AddToastFunction = (message: string, type: string) => void; + +export const isLeatherInstalled = signal(false); + +export const checkLeather = () => { + const isAvailable = checkWalletAvailability("leather"); + isLeatherInstalled.value = isAvailable; + return isAvailable; +}; + +export const connectLeather = async (addToast: AddToastFunction) => { + try { + const isInstalled = checkLeather(); + if (!isInstalled) { + logger.warn("ui", { + message: "Leather wallet not detected", + }); + addToast( + "Leather wallet not detected. 
Please install the Leather extension.", + "error", + ); + return; + } + + logger.debug("ui", { + message: "Connecting to Leather wallet", + }); + + const leatherProvider = getProvider(); + const response = await leatherProvider.request("getAddresses"); + + let addresses; + if ( + response?.result?.addresses && Array.isArray(response.result.addresses) + ) { + addresses = response.result.addresses; + } else { + throw new Error("Invalid response format from getAddresses"); + } + + if (!addresses || addresses.length === 0) { + throw new Error("No addresses received from Leather wallet"); + } + + await handleConnect(addresses); + logger.info("ui", { + message: "Successfully connected to Leather wallet", + }); + addToast("Successfully connected to Leather wallet", "success"); + } catch (error) { + logger.error("ui", { + message: "Error connecting to Leather wallet", + error: error instanceof Error ? error.message : String(error), + details: error, + }); + addToast( + `Failed to connect to Leather wallet: ${ + error instanceof Error ? error.message : "Unknown error" + }`, + "error", + ); + } +}; + +export const handleConnect = async (addresses: LeatherAddress[]) => { + if (!addresses || addresses.length === 0) { + throw new Error("No addresses received from Leather wallet"); + } + + // Prioritize p2wpkh (Native SegWit) address, but also allow p2tr (Taproot) as fallback + const btcAddress = addresses.find((addr) => + addr.symbol === "BTC" && (addr.type === "p2wpkh" || addr.type === "p2tr") + ); + + if (!btcAddress) { + throw new Error( + "No compatible BTC address found in the received addresses", + ); + } + + console.log(`Using BTC address type: ${btcAddress.type}`); + + const _wallet = {} as Wallet; + _wallet.address = btcAddress.address; + _wallet.accounts = [btcAddress.address]; + _wallet.publicKey = btcAddress.publicKey; + _wallet.addressType = btcAddress.type || "p2wpkh"; + + const addressInfo = await getBTCBalanceInfo(btcAddress.address); + + _wallet.btcBalance = { + confirmed: addressInfo?.balance ?? 0, + unconfirmed: addressInfo?.unconfirmedBalance ?? 0, + total: (addressInfo?.balance ?? 0) + (addressInfo?.unconfirmedBalance ?? 
0), + }; + + _wallet.network = "mainnet"; + _wallet.provider = "leather"; + + walletContext.updateWallet(_wallet); +}; + +const signMessage = async (message: string) => { + const leatherProvider = getProvider(); + if (typeof leatherProvider === "undefined") { + throw new Error("Leather wallet not connected"); + } + + console.log("Leather wallet signing message:", message); + try { + const { signature } = await leatherProvider.request( + "signMessage", + { + message, + paymentType: "p2wpkh", + }, + ); + console.log("Leather wallet signature result:", signature); + return signature; + } catch (error) { + console.error("Error signing message with Leather wallet:", error); + throw error; + } +}; + +interface LeatherSignPSBTResponse { + error?: string; + result?: { + hex?: string; // Signed PSBT in hex format + txid?: string; // Transaction ID if broadcast + cancelled?: boolean; + }; +} + +export const signPSBT = async ( + psbtHex: string, + inputsToSign: { index: number }[], + enableRBF = true, + sighashTypes?: number[], + autoBroadcast = true, +): Promise => { + logger.debug("ui", { + message: "Entering Leather signPSBT function", + data: { + psbtHexLength: psbtHex.length, + inputsCount: inputsToSign.length, + enableRBF, + autoBroadcast, + }, + }); + + const leatherProvider = getProvider(); + if (typeof leatherProvider === "undefined") { + logger.error("ui", { + message: "Leather wallet not connected", + }); + return { signed: false, error: "Leather wallet not connected" }; + } + + try { + const requestParams = { + hex: psbtHex, + network: "mainnet", + broadcast: autoBroadcast, + inputsToSign: inputsToSign || undefined, + rbf: enableRBF, + sighashTypes: sighashTypes || undefined, + }; + + logger.debug("ui", { + message: "Calling Leather provider signPsbt method", + data: requestParams, + }); + + const result = await leatherProvider.request( + "signPsbt", + requestParams, + ) as LeatherSignPSBTResponse; + + logger.debug("ui", { + message: "Leather signPsbt result received", + data: result, + }); + + if (!result) { + return { signed: false, error: "No result from Leather wallet" }; + } + + // Check for user cancellation + if (result.result?.cancelled) { + return { signed: false, cancelled: true }; + } + + // Check for error + if (result.error) { + return { signed: false, error: result.error }; + } + + if (result.result) { + if (result.result.hex) { + logger.info("ui", { + message: "PSBT signed successfully", + data: { hasHex: true, hasTxid: false }, + }); + return { signed: true, psbt: result.result.hex }; + } + if (result.result.txid) { + logger.info("ui", { + message: "PSBT signed and broadcast successfully", + data: { hasHex: false, hasTxid: true, txid: result.result.txid }, + }); + return { signed: true, txid: result.result.txid }; + } + } + + logger.error("ui", { + message: "Unexpected result format from Leather wallet", + data: result, + }); + return { + signed: false, + error: "Unexpected result format from Leather wallet", + }; + } catch (error: unknown) { + logger.error("ui", { + message: "Error in Leather signPSBT", + error: error instanceof Error ? 
error.message : String(error), + details: error, + }); + return handleWalletError(error, "Leather"); + } +}; + +export const leatherProvider = { + checkLeather, + connectLeather, + signMessage, + signPSBT, +}; + +const getProvider = () => { + const wallets = getGlobalWallets(); + return wallets.LeatherProvider; +}; diff --git a/client/wallet/okx.ts b/client/wallet/okx.ts new file mode 100644 index 000000000..60753ea1a --- /dev/null +++ b/client/wallet/okx.ts @@ -0,0 +1,222 @@ +import { signal } from "@preact/signals"; +import { walletContext } from "./wallet.ts"; +import { SignPSBTResult, Wallet } from "$types/index.d.ts"; +import { logger } from "$lib/utils/logger.ts"; +import { checkWalletAvailability, getGlobalWallets } from "./wallet.ts"; +import { handleWalletError } from "./walletHelper.ts"; + +export const isOKXInstalled = signal(false); + +export const connectOKX = async ( + addToast: (message: string, type: string) => void, +) => { + try { + const okx = (globalThis as any).okxwallet; + if (!okx) { + logger.error("ui", { + message: "OKX wallet not detected", + context: "connectOKX", + }); + addToast( + "OKX wallet not detected. Please install the OKX extension.", + "error", + ); + return; + } + await okx.bitcoin.requestAccounts(); + await handleAccountsChanged(); + logger.info("ui", { + message: "Successfully connected to OKX wallet", + context: "connectOKX", + }); + addToast("Successfully connected to OKX wallet", "success"); + } catch (error: unknown) { + logger.error("ui", { + message: "Failed to connect to OKX wallet", + context: "connectOKX", + error: error instanceof Error ? error.message : String(error), + }); + addToast( + `Failed to connect to OKX wallet: ${ + error instanceof Error ? error.message : "Unknown error" + }`, + "error", + ); + } +}; + +export const checkOKX = () => { + const isAvailable = checkWalletAvailability("okx"); + isOKXInstalled.value = isAvailable; + return isAvailable; +}; + +const getProvider = () => { + const wallets = getGlobalWallets(); + return wallets.okxwallet; +}; + +const handleAccountsChanged = async () => { + const okx = (globalThis as any).okxwallet; + if (!okx || !okx.bitcoin) { + logger.error("ui", { + message: "OKX wallet not connected", + context: "handleAccountsChanged", + }); + return; + } + + try { + const accounts: string[] = await okx.bitcoin.getAccounts(); + if (!accounts || accounts.length === 0) { + logger.error("ui", { + message: "No accounts found in OKX wallet", + context: "handleAccountsChanged", + }); + walletContext.disconnect(); + return; + } + + const address = accounts[0]; + const balanceInfo = await okx.bitcoin.getBalance(); + const publicKey = await okx.bitcoin.getPublicKey(); + + logger.debug("ui", { + message: "Fetched OKX wallet information", + context: "handleAccountsChanged", + address, + balanceInfo, + }); + + const _wallet: Wallet = { + address, + accounts, + publicKey, + btcBalance: { + confirmed: balanceInfo.confirmed, + unconfirmed: balanceInfo.unconfirmed, + total: balanceInfo.total, + }, + network: "mainnet", + provider: "okx", + stampBalance: [], + }; + + logger.info("ui", { + message: "Updated wallet information", + context: "handleAccountsChanged", + wallet: _wallet, + }); + + walletContext.updateWallet(_wallet); + } catch (error: unknown) { + logger.error("ui", { + message: "Error fetching account from OKX wallet", + context: "handleAccountsChanged", + error: error instanceof Error ? 
error.message : String(error), + }); + } +}; + +const signMessage = async (message: string) => { + const okx = (globalThis as any).okxwallet; + if (!okx || !okx.bitcoin) { + throw new Error("OKX wallet not connected"); + } + console.log("OKX wallet signing message:", message); + try { + const signature = await okx.bitcoin.signMessage(message); + console.log("OKX wallet signature result:", signature); + return signature; + } catch (error) { + console.error("Error signing message with OKX wallet:", error); + throw error; + } +}; + +const signPSBT = async ( + psbtHex: string, + inputsToSign: { index: number }[], + enableRBF = true, + sighashTypes?: number[], + autoBroadcast = true, +): Promise => { + const okx = getProvider(); + if (!okx) { + return { signed: false, error: "OKX wallet not connected" }; + } + try { + logger.debug("ui", { + message: "Signing PSBT with OKX", + data: { + psbtHexLength: psbtHex.length, + inputsToSign, + enableRBF, + autoBroadcast, + }, + }); + + const options: any = { + autoFinalized: true, + enableRBF, // Add RBF support + }; + + if (inputsToSign?.length > 0) { + options.toSignInputs = inputsToSign.map((input, idx) => ({ + index: input.index, + sighashTypes: sighashTypes ? [sighashTypes[idx]] : undefined, + })); + } + + const signedPsbtHex = await okx.bitcoin.signPsbt(psbtHex, options); + + logger.debug("ui", { + message: "OKX signPsbt result", + data: { signedPsbtHex }, + }); + + if (!signedPsbtHex) { + return { signed: false, error: "No result from OKX wallet" }; + } + + if (autoBroadcast) { + try { + const txid = await okx.bitcoin.pushPsbt(signedPsbtHex); + logger.info("ui", { + message: "Successfully broadcast transaction", + data: { txid }, + }); + return { signed: true, txid }; + } catch (_broadcastError) { + return { + signed: true, + psbt: signedPsbtHex, + error: "Transaction signed but broadcast failed", + }; + } + } + + return { signed: true, psbt: signedPsbtHex }; + } catch (error: unknown) { + return handleWalletError(error, "OKX"); + } +}; + +const broadcastRawTX = async (rawTx: string) => { + const okx = (globalThis as any).okxwallet; + return await okx.bitcoin.pushTx(rawTx); +}; + +const broadcastPSBT = async (psbtHex: string) => { + const okx = (globalThis as any).okxwallet; + return await okx.bitcoin.pushPsbt(psbtHex); +}; + +export const okxProvider = { + checkOKX, + connectOKX, + signMessage, + signPSBT, + broadcastRawTX, + broadcastPSBT, +}; diff --git a/client/wallet/phantom.ts b/client/wallet/phantom.ts new file mode 100644 index 000000000..3b9542c5c --- /dev/null +++ b/client/wallet/phantom.ts @@ -0,0 +1,197 @@ +import { signal } from "@preact/signals"; +import { walletContext } from "./wallet.ts"; +import { SignPSBTResult, Wallet } from "$types/index.d.ts"; +import { checkWalletAvailability, getGlobalWallets } from "./wallet.ts"; +import { handleWalletError } from "./walletHelper.ts"; +import { getBTCBalanceInfo } from "$lib/utils/balanceUtils.ts"; +import { logger } from "$lib/utils/logger.ts"; +import { broadcastTransaction } from "$lib/utils/minting/broadcast.ts"; + +export const isPhantomInstalled = signal(false); + +export const connectPhantom = async ( + addToast: (message: string, type: string) => void, +) => { + try { + const provider = getProvider(); + if (!provider) { + addToast( + "Phantom wallet not detected. 
Please install the Phantom extension.", + "error", + ); + return; + } + const accounts = await provider.requestAccounts(); + await handleAccountsChanged(accounts); + addToast("Successfully connected to Phantom wallet", "success"); + } catch (error: unknown) { + addToast( + `Failed to connect to Phantom wallet: ${ + error instanceof Error ? error.message : "Unknown error" + }`, + "error", + ); + } +}; + +const getProvider = () => { + const wallets = getGlobalWallets(); + return wallets.phantom?.bitcoin; +}; + +export const checkPhantom = () => { + const isAvailable = checkWalletAvailability("phantom"); + isPhantomInstalled.value = isAvailable; + return isAvailable; +}; + +const handleAccountsChanged = async (accounts: any[]) => { + if (accounts.length === 0) { + walletContext.disconnect(); + return; + } + + const _wallet = {} as Wallet; + _wallet.address = accounts[0]?.address; + _wallet.accounts = accounts.map((acc) => acc.address); + _wallet.publicKey = accounts[0]?.publicKey; + + if (_wallet.address) { + const addressInfo = await getBTCBalanceInfo(_wallet.address); + + _wallet.btcBalance = { + confirmed: addressInfo?.balance ?? 0, + unconfirmed: addressInfo?.unconfirmedBalance ?? 0, + total: (addressInfo?.balance ?? 0) + + (addressInfo?.unconfirmedBalance ?? 0), + }; + } + + _wallet.network = "mainnet"; + _wallet.provider = "phantom"; + + walletContext.updateWallet(_wallet); +}; + +const signMessage = async (message: string) => { + const provider = getProvider(); + if (!provider) { + throw new Error("Phantom wallet not connected"); + } + console.log("Phantom wallet signing message:", message); + try { + const result = await provider.signMessage( + new TextEncoder().encode(message), + ); + console.log("Phantom wallet signature result:", result); + return btoa(String.fromCharCode(...new Uint8Array(result.signature))); + } catch (error) { + console.error("Error signing message with Phantom wallet:", error); + throw error; + } +}; + +const hexToUint8Array = (hex: string): Uint8Array => { + if (hex.length % 2 !== 0) { + throw new Error("Invalid hex string"); + } + const array = new Uint8Array(hex.length / 2); + for (let i = 0; i < array.length; i++) { + const byte = hex.substr(i * 2, 2); + array[i] = parseInt(byte, 16); + } + return array; +}; + +const uint8ArrayToHex = (bytes: Uint8Array): string => { + return Array.from(bytes) + .map((byte) => byte.toString(16).padStart(2, "0")) + .join(""); +}; + +const signPSBT = async ( + psbtHex: string, + inputsToSign: { index: number; address?: string; sighashType?: number }[], + enableRBF = true, + sighashTypes?: number[], + autoBroadcast = true, +): Promise => { + const provider = getProvider(); + if (!provider) { + return { signed: false, error: "Phantom wallet not connected" }; + } + + try { + logger.debug("ui", { + message: "Signing PSBT with Phantom", + data: { + psbtHexLength: psbtHex.length, + inputsToSign, + enableRBF, + autoBroadcast, + }, + }); + + const psbtBuffer = hexToUint8Array(psbtHex); + const inputsToSignArray = inputsToSign?.map((input) => ({ + address: input.address || walletContext.wallet.address, + signingIndexes: [input.index], + sigHash: input.sighashType || sighashTypes?.[0], + })); + + const result = await provider.signPSBT(psbtBuffer, { + inputsToSign: inputsToSignArray, + enableRBF, + }); + + logger.debug("ui", { + message: "Phantom signPSBT result", + data: { result }, + }); + + if (!result) { + return { signed: false, error: "No result from Phantom wallet" }; + } + + const signedPsbtHex = uint8ArrayToHex(result); + + if 
(autoBroadcast) { + try { + const txid = await broadcastTransaction(signedPsbtHex); + logger.debug("ui", { + message: "Transaction broadcast successful", + data: { txid }, + }); + return { + signed: true, + psbt: signedPsbtHex, + txid, + broadcast: true, + }; + } catch (broadcastError) { + logger.error("ui", { + message: "Transaction broadcast failed", + error: broadcastError, + }); + return { + signed: true, + psbt: signedPsbtHex, + error: + `Transaction signed but broadcast failed: ${broadcastError.message}`, + broadcast: false, + }; + } + } + + return { signed: true, psbt: signedPsbtHex }; + } catch (error: unknown) { + return handleWalletError(error, "Phantom"); + } +}; + +export const phantomProvider = { + checkPhantom, + connectPhantom, + signMessage, + signPSBT, +}; diff --git a/client/wallet/tapwallet.ts b/client/wallet/tapwallet.ts new file mode 100644 index 000000000..df0ecb462 --- /dev/null +++ b/client/wallet/tapwallet.ts @@ -0,0 +1,181 @@ +import { signal } from "@preact/signals"; +import { walletContext } from "./wallet.ts"; +import { SignPSBTResult, Wallet } from "$types/index.d.ts"; +import { checkWalletAvailability } from "./wallet.ts"; +import { handleWalletError } from "./walletHelper.ts"; +import { logger } from "$lib/utils/logger.ts"; + +export const isTapWalletInstalled = signal(false); + +export const connectTapWallet = async ( + addToast: (message: string, type: "error" | "success") => void, +) => { + try { + const tapwallet = (globalThis as any).tapwallet; + if (!tapwallet) { + addToast( + "TapWallet not detected. Please install the TapWallet extension.", + "error", + ); + return; + } + const accounts = await tapwallet.requestAccounts(); + await handleAccountsChanged(accounts); + addToast("Successfully connected to TapWallet", "success"); + } catch (error) { + if (error instanceof Error) { + addToast(`Failed to connect to TapWallet: ${error.message}`, "error"); + } else { + addToast("Failed to connect to TapWallet", "error"); + } + } +}; + +export const checkTapWallet = () => { + const isAvailable = checkWalletAvailability("tapwallet"); + isTapWalletInstalled.value = isAvailable; + return isAvailable; +}; + +const handleAccountsChanged = async (accounts: string[]) => { + if (accounts.length === 0) { + walletContext.disconnect(); + return; + } + + const tapwallet = (globalThis as any).tapwallet; + const _wallet = {} as Wallet; + _wallet.address = accounts[0]; + _wallet.accounts = accounts; + + const publicKey = await tapwallet.getPublicKey(); + _wallet.publicKey = publicKey; + + const balance = await tapwallet.getBalance(); + _wallet.btcBalance = { + confirmed: balance.confirmed, + unconfirmed: balance.unconfirmed, + total: balance.confirmed + balance.unconfirmed, + }; + + _wallet.network = await tapwallet.getNetwork(); + _wallet.provider = "tapwallet"; + + walletContext.updateWallet(_wallet); +}; + +const signMessage = async (message: string) => { + const tapwallet = (globalThis as any).tapwallet; + if (!tapwallet) { + throw new Error("TapWallet not connected"); + } + + try { + logger.debug("ui", { + message: "TapWallet signing message", + data: { messageLength: message.length }, + }); + + const signature = await tapwallet.signMessage(message); + + logger.debug("ui", { + message: "TapWallet signature result", + data: { signature }, + }); + + return signature; + } catch (error) { + logger.error("ui", { + message: "Error signing message with TapWallet", + error: error instanceof Error ? 
error.message : String(error), + }); + throw error; + } +}; + +const signPSBT = async ( + psbtHex: string, + inputsToSign: { index: number }[], + enableRBF = true, + sighashTypes?: number[], + autoBroadcast = true, +): Promise => { + const tapwallet = (globalThis as any).tapwallet; + if (!tapwallet) { + return { signed: false, error: "TapWallet not connected" }; + } + + try { + logger.debug("ui", { + message: "Signing PSBT with TapWallet", + data: { + psbtHexLength: psbtHex.length, + inputsToSign, + enableRBF, + autoBroadcast, + }, + }); + + const options: any = { + enableRBF, + }; + + if (inputsToSign?.length > 0) { + options.inputsToSign = inputsToSign.map((input) => ({ + index: input.index, + sighashTypes: sighashTypes || undefined, + })); + } + + const signedPsbtHex = await tapwallet.signPsbt(psbtHex, options); + + logger.debug("ui", { + message: "TapWallet signPsbt result", + data: { signedPsbtHex }, + }); + + if (!signedPsbtHex) { + return { signed: false, error: "No result from TapWallet" }; + } + + if (autoBroadcast) { + try { + const txid = await tapwallet.pushPsbt(signedPsbtHex); + logger.info("ui", { + message: "Successfully broadcast transaction", + data: { txid }, + }); + return { signed: true, txid }; + } catch (_broadcastError) { + return { + signed: true, + psbt: signedPsbtHex, + error: "Transaction signed but broadcast failed", + }; + } + } + + return { signed: true, psbt: signedPsbtHex }; + } catch (error: unknown) { + return handleWalletError(error, "TapWallet"); + } +}; + +const broadcastRawTX = async (rawTx: string) => { + const tapwallet = (globalThis as any).tapwallet; + return await tapwallet.pushTx({ rawtx: rawTx }); +}; + +const broadcastPSBT = async (psbtHex: string) => { + const tapwallet = (globalThis as any).tapwallet; + return await tapwallet.pushPsbt(psbtHex); +}; + +export const tapWalletProvider = { + checkTapWallet, + connectTapWallet, + signMessage, + signPSBT, + broadcastRawTX, + broadcastPSBT, +}; diff --git a/client/wallet/unisat.ts b/client/wallet/unisat.ts new file mode 100644 index 000000000..2fd0847b3 --- /dev/null +++ b/client/wallet/unisat.ts @@ -0,0 +1,137 @@ +import { signal } from "@preact/signals"; +import { walletContext } from "./wallet.ts"; +import { SignPSBTResult, Wallet } from "$types/index.d.ts"; +import { checkWalletAvailability, getGlobalWallets } from "./wallet.ts"; +import { handleWalletError } from "./walletHelper.ts"; +import { logger } from "$lib/utils/logger.ts"; +export const isUnisatInstalled = signal(false); + +export const checkUnisat = () => { + const isAvailable = checkWalletAvailability("unisat"); + isUnisatInstalled.value = isAvailable; + return isAvailable; +}; + +const getProvider = () => { + const wallets = getGlobalWallets(); + return wallets.unisat; +}; + +export const connectUnisat = async ( + addToast: (message: string, type: "error" | "success") => void, +) => { + const unisat = getProvider(); + if (!unisat) { + addToast("Unisat not installed", "error"); + return; + } + const result = await unisat.requestAccounts(); + handleAccountsChanged(result); + addToast("Connected using Unisat wallet", "success"); +}; + +const handleAccountsChanged = async (_accounts: string[]) => { + console.log("handleAccountsChanged", _accounts); + if (walletContext.wallet.address === _accounts[0]) { + return; + } + if (_accounts.length === 0) { + walletContext.disconnect(); + return; + } + const _wallet = {} as Wallet; + const unisat = getProvider(); + _wallet.accounts = _accounts; + const address = _accounts[0]; + _wallet.address = 
address; + const publicKey = await unisat.getPublicKey(); + _wallet.publicKey = publicKey; + const balance = await unisat.getBalance(); + _wallet.btcBalance = { + confirmed: balance.confirmed, + unconfirmed: balance.unconfirmed, + total: balance.confirmed + balance.unconfirmed, + }; + _wallet.network = "mainnet"; + _wallet.provider = "unisat"; + walletContext.updateWallet(_wallet); +}; + +const unisat = getProvider(); +unisat?.on("accountsChanged", handleAccountsChanged); + +export const signPSBT = async ( + psbtHex: string, + inputsToSign: { index: number }[], + enableRBF = true, + sighashTypes?: number[], + autoBroadcast = true, +): Promise => { + try { + const unisat = getProvider(); + if (!unisat) { + return { signed: false, error: "Unisat wallet not connected" }; + } + + logger.debug("ui", { + message: "Signing PSBT with Unisat", + data: { + psbtHexLength: psbtHex.length, + inputsToSign, + enableRBF, + autoBroadcast, + }, + }); + + const unisatOptions: any = { + autoFinalized: true, + enableRBF, // Note: Check if Unisat supports RBF in their options + }; + + if (inputsToSign?.length > 0) { + unisatOptions.toSignInputs = inputsToSign.map((input) => ({ + index: input.index, + address: walletContext.wallet.address, + sighashTypes: sighashTypes, + })); + } + + const signedPsbtHex = await unisat.signPsbt(psbtHex, unisatOptions); + + logger.debug("ui", { + message: "Unisat signPsbt result", + data: { signedPsbtHex }, + }); + + if (!signedPsbtHex) { + return { signed: false, error: "No result from Unisat wallet" }; + } + + if (autoBroadcast) { + try { + const txid = await unisat.pushPsbt(signedPsbtHex); + logger.info("ui", { + message: "Successfully broadcast transaction", + data: { txid }, + }); + return { signed: true, txid }; + } catch (_broadcastError) { + return { + signed: true, + psbt: signedPsbtHex, + error: "Transaction signed but broadcast failed", + }; + } + } + + return { signed: true, psbt: signedPsbtHex }; + } catch (error: unknown) { + return handleWalletError(error, "Unisat"); + } +}; + +// Export the provider +export const unisatProvider = { + connectUnisat, + signPSBT, +}; diff --git a/client/wallet/wallet.ts b/client/wallet/wallet.ts new file mode 100644 index 000000000..30864ddda --- /dev/null +++ b/client/wallet/wallet.ts @@ -0,0 +1,213 @@ +import { signal } from "@preact/signals"; +import { logger } from "$lib/utils/logger.ts"; + +import { Wallet } from "$types/index.d.ts"; +import { + broadcastPSBT, + broadcastRawTX, + signMessage, + signPSBT, +} from "./walletHelper.ts"; + +// Add this at the very top of the file, before any imports +declare global { + interface Window { + __DEBUG?: { + namespaces: string; + enabled: boolean; + }; + } +} + +interface GlobalWithDebug { + __DEBUG?: { + namespaces: string; + enabled: boolean; + }; +} + +// Move interfaces and variables to the top +interface WalletProviders { + LeatherProvider?: any; + okxwallet?: any; + unisat?: any; + tapwallet?: any; + phantom?: any; +} + +interface WalletContext { + readonly wallet: Wallet; + readonly isConnected: boolean; + updateWallet: (wallet: Wallet) => void; + getBasicStampInfo: (address: string) => Promise; + disconnect: () => void; + signMessage: (message: string) => Promise; + signPSBT: ( + wallet: Wallet, + psbt: string, + inputsToSign: any[], + enableRBF?: boolean, + sighashTypes?: number[], + autoBroadcast?: boolean, + ) => Promise; + broadcastRawTX: (rawTx: string) => Promise; + broadcastPSBT: (psbtHex: string) => Promise; + showConnectModal: () => void; +} + +// Initialize wallet state 
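+// The wallet signal defined below is hydrated from localStorage, so a connected
+// wallet survives page reloads; updateWallet persists it and disconnect clears it.
+// Rough consumer sketch (assumed usage, not part of this file):
+//   if (!walletContext.isConnected) { walletContext.showConnectModal(); }
+//   else { await walletContext.signPSBT(walletContext.wallet, psbtHex, inputsToSign); }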
+export const initialWallet: Wallet = { + address: undefined, + publicKey: undefined, + accounts: [], + btcBalance: { + confirmed: 0, + unconfirmed: 0, + total: 0, + }, + stampBalance: [], + type: undefined, + provider: undefined, + network: undefined, + addressType: undefined, +}; + +let initialWalletState; +let initialConnected = false; +try { + const savedWallet = localStorage.getItem("wallet"); + initialConnected = savedWallet ? true : false; + initialWalletState = savedWallet ? JSON.parse(savedWallet) : initialWallet; +} catch (error) { + console.error("Error reading the wallet state:", error); + initialWalletState = initialWallet; +} + +export const walletSignal = signal(initialWalletState); +export const isConnectedSignal = signal(initialConnected); +export const showConnectWalletModal = signal(false); + +export const updateWallet = (_wallet: Wallet) => { + walletSignal.value = _wallet; + localStorage.setItem("wallet", JSON.stringify(_wallet)); + isConnectedSignal.value = true; +}; + +export const disconnect = () => { + walletSignal.value = initialWallet; + isConnectedSignal.value = false; + localStorage.removeItem("wallet"); +}; + +export const getBasicStampInfo = async (address: string) => { + const response = await fetch( + `/api/v2/balance/getStampsBalance?address=${encodeURIComponent(address)}`, + { method: "GET" }, + ); + const { stampBalance } = await response.json(); + return { stampBalance }; +}; + +// Wallet context with all functionality +export const walletContext: WalletContext = { + get wallet() { + return walletSignal.value; + }, + get isConnected() { + return isConnectedSignal.value; + }, + updateWallet, + disconnect, + getBasicStampInfo, + signMessage: async (message: string) => { + return await signMessage(walletContext.wallet, message); + }, + signPSBT: async ( + wallet: Wallet, + psbt: string, + inputsToSign: any[], + enableRBF = true, + sighashTypes?: number[], + autoBroadcast = true, + ) => { + return await signPSBT( + wallet, + psbt, + inputsToSign, + enableRBF, + sighashTypes, + autoBroadcast, + ); + }, + broadcastRawTX: async (rawTx: string) => { + return await broadcastRawTX(walletContext.wallet, rawTx); + }, + broadcastPSBT: async (psbtHex: string) => { + return await broadcastPSBT(walletContext.wallet, psbtHex); + }, + showConnectModal: () => { + showConnectWalletModal.value = true; + }, +}; + +// Provider checking functions +export function getGlobalWallets(): WalletProviders { + // Skip provider checks if we're not in a browser context + if (typeof globalThis === "undefined" || !("document" in globalThis)) { + return {}; + } + + // Only log wallet availability on client-side + const global = globalThis as unknown as { + LeatherProvider?: unknown; + okxwallet?: { bitcoin?: unknown }; + unisat?: unknown; + tapwallet?: unknown; + phantom?: { bitcoin?: { isPhantom?: boolean } }; + }; + + logger.debug("ui", { + message: "Checking wallet providers (client-side)", + data: { + hasLeather: Boolean(global.LeatherProvider), + hasOKX: Boolean(global.okxwallet?.bitcoin), + hasUnisat: Boolean(global.unisat), + hasTapWallet: Boolean(global.tapwallet), + hasPhantom: Boolean(global.phantom?.bitcoin?.isPhantom), + timestamp: new Date().toISOString(), + }, + }); + + return { + LeatherProvider: global.LeatherProvider, + okxwallet: global.okxwallet, + unisat: global.unisat, + tapwallet: global.tapwallet, + phantom: global.phantom, + }; +} + +// Update wallet availability checks +export function checkWalletAvailability(provider: string): boolean { + // Skip checks if we're 
not in a browser context + if (typeof globalThis === "undefined" || !("document" in globalThis)) { + return false; + } + + const wallets = getGlobalWallets(); + + switch (provider) { + case "leather": + return !!wallets.LeatherProvider; + case "okx": + return !!wallets.okxwallet?.bitcoin; + case "unisat": + return !!wallets.unisat; + case "tapwallet": + return !!wallets.tapwallet; + case "phantom": + return !!wallets.phantom?.bitcoin?.isPhantom; + default: + return false; + } +} diff --git a/client/wallet/walletHelper.ts b/client/wallet/walletHelper.ts new file mode 100644 index 000000000..d86fea52f --- /dev/null +++ b/client/wallet/walletHelper.ts @@ -0,0 +1,230 @@ +import { leatherProvider } from "./leather.ts"; +import { okxProvider } from "./okx.ts"; +import { unisatProvider } from "./unisat.ts"; +import { tapWalletProvider } from "./tapwallet.ts"; +import { phantomProvider } from "./phantom.ts"; +import { SignPSBTResult, Wallet } from "$types/index.d.ts"; +import { logger } from "$lib/utils/logger.ts"; + +interface WalletProvider { + signMessage: (message: string) => Promise; + signPSBT: ( + psbtHex: string, + inputsToSign: { index: number }[], + enableRBF?: boolean, + sighashTypes?: number[], + autoBroadcast?: boolean, + ) => Promise; + broadcastRawTX?: (rawTx: string) => Promise; + broadcastPSBT?: (psbtHex: string) => Promise; +} + +// Add shared error handling +interface JSONRPCError { + jsonrpc?: string; + id?: string; + error?: { + code?: number; + message?: string; + }; +} + +interface WalletError extends JSONRPCError { + details?: { + error?: { + message?: string; + code?: number; + }; + }; + message?: string; +} + +export function handleWalletError( + error: unknown, + walletName: string, +): SignPSBTResult { + logger.error("ui", { + message: `Error signing PSBT with ${walletName}`, + error, + details: error instanceof Error ? 
error : undefined, + }); + + // Handle string errors directly + if (typeof error === "string") { + return { + signed: false, + error: error, + }; + } + + // Handle Error instances + if (error instanceof Error) { + return { + signed: false, + error: error.message, + }; + } + + // Handle JSON-RPC style errors first (most common for wallets) + const jsonRpcError = error as JSONRPCError; + if (jsonRpcError?.error?.message) { + return { + signed: false, + error: jsonRpcError.error.message, + }; + } + + // Cast to our known error structure for other cases + const walletError = error as WalletError; + + // Check for nested error structures + if (walletError?.details?.error?.message) { + return { + signed: false, + error: walletError.details.error.message, + }; + } + + // Check for direct message property + if (walletError?.message) { + return { + signed: false, + error: walletError.message, + }; + } + + // Default error with wallet context + return { + signed: false, + error: `Unknown error occurred with ${walletName}`, + }; +} + +export const getWalletProvider = ( + provider: string | undefined, +): WalletProvider => { + logger.debug("ui", { + message: "Getting wallet provider", + data: { + provider, + stack: new Error().stack, + }, + }); + console.log("Getting wallet provider for:", provider); + switch (provider) { + case "leather": + return leatherProvider; + case "okx": + return okxProvider; + case "unisat": + return unisatProvider; + case "tapwallet": + return tapWalletProvider; + case "phantom": + return phantomProvider; + default: + throw new Error(`Unsupported wallet provider: ${provider}`); + } +}; + +export const signMessage = async (wallet: Wallet, message: string) => { + console.log("Signing message for wallet:", wallet.provider); + console.log("Message to sign:", message); + if (!wallet.provider) throw new Error("No wallet provider specified"); + const provider = getWalletProvider(wallet.provider); + return await provider.signMessage(message); +}; + +export const signPSBT = async ( + wallet: Wallet, + psbtHex: string, + inputsToSign: any[], + enableRBF = true, + sighashTypes?: number[], + autoBroadcast = true, +): Promise => { + console.log("Entering signPSBT in walletHelper.ts"); + console.log("Wallet provider:", wallet.provider); + console.log("PSBT hex length:", psbtHex.length); + console.log("Number of inputs to sign:", inputsToSign.length); + console.log("Enable RBF:", enableRBF); + + if (!wallet.provider) { + console.error("No wallet provider specified"); + return { signed: false, error: "No wallet provider specified" }; + } + + const provider = getWalletProvider(wallet.provider); + console.log("Got wallet provider:", provider); + + try { + console.log("Calling provider.signPSBT"); + const inputIndexToSign = inputsToSign.map((input) => ({ + index: input.index, + })); + console.log("input Index to Sign", inputIndexToSign); + const result = await provider.signPSBT( + psbtHex, + inputIndexToSign, + enableRBF, + sighashTypes, + autoBroadcast, + ); + console.log("PSBT signing result:", JSON.stringify(result, null, 2)); + + if (!result) { + return { signed: false, error: "No result from wallet provider" }; + } + + if (result.signed) { + return { + signed: true, + psbt: result.psbt, + txid: result.txid, + error: result.error, // Include error if any + }; + } else if (result.cancelled) { + return { signed: false, cancelled: true }; + } else { + // If result contains an error message, use it + if (result?.error) { + return { + signed: false, + error: result.error, + }; + } + return { + 
+        signed: false,
+        error: "Failed to sign PSBT",
+      };
+    }
+  } catch (error) {
+    console.error("Error in signPSBT:", error);
+    console.log("Error details:", JSON.stringify(error, null, 2));
+
+    // Use the handleWalletError function to process the error
+    return handleWalletError(error, wallet.provider || "unknown");
+  }
+};
+
+export const broadcastRawTX = async (wallet: Wallet, rawTx: string) => {
+  console.log("Broadcasting raw TX for wallet:", wallet.provider);
+  if (!wallet.provider) throw new Error("No wallet provider specified");
+  const provider = getWalletProvider(wallet.provider);
+  if (!provider.broadcastRawTX) {
+    throw new Error(
+      `${wallet.provider} does not support broadcasting raw transactions`,
+    );
+  }
+  return await provider.broadcastRawTX(rawTx);
+};
+
+export const broadcastPSBT = async (wallet: Wallet, psbtHex: string) => {
+  console.log("Broadcasting PSBT for wallet:", wallet.provider);
+  if (!wallet.provider) throw new Error("No wallet provider specified");
+  const provider = getWalletProvider(wallet.provider);
+  if (!provider.broadcastPSBT) {
+    throw new Error(`${wallet.provider} does not support broadcasting PSBT`);
+  }
+  return await provider.broadcastPSBT(psbtHex);
+};
diff --git a/components/BlockHeaderTable.tsx b/components/BlockHeaderTable.tsx
deleted file mode 100644
index 258c720c0..000000000
--- a/components/BlockHeaderTable.tsx
+++ /dev/null
@@ -1,63 +0,0 @@
-import dayjs from "$dayjs/";
-import relativeTime from "$dayjs/plugin/relativeTime";
-
-import { short_address } from "$lib/utils/util.ts";
-
-dayjs.extend(relativeTime);
-
-interface BlockHeaderTableProps {
-  block: {
-    block_info: BlockInfo;
-    issuances: StampRow[];
-    sends: SendRow[];
-  };
-}
-
-export default function BlockHeaderTable(props: BlockHeaderTableProps) {
-  const { block_info, issuances, sends } = props.block;
-
-  return (
-
- - - - - - - - - - - - - - - - - - - - - - - -
Block Index{block_info.block_index}Block Hash - {short_address(block_info.block_hash)} - Time - {dayjs(Number(block_info.block_time)).fromNow()} -
Ledger Hash - {block_info.ledger_hash - ? short_address(block_info.ledger_hash) - : "null"} - Txlist Hash - {block_info.txlist_hash - ? short_address(block_info.txlist_hash) - : "null"} - Messages Hash - {block_info.messages_hash - ? short_address(block_info.messages_hash) - : "null"} -
Bitcoin Stamps{issuances.length}
-
- ); -} diff --git a/components/BlockInfo.tsx b/components/BlockInfo.tsx deleted file mode 100644 index 0db95e630..000000000 --- a/components/BlockInfo.tsx +++ /dev/null @@ -1,19 +0,0 @@ -import BlockHeaderTable from "$/components/BlockHeaderTable.tsx"; -import BlockIssuancesTable from "$/components/BlockIssuancesTable.tsx"; -// import BlockSendsTable from "$/components/BlockSendsTable.tsx"; - -interface BlockInfoProps { - block: BlockInfo; -} - -export default function BlockInfo(props: BlockInfoProps) { - const { block } = props; - - return ( -
- - - {/* */} -
- ); -} diff --git a/components/BlockIssuancesTable.tsx b/components/BlockIssuancesTable.tsx deleted file mode 100644 index 30c6965cf..000000000 --- a/components/BlockIssuancesTable.tsx +++ /dev/null @@ -1,113 +0,0 @@ -import dayjs from "$dayjs/"; -import relativeTime from "$dayjs/plugin/relativeTime"; - -import Stamp from "$/components/Stamp.tsx"; -import { StampKind } from "$/components/StampKind.tsx"; - -import { get_suffix_from_mimetype, short_address } from "$lib/utils/util.ts"; - -dayjs.extend(relativeTime); - -interface BlockIssuancesTableProps { - block: { - block_info: BlockInfo; - issuances: StampRow[]; - sends: SendRow[]; - }; -} - -export default function BlockIssuancesTable(props: BlockIssuancesTableProps) { - const { block_info, issuances } = props.block; - - return ( -
- - - - - - - - - {/* */} - {/* */} - - {/* */} - - {/* */} - - - - {issuances.map((issuance: StampRow) => { - const kind = issuance.is_btc_stamp - ? "stamp" - : issuance.cpid.startsWith("A") - ? "cursed" - : "named"; - return ( - - - - - - - { - /* */ - } - { - /* */ - } - - - { - /* */ - } - - { - /* */ - } - - ); - })} - -
ImageStampKindcpidCreatorDivisibleLockedSupplyKeyburnTimestampis_reissuance
- - - - - - {issuance.stamp} - - - - - - {issuance.cpid} - - - - {issuance.creator_name ?? short_address(issuance.creator)} - - - {issuance.divisible ? "true" : "false"} - - {issuance.locked ? "true" : "false"} - - {issuance.supply - ? issuance.supply.toLocaleString(undefined, { - maximumFractionDigits: 0, - }) - : "N/A"} - - {issuance.keyburn ? "true" : "false"} - - {dayjs(Number(block_info.block_time)).fromNow()} - - {issuance.is_reissue ? "true" : "false"} -
-
- ); -} diff --git a/components/BlockSendsTable.tsx b/components/BlockSendsTable.tsx deleted file mode 100644 index 21dfc7b39..000000000 --- a/components/BlockSendsTable.tsx +++ /dev/null @@ -1,113 +0,0 @@ -import dayjs from "$dayjs/"; -import relativeTime from "$dayjs/plugin/relativeTime"; - -import { short_address } from "$lib/utils/util.ts"; -import Stamp from "$/components/Stamp.tsx"; -import { StampKind } from "$/components/StampKind.tsx"; -import { BlockInfo, SendRow, StampRow } from "globals"; -dayjs.extend(relativeTime); - -interface BlockSendsTableProps { - block: { - block_info: BlockInfo; - issuances: StampRow[]; - sends: SendRow[]; - }; -} - -export default function BlockSendsTable(props: BlockSendsTableProps) { - const { block_info, sends } = props.block; - - // this will likely be easier just looking at the dispenser when showing the dispense - // const fetchSatoshirate = async (txHash: string, source: string) => { - // try { - // const satoshirate = await fetchPricefromBlockCypherAPI(txHash, source); - // return satoshirate; - // } catch (error) { - // console.error("Error fetching satoshirate:", error); - // return null; - // } - // }; - - return ( -
- - - - - - - - - - - - - - - - - - - - - {sends.map((send: SendRow) => { - const kind = send.is_btc_stamp - ? "stamp" - : send.cpid.startsWith("A") - ? "cursed" - : "named"; - - // const satoshirate = await fetchSatoshirate( - // send.tx_hash, - // send.source, - // ); - - return ( - - - - - - - - - - - - - - - ); - })} - -
- Sends -
ImageStampKindFromToIDTickQtyUnit PriceMemoTx hashTx indexCreated
- - - - - {send.stamp} - - - - {send.source ? short_address(send.source) : "NULL"} - - {send.destination ? short_address(send.destination) : "NULL"} - - - {send.cpid} - - - {send.tick ? send.tick : "NULL"} - {send.quantity} - {send.satoshirate - ? `${send.satoshirate / 100000000} BTC` - : "0 BTC"} - {send.memo}{short_address(send.tx_hash)} - {dayjs(Number(block_info.block_time)).fromNow()} -
-
- ); -} diff --git a/components/BtcAddressInfo.tsx b/components/BtcAddressInfo.tsx deleted file mode 100644 index 00bc42b78..000000000 --- a/components/BtcAddressInfo.tsx +++ /dev/null @@ -1,42 +0,0 @@ -import { short_address } from "utils/util.ts"; - -interface BTCAddressInfoProps { - btc: { - address: string; - balance: number; - txCount: number; - unconfirmedBalance: number; - unconfirmedTxCount: number; - }; -} - -export default function BtcAddressInfo(props: BTCAddressInfoProps) { - const { btc } = props; - - return ( -
- - - - - - - - - - - - - - - - - -
Address{short_address(btc.address)}BTC Balance{btc.balance} BTC - Confirmed TXs - {btc.txCount}
Unconfirmed Balance{btc.unconfirmedBalance} - Unconfirmed TXs - {btc.unconfirmedTxCount}
-
-  );
-}
diff --git a/components/Button.tsx b/components/Button.tsx
deleted file mode 100644
index f1b80a0b0..000000000
--- a/components/Button.tsx
+++ /dev/null
@@ -1,12 +0,0 @@
-import { JSX } from "preact";
-import { IS_BROWSER } from "$fresh/runtime.ts";
-
-export function Button(props: JSX.HTMLAttributes<HTMLButtonElement>) {
-  return (
-