From 3fd176ff8a54e159292ca96c19ce62c2aff7938d Mon Sep 17 00:00:00 2001
From: vara-bonthu
Date: Sat, 28 Jun 2025 16:34:09 -0700
Subject: [PATCH 1/2] feat: Migrated to Taskfile

Signed-off-by: vara-bonthu
---
 .github/pull_request_template.md |   4 +-
 Makefile                         |  65 --------
 README.md                        |  98 +++++++++---
 Taskfile.yml                     | 250 +++++++++++++++++++++++++++++++
 uv.lock                          |  77 ++++++++++
 5 files changed, 409 insertions(+), 85 deletions(-)
 delete mode 100644 Makefile
 create mode 100644 Taskfile.yml

diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
index 11ad813..e4cca1c 100644
--- a/.github/pull_request_template.md
+++ b/.github/pull_request_template.md
@@ -14,7 +14,7 @@ Brief description of changes and motivation.

 ## 🧪 Testing

-- [ ] ✅ All existing tests pass (`uv run pytest`)
+- [ ] ✅ All existing tests pass (`task test`)
 - [ ] 🔬 Tested with MCP Inspector
 - [ ] 📊 Tested with sample Spark data
 - [ ] 🚀 Tested with real Spark History Server (if applicable)
@@ -22,7 +22,7 @@ Brief description of changes and motivation.

 ### 🔬 Test Commands Run
 ```bash
 # Example:
-# uv run pytest test_tools.py -v
+# task test
 # npx @modelcontextprotocol/inspector uv run main.py
 ```
diff --git a/Makefile b/Makefile
deleted file mode 100644
index a6b2a75..0000000
--- a/Makefile
+++ /dev/null
@@ -1,65 +0,0 @@
-.PHONY: install test lint format clean dev setup help
-
-# Default target
-help:
-	@echo "Available commands:"
-	@echo "  install - Install project dependencies"
-	@echo "  dev - Install development dependencies"
-	@echo "  test - Run tests"
-	@echo "  test-cov - Run tests with coverage"
-	@echo "  lint - Run linting (ruff + mypy)"
-	@echo "  format - Format code (black + ruff fix)"
-	@echo "  clean - Clean temporary files"
-	@echo "  setup - Setup development environment"
-	@echo "  start-spark - Start Spark History Server"
-	@echo "  start-mcp - Start MCP Server"
-	@echo "  start-inspector - Start MCP Inspector"
-
-install:
-	uv sync
-
-dev: install
-	uv sync --group dev
-
-test:
-	uv run pytest
-
-test-cov:
-	uv run pytest --cov=. --cov-report=html --cov-report=term-missing
-
-lint:
-	@echo "Running ruff..."
-	uv run ruff check .
-	@echo "Running mypy..."
-	uv run mypy *.py --ignore-missing-imports
-
-format:
-	@echo "Running black..."
-	uv run black .
-	@echo "Running ruff fix..."
-	uv run ruff check --fix .
-
-clean:
-	rm -rf .pytest_cache/ .coverage htmlcov/ dist/ build/
-	find . -type d -name __pycache__ -exec rm -rf {} + 2>/dev/null || true
-	find . -type f -name "*.pyc" -delete
-
-setup: dev
-	chmod +x *.sh
-
-# MCP Development shortcuts
-start-spark:
-	@echo "Starting Spark History Server..."
-	./start_local_spark_history.sh
-
-start-mcp:
-	@echo "Starting MCP Server..."
-	uv run main.py
-
-start-inspector:
-	@echo "Starting MCP Inspector..."
-	DANGEROUSLY_OMIT_AUTH=true npx @modelcontextprotocol/inspector
-
-# Validation
-validate: lint test
-	@echo "✅ All validations passed!"
diff --git a/README.md b/README.md
index ad81bc2..ba71eca 100644
--- a/README.md
+++ b/README.md
@@ -86,19 +86,42 @@ npx @modelcontextprotocol/inspector

 ## 🛠️ Available Tools

+### 📊 Application & Job Analysis
 | 🔧 Tool | 📝 Description |
 |---------|----------------|
-| `list_applications` | 📋 List Spark applications with filtering |
-| `get_application_details` | 📊 Get comprehensive application info |
-| `get_application_jobs` | 🔗 List jobs within an application |
-| `get_job_details` | 🔍 Get detailed job information |
-| `get_stage_details` | ⚡ Analyze stage-level metrics |
-| `get_task_details` | 🎯 Examine individual task performance |
-| `get_executor_summary` | 🖥️ Review executor utilization |
-| `compare_job_performance` | 📈 Compare multiple jobs |
-| `get_application_environment` | ⚙️ Review Spark configuration |
-| `get_storage_info` | 💾 Analyze RDD storage usage |
-| `get_sql_execution_details` | 🔎 Deep dive into SQL queries |
+| `get_application` | Get detailed information about a specific Spark application |
+| `get_jobs` | Get a list of all jobs for a Spark application |
+| `get_slowest_jobs` | Get the N slowest jobs for a Spark application |
+
+### ⚡ Stage & Task Analysis
+| 🔧 Tool | 📝 Description |
+|---------|----------------|
+| `get_stages` | Get a list of all stages for a Spark application |
+| `get_slowest_stages` | Get the N slowest stages for a Spark application |
+| `get_stage` | Get information about a specific stage |
+| `get_stage_task_summary` | Get task metrics summary for a specific stage |
+
+### 🖥️ Executor & Resource Analysis
+| 🔧 Tool | 📝 Description |
+|---------|----------------|
+| `get_executors` | Get executor information for an application |
+| `get_executor` | Get information about a specific executor |
+| `get_executor_summary` | Get aggregated metrics across all executors |
+| `get_resource_usage_timeline` | Get resource usage timeline for an application |
+
+### 🔍 SQL & Performance Analysis
+| 🔧 Tool | 📝 Description |
+|---------|----------------|
+| `get_slowest_sql_queries` | Get the top N slowest SQL queries for an application |
+| `get_job_bottlenecks` | Identify performance bottlenecks in a Spark job |
+| `get_environment` | Get comprehensive Spark runtime configuration |
+
+### 📈 Comparison Tools
+| 🔧 Tool | 📝 Description |
+|---------|----------------|
+| `compare_job_performance` | Compare performance metrics between two Spark jobs |
+| `compare_job_environments` | Compare Spark environment configurations between two jobs |
+| `compare_sql_execution_plans` | Compare SQL execution plans between two Spark jobs |

 ## 🚀 Production Deployment
@@ -122,17 +145,56 @@ helm install spark-history-mcp ./deploy/kubernetes/helm/spark-history-mcp/ \

 ## 🧪 Testing & Development

 ### 🔬 Local Development
+
+#### 📋 Prerequisites
+- Install [Task](https://taskfile.dev/installation/) for running development commands:
+  ```bash
+  # macOS
+  brew install go-task
+
+  # Other platforms - see https://taskfile.dev/installation/
+  ```
+
+*Note: uv is installed automatically when you run `task install`.*
+
+#### 🚀 Development Commands
+
+**Quick Setup:**
 ```bash
-# 🔥 Start local Spark History Server with sample data
-./start_local_spark_history.sh
+# 📦 Install dependencies and set up pre-commit hooks
+task install
+task pre-commit-install

-# ⚡ Start MCP server
-uv run main.py
+# 🚀 Start the services one at a time (each runs in the background)
+task start-spark-bg     # Start Spark
History Server
+task start-mcp-bg       # Start MCP Server
+task start-inspector-bg # Start MCP Inspector
+
+# 🌐 Then open http://localhost:6274 in your browser

-# 🌐 Test with MCP Inspector
-npx @modelcontextprotocol/inspector uv run main.py
+# 🛑 When done, stop all services
+task stop-all
 ```

+**Essential Commands:**
+```bash
+# 🧪 Run tests and checks
+task test       # Run pytest
+task lint       # Check code style
+task pre-commit # Run all pre-commit hooks
+task validate   # Run lint + tests
+
+# 🔧 Development utilities
+task format     # Auto-format code
+task clean      # Clean build artifacts
+```
+
+*For the complete command reference, see `Taskfile.yml`.*
+
 ### 📊 Sample Data
 The repository includes real Spark event logs for testing:
 - `spark-bcec39f6201b42b9925124595baad260` - ✅ Successful ETL job
@@ -215,7 +277,7 @@ For production AI agent integration, see [`examples/integrations/`](examples/int

 1. 🍴 Fork the repository
 2. 🌿 Create feature branch: `git checkout -b feature/new-tool`
 3. 🧪 Add tests for new functionality
-4. ✅ Run tests: `uv run pytest`
+4. ✅ Run tests: `task test`
 5. 📤 Submit pull request

 ## 📄 License
diff --git a/Taskfile.yml b/Taskfile.yml
new file mode 100644
index 0000000..34c2a49
--- /dev/null
+++ b/Taskfile.yml
@@ -0,0 +1,250 @@
+version: '3'
+
+output: group
+
+vars:
+  PROJECT_NAME: spark-history-server-mcp
+  PYTHON_VERSION: 3.12
+
+tasks:
+  default:
+    desc: Show available tasks
+    cmds:
+      - task --list
+
+  check-uv:
+    desc: Check if uv is installed and install if needed
+    cmds:
+      - |
+        if ! command -v uv &> /dev/null; then
+          echo "📦 Installing uv..."
+          curl -LsSf https://astral.sh/uv/install.sh | sh
+          export PATH="$HOME/.local/bin:$PATH"
+          echo "✅ uv installed! Please restart your terminal or run: export PATH=\"\$HOME/.local/bin:\$PATH\""
+        else
+          echo "✅ uv is already installed"
+        fi
+
+  install:
+    desc: Install project dependencies
+    deps: [check-uv]
+    cmds:
+      - uv sync --group dev
+      - echo "✅ Dependencies installed!"
+
+  lint:
+    desc: Run code linting with ruff
+    cmds:
+      - uv run ruff check .
+      - echo "✅ Linting completed!"
+
+  format:
+    desc: Format code with ruff
+    cmds:
+      - uv run ruff format .
+      - echo "✅ Code formatted!"
+
+  lint-fix:
+    desc: Run linting with auto-fix
+    cmds:
+      - uv run ruff check --fix .
+      - echo "✅ Linting issues fixed!"
+
+  type-check:
+    desc: Run type checking with mypy
+    cmds:
+      - uv run mypy *.py --ignore-missing-imports
+      - echo "✅ Type checking completed!"
+
+  test:
+    desc: Run tests with pytest
+    cmds:
+      - uv run pytest --cov=. --cov-report=term-missing
+      - echo "✅ Tests completed!"
+
+  test-verbose:
+    desc: Run tests with verbose output
+    cmds:
+      - uv run pytest -v --cov=. --cov-report=term-missing
+
+  security:
+    desc: Run security scan with bandit
+    cmds:
+      - uv run bandit -r . -f json -o bandit-report.json
+      - echo "✅ Security scan completed! Check bandit-report.json"
+
+  pre-commit:
+    desc: Run all pre-commit checks
+    cmds:
+      - uv run pre-commit run --all-files
+      - echo "✅ Pre-commit checks completed!"
+
+  pre-commit-install:
+    desc: Install pre-commit hooks
+    cmds:
+      - uv run pre-commit install
+      - echo "✅ Pre-commit hooks installed!"
+
+  clean:
+    desc: Clean build artifacts and cache
+    cmds:
+      - rm -rf __pycache__/
+      - rm -rf .pytest_cache/
+      - rm -rf .coverage
+      - rm -rf htmlcov/
+      - rm -rf .mypy_cache/
+      - rm -rf bandit-report.json
+      - find . -type f -name "*.pyc" -delete
+      - find . -type d -name "__pycache__" -exec rm -rf {} + 2>/dev/null || true
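+      # NB: the -exec rm -rf form above is used instead of find's -delete, which fails on non-empty directories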
+      - echo "✅ Cleanup completed!"
+
+  start-spark:
+    desc: Start local Spark History Server for testing
+    cmds:
+      - ./start_local_spark_history.sh
+      - echo "🔥 Spark History Server started!"
+
+  start-spark-bg:
+    desc: Start local Spark History Server in background
+    silent: true
+    cmds:
+      - |
+        echo "Starting Spark History Server in background..."
+        nohup ./start_local_spark_history.sh > spark-history.log 2>&1 &
+        sleep 3
+        echo "✅ Spark History Server started (logs: spark-history.log)"
+
+  start-mcp:
+    desc: Start MCP server
+    cmds:
+      - uv run main.py
+
+  start-mcp-bg:
+    desc: Start MCP server in background
+    silent: true
+    cmds:
+      - |
+        echo "Starting MCP server in background..."
+        nohup uv run main.py > mcp-server.log 2>&1 &
+        sleep 2
+        echo "✅ MCP server started (logs: mcp-server.log)"
+
+  start-inspector:
+    desc: Start MCP Inspector for testing
+    cmds:
+      - echo "Starting MCP Inspector at http://localhost:6274"
+      - echo "Press Ctrl+C to stop the inspector"
+      - DANGEROUSLY_OMIT_AUTH=true npx @modelcontextprotocol/inspector uv run main.py
+
+  start-inspector-bg:
+    desc: Start MCP Inspector in background
+    silent: true
+    cmds:
+      - |
+        echo "Starting MCP Inspector in background..."
+        nohup sh -c 'DANGEROUSLY_OMIT_AUTH=true npx @modelcontextprotocol/inspector uv run main.py' > inspector.log 2>&1 &
+        sleep 3
+        echo "✅ MCP Inspector started at http://localhost:6274"
+
+  dev-all:
+    desc: Start all development services and prepare for testing
+    silent: true
+    cmds:
+      - task: start-spark-bg
+      - task: start-mcp-bg
+      - sleep 1
+      - |
+        echo ""
+        echo "🎉 All services are running!"
+        echo ""
+        echo "🌐 Spark History Server: http://localhost:18080"
+        echo "🚀 MCP Server: http://localhost:18888"
+        echo ""
+        echo "Next steps:"
+        echo "  • Run: task start-inspector (foreground, press Ctrl+C to stop)"
+        echo "  • Or: task start-inspector-bg (background)"
+        echo "  • Then open: http://localhost:6274"
+        echo ""
+        echo "To stop all services: task stop-all"
+        echo ""
+
+  stop-all:
+    desc: Stop all background services
+    silent: true
+    cmds:
+      - |
+        echo "Stopping all services..."
+        pkill -f "start_local_spark_history.sh" || true
+        pkill -f "uv run main.py" || true
+        pkill -f "modelcontextprotocol/inspector" || true
+        docker stop spark-history-server 2>/dev/null || true
+        sleep 1
+        echo "✅ All services stopped!"
+
+  validate:
+    desc: Run all validation checks (lint, test)
+    deps: [lint, test]
+    cmds:
+      - echo "✅ All validations passed!"
+
+  ci:
+    desc: Run full CI pipeline locally
+    deps: [lint, test, security]
+    cmds:
+      - echo "✅ CI pipeline completed successfully!"
+
+  dev-setup:
+    desc: Complete development environment setup
+    cmds:
+      - task: install
+      - task: pre-commit-install
+      - echo "🎉 Development environment ready!"
+
+  docker-build:
+    desc: Build Docker image
+    cmds:
+      - docker build -t {{.PROJECT_NAME}}:latest .
+      - echo "🐳 Docker image built!"
+
+  docker-run:
+    desc: Run Docker container
+    cmds:
+      - docker run -p 18888:18888 {{.PROJECT_NAME}}:latest
+      - echo "🐳 Docker container started!"
+
+  helm-install:
+    desc: Install with Helm (local)
+    cmds:
+      - helm install {{.PROJECT_NAME}} ./deploy/kubernetes/helm/{{.PROJECT_NAME}}/
+      - echo "⚓ Helm installation completed!"
+
+  helm-upgrade:
+    desc: Upgrade Helm release
+    cmds:
+      - helm upgrade {{.PROJECT_NAME}} ./deploy/kubernetes/helm/{{.PROJECT_NAME}}/
+      - echo "⚓ Helm upgrade completed!"
+
+  helm-uninstall:
+    desc: Uninstall Helm release
+    cmds:
+      - helm uninstall {{.PROJECT_NAME}}
+      - echo "⚓ Helm uninstallation completed!"
+
+  docs-serve:
+    desc: Serve documentation locally (if using mkdocs)
+    cmds:
+      - echo "📚 Documentation server would start here"
+      - echo "💡 Consider adding mkdocs for documentation"
+
+  benchmark:
+    desc: Run performance benchmarks
+    cmds:
+      - echo "🏃 Running performance benchmarks..."
+      - echo "💡 Add specific benchmark commands here"
+
+  release-check:
+    desc: Pre-release validation
+    cmds:
+      - task: clean
+      - task: install
+      - task: ci
+      - echo "🚀 Release validation completed!"
+      - echo "✅ Ready for release!"
diff --git a/uv.lock b/uv.lock
index 1b03fa3..4f404d3 100644
--- a/uv.lock
+++ b/uv.lock
@@ -58,6 +58,15 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/84/ae/320161bd181fc06471eed047ecce67b693fd7515b16d495d8932db763426/certifi-2025.6.15-py3-none-any.whl", hash = "sha256:2e0c7ce7cb5d8f8634ca55d2ba7e6ec2689a2fd6537d8dec1296a477a4910057", size = 157650, upload-time = "2025-06-15T02:45:49.977Z" },
 ]

+[[package]]
+name = "cfgv"
+version = "3.4.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114, upload-time = "2023-08-12T20:38:17.776Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249, upload-time = "2023-08-12T20:38:16.269Z" },
+]
+
 [[package]]
 name = "charset-normalizer"
 version = "3.4.2"
@@ -156,6 +165,24 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/08/b8/7ddd1e8ba9701dea08ce22029917140e6f66a859427406579fd8d0ca7274/coverage-7.9.1-py3-none-any.whl", hash = "sha256:66b974b145aa189516b6bf2d8423e888b742517d37872f6ee4c5be0073bd9a3c", size = 204000, upload-time = "2025-06-13T13:02:27.173Z" },
 ]

+[[package]]
+name = "distlib"
+version = "0.3.9"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/0d/dd/1bec4c5ddb504ca60fc29472f3d27e8d4da1257a854e1d96742f15c1d02d/distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403", size = 613923, upload-time = "2024-10-09T18:35:47.551Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/91/a1/cf2472db20f7ce4a6be1253a81cfdf85ad9c7885ffbed7047fb72c24cf87/distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87", size = 468973, upload-time = "2024-10-09T18:35:44.272Z" },
+]
+
+[[package]]
+name = "filelock"
+version = "3.18.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/0a/10/c23352565a6544bdc5353e0b15fc1c563352101f30e24bf500207a54df9a/filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2", size = 18075, upload-time = "2025-03-14T07:11:40.47Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/4d/36/2a115987e2d8c300a974597416d9de88f2444426de9571f4b59b2cca3acc/filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de", size = 16215, upload-time = "2025-03-14T07:11:39.145Z" },
+]
+
 [[package]]
 name = "h11"
 version = "0.16.0"
@@ -202,6 +229,15 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/e1/9b/a181f281f65d776426002f330c31849b86b31fc9d848db62e16f03ff739f/httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f", size = 7819, upload-time = "2023-12-22T08:01:19.89Z" },
 ]

+[[package]]
+name = "identify"
+version = "2.6.12"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a2/88/d193a27416618628a5eea64e3223acd800b40749a96ffb322a9b55a49ed1/identify-2.6.12.tar.gz", hash = "sha256:d8de45749f1efb108badef65ee8386f0f7bb19a7f26185f74de6367bffbaf0e6", size = 99254, upload-time = "2025-05-23T20:37:53.3Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/7a/cd/18f8da995b658420625f7ef13f037be53ae04ec5ad33f9b718240dcfd48c/identify-2.6.12-py2.py3-none-any.whl", hash = "sha256:ad9672d5a72e0d2ff7c5c8809b62dfa60458626352fb0eb7b55e69bdc45334a2", size = 99145, upload-time = "2025-05-23T20:37:51.495Z" },
+]
+
 [[package]]
 name = "idna"
 version = "3.10"
@@ -302,6 +338,15 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" },
 ]

+[[package]]
+name = "nodeenv"
+version = "1.9.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload-time = "2024-06-04T18:44:11.171Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" },
+]
+
 [[package]]
 name = "packaging"
 version = "25.0"
@@ -338,6 +383,22 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
 ]

+[[package]]
+name = "pre-commit"
+version = "4.2.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "cfgv" },
+    { name = "identify" },
+    { name = "nodeenv" },
+    { name = "pyyaml" },
+    { name = "virtualenv" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/08/39/679ca9b26c7bb2999ff122d50faa301e49af82ca9c066ec061cfbc0c6784/pre_commit-4.2.0.tar.gz", hash = "sha256:601283b9757afd87d40c4c4a9b2b5de9637a8ea02eaff7adc2d0fb4e04841146", size = 193424, upload-time = "2025-03-18T21:35:20.987Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/88/74/a88bf1b1efeae488a0c0b7bdf71429c313722d1fc0f377537fbe554e6180/pre_commit-4.2.0-py2.py3-none-any.whl", hash = "sha256:a009ca7205f1eb497d10b845e52c838a98b6cdd2102a6c8e4540e94ee75c58bd", size = 220707, upload-time = "2025-03-18T21:35:19.343Z" },
+]
+
 [[package]]
 name = "pydantic"
 version = "2.11.7"
@@ -578,6 +639,7 @@ dependencies = [
 dev = [
     { name = "black" },
     { name = "mypy" },
+    { name = "pre-commit" },
     { name = "pytest" },
     { name = "pytest-cov" },
= "pytest-cov" }, { name = "ruff" }, @@ -595,6 +657,7 @@ requires-dist = [ dev = [ { name = "black", specifier = ">=23.0.0" }, { name = "mypy", specifier = ">=1.7.0" }, + { name = "pre-commit", specifier = ">=3.0.0" }, { name = "pytest", specifier = ">=8.4.1" }, { name = "pytest-cov", specifier = ">=4.0.0" }, { name = "ruff", specifier = ">=0.1.0" }, @@ -681,3 +744,17 @@ sdist = { url = "https://files.pythonhosted.org/packages/de/ad/713be230bcda622ea wheels = [ { url = "https://files.pythonhosted.org/packages/6d/0d/8adfeaa62945f90d19ddc461c55f4a50c258af7662d34b6a3d5d1f8646f6/uvicorn-0.34.3-py3-none-any.whl", hash = "sha256:16246631db62bdfbf069b0645177d6e8a77ba950cfedbfd093acef9444e4d885", size = 62431, upload-time = "2025-06-01T07:48:15.664Z" }, ] + +[[package]] +name = "virtualenv" +version = "20.31.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "distlib" }, + { name = "filelock" }, + { name = "platformdirs" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/56/2c/444f465fb2c65f40c3a104fd0c495184c4f2336d65baf398e3c75d72ea94/virtualenv-20.31.2.tar.gz", hash = "sha256:e10c0a9d02835e592521be48b332b6caee6887f332c111aa79a09b9e79efc2af", size = 6076316, upload-time = "2025-05-08T17:58:23.811Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f3/40/b1c265d4b2b62b58576588510fc4d1fe60a86319c8de99fd8e9fec617d2c/virtualenv-20.31.2-py3-none-any.whl", hash = "sha256:36efd0d9650ee985f0cad72065001e66d49a6f24eb44d98980f630686243cf11", size = 6057982, upload-time = "2025-05-08T17:58:21.15Z" }, +] From 6e67cca86c7e24af3c3974928ad994cfd9a4d557 Mon Sep 17 00:00:00 2001 From: vara-bonthu Date: Sat, 28 Jun 2025 16:49:10 -0700 Subject: [PATCH 2/2] fix: removed brokenlink in readme Signed-off-by: vara-bonthu --- README.md | 3 --- 1 file changed, 3 deletions(-) diff --git a/README.md b/README.md index ba71eca..6401fbb 100644 --- a/README.md +++ b/README.md @@ -80,9 +80,6 @@ npx @modelcontextprotocol/inspector ### โšก Job Performance Comparison ![Job Comparison](screenshots/job-compare.png) -*Compare performance metrics between different Spark jobs* -![alt text](job-compare.png) - ## ๐Ÿ› ๏ธ Available Tools