diff --git a/.env.template b/.env.template new file mode 100644 index 00000000..7e1d7ed0 --- /dev/null +++ b/.env.template @@ -0,0 +1,4 @@ +# API Keys for LLM Providers +# Copy this file to .env and fill in the values. +GOOGLE_GEMINI_API_KEY=your_api_key_here +OPENAI_API_KEY=your_api_key_here diff --git a/.github/actions/setup-python-env/action.yml b/.github/actions/setup-python-env/action.yml new file mode 100644 index 00000000..3dd616c1 --- /dev/null +++ b/.github/actions/setup-python-env/action.yml @@ -0,0 +1,45 @@ +name: 'Setup Python Environment' +description: 'Checks out code, sets up Python, caches dependencies, and installs them.' +inputs: + python-version: + description: 'The Python version to use.' + required: true + default: '3.11' + install-dev-reqs: + description: 'Whether to install requirements-dev.txt' + required: false + default: 'true' + install-docs-reqs: + description: 'Whether to install requirements-docs.txt' + required: false + default: 'false' +runs: + using: 'composite' + steps: + - uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: ${{ inputs.python-version }} + - name: Cache pip + uses: actions/cache@v4 + with: + path: ~/.cache/pip + # Include all requirements files in the cache key + key: ${{ runner.os }}-pip-${{ inputs.python-version }}-${{ hashFiles('**/requirements*.txt') }} + restore-keys: | + ${{ runner.os }}-pip-${{ inputs.python-version }}- + ${{ runner.os }}-pip- + - name: Install dependencies + shell: bash + run: | + python -m pip install --upgrade pip + if [ -f requirements.txt ]; then + pip install -r requirements.txt + fi + if [ "${{ inputs.install-dev-reqs }}" == "true" ] && [ -f requirements-dev.txt ]; then + pip install -r requirements-dev.txt + fi + if [ "${{ inputs.install-docs-reqs }}" == "true" ] && [ -f requirements-docs.txt ]; then + pip install -r requirements-docs.txt + fi diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md index 
37db506c..7a5c7f18 100644 --- a/.github/copilot-instructions.md +++ b/.github/copilot-instructions.md @@ -197,8 +197,10 @@ setup.py โ†’ Python package setup (currently empty) 1. **Install dependencies**: `pip install pytest pytest-cov mypy pylint` 2. **Set Python path**: `export PYTHONPATH=.` or prefix commands with `PYTHONPATH=.` 3. **Test before changing**: `PYTHONPATH=. python -m pytest test/ -v` to validate current state -4. **Check module imports**: Ensure new Python modules have proper `__init__.py` files -5. **Follow branch naming**: Use `dev//` pattern for feature branches +4. **Configure the agent**: Edit `config/model_config.yaml` to configure the agent before running it. +5. **Check module imports**: Ensure new Python modules have proper `__init__.py` files +6. **Follow branch naming**: Use `dev//` pattern for feature branches +7. **Fill out the PR template**: Ensure the PR template at `.github/PULL_REQUEST_TEMPLATE.md` is filled out before submitting a new PR. **NEVER do the following:** - Run tests without setting PYTHONPATH diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index bc295c08..4eb68f5c 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -46,7 +46,7 @@ jobs: - language: actions build-mode: none - language: c-cpp - build-mode: manual + build-mode: none - language: javascript-typescript build-mode: none - language: python diff --git a/.github/workflows/dependency-review.yml b/.github/workflows/dependency-review.yml new file mode 100644 index 00000000..b9d6d20f --- /dev/null +++ b/.github/workflows/dependency-review.yml @@ -0,0 +1,14 @@ +name: 'Dependency Review' +on: [pull_request] + +permissions: + contents: read + +jobs: + dependency-review: + runs-on: ubuntu-latest + steps: + - name: 'Checkout Repository' + uses: actions/checkout@v4 + - name: 'Dependency Review' + uses: actions/dependency-review-action@v4 diff --git a/.github/workflows/docker-scan.yml b/.github/workflows/docker-scan.yml 
new file mode 100644 index 00000000..168053bb --- /dev/null +++ b/.github/workflows/docker-scan.yml @@ -0,0 +1,30 @@ +name: 'Docker Image Scan' +on: + push: + branches: [ main ] + pull_request: + +permissions: + contents: read + +jobs: + build-and-scan: + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Build an image from Dockerfile + id: build-image + run: | + docker build -t sdlc-core-scan:latest . + + - name: Run Trivy vulnerability scanner + uses: aquasecurity/trivy-action@master + with: + image-ref: 'sdlc-core-scan:latest' + format: 'table' + exit-code: '0' + ignore-unfixed: true + vuln-type: 'os,library' + severity: 'CRITICAL,HIGH' diff --git a/.github/workflows/gosec.yml b/.github/workflows/gosec.yml index e9dd9462..7b54b914 100644 --- a/.github/workflows/gosec.yml +++ b/.github/workflows/gosec.yml @@ -1,16 +1,12 @@ # GoSec Security Checker # This workflow runs gosec to check Go code for security issues +# It is currently disabled from running automatically. 
name: GoSec Security Checker permissions: contents: read on: - push: - paths: - - '**.go' - pull_request: - paths: - - '**.go' + workflow_dispatch: jobs: gosec: diff --git a/.github/workflows/prompt-evaluation.yml b/.github/workflows/prompt-evaluation.yml new file mode 100644 index 00000000..c45c33bb --- /dev/null +++ b/.github/workflows/prompt-evaluation.yml @@ -0,0 +1,46 @@ +permissions: + contents: read +name: 'Prompt Evaluation' + +on: + workflow_dispatch: + inputs: + prompt_file: + description: 'Path to the prompt file (e.g., data/prompts/default.yaml)' + required: true + default: 'data/prompts/default.yaml' + provider: + description: 'LLM provider to use (gemini, openai, ollama)' + required: true + default: 'gemini' + model: + description: 'Model name to use' + required: false + +jobs: + evaluate: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Setup Python Environment + uses: ./.github/actions/setup-python-env + with: + python-version: '3.11' + + - name: Run prompt evaluation + env: + GOOGLE_GEMINI_API_KEY: ${{ secrets.GOOGLE_GEMINI_API_KEY }} + OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} + PYTHONPATH: . 
+ run: | + python scripts/evaluate-prompt.py \ + --prompt-file ${{ github.event.inputs.prompt_file }} \ + --provider ${{ github.event.inputs.provider }} \ + ${{ github.event.inputs.model != '' && format('--model {0}', github.event.inputs.model) || '' }} \ + --output-file prompt-output.txt + + - name: Upload prompt output + uses: actions/upload-artifact@v4 + with: + name: prompt-output-${{ github.event.inputs.provider }}-${{ github.event.inputs.model || 'default' }} + path: prompt-output.txt diff --git a/.github/workflows/pylint.yml b/.github/workflows/pylint.yml index 0a6b4a79..26893806 100644 --- a/.github/workflows/pylint.yml +++ b/.github/workflows/pylint.yml @@ -2,7 +2,8 @@ permissions: contents: read name: Pylint -on: [push] +on: + pull_request: jobs: build: @@ -11,15 +12,11 @@ jobs: matrix: python-version: ["3.10", "3.11", "3.12" ] steps: - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v3 + - name: Setup Python Environment + uses: ./.github/actions/setup-python-env with: python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install pylint - name: Analysing the code with pylint run: | pylint $(git ls-files '*.py') diff --git a/.github/workflows/python-docs.yml b/.github/workflows/python-docs.yml index 5a3fd91b..0db84aa8 100644 --- a/.github/workflows/python-docs.yml +++ b/.github/workflows/python-docs.yml @@ -16,15 +16,13 @@ jobs: build-docs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v5 + - name: Setup Python Environment + uses: ./.github/actions/setup-python-env with: python-version: '3.11' - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install sphinx sphinx-autodoc-typehints + install-dev-reqs: 'false' + install-docs-reqs: 'true' - name: Generate Sphinx docs run: | sphinx-apidoc -o docs/ src/ diff --git a/.github/workflows/python-style.yml 
b/.github/workflows/python-style.yml index d1c6fc7f..7cb4d0ee 100644 --- a/.github/workflows/python-style.yml +++ b/.github/workflows/python-style.yml @@ -16,12 +16,9 @@ jobs: flake8-lint: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v5 + - name: Setup Python Environment + uses: ./.github/actions/setup-python-env with: python-version: '3.11' - - name: Install flake8 - run: pip install flake8 - name: Run flake8 run: flake8 src/ --count --select=E9,F63,F7,F82 --show-source --statistics diff --git a/.github/workflows/python-test-static.yml b/.github/workflows/python-test-static.yml deleted file mode 100644 index be371031..00000000 --- a/.github/workflows/python-test-static.yml +++ /dev/null @@ -1,7 +0,0 @@ -# Python Unit Tests & Static Analysis -# This workflow runs unit tests and static code analysis -name: Python Tests and Static Analysis (legacy) - -# This workflow has been consolidated into .github/workflows/python-test.yml. -# Left here for documentation/history. The consolidated workflow runs static analysis -# and a test matrix plus a focused deepagent test stage. 
diff --git a/.github/workflows/python-test.yml b/.github/workflows/python-test.yml index 03cc32bf..e39d9878 100644 --- a/.github/workflows/python-test.yml +++ b/.github/workflows/python-test.yml @@ -20,26 +20,13 @@ jobs: name: Static analysis & unit tests (one python) runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v5 + - name: Setup Python Environment + uses: ./.github/actions/setup-python-env with: python-version: '3.11' - - name: Cache pip - uses: actions/cache@v4 - with: - path: ~/.cache/pip - key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} - restore-keys: | - ${{ runner.os }}-pip- - - name: Install dependencies for static - run: | - python -m pip install --upgrade pip - pip install -r requirements.txt - pip install pytest pytest-cov mypy - name: Run ruff (lint) run: | - python -m pip install ruff python -m ruff check src/ - name: Run unit tests with coverage run: | @@ -64,25 +51,11 @@ jobs: matrix: python-version: [3.11, 3.12] steps: - uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v4 + - name: Setup Python Environment + uses: ./.github/actions/setup-python-env with: python-version: ${{ matrix.python-version }} - - name: Cache pip - uses: actions/cache@v4 - with: - path: ~/.cache/pip - key: ${{ runner.os }}-pip-${{ matrix.python-version }}-${{ hashFiles('**/requirements.txt') }} - restore-keys: | - ${{ runner.os }}-pip- - - name: Install dependencies - run: | - python -m pip install --upgrade pip - python -m venv .venv_ci - . .venv_ci/bin/activate - pip install --upgrade pip setuptools wheel - pip install -r requirements.txt - name: Run tests env: PYTHONPATH: . 
@@ -94,25 +65,10 @@ jobs: runs-on: ubuntu-latest needs: tests steps: - - uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v4 + - name: Setup Python Environment + uses: ./.github/actions/setup-python-env with: - python-version: 3.12 - - name: Cache pip - uses: actions/cache@v4 - with: - path: ~/.cache/pip - key: ${{ runner.os }}-pip-3.12-${{ hashFiles('**/requirements.txt') }} - restore-keys: | - ${{ runner.os }}-pip- - - name: Install test deps only - run: | - python -m pip install --upgrade pip - python -m venv .venv_ci - . .venv_ci/bin/activate - pip install --upgrade pip setuptools wheel - pip install pytest python-dotenv + python-version: '3.12' - name: Run deepagent unit tests env: PYTHONPATH: . @@ -124,25 +80,14 @@ jobs: runs-on: ubuntu-latest if: github.event_name == 'workflow_dispatch' steps: - - uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v4 + - name: Setup Python Environment + uses: ./.github/actions/setup-python-env with: - python-version: 3.12 + python-version: '3.12' + install-dev-reqs: 'false' # python-dotenv is in dev-reqs, but we want to install it manually - name: Install provider packages run: | - python -m pip install --upgrade pip - python -m venv .venv_ci - . .venv_ci/bin/activate - pip install --upgrade pip setuptools wheel pip install langchain-google-genai langchain-community langchain-ollama python-dotenv - - name: Cache pip for provider-smoke - uses: actions/cache@v4 - with: - path: ~/.cache/pip - key: ${{ runner.os }}-pip-provider-smoke-${{ hashFiles('**/requirements.txt') }} - restore-keys: | - ${{ runner.os }}-pip- - name: Quick deepagent smoke (dry-run disabled) env: PYTHONPATH: . 
@@ -157,22 +102,13 @@ jobs: matrix: provider: [gemini, openai, ollama] steps: - - uses: actions/checkout@v4 - - name: Cache pip - uses: actions/cache@v4 - with: - path: ~/.cache/pip - key: ${{ runner.os }}-pip-providers-${{ matrix.provider }}-${{ hashFiles('**/requirements.txt') }} - restore-keys: | - ${{ runner.os }}-pip- - - name: Set up Python - uses: actions/setup-python@v4 + - name: Setup Python Environment + uses: ./.github/actions/setup-python-env with: - python-version: 3.12 + python-version: '3.12' + install-dev-reqs: 'false' - name: Install provider packages run: | - python -m pip install --upgrade pip - pip install -r requirements.txt pip install langchain-google-genai langchain-community langchain-ollama - name: Run provider smoke for matrix provider env: diff --git a/.github/workflows/python-tests.yml b/.github/workflows/python-tests.yml deleted file mode 100644 index 3db51850..00000000 --- a/.github/workflows/python-tests.yml +++ /dev/null @@ -1,33 +0,0 @@ -name: Python Tests - -on: - push: - branches: [ main ] - pull_request: - branches: [ main ] - -permissions: - contents: read - -jobs: - tests: - runs-on: ubuntu-latest - strategy: - matrix: - python-version: [3.12] - - steps: - - uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install -r requirements.txt - - name: Run tests - env: - PYTHONPATH: . 
- run: | - python -m pytest -q diff --git a/.github/workflows/spelling2.yml b/.github/workflows/spelling2.yml index 1d1434e4..535a5251 100644 --- a/.github/workflows/spelling2.yml +++ b/.github/workflows/spelling2.yml @@ -98,12 +98,12 @@ jobs: suppress_push_for_open_pull_request: ${{ github.actor != 'dependabot[bot]' && 1 }} checkout: true check_file_names: 1 - spell_check_this: microsoft/terminal@main + spell_check_this: ${{ github.repository }}@${{ github.ref }} post_comment: 0 use_magic_file: 1 report-timing: 1 warnings: bad-regex,binary-file,deprecated-feature,ignored-expect-variant,large-file,limited-references,no-newline-at-eof,noisy-file,non-alpha-in-dictionary,token-is-substring,unexpected-line-ending,whitespace-in-dictionary,minified-file,unsupported-configuration,no-files-to-check,unclosed-block-ignore-begin,unclosed-block-ignore-end - experimental_apply_changes_via_bot: ${{ github.repository_owner != 'microsoft' && 1 }} + experimental_apply_changes_via_bot: ${{ github.repository_owner != 'SoftwareDevLabs' && 1 }} use_sarif: 1 check_extra_dictionaries: "" dictionary_source_prefixes: > @@ -161,7 +161,7 @@ jobs: uses: check-spelling/check-spelling@v0.0.25 with: checkout: true - spell_check_this: microsoft/terminal@main + spell_check_this: ${{ github.repository }}@${{ github.ref }} task: ${{ needs.spelling.outputs.followup }} comment-pr: @@ -179,9 +179,9 @@ jobs: uses: check-spelling/check-spelling@v0.0.25 with: checkout: true - spell_check_this: microsoft/terminal@main + spell_check_this: ${{ github.repository }}@${{ github.ref }} task: ${{ needs.spelling.outputs.followup }} - experimental_apply_changes_via_bot: ${{ github.repository_owner != 'microsoft' && 1 }} + experimental_apply_changes_via_bot: ${{ github.repository_owner != 'SoftwareDevLabs' && 1 }} update: name: Update PR @@ -191,7 +191,7 @@ jobs: actions: read runs-on: ubuntu-latest if: ${{ - github.repository_owner != 'microsoft' && + github.repository_owner != 'SoftwareDevLabs' && 
github.event_name == 'issue_comment' && github.event.issue.pull_request && contains(github.event.comment.body, '@check-spelling-bot apply') && diff --git a/.github/workflows/super-linter.yml b/.github/workflows/super-linter.yml index 58b9bd0f..eab38cd4 100644 --- a/.github/workflows/super-linter.yml +++ b/.github/workflows/super-linter.yml @@ -32,8 +32,9 @@ jobs: env: # To report GitHub Actions status checks GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + CREATE_LOG_FILE: 'true' - name: Upload Super-Linter report uses: actions/upload-artifact@v4 with: name: super-linter-report - path: super-linter.report + path: super-linter.log diff --git a/AGENTS.md b/AGENTS.md new file mode 100644 index 00000000..ff75ed63 --- /dev/null +++ b/AGENTS.md @@ -0,0 +1,108 @@ +# Instructions for AI Agents + +This document provides instructions and guidelines for AI agents working with the SDLC_core repository. + +## Repository Overview + +SDLC_core is a Python-based Software Development Life Cycle core project that provides AI/ML capabilities for software development workflows. The repository contains modules for LLM clients, intelligent agents, memory management, prompt engineering, document retrieval, skill execution, and various utilities. + +- **Primary Language**: Python 3.10-3.12 +- **Secondary Languages**: TypeScript (for Azure pipelines), Shell scripts +- **Project Type**: AI/ML library and tooling for SDLC workflows + +## Environment Setup + +### 1. Install Dependencies + +**IMPORTANT**: The project's dependencies are split into multiple files. For development and testing, you should install the dependencies from `requirements-dev.txt`. + +```bash +pip install -r requirements-dev.txt +``` + +### 2. Set Python Path + +You must set the `PYTHONPATH` to the root of the repository for imports to work correctly. + +```bash +export PYTHONPATH=. +``` + +Alternatively, you can prefix your commands with `PYTHONPATH=.`: + +```bash +PYTHONPATH=. 
python -m pytest +``` + +## Building and Testing + +### Testing + +The test infrastructure is set up. Use the following commands to run tests: + +```bash +# Run all tests +PYTHONPATH=. python -m pytest test/ -v + +# Run tests with coverage +PYTHONPATH=. python -m pytest test/ --cov=src/ --cov-report=xml + +# Run specific test suites +PYTHONPATH=. python -m pytest test/unit/ -v +PYTHONPATH=. python -m pytest test/integration/ -v +PYTHONPATH=. python -m pytest test/e2e/ -v +``` + +### Linting and Static Analysis + +```bash +# Run pylint +python -m pylint src/ --exit-zero + +# Run mypy +python -m mypy src/ --ignore-missing-imports --exclude="src/llm/router.py" +``` + +**Note on mypy:** The exclusion for `src/llm/router.py` is necessary to avoid conflicts with `src/fallback/router.py`. + +## Project Architecture + +The core logic is in the `src/` directory, which is organized into the following modules: + +- `src/agents/`: Agent classes (planner, executor, base agent) +- `src/memory/`: Short-term and long-term memory modules +- `src/pipelines/`: Chat flows, document processing, task routing +- `src/retrieval/`: Vector search and document lookup +- `src/skills/`: Web search, code execution capabilities +- `src/vision_audio/`: Multimodal processing (image/audio) +- `src/prompt_engineering/`: Template management, few-shot, chaining +- `src/llm/`: OpenAI, Anthropic, custom LLM routing +- `src/fallback/`: Recovery logic when LLMs fail +- `src/guardrails/`: PII filters, output validation, safety +- `src/handlers/`: Input/output processing, error management +- `src/utils/`: Logging, caching, rate limiting, tokens + +Other important directories: +- `config/`: YAML configurations for models, prompts, logging +- `data/`: Prompts, embeddings, dynamic content +- `examples/`: Minimal scripts demonstrating key features +- `test/`: Unit, integration, smoke, and e2e tests + +## Key Development Rules + +### ALWAYS: + +1. **Install dependencies** before making changes. +2. 
**Set the `PYTHONPATH`** for all commands. 3. **Run tests** (`PYTHONPATH=. python -m pytest test/ -v`) to validate the current state before making changes. 4. **Configure the agent** by editing `config/model_config.yaml` before running it. 5. **Ensure new Python modules** have proper `__init__.py` files. 6. **Follow the branch naming convention**: `dev//`. 7. **Fill out the PR template** when submitting a pull request. The template is located at `.github/PULL_REQUEST_TEMPLATE.md`. + +### NEVER: + +- Run tests without setting `PYTHONPATH`. +- Assume `requirements.txt` contains dependencies. +- Create modules named "router" (conflicts with existing router.py files). +- Modify Azure pipeline scripts (`build/azure-pipelines/`) without TypeScript knowledge. diff --git a/Dockerfile b/Dockerfile index e69de29b..923f72a4 100644 --- a/Dockerfile +++ b/Dockerfile @@ -0,0 +1,29 @@ +# Use an official Python runtime as a parent image +FROM python:3.11-slim +# Set the working directory in the container +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + build-essential \ + libpq-dev \ + && rm -rf /var/lib/apt/lists/* + +# Copy the requirements file into the container +COPY requirements.txt . + +# Install any needed packages specified in requirements.txt +RUN pip install --no-cache-dir -r requirements.txt + +# Copy the rest of the application's code into the container +COPY . . + +# Set the python path +ENV PYTHONPATH=. 
+ +# Expose the port the app runs on +EXPOSE 8000 + +# Define the command to run the application +CMD ["uvicorn", "src.app:app", "--host", "0.0.0.0", "--port", "8000"] diff --git a/README.md b/README.md index a8625f61..6882b6e0 100644 --- a/README.md +++ b/README.md @@ -7,86 +7,60 @@ - [Installing and running Windows Terminal](#installing-and-running-windows-terminal) - [Module Roadmap](#SDLC_core-roadmap) -- [SDLC_core Overview](#terminal--console-overview) - - [Key Component 1](#key-component-1) - - [Key Component 2](#key-component-2) - - [Shared Components](#shared-components) +- [SDLC_core Overview](#sdlc_core-overview) - [Resources](#resources) - [FAQ](#faq) - [Documentation](#documentation) - [Contributing](#contributing) - [Communicating with the Team](#communicating-with-the-team) - [Developer Guidance](#developer-guidance) -- [Prerequisites](#prerequisites) -- [Building the Code](#building-the-code) -- [Running \& Debugging](#running--debugging) - - [Coding Guidance](#coding-guidance) - [Code of Conduct](#code-of-conduct)
-This repository contains the source code for: - -* [SDLC_core](add link) -* Components shared between the two projects -* -- +This repository contains the source code for the SDLC_core project, a Python-based framework for building AI-powered software development life cycle tools. Related repositories include: -* [SDLC_core Documentation](add link) - ([Repo: Contribute to the docs](https://github.com/SoftwareDevLabs)) -* -- - -## Installing and running Windows Terminal - -> [!NOTE] -> Add the detials here +* [SDLC_core Documentation](https://github.com/SoftwareDevLabs) (Placeholder) ## SDLC_core Roadmap The plan for the SDLC_core [is described here](./doc/roadmap-20xx.md) and will be updated as the project proceeds. -## SDLC_core Overview - -Please take a few minutes to review the overview below before diving into the -code: - -### Key Component 1 - - - -### Key Component 2 - - - -### Shared Components +## Installing and running Windows Terminal +> [!NOTE] +> This section is a placeholder and may not be relevant to this project. +## SDLC_core Overview +SDLC_core is a Python-based Software Development Life Cycle core project that provides AI/ML capabilities for software development workflows. The repository contains modules for LLM clients, intelligent agents, memory management, prompt engineering, document retrieval, skill execution, and various utilities. It combines a Python core with TypeScript for Azure DevOps pipeline configurations. ## Resources -For more information about SDLC_core, you may find some of these -resources useful and interesting: +> [!NOTE] +> This section is a placeholder. Please add relevant links. * [Link 1](add link) * [Link 2](add link) - ## FAQ +> [!NOTE] +> This section is a placeholder. Please add frequently asked questions. + ### Q1 ### Q2 -### ... ## Documentation All project documentation is located at [softwaremodule-docs](./doc/). 
If you would like to contribute to the documentation, please submit a pull request on the [SDLC_core -Documentation](https://github.com/SoftwareDevLabs). +Documentation](https://github.com/SoftwareDevLabs) repository. --- @@ -120,11 +94,15 @@ Documentation](https://github.com/SoftwareDevLabs). ## ๐Ÿงญ Getting Started -1. Clone the repo -2. Install via `requirements.txt` -3. Set up model configs -4. Check sample code -5. Begin in notebooks +1. **Clone the repository.** +2. **Set up your Python environment.** A Python version between 3.10 and 3.12 is recommended. +3. **Install dependencies.** The project's dependencies are split into several files. For general development, you will need `requirements-dev.txt`. + ```bash + pip install -r requirements-dev.txt + ``` +4. **Set up your environment variables.** Copy the `.env.template` file to `.env` and fill in the required API keys for the LLM providers you want to use. +5. **Explore the examples.** The `examples/` directory contains scripts that demonstrate the key features of the project. +6. **Experiment in notebooks.** The `notebooks/` directory is a great place to start experimenting with the codebase. --- @@ -141,9 +119,12 @@ Documentation](https://github.com/SoftwareDevLabs). ## ๐Ÿ“ Core Files -- `requirements.txt` โ€“ Package dependencies -- `README.md` โ€“ Project overview and usage -- `Dockerfile` โ€“ Container build instructions +- `requirements.txt` โ€“ Core package dependencies for the project. +- `requirements-dev.txt` - Dependencies for development and testing. +- `requirements-docs.txt` - Dependencies for generating documentation. +- `AGENTS.md` - Instructions for AI agents working with this repository. +- `README.md` โ€“ Project overview and usage. +- `Dockerfile` โ€“ Container build instructions. ## Running tests @@ -172,8 +153,7 @@ make lint ## Contributing -We are excited to work alongside you, our amazing community, to build and -enhance Windows Terminal\! 
+We are excited to work with the community to build and enhance this project. ***BEFORE you start work on a feature/fix***, please read & follow our [Contributor's Guide](./CONTRIBUTING.md) to help avoid any wasted or duplicate effort. @@ -184,26 +164,10 @@ The easiest way to communicate with the team is via GitHub issues. Please file new issues, feature requests and suggestions, but **DO search for similar open/closed preexisting issues before creating a new issue.** -If you would like to ask a question that you feel doesn't warrant an issue (yet), please reach out to us via Twitter: - - - contact [info@softwaredevlabs.com][conduct-email] +If you would like to ask a question that you feel doesn't warrant an issue (yet), please reach out to us via email: [info@softwaredevlabs.com][conduct-email] ## Developer Guidance -## Prerequisites - - -### Configuration - - -## Building the Code - - -## Running & Debugging - - -### Coding Guidance - Please review these brief docs below about our coding practices. 
> ๐Ÿ‘‰ If you find something missing from these docs, feel free to contribute to diff --git a/build/azure-pipelines/alpine/cli-build-alpine.yml b/build/azure-pipelines/alpine/cli-build-alpine.yml deleted file mode 100644 index d35b8d3a..00000000 --- a/build/azure-pipelines/alpine/cli-build-alpine.yml +++ /dev/null @@ -1,108 +0,0 @@ -parameters: - - name: SDLC_CORE_BUILD_ALPINE - type: boolean - default: false - - name: SDLC_CORE_BUILD_ALPINE_ARM64 - type: boolean - default: false - - name: SDLC_CORE_QUALITY - type: string - - name: SDLC_CORE_CHECK_ONLY - type: boolean - default: false - -steps: - - task: NodeTool@0 - inputs: - versionSource: fromFile - versionFilePath: .nvmrc - nodejsMirror: https://github.com/joaomoreno/node-mirror/releases/download - - - ${{ if ne(parameters.SDLC_CORE_QUALITY, 'oss') }}: - - template: ../cli/cli-apply-patches.yml@self - - - script: | - set -e - npm ci - workingDirectory: build - env: - GITHUB_TOKEN: "$(github-distro-mixin-password)" - displayName: Install build dependencies - - - task: Npm@1 - displayName: Download openssl prebuilt - inputs: - command: custom - customCommand: pack @sdlc-core-internal/openssl-prebuilt@0.0.11 - customRegistry: useFeed - customFeed: "Monaco/openssl-prebuilt" - workingDir: $(Build.ArtifactStagingDirectory) - - - script: | - set -e - mkdir $(Build.ArtifactStagingDirectory)/openssl - tar -xvzf $(Build.ArtifactStagingDirectory)/sdlc-core-internal-openssl-prebuilt-0.0.11.tgz --strip-components=1 --directory=$(Build.ArtifactStagingDirectory)/openssl - displayName: Extract openssl prebuilt - - # inspired by: https://github.com/emk/rust-musl-builder/blob/main/Dockerfile - - bash: | - set -e - sudo apt-get update - sudo apt-get install -yq build-essential musl-dev musl-tools linux-libc-dev pkgconf xutils-dev lld - sudo ln -s "/usr/bin/g++" "/usr/bin/musl-g++" || echo "link exists" - displayName: Install musl build dependencies - - - template: ../cli/install-rust-posix.yml@self - parameters: - targets: - - 
${{ if eq(parameters.SDLC_CORE_BUILD_ALPINE_ARM64, true) }}: - - aarch64-unknown-linux-musl - - ${{ if eq(parameters.SDLC_CORE_BUILD_ALPINE, true) }}: - - x86_64-unknown-linux-musl - - - ${{ if eq(parameters.SDLC_CORE_BUILD_ALPINE_ARM64, true) }}: - - template: ../cli/cli-compile.yml@self - parameters: - SDLC_CORE_CLI_TARGET: aarch64-unknown-linux-musl - SDLC_CORE_CLI_ARTIFACT: sdlc_core_cli_alpine_arm64_cli - SDLC_CORE_QUALITY: ${{ parameters.SDLC_CORE_QUALITY }} - SDLC_CORE_CLI_ENV: - OPENSSL_LIB_DIR: $(Build.ArtifactStagingDirectory)/openssl/arm64-linux-musl/lib - OPENSSL_INCLUDE_DIR: $(Build.ArtifactStagingDirectory)/openssl/arm64-linux-musl/include - OPENSSL_STATIC: "1" - SYSROOT_ARCH: arm64 - IS_MUSL: "1" - - - ${{ if eq(parameters.SDLC_CORE_BUILD_ALPINE, true) }}: - - template: ../cli/cli-compile.yml@self - parameters: - SDLC_CORE_CLI_TARGET: x86_64-unknown-linux-musl - SDLC_CORE_CLI_ARTIFACT: sdlc_core_cli_alpine_x64_cli - SDLC_CORE_QUALITY: ${{ parameters.SDLC_CORE_QUALITY }} - SDLC_CORE_CLI_ENV: - CXX_aarch64-unknown-linux-musl: musl-g++ - CC_aarch64-unknown-linux-musl: musl-gcc - OPENSSL_LIB_DIR: $(Build.ArtifactStagingDirectory)/openssl/x64-linux-musl/lib - OPENSSL_INCLUDE_DIR: $(Build.ArtifactStagingDirectory)/openssl/x64-linux-musl/include - OPENSSL_STATIC: "1" - - - ${{ if not(parameters.SDLC_CORE_CHECK_ONLY) }}: - - ${{ if eq(parameters.SDLC_CORE_BUILD_ALPINE_ARM64, true) }}: - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: $(Build.ArtifactStagingDirectory)/sdlc_core_cli_alpine_arm64_cli.tar.gz - artifactName: sdlc_core_cli_alpine_arm64_cli - sbomBuildDropPath: $(Build.ArtifactStagingDirectory)/cli - sbomPackageName: "SDLC_core Alpine arm64 CLI" - sbomPackageVersion: $(Build.SourceVersion) - displayName: Publish sdlc_core_cli_alpine_arm64_cli artifact - - - ${{ if eq(parameters.SDLC_CORE_BUILD_ALPINE, true) }}: - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: 
$(Build.ArtifactStagingDirectory)/sdlc_core_cli_alpine_x64_cli.tar.gz - artifactName: sdlc_core_cli_alpine_x64_cli - sbomBuildDropPath: $(Build.ArtifactStagingDirectory)/cli - sbomPackageName: "SDLC_core Alpine x64 CLI" - sbomPackageVersion: $(Build.SourceVersion) - displayName: Publish sdlc_core_cli_alpine_x64_cli artifact diff --git a/build/azure-pipelines/alpine/product-build-alpine.yml b/build/azure-pipelines/alpine/product-build-alpine.yml deleted file mode 100644 index 12b88f11..00000000 --- a/build/azure-pipelines/alpine/product-build-alpine.yml +++ /dev/null @@ -1,182 +0,0 @@ -steps: - - task: NodeTool@0 - inputs: - versionSource: fromFile - versionFilePath: .nvmrc - nodejsMirror: https://github.com/joaomoreno/node-mirror/releases/download - - - template: ../distro/download-distro.yml@self - - - task: AzureKeyVault@2 - displayName: "Azure Key Vault: Get Secrets" - inputs: - azureSubscription: sdlc-core - KeyVaultName: sdlc-core-build-secrets - SecretsFilter: "github-distro-mixin-password" - - - task: DownloadPipelineArtifact@2 - inputs: - artifact: Compilation - path: $(Build.ArtifactStagingDirectory) - displayName: Download compilation output - - - script: tar -xzf $(Build.ArtifactStagingDirectory)/compilation.tar.gz - displayName: Extract compilation output - - - script: node build/setup-npm-registry.js $NPM_REGISTRY - condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM Registry - - - script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js alpine $(SDLC_CORE_ARCH) $(node -p process.arch) > .build/packagelockhash - displayName: Prepare node_modules cache key - - - task: Cache@2 - inputs: - key: '"node_modules" | .build/packagelockhash' - path: .build/node_modules_cache - cacheHitVar: NODE_MODULES_RESTORED - displayName: Restore node_modules cache - - - script: tar -xzf .build/node_modules_cache/cache.tgz - condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true')) - 
displayName: Extract node_modules cache - - - script: | - set -e - # Set the private NPM registry to the global npmrc file - # so that authentication works for subfolders like build/, remote/, extensions/ etc - # which does not have their own .npmrc file - npm config set registry "$NPM_REGISTRY" - echo "##vso[task.setvariable variable=NPMRC_PATH]$(npm config get userconfig)" - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM - - - task: npmAuthenticate@0 - inputs: - workingFile: $(NPMRC_PATH) - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM Authentication - - - task: Docker@1 - inputs: - azureSubscriptionEndpoint: sdlc-core - azureContainerRegistry: sdlc-corehub.azurecr.io - command: "Run an image" - imageName: "sdlc-core-linux-build-agent:alpine-$(SDLC_CORE_ARCH)" - containerCommand: uname - displayName: "Pull image" - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - - - script: sudo apt-get update && sudo apt-get install -y libkrb5-dev - displayName: Install build dependencies - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - - - script: | - set -e - - for i in {1..5}; do # try 5 times - npm ci && break - if [ $i -eq 5 ]; then - echo "Npm install failed too many times" >&2 - exit 1 - fi - echo "Npm install failed $i, trying again..." 
- done - env: - npm_config_arch: $(NPM_ARCH) - ELECTRON_SKIP_BINARY_DOWNLOAD: 1 - PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1 - GITHUB_TOKEN: "$(github-distro-mixin-password)" - SDLC_CORE_REMOTE_DEPENDENCIES_CONTAINER_NAME: sdlc-corehub.azurecr.io/sdlc-core-linux-build-agent:alpine-$(SDLC_CORE_ARCH) - SDLC_CORE_HOST_MOUNT: "/mnt/vss/_work/1/s" - displayName: Install dependencies - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - - - script: node build/azure-pipelines/distro/mixin-npm - displayName: Mixin distro node modules - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - - - script: | - set -e - node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt - mkdir -p .build/node_modules_cache - tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - displayName: Create node_modules archive - - - script: node build/azure-pipelines/distro/mixin-quality - displayName: Mixin distro quality - - - template: ../common/install-builtin-extensions.yml@self - - - script: | - set -e - TARGET=$([ "$(SDLC_CORE_ARCH)" == "x64" ] && echo "linux-alpine" || echo "alpine-arm64") # TODO@joaomoreno - npm run gulp sdlc-core-reh-$TARGET-min-ci - (cd .. && mv sdlc-core-reh-$TARGET sdlc-core-server-$TARGET) # TODO@joaomoreno - ARCHIVE_PATH=".build/linux/server/sdlc-core-server-$TARGET.tar.gz" - DIR_PATH="$(realpath ../sdlc-core-server-$TARGET)" - mkdir -p $(dirname $ARCHIVE_PATH) - tar --owner=0 --group=0 -czf $ARCHIVE_PATH -C .. 
sdlc-core-server-$TARGET - echo "##vso[task.setvariable variable=SERVER_DIR_PATH]$DIR_PATH" - echo "##vso[task.setvariable variable=SERVER_PATH]$ARCHIVE_PATH" - env: - GITHUB_TOKEN: "$(github-distro-mixin-password)" - displayName: Build server - - - script: | - set -e - TARGET=$([ "$(SDLC_CORE_ARCH)" == "x64" ] && echo "linux-alpine" || echo "alpine-arm64") - npm run gulp sdlc-core-reh-web-$TARGET-min-ci - (cd .. && mv sdlc-core-reh-web-$TARGET sdlc-core-server-$TARGET-web) # TODO@joaomoreno - ARCHIVE_PATH=".build/linux/web/sdlc-core-server-$TARGET-web.tar.gz" - DIR_PATH="$(realpath ../sdlc-core-server-$TARGET-web)" - mkdir -p $(dirname $ARCHIVE_PATH) - tar --owner=0 --group=0 -czf $ARCHIVE_PATH -C .. sdlc-core-server-$TARGET-web - echo "##vso[task.setvariable variable=WEB_DIR_PATH]$DIR_PATH" - echo "##vso[task.setvariable variable=WEB_PATH]$ARCHIVE_PATH" - env: - GITHUB_TOKEN: "$(github-distro-mixin-password)" - displayName: Build server (web) - - - script: echo "##vso[task.setvariable variable=ARTIFACT_PREFIX]attempt$(System.JobAttempt)_" - condition: and(succeededOrFailed(), notIn(variables['Agent.JobStatus'], 'Succeeded', 'SucceededWithIssues')) - displayName: Generate artifact prefix - - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: $(SERVER_PATH) - artifactName: $(ARTIFACT_PREFIX)sdlc_core_server_alpine_$(SDLC_CORE_ARCH)_archive-unsigned - sbomBuildDropPath: $(SERVER_DIR_PATH) - sbomPackageName: "SDLC_core Alpine $(SDLC_CORE_ARCH) Server" - sbomPackageVersion: $(Build.SourceVersion) - displayName: Publish server archive - condition: and(succeededOrFailed(), ne(variables['SERVER_PATH'], ''), ne(variables['SDLC_CORE_ARCH'], 'x64')) - - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: $(WEB_PATH) - artifactName: $(ARTIFACT_PREFIX)sdlc_core_web_alpine_$(SDLC_CORE_ARCH)_archive-unsigned - sbomBuildDropPath: $(WEB_DIR_PATH) - sbomPackageName: "SDLC_core Alpine $(SDLC_CORE_ARCH) Web" - sbomPackageVersion: $(Build.SourceVersion) - 
displayName: Publish web server archive - condition: and(succeededOrFailed(), ne(variables['WEB_PATH'], ''), ne(variables['SDLC_CORE_ARCH'], 'x64')) - - # same as above, keep legacy name - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: $(SERVER_PATH) - artifactName: $(ARTIFACT_PREFIX)sdlc_core_server_linux_alpine_archive-unsigned - sbomEnabled: false - displayName: Publish x64 server archive - condition: and(succeededOrFailed(), ne(variables['SERVER_PATH'], ''), eq(variables['SDLC_CORE_ARCH'], 'x64')) - - # same as above, keep legacy name - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: $(WEB_PATH) - artifactName: $(ARTIFACT_PREFIX)sdlc_core_web_linux_alpine_archive-unsigned - sbomEnabled: false - displayName: Publish x64 web server archive - condition: and(succeededOrFailed(), ne(variables['WEB_PATH'], ''), eq(variables['SDLC_CORE_ARCH'], 'x64')) diff --git a/build/azure-pipelines/cli/cli-apply-patches.yml b/build/azure-pipelines/cli/cli-apply-patches.yml deleted file mode 100644 index 2815124e..00000000 --- a/build/azure-pipelines/cli/cli-apply-patches.yml +++ /dev/null @@ -1,8 +0,0 @@ -steps: - - template: ../distro/download-distro.yml@self - - - script: node build/azure-pipelines/distro/mixin-quality - displayName: Mixin distro quality - - - script: node .build/distro/cli-patches/index.js - displayName: Apply distro patches diff --git a/build/azure-pipelines/cli/cli-compile.yml b/build/azure-pipelines/cli/cli-compile.yml deleted file mode 100644 index 5ce69d88..00000000 --- a/build/azure-pipelines/cli/cli-compile.yml +++ /dev/null @@ -1,171 +0,0 @@ -parameters: - - name: SDLC_CORE_QUALITY - type: string - - name: SDLC_CORE_CLI_TARGET - type: string - - name: SDLC_CORE_CLI_ARTIFACT - type: string - - name: SDLC_CORE_CLI_ENV - type: object - default: {} - - name: SDLC_CORE_CHECK_ONLY - type: boolean - default: false - -steps: - - ${{ if contains(parameters.VSCODE_CLI_TARGET, '-windows-') }}: - - ${{ if 
eq(parameters.SDLC_CORE_QUALITY, 'oss') }}: - - pwsh: Write-Host "##vso[task.setvariable variable=SDLC_CORE_CLI_PRODUCT_JSON]$(Build.SourcesDirectory)/product.json" - displayName: Set product.json path - - ${{ else }}: - - pwsh: Write-Host "##vso[task.setvariable variable=SDLC_CORE_CLI_PRODUCT_JSON]$(Build.SourcesDirectory)/.build/distro/mixin/${{ parameters.SDLC_CORE_QUALITY }}/product.json" - displayName: Set product.json path - - ${{ else }}: - - ${{ if eq(parameters.SDLC_CORE_QUALITY, 'oss') }}: - - script: echo "##vso[task.setvariable variable=SDLC_CORE_CLI_PRODUCT_JSON]$(Build.SourcesDirectory)/product.json" - displayName: Set product.json path - - ${{ else }}: - - script: echo "##vso[task.setvariable variable=SDLC_CORE_CLI_PRODUCT_JSON]$(Build.SourcesDirectory)/.build/distro/mixin/${{ parameters.SDLC_CORE_QUALITY }}/product.json" - displayName: Set product.json path - - - ${{ if parameters.SDLC_CORE_CHECK_ONLY }}: - - script: cargo clippy --target ${{ parameters.SDLC_CORE_CLI_TARGET }} --bin=code - displayName: Lint ${{ parameters.SDLC_CORE_CLI_TARGET }} - workingDirectory: $(Build.SourcesDirectory)/cli - env: - CARGO_NET_GIT_FETCH_WITH_CLI: true - ${{ each pair in parameters.VSCODE_CLI_ENV }}: - ${{ pair.key }}: ${{ pair.value }} - - - ${{ else }}: - - ${{ if contains(parameters.VSCODE_CLI_TARGET, '-linux-') }}: - - script: | - set -e - if [ -n "$SYSROOT_ARCH" ]; then - export VSCODE_SYSROOT_DIR=$(Build.SourcesDirectory)/.build/sysroots - node -e '(async () => { const { getVSCodeSysroot } = require("../build/linux/debian/install-sysroot.js"); await getVSCodeSysroot(process.env["SYSROOT_ARCH"], process.env["IS_MUSL"] === "1"); })()' - if [ "$SYSROOT_ARCH" == "arm64" ]; then - if [ -n "$IS_MUSL" ]; then - export CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_LINKER="$VSCODE_SYSROOT_DIR/output/bin/aarch64-linux-musl-gcc" - export CC_aarch64_unknown_linux_musl="$VSCODE_SYSROOT_DIR/output/bin/aarch64-linux-musl-gcc" - export 
CXX_aarch64_unknown_linux_musl="$VSCODE_SYSROOT_DIR/output/bin/aarch64-linux-musl-g++" - else - export CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER="$VSCODE_SYSROOT_DIR/aarch64-linux-gnu/bin/aarch64-linux-gnu-gcc" - export CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_RUSTFLAGS="-C link-arg=--sysroot=$VSCODE_SYSROOT_DIR/aarch64-linux-gnu/aarch64-linux-gnu/sysroot" - export CC_aarch64_unknown_linux_gnu="$VSCODE_SYSROOT_DIR/aarch64-linux-gnu/bin/aarch64-linux-gnu-gcc --sysroot=$VSCODE_SYSROOT_DIR/aarch64-linux-gnu/aarch64-linux-gnu/sysroot" - export PKG_CONFIG_LIBDIR_aarch64_unknown_linux_gnu="$VSCODE_SYSROOT_DIR/aarch64-linux-gnu/aarch64-linux-gnu/sysroot/usr/lib/aarch64-linux-gnu/pkgconfig:$VSCODE_SYSROOT_DIR/aarch64-linux-gnu/aarch64-linux-gnu/sysroot/usr/share/pkgconfig" - export PKG_CONFIG_SYSROOT_DIR_aarch64_unknown_linux_gnu="$VSCODE_SYSROOT_DIR/aarch64-linux-gnu/aarch64-linux-gnu/sysroot" - export OBJDUMP="$VSCODE_SYSROOT_DIR/aarch64-linux-gnu/aarch64-linux-gnu/bin/objdump" - fi - elif [ "$SYSROOT_ARCH" == "amd64" ]; then - export CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_LINKER="$VSCODE_SYSROOT_DIR/x86_64-linux-gnu/bin/x86_64-linux-gnu-gcc" - export CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUSTFLAGS="-C link-arg=--sysroot=$VSCODE_SYSROOT_DIR/x86_64-linux-gnu/x86_64-linux-gnu/sysroot -C link-arg=-L$VSCODE_SYSROOT_DIR/x86_64-linux-gnu/x86_64-linux-gnu/sysroot/usr/lib/x86_64-linux-gnu" - export CC_x86_64_unknown_linux_gnu="$VSCODE_SYSROOT_DIR/x86_64-linux-gnu/bin/x86_64-linux-gnu-gcc --sysroot=$VSCODE_SYSROOT_DIR/x86_64-linux-gnu/x86_64-linux-gnu/sysroot" - export PKG_CONFIG_LIBDIR_x86_64_unknown_linux_gnu="$VSCODE_SYSROOT_DIR/x86_64-linux-gnu/x86_64-linux-gnu/sysroot/usr/lib/x86_64-linux-gnu/pkgconfig:$VSCODE_SYSROOT_DIR/x86_64-linux-gnu/x86_64-linux-gnu/sysroot/usr/share/pkgconfig" - export PKG_CONFIG_SYSROOT_DIR_x86_64_unknown_linux_gnu="$VSCODE_SYSROOT_DIR/x86_64-linux-gnu/x86_64-linux-gnu/sysroot" - export 
OBJDUMP="$VSCODE_SYSROOT_DIR/x86_64-linux-gnu/x86_64-linux-gnu/bin/objdump" - elif [ "$SYSROOT_ARCH" == "armhf" ]; then - export CARGO_TARGET_ARMV7_UNKNOWN_LINUX_GNUEABIHF_LINKER="$VSCODE_SYSROOT_DIR/arm-rpi-linux-gnueabihf/bin/arm-rpi-linux-gnueabihf-gcc" - export CARGO_TARGET_ARMV7_UNKNOWN_LINUX_GNUEABIHF_RUSTFLAGS="-C link-arg=--sysroot=$VSCODE_SYSROOT_DIR/arm-rpi-linux-gnueabihf/arm-rpi-linux-gnueabihf/sysroot" - export CC_armv7_unknown_linux_gnueabihf="$VSCODE_SYSROOT_DIR/arm-rpi-linux-gnueabihf/bin/arm-rpi-linux-gnueabihf-gcc --sysroot=$VSCODE_SYSROOT_DIR/arm-rpi-linux-gnueabihf/arm-rpi-linux-gnueabihf/sysroot" - export PKG_CONFIG_LIBDIR_armv7_unknown_linux_gnueabihf="$VSCODE_SYSROOT_DIR/arm-rpi-linux-gnueabihf/arm-rpi-linux-gnueabihf/sysroot/usr/lib/arm-rpi-linux-gnueabihf/pkgconfig:$VSCODE_SYSROOT_DIR/arm-rpi-linux-gnueabihf/arm-rpi-linux-gnueabihf/sysroot/usr/share/pkgconfig" - export PKG_CONFIG_SYSROOT_DIR_armv7_unknown_linux_gnueabihf="$VSCODE_SYSROOT_DIR/arm-rpi-linux-gnueabihf/arm-rpi-linux-gnueabihf/sysroot" - export OBJDUMP="$VSCODE_SYSROOT_DIR/arm-rpi-linux-gnueabihf/arm-rpi-linux-gnueabihf/bin/objdump" - fi - fi - - cargo build --release --target ${{ parameters.VSCODE_CLI_TARGET }} --bin=code - - # verify glibc requirement - if [ -n "$SYSROOT_ARCH" ] && [ -n "$OBJDUMP" ]; then - glibc_version="2.28" - while IFS= read -r line; do - if [[ $line == *"GLIBC_"* ]]; then - version=$(echo "$line" | awk '{print $5}' | tr -d '()') - version=${version#*_} - if [[ $(printf "%s\n%s" "$version" "$glibc_version" | sort -V | tail -n1) == "$version" ]]; then - glibc_version=$version - fi - fi - done < <("$OBJDUMP" -T "$PWD/target/${{ parameters.VSCODE_CLI_TARGET }}/release/code") - if [[ "$glibc_version" != "2.28" ]]; then - echo "Error: binary has dependency on GLIBC > 2.28, found $glibc_version" - exit 1 - else - echo "Maximum GLIBC version is $glibc_version as expected." 
- fi - fi - displayName: Compile ${{ parameters.VSCODE_CLI_TARGET }} - workingDirectory: $(Build.SourcesDirectory)/cli - env: - CARGO_NET_GIT_FETCH_WITH_CLI: true - VSCODE_CLI_COMMIT: $(Build.SourceVersion) - GITHUB_TOKEN: "$(github-distro-mixin-password)" - ${{ each pair in parameters.VSCODE_CLI_ENV }}: - ${{ pair.key }}: ${{ pair.value }} - - - ${{ else }}: - - script: cargo build --release --target ${{ parameters.VSCODE_CLI_TARGET }} --bin=code - displayName: Compile ${{ parameters.VSCODE_CLI_TARGET }} - workingDirectory: $(Build.SourcesDirectory)/cli - env: - CARGO_NET_GIT_FETCH_WITH_CLI: true - VSCODE_CLI_COMMIT: $(Build.SourceVersion) - ${{ each pair in parameters.VSCODE_CLI_ENV }}: - ${{ pair.key }}: ${{ pair.value }} - - - ${{ if contains(parameters.VSCODE_CLI_TARGET, '-windows-') }}: - - task: PublishSymbols@2 - inputs: - IndexSources: false - SymbolsFolder: $(Build.SourcesDirectory)/cli/target/${{ parameters.VSCODE_CLI_TARGET }}/release - SearchPattern: 'code.pdb' - SymbolServerType: TeamServices - SymbolsProduct: 'code' - ArtifactServices.Symbol.AccountName: microsoft - ArtifactServices.Symbol.PAT: $(System.AccessToken) - ArtifactServices.Symbol.UseAAD: false - displayName: Publish Symbols - - - powershell: | - . 
build/azure-pipelines/win32/exec.ps1 - $ErrorActionPreference = "Stop" - $AppProductJson = Get-Content -Raw -Path "$env:VSCODE_CLI_PRODUCT_JSON" | ConvertFrom-Json - $env:VSCODE_CLI_APPLICATION_NAME = $AppProductJson.applicationName - - Write-Host "##vso[task.setvariable variable=VSCODE_CLI_APPLICATION_NAME]$env:VSCODE_CLI_APPLICATION_NAME" - - New-Item -ItemType Directory -Force -Path "$(Build.ArtifactStagingDirectory)/cli" - Move-Item -Path $(Build.SourcesDirectory)/cli/target/${{ parameters.VSCODE_CLI_TARGET }}/release/code.exe -Destination "$(Build.ArtifactStagingDirectory)/cli/${env:VSCODE_CLI_APPLICATION_NAME}.exe" - displayName: Stage CLI - - - task: ArchiveFiles@2 - displayName: Archive CLI - inputs: - rootFolderOrFile: $(Build.ArtifactStagingDirectory)/cli/$(VSCODE_CLI_APPLICATION_NAME).exe - includeRootFolder: false - archiveType: zip - archiveFile: $(Build.ArtifactStagingDirectory)/${{ parameters.VSCODE_CLI_ARTIFACT }}.zip - - - ${{ else }}: - - script: | - set -e - VSCODE_CLI_APPLICATION_NAME=$(node -p "require(\"$VSCODE_CLI_PRODUCT_JSON\").applicationName") - echo "##vso[task.setvariable variable=VSCODE_CLI_APPLICATION_NAME]$VSCODE_CLI_APPLICATION_NAME" - - mkdir -p $(Build.ArtifactStagingDirectory)/cli - mv $(Build.SourcesDirectory)/cli/target/${{ parameters.VSCODE_CLI_TARGET }}/release/code $(Build.ArtifactStagingDirectory)/cli/$VSCODE_CLI_APPLICATION_NAME - displayName: Stage CLI - - - task: ArchiveFiles@2 - displayName: Archive CLI - inputs: - rootFolderOrFile: $(Build.ArtifactStagingDirectory)/cli/$(VSCODE_CLI_APPLICATION_NAME) - includeRootFolder: false - ${{ if contains(parameters.VSCODE_CLI_TARGET, '-darwin') }}: - archiveType: zip - archiveFile: $(Build.ArtifactStagingDirectory)/${{ parameters.VSCODE_CLI_ARTIFACT }}.zip - ${{ else }}: - archiveType: tar - tarCompression: gz - archiveFile: $(Build.ArtifactStagingDirectory)/${{ parameters.VSCODE_CLI_ARTIFACT }}.tar.gz diff --git a/build/azure-pipelines/cli/cli-darwin-sign.yml 
b/build/azure-pipelines/cli/cli-darwin-sign.yml deleted file mode 100644 index d702b82f..00000000 --- a/build/azure-pipelines/cli/cli-darwin-sign.yml +++ /dev/null @@ -1,61 +0,0 @@ -parameters: - - name: VSCODE_CLI_ARTIFACTS - type: object - default: [] - -steps: - - task: UseDotNet@2 - inputs: - version: 6.x - - - task: EsrpCodeSigning@5 - inputs: - UseMSIAuthentication: true - ConnectedServiceName: vscode-esrp - AppRegistrationClientId: $(ESRP_CLIENT_ID) - AppRegistrationTenantId: $(ESRP_TENANT_ID) - AuthAKVName: vscode-esrp - AuthSignCertName: esrp-sign - FolderPath: . - Pattern: noop - displayName: 'Install ESRP Tooling' - - - ${{ each target in parameters.VSCODE_CLI_ARTIFACTS }}: - - task: DownloadPipelineArtifact@2 - displayName: Download ${{ target }} - inputs: - artifact: ${{ target }} - path: $(Build.ArtifactStagingDirectory)/pkg/${{ target }} - - - task: ExtractFiles@1 - displayName: Extract artifact - inputs: - archiveFilePatterns: $(Build.ArtifactStagingDirectory)/pkg/${{ target }}/*.zip - destinationFolder: $(Build.ArtifactStagingDirectory)/sign/${{ target }} - - - script: node build/azure-pipelines/common/sign $(Agent.RootDirectory)/_tasks/EsrpCodeSigning_*/*/net6.0/esrpcli.dll sign-darwin $(Build.ArtifactStagingDirectory)/pkg "*.zip" - env: - SYSTEM_ACCESSTOKEN: $(System.AccessToken) - displayName: โœ๏ธ Codesign - - - script: node build/azure-pipelines/common/sign $(Agent.RootDirectory)/_tasks/EsrpCodeSigning_*/*/net6.0/esrpcli.dll notarize-darwin $(Build.ArtifactStagingDirectory)/pkg "*.zip" - env: - SYSTEM_ACCESSTOKEN: $(System.AccessToken) - displayName: โœ๏ธ Notarize - - - ${{ each target in parameters.VSCODE_CLI_ARTIFACTS }}: - - script: | - set -e - ASSET_ID=$(echo "${{ target }}" | sed "s/unsigned_//") - mv $(Build.ArtifactStagingDirectory)/pkg/${{ target }}/${{ target }}.zip $(Build.ArtifactStagingDirectory)/pkg/${{ target }}/$ASSET_ID.zip - echo "##vso[task.setvariable variable=ASSET_ID]$ASSET_ID" - displayName: Set asset id variable - - 
- task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: $(Build.ArtifactStagingDirectory)/pkg/${{ target }}/$(ASSET_ID).zip - artifactName: $(ASSET_ID) - sbomBuildDropPath: $(Build.ArtifactStagingDirectory)/sign/${{ target }} - sbomPackageName: "VS Code macOS ${{ target }} CLI" - sbomPackageVersion: $(Build.SourceVersion) - displayName: Publish signed artifact with ID $(ASSET_ID) diff --git a/build/azure-pipelines/cli/cli-win32-sign.yml b/build/azure-pipelines/cli/cli-win32-sign.yml deleted file mode 100644 index eb85e9e2..00000000 --- a/build/azure-pipelines/cli/cli-win32-sign.yml +++ /dev/null @@ -1,70 +0,0 @@ -parameters: - - name: VSCODE_CLI_ARTIFACTS - type: object - default: [] - -steps: - - task: UseDotNet@2 - inputs: - version: 6.x - - - task: EsrpCodeSigning@5 - inputs: - UseMSIAuthentication: true - ConnectedServiceName: vscode-esrp - AppRegistrationClientId: $(ESRP_CLIENT_ID) - AppRegistrationTenantId: $(ESRP_TENANT_ID) - AuthAKVName: vscode-esrp - AuthSignCertName: esrp-sign - FolderPath: . - Pattern: noop - displayName: 'Install ESRP Tooling' - - - powershell: | - . 
build/azure-pipelines/win32/exec.ps1 - $ErrorActionPreference = "Stop" - $EsrpCodeSigningTool = (gci -directory -filter EsrpCodeSigning_* $(Agent.RootDirectory)\_tasks | Select-Object -last 1).FullName - $Version = (gci -directory $EsrpCodeSigningTool | Select-Object -last 1).FullName - echo "##vso[task.setvariable variable=EsrpCliDllPath]$Version\net6.0\esrpcli.dll" - displayName: Find ESRP CLI - - - ${{ each target in parameters.VSCODE_CLI_ARTIFACTS }}: - - task: DownloadPipelineArtifact@2 - displayName: Download artifact - inputs: - artifact: ${{ target }} - path: $(Build.ArtifactStagingDirectory)/pkg/${{ target }} - - - task: ExtractFiles@1 - displayName: Extract artifact - inputs: - archiveFilePatterns: $(Build.ArtifactStagingDirectory)/pkg/${{ target }}/*.zip - destinationFolder: $(Build.ArtifactStagingDirectory)/sign/${{ target }} - - - powershell: node build\azure-pipelines\common\sign $env:EsrpCliDllPath sign-windows $(Build.ArtifactStagingDirectory)/sign "*.exe" - env: - SYSTEM_ACCESSTOKEN: $(System.AccessToken) - displayName: โœ๏ธ Codesign - - - ${{ each target in parameters.VSCODE_CLI_ARTIFACTS }}: - - powershell: | - $ASSET_ID = "${{ target }}".replace("unsigned_", ""); - echo "##vso[task.setvariable variable=ASSET_ID]$ASSET_ID" - displayName: Set asset id variable - - - task: ArchiveFiles@2 - displayName: Archive signed files - inputs: - rootFolderOrFile: $(Build.ArtifactStagingDirectory)/sign/${{ target }} - includeRootFolder: false - archiveType: zip - archiveFile: $(Build.ArtifactStagingDirectory)/$(ASSET_ID).zip - - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: $(Build.ArtifactStagingDirectory)/$(ASSET_ID).zip - artifactName: $(ASSET_ID) - sbomBuildDropPath: $(Build.ArtifactStagingDirectory)/sign/${{ target }} - sbomPackageName: "VS Code Windows ${{ target }} CLI" - sbomPackageVersion: $(Build.SourceVersion) - displayName: Publish signed artifact with ID $(ASSET_ID) diff --git a/build/azure-pipelines/cli/install-rust-posix.yml 
b/build/azure-pipelines/cli/install-rust-posix.yml deleted file mode 100644 index 0607cde3..00000000 --- a/build/azure-pipelines/cli/install-rust-posix.yml +++ /dev/null @@ -1,51 +0,0 @@ -parameters: - - name: channel - type: string - default: 1.85 - - name: targets - default: [] - type: object - -# Todo: use 1ES pipeline once extension is installed in ADO - -steps: - - task: RustInstaller@1 - inputs: - rustVersion: ms-${{ parameters.channel }} - cratesIoFeedOverride: $(CARGO_REGISTRY) - additionalTargets: ${{ join(' ', parameters.targets) }} - toolchainFeed: https://pkgs.dev.azure.com/monacotools/Monaco/_packaging/vscode/nuget/v3/index.json - default: true - addToPath: true - displayName: Install MSFT Rust - condition: and(succeeded(), ne(variables['CARGO_REGISTRY'], 'none')) - - - script: | - set -e - curl https://sh.rustup.rs -sSf | sh -s -- -y --profile minimal --default-toolchain $RUSTUP_TOOLCHAIN - echo "##vso[task.setvariable variable=PATH;]$PATH:$HOME/.cargo/bin" - env: - RUSTUP_TOOLCHAIN: ${{ parameters.channel }} - displayName: Install OSS Rust - condition: and(succeeded(), eq(variables['CARGO_REGISTRY'], 'none')) - - - script: | - set -e - rustup default $RUSTUP_TOOLCHAIN - rustup update $RUSTUP_TOOLCHAIN - rustup component add clippy - env: - RUSTUP_TOOLCHAIN: ${{ parameters.channel }} - displayName: "Set Rust version" - condition: and(succeeded(), eq(variables['CARGO_REGISTRY'], 'none')) - - - ${{ each target in parameters.targets }}: - - script: rustup target add ${{ target }} - displayName: "Adding Rust target '${{ target }}'" - condition: and(succeeded(), eq(variables['CARGO_REGISTRY'], 'none')) - - - script: | - set -e - rustc --version - cargo --version - displayName: "Check Rust versions" diff --git a/build/azure-pipelines/cli/install-rust-win32.yml b/build/azure-pipelines/cli/install-rust-win32.yml deleted file mode 100644 index bff114fc..00000000 --- a/build/azure-pipelines/cli/install-rust-win32.yml +++ /dev/null @@ -1,51 +0,0 @@ -parameters: 
- - name: channel - type: string - default: 1.85 - - name: targets - default: [] - type: object - -# Todo: use 1ES pipeline once extension is installed in ADO - -steps: - - task: RustInstaller@1 - inputs: - rustVersion: ms-${{ parameters.channel }} - cratesIoFeedOverride: $(CARGO_REGISTRY) - additionalTargets: ${{ join(' ', parameters.targets) }} - toolchainFeed: https://pkgs.dev.azure.com/monacotools/Monaco/_packaging/vscode/nuget/v3/index.json - default: true - addToPath: true - displayName: Install MSFT Rust - condition: and(succeeded(), ne(variables['CARGO_REGISTRY'], 'none')) - - - powershell: | - . build/azure-pipelines/win32/exec.ps1 - Invoke-WebRequest -Uri "https://win.rustup.rs" -Outfile $(Build.ArtifactStagingDirectory)/rustup-init.exe - exec { $(Build.ArtifactStagingDirectory)/rustup-init.exe -y --profile minimal --default-toolchain $env:RUSTUP_TOOLCHAIN --default-host x86_64-pc-windows-msvc } - echo "##vso[task.prependpath]$env:USERPROFILE\.cargo\bin" - env: - RUSTUP_TOOLCHAIN: ${{ parameters.channel }} - displayName: Install OSS Rust - condition: and(succeeded(), eq(variables['CARGO_REGISTRY'], 'none')) - - - powershell: | - . build/azure-pipelines/win32/exec.ps1 - exec { rustup default $RUSTUP_TOOLCHAIN } - exec { rustup update $RUSTUP_TOOLCHAIN } - env: - RUSTUP_TOOLCHAIN: ${{ parameters.channel }} - displayName: "Set Rust version" - condition: and(succeeded(), eq(variables['CARGO_REGISTRY'], 'none')) - - - ${{ each target in parameters.targets }}: - - script: rustup target add ${{ target }} - displayName: "Adding Rust target '${{ target }}'" - condition: and(succeeded(), eq(variables['CARGO_REGISTRY'], 'none')) - - - powershell: | - . 
build/azure-pipelines/win32/exec.ps1 - exec { rustc --version } - exec { cargo --version } - displayName: "Check Rust versions" diff --git a/build/azure-pipelines/cli/test.yml b/build/azure-pipelines/cli/test.yml deleted file mode 100644 index 6e2a1c68..00000000 --- a/build/azure-pipelines/cli/test.yml +++ /dev/null @@ -1,10 +0,0 @@ -steps: - - template: ./install-rust-posix.yml@self - - - script: cargo clippy -- -D warnings - workingDirectory: cli - displayName: Clippy lint - - - script: cargo test - workingDirectory: cli - displayName: ๐Ÿงช Run unit tests diff --git a/build/azure-pipelines/common/checkForArtifact.js b/build/azure-pipelines/common/checkForArtifact.js deleted file mode 100644 index 899448f7..00000000 --- a/build/azure-pipelines/common/checkForArtifact.js +++ /dev/null @@ -1,34 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -Object.defineProperty(exports, "__esModule", { value: true }); -const publish_1 = require("./publish"); -const retry_1 = require("./retry"); -async function getPipelineArtifacts() { - const result = await (0, publish_1.requestAZDOAPI)('artifacts'); - return result.value.filter(a => !/sbom$/.test(a.name)); -} -async function main([variableName, artifactName]) { - if (!variableName || !artifactName) { - throw new Error(`Usage: node checkForArtifact.js `); - } - try { - const artifacts = await (0, retry_1.retry)(() => getPipelineArtifacts()); - const artifact = artifacts.find(a => a.name === artifactName); - console.log(`##vso[task.setvariable variable=${variableName}]${artifact ? 
'true' : 'false'}`); - } - catch (err) { - console.error(`ERROR: Failed to get pipeline artifacts: ${err}`); - console.log(`##vso[task.setvariable variable=${variableName}]false`); - } -} -main(process.argv.slice(2)) - .then(() => { - process.exit(0); -}, err => { - console.error(err); - process.exit(1); -}); -//# sourceMappingURL=checkForArtifact.js.map \ No newline at end of file diff --git a/build/azure-pipelines/common/checkForArtifact.ts b/build/azure-pipelines/common/checkForArtifact.ts deleted file mode 100644 index e0a1a2ce..00000000 --- a/build/azure-pipelines/common/checkForArtifact.ts +++ /dev/null @@ -1,35 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ - -import { Artifact, requestAZDOAPI } from './publish'; -import { retry } from './retry'; - -async function getPipelineArtifacts(): Promise { - const result = await requestAZDOAPI<{ readonly value: Artifact[] }>('artifacts'); - return result.value.filter(a => !/sbom$/.test(a.name)); -} - -async function main([variableName, artifactName]: string[]): Promise { - if (!variableName || !artifactName) { - throw new Error(`Usage: node checkForArtifact.js `); - } - - try { - const artifacts = await retry(() => getPipelineArtifacts()); - const artifact = artifacts.find(a => a.name === artifactName); - console.log(`##vso[task.setvariable variable=${variableName}]${artifact ? 
'true' : 'false'}`); - } catch (err) { - console.error(`ERROR: Failed to get pipeline artifacts: ${err}`); - console.log(`##vso[task.setvariable variable=${variableName}]false`); - } -} - -main(process.argv.slice(2)) - .then(() => { - process.exit(0); - }, err => { - console.error(err); - process.exit(1); - }); diff --git a/build/azure-pipelines/common/codesign.js b/build/azure-pipelines/common/codesign.js deleted file mode 100644 index e3a8f330..00000000 --- a/build/azure-pipelines/common/codesign.js +++ /dev/null @@ -1,30 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.printBanner = printBanner; -exports.streamProcessOutputAndCheckResult = streamProcessOutputAndCheckResult; -exports.spawnCodesignProcess = spawnCodesignProcess; -const zx_1 = require("zx"); -function printBanner(title) { - title = `${title} (${new Date().toISOString()})`; - console.log('\n'); - console.log('#'.repeat(75)); - console.log(`# ${title.padEnd(71)} #`); - console.log('#'.repeat(75)); - console.log('\n'); -} -async function streamProcessOutputAndCheckResult(name, promise) { - const result = await promise.pipe(process.stdout); - if (result.ok) { - console.log(`\n${name} completed successfully. 
Duration: ${result.duration} ms`); - return; - } - throw new Error(`${name} failed: ${result.stderr}`); -} -function spawnCodesignProcess(esrpCliDLLPath, type, folder, glob) { - return (0, zx_1.$) `node build/azure-pipelines/common/sign ${esrpCliDLLPath} ${type} ${folder} ${glob}`; -} -//# sourceMappingURL=codesign.js.map \ No newline at end of file diff --git a/build/azure-pipelines/common/codesign.ts b/build/azure-pipelines/common/codesign.ts deleted file mode 100644 index 9f26b392..00000000 --- a/build/azure-pipelines/common/codesign.ts +++ /dev/null @@ -1,30 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ - -import { $, ProcessPromise } from 'zx'; - -export function printBanner(title: string) { - title = `${title} (${new Date().toISOString()})`; - - console.log('\n'); - console.log('#'.repeat(75)); - console.log(`# ${title.padEnd(71)} #`); - console.log('#'.repeat(75)); - console.log('\n'); -} - -export async function streamProcessOutputAndCheckResult(name: string, promise: ProcessPromise): Promise { - const result = await promise.pipe(process.stdout); - if (result.ok) { - console.log(`\n${name} completed successfully. 
Duration: ${result.duration} ms`); - return; - } - - throw new Error(`${name} failed: ${result.stderr}`); -} - -export function spawnCodesignProcess(esrpCliDLLPath: string, type: 'sign-windows' | 'sign-windows-appx' | 'sign-pgp' | 'sign-darwin' | 'notarize-darwin', folder: string, glob: string): ProcessPromise { - return $`node build/azure-pipelines/common/sign ${esrpCliDLLPath} ${type} ${folder} ${glob}`; -} diff --git a/build/azure-pipelines/common/computeBuiltInDepsCacheKey.js b/build/azure-pipelines/common/computeBuiltInDepsCacheKey.js deleted file mode 100644 index 10fa9087..00000000 --- a/build/azure-pipelines/common/computeBuiltInDepsCacheKey.js +++ /dev/null @@ -1,19 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const fs_1 = __importDefault(require("fs")); -const path_1 = __importDefault(require("path")); -const crypto_1 = __importDefault(require("crypto")); -const productjson = JSON.parse(fs_1.default.readFileSync(path_1.default.join(__dirname, '../../../product.json'), 'utf8')); -const shasum = crypto_1.default.createHash('sha256'); -for (const ext of productjson.builtInExtensions) { - shasum.update(`${ext.name}@${ext.version}`); -} -process.stdout.write(shasum.digest('hex')); -//# sourceMappingURL=computeBuiltInDepsCacheKey.js.map \ No newline at end of file diff --git a/build/azure-pipelines/common/computeBuiltInDepsCacheKey.ts b/build/azure-pipelines/common/computeBuiltInDepsCacheKey.ts deleted file mode 100644 index 8abaaccb..00000000 --- a/build/azure-pipelines/common/computeBuiltInDepsCacheKey.ts +++ /dev/null @@ -1,17 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ - -import fs from 'fs'; -import path from 'path'; -import crypto from 'crypto'; - -const productjson = JSON.parse(fs.readFileSync(path.join(__dirname, '../../../product.json'), 'utf8')); -const shasum = crypto.createHash('sha256'); - -for (const ext of productjson.builtInExtensions) { - shasum.update(`${ext.name}@${ext.version}`); -} - -process.stdout.write(shasum.digest('hex')); diff --git a/build/azure-pipelines/common/computeNodeModulesCacheKey.js b/build/azure-pipelines/common/computeNodeModulesCacheKey.js deleted file mode 100644 index c09c13be..00000000 --- a/build/azure-pipelines/common/computeNodeModulesCacheKey.js +++ /dev/null @@ -1,40 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const fs_1 = __importDefault(require("fs")); -const path_1 = __importDefault(require("path")); -const crypto_1 = __importDefault(require("crypto")); -const { dirs } = require('../../npm/dirs'); -const ROOT = path_1.default.join(__dirname, '../../../'); -const shasum = crypto_1.default.createHash('sha256'); -shasum.update(fs_1.default.readFileSync(path_1.default.join(ROOT, 'build/.cachesalt'))); -shasum.update(fs_1.default.readFileSync(path_1.default.join(ROOT, '.npmrc'))); -shasum.update(fs_1.default.readFileSync(path_1.default.join(ROOT, 'build', '.npmrc'))); -shasum.update(fs_1.default.readFileSync(path_1.default.join(ROOT, 'remote', '.npmrc'))); -// Add `package.json` and `package-lock.json` files -for (const dir of dirs) { - const packageJsonPath = path_1.default.join(ROOT, dir, 'package.json'); - const packageJson = JSON.parse(fs_1.default.readFileSync(packageJsonPath).toString()); - const relevantPackageJsonSections = { - dependencies: packageJson.dependencies, - devDependencies: packageJson.devDependencies, - optionalDependencies: packageJson.optionalDependencies, - resolutions: packageJson.resolutions, - distro: packageJson.distro - }; - shasum.update(JSON.stringify(relevantPackageJsonSections)); - const packageLockPath = path_1.default.join(ROOT, dir, 'package-lock.json'); - shasum.update(fs_1.default.readFileSync(packageLockPath)); -} -// Add any other command line arguments -for (let i = 2; i < process.argv.length; i++) { - shasum.update(process.argv[i]); -} -process.stdout.write(shasum.digest('hex')); -//# sourceMappingURL=computeNodeModulesCacheKey.js.map \ No newline at end of file diff --git a/build/azure-pipelines/common/computeNodeModulesCacheKey.ts b/build/azure-pipelines/common/computeNodeModulesCacheKey.ts deleted file mode 100644 index 57b35dc7..00000000 --- a/build/azure-pipelines/common/computeNodeModulesCacheKey.ts +++ /dev/null @@ -1,42 +0,0 @@ 
-/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ - -import fs from 'fs'; -import path from 'path'; -import crypto from 'crypto'; -const { dirs } = require('../../npm/dirs'); - -const ROOT = path.join(__dirname, '../../../'); - -const shasum = crypto.createHash('sha256'); - -shasum.update(fs.readFileSync(path.join(ROOT, 'build/.cachesalt'))); -shasum.update(fs.readFileSync(path.join(ROOT, '.npmrc'))); -shasum.update(fs.readFileSync(path.join(ROOT, 'build', '.npmrc'))); -shasum.update(fs.readFileSync(path.join(ROOT, 'remote', '.npmrc'))); - -// Add `package.json` and `package-lock.json` files -for (const dir of dirs) { - const packageJsonPath = path.join(ROOT, dir, 'package.json'); - const packageJson = JSON.parse(fs.readFileSync(packageJsonPath).toString()); - const relevantPackageJsonSections = { - dependencies: packageJson.dependencies, - devDependencies: packageJson.devDependencies, - optionalDependencies: packageJson.optionalDependencies, - resolutions: packageJson.resolutions, - distro: packageJson.distro - }; - shasum.update(JSON.stringify(relevantPackageJsonSections)); - - const packageLockPath = path.join(ROOT, dir, 'package-lock.json'); - shasum.update(fs.readFileSync(packageLockPath)); -} - -// Add any other command line arguments -for (let i = 2; i < process.argv.length; i++) { - shasum.update(process.argv[i]); -} - -process.stdout.write(shasum.digest('hex')); diff --git a/build/azure-pipelines/common/createBuild.js b/build/azure-pipelines/common/createBuild.js deleted file mode 100644 index c605ed62..00000000 --- a/build/azure-pipelines/common/createBuild.js +++ /dev/null @@ -1,55 +0,0 @@ -"use strict"; 
-/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -Object.defineProperty(exports, "__esModule", { value: true }); -const identity_1 = require("@azure/identity"); -const cosmos_1 = require("@azure/cosmos"); -const retry_1 = require("./retry"); -if (process.argv.length !== 3) { - console.error('Usage: node createBuild.js VERSION'); - process.exit(-1); -} -function getEnv(name) { - const result = process.env[name]; - if (typeof result === 'undefined') { - throw new Error('Missing env: ' + name); - } - return result; -} -async function main() { - const [, , _version] = process.argv; - const quality = getEnv('VSCODE_QUALITY'); - const commit = getEnv('BUILD_SOURCEVERSION'); - const queuedBy = getEnv('BUILD_QUEUEDBY'); - const sourceBranch = getEnv('BUILD_SOURCEBRANCH'); - const version = _version + (quality === 'stable' ? 
'' : `-${quality}`); - console.log('Creating build...'); - console.log('Quality:', quality); - console.log('Version:', version); - console.log('Commit:', commit); - const build = { - id: commit, - timestamp: (new Date()).getTime(), - version, - isReleased: false, - private: process.env['VSCODE_PRIVATE_BUILD']?.toLowerCase() === 'true', - sourceBranch, - queuedBy, - assets: [], - updates: {} - }; - const aadCredentials = new identity_1.ClientAssertionCredential(process.env['AZURE_TENANT_ID'], process.env['AZURE_CLIENT_ID'], () => Promise.resolve(process.env['AZURE_ID_TOKEN'])); - const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], aadCredentials }); - const scripts = client.database('builds').container(quality).scripts; - await (0, retry_1.retry)(() => scripts.storedProcedure('createBuild').execute('', [{ ...build, _partitionKey: '' }])); -} -main().then(() => { - console.log('Build successfully created'); - process.exit(0); -}, err => { - console.error(err); - process.exit(1); -}); -//# sourceMappingURL=createBuild.js.map \ No newline at end of file diff --git a/build/azure-pipelines/common/createBuild.ts b/build/azure-pipelines/common/createBuild.ts deleted file mode 100644 index 6afeb01e..00000000 --- a/build/azure-pipelines/common/createBuild.ts +++ /dev/null @@ -1,62 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ - -import { ClientAssertionCredential } from '@azure/identity'; -import { CosmosClient } from '@azure/cosmos'; -import { retry } from './retry'; - -if (process.argv.length !== 3) { - console.error('Usage: node createBuild.js VERSION'); - process.exit(-1); -} - -function getEnv(name: string): string { - const result = process.env[name]; - - if (typeof result === 'undefined') { - throw new Error('Missing env: ' + name); - } - - return result; -} - -async function main(): Promise { - const [, , _version] = process.argv; - const quality = getEnv('VSCODE_QUALITY'); - const commit = getEnv('BUILD_SOURCEVERSION'); - const queuedBy = getEnv('BUILD_QUEUEDBY'); - const sourceBranch = getEnv('BUILD_SOURCEBRANCH'); - const version = _version + (quality === 'stable' ? '' : `-${quality}`); - - console.log('Creating build...'); - console.log('Quality:', quality); - console.log('Version:', version); - console.log('Commit:', commit); - - const build = { - id: commit, - timestamp: (new Date()).getTime(), - version, - isReleased: false, - private: process.env['VSCODE_PRIVATE_BUILD']?.toLowerCase() === 'true', - sourceBranch, - queuedBy, - assets: [], - updates: {} - }; - - const aadCredentials = new ClientAssertionCredential(process.env['AZURE_TENANT_ID']!, process.env['AZURE_CLIENT_ID']!, () => Promise.resolve(process.env['AZURE_ID_TOKEN']!)); - const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, aadCredentials }); - const scripts = client.database('builds').container(quality).scripts; - await retry(() => scripts.storedProcedure('createBuild').execute('', [{ ...build, _partitionKey: '' }])); -} - -main().then(() => { - console.log('Build successfully created'); - process.exit(0); -}, err => { - console.error(err); - process.exit(1); -}); diff --git a/build/azure-pipelines/common/extract-telemetry.sh b/build/azure-pipelines/common/extract-telemetry.sh 
deleted file mode 100755 index 9cebe22b..00000000 --- a/build/azure-pipelines/common/extract-telemetry.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env bash -set -e - -cd $BUILD_STAGINGDIRECTORY -mkdir extraction -cd extraction -git clone --depth 1 https://github.com/microsoft/vscode-extension-telemetry.git -git clone --depth 1 https://github.com/microsoft/vscode-chrome-debug-core.git -git clone --depth 1 https://github.com/microsoft/vscode-node-debug2.git -git clone --depth 1 https://github.com/microsoft/vscode-node-debug.git -git clone --depth 1 https://github.com/microsoft/vscode-html-languageservice.git -git clone --depth 1 https://github.com/microsoft/vscode-json-languageservice.git -node $BUILD_SOURCESDIRECTORY/node_modules/.bin/vscode-telemetry-extractor --sourceDir $BUILD_SOURCESDIRECTORY --excludedDir $BUILD_SOURCESDIRECTORY/extensions --outputDir . --applyEndpoints -node $BUILD_SOURCESDIRECTORY/node_modules/.bin/vscode-telemetry-extractor --config $BUILD_SOURCESDIRECTORY/build/azure-pipelines/common/telemetry-config.json -o . -mkdir -p $BUILD_SOURCESDIRECTORY/.build/telemetry -mv declarations-resolved.json $BUILD_SOURCESDIRECTORY/.build/telemetry/telemetry-core.json -mv config-resolved.json $BUILD_SOURCESDIRECTORY/.build/telemetry/telemetry-extensions.json -cd .. -rm -rf extraction diff --git a/build/azure-pipelines/common/getPublishAuthTokens.js b/build/azure-pipelines/common/getPublishAuthTokens.js deleted file mode 100644 index 9c22e9ad..00000000 --- a/build/azure-pipelines/common/getPublishAuthTokens.js +++ /dev/null @@ -1,47 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.getAccessToken = getAccessToken; -const msal_node_1 = require("@azure/msal-node"); -function e(name) { - const result = process.env[name]; - if (typeof result !== 'string') { - throw new Error(`Missing env: ${name}`); - } - return result; -} -async function getAccessToken(endpoint, tenantId, clientId, idToken) { - const app = new msal_node_1.ConfidentialClientApplication({ - auth: { - clientId, - authority: `https://login.microsoftonline.com/${tenantId}`, - clientAssertion: idToken - } - }); - const result = await app.acquireTokenByClientCredential({ scopes: [`${endpoint}.default`] }); - if (!result) { - throw new Error('Failed to get access token'); - } - return { - token: result.accessToken, - expiresOnTimestamp: result.expiresOn.getTime(), - refreshAfterTimestamp: result.refreshOn?.getTime() - }; -} -async function main() { - const cosmosDBAccessToken = await getAccessToken(e('AZURE_DOCUMENTDB_ENDPOINT'), e('AZURE_TENANT_ID'), e('AZURE_CLIENT_ID'), e('AZURE_ID_TOKEN')); - const blobServiceAccessToken = await getAccessToken(`https://${e('VSCODE_STAGING_BLOB_STORAGE_ACCOUNT_NAME')}.blob.core.windows.net/`, process.env['AZURE_TENANT_ID'], process.env['AZURE_CLIENT_ID'], process.env['AZURE_ID_TOKEN']); - console.log(JSON.stringify({ cosmosDBAccessToken, blobServiceAccessToken })); -} -if (require.main === module) { - main().then(() => { - process.exit(0); - }, err => { - console.error(err); - process.exit(1); - }); -} -//# sourceMappingURL=getPublishAuthTokens.js.map \ No newline at end of file diff --git a/build/azure-pipelines/common/getPublishAuthTokens.ts b/build/azure-pipelines/common/getPublishAuthTokens.ts deleted file mode 100644 index 68e76de1..00000000 --- a/build/azure-pipelines/common/getPublishAuthTokens.ts +++ /dev/null @@ -1,54 +0,0 @@ 
-/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ - -import { AccessToken } from '@azure/core-auth'; -import { ConfidentialClientApplication } from '@azure/msal-node'; - -function e(name: string): string { - const result = process.env[name]; - - if (typeof result !== 'string') { - throw new Error(`Missing env: ${name}`); - } - - return result; -} - -export async function getAccessToken(endpoint: string, tenantId: string, clientId: string, idToken: string): Promise { - const app = new ConfidentialClientApplication({ - auth: { - clientId, - authority: `https://login.microsoftonline.com/${tenantId}`, - clientAssertion: idToken - } - }); - - const result = await app.acquireTokenByClientCredential({ scopes: [`${endpoint}.default`] }); - - if (!result) { - throw new Error('Failed to get access token'); - } - - return { - token: result.accessToken, - expiresOnTimestamp: result.expiresOn!.getTime(), - refreshAfterTimestamp: result.refreshOn?.getTime() - }; -} - -async function main() { - const cosmosDBAccessToken = await getAccessToken(e('AZURE_DOCUMENTDB_ENDPOINT')!, e('AZURE_TENANT_ID')!, e('AZURE_CLIENT_ID')!, e('AZURE_ID_TOKEN')!); - const blobServiceAccessToken = await getAccessToken(`https://${e('VSCODE_STAGING_BLOB_STORAGE_ACCOUNT_NAME')}.blob.core.windows.net/`, process.env['AZURE_TENANT_ID']!, process.env['AZURE_CLIENT_ID']!, process.env['AZURE_ID_TOKEN']!); - console.log(JSON.stringify({ cosmosDBAccessToken, blobServiceAccessToken })); -} - -if (require.main === module) { - main().then(() => { - process.exit(0); - }, err => { - console.error(err); - process.exit(1); - }); -} diff --git a/build/azure-pipelines/common/install-builtin-extensions.yml 
b/build/azure-pipelines/common/install-builtin-extensions.yml deleted file mode 100644 index c1ee18d0..00000000 --- a/build/azure-pipelines/common/install-builtin-extensions.yml +++ /dev/null @@ -1,24 +0,0 @@ -steps: - - pwsh: mkdir .build -ea 0 - condition: and(succeeded(), contains(variables['Agent.OS'], 'windows')) - displayName: Create .build folder - - - script: mkdir -p .build - condition: and(succeeded(), not(contains(variables['Agent.OS'], 'windows'))) - displayName: Create .build folder - - - script: node build/azure-pipelines/common/computeBuiltInDepsCacheKey.js > .build/builtindepshash - displayName: Prepare built-in extensions cache key - - - task: Cache@2 - inputs: - key: '"builtin-extensions" | .build/builtindepshash' - path: .build/builtInExtensions - cacheHitVar: BUILTIN_EXTENSIONS_RESTORED - displayName: Restore built-in extensions cache - - - script: node build/lib/builtInExtensions.js - env: - GITHUB_TOKEN: "$(github-distro-mixin-password)" - condition: and(succeeded(), ne(variables.BUILTIN_EXTENSIONS_RESTORED, 'true')) - displayName: Download built-in extensions diff --git a/build/azure-pipelines/common/installPlaywright.js b/build/azure-pipelines/common/installPlaywright.js deleted file mode 100644 index 4f25e5b5..00000000 --- a/build/azure-pipelines/common/installPlaywright.js +++ /dev/null @@ -1,12 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ -process.env.DEBUG = 'pw:install'; // enable logging for this (https://github.com/microsoft/playwright/issues/17394) -const { installDefaultBrowsersForNpmInstall } = require('playwright-core/lib/server'); -async function install() { - await installDefaultBrowsersForNpmInstall(); -} -install(); -//# sourceMappingURL=installPlaywright.js.map \ No newline at end of file diff --git a/build/azure-pipelines/common/listNodeModules.js b/build/azure-pipelines/common/listNodeModules.js deleted file mode 100644 index 301b5f93..00000000 --- a/build/azure-pipelines/common/listNodeModules.js +++ /dev/null @@ -1,44 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const fs_1 = __importDefault(require("fs")); -const path_1 = __importDefault(require("path")); -if (process.argv.length !== 3) { - console.error('Usage: node listNodeModules.js OUTPUT_FILE'); - process.exit(-1); -} -const ROOT = path_1.default.join(__dirname, '../../../'); -function findNodeModulesFiles(location, inNodeModules, result) { - const entries = fs_1.default.readdirSync(path_1.default.join(ROOT, location)); - for (const entry of entries) { - const entryPath = `${location}/${entry}`; - if (/(^\/out)|(^\/src$)|(^\/.git$)|(^\/.build$)/.test(entryPath)) { - continue; - } - let stat; - try { - stat = fs_1.default.statSync(path_1.default.join(ROOT, entryPath)); - } - catch (err) { - continue; - } - if (stat.isDirectory()) { - findNodeModulesFiles(entryPath, inNodeModules || (entry === 'node_modules'), result); - } - else { - if (inNodeModules) { - result.push(entryPath.substr(1)); - } - } - } -} -const result = []; -findNodeModulesFiles('', false, result); -fs_1.default.writeFileSync(process.argv[2], result.join('\n') + '\n'); -//# sourceMappingURL=listNodeModules.js.map \ No newline at end of file diff --git a/build/azure-pipelines/common/listNodeModules.ts b/build/azure-pipelines/common/listNodeModules.ts deleted file mode 100644 index fb85b25c..00000000 --- a/build/azure-pipelines/common/listNodeModules.ts +++ /dev/null @@ -1,44 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ - -import fs from 'fs'; -import path from 'path'; - -if (process.argv.length !== 3) { - console.error('Usage: node listNodeModules.js OUTPUT_FILE'); - process.exit(-1); -} - -const ROOT = path.join(__dirname, '../../../'); - -function findNodeModulesFiles(location: string, inNodeModules: boolean, result: string[]) { - const entries = fs.readdirSync(path.join(ROOT, location)); - for (const entry of entries) { - const entryPath = `${location}/${entry}`; - - if (/(^\/out)|(^\/src$)|(^\/.git$)|(^\/.build$)/.test(entryPath)) { - continue; - } - - let stat: fs.Stats; - try { - stat = fs.statSync(path.join(ROOT, entryPath)); - } catch (err) { - continue; - } - - if (stat.isDirectory()) { - findNodeModulesFiles(entryPath, inNodeModules || (entry === 'node_modules'), result); - } else { - if (inNodeModules) { - result.push(entryPath.substr(1)); - } - } - } -} - -const result: string[] = []; -findNodeModulesFiles('', false, result); -fs.writeFileSync(process.argv[2], result.join('\n') + '\n'); diff --git a/build/azure-pipelines/common/publish.js b/build/azure-pipelines/common/publish.js deleted file mode 100644 index d65a4348..00000000 --- a/build/azure-pipelines/common/publish.js +++ /dev/null @@ -1,722 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.e = e; -exports.requestAZDOAPI = requestAZDOAPI; -const fs_1 = __importDefault(require("fs")); -const path_1 = __importDefault(require("path")); -const stream_1 = require("stream"); -const promises_1 = require("node:stream/promises"); -const yauzl_1 = __importDefault(require("yauzl")); -const crypto_1 = __importDefault(require("crypto")); -const retry_1 = require("./retry"); -const cosmos_1 = require("@azure/cosmos"); -const child_process_1 = __importDefault(require("child_process")); -const os_1 = __importDefault(require("os")); -const node_worker_threads_1 = require("node:worker_threads"); -const msal_node_1 = require("@azure/msal-node"); -const storage_blob_1 = require("@azure/storage-blob"); -const jws_1 = __importDefault(require("jws")); -const node_timers_1 = require("node:timers"); -function e(name) { - const result = process.env[name]; - if (typeof result !== 'string') { - throw new Error(`Missing env: ${name}`); - } - return result; -} -function hashStream(hashName, stream) { - return new Promise((c, e) => { - const shasum = crypto_1.default.createHash(hashName); - stream - .on('data', shasum.update.bind(shasum)) - .on('error', e) - .on('close', () => c(shasum.digest())); - }); -} -var StatusCode; -(function (StatusCode) { - StatusCode["Pass"] = "pass"; - StatusCode["Aborted"] = "aborted"; - StatusCode["Inprogress"] = "inprogress"; - StatusCode["FailCanRetry"] = "failCanRetry"; - StatusCode["FailDoNotRetry"] = "failDoNotRetry"; - StatusCode["PendingAnalysis"] = "pendingAnalysis"; - StatusCode["Cancelled"] = "cancelled"; -})(StatusCode || (StatusCode = {})); -function getCertificateBuffer(input) { - return Buffer.from(input.replace(/-----BEGIN CERTIFICATE-----|-----END CERTIFICATE-----|\n/g, ''), 'base64'); -} -function getThumbprint(input, algorithm) { - const buffer = getCertificateBuffer(input); - return 
crypto_1.default.createHash(algorithm).update(buffer).digest(); -} -function getKeyFromPFX(pfx) { - const pfxCertificatePath = path_1.default.join(os_1.default.tmpdir(), 'cert.pfx'); - const pemKeyPath = path_1.default.join(os_1.default.tmpdir(), 'key.pem'); - try { - const pfxCertificate = Buffer.from(pfx, 'base64'); - fs_1.default.writeFileSync(pfxCertificatePath, pfxCertificate); - child_process_1.default.execSync(`openssl pkcs12 -in "${pfxCertificatePath}" -nocerts -nodes -out "${pemKeyPath}" -passin pass:`); - const raw = fs_1.default.readFileSync(pemKeyPath, 'utf-8'); - const result = raw.match(/-----BEGIN PRIVATE KEY-----[\s\S]+?-----END PRIVATE KEY-----/g)[0]; - return result; - } - finally { - fs_1.default.rmSync(pfxCertificatePath, { force: true }); - fs_1.default.rmSync(pemKeyPath, { force: true }); - } -} -function getCertificatesFromPFX(pfx) { - const pfxCertificatePath = path_1.default.join(os_1.default.tmpdir(), 'cert.pfx'); - const pemCertificatePath = path_1.default.join(os_1.default.tmpdir(), 'cert.pem'); - try { - const pfxCertificate = Buffer.from(pfx, 'base64'); - fs_1.default.writeFileSync(pfxCertificatePath, pfxCertificate); - child_process_1.default.execSync(`openssl pkcs12 -in "${pfxCertificatePath}" -nokeys -out "${pemCertificatePath}" -passin pass:`); - const raw = fs_1.default.readFileSync(pemCertificatePath, 'utf-8'); - const matches = raw.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g); - return matches ? 
matches.reverse() : []; - } - finally { - fs_1.default.rmSync(pfxCertificatePath, { force: true }); - fs_1.default.rmSync(pemCertificatePath, { force: true }); - } -} -class ESRPReleaseService { - log; - clientId; - accessToken; - requestSigningCertificates; - requestSigningKey; - containerClient; - stagingSasToken; - static async create(log, tenantId, clientId, authCertificatePfx, requestSigningCertificatePfx, containerClient, stagingSasToken) { - const authKey = getKeyFromPFX(authCertificatePfx); - const authCertificate = getCertificatesFromPFX(authCertificatePfx)[0]; - const requestSigningKey = getKeyFromPFX(requestSigningCertificatePfx); - const requestSigningCertificates = getCertificatesFromPFX(requestSigningCertificatePfx); - const app = new msal_node_1.ConfidentialClientApplication({ - auth: { - clientId, - authority: `https://login.microsoftonline.com/${tenantId}`, - clientCertificate: { - thumbprintSha256: getThumbprint(authCertificate, 'sha256').toString('hex'), - privateKey: authKey, - x5c: authCertificate - } - } - }); - const response = await app.acquireTokenByClientCredential({ - scopes: ['https://api.esrp.microsoft.com/.default'] - }); - return new ESRPReleaseService(log, clientId, response.accessToken, requestSigningCertificates, requestSigningKey, containerClient, stagingSasToken); - } - static API_URL = 'https://api.esrp.microsoft.com/api/v3/releaseservices/clients/'; - constructor(log, clientId, accessToken, requestSigningCertificates, requestSigningKey, containerClient, stagingSasToken) { - this.log = log; - this.clientId = clientId; - this.accessToken = accessToken; - this.requestSigningCertificates = requestSigningCertificates; - this.requestSigningKey = requestSigningKey; - this.containerClient = containerClient; - this.stagingSasToken = stagingSasToken; - } - async createRelease(version, filePath, friendlyFileName) { - const correlationId = crypto_1.default.randomUUID(); - const blobClient = 
this.containerClient.getBlockBlobClient(correlationId); - this.log(`Uploading ${filePath} to ${blobClient.url}`); - await blobClient.uploadFile(filePath); - this.log('Uploaded blob successfully'); - try { - this.log(`Submitting release for ${version}: ${filePath}`); - const submitReleaseResult = await this.submitRelease(version, filePath, friendlyFileName, correlationId, blobClient); - this.log(`Successfully submitted release ${submitReleaseResult.operationId}. Polling for completion...`); - // Poll every 5 seconds, wait 60 minutes max -> poll 60/5*60=720 times - for (let i = 0; i < 720; i++) { - await new Promise(c => setTimeout(c, 5000)); - const releaseStatus = await this.getReleaseStatus(submitReleaseResult.operationId); - if (releaseStatus.status === 'pass') { - break; - } - else if (releaseStatus.status === 'aborted') { - this.log(JSON.stringify(releaseStatus)); - throw new Error(`Release was aborted`); - } - else if (releaseStatus.status !== 'inprogress') { - this.log(JSON.stringify(releaseStatus)); - throw new Error(`Unknown error when polling for release`); - } - } - const releaseDetails = await this.getReleaseDetails(submitReleaseResult.operationId); - if (releaseDetails.status !== 'pass') { - throw new Error(`Timed out waiting for release: ${JSON.stringify(releaseDetails)}`); - } - this.log('Successfully created release:', releaseDetails.files[0].fileDownloadDetails[0].downloadUrl); - return releaseDetails.files[0].fileDownloadDetails[0].downloadUrl; - } - finally { - this.log(`Deleting blob ${blobClient.url}`); - await blobClient.delete(); - this.log('Deleted blob successfully'); - } - } - async submitRelease(version, filePath, friendlyFileName, correlationId, blobClient) { - const size = fs_1.default.statSync(filePath).size; - const hash = await hashStream('sha256', fs_1.default.createReadStream(filePath)); - const blobUrl = `${blobClient.url}?${this.stagingSasToken}`; - const message = { - customerCorrelationId: correlationId, - esrpCorrelationId: 
correlationId, - driEmail: ['joao.moreno@microsoft.com'], - createdBy: { userPrincipalName: 'jomo@microsoft.com' }, - owners: [{ owner: { userPrincipalName: 'jomo@microsoft.com' } }], - approvers: [{ approver: { userPrincipalName: 'jomo@microsoft.com' }, isAutoApproved: true, isMandatory: false }], - releaseInfo: { - title: 'VS Code', - properties: { - 'ReleaseContentType': 'InstallPackage' - }, - minimumNumberOfApprovers: 1 - }, - productInfo: { - name: 'VS Code', - version, - description: 'VS Code' - }, - accessPermissionsInfo: { - mainPublisher: 'VSCode', - channelDownloadEntityDetails: { - AllDownloadEntities: ['VSCode'] - } - }, - routingInfo: { - intent: 'filedownloadlinkgeneration' - }, - files: [{ - name: path_1.default.basename(filePath), - friendlyFileName, - tenantFileLocation: blobUrl, - tenantFileLocationType: 'AzureBlob', - sourceLocation: { - type: 'azureBlob', - blobUrl - }, - hashType: 'sha256', - hash: Array.from(hash), - sizeInBytes: size - }] - }; - message.jwsToken = await this.generateJwsToken(message); - const res = await fetch(`${ESRPReleaseService.API_URL}${this.clientId}/workflows/release/operations`, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - 'Authorization': `Bearer ${this.accessToken}` - }, - body: JSON.stringify(message) - }); - if (!res.ok) { - const text = await res.text(); - throw new Error(`Failed to submit release: ${res.statusText}\n${text}`); - } - return await res.json(); - } - async getReleaseStatus(releaseId) { - const url = `${ESRPReleaseService.API_URL}${this.clientId}/workflows/release/operations/grs/${releaseId}`; - const res = await (0, retry_1.retry)(() => fetch(url, { - headers: { - 'Authorization': `Bearer ${this.accessToken}` - } - })); - if (!res.ok) { - const text = await res.text(); - throw new Error(`Failed to get release status: ${res.statusText}\n${text}`); - } - return await res.json(); - } - async getReleaseDetails(releaseId) { - const url = 
`${ESRPReleaseService.API_URL}${this.clientId}/workflows/release/operations/grd/${releaseId}`; - const res = await (0, retry_1.retry)(() => fetch(url, { - headers: { - 'Authorization': `Bearer ${this.accessToken}` - } - })); - if (!res.ok) { - const text = await res.text(); - throw new Error(`Failed to get release status: ${res.statusText}\n${text}`); - } - return await res.json(); - } - async generateJwsToken(message) { - return jws_1.default.sign({ - header: { - alg: 'RS256', - crit: ['exp', 'x5t'], - // Release service uses ticks, not seconds :roll_eyes: (https://stackoverflow.com/a/7968483) - exp: ((Date.now() + (6 * 60 * 1000)) * 10000) + 621355968000000000, - // Release service uses hex format, not base64url :roll_eyes: - x5t: getThumbprint(this.requestSigningCertificates[0], 'sha1').toString('hex'), - // Release service uses a '.' separated string, not an array of strings :roll_eyes: - x5c: this.requestSigningCertificates.map(c => getCertificateBuffer(c).toString('base64url')).join('.'), - }, - payload: message, - privateKey: this.requestSigningKey, - }); - } -} -class State { - statePath; - set = new Set(); - constructor() { - const pipelineWorkspacePath = e('PIPELINE_WORKSPACE'); - const previousState = fs_1.default.readdirSync(pipelineWorkspacePath) - .map(name => /^artifacts_processed_(\d+)$/.exec(name)) - .filter((match) => !!match) - .map(match => ({ name: match[0], attempt: Number(match[1]) })) - .sort((a, b) => b.attempt - a.attempt)[0]; - if (previousState) { - const previousStatePath = path_1.default.join(pipelineWorkspacePath, previousState.name, previousState.name + '.txt'); - fs_1.default.readFileSync(previousStatePath, 'utf8').split(/\n/).filter(name => !!name).forEach(name => this.set.add(name)); - } - const stageAttempt = e('SYSTEM_STAGEATTEMPT'); - this.statePath = path_1.default.join(pipelineWorkspacePath, `artifacts_processed_${stageAttempt}`, `artifacts_processed_${stageAttempt}.txt`); - 
fs_1.default.mkdirSync(path_1.default.dirname(this.statePath), { recursive: true }); - fs_1.default.writeFileSync(this.statePath, [...this.set.values()].map(name => `${name}\n`).join('')); - } - get size() { - return this.set.size; - } - has(name) { - return this.set.has(name); - } - add(name) { - this.set.add(name); - fs_1.default.appendFileSync(this.statePath, `${name}\n`); - } - [Symbol.iterator]() { - return this.set[Symbol.iterator](); - } -} -const azdoFetchOptions = { - headers: { - // Pretend we're a web browser to avoid download rate limits - 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36 Edg/119.0.0.0', - 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7', - 'Accept-Encoding': 'gzip, deflate, br', - 'Accept-Language': 'en-US,en;q=0.9', - 'Referer': 'https://dev.azure.com', - Authorization: `Bearer ${e('SYSTEM_ACCESSTOKEN')}` - } -}; -async function requestAZDOAPI(path) { - const abortController = new AbortController(); - const timeout = setTimeout(() => abortController.abort(), 2 * 60 * 1000); - try { - const res = await (0, retry_1.retry)(() => fetch(`${e('BUILDS_API_URL')}${path}?api-version=6.0`, { ...azdoFetchOptions, signal: abortController.signal })); - if (!res.ok) { - throw new Error(`Unexpected status code: ${res.status}`); - } - return await res.json(); - } - finally { - clearTimeout(timeout); - } -} -async function getPipelineArtifacts() { - const result = await requestAZDOAPI('artifacts'); - return result.value.filter(a => /^vscode_/.test(a.name) && !/sbom$/.test(a.name)); -} -async function getPipelineTimeline() { - return await requestAZDOAPI('timeline'); -} -async function downloadArtifact(artifact, downloadPath) { - const abortController = new AbortController(); - const timeout = setTimeout(() => abortController.abort(), 4 * 60 * 1000); - try { - const res = await 
fetch(artifact.resource.downloadUrl, { ...azdoFetchOptions, signal: abortController.signal }); - if (!res.ok) { - throw new Error(`Unexpected status code: ${res.status}`); - } - await (0, promises_1.pipeline)(stream_1.Readable.fromWeb(res.body), fs_1.default.createWriteStream(downloadPath)); - } - finally { - clearTimeout(timeout); - } -} -async function unzip(packagePath, outputPath) { - return new Promise((resolve, reject) => { - yauzl_1.default.open(packagePath, { lazyEntries: true, autoClose: true }, (err, zipfile) => { - if (err) { - return reject(err); - } - const result = []; - zipfile.on('entry', entry => { - if (/\/$/.test(entry.fileName)) { - zipfile.readEntry(); - } - else { - zipfile.openReadStream(entry, (err, istream) => { - if (err) { - return reject(err); - } - const filePath = path_1.default.join(outputPath, entry.fileName); - fs_1.default.mkdirSync(path_1.default.dirname(filePath), { recursive: true }); - const ostream = fs_1.default.createWriteStream(filePath); - ostream.on('finish', () => { - result.push(filePath); - zipfile.readEntry(); - }); - istream?.on('error', err => reject(err)); - istream.pipe(ostream); - }); - } - }); - zipfile.on('close', () => resolve(result)); - zipfile.readEntry(); - }); - }); -} -// Contains all of the logic for mapping details to our actual product names in CosmosDB -function getPlatform(product, os, arch, type) { - switch (os) { - case 'win32': - switch (product) { - case 'client': { - switch (type) { - case 'archive': - return `win32-${arch}-archive`; - case 'setup': - return `win32-${arch}`; - case 'user-setup': - return `win32-${arch}-user`; - default: - throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`); - } - } - case 'server': - return `server-win32-${arch}`; - case 'web': - return `server-win32-${arch}-web`; - case 'cli': - return `cli-win32-${arch}`; - default: - throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`); - } - case 'alpine': - switch (product) { - case 'server': - 
return `server-alpine-${arch}`; - case 'web': - return `server-alpine-${arch}-web`; - case 'cli': - return `cli-alpine-${arch}`; - default: - throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`); - } - case 'linux': - switch (type) { - case 'snap': - return `linux-snap-${arch}`; - case 'archive-unsigned': - switch (product) { - case 'client': - return `linux-${arch}`; - case 'server': - return `server-linux-${arch}`; - case 'web': - if (arch === 'standalone') { - return 'web-standalone'; - } - return `server-linux-${arch}-web`; - default: - throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`); - } - case 'deb-package': - return `linux-deb-${arch}`; - case 'rpm-package': - return `linux-rpm-${arch}`; - case 'cli': - return `cli-linux-${arch}`; - default: - throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`); - } - case 'darwin': - switch (product) { - case 'client': - if (arch === 'x64') { - return 'darwin'; - } - return `darwin-${arch}`; - case 'server': - if (arch === 'x64') { - return 'server-darwin'; - } - return `server-darwin-${arch}`; - case 'web': - if (arch === 'x64') { - return 'server-darwin-web'; - } - return `server-darwin-${arch}-web`; - case 'cli': - return `cli-darwin-${arch}`; - default: - throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`); - } - default: - throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`); - } -} -// Contains all of the logic for mapping types to our actual types in CosmosDB -function getRealType(type) { - switch (type) { - case 'user-setup': - return 'setup'; - case 'deb-package': - case 'rpm-package': - return 'package'; - default: - return type; - } -} -async function withLease(client, fn) { - const lease = client.getBlobLeaseClient(); - for (let i = 0; i < 360; i++) { // Try to get lease for 30 minutes - try { - await client.uploadData(new ArrayBuffer()); // blob needs to exist for lease to be acquired - await lease.acquireLease(60); - try { - const 
abortController = new AbortController(); - const refresher = new Promise((c, e) => { - abortController.signal.onabort = () => { - (0, node_timers_1.clearInterval)(interval); - c(); - }; - const interval = (0, node_timers_1.setInterval)(() => { - lease.renewLease().catch(err => { - (0, node_timers_1.clearInterval)(interval); - e(new Error('Failed to renew lease ' + err)); - }); - }, 30_000); - }); - const result = await Promise.race([fn(), refresher]); - abortController.abort(); - return result; - } - finally { - await lease.releaseLease(); - } - } - catch (err) { - if (err.statusCode !== 409 && err.statusCode !== 412) { - throw err; - } - await new Promise(c => setTimeout(c, 5000)); - } - } - throw new Error('Failed to acquire lease on blob after 30 minutes'); -} -async function processArtifact(artifact, filePath) { - const log = (...args) => console.log(`[${artifact.name}]`, ...args); - const match = /^vscode_(?[^_]+)_(?[^_]+)(?:_legacy)?_(?[^_]+)_(?[^_]+)$/.exec(artifact.name); - if (!match) { - throw new Error(`Invalid artifact name: ${artifact.name}`); - } - const { cosmosDBAccessToken, blobServiceAccessToken } = JSON.parse(e('PUBLISH_AUTH_TOKENS')); - const quality = e('VSCODE_QUALITY'); - const version = e('BUILD_SOURCEVERSION'); - const friendlyFileName = `${quality}/${version}/${path_1.default.basename(filePath)}`; - const blobServiceClient = new storage_blob_1.BlobServiceClient(`https://${e('VSCODE_STAGING_BLOB_STORAGE_ACCOUNT_NAME')}.blob.core.windows.net/`, { getToken: async () => blobServiceAccessToken }); - const leasesContainerClient = blobServiceClient.getContainerClient('leases'); - await leasesContainerClient.createIfNotExists(); - const leaseBlobClient = leasesContainerClient.getBlockBlobClient(friendlyFileName); - log(`Acquiring lease for: ${friendlyFileName}`); - await withLease(leaseBlobClient, async () => { - log(`Successfully acquired lease for: ${friendlyFileName}`); - const url = `${e('PRSS_CDN_URL')}/${friendlyFileName}`; - const res = 
await (0, retry_1.retry)(() => fetch(url)); - if (res.status === 200) { - log(`Already released and provisioned: ${url}`); - } - else { - const stagingContainerClient = blobServiceClient.getContainerClient('staging'); - await stagingContainerClient.createIfNotExists(); - const now = new Date().valueOf(); - const oneHour = 60 * 60 * 1000; - const oneHourAgo = new Date(now - oneHour); - const oneHourFromNow = new Date(now + oneHour); - const userDelegationKey = await blobServiceClient.getUserDelegationKey(oneHourAgo, oneHourFromNow); - const sasOptions = { containerName: 'staging', permissions: storage_blob_1.ContainerSASPermissions.from({ read: true }), startsOn: oneHourAgo, expiresOn: oneHourFromNow }; - const stagingSasToken = (0, storage_blob_1.generateBlobSASQueryParameters)(sasOptions, userDelegationKey, e('VSCODE_STAGING_BLOB_STORAGE_ACCOUNT_NAME')).toString(); - const releaseService = await ESRPReleaseService.create(log, e('RELEASE_TENANT_ID'), e('RELEASE_CLIENT_ID'), e('RELEASE_AUTH_CERT'), e('RELEASE_REQUEST_SIGNING_CERT'), stagingContainerClient, stagingSasToken); - await releaseService.createRelease(version, filePath, friendlyFileName); - } - const { product, os, arch, unprocessedType } = match.groups; - const platform = getPlatform(product, os, arch, unprocessedType); - const type = getRealType(unprocessedType); - const size = fs_1.default.statSync(filePath).size; - const stream = fs_1.default.createReadStream(filePath); - const [hash, sha256hash] = await Promise.all([hashStream('sha1', stream), hashStream('sha256', stream)]); // CodeQL [SM04514] Using SHA1 only for legacy reasons, we are actually only respecting SHA256 - const asset = { platform, type, url, hash: hash.toString('hex'), sha256hash: sha256hash.toString('hex'), size, supportsFastUpdate: true }; - log('Creating asset...'); - const result = await (0, retry_1.retry)(async (attempt) => { - log(`Creating asset in Cosmos DB (attempt ${attempt})...`); - const client = new cosmos_1.CosmosClient({ 
endpoint: e('AZURE_DOCUMENTDB_ENDPOINT'), tokenProvider: () => Promise.resolve(`type=aad&ver=1.0&sig=${cosmosDBAccessToken.token}`) }); - const scripts = client.database('builds').container(quality).scripts; - const { resource: result } = await scripts.storedProcedure('createAsset').execute('', [version, asset, true]); - return result; - }); - if (result === 'already exists') { - log('Asset already exists!'); - } - else { - log('Asset successfully created: ', JSON.stringify(asset, undefined, 2)); - } - }); - log(`Successfully released lease for: ${friendlyFileName}`); -} -// It is VERY important that we don't download artifacts too much too fast from AZDO. -// AZDO throttles us SEVERELY if we do. Not just that, but they also close open -// sockets, so the whole things turns to a grinding halt. So, downloading and extracting -// happens serially in the main thread, making the downloads are spaced out -// properly. For each extracted artifact, we spawn a worker thread to upload it to -// the CDN and finally update the build in Cosmos DB. 
-async function main() { - if (!node_worker_threads_1.isMainThread) { - const { artifact, artifactFilePath } = node_worker_threads_1.workerData; - await processArtifact(artifact, artifactFilePath); - return; - } - const done = new State(); - const processing = new Set(); - for (const name of done) { - console.log(`\u2705 ${name}`); - } - const stages = new Set(['Compile']); - if (e('VSCODE_BUILD_STAGE_LINUX') === 'True' || - e('VSCODE_BUILD_STAGE_ALPINE') === 'True' || - e('VSCODE_BUILD_STAGE_MACOS') === 'True' || - e('VSCODE_BUILD_STAGE_WINDOWS') === 'True') { - stages.add('CompileCLI'); - } - if (e('VSCODE_BUILD_STAGE_WINDOWS') === 'True') { - stages.add('Windows'); - } - if (e('VSCODE_BUILD_STAGE_LINUX') === 'True') { - stages.add('Linux'); - } - if (e('VSCODE_BUILD_STAGE_ALPINE') === 'True') { - stages.add('Alpine'); - } - if (e('VSCODE_BUILD_STAGE_MACOS') === 'True') { - stages.add('macOS'); - } - if (e('VSCODE_BUILD_STAGE_WEB') === 'True') { - stages.add('Web'); - } - let timeline; - let artifacts; - let resultPromise = Promise.resolve([]); - const operations = []; - while (true) { - [timeline, artifacts] = await Promise.all([(0, retry_1.retry)(() => getPipelineTimeline()), (0, retry_1.retry)(() => getPipelineArtifacts())]); - const stagesCompleted = new Set(timeline.records.filter(r => r.type === 'Stage' && r.state === 'completed' && stages.has(r.name)).map(r => r.name)); - const stagesInProgress = [...stages].filter(s => !stagesCompleted.has(s)); - const artifactsInProgress = artifacts.filter(a => processing.has(a.name)); - if (stagesInProgress.length === 0 && artifacts.length === done.size + processing.size) { - break; - } - else if (stagesInProgress.length > 0) { - console.log('Stages in progress:', stagesInProgress.join(', ')); - } - else if (artifactsInProgress.length > 0) { - console.log('Artifacts in progress:', artifactsInProgress.map(a => a.name).join(', ')); - } - else { - console.log(`Waiting for a total of ${artifacts.length}, ${done.size} done, 
${processing.size} in progress...`); - } - for (const artifact of artifacts) { - if (done.has(artifact.name) || processing.has(artifact.name)) { - continue; - } - console.log(`[${artifact.name}] Found new artifact`); - const artifactZipPath = path_1.default.join(e('AGENT_TEMPDIRECTORY'), `${artifact.name}.zip`); - await (0, retry_1.retry)(async (attempt) => { - const start = Date.now(); - console.log(`[${artifact.name}] Downloading (attempt ${attempt})...`); - await downloadArtifact(artifact, artifactZipPath); - const archiveSize = fs_1.default.statSync(artifactZipPath).size; - const downloadDurationS = (Date.now() - start) / 1000; - const downloadSpeedKBS = Math.round((archiveSize / 1024) / downloadDurationS); - console.log(`[${artifact.name}] Successfully downloaded after ${Math.floor(downloadDurationS)} seconds(${downloadSpeedKBS} KB/s).`); - }); - const artifactFilePaths = await unzip(artifactZipPath, e('AGENT_TEMPDIRECTORY')); - const artifactFilePath = artifactFilePaths.filter(p => !/_manifest/.test(p))[0]; - processing.add(artifact.name); - const promise = new Promise((resolve, reject) => { - const worker = new node_worker_threads_1.Worker(__filename, { workerData: { artifact, artifactFilePath } }); - worker.on('error', reject); - worker.on('exit', code => { - if (code === 0) { - resolve(); - } - else { - reject(new Error(`[${artifact.name}] Worker stopped with exit code ${code}`)); - } - }); - }); - const operation = promise.then(() => { - processing.delete(artifact.name); - done.add(artifact.name); - console.log(`\u2705 ${artifact.name} `); - }); - operations.push({ name: artifact.name, operation }); - resultPromise = Promise.allSettled(operations.map(o => o.operation)); - } - await new Promise(c => setTimeout(c, 10_000)); - } - console.log(`Found all ${done.size + processing.size} artifacts, waiting for ${processing.size} artifacts to finish publishing...`); - const artifactsInProgress = operations.filter(o => processing.has(o.name)); - if 
(artifactsInProgress.length > 0) { - console.log('Artifacts in progress:', artifactsInProgress.map(a => a.name).join(', ')); - } - const results = await resultPromise; - for (let i = 0; i < operations.length; i++) { - const result = results[i]; - if (result.status === 'rejected') { - console.error(`[${operations[i].name}]`, result.reason); - } - } - // Fail the job if any of the artifacts failed to publish - if (results.some(r => r.status === 'rejected')) { - throw new Error('Some artifacts failed to publish'); - } - // Also fail the job if any of the stages did not succeed - let shouldFail = false; - for (const stage of stages) { - const record = timeline.records.find(r => r.name === stage && r.type === 'Stage'); - if (record.result !== 'succeeded' && record.result !== 'succeededWithIssues') { - shouldFail = true; - console.error(`Stage ${stage} did not succeed: ${record.result}`); - } - } - if (shouldFail) { - throw new Error('Some stages did not succeed'); - } - console.log(`All ${done.size} artifacts published!`); -} -if (require.main === module) { - main().then(() => { - process.exit(0); - }, err => { - console.error(err); - process.exit(1); - }); -} -//# sourceMappingURL=publish.js.map \ No newline at end of file diff --git a/build/azure-pipelines/common/publish.ts b/build/azure-pipelines/common/publish.ts deleted file mode 100644 index 2b1c1500..00000000 --- a/build/azure-pipelines/common/publish.ts +++ /dev/null @@ -1,1081 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ - -import fs from 'fs'; -import path from 'path'; -import { Readable } from 'stream'; -import type { ReadableStream } from 'stream/web'; -import { pipeline } from 'node:stream/promises'; -import yauzl from 'yauzl'; -import crypto from 'crypto'; -import { retry } from './retry'; -import { CosmosClient } from '@azure/cosmos'; -import cp from 'child_process'; -import os from 'os'; -import { Worker, isMainThread, workerData } from 'node:worker_threads'; -import { ConfidentialClientApplication } from '@azure/msal-node'; -import { BlobClient, BlobServiceClient, BlockBlobClient, ContainerClient, ContainerSASPermissions, generateBlobSASQueryParameters } from '@azure/storage-blob'; -import jws from 'jws'; -import { clearInterval, setInterval } from 'node:timers'; - -export function e(name: string): string { - const result = process.env[name]; - - if (typeof result !== 'string') { - throw new Error(`Missing env: ${name}`); - } - - return result; -} - -function hashStream(hashName: string, stream: Readable): Promise { - return new Promise((c, e) => { - const shasum = crypto.createHash(hashName); - - stream - .on('data', shasum.update.bind(shasum)) - .on('error', e) - .on('close', () => c(shasum.digest())); - }); -} - -interface ReleaseSubmitResponse { - operationId: string; - esrpCorrelationId: string; - code?: string; - message?: string; - target?: string; - innerError?: any; -} - -interface ReleaseActivityInfo { - activityId: string; - activityType: string; - name: string; - status: string; - errorCode: number; - errorMessages: string[]; - beginTime?: Date; - endTime?: Date; - lastModifiedAt?: Date; -} - -interface InnerServiceError { - code: string; - details: { [key: string]: string }; - innerError?: InnerServiceError; -} - -interface ReleaseError { - errorCode: number; - errorMessages: string[]; -} - -const enum StatusCode { - Pass = 'pass', - Aborted = 'aborted', - 
Inprogress = 'inprogress', - FailCanRetry = 'failCanRetry', - FailDoNotRetry = 'failDoNotRetry', - PendingAnalysis = 'pendingAnalysis', - Cancelled = 'cancelled' -} - -interface ReleaseResultMessage { - activities: ReleaseActivityInfo[]; - childWorkflowType: string; - clientId: string; - customerCorrelationId: string; - errorInfo: InnerServiceError; - groupId: string; - lastModifiedAt: Date; - operationId: string; - releaseError: ReleaseError; - requestSubmittedAt: Date; - routedRegion: string; - status: StatusCode; - totalFileCount: number; - totalReleaseSize: number; - version: string; -} - -interface ReleaseFileInfo { - name?: string; - hash?: number[]; - sourceLocation?: FileLocation; - sizeInBytes?: number; - hashType?: FileHashType; - fileId?: any; - distributionRelativePath?: string; - partNumber?: string; - friendlyFileName?: string; - tenantFileLocationType?: string; - tenantFileLocation?: string; - signedEngineeringCopyLocation?: string; - encryptedDistributionBlobLocation?: string; - preEncryptedDistributionBlobLocation?: string; - secondaryDistributionHashRequired?: boolean; - secondaryDistributionHashType?: FileHashType; - lastModifiedAt?: Date; - cultureCodes?: string[]; - displayFileInDownloadCenter?: boolean; - isPrimaryFileInDownloadCenter?: boolean; - fileDownloadDetails?: FileDownloadDetails[]; -} - -interface ReleaseDetailsFileInfo extends ReleaseFileInfo { } - -interface ReleaseDetailsMessage extends ReleaseResultMessage { - clusterRegion: string; - correlationVector: string; - releaseCompletedAt?: Date; - releaseInfo: ReleaseInfo; - productInfo: ProductInfo; - createdBy: UserInfo; - owners: OwnerInfo[]; - accessPermissionsInfo: AccessPermissionsInfo; - files: ReleaseDetailsFileInfo[]; - comments: string[]; - cancellationReason: string; - downloadCenterInfo: DownloadCenterInfo; -} - - -interface ProductInfo { - name?: string; - version?: string; - description?: string; -} - -interface ReleaseInfo { - title?: string; - minimumNumberOfApprovers: 
number; - properties?: { [key: string]: string }; - isRevision?: boolean; - revisionNumber?: string; -} - -type FileLocationType = 'azureBlob'; - -interface FileLocation { - type: FileLocationType; - blobUrl: string; - uncPath?: string; - url?: string; -} - -type FileHashType = 'sha256' | 'sha1'; - -interface FileDownloadDetails { - portalName: string; - downloadUrl: string; -} - -interface RoutingInfo { - intent?: string; - contentType?: string; - contentOrigin?: string; - productState?: string; - audience?: string; -} - -interface ReleaseFileInfo { - name?: string; - hash?: number[]; - sourceLocation?: FileLocation; - sizeInBytes?: number; - hashType?: FileHashType; - fileId?: any; - distributionRelativePath?: string; - partNumber?: string; - friendlyFileName?: string; - tenantFileLocationType?: string; - tenantFileLocation?: string; - signedEngineeringCopyLocation?: string; - encryptedDistributionBlobLocation?: string; - preEncryptedDistributionBlobLocation?: string; - secondaryDistributionHashRequired?: boolean; - secondaryDistributionHashType?: FileHashType; - lastModifiedAt?: Date; - cultureCodes?: string[]; - displayFileInDownloadCenter?: boolean; - isPrimaryFileInDownloadCenter?: boolean; - fileDownloadDetails?: FileDownloadDetails[]; -} - -interface UserInfo { - userPrincipalName?: string; -} - -interface OwnerInfo { - owner: UserInfo; -} - -interface ApproverInfo { - approver: UserInfo; - isAutoApproved: boolean; - isMandatory: boolean; -} - -interface AccessPermissionsInfo { - mainPublisher?: string; - releasePublishers?: string[]; - channelDownloadEntityDetails?: { [key: string]: string[] }; -} - -interface DownloadCenterLocaleInfo { - cultureCode?: string; - downloadTitle?: string; - shortName?: string; - shortDescription?: string; - longDescription?: string; - instructions?: string; - additionalInfo?: string; - keywords?: string[]; - version?: string; - relatedLinks?: { [key: string]: URL }; -} - -interface DownloadCenterInfo { - downloadCenterId: 
number; - publishToDownloadCenter?: boolean; - publishingGroup?: string; - operatingSystems?: string[]; - relatedReleases?: string[]; - kbNumbers?: string[]; - sbNumbers?: string[]; - locales?: DownloadCenterLocaleInfo[]; - additionalProperties?: { [key: string]: string }; -} - -interface ReleaseRequestMessage { - driEmail: string[]; - groupId?: string; - customerCorrelationId: string; - esrpCorrelationId: string; - contextData?: { [key: string]: string }; - releaseInfo: ReleaseInfo; - productInfo: ProductInfo; - files: ReleaseFileInfo[]; - routingInfo?: RoutingInfo; - createdBy: UserInfo; - owners: OwnerInfo[]; - approvers: ApproverInfo[]; - accessPermissionsInfo: AccessPermissionsInfo; - jwsToken?: string; - publisherId?: string; - downloadCenterInfo?: DownloadCenterInfo; -} - -function getCertificateBuffer(input: string) { - return Buffer.from(input.replace(/-----BEGIN CERTIFICATE-----|-----END CERTIFICATE-----|\n/g, ''), 'base64'); -} - -function getThumbprint(input: string, algorithm: string): Buffer { - const buffer = getCertificateBuffer(input); - return crypto.createHash(algorithm).update(buffer).digest(); -} - -function getKeyFromPFX(pfx: string): string { - const pfxCertificatePath = path.join(os.tmpdir(), 'cert.pfx'); - const pemKeyPath = path.join(os.tmpdir(), 'key.pem'); - - try { - const pfxCertificate = Buffer.from(pfx, 'base64'); - fs.writeFileSync(pfxCertificatePath, pfxCertificate); - cp.execSync(`openssl pkcs12 -in "${pfxCertificatePath}" -nocerts -nodes -out "${pemKeyPath}" -passin pass:`); - const raw = fs.readFileSync(pemKeyPath, 'utf-8'); - const result = raw.match(/-----BEGIN PRIVATE KEY-----[\s\S]+?-----END PRIVATE KEY-----/g)![0]; - return result; - } finally { - fs.rmSync(pfxCertificatePath, { force: true }); - fs.rmSync(pemKeyPath, { force: true }); - } -} - -function getCertificatesFromPFX(pfx: string): string[] { - const pfxCertificatePath = path.join(os.tmpdir(), 'cert.pfx'); - const pemCertificatePath = path.join(os.tmpdir(), 
'cert.pem'); - - try { - const pfxCertificate = Buffer.from(pfx, 'base64'); - fs.writeFileSync(pfxCertificatePath, pfxCertificate); - cp.execSync(`openssl pkcs12 -in "${pfxCertificatePath}" -nokeys -out "${pemCertificatePath}" -passin pass:`); - const raw = fs.readFileSync(pemCertificatePath, 'utf-8'); - const matches = raw.match(/-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/g); - return matches ? matches.reverse() : []; - } finally { - fs.rmSync(pfxCertificatePath, { force: true }); - fs.rmSync(pemCertificatePath, { force: true }); - } -} - -class ESRPReleaseService { - - static async create( - log: (...args: any[]) => void, - tenantId: string, - clientId: string, - authCertificatePfx: string, - requestSigningCertificatePfx: string, - containerClient: ContainerClient, - stagingSasToken: string - ) { - const authKey = getKeyFromPFX(authCertificatePfx); - const authCertificate = getCertificatesFromPFX(authCertificatePfx)[0]; - const requestSigningKey = getKeyFromPFX(requestSigningCertificatePfx); - const requestSigningCertificates = getCertificatesFromPFX(requestSigningCertificatePfx); - - const app = new ConfidentialClientApplication({ - auth: { - clientId, - authority: `https://login.microsoftonline.com/${tenantId}`, - clientCertificate: { - thumbprintSha256: getThumbprint(authCertificate, 'sha256').toString('hex'), - privateKey: authKey, - x5c: authCertificate - } - } - }); - - const response = await app.acquireTokenByClientCredential({ - scopes: ['https://api.esrp.microsoft.com/.default'] - }); - - return new ESRPReleaseService(log, clientId, response!.accessToken, requestSigningCertificates, requestSigningKey, containerClient, stagingSasToken); - } - - private static API_URL = 'https://api.esrp.microsoft.com/api/v3/releaseservices/clients/'; - - private constructor( - private readonly log: (...args: any[]) => void, - private readonly clientId: string, - private readonly accessToken: string, - private readonly requestSigningCertificates: 
string[], - private readonly requestSigningKey: string, - private readonly containerClient: ContainerClient, - private readonly stagingSasToken: string - ) { } - - async createRelease(version: string, filePath: string, friendlyFileName: string) { - const correlationId = crypto.randomUUID(); - const blobClient = this.containerClient.getBlockBlobClient(correlationId); - - this.log(`Uploading ${filePath} to ${blobClient.url}`); - await blobClient.uploadFile(filePath); - this.log('Uploaded blob successfully'); - - try { - this.log(`Submitting release for ${version}: ${filePath}`); - const submitReleaseResult = await this.submitRelease(version, filePath, friendlyFileName, correlationId, blobClient); - - this.log(`Successfully submitted release ${submitReleaseResult.operationId}. Polling for completion...`); - - // Poll every 5 seconds, wait 60 minutes max -> poll 60/5*60=720 times - for (let i = 0; i < 720; i++) { - await new Promise(c => setTimeout(c, 5000)); - const releaseStatus = await this.getReleaseStatus(submitReleaseResult.operationId); - - if (releaseStatus.status === 'pass') { - break; - } else if (releaseStatus.status === 'aborted') { - this.log(JSON.stringify(releaseStatus)); - throw new Error(`Release was aborted`); - } else if (releaseStatus.status !== 'inprogress') { - this.log(JSON.stringify(releaseStatus)); - throw new Error(`Unknown error when polling for release`); - } - } - - const releaseDetails = await this.getReleaseDetails(submitReleaseResult.operationId); - - if (releaseDetails.status !== 'pass') { - throw new Error(`Timed out waiting for release: ${JSON.stringify(releaseDetails)}`); - } - - this.log('Successfully created release:', releaseDetails.files[0].fileDownloadDetails![0].downloadUrl); - return releaseDetails.files[0].fileDownloadDetails![0].downloadUrl; - } finally { - this.log(`Deleting blob ${blobClient.url}`); - await blobClient.delete(); - this.log('Deleted blob successfully'); - } - } - - private async submitRelease( - version: 
string, - filePath: string, - friendlyFileName: string, - correlationId: string, - blobClient: BlobClient - ): Promise { - const size = fs.statSync(filePath).size; - const hash = await hashStream('sha256', fs.createReadStream(filePath)); - const blobUrl = `${blobClient.url}?${this.stagingSasToken}`; - - const message: ReleaseRequestMessage = { - customerCorrelationId: correlationId, - esrpCorrelationId: correlationId, - driEmail: ['joao.moreno@microsoft.com'], - createdBy: { userPrincipalName: 'jomo@microsoft.com' }, - owners: [{ owner: { userPrincipalName: 'jomo@microsoft.com' } }], - approvers: [{ approver: { userPrincipalName: 'jomo@microsoft.com' }, isAutoApproved: true, isMandatory: false }], - releaseInfo: { - title: 'VS Code', - properties: { - 'ReleaseContentType': 'InstallPackage' - }, - minimumNumberOfApprovers: 1 - }, - productInfo: { - name: 'VS Code', - version, - description: 'VS Code' - }, - accessPermissionsInfo: { - mainPublisher: 'VSCode', - channelDownloadEntityDetails: { - AllDownloadEntities: ['VSCode'] - } - }, - routingInfo: { - intent: 'filedownloadlinkgeneration' - }, - files: [{ - name: path.basename(filePath), - friendlyFileName, - tenantFileLocation: blobUrl, - tenantFileLocationType: 'AzureBlob', - sourceLocation: { - type: 'azureBlob', - blobUrl - }, - hashType: 'sha256', - hash: Array.from(hash), - sizeInBytes: size - }] - }; - - message.jwsToken = await this.generateJwsToken(message); - - const res = await fetch(`${ESRPReleaseService.API_URL}${this.clientId}/workflows/release/operations`, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - 'Authorization': `Bearer ${this.accessToken}` - }, - body: JSON.stringify(message) - }); - - if (!res.ok) { - const text = await res.text(); - throw new Error(`Failed to submit release: ${res.statusText}\n${text}`); - } - - return await res.json() as ReleaseSubmitResponse; - } - - private async getReleaseStatus(releaseId: string): Promise { - const url = 
`${ESRPReleaseService.API_URL}${this.clientId}/workflows/release/operations/grs/${releaseId}`; - - const res = await retry(() => fetch(url, { - headers: { - 'Authorization': `Bearer ${this.accessToken}` - } - })); - - if (!res.ok) { - const text = await res.text(); - throw new Error(`Failed to get release status: ${res.statusText}\n${text}`); - } - - return await res.json() as ReleaseResultMessage; - } - - private async getReleaseDetails(releaseId: string): Promise { - const url = `${ESRPReleaseService.API_URL}${this.clientId}/workflows/release/operations/grd/${releaseId}`; - - const res = await retry(() => fetch(url, { - headers: { - 'Authorization': `Bearer ${this.accessToken}` - } - })); - - if (!res.ok) { - const text = await res.text(); - throw new Error(`Failed to get release status: ${res.statusText}\n${text}`); - } - - return await res.json() as ReleaseDetailsMessage; - } - - private async generateJwsToken(message: ReleaseRequestMessage): Promise { - return jws.sign({ - header: { - alg: 'RS256', - crit: ['exp', 'x5t'], - // Release service uses ticks, not seconds :roll_eyes: (https://stackoverflow.com/a/7968483) - exp: ((Date.now() + (6 * 60 * 1000)) * 10000) + 621355968000000000, - // Release service uses hex format, not base64url :roll_eyes: - x5t: getThumbprint(this.requestSigningCertificates[0], 'sha1').toString('hex'), - // Release service uses a '.' 
separated string, not an array of strings :roll_eyes: - x5c: this.requestSigningCertificates.map(c => getCertificateBuffer(c).toString('base64url')).join('.') as any, - }, - payload: message, - privateKey: this.requestSigningKey, - }); - } -} - -class State { - - private statePath: string; - private set = new Set(); - - constructor() { - const pipelineWorkspacePath = e('PIPELINE_WORKSPACE'); - const previousState = fs.readdirSync(pipelineWorkspacePath) - .map(name => /^artifacts_processed_(\d+)$/.exec(name)) - .filter((match): match is RegExpExecArray => !!match) - .map(match => ({ name: match![0], attempt: Number(match![1]) })) - .sort((a, b) => b.attempt - a.attempt)[0]; - - if (previousState) { - const previousStatePath = path.join(pipelineWorkspacePath, previousState.name, previousState.name + '.txt'); - fs.readFileSync(previousStatePath, 'utf8').split(/\n/).filter(name => !!name).forEach(name => this.set.add(name)); - } - - const stageAttempt = e('SYSTEM_STAGEATTEMPT'); - this.statePath = path.join(pipelineWorkspacePath, `artifacts_processed_${stageAttempt}`, `artifacts_processed_${stageAttempt}.txt`); - fs.mkdirSync(path.dirname(this.statePath), { recursive: true }); - fs.writeFileSync(this.statePath, [...this.set.values()].map(name => `${name}\n`).join('')); - } - - get size(): number { - return this.set.size; - } - - has(name: string): boolean { - return this.set.has(name); - } - - add(name: string): void { - this.set.add(name); - fs.appendFileSync(this.statePath, `${name}\n`); - } - - [Symbol.iterator](): IterableIterator { - return this.set[Symbol.iterator](); - } -} - -const azdoFetchOptions = { - headers: { - // Pretend we're a web browser to avoid download rate limits - 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36 Edg/119.0.0.0', - 'Accept': 
'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7', - 'Accept-Encoding': 'gzip, deflate, br', - 'Accept-Language': 'en-US,en;q=0.9', - 'Referer': 'https://dev.azure.com', - Authorization: `Bearer ${e('SYSTEM_ACCESSTOKEN')}` - } -}; - -export async function requestAZDOAPI(path: string): Promise { - const abortController = new AbortController(); - const timeout = setTimeout(() => abortController.abort(), 2 * 60 * 1000); - - try { - const res = await retry(() => fetch(`${e('BUILDS_API_URL')}${path}?api-version=6.0`, { ...azdoFetchOptions, signal: abortController.signal })); - - if (!res.ok) { - throw new Error(`Unexpected status code: ${res.status}`); - } - - return await res.json(); - } finally { - clearTimeout(timeout); - } -} - -export interface Artifact { - readonly name: string; - readonly resource: { - readonly downloadUrl: string; - readonly properties: { - readonly artifactsize: number; - }; - }; -} - -async function getPipelineArtifacts(): Promise { - const result = await requestAZDOAPI<{ readonly value: Artifact[] }>('artifacts'); - return result.value.filter(a => /^vscode_/.test(a.name) && !/sbom$/.test(a.name)); -} - -interface Timeline { - readonly records: { - readonly name: string; - readonly type: string; - readonly state: string; - readonly result: string; - }[]; -} - -async function getPipelineTimeline(): Promise { - return await requestAZDOAPI('timeline'); -} - -async function downloadArtifact(artifact: Artifact, downloadPath: string): Promise { - const abortController = new AbortController(); - const timeout = setTimeout(() => abortController.abort(), 4 * 60 * 1000); - - try { - const res = await fetch(artifact.resource.downloadUrl, { ...azdoFetchOptions, signal: abortController.signal }); - - if (!res.ok) { - throw new Error(`Unexpected status code: ${res.status}`); - } - - await pipeline(Readable.fromWeb(res.body as ReadableStream), fs.createWriteStream(downloadPath)); - 
} finally { - clearTimeout(timeout); - } -} - -async function unzip(packagePath: string, outputPath: string): Promise { - return new Promise((resolve, reject) => { - yauzl.open(packagePath, { lazyEntries: true, autoClose: true }, (err, zipfile) => { - if (err) { - return reject(err); - } - - const result: string[] = []; - zipfile!.on('entry', entry => { - if (/\/$/.test(entry.fileName)) { - zipfile!.readEntry(); - } else { - zipfile!.openReadStream(entry, (err, istream) => { - if (err) { - return reject(err); - } - - const filePath = path.join(outputPath, entry.fileName); - fs.mkdirSync(path.dirname(filePath), { recursive: true }); - - const ostream = fs.createWriteStream(filePath); - ostream.on('finish', () => { - result.push(filePath); - zipfile!.readEntry(); - }); - istream?.on('error', err => reject(err)); - istream!.pipe(ostream); - }); - } - }); - - zipfile!.on('close', () => resolve(result)); - zipfile!.readEntry(); - }); - }); -} - -interface Asset { - platform: string; - type: string; - url: string; - mooncakeUrl?: string; - prssUrl?: string; - hash: string; - sha256hash: string; - size: number; - supportsFastUpdate?: boolean; -} - -// Contains all of the logic for mapping details to our actual product names in CosmosDB -function getPlatform(product: string, os: string, arch: string, type: string): string { - switch (os) { - case 'win32': - switch (product) { - case 'client': { - switch (type) { - case 'archive': - return `win32-${arch}-archive`; - case 'setup': - return `win32-${arch}`; - case 'user-setup': - return `win32-${arch}-user`; - default: - throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`); - } - } - case 'server': - return `server-win32-${arch}`; - case 'web': - return `server-win32-${arch}-web`; - case 'cli': - return `cli-win32-${arch}`; - default: - throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`); - } - case 'alpine': - switch (product) { - case 'server': - return `server-alpine-${arch}`; - case 'web': - 
return `server-alpine-${arch}-web`; - case 'cli': - return `cli-alpine-${arch}`; - default: - throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`); - } - case 'linux': - switch (type) { - case 'snap': - return `linux-snap-${arch}`; - case 'archive-unsigned': - switch (product) { - case 'client': - return `linux-${arch}`; - case 'server': - return `server-linux-${arch}`; - case 'web': - if (arch === 'standalone') { - return 'web-standalone'; - } - return `server-linux-${arch}-web`; - default: - throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`); - } - case 'deb-package': - return `linux-deb-${arch}`; - case 'rpm-package': - return `linux-rpm-${arch}`; - case 'cli': - return `cli-linux-${arch}`; - default: - throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`); - } - case 'darwin': - switch (product) { - case 'client': - if (arch === 'x64') { - return 'darwin'; - } - return `darwin-${arch}`; - case 'server': - if (arch === 'x64') { - return 'server-darwin'; - } - return `server-darwin-${arch}`; - case 'web': - if (arch === 'x64') { - return 'server-darwin-web'; - } - return `server-darwin-${arch}-web`; - case 'cli': - return `cli-darwin-${arch}`; - default: - throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`); - } - default: - throw new Error(`Unrecognized: ${product} ${os} ${arch} ${type}`); - } -} - -// Contains all of the logic for mapping types to our actual types in CosmosDB -function getRealType(type: string) { - switch (type) { - case 'user-setup': - return 'setup'; - case 'deb-package': - case 'rpm-package': - return 'package'; - default: - return type; - } -} - -async function withLease(client: BlockBlobClient, fn: () => Promise) { - const lease = client.getBlobLeaseClient(); - - for (let i = 0; i < 360; i++) { // Try to get lease for 30 minutes - try { - await client.uploadData(new ArrayBuffer()); // blob needs to exist for lease to be acquired - await lease.acquireLease(60); - - try { - const 
abortController = new AbortController(); - const refresher = new Promise((c, e) => { - abortController.signal.onabort = () => { - clearInterval(interval); - c(); - }; - - const interval = setInterval(() => { - lease.renewLease().catch(err => { - clearInterval(interval); - e(new Error('Failed to renew lease ' + err)); - }); - }, 30_000); - }); - - const result = await Promise.race([fn(), refresher]); - abortController.abort(); - return result; - } finally { - await lease.releaseLease(); - } - } catch (err) { - if (err.statusCode !== 409 && err.statusCode !== 412) { - throw err; - } - - await new Promise(c => setTimeout(c, 5000)); - } - } - - throw new Error('Failed to acquire lease on blob after 30 minutes'); -} - -async function processArtifact( - artifact: Artifact, - filePath: string -) { - const log = (...args: any[]) => console.log(`[${artifact.name}]`, ...args); - const match = /^vscode_(?[^_]+)_(?[^_]+)(?:_legacy)?_(?[^_]+)_(?[^_]+)$/.exec(artifact.name); - - if (!match) { - throw new Error(`Invalid artifact name: ${artifact.name}`); - } - - const { cosmosDBAccessToken, blobServiceAccessToken } = JSON.parse(e('PUBLISH_AUTH_TOKENS')); - const quality = e('VSCODE_QUALITY'); - const version = e('BUILD_SOURCEVERSION'); - const friendlyFileName = `${quality}/${version}/${path.basename(filePath)}`; - - const blobServiceClient = new BlobServiceClient(`https://${e('VSCODE_STAGING_BLOB_STORAGE_ACCOUNT_NAME')}.blob.core.windows.net/`, { getToken: async () => blobServiceAccessToken }); - const leasesContainerClient = blobServiceClient.getContainerClient('leases'); - await leasesContainerClient.createIfNotExists(); - const leaseBlobClient = leasesContainerClient.getBlockBlobClient(friendlyFileName); - - log(`Acquiring lease for: ${friendlyFileName}`); - - await withLease(leaseBlobClient, async () => { - log(`Successfully acquired lease for: ${friendlyFileName}`); - - const url = `${e('PRSS_CDN_URL')}/${friendlyFileName}`; - const res = await retry(() => fetch(url)); - - 
if (res.status === 200) { - log(`Already released and provisioned: ${url}`); - } else { - const stagingContainerClient = blobServiceClient.getContainerClient('staging'); - await stagingContainerClient.createIfNotExists(); - - const now = new Date().valueOf(); - const oneHour = 60 * 60 * 1000; - const oneHourAgo = new Date(now - oneHour); - const oneHourFromNow = new Date(now + oneHour); - const userDelegationKey = await blobServiceClient.getUserDelegationKey(oneHourAgo, oneHourFromNow); - const sasOptions = { containerName: 'staging', permissions: ContainerSASPermissions.from({ read: true }), startsOn: oneHourAgo, expiresOn: oneHourFromNow }; - const stagingSasToken = generateBlobSASQueryParameters(sasOptions, userDelegationKey, e('VSCODE_STAGING_BLOB_STORAGE_ACCOUNT_NAME')).toString(); - - const releaseService = await ESRPReleaseService.create( - log, - e('RELEASE_TENANT_ID'), - e('RELEASE_CLIENT_ID'), - e('RELEASE_AUTH_CERT'), - e('RELEASE_REQUEST_SIGNING_CERT'), - stagingContainerClient, - stagingSasToken - ); - - await releaseService.createRelease(version, filePath, friendlyFileName); - } - - const { product, os, arch, unprocessedType } = match.groups!; - const platform = getPlatform(product, os, arch, unprocessedType); - const type = getRealType(unprocessedType); - const size = fs.statSync(filePath).size; - const stream = fs.createReadStream(filePath); - const [hash, sha256hash] = await Promise.all([hashStream('sha1', stream), hashStream('sha256', stream)]); // CodeQL [SM04514] Using SHA1 only for legacy reasons, we are actually only respecting SHA256 - const asset: Asset = { platform, type, url, hash: hash.toString('hex'), sha256hash: sha256hash.toString('hex'), size, supportsFastUpdate: true }; - log('Creating asset...'); - - const result = await retry(async (attempt) => { - log(`Creating asset in Cosmos DB (attempt ${attempt})...`); - const client = new CosmosClient({ endpoint: e('AZURE_DOCUMENTDB_ENDPOINT')!, tokenProvider: () => 
Promise.resolve(`type=aad&ver=1.0&sig=${cosmosDBAccessToken.token}`) }); - const scripts = client.database('builds').container(quality).scripts; - const { resource: result } = await scripts.storedProcedure('createAsset').execute<'ok' | 'already exists'>('', [version, asset, true]); - return result; - }); - - if (result === 'already exists') { - log('Asset already exists!'); - } else { - log('Asset successfully created: ', JSON.stringify(asset, undefined, 2)); - } - }); - - log(`Successfully released lease for: ${friendlyFileName}`); -} - -// It is VERY important that we don't download artifacts too much too fast from AZDO. -// AZDO throttles us SEVERELY if we do. Not just that, but they also close open -// sockets, so the whole things turns to a grinding halt. So, downloading and extracting -// happens serially in the main thread, making the downloads are spaced out -// properly. For each extracted artifact, we spawn a worker thread to upload it to -// the CDN and finally update the build in Cosmos DB. 
-async function main() { - if (!isMainThread) { - const { artifact, artifactFilePath } = workerData; - await processArtifact(artifact, artifactFilePath); - return; - } - - const done = new State(); - const processing = new Set(); - - for (const name of done) { - console.log(`\u2705 ${name}`); - } - - const stages = new Set(['Compile']); - - if ( - e('VSCODE_BUILD_STAGE_LINUX') === 'True' || - e('VSCODE_BUILD_STAGE_ALPINE') === 'True' || - e('VSCODE_BUILD_STAGE_MACOS') === 'True' || - e('VSCODE_BUILD_STAGE_WINDOWS') === 'True' - ) { - stages.add('CompileCLI'); - } - - if (e('VSCODE_BUILD_STAGE_WINDOWS') === 'True') { stages.add('Windows'); } - if (e('VSCODE_BUILD_STAGE_LINUX') === 'True') { stages.add('Linux'); } - if (e('VSCODE_BUILD_STAGE_ALPINE') === 'True') { stages.add('Alpine'); } - if (e('VSCODE_BUILD_STAGE_MACOS') === 'True') { stages.add('macOS'); } - if (e('VSCODE_BUILD_STAGE_WEB') === 'True') { stages.add('Web'); } - - let timeline: Timeline; - let artifacts: Artifact[]; - let resultPromise = Promise.resolve[]>([]); - const operations: { name: string; operation: Promise }[] = []; - - while (true) { - [timeline, artifacts] = await Promise.all([retry(() => getPipelineTimeline()), retry(() => getPipelineArtifacts())]); - const stagesCompleted = new Set(timeline.records.filter(r => r.type === 'Stage' && r.state === 'completed' && stages.has(r.name)).map(r => r.name)); - const stagesInProgress = [...stages].filter(s => !stagesCompleted.has(s)); - const artifactsInProgress = artifacts.filter(a => processing.has(a.name)); - - if (stagesInProgress.length === 0 && artifacts.length === done.size + processing.size) { - break; - } else if (stagesInProgress.length > 0) { - console.log('Stages in progress:', stagesInProgress.join(', ')); - } else if (artifactsInProgress.length > 0) { - console.log('Artifacts in progress:', artifactsInProgress.map(a => a.name).join(', ')); - } else { - console.log(`Waiting for a total of ${artifacts.length}, ${done.size} done, 
${processing.size} in progress...`); - } - - for (const artifact of artifacts) { - if (done.has(artifact.name) || processing.has(artifact.name)) { - continue; - } - - console.log(`[${artifact.name}] Found new artifact`); - - const artifactZipPath = path.join(e('AGENT_TEMPDIRECTORY'), `${artifact.name}.zip`); - - await retry(async (attempt) => { - const start = Date.now(); - console.log(`[${artifact.name}] Downloading (attempt ${attempt})...`); - await downloadArtifact(artifact, artifactZipPath); - const archiveSize = fs.statSync(artifactZipPath).size; - const downloadDurationS = (Date.now() - start) / 1000; - const downloadSpeedKBS = Math.round((archiveSize / 1024) / downloadDurationS); - console.log(`[${artifact.name}] Successfully downloaded after ${Math.floor(downloadDurationS)} seconds(${downloadSpeedKBS} KB/s).`); - }); - - const artifactFilePaths = await unzip(artifactZipPath, e('AGENT_TEMPDIRECTORY')); - const artifactFilePath = artifactFilePaths.filter(p => !/_manifest/.test(p))[0]; - - processing.add(artifact.name); - const promise = new Promise((resolve, reject) => { - const worker = new Worker(__filename, { workerData: { artifact, artifactFilePath } }); - worker.on('error', reject); - worker.on('exit', code => { - if (code === 0) { - resolve(); - } else { - reject(new Error(`[${artifact.name}] Worker stopped with exit code ${code}`)); - } - }); - }); - - const operation = promise.then(() => { - processing.delete(artifact.name); - done.add(artifact.name); - console.log(`\u2705 ${artifact.name} `); - }); - - operations.push({ name: artifact.name, operation }); - resultPromise = Promise.allSettled(operations.map(o => o.operation)); - } - - await new Promise(c => setTimeout(c, 10_000)); - } - - console.log(`Found all ${done.size + processing.size} artifacts, waiting for ${processing.size} artifacts to finish publishing...`); - - const artifactsInProgress = operations.filter(o => processing.has(o.name)); - - if (artifactsInProgress.length > 0) { - 
console.log('Artifacts in progress:', artifactsInProgress.map(a => a.name).join(', ')); - } - - const results = await resultPromise; - - for (let i = 0; i < operations.length; i++) { - const result = results[i]; - - if (result.status === 'rejected') { - console.error(`[${operations[i].name}]`, result.reason); - } - } - - // Fail the job if any of the artifacts failed to publish - if (results.some(r => r.status === 'rejected')) { - throw new Error('Some artifacts failed to publish'); - } - - // Also fail the job if any of the stages did not succeed - let shouldFail = false; - - for (const stage of stages) { - const record = timeline.records.find(r => r.name === stage && r.type === 'Stage')!; - - if (record.result !== 'succeeded' && record.result !== 'succeededWithIssues') { - shouldFail = true; - console.error(`Stage ${stage} did not succeed: ${record.result}`); - } - } - - if (shouldFail) { - throw new Error('Some stages did not succeed'); - } - - console.log(`All ${done.size} artifacts published!`); -} - -if (require.main === module) { - main().then(() => { - process.exit(0); - }, err => { - console.error(err); - process.exit(1); - }); -} diff --git a/build/azure-pipelines/common/releaseBuild.js b/build/azure-pipelines/common/releaseBuild.js deleted file mode 100644 index fa69cb4e..00000000 --- a/build/azure-pipelines/common/releaseBuild.js +++ /dev/null @@ -1,57 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ -Object.defineProperty(exports, "__esModule", { value: true }); -const identity_1 = require("@azure/identity"); -const cosmos_1 = require("@azure/cosmos"); -const retry_1 = require("./retry"); -function getEnv(name) { - const result = process.env[name]; - if (typeof result === 'undefined') { - throw new Error('Missing env: ' + name); - } - return result; -} -function createDefaultConfig(quality) { - return { - id: quality, - frozen: false - }; -} -async function getConfig(client, quality) { - const query = `SELECT TOP 1 * FROM c WHERE c.id = "${quality}"`; - const res = await client.database('builds').container('config').items.query(query).fetchAll(); - if (res.resources.length === 0) { - return createDefaultConfig(quality); - } - return res.resources[0]; -} -async function main(force) { - const commit = getEnv('BUILD_SOURCEVERSION'); - const quality = getEnv('VSCODE_QUALITY'); - const aadCredentials = new identity_1.ClientAssertionCredential(process.env['AZURE_TENANT_ID'], process.env['AZURE_CLIENT_ID'], () => Promise.resolve(process.env['AZURE_ID_TOKEN'])); - const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], aadCredentials }); - if (!force) { - const config = await getConfig(client, quality); - console.log('Quality config:', config); - if (config.frozen) { - console.log(`Skipping release because quality ${quality} is frozen.`); - return; - } - } - console.log(`Releasing build ${commit}...`); - const scripts = client.database('builds').container(quality).scripts; - await (0, retry_1.retry)(() => scripts.storedProcedure('releaseBuild').execute('', [commit])); -} -const [, , force] = process.argv; -console.log(process.argv); -main(/^true$/i.test(force)).then(() => { - console.log('Build successfully released'); - process.exit(0); -}, err => { - console.error(err); - process.exit(1); -}); -//# 
sourceMappingURL=releaseBuild.js.map \ No newline at end of file diff --git a/build/azure-pipelines/common/releaseBuild.ts b/build/azure-pipelines/common/releaseBuild.ts deleted file mode 100644 index b7762de7..00000000 --- a/build/azure-pipelines/common/releaseBuild.ts +++ /dev/null @@ -1,78 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ - -import { ClientAssertionCredential } from '@azure/identity'; -import { CosmosClient } from '@azure/cosmos'; -import { retry } from './retry'; - -function getEnv(name: string): string { - const result = process.env[name]; - - if (typeof result === 'undefined') { - throw new Error('Missing env: ' + name); - } - - return result; -} - -interface Config { - id: string; - frozen: boolean; -} - -function createDefaultConfig(quality: string): Config { - return { - id: quality, - frozen: false - }; -} - -async function getConfig(client: CosmosClient, quality: string): Promise { - const query = `SELECT TOP 1 * FROM c WHERE c.id = "${quality}"`; - - const res = await client.database('builds').container('config').items.query(query).fetchAll(); - - if (res.resources.length === 0) { - return createDefaultConfig(quality); - } - - return res.resources[0] as Config; -} - -async function main(force: boolean): Promise { - const commit = getEnv('BUILD_SOURCEVERSION'); - const quality = getEnv('VSCODE_QUALITY'); - - const aadCredentials = new ClientAssertionCredential(process.env['AZURE_TENANT_ID']!, process.env['AZURE_CLIENT_ID']!, () => Promise.resolve(process.env['AZURE_ID_TOKEN']!)); - const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, aadCredentials }); - - if (!force) { - const config = await 
getConfig(client, quality); - - console.log('Quality config:', config); - - if (config.frozen) { - console.log(`Skipping release because quality ${quality} is frozen.`); - return; - } - } - - console.log(`Releasing build ${commit}...`); - - const scripts = client.database('builds').container(quality).scripts; - await retry(() => scripts.storedProcedure('releaseBuild').execute('', [commit])); -} - -const [, , force] = process.argv; - -console.log(process.argv); - -main(/^true$/i.test(force)).then(() => { - console.log('Build successfully released'); - process.exit(0); -}, err => { - console.error(err); - process.exit(1); -}); diff --git a/build/azure-pipelines/common/retry.js b/build/azure-pipelines/common/retry.js deleted file mode 100644 index 91f60bf2..00000000 --- a/build/azure-pipelines/common/retry.js +++ /dev/null @@ -1,27 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.retry = retry; -async function retry(fn) { - let lastError; - for (let run = 1; run <= 10; run++) { - try { - return await fn(run); - } - catch (err) { - if (!/fetch failed|terminated|aborted|timeout|TimeoutError|Timeout Error|RestError|Client network socket disconnected|socket hang up|ECONNRESET|CredentialUnavailableError|endpoints_resolution_error|Audience validation failed|end of central directory record signature not found/i.test(err.message)) { - throw err; - } - lastError = err; - // maximum delay is 10th retry: ~3 seconds - const millis = Math.floor((Math.random() * 200) + (50 * Math.pow(1.5, run))); - await new Promise(c => setTimeout(c, millis)); - } - } - console.error(`Too many retries, aborting.`); - throw lastError; -} -//# sourceMappingURL=retry.js.map \ No newline at end of file diff --git a/build/azure-pipelines/common/retry.ts b/build/azure-pipelines/common/retry.ts deleted file mode 100644 index 9697093c..00000000 --- a/build/azure-pipelines/common/retry.ts +++ /dev/null @@ -1,27 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ - -export async function retry(fn: (attempt: number) => Promise): Promise { - let lastError: Error | undefined; - - for (let run = 1; run <= 10; run++) { - try { - return await fn(run); - } catch (err) { - if (!/fetch failed|terminated|aborted|timeout|TimeoutError|Timeout Error|RestError|Client network socket disconnected|socket hang up|ECONNRESET|CredentialUnavailableError|endpoints_resolution_error|Audience validation failed|end of central directory record signature not found/i.test(err.message)) { - throw err; - } - - lastError = err; - - // maximum delay is 10th retry: ~3 seconds - const millis = Math.floor((Math.random() * 200) + (50 * Math.pow(1.5, run))); - await new Promise(c => setTimeout(c, millis)); - } - } - - console.error(`Too many retries, aborting.`); - throw lastError; -} diff --git a/build/azure-pipelines/common/sign-win32.js b/build/azure-pipelines/common/sign-win32.js deleted file mode 100644 index f4e3f27c..00000000 --- a/build/azure-pipelines/common/sign-win32.js +++ /dev/null @@ -1,18 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const sign_1 = require("./sign"); -const path_1 = __importDefault(require("path")); -(0, sign_1.main)([ - process.env['EsrpCliDllPath'], - 'sign-windows', - path_1.default.dirname(process.argv[2]), - path_1.default.basename(process.argv[2]) -]); -//# sourceMappingURL=sign-win32.js.map \ No newline at end of file diff --git a/build/azure-pipelines/common/sign-win32.ts b/build/azure-pipelines/common/sign-win32.ts deleted file mode 100644 index ad88435b..00000000 --- a/build/azure-pipelines/common/sign-win32.ts +++ /dev/null @@ -1,14 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ - -import { main } from './sign'; -import path from 'path'; - -main([ - process.env['EsrpCliDllPath']!, - 'sign-windows', - path.dirname(process.argv[2]), - path.basename(process.argv[2]) -]); diff --git a/build/azure-pipelines/common/sign.js b/build/azure-pipelines/common/sign.js deleted file mode 100644 index fd87772b..00000000 --- a/build/azure-pipelines/common/sign.js +++ /dev/null @@ -1,206 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Temp = void 0; -exports.main = main; -const child_process_1 = __importDefault(require("child_process")); -const fs_1 = __importDefault(require("fs")); -const crypto_1 = __importDefault(require("crypto")); -const path_1 = __importDefault(require("path")); -const os_1 = __importDefault(require("os")); -class Temp { - _files = []; - tmpNameSync() { - const file = path_1.default.join(os_1.default.tmpdir(), crypto_1.default.randomBytes(20).toString('hex')); - this._files.push(file); - return file; - } - dispose() { - for (const file of this._files) { - try { - fs_1.default.unlinkSync(file); - } - catch (err) { - // noop - } - } - } -} -exports.Temp = Temp; -function getParams(type) { - switch (type) { - case 'sign-windows': - return [ - { - keyCode: 'CP-230012', - operationSetCode: 'SigntoolSign', - parameters: [ - { parameterName: 'OpusName', parameterValue: 'VS Code' }, - { parameterName: 'OpusInfo', parameterValue: 'https://code.visualstudio.com/' }, - { parameterName: 'Append', parameterValue: '/as' }, - { parameterName: 'FileDigest', parameterValue: '/fd "SHA256"' }, - { parameterName: 'PageHash', parameterValue: '/NPH' }, - { parameterName: 'TimeStamp', parameterValue: '/tr "http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer" /td sha256' } - ], - toolName: 'sign', - toolVersion: '1.0' - }, - { - keyCode: 'CP-230012', - operationSetCode: 'SigntoolVerify', - parameters: [ - { parameterName: 'VerifyAll', parameterValue: '/all' } - ], - toolName: 'sign', - toolVersion: '1.0' - } - ]; - case 'sign-windows-appx': - return [ - { - keyCode: 'CP-229979', - operationSetCode: 'SigntoolSign', - parameters: [ - { parameterName: 'OpusName', parameterValue: 'VS Code' }, - { parameterName: 'OpusInfo', parameterValue: 'https://code.visualstudio.com/' }, - { parameterName: 'FileDigest', parameterValue: '/fd "SHA256"' }, - { parameterName: 'PageHash', parameterValue: '/NPH' }, 
- { parameterName: 'TimeStamp', parameterValue: '/tr "http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer" /td sha256' } - ], - toolName: 'sign', - toolVersion: '1.0' - }, - { - keyCode: 'CP-229979', - operationSetCode: 'SigntoolVerify', - parameters: [], - toolName: 'sign', - toolVersion: '1.0' - } - ]; - case 'sign-pgp': - return [{ - keyCode: 'CP-450779-Pgp', - operationSetCode: 'LinuxSign', - parameters: [], - toolName: 'sign', - toolVersion: '1.0' - }]; - case 'sign-darwin': - return [{ - keyCode: 'CP-401337-Apple', - operationSetCode: 'MacAppDeveloperSign', - parameters: [{ parameterName: 'Hardening', parameterValue: '--options=runtime' }], - toolName: 'sign', - toolVersion: '1.0' - }]; - case 'notarize-darwin': - return [{ - keyCode: 'CP-401337-Apple', - operationSetCode: 'MacAppNotarize', - parameters: [], - toolName: 'sign', - toolVersion: '1.0' - }]; - case 'nuget': - return [{ - keyCode: 'CP-401405', - operationSetCode: 'NuGetSign', - parameters: [], - toolName: 'sign', - toolVersion: '1.0' - }, { - keyCode: 'CP-401405', - operationSetCode: 'NuGetVerify', - parameters: [], - toolName: 'sign', - toolVersion: '1.0' - }]; - default: - throw new Error(`Sign type ${type} not found`); - } -} -function main([esrpCliPath, type, folderPath, pattern]) { - const tmp = new Temp(); - process.on('exit', () => tmp.dispose()); - const key = crypto_1.default.randomBytes(32); - const iv = crypto_1.default.randomBytes(16); - const cipher = crypto_1.default.createCipheriv('aes-256-cbc', key, iv); - const encryptedToken = cipher.update(process.env['SYSTEM_ACCESSTOKEN'].trim(), 'utf8', 'hex') + cipher.final('hex'); - const encryptionDetailsPath = tmp.tmpNameSync(); - fs_1.default.writeFileSync(encryptionDetailsPath, JSON.stringify({ key: key.toString('hex'), iv: iv.toString('hex') })); - const encryptedTokenPath = tmp.tmpNameSync(); - fs_1.default.writeFileSync(encryptedTokenPath, encryptedToken); - const patternPath = tmp.tmpNameSync(); - 
fs_1.default.writeFileSync(patternPath, pattern); - const paramsPath = tmp.tmpNameSync(); - fs_1.default.writeFileSync(paramsPath, JSON.stringify(getParams(type))); - const dotnetVersion = child_process_1.default.execSync('dotnet --version', { encoding: 'utf8' }).trim(); - const adoTaskVersion = path_1.default.basename(path_1.default.dirname(path_1.default.dirname(esrpCliPath))); - const federatedTokenData = { - jobId: process.env['SYSTEM_JOBID'], - planId: process.env['SYSTEM_PLANID'], - projectId: process.env['SYSTEM_TEAMPROJECTID'], - hub: process.env['SYSTEM_HOSTTYPE'], - uri: process.env['SYSTEM_COLLECTIONURI'], - managedIdentityId: process.env['VSCODE_ESRP_CLIENT_ID'], - managedIdentityTenantId: process.env['VSCODE_ESRP_TENANT_ID'], - serviceConnectionId: process.env['VSCODE_ESRP_SERVICE_CONNECTION_ID'], - tempDirectory: os_1.default.tmpdir(), - systemAccessToken: encryptedTokenPath, - encryptionKey: encryptionDetailsPath - }; - const args = [ - esrpCliPath, - 'vsts.sign', - '-a', process.env['ESRP_CLIENT_ID'], - '-d', process.env['ESRP_TENANT_ID'], - '-k', JSON.stringify({ akv: 'vscode-esrp' }), - '-z', JSON.stringify({ akv: 'vscode-esrp', cert: 'esrp-sign' }), - '-f', folderPath, - '-p', patternPath, - '-u', 'false', - '-x', 'regularSigning', - '-b', 'input.json', - '-l', 'AzSecPack_PublisherPolicyProd.xml', - '-y', 'inlineSignParams', - '-j', paramsPath, - '-c', '9997', - '-t', '120', - '-g', '10', - '-v', 'Tls12', - '-s', 'https://api.esrp.microsoft.com/api/v1', - '-m', '0', - '-o', 'Microsoft', - '-i', 'https://www.microsoft.com', - '-n', '5', - '-r', 'true', - '-w', dotnetVersion, - '-skipAdoReportAttachment', 'false', - '-pendingAnalysisWaitTimeoutMinutes', '5', - '-adoTaskVersion', adoTaskVersion, - '-resourceUri', 'https://msazurecloud.onmicrosoft.com/api.esrp.microsoft.com', - '-esrpClientId', process.env['ESRP_CLIENT_ID'], - '-useMSIAuthentication', 'true', - '-federatedTokenData', JSON.stringify(federatedTokenData) - ]; - try { - 
child_process_1.default.execFileSync('dotnet', args, { stdio: 'inherit' }); - } - catch (err) { - console.error('ESRP failed'); - console.error(err); - process.exit(1); - } -} -if (require.main === module) { - main(process.argv.slice(2)); - process.exit(0); -} -//# sourceMappingURL=sign.js.map \ No newline at end of file diff --git a/build/azure-pipelines/common/sign.ts b/build/azure-pipelines/common/sign.ts deleted file mode 100644 index 19a28848..00000000 --- a/build/azure-pipelines/common/sign.ts +++ /dev/null @@ -1,222 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ - -import cp from 'child_process'; -import fs from 'fs'; -import crypto from 'crypto'; -import path from 'path'; -import os from 'os'; - -export class Temp { - private _files: string[] = []; - - tmpNameSync(): string { - const file = path.join(os.tmpdir(), crypto.randomBytes(20).toString('hex')); - this._files.push(file); - return file; - } - - dispose(): void { - for (const file of this._files) { - try { - fs.unlinkSync(file); - } catch (err) { - // noop - } - } - } -} - -interface Params { - readonly keyCode: string; - readonly operationSetCode: string; - readonly parameters: { - readonly parameterName: string; - readonly parameterValue: string; - }[]; - readonly toolName: string; - readonly toolVersion: string; -} - -function getParams(type: string): Params[] { - switch (type) { - case 'sign-windows': - return [ - { - keyCode: 'CP-230012', - operationSetCode: 'SigntoolSign', - parameters: [ - { parameterName: 'OpusName', parameterValue: 'VS Code' }, - { parameterName: 'OpusInfo', parameterValue: 'https://code.visualstudio.com/' }, - { parameterName: 'Append', parameterValue: '/as' }, - { 
parameterName: 'FileDigest', parameterValue: '/fd "SHA256"' }, - { parameterName: 'PageHash', parameterValue: '/NPH' }, - { parameterName: 'TimeStamp', parameterValue: '/tr "http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer" /td sha256' } - ], - toolName: 'sign', - toolVersion: '1.0' - }, - { - keyCode: 'CP-230012', - operationSetCode: 'SigntoolVerify', - parameters: [ - { parameterName: 'VerifyAll', parameterValue: '/all' } - ], - toolName: 'sign', - toolVersion: '1.0' - } - ]; - case 'sign-windows-appx': - return [ - { - keyCode: 'CP-229979', - operationSetCode: 'SigntoolSign', - parameters: [ - { parameterName: 'OpusName', parameterValue: 'VS Code' }, - { parameterName: 'OpusInfo', parameterValue: 'https://code.visualstudio.com/' }, - { parameterName: 'FileDigest', parameterValue: '/fd "SHA256"' }, - { parameterName: 'PageHash', parameterValue: '/NPH' }, - { parameterName: 'TimeStamp', parameterValue: '/tr "http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer" /td sha256' } - ], - toolName: 'sign', - toolVersion: '1.0' - }, - { - keyCode: 'CP-229979', - operationSetCode: 'SigntoolVerify', - parameters: [], - toolName: 'sign', - toolVersion: '1.0' - } - ]; - case 'sign-pgp': - return [{ - keyCode: 'CP-450779-Pgp', - operationSetCode: 'LinuxSign', - parameters: [], - toolName: 'sign', - toolVersion: '1.0' - }]; - case 'sign-darwin': - return [{ - keyCode: 'CP-401337-Apple', - operationSetCode: 'MacAppDeveloperSign', - parameters: [{ parameterName: 'Hardening', parameterValue: '--options=runtime' }], - toolName: 'sign', - toolVersion: '1.0' - }]; - case 'notarize-darwin': - return [{ - keyCode: 'CP-401337-Apple', - operationSetCode: 'MacAppNotarize', - parameters: [], - toolName: 'sign', - toolVersion: '1.0' - }]; - case 'nuget': - return [{ - keyCode: 'CP-401405', - operationSetCode: 'NuGetSign', - parameters: [], - toolName: 'sign', - toolVersion: '1.0' - }, { - keyCode: 'CP-401405', - operationSetCode: 'NuGetVerify', - parameters: [], - toolName: 
'sign', - toolVersion: '1.0' - }]; - default: - throw new Error(`Sign type ${type} not found`); - } -} - -export function main([esrpCliPath, type, folderPath, pattern]: string[]) { - const tmp = new Temp(); - process.on('exit', () => tmp.dispose()); - - const key = crypto.randomBytes(32); - const iv = crypto.randomBytes(16); - const cipher = crypto.createCipheriv('aes-256-cbc', key, iv); - const encryptedToken = cipher.update(process.env['SYSTEM_ACCESSTOKEN']!.trim(), 'utf8', 'hex') + cipher.final('hex'); - - const encryptionDetailsPath = tmp.tmpNameSync(); - fs.writeFileSync(encryptionDetailsPath, JSON.stringify({ key: key.toString('hex'), iv: iv.toString('hex') })); - - const encryptedTokenPath = tmp.tmpNameSync(); - fs.writeFileSync(encryptedTokenPath, encryptedToken); - - const patternPath = tmp.tmpNameSync(); - fs.writeFileSync(patternPath, pattern); - - const paramsPath = tmp.tmpNameSync(); - fs.writeFileSync(paramsPath, JSON.stringify(getParams(type))); - - const dotnetVersion = cp.execSync('dotnet --version', { encoding: 'utf8' }).trim(); - const adoTaskVersion = path.basename(path.dirname(path.dirname(esrpCliPath))); - - const federatedTokenData = { - jobId: process.env['SYSTEM_JOBID'], - planId: process.env['SYSTEM_PLANID'], - projectId: process.env['SYSTEM_TEAMPROJECTID'], - hub: process.env['SYSTEM_HOSTTYPE'], - uri: process.env['SYSTEM_COLLECTIONURI'], - managedIdentityId: process.env['VSCODE_ESRP_CLIENT_ID'], - managedIdentityTenantId: process.env['VSCODE_ESRP_TENANT_ID'], - serviceConnectionId: process.env['VSCODE_ESRP_SERVICE_CONNECTION_ID'], - tempDirectory: os.tmpdir(), - systemAccessToken: encryptedTokenPath, - encryptionKey: encryptionDetailsPath - }; - - const args = [ - esrpCliPath, - 'vsts.sign', - '-a', process.env['ESRP_CLIENT_ID']!, - '-d', process.env['ESRP_TENANT_ID']!, - '-k', JSON.stringify({ akv: 'vscode-esrp' }), - '-z', JSON.stringify({ akv: 'vscode-esrp', cert: 'esrp-sign' }), - '-f', folderPath, - '-p', patternPath, - '-u', 
'false', - '-x', 'regularSigning', - '-b', 'input.json', - '-l', 'AzSecPack_PublisherPolicyProd.xml', - '-y', 'inlineSignParams', - '-j', paramsPath, - '-c', '9997', - '-t', '120', - '-g', '10', - '-v', 'Tls12', - '-s', 'https://api.esrp.microsoft.com/api/v1', - '-m', '0', - '-o', 'Microsoft', - '-i', 'https://www.microsoft.com', - '-n', '5', - '-r', 'true', - '-w', dotnetVersion, - '-skipAdoReportAttachment', 'false', - '-pendingAnalysisWaitTimeoutMinutes', '5', - '-adoTaskVersion', adoTaskVersion, - '-resourceUri', 'https://msazurecloud.onmicrosoft.com/api.esrp.microsoft.com', - '-esrpClientId', process.env['ESRP_CLIENT_ID']!, - '-useMSIAuthentication', 'true', - '-federatedTokenData', JSON.stringify(federatedTokenData) - ]; - - try { - cp.execFileSync('dotnet', args, { stdio: 'inherit' }); - } catch (err) { - console.error('ESRP failed'); - console.error(err); - process.exit(1); - } -} - -if (require.main === module) { - main(process.argv.slice(2)); - process.exit(0); -} diff --git a/build/azure-pipelines/common/telemetry-config.json b/build/azure-pipelines/common/telemetry-config.json deleted file mode 100644 index 46c8ef73..00000000 --- a/build/azure-pipelines/common/telemetry-config.json +++ /dev/null @@ -1,62 +0,0 @@ -[ - { - "eventPrefix": "typescript-language-features/", - "sourceDirs": [ - "../../s/extensions/typescript-language-features" - ], - "excludedDirs": [], - "applyEndpoints": true - }, - { - "eventPrefix": "git/", - "sourceDirs": [ - "../../s/extensions/git" - ], - "excludedDirs": [], - "applyEndpoints": true - }, - { - "eventPrefix": "extension-telemetry/", - "sourceDirs": [ - "vscode-extension-telemetry" - ], - "excludedDirs": [], - "applyEndpoints": true - }, - { - "eventPrefix": "vscode-markdown/", - "sourceDirs": [ - "../../s/extensions/markdown-language-features" - ], - "excludedDirs": [], - "applyEndpoints": true - }, - { - "eventPrefix": "html-language-features/", - "sourceDirs": [ - "../../s/extensions/html-language-features", - 
"vscode-html-languageservice" - ], - "excludedDirs": [], - "applyEndpoints": true - }, - { - "eventPrefix": "json-language-features/", - "sourceDirs": [ - "../../s/extensions/json-language-features", - "vscode-json-languageservice" - ], - "excludedDirs": [], - "applyEndpoints": true - }, - { - "eventPrefix": "ms-vscode.node/", - "sourceDirs": [ - "vscode-chrome-debug-core", - "vscode-node-debug" - ], - "excludedDirs": [], - "applyEndpoints": true, - "patchDebugEvents": true - } -] diff --git a/build/azure-pipelines/common/waitForArtifacts.js b/build/azure-pipelines/common/waitForArtifacts.js deleted file mode 100644 index b9ffb739..00000000 --- a/build/azure-pipelines/common/waitForArtifacts.js +++ /dev/null @@ -1,46 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -Object.defineProperty(exports, "__esModule", { value: true }); -const publish_1 = require("../common/publish"); -const retry_1 = require("../common/retry"); -async function getPipelineArtifacts() { - const result = await (0, publish_1.requestAZDOAPI)('artifacts'); - return result.value.filter(a => !/sbom$/.test(a.name)); -} -async function main(artifacts) { - if (artifacts.length === 0) { - throw new Error(`Usage: node waitForArtifacts.js ...`); - } - // This loop will run for 30 minutes and waits to the x64 and arm64 artifacts - // to be uploaded to the pipeline by the `macOS` and `macOSARM64` jobs. As soon - // as these artifacts are found, the loop completes and the `macOSUnivesrsal` - // job resumes. 
- for (let index = 0; index < 60; index++) { - try { - console.log(`Waiting for artifacts (${artifacts.join(', ')}) to be uploaded (${index + 1}/60)...`); - const allArtifacts = await (0, retry_1.retry)(() => getPipelineArtifacts()); - console.log(` * Artifacts attached to the pipelines: ${allArtifacts.length > 0 ? allArtifacts.map(a => a.name).join(', ') : 'none'}`); - const foundArtifacts = allArtifacts.filter(a => artifacts.includes(a.name)); - console.log(` * Found artifacts: ${foundArtifacts.length > 0 ? foundArtifacts.map(a => a.name).join(', ') : 'none'}`); - if (foundArtifacts.length === artifacts.length) { - console.log(` * All artifacts were found`); - return; - } - } - catch (err) { - console.error(`ERROR: Failed to get pipeline artifacts: ${err}`); - } - await new Promise(c => setTimeout(c, 30_000)); - } - throw new Error(`ERROR: Artifacts (${artifacts.join(', ')}) were not uploaded within 30 minutes.`); -} -main(process.argv.splice(2)).then(() => { - process.exit(0); -}, err => { - console.error(err); - process.exit(1); -}); -//# sourceMappingURL=waitForArtifacts.js.map \ No newline at end of file diff --git a/build/azure-pipelines/common/waitForArtifacts.ts b/build/azure-pipelines/common/waitForArtifacts.ts deleted file mode 100644 index 3fed6cd3..00000000 --- a/build/azure-pipelines/common/waitForArtifacts.ts +++ /dev/null @@ -1,51 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ - -import { Artifact, requestAZDOAPI } from '../common/publish'; -import { retry } from '../common/retry'; - -async function getPipelineArtifacts(): Promise { - const result = await requestAZDOAPI<{ readonly value: Artifact[] }>('artifacts'); - return result.value.filter(a => !/sbom$/.test(a.name)); -} - -async function main(artifacts: string[]): Promise { - if (artifacts.length === 0) { - throw new Error(`Usage: node waitForArtifacts.js ...`); - } - - // This loop will run for 30 minutes and waits to the x64 and arm64 artifacts - // to be uploaded to the pipeline by the `macOS` and `macOSARM64` jobs. As soon - // as these artifacts are found, the loop completes and the `macOSUnivesrsal` - // job resumes. - for (let index = 0; index < 60; index++) { - try { - console.log(`Waiting for artifacts (${artifacts.join(', ')}) to be uploaded (${index + 1}/60)...`); - const allArtifacts = await retry(() => getPipelineArtifacts()); - console.log(` * Artifacts attached to the pipelines: ${allArtifacts.length > 0 ? allArtifacts.map(a => a.name).join(', ') : 'none'}`); - - const foundArtifacts = allArtifacts.filter(a => artifacts.includes(a.name)); - console.log(` * Found artifacts: ${foundArtifacts.length > 0 ? 
foundArtifacts.map(a => a.name).join(', ') : 'none'}`); - - if (foundArtifacts.length === artifacts.length) { - console.log(` * All artifacts were found`); - return; - } - } catch (err) { - console.error(`ERROR: Failed to get pipeline artifacts: ${err}`); - } - - await new Promise(c => setTimeout(c, 30_000)); - } - - throw new Error(`ERROR: Artifacts (${artifacts.join(', ')}) were not uploaded within 30 minutes.`); -} - -main(process.argv.splice(2)).then(() => { - process.exit(0); -}, err => { - console.error(err); - process.exit(1); -}); diff --git a/build/azure-pipelines/config/CredScanSuppressions.json b/build/azure-pipelines/config/CredScanSuppressions.json deleted file mode 100644 index bf52c06c..00000000 --- a/build/azure-pipelines/config/CredScanSuppressions.json +++ /dev/null @@ -1,89 +0,0 @@ -{ - "tool": "Credential Scanner", - "suppressions": [ - { - "file": [ - "src/vs/base/test/common/uri.test.ts", - "src/vs/workbench/api/test/browser/extHostTelemetry.test.ts" - ], - "_justification": "These are dummy credentials in tests." 
- }, - { - "file": [ - ".build/linux/rpm/x86_64/rpmbuild/BUILD/usr/share/code/resources/app/extensions/github-authentication/dist/extension.js", - ".build/linux/rpm/x86_64/rpmbuild/BUILD/usr/share/code/resources/app/extensions/emmet/dist/node/emmetNodeMain.js", - ".build/linux/rpm/armv7hl/rpmbuild/BUILD/usr/share/code/resources/app/extensions/github-authentication/dist/extension.js", - ".build/linux/rpm/armv7hl/rpmbuild/BUILD/usr/share/code/resources/app/extensions/emmet/dist/node/emmetNodeMain.js", - ".build/linux/rpm/aarch64/rpmbuild/BUILD/usr/share/code/resources/app/extensions/github-authentication/dist/extension.js", - ".build/linux/rpm/aarch64/rpmbuild/BUILD/usr/share/code/resources/app/extensions/emmet/dist/node/emmetNodeMain.js", - ".build/linux/snap/x64/code-x64/usr/share/code/resources/app/extensions/github-authentication/dist/extension.js", - ".build/linux/snap/x64/code-x64/usr/share/code/resources/app/extensions/emmet/dist/node/emmetNodeMain.js", - ".build/linux/snap/x64/code-x64/stage/usr/share/code/resources/app/extensions/github-authentication/dist/extension.js", - ".build/linux/snap/x64/code-x64/stage/usr/share/code/resources/app/extensions/emmet/dist/node/emmetNodeMain.js", - ".build/linux/snap/x64/code-x64/prime/usr/share/code/resources/app/extensions/github-authentication/dist/extension.js", - ".build/linux/snap/x64/code-x64/prime/usr/share/code/resources/app/extensions/emmet/dist/node/emmetNodeMain.js", - ".build/linux/snap/x64/code-x64/parts/code/build/usr/share/code/resources/app/extensions/github-authentication/dist/extension.js", - ".build/linux/snap/x64/code-x64/parts/code/install/usr/share/code/resources/app/extensions/github-authentication/dist/extension.js", - ".build/linux/snap/x64/code-x64/parts/code/src/usr/share/code/resources/app/extensions/github-authentication/dist/extension.js", - ".build/linux/snap/x64/code-x64/parts/code/build/usr/share/code/resources/app/extensions/emmet/dist/node/emmetNodeMain.js", - 
".build/linux/snap/x64/code-x64/parts/code/install/usr/share/code/resources/app/extensions/emmet/dist/node/emmetNodeMain.js", - ".build/linux/snap/x64/code-x64/parts/code/src/usr/share/code/resources/app/extensions/emmet/dist/node/emmetNodeMain.js" - ], - "_justification": "These are safe to ignore, since they are built artifacts (stable)." - }, - { - "file": [ - ".build/linux/rpm/x86_64/rpmbuild/BUILD/usr/share/code-insiders/resources/app/extensions/github-authentication/dist/extension.js", - ".build/linux/rpm/x86_64/rpmbuild/BUILD/usr/share/code-insiders/resources/app/extensions/emmet/dist/node/emmetNodeMain.js", - ".build/linux/rpm/armv7hl/rpmbuild/BUILD/usr/share/code-insiders/resources/app/extensions/github-authentication/dist/extension.js", - ".build/linux/rpm/armv7hl/rpmbuild/BUILD/usr/share/code-insiders/resources/app/extensions/emmet/dist/node/emmetNodeMain.js", - ".build/linux/rpm/aarch64/rpmbuild/BUILD/usr/share/code-insiders/resources/app/extensions/github-authentication/dist/extension.js", - ".build/linux/rpm/aarch64/rpmbuild/BUILD/usr/share/code-insiders/resources/app/extensions/emmet/dist/node/emmetNodeMain.js", - ".build/linux/snap/x64/code-insiders-x64/usr/share/code-insiders/resources/app/extensions/github-authentication/dist/extension.js", - ".build/linux/snap/x64/code-insiders-x64/usr/share/code-insiders/resources/app/extensions/emmet/dist/node/emmetNodeMain.js", - ".build/linux/snap/x64/code-insiders-x64/stage/usr/share/code-insiders/resources/app/extensions/github-authentication/dist/extension.js", - ".build/linux/snap/x64/code-insiders-x64/stage/usr/share/code-insiders/resources/app/extensions/emmet/dist/node/emmetNodeMain.js", - ".build/linux/snap/x64/code-insiders-x64/prime/usr/share/code-insiders/resources/app/extensions/github-authentication/dist/extension.js", - ".build/linux/snap/x64/code-insiders-x64/prime/usr/share/code-insiders/resources/app/extensions/emmet/dist/node/emmetNodeMain.js", - 
".build/linux/snap/x64/code-insiders-x64/parts/code/build/usr/share/code-insiders/resources/app/extensions/github-authentication/dist/extension.js", - ".build/linux/snap/x64/code-insiders-x64/parts/code/install/usr/share/code-insiders/resources/app/extensions/github-authentication/dist/extension.js", - ".build/linux/snap/x64/code-insiders-x64/parts/code/src/usr/share/code-insiders/resources/app/extensions/github-authentication/dist/extension.js", - ".build/linux/snap/x64/code-insiders-x64/parts/code/build/usr/share/code-insiders/resources/app/extensions/emmet/dist/node/emmetNodeMain.js", - ".build/linux/snap/x64/code-insiders-x64/parts/code/install/usr/share/code-insiders/resources/app/extensions/emmet/dist/node/emmetNodeMain.js", - ".build/linux/snap/x64/code-insiders-x64/parts/code/src/usr/share/code-insiders/resources/app/extensions/emmet/dist/node/emmetNodeMain.js" - ], - "_justification": "These are safe to ignore, since they are built artifacts (insiders)." - }, - { - "file": [ - ".build/linux/rpm/x86_64/rpmbuild/BUILD/usr/share/code-exploration/resources/app/extensions/github-authentication/dist/extension.js", - ".build/linux/rpm/x86_64/rpmbuild/BUILD/usr/share/code-exploration/resources/app/extensions/emmet/dist/node/emmetNodeMain.js", - ".build/linux/rpm/armv7hl/rpmbuild/BUILD/usr/share/code-exploration/resources/app/extensions/github-authentication/dist/extension.js", - ".build/linux/rpm/armv7hl/rpmbuild/BUILD/usr/share/code-exploration/resources/app/extensions/emmet/dist/node/emmetNodeMain.js", - ".build/linux/rpm/aarch64/rpmbuild/BUILD/usr/share/code-exploration/resources/app/extensions/github-authentication/dist/extension.js", - ".build/linux/rpm/aarch64/rpmbuild/BUILD/usr/share/code-exploration/resources/app/extensions/emmet/dist/node/emmetNodeMain.js", - ".build/linux/snap/x64/code-exploration-x64/usr/share/code-exploration/resources/app/extensions/github-authentication/dist/extension.js", - 
".build/linux/snap/x64/code-exploration-x64/usr/share/code-exploration/resources/app/extensions/emmet/dist/node/emmetNodeMain.js", - ".build/linux/snap/x64/code-exploration-x64/stage/usr/share/code-exploration/resources/app/extensions/github-authentication/dist/extension.js", - ".build/linux/snap/x64/code-exploration-x64/stage/usr/share/code-exploration/resources/app/extensions/emmet/dist/node/emmetNodeMain.js", - ".build/linux/snap/x64/code-exploration-x64/prime/usr/share/code-exploration/resources/app/extensions/github-authentication/dist/extension.js", - ".build/linux/snap/x64/code-exploration-x64/prime/usr/share/code-exploration/resources/app/extensions/emmet/dist/node/emmetNodeMain.js", - ".build/linux/snap/x64/code-exploration-x64/parts/code/build/usr/share/code-exploration/resources/app/extensions/github-authentication/dist/extension.js", - ".build/linux/snap/x64/code-exploration-x64/parts/code/install/usr/share/code-exploration/resources/app/extensions/github-authentication/dist/extension.js", - ".build/linux/snap/x64/code-exploration-x64/parts/code/src/usr/share/code-exploration/resources/app/extensions/github-authentication/dist/extension.js", - ".build/linux/snap/x64/code-exploration-x64/parts/code/build/usr/share/code-exploration/resources/app/extensions/emmet/dist/node/emmetNodeMain.js", - ".build/linux/snap/x64/code-exploration-x64/parts/code/install/usr/share/code-exploration/resources/app/extensions/emmet/dist/node/emmetNodeMain.js", - ".build/linux/snap/x64/code-exploration-x64/parts/code/src/usr/share/code-exploration/resources/app/extensions/emmet/dist/node/emmetNodeMain.js" - ], - "_justification": "These are safe to ignore, since they are built artifacts (exploration)." 
- }, - { - "file": [ - ".build/web/extensions/github-authentication/dist/browser/extension.js", - ".build/web/extensions/emmet/dist/browser/emmetBrowserMain.js.map", - ".build/web/extensions/emmet/dist/browser/emmetBrowserMain.js" - ], - "_justification": "These are safe to ignore, since they are built artifacts (web)." - } - ] -} diff --git a/build/azure-pipelines/config/tsaoptions.json b/build/azure-pipelines/config/tsaoptions.json deleted file mode 100644 index e337b577..00000000 --- a/build/azure-pipelines/config/tsaoptions.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "codebaseName": "devdiv_microsoft_vscode", - "serviceTreeID": "79c048b2-322f-4ed5-a1ea-252a1250e4b3", - "instanceUrl": "https://devdiv.visualstudio.com/defaultcollection", - "projectName": "DevDiv", - "areaPath": "DevDiv\\VS Code (compliance tracking only)\\Visual Studio Code Client", - "notificationAliases": [ - "monacotools@microsoft.com" - ], - "validateToolOutput": "None", - "allTools": true -} diff --git a/build/azure-pipelines/darwin/app-entitlements.plist b/build/azure-pipelines/darwin/app-entitlements.plist deleted file mode 100644 index 4073eafc..00000000 --- a/build/azure-pipelines/darwin/app-entitlements.plist +++ /dev/null @@ -1,14 +0,0 @@ - - - - - com.apple.security.cs.allow-jit - - com.apple.security.device.audio-input - - com.apple.security.device.camera - - com.apple.security.automation.apple-events - - - diff --git a/build/azure-pipelines/darwin/cli-build-darwin.yml b/build/azure-pipelines/darwin/cli-build-darwin.yml deleted file mode 100644 index 1d8dffc4..00000000 --- a/build/azure-pipelines/darwin/cli-build-darwin.yml +++ /dev/null @@ -1,88 +0,0 @@ -parameters: - - name: VSCODE_QUALITY - type: string - - name: VSCODE_BUILD_MACOS - type: boolean - default: false - - name: VSCODE_BUILD_MACOS_ARM64 - type: boolean - default: false - - name: VSCODE_CHECK_ONLY - type: boolean - default: false - -steps: - - task: NodeTool@0 - inputs: - versionSource: fromFile - versionFilePath: .nvmrc - 
nodejsMirror: https://github.com/joaomoreno/node-mirror/releases/download - - - ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}: - - template: ../cli/cli-apply-patches.yml@self - - - task: Npm@1 - displayName: Download openssl prebuilt - inputs: - command: custom - customCommand: pack @vscode-internal/openssl-prebuilt@0.0.11 - customRegistry: useFeed - customFeed: "Monaco/openssl-prebuilt" - workingDir: $(Build.ArtifactStagingDirectory) - - - script: | - set -e - mkdir $(Build.ArtifactStagingDirectory)/openssl - tar -xvzf $(Build.ArtifactStagingDirectory)/vscode-internal-openssl-prebuilt-0.0.11.tgz --strip-components=1 --directory=$(Build.ArtifactStagingDirectory)/openssl - displayName: Extract openssl prebuilt - - - template: ../cli/install-rust-posix.yml@self - parameters: - targets: - - ${{ if eq(parameters.VSCODE_BUILD_MACOS, true) }}: - - x86_64-apple-darwin - - ${{ if eq(parameters.VSCODE_BUILD_MACOS_ARM64, true) }}: - - aarch64-apple-darwin - - - ${{ if eq(parameters.VSCODE_BUILD_MACOS, true) }}: - - template: ../cli/cli-compile.yml@self - parameters: - VSCODE_QUALITY: ${{ parameters.VSCODE_QUALITY }} - VSCODE_CLI_TARGET: x86_64-apple-darwin - VSCODE_CLI_ARTIFACT: unsigned_vscode_cli_darwin_x64_cli - VSCODE_CHECK_ONLY: ${{ parameters.VSCODE_CHECK_ONLY }} - VSCODE_CLI_ENV: - OPENSSL_LIB_DIR: $(Build.ArtifactStagingDirectory)/openssl/x64-osx/lib - OPENSSL_INCLUDE_DIR: $(Build.ArtifactStagingDirectory)/openssl/x64-osx/include - - - ${{ if eq(parameters.VSCODE_BUILD_MACOS_ARM64, true) }}: - - template: ../cli/cli-compile.yml@self - parameters: - VSCODE_QUALITY: ${{ parameters.VSCODE_QUALITY }} - VSCODE_CLI_TARGET: aarch64-apple-darwin - VSCODE_CLI_ARTIFACT: unsigned_vscode_cli_darwin_arm64_cli - VSCODE_CHECK_ONLY: ${{ parameters.VSCODE_CHECK_ONLY }} - VSCODE_CLI_ENV: - OPENSSL_LIB_DIR: $(Build.ArtifactStagingDirectory)/openssl/arm64-osx/lib - OPENSSL_INCLUDE_DIR: $(Build.ArtifactStagingDirectory)/openssl/arm64-osx/include - - - ${{ if 
not(parameters.VSCODE_CHECK_ONLY) }}: - - ${{ if eq(parameters.VSCODE_BUILD_MACOS, true) }}: - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: $(Build.ArtifactStagingDirectory)/unsigned_vscode_cli_darwin_x64_cli.zip - artifactName: unsigned_vscode_cli_darwin_x64_cli - sbomBuildDropPath: $(Build.ArtifactStagingDirectory)/cli - sbomPackageName: "VS Code macOS x64 CLI (unsigned)" - sbomPackageVersion: $(Build.SourceVersion) - displayName: Publish unsigned_vscode_cli_darwin_x64_cli artifact - - - ${{ if eq(parameters.VSCODE_BUILD_MACOS_ARM64, true) }}: - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: $(Build.ArtifactStagingDirectory)/unsigned_vscode_cli_darwin_arm64_cli.zip - artifactName: unsigned_vscode_cli_darwin_arm64_cli - sbomBuildDropPath: $(Build.ArtifactStagingDirectory)/cli - sbomPackageName: "VS Code macOS arm64 CLI (unsigned)" - sbomPackageVersion: $(Build.SourceVersion) - displayName: Publish unsigned_vscode_cli_darwin_arm64_cli artifact diff --git a/build/azure-pipelines/darwin/codesign.js b/build/azure-pipelines/darwin/codesign.js deleted file mode 100644 index edc3a5f6..00000000 --- a/build/azure-pipelines/darwin/codesign.js +++ /dev/null @@ -1,30 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ -Object.defineProperty(exports, "__esModule", { value: true }); -const codesign_1 = require("../common/codesign"); -const publish_1 = require("../common/publish"); -async function main() { - const arch = (0, publish_1.e)('VSCODE_ARCH'); - const esrpCliDLLPath = (0, publish_1.e)('EsrpCliDllPath'); - const pipelineWorkspace = (0, publish_1.e)('PIPELINE_WORKSPACE'); - const folder = `${pipelineWorkspace}/unsigned_vscode_client_darwin_${arch}_archive`; - const glob = `VSCode-darwin-${arch}.zip`; - // Codesign - (0, codesign_1.printBanner)('Codesign'); - const codeSignTask = (0, codesign_1.spawnCodesignProcess)(esrpCliDLLPath, 'sign-darwin', folder, glob); - await (0, codesign_1.streamProcessOutputAndCheckResult)('Codesign', codeSignTask); - // Notarize - (0, codesign_1.printBanner)('Notarize'); - const notarizeTask = (0, codesign_1.spawnCodesignProcess)(esrpCliDLLPath, 'notarize-darwin', folder, glob); - await (0, codesign_1.streamProcessOutputAndCheckResult)('Notarize', notarizeTask); -} -main().then(() => { - process.exit(0); -}, err => { - console.error(`ERROR: ${err}`); - process.exit(1); -}); -//# sourceMappingURL=codesign.js.map \ No newline at end of file diff --git a/build/azure-pipelines/darwin/codesign.ts b/build/azure-pipelines/darwin/codesign.ts deleted file mode 100644 index a9de0206..00000000 --- a/build/azure-pipelines/darwin/codesign.ts +++ /dev/null @@ -1,33 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ - -import { printBanner, spawnCodesignProcess, streamProcessOutputAndCheckResult } from '../common/codesign'; -import { e } from '../common/publish'; - -async function main() { - const arch = e('VSCODE_ARCH'); - const esrpCliDLLPath = e('EsrpCliDllPath'); - const pipelineWorkspace = e('PIPELINE_WORKSPACE'); - - const folder = `${pipelineWorkspace}/unsigned_vscode_client_darwin_${arch}_archive`; - const glob = `VSCode-darwin-${arch}.zip`; - - // Codesign - printBanner('Codesign'); - const codeSignTask = spawnCodesignProcess(esrpCliDLLPath, 'sign-darwin', folder, glob); - await streamProcessOutputAndCheckResult('Codesign', codeSignTask); - - // Notarize - printBanner('Notarize'); - const notarizeTask = spawnCodesignProcess(esrpCliDLLPath, 'notarize-darwin', folder, glob); - await streamProcessOutputAndCheckResult('Notarize', notarizeTask); -} - -main().then(() => { - process.exit(0); -}, err => { - console.error(`ERROR: ${err}`); - process.exit(1); -}); diff --git a/build/azure-pipelines/darwin/helper-gpu-entitlements.plist b/build/azure-pipelines/darwin/helper-gpu-entitlements.plist deleted file mode 100644 index 4efe1ce5..00000000 --- a/build/azure-pipelines/darwin/helper-gpu-entitlements.plist +++ /dev/null @@ -1,8 +0,0 @@ - - - - - com.apple.security.cs.allow-jit - - - diff --git a/build/azure-pipelines/darwin/helper-plugin-entitlements.plist b/build/azure-pipelines/darwin/helper-plugin-entitlements.plist deleted file mode 100644 index 48f7bf5c..00000000 --- a/build/azure-pipelines/darwin/helper-plugin-entitlements.plist +++ /dev/null @@ -1,12 +0,0 @@ - - - - - com.apple.security.cs.allow-jit - - com.apple.security.cs.allow-unsigned-executable-memory - - com.apple.security.cs.disable-library-validation - - - diff --git a/build/azure-pipelines/darwin/helper-renderer-entitlements.plist b/build/azure-pipelines/darwin/helper-renderer-entitlements.plist deleted file mode 
100644 index 4efe1ce5..00000000 --- a/build/azure-pipelines/darwin/helper-renderer-entitlements.plist +++ /dev/null @@ -1,8 +0,0 @@ - - - - - com.apple.security.cs.allow-jit - - - diff --git a/build/azure-pipelines/darwin/product-build-darwin-cli-sign.yml b/build/azure-pipelines/darwin/product-build-darwin-cli-sign.yml deleted file mode 100644 index b3d01ca7..00000000 --- a/build/azure-pipelines/darwin/product-build-darwin-cli-sign.yml +++ /dev/null @@ -1,66 +0,0 @@ -parameters: - - name: VSCODE_BUILD_MACOS - type: boolean - - name: VSCODE_BUILD_MACOS_ARM64 - type: boolean - - name: VSCODE_QUALITY - type: string - -steps: - - task: NodeTool@0 - inputs: - versionSource: fromFile - versionFilePath: .nvmrc - nodejsMirror: https://github.com/joaomoreno/node-mirror/releases/download - - - ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}: - - task: AzureKeyVault@2 - displayName: "Azure Key Vault: Get Secrets" - inputs: - azureSubscription: vscode - KeyVaultName: vscode-build-secrets - SecretsFilter: "github-distro-mixin-password" - - - script: node build/setup-npm-registry.js $NPM_REGISTRY build - condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM Registry - - - script: | - set -e - # Set the private NPM registry to the global npmrc file - # so that authentication works for subfolders like build/, remote/, extensions/ etc - # which does not have their own .npmrc file - npm config set registry "$NPM_REGISTRY" - echo "##vso[task.setvariable variable=NPMRC_PATH]$(npm config get userconfig)" - condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM - - - task: npmAuthenticate@0 - inputs: - workingFile: $(NPMRC_PATH) - condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM Authentication - - - script: | - set -e - - for i in {1..5}; do # try 5 times - npm ci && break - if [ $i -eq 5 ]; then - echo "Npm install failed too many times" >&2 - exit 1 - fi - echo "Npm 
install failed $i, trying again..." - done - workingDirectory: build - env: - GITHUB_TOKEN: "$(github-distro-mixin-password)" - displayName: Install build dependencies - - - template: ../cli/cli-darwin-sign.yml@self - parameters: - VSCODE_CLI_ARTIFACTS: - - ${{ if eq(parameters.VSCODE_BUILD_MACOS, true) }}: - - unsigned_vscode_cli_darwin_x64_cli - - ${{ if eq(parameters.VSCODE_BUILD_MACOS_ARM64, true) }}: - - unsigned_vscode_cli_darwin_arm64_cli diff --git a/build/azure-pipelines/darwin/product-build-darwin-test.yml b/build/azure-pipelines/darwin/product-build-darwin-test.yml deleted file mode 100644 index c542caca..00000000 --- a/build/azure-pipelines/darwin/product-build-darwin-test.yml +++ /dev/null @@ -1,223 +0,0 @@ -parameters: - - name: VSCODE_QUALITY - type: string - - name: VSCODE_RUN_ELECTRON_TESTS - type: boolean - - name: VSCODE_RUN_BROWSER_TESTS - type: boolean - - name: VSCODE_RUN_REMOTE_TESTS - type: boolean - - name: VSCODE_TEST_ARTIFACT_NAME - type: string - - name: PUBLISH_TASK_NAME - type: string - default: PublishPipelineArtifact@0 - -steps: - - script: npm exec -- npm-run-all -lp "electron $(VSCODE_ARCH)" "playwright-install" - env: - GITHUB_TOKEN: "$(github-distro-mixin-password)" - displayName: Download Electron and Playwright - retryCountOnTaskFailure: 3 - - - ${{ if eq(parameters.VSCODE_QUALITY, 'oss') }}: - - ${{ if eq(parameters.VSCODE_RUN_ELECTRON_TESTS, true) }}: - - script: ./scripts/test.sh --tfs "Unit Tests" - displayName: ๐Ÿงช Run unit tests (Electron) - timeoutInMinutes: 15 - - script: npm run test-node - displayName: ๐Ÿงช Run unit tests (node.js) - timeoutInMinutes: 15 - - - ${{ if eq(parameters.VSCODE_RUN_BROWSER_TESTS, true) }}: - - script: npm run test-browser-no-install -- --browser webkit --tfs "Browser Unit Tests" - env: - DEBUG: "*browser*" - displayName: ๐Ÿงช Run unit tests (Browser, Webkit) - timeoutInMinutes: 30 - - - ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}: - - ${{ if eq(parameters.VSCODE_RUN_ELECTRON_TESTS, true) 
}}: - - script: ./scripts/test.sh --build --tfs "Unit Tests" - displayName: ๐Ÿงช Run unit tests (Electron) - timeoutInMinutes: 15 - - script: npm run test-node -- --build - displayName: ๐Ÿงช Run unit tests (node.js) - timeoutInMinutes: 15 - - - ${{ if eq(parameters.VSCODE_RUN_BROWSER_TESTS, true) }}: - - script: npm run test-browser-no-install -- --build --browser webkit --tfs "Browser Unit Tests" - env: - DEBUG: "*browser*" - displayName: ๐Ÿงช Run unit tests (Browser, Webkit) - timeoutInMinutes: 30 - - - script: | - set -e - npm run gulp \ - compile-extension:configuration-editing \ - compile-extension:css-language-features-server \ - compile-extension:emmet \ - compile-extension:git \ - compile-extension:github-authentication \ - compile-extension:html-language-features-server \ - compile-extension:ipynb \ - compile-extension:notebook-renderers \ - compile-extension:json-language-features-server \ - compile-extension:markdown-language-features \ - compile-extension-media \ - compile-extension:microsoft-authentication \ - compile-extension:typescript-language-features \ - compile-extension:vscode-api-tests \ - compile-extension:vscode-colorize-tests \ - compile-extension:vscode-colorize-perf-tests \ - compile-extension:vscode-test-resolver - displayName: Build integration tests - - - ${{ if eq(parameters.VSCODE_QUALITY, 'oss') }}: - - ${{ if eq(parameters.VSCODE_RUN_ELECTRON_TESTS, true) }}: - - script: ./scripts/test-integration.sh --tfs "Integration Tests" - displayName: ๐Ÿงช Run integration tests (Electron) - timeoutInMinutes: 20 - - - ${{ if eq(parameters.VSCODE_RUN_BROWSER_TESTS, true) }}: - - script: ./scripts/test-web-integration.sh --browser webkit - displayName: ๐Ÿงช Run integration tests (Browser, Webkit) - timeoutInMinutes: 20 - - - ${{ if eq(parameters.VSCODE_RUN_REMOTE_TESTS, true) }}: - - script: ./scripts/test-remote-integration.sh - displayName: ๐Ÿงช Run integration tests (Remote) - timeoutInMinutes: 20 - - - ${{ if ne(parameters.VSCODE_QUALITY, 
'oss') }}: - - ${{ if eq(parameters.VSCODE_RUN_ELECTRON_TESTS, true) }}: - - script: | - # Figure out the full absolute path of the product we just built - # including the remote server and configure the integration tests - # to run with these builds instead of running out of sources. - set -e - APP_ROOT="$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)" - APP_NAME="`ls $APP_ROOT | head -n 1`" - INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME/Contents/MacOS/Electron" \ - ./scripts/test-integration.sh --build --tfs "Integration Tests" - env: - VSCODE_REMOTE_SERVER_PATH: $(agent.builddirectory)/vscode-server-darwin-$(VSCODE_ARCH) - displayName: ๐Ÿงช Run integration tests (Electron) - timeoutInMinutes: 20 - - - ${{ if eq(parameters.VSCODE_RUN_BROWSER_TESTS, true) }}: - - script: ./scripts/test-web-integration.sh --browser webkit - env: - VSCODE_REMOTE_SERVER_PATH: $(agent.builddirectory)/vscode-server-darwin-$(VSCODE_ARCH)-web - displayName: ๐Ÿงช Run integration tests (Browser, Webkit) - timeoutInMinutes: 20 - - - ${{ if eq(parameters.VSCODE_RUN_REMOTE_TESTS, true) }}: - - script: | - set -e - APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH) - APP_NAME="`ls $APP_ROOT | head -n 1`" - INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME/Contents/MacOS/Electron" \ - ./scripts/test-remote-integration.sh - env: - VSCODE_REMOTE_SERVER_PATH: $(agent.builddirectory)/vscode-server-darwin-$(VSCODE_ARCH) - displayName: ๐Ÿงช Run integration tests (Remote) - timeoutInMinutes: 20 - - - script: ps -ef - displayName: Diagnostics before smoke test run - continueOnError: true - condition: succeededOrFailed() - - # - ${{ if eq(parameters.VSCODE_QUALITY, 'oss') }}: - # - script: npm run compile - # workingDirectory: test/smoke - # displayName: Compile smoke tests - - # - script: npm run gulp compile-extension-media - # displayName: Compile extensions for smoke tests - - # - ${{ if eq(parameters.VSCODE_RUN_ELECTRON_TESTS, true) }}: - # - script: npm run 
smoketest-no-compile -- --tracing - # timeoutInMinutes: 20 - # displayName: ๐Ÿงช Run smoke tests (Electron) - - - ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}: - - ${{ if eq(parameters.VSCODE_RUN_ELECTRON_TESTS, true) }}: - - script: | - set -e - APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH) - APP_NAME="`ls $APP_ROOT | head -n 1`" - npm run smoketest-no-compile -- --tracing --build "$APP_ROOT/$APP_NAME" - timeoutInMinutes: 20 - displayName: ๐Ÿงช Run smoke tests (Electron) - - - ${{ if eq(parameters.VSCODE_RUN_BROWSER_TESTS, true) }}: - - script: npm run smoketest-no-compile -- --web --tracing --headless - env: - VSCODE_REMOTE_SERVER_PATH: $(agent.builddirectory)/vscode-server-darwin-$(VSCODE_ARCH)-web - timeoutInMinutes: 20 - displayName: ๐Ÿงช Run smoke tests (Browser, Chromium) - - - ${{ if eq(parameters.VSCODE_RUN_REMOTE_TESTS, true) }}: - - script: | - set -e - npm run gulp compile-extension:vscode-test-resolver - APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH) - APP_NAME="`ls $APP_ROOT | head -n 1`" - npm run smoketest-no-compile -- --tracing --remote --build "$APP_ROOT/$APP_NAME" - env: - VSCODE_REMOTE_SERVER_PATH: $(agent.builddirectory)/vscode-server-darwin-$(VSCODE_ARCH) - timeoutInMinutes: 20 - displayName: ๐Ÿงช Run smoke tests (Remote) - - - script: ps -ef - displayName: Diagnostics after smoke test run - continueOnError: true - condition: succeededOrFailed() - - - task: ${{ parameters.PUBLISH_TASK_NAME }} - inputs: - targetPath: .build/crashes - ${{ if eq(parameters.VSCODE_TEST_ARTIFACT_NAME, '') }}: - artifactName: crash-dump-macos-$(VSCODE_ARCH)-$(System.JobAttempt) - ${{ else }}: - artifactName: crash-dump-macos-$(VSCODE_ARCH)-${{ parameters.VSCODE_TEST_ARTIFACT_NAME }}-$(System.JobAttempt) - sbomEnabled: false - displayName: "Publish Crash Reports" - continueOnError: true - condition: failed() - - # In order to properly symbolify above crash reports - # (if any), we need the compiled native modules too - - task: ${{ 
parameters.PUBLISH_TASK_NAME }} - inputs: - targetPath: node_modules - ${{ if eq(parameters.VSCODE_TEST_ARTIFACT_NAME, '') }}: - artifactName: node-modules-macos-$(VSCODE_ARCH)-$(System.JobAttempt) - ${{ else }}: - artifactName: node-modules-macos-$(VSCODE_ARCH)-${{ parameters.VSCODE_TEST_ARTIFACT_NAME }}-$(System.JobAttempt) - sbomEnabled: false - displayName: "Publish Node Modules" - continueOnError: true - condition: failed() - - - task: ${{ parameters.PUBLISH_TASK_NAME }} - inputs: - targetPath: .build/logs - ${{ if eq(parameters.VSCODE_TEST_ARTIFACT_NAME, '') }}: - artifactName: logs-macos-$(VSCODE_ARCH)-$(System.JobAttempt) - ${{ else }}: - artifactName: logs-macos-$(VSCODE_ARCH)-${{ parameters.VSCODE_TEST_ARTIFACT_NAME }}-$(System.JobAttempt) - sbomEnabled: false - displayName: "Publish Log Files" - continueOnError: true - condition: succeededOrFailed() - - - task: PublishTestResults@2 - displayName: Publish Tests Results - inputs: - testResultsFiles: "*-results.xml" - searchFolder: "$(Build.ArtifactStagingDirectory)/test-results" - condition: succeededOrFailed() diff --git a/build/azure-pipelines/darwin/product-build-darwin-universal.yml b/build/azure-pipelines/darwin/product-build-darwin-universal.yml deleted file mode 100644 index 9e9537fe..00000000 --- a/build/azure-pipelines/darwin/product-build-darwin-universal.yml +++ /dev/null @@ -1,152 +0,0 @@ -steps: - - task: NodeTool@0 - inputs: - versionSource: fromFile - versionFilePath: .nvmrc - nodejsMirror: https://github.com/joaomoreno/node-mirror/releases/download - - - template: ../distro/download-distro.yml@self - - - task: AzureKeyVault@2 - displayName: "Azure Key Vault: Get Secrets" - inputs: - azureSubscription: vscode - KeyVaultName: vscode-build-secrets - SecretsFilter: "github-distro-mixin-password,macos-developer-certificate,macos-developer-certificate-key" - - - script: node build/setup-npm-registry.js $NPM_REGISTRY build - condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) - 
displayName: Setup NPM Registry - - - script: | - set -e - # Set the private NPM registry to the global npmrc file - # so that authentication works for subfolders like build/, remote/, extensions/ etc - # which does not have their own .npmrc file - npm config set registry "$NPM_REGISTRY" - echo "##vso[task.setvariable variable=NPMRC_PATH]$(npm config get userconfig)" - condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM - - - task: npmAuthenticate@0 - inputs: - workingFile: $(NPMRC_PATH) - condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM Authentication - - - script: | - set -e - - for i in {1..5}; do # try 5 times - npm ci && break - if [ $i -eq 5 ]; then - echo "Npm install failed too many times" >&2 - exit 1 - fi - echo "Npm install failed $i, trying again..." - done - workingDirectory: build - env: - GITHUB_TOKEN: "$(github-distro-mixin-password)" - displayName: Install build dependencies - - - pwsh: node build/azure-pipelines/common/waitForArtifacts.js unsigned_vscode_client_darwin_x64_archive unsigned_vscode_client_darwin_arm64_archive - env: - SYSTEM_ACCESSTOKEN: $(System.AccessToken) - displayName: Wait for x64 and arm64 artifacts - - - download: current - artifact: unsigned_vscode_client_darwin_x64_archive - displayName: Download x64 artifact - - - download: current - artifact: unsigned_vscode_client_darwin_arm64_archive - displayName: Download arm64 artifact - - - script: node build/azure-pipelines/distro/mixin-quality - displayName: Mixin distro quality - - - script: | - set -e - unzip $(Pipeline.Workspace)/unsigned_vscode_client_darwin_x64_archive/VSCode-darwin-x64.zip -d $(agent.builddirectory)/VSCode-darwin-x64 & - unzip $(Pipeline.Workspace)/unsigned_vscode_client_darwin_arm64_archive/VSCode-darwin-arm64.zip -d $(agent.builddirectory)/VSCode-darwin-arm64 & - wait - DEBUG=* node build/darwin/create-universal-app.js $(agent.builddirectory) - displayName: Create Universal 
App - - - script: | - set -e - APP_ROOT="$(Agent.BuildDirectory)/VSCode-darwin-$(VSCODE_ARCH)" - APP_NAME="`ls $APP_ROOT | head -n 1`" - APP_PATH="$APP_ROOT/$APP_NAME" node build/darwin/verify-macho.js universal - displayName: Verify arch of Mach-O objects - - - script: | - set -e - security create-keychain -p pwd $(agent.tempdirectory)/buildagent.keychain - security default-keychain -s $(agent.tempdirectory)/buildagent.keychain - security unlock-keychain -p pwd $(agent.tempdirectory)/buildagent.keychain - echo "$(macos-developer-certificate)" | base64 -D > $(agent.tempdirectory)/cert.p12 - security import $(agent.tempdirectory)/cert.p12 -k $(agent.tempdirectory)/buildagent.keychain -P "$(macos-developer-certificate-key)" -T /usr/bin/codesign - export CODESIGN_IDENTITY=$(security find-identity -v -p codesigning $(agent.tempdirectory)/buildagent.keychain | grep -oEi "([0-9A-F]{40})" | head -n 1) - security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k pwd $(agent.tempdirectory)/buildagent.keychain - DEBUG=electron-osx-sign* node build/darwin/sign.js $(agent.builddirectory) - displayName: Set Hardened Entitlements - - - script: | - set -e - mkdir -p $(Pipeline.Workspace)/unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive - pushd $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH) && zip -r -X -y $(Pipeline.Workspace)/unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive/VSCode-darwin-$(VSCODE_ARCH).zip * && popd - displayName: Archive build - - - task: UseDotNet@2 - inputs: - version: 6.x - - - task: EsrpCodeSigning@5 - inputs: - UseMSIAuthentication: true - ConnectedServiceName: vscode-esrp - AppRegistrationClientId: $(ESRP_CLIENT_ID) - AppRegistrationTenantId: $(ESRP_TENANT_ID) - AuthAKVName: vscode-esrp - AuthSignCertName: esrp-sign - FolderPath: . 
- Pattern: noop - displayName: 'Install ESRP Tooling' - - - script: node build/azure-pipelines/common/sign $(Agent.RootDirectory)/_tasks/EsrpCodeSigning_*/*/net6.0/esrpcli.dll sign-darwin $(Pipeline.Workspace)/unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive VSCode-darwin-$(VSCODE_ARCH).zip - env: - SYSTEM_ACCESSTOKEN: $(System.AccessToken) - displayName: โœ๏ธ Codesign - - - script: node build/azure-pipelines/common/sign $(Agent.RootDirectory)/_tasks/EsrpCodeSigning_*/*/net6.0/esrpcli.dll notarize-darwin $(Pipeline.Workspace)/unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive VSCode-darwin-$(VSCODE_ARCH).zip - env: - SYSTEM_ACCESSTOKEN: $(System.AccessToken) - displayName: โœ๏ธ Notarize - - - script: unzip $(Pipeline.Workspace)/unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive/VSCode-darwin-$(VSCODE_ARCH).zip -d $(Build.ArtifactStagingDirectory)/VSCode-darwin-$(VSCODE_ARCH) - displayName: Extract signed app - - - script: | - set -e - APP_ROOT="$(Build.ArtifactStagingDirectory)/VSCode-darwin-$(VSCODE_ARCH)" - APP_NAME="`ls $APP_ROOT | head -n 1`" - APP_PATH="$APP_ROOT/$APP_NAME" - codesign -dv --deep --verbose=4 "$APP_PATH" - "$APP_PATH/Contents/Resources/app/bin/code" --export-default-configuration=.build - displayName: Verify signature - condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'arm64')) - - - script: mv $(Pipeline.Workspace)/unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive/VSCode-darwin-x64.zip $(Pipeline.Workspace)/unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive/VSCode-darwin.zip - displayName: Rename x64 build to its legacy name - condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64')) - - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: $(Pipeline.Workspace)/unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive/VSCode-darwin-universal.zip - artifactName: vscode_client_darwin_$(VSCODE_ARCH)_archive - sbomBuildDropPath: $(Build.ArtifactStagingDirectory)/VSCode-darwin-$(VSCODE_ARCH) - sbomPackageName: "VS 
Code macOS $(VSCODE_ARCH)" - sbomPackageVersion: $(Build.SourceVersion) - displayName: Publish client archive diff --git a/build/azure-pipelines/darwin/product-build-darwin.yml b/build/azure-pipelines/darwin/product-build-darwin.yml deleted file mode 100644 index a6072c8f..00000000 --- a/build/azure-pipelines/darwin/product-build-darwin.yml +++ /dev/null @@ -1,339 +0,0 @@ -parameters: - - name: VSCODE_ARCH - type: string - - name: VSCODE_QUALITY - type: string - - name: VSCODE_CIBUILD - type: boolean - - name: VSCODE_RUN_ELECTRON_TESTS - type: boolean - default: false - - name: VSCODE_RUN_BROWSER_TESTS - type: boolean - default: false - - name: VSCODE_RUN_REMOTE_TESTS - type: boolean - default: false - - name: VSCODE_TEST_ARTIFACT_NAME - type: string - default: "" - -steps: - - ${{ if eq(parameters.VSCODE_QUALITY, 'oss') }}: - - checkout: self - fetchDepth: 1 - retryCountOnTaskFailure: 3 - - - task: NodeTool@0 - inputs: - versionSource: fromFile - versionFilePath: .nvmrc - nodejsMirror: https://github.com/joaomoreno/node-mirror/releases/download - - - ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}: - - template: ../distro/download-distro.yml@self - - - task: AzureKeyVault@2 - displayName: "Azure Key Vault: Get Secrets" - inputs: - azureSubscription: vscode - KeyVaultName: vscode-build-secrets - SecretsFilter: "github-distro-mixin-password,macos-developer-certificate,macos-developer-certificate-key" - - - task: DownloadPipelineArtifact@2 - inputs: - artifact: Compilation - path: $(Build.ArtifactStagingDirectory) - displayName: Download compilation output - - - script: tar -xzf $(Build.ArtifactStagingDirectory)/compilation.tar.gz - displayName: Extract compilation output - - - script: node build/setup-npm-registry.js $NPM_REGISTRY - condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM Registry - - - script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js darwin $VSCODE_ARCH $(node -p 
process.arch) > .build/packagelockhash - displayName: Prepare node_modules cache key - - - task: Cache@2 - inputs: - key: '"node_modules" | .build/packagelockhash' - path: .build/node_modules_cache - cacheHitVar: NODE_MODULES_RESTORED - displayName: Restore node_modules cache - - - script: tar -xzf .build/node_modules_cache/cache.tgz - condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true')) - displayName: Extract node_modules cache - - - script: | - set -e - # Set the private NPM registry to the global npmrc file - # so that authentication works for subfolders like build/, remote/, extensions/ etc - # which does not have their own .npmrc file - npm config set registry "$NPM_REGISTRY" - echo "##vso[task.setvariable variable=NPMRC_PATH]$(npm config get userconfig)" - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM - - - task: npmAuthenticate@0 - inputs: - workingFile: $(NPMRC_PATH) - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM Authentication - - - script: | - set -e - c++ --version - xcode-select -print-path - python3 -m pip install setuptools - - for i in {1..5}; do # try 5 times - npm ci && break - if [ $i -eq 5 ]; then - echo "Npm install failed too many times" >&2 - exit 1 - fi - echo "Npm install failed $i, trying again..." - done - env: - npm_config_arch: $(VSCODE_ARCH) - ELECTRON_SKIP_BINARY_DOWNLOAD: 1 - PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1 - GITHUB_TOKEN: "$(github-distro-mixin-password)" - # Avoid using dlopen to load Kerberos on macOS which can cause missing libraries - # https://github.com/mongodb-js/kerberos/commit/04044d2814ad1d01e77f1ce87f26b03d86692cf2 - # flipped the default to support legacy linux distros which shouldn't happen - # on macOS. 
- GYP_DEFINES: "kerberos_use_rtld=false" - displayName: Install dependencies - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - - - ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}: - - script: node build/azure-pipelines/distro/mixin-npm - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - displayName: Mixin distro node modules - - - script: | - set -e - node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt - mkdir -p .build/node_modules_cache - tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - displayName: Create node_modules archive - - - ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}: - - script: node build/azure-pipelines/distro/mixin-quality - displayName: Mixin distro quality - - - template: ../common/install-builtin-extensions.yml@self - - - ${{ if and(ne(parameters.VSCODE_CIBUILD, true), ne(parameters.VSCODE_QUALITY, 'oss')) }}: - - script: node build/lib/policies darwin - displayName: Generate policy definitions - retryCountOnTaskFailure: 3 - - - ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}: - - script: | - set -e - npm run gulp vscode-darwin-$(VSCODE_ARCH)-min-ci - echo "##vso[task.setvariable variable=BUILT_CLIENT]true" - env: - GITHUB_TOKEN: "$(github-distro-mixin-password)" - displayName: Build client - - - script: | - set -e - npm run gulp vscode-reh-darwin-$(VSCODE_ARCH)-min-ci - mv ../vscode-reh-darwin-$(VSCODE_ARCH) ../vscode-server-darwin-$(VSCODE_ARCH) # TODO@joaomoreno - ARCHIVE_PATH=".build/darwin/server/vscode-server-darwin-$(VSCODE_ARCH).zip" - mkdir -p $(dirname $ARCHIVE_PATH) - (cd .. 
&& zip -Xry $(Build.SourcesDirectory)/$ARCHIVE_PATH vscode-server-darwin-$(VSCODE_ARCH)) - echo "##vso[task.setvariable variable=SERVER_PATH]$ARCHIVE_PATH" - env: - GITHUB_TOKEN: "$(github-distro-mixin-password)" - displayName: Build server - - - script: | - set -e - npm run gulp vscode-reh-web-darwin-$(VSCODE_ARCH)-min-ci - mv ../vscode-reh-web-darwin-$(VSCODE_ARCH) ../vscode-server-darwin-$(VSCODE_ARCH)-web # TODO@joaomoreno - ARCHIVE_PATH=".build/darwin/server/vscode-server-darwin-$(VSCODE_ARCH)-web.zip" - mkdir -p $(dirname $ARCHIVE_PATH) - (cd .. && zip -Xry $(Build.SourcesDirectory)/$ARCHIVE_PATH vscode-server-darwin-$(VSCODE_ARCH)-web) - echo "##vso[task.setvariable variable=WEB_PATH]$ARCHIVE_PATH" - env: - GITHUB_TOKEN: "$(github-distro-mixin-password)" - displayName: Build server (web) - - - ${{ else }}: - - script: npm run gulp transpile-client-esbuild transpile-extensions - env: - GITHUB_TOKEN: "$(github-distro-mixin-password)" - displayName: Transpile - - - ${{ if and(ne(parameters.VSCODE_CIBUILD, true), ne(parameters.VSCODE_QUALITY, 'oss')) }}: - - task: DownloadPipelineArtifact@2 - inputs: - artifact: unsigned_vscode_cli_darwin_$(VSCODE_ARCH)_cli - patterns: "**" - path: $(Build.ArtifactStagingDirectory)/cli - displayName: Download VS Code CLI - - - script: | - set -e - APP_ROOT="$(Agent.BuildDirectory)/VSCode-darwin-$(VSCODE_ARCH)" - APP_NAME="`ls $APP_ROOT | head -n 1`" - APP_PATH="$APP_ROOT/$APP_NAME" - unzip $(Build.ArtifactStagingDirectory)/cli/*.zip -d $(Build.ArtifactStagingDirectory)/cli - CLI_APP_NAME=$(node -p "require(\"$APP_PATH/Contents/Resources/app/product.json\").tunnelApplicationName") - APP_NAME=$(node -p "require(\"$APP_PATH/Contents/Resources/app/product.json\").applicationName") - mv "$(Build.ArtifactStagingDirectory)/cli/$APP_NAME" "$APP_PATH/Contents/Resources/app/bin/$CLI_APP_NAME" - chmod +x "$APP_PATH/Contents/Resources/app/bin/$CLI_APP_NAME" - displayName: Make CLI executable - - - script: | - set -e - 
APP_ROOT="$(Agent.BuildDirectory)/VSCode-darwin-$(VSCODE_ARCH)" - APP_NAME="`ls $APP_ROOT | head -n 1`" - APP_PATH="$APP_ROOT/$APP_NAME" node build/darwin/verify-macho.js $(VSCODE_ARCH) - APP_PATH="$(Agent.BuildDirectory)/vscode-server-darwin-$(VSCODE_ARCH)" node build/darwin/verify-macho.js $(VSCODE_ARCH) - displayName: Verify arch of Mach-O objects - - # Setting hardened entitlements is a requirement for: - # * Apple notarization - # * Running tests on Big Sur (because Big Sur has additional security precautions) - - script: | - set -e - security create-keychain -p pwd $(agent.tempdirectory)/buildagent.keychain - security default-keychain -s $(agent.tempdirectory)/buildagent.keychain - security unlock-keychain -p pwd $(agent.tempdirectory)/buildagent.keychain - echo "$(macos-developer-certificate)" | base64 -D > $(agent.tempdirectory)/cert.p12 - security import $(agent.tempdirectory)/cert.p12 -k $(agent.tempdirectory)/buildagent.keychain -P "$(macos-developer-certificate-key)" -T /usr/bin/codesign - export CODESIGN_IDENTITY=$(security find-identity -v -p codesigning $(agent.tempdirectory)/buildagent.keychain | grep -oEi "([0-9A-F]{40})" | head -n 1) - security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k pwd $(agent.tempdirectory)/buildagent.keychain - DEBUG=electron-osx-sign* node build/darwin/sign.js $(agent.builddirectory) - displayName: Set Hardened Entitlements - - - script: | - set -e - ARCHIVE_PATH="$(Pipeline.Workspace)/unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive/VSCode-darwin-$(VSCODE_ARCH).zip" - mkdir -p $(dirname $ARCHIVE_PATH) - (cd ../VSCode-darwin-$(VSCODE_ARCH) && zip -Xry $ARCHIVE_PATH *) - echo "##vso[task.setvariable variable=CLIENT_PATH]$ARCHIVE_PATH" - condition: and(succeededOrFailed(), eq(variables['BUILT_CLIENT'], 'true')) - displayName: Package client - - - pwsh: node build/azure-pipelines/common/checkForArtifact.js CLIENT_ARCHIVE_UPLOADED unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive - env: - 
SYSTEM_ACCESSTOKEN: $(System.AccessToken) - displayName: Check for client artifact - - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: $(CLIENT_PATH) - artifactName: unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive - sbomBuildDropPath: $(Agent.BuildDirectory)/VSCode-darwin-$(VSCODE_ARCH) - sbomPackageName: "VS Code macOS $(VSCODE_ARCH) (unsigned)" - sbomPackageVersion: $(Build.SourceVersion) - condition: and(succeeded(), ne(variables['CLIENT_PATH'], ''), eq(variables['CLIENT_ARCHIVE_UPLOADED'], 'false')) - displayName: Publish client archive (unsigned) - - - task: UseDotNet@2 - inputs: - version: 6.x - - - task: EsrpCodeSigning@5 - inputs: - UseMSIAuthentication: true - ConnectedServiceName: vscode-esrp - AppRegistrationClientId: $(ESRP_CLIENT_ID) - AppRegistrationTenantId: $(ESRP_TENANT_ID) - AuthAKVName: vscode-esrp - AuthSignCertName: esrp-sign - FolderPath: . - Pattern: noop - displayName: 'Install ESRP Tooling' - - - pwsh: | - . build/azure-pipelines/win32/exec.ps1 - $ErrorActionPreference = "Stop" - $EsrpCodeSigningTool = (gci -directory -filter EsrpCodeSigning_* $(Agent.RootDirectory)/_tasks | Select-Object -last 1).FullName - $Version = (gci -directory $EsrpCodeSigningTool | Select-Object -last 1).FullName - echo "##vso[task.setvariable variable=EsrpCliDllPath]$Version/net6.0/esrpcli.dll" - displayName: Find ESRP CLI - - - script: npx deemon --detach --wait node build/azure-pipelines/darwin/codesign.js - env: - EsrpCliDllPath: $(EsrpCliDllPath) - SYSTEM_ACCESSTOKEN: $(System.AccessToken) - displayName: โœ๏ธ Codesign & Notarize - - - ${{ if or(eq(parameters.VSCODE_RUN_ELECTRON_TESTS, true), eq(parameters.VSCODE_RUN_BROWSER_TESTS, true), eq(parameters.VSCODE_RUN_REMOTE_TESTS, true)) }}: - - template: product-build-darwin-test.yml@self - parameters: - VSCODE_QUALITY: ${{ parameters.VSCODE_QUALITY }} - VSCODE_TEST_ARTIFACT_NAME: ${{ parameters.VSCODE_TEST_ARTIFACT_NAME }} - VSCODE_RUN_ELECTRON_TESTS: ${{ parameters.VSCODE_RUN_ELECTRON_TESTS 
}} - VSCODE_RUN_BROWSER_TESTS: ${{ parameters.VSCODE_RUN_BROWSER_TESTS }} - VSCODE_RUN_REMOTE_TESTS: ${{ parameters.VSCODE_RUN_REMOTE_TESTS }} - ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}: - PUBLISH_TASK_NAME: 1ES.PublishPipelineArtifact@1 - - - ${{ if and(ne(parameters.VSCODE_CIBUILD, true), ne(parameters.VSCODE_QUALITY, 'oss')) }}: - - script: npx deemon --attach node build/azure-pipelines/darwin/codesign.js - condition: succeededOrFailed() - displayName: "Post-job: โœ๏ธ Codesign & Notarize" - - - script: unzip $(Pipeline.Workspace)/unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive/VSCode-darwin-$(VSCODE_ARCH).zip -d $(Build.ArtifactStagingDirectory)/VSCode-darwin-$(VSCODE_ARCH) - displayName: Extract signed app - - - script: | - set -e - APP_ROOT="$(Build.ArtifactStagingDirectory)/VSCode-darwin-$(VSCODE_ARCH)" - APP_NAME="`ls $APP_ROOT | head -n 1`" - APP_PATH="$APP_ROOT/$APP_NAME" - codesign -dv --deep --verbose=4 "$APP_PATH" - "$APP_PATH/Contents/Resources/app/bin/code" --export-default-configuration=.build - displayName: Verify signature - - - script: mv $(Pipeline.Workspace)/unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive/VSCode-darwin-x64.zip $(Pipeline.Workspace)/unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive/VSCode-darwin.zip - displayName: Rename x64 build to its legacy name - condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64')) - - - task: 1ES.PublishPipelineArtifact@1 - inputs: - ${{ if eq(parameters.VSCODE_ARCH, 'arm64') }}: - targetPath: $(Pipeline.Workspace)/unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive/VSCode-darwin-arm64.zip - ${{ else }}: - targetPath: $(Pipeline.Workspace)/unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive/VSCode-darwin.zip - artifactName: vscode_client_darwin_$(VSCODE_ARCH)_archive - sbomBuildDropPath: $(Build.ArtifactStagingDirectory)/VSCode-darwin-$(VSCODE_ARCH) - sbomPackageName: "VS Code macOS $(VSCODE_ARCH)" - sbomPackageVersion: $(Build.SourceVersion) - displayName: Publish client 
archive - - - script: echo "##vso[task.setvariable variable=ARTIFACT_PREFIX]attempt$(System.JobAttempt)_" - condition: and(succeededOrFailed(), notIn(variables['Agent.JobStatus'], 'Succeeded', 'SucceededWithIssues')) - displayName: Generate artifact prefix - - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: $(SERVER_PATH) - artifactName: $(ARTIFACT_PREFIX)vscode_server_darwin_$(VSCODE_ARCH)_archive-unsigned - sbomBuildDropPath: $(Agent.BuildDirectory)/vscode-server-darwin-$(VSCODE_ARCH) - sbomPackageName: "VS Code macOS $(VSCODE_ARCH) Server" - sbomPackageVersion: $(Build.SourceVersion) - condition: and(succeededOrFailed(), ne(variables['SERVER_PATH'], '')) - displayName: Publish server archive - - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: $(WEB_PATH) - artifactName: $(ARTIFACT_PREFIX)vscode_web_darwin_$(VSCODE_ARCH)_archive-unsigned - sbomBuildDropPath: $(Agent.BuildDirectory)/vscode-server-darwin-$(VSCODE_ARCH)-web - sbomPackageName: "VS Code macOS $(VSCODE_ARCH) Web" - sbomPackageVersion: $(Build.SourceVersion) - condition: and(succeededOrFailed(), ne(variables['WEB_PATH'], '')) - displayName: Publish web server archive diff --git a/build/azure-pipelines/distro-build.yml b/build/azure-pipelines/distro-build.yml deleted file mode 100644 index ae11345b..00000000 --- a/build/azure-pipelines/distro-build.yml +++ /dev/null @@ -1,16 +0,0 @@ -pool: - name: 1es-ubuntu-22.04-x64 - os: linux - -trigger: - branches: - include: ["main", "release/*"] -pr: none - -steps: - - task: NodeTool@0 - inputs: - versionSource: fromFile - versionFilePath: .nvmrc - nodejsMirror: https://github.com/joaomoreno/node-mirror/releases/download - - template: ./distro/download-distro.yml@self diff --git a/build/azure-pipelines/distro/download-distro.yml b/build/azure-pipelines/distro/download-distro.yml deleted file mode 100644 index 5c9ed0e5..00000000 --- a/build/azure-pipelines/distro/download-distro.yml +++ /dev/null @@ -1,55 +0,0 @@ -steps: - - task: 
AzureKeyVault@2 - displayName: "Azure Key Vault: Get Secrets" - inputs: - azureSubscription: vscode - KeyVaultName: vscode-build-secrets - SecretsFilter: "github-distro-mixin-password" - - # TODO@joaomoreno: Keep pwsh once we move out of running entire jobs in containers - - pwsh: | - "machine github.com`nlogin vscode`npassword $(github-distro-mixin-password)" | Out-File "$Home/_netrc" -Encoding ASCII - condition: and(succeeded(), contains(variables['Agent.OS'], 'windows')) - displayName: Setup distro auth (Windows) - - - pwsh: | - $ErrorActionPreference = "Stop" - $ArchivePath = "$(Agent.TempDirectory)/distro.zip" - $PackageJson = Get-Content -Path package.json -Raw | ConvertFrom-Json - $DistroVersion = $PackageJson.distro - - Invoke-WebRequest -Uri "https://api.github.com/repos/microsoft/vscode-distro/zipball/$DistroVersion" ` - -OutFile $ArchivePath ` - -Headers @{ "Accept" = "application/vnd.github+json"; "Authorization" = "Bearer $(github-distro-mixin-password)"; "X-GitHub-Api-Version" = "2022-11-28" } - - New-Item -ItemType Directory -Path .build -Force - Expand-Archive -Path $ArchivePath -DestinationPath .build - Rename-Item -Path ".build/microsoft-vscode-distro-$DistroVersion" -NewName distro - condition: and(succeeded(), contains(variables['Agent.OS'], 'windows')) - displayName: Download distro (Windows) - - - script: | - mkdir -p .build - cat << EOF | tee ~/.netrc .build/.netrc > /dev/null - machine github.com - login vscode - password $(github-distro-mixin-password) - EOF - condition: and(succeeded(), not(contains(variables['Agent.OS'], 'windows'))) - displayName: Setup distro auth (non-Windows) - - - script: | - set -e - ArchivePath="$(Agent.TempDirectory)/distro.zip" - DistroVersion=$(node -p "require('./package.json').distro") - - curl -H "Accept: application/vnd.github+json" \ - -H "Authorization: Bearer $(github-distro-mixin-password)" \ - -H "X-GitHub-Api-Version: 2022-11-28" \ - -o $ArchivePath \ - -L 
"https://api.github.com/repos/microsoft/vscode-distro/zipball/$DistroVersion" - - unzip $ArchivePath -d .build - mv .build/microsoft-vscode-distro-$DistroVersion .build/distro - condition: and(succeeded(), not(contains(variables['Agent.OS'], 'windows'))) - displayName: Download distro (non-Windows) diff --git a/build/azure-pipelines/distro/mixin-npm.js b/build/azure-pipelines/distro/mixin-npm.js deleted file mode 100644 index 87958a5d..00000000 --- a/build/azure-pipelines/distro/mixin-npm.js +++ /dev/null @@ -1,38 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const fs_1 = __importDefault(require("fs")); -const path_1 = __importDefault(require("path")); -const { dirs } = require('../../npm/dirs'); -function log(...args) { - console.log(`[${new Date().toLocaleTimeString('en', { hour12: false })}]`, '[distro]', ...args); -} -function mixin(mixinPath) { - if (!fs_1.default.existsSync(`${mixinPath}/node_modules`)) { - log(`Skipping distro npm dependencies: ${mixinPath} (no node_modules)`); - return; - } - log(`Mixing in distro npm dependencies: ${mixinPath}`); - const distroPackageJson = JSON.parse(fs_1.default.readFileSync(`${mixinPath}/package.json`, 'utf8')); - const targetPath = path_1.default.relative('.build/distro/npm', mixinPath); - for (const dependency of Object.keys(distroPackageJson.dependencies)) { - fs_1.default.rmSync(`./${targetPath}/node_modules/${dependency}`, { recursive: true, force: true }); - fs_1.default.cpSync(`${mixinPath}/node_modules/${dependency}`, `./${targetPath}/node_modules/${dependency}`, { recursive: true, force: true, dereference: true }); - } - log(`Mixed in distro npm dependencies: ${mixinPath} โœ”๏ธŽ`); -} -function main() { - log(`Mixing in distro npm dependencies...`); - const mixinPaths = dirs.filter(d => /^.build\/distro\/npm/.test(d)); - for (const mixinPath of mixinPaths) { - mixin(mixinPath); - } -} -main(); -//# sourceMappingURL=mixin-npm.js.map \ No newline at end of file diff --git a/build/azure-pipelines/distro/mixin-npm.ts b/build/azure-pipelines/distro/mixin-npm.ts deleted file mode 100644 index 6e32f10d..00000000 --- a/build/azure-pipelines/distro/mixin-npm.ts +++ /dev/null @@ -1,43 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ - -import fs from 'fs'; -import path from 'path'; -const { dirs } = require('../../npm/dirs') as { dirs: string[] }; - -function log(...args: any[]): void { - console.log(`[${new Date().toLocaleTimeString('en', { hour12: false })}]`, '[distro]', ...args); -} - -function mixin(mixinPath: string) { - if (!fs.existsSync(`${mixinPath}/node_modules`)) { - log(`Skipping distro npm dependencies: ${mixinPath} (no node_modules)`); - return; - } - - log(`Mixing in distro npm dependencies: ${mixinPath}`); - - const distroPackageJson = JSON.parse(fs.readFileSync(`${mixinPath}/package.json`, 'utf8')); - const targetPath = path.relative('.build/distro/npm', mixinPath); - - for (const dependency of Object.keys(distroPackageJson.dependencies)) { - fs.rmSync(`./${targetPath}/node_modules/${dependency}`, { recursive: true, force: true }); - fs.cpSync(`${mixinPath}/node_modules/${dependency}`, `./${targetPath}/node_modules/${dependency}`, { recursive: true, force: true, dereference: true }); - } - - log(`Mixed in distro npm dependencies: ${mixinPath} โœ”๏ธŽ`); -} - -function main() { - log(`Mixing in distro npm dependencies...`); - - const mixinPaths = dirs.filter(d => /^.build\/distro\/npm/.test(d)); - - for (const mixinPath of mixinPaths) { - mixin(mixinPath); - } -} - -main(); diff --git a/build/azure-pipelines/distro/mixin-quality.js b/build/azure-pipelines/distro/mixin-quality.js deleted file mode 100644 index 335f63ca..00000000 --- a/build/azure-pipelines/distro/mixin-quality.js +++ /dev/null @@ -1,56 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const fs_1 = __importDefault(require("fs")); -const path_1 = __importDefault(require("path")); -function log(...args) { - console.log(`[${new Date().toLocaleTimeString('en', { hour12: false })}]`, '[distro]', ...args); -} -function main() { - const quality = process.env['VSCODE_QUALITY']; - if (!quality) { - throw new Error('Missing VSCODE_QUALITY, skipping mixin'); - } - log(`Mixing in distro quality...`); - const basePath = `.build/distro/mixin/${quality}`; - for (const name of fs_1.default.readdirSync(basePath)) { - const distroPath = path_1.default.join(basePath, name); - const ossPath = path_1.default.relative(basePath, distroPath); - if (ossPath === 'product.json') { - const distro = JSON.parse(fs_1.default.readFileSync(distroPath, 'utf8')); - const oss = JSON.parse(fs_1.default.readFileSync(ossPath, 'utf8')); - let builtInExtensions = oss.builtInExtensions; - if (Array.isArray(distro.builtInExtensions)) { - log('Overwriting built-in extensions:', distro.builtInExtensions.map(e => e.name)); - builtInExtensions = distro.builtInExtensions; - } - else if (distro.builtInExtensions) { - const include = distro.builtInExtensions['include'] ?? []; - const exclude = distro.builtInExtensions['exclude'] ?? 
[]; - log('OSS built-in extensions:', builtInExtensions.map(e => e.name)); - log('Including built-in extensions:', include.map(e => e.name)); - log('Excluding built-in extensions:', exclude); - builtInExtensions = builtInExtensions.filter(ext => !include.find(e => e.name === ext.name) && !exclude.find(name => name === ext.name)); - builtInExtensions = [...builtInExtensions, ...include]; - log('Final built-in extensions:', builtInExtensions.map(e => e.name)); - } - else { - log('Inheriting OSS built-in extensions', builtInExtensions.map(e => e.name)); - } - const result = { webBuiltInExtensions: oss.webBuiltInExtensions, ...distro, builtInExtensions }; - fs_1.default.writeFileSync(ossPath, JSON.stringify(result, null, '\t'), 'utf8'); - } - else { - fs_1.default.cpSync(distroPath, ossPath, { force: true, recursive: true }); - } - log(distroPath, 'โœ”๏ธŽ'); - } -} -main(); -//# sourceMappingURL=mixin-quality.js.map \ No newline at end of file diff --git a/build/azure-pipelines/distro/mixin-quality.ts b/build/azure-pipelines/distro/mixin-quality.ts deleted file mode 100644 index 29c90f00..00000000 --- a/build/azure-pipelines/distro/mixin-quality.ts +++ /dev/null @@ -1,80 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ - -import fs from 'fs'; -import path from 'path'; - -interface IBuiltInExtension { - readonly name: string; - readonly version: string; - readonly repo: string; - readonly metadata: any; -} - -interface OSSProduct { - readonly builtInExtensions: IBuiltInExtension[]; - readonly webBuiltInExtensions?: IBuiltInExtension[]; -} - -interface Product { - readonly builtInExtensions?: IBuiltInExtension[] | { 'include'?: IBuiltInExtension[]; 'exclude'?: string[] }; - readonly webBuiltInExtensions?: IBuiltInExtension[]; -} - -function log(...args: any[]): void { - console.log(`[${new Date().toLocaleTimeString('en', { hour12: false })}]`, '[distro]', ...args); -} - -function main() { - const quality = process.env['VSCODE_QUALITY']; - - if (!quality) { - throw new Error('Missing VSCODE_QUALITY, skipping mixin'); - } - - log(`Mixing in distro quality...`); - - const basePath = `.build/distro/mixin/${quality}`; - - for (const name of fs.readdirSync(basePath)) { - const distroPath = path.join(basePath, name); - const ossPath = path.relative(basePath, distroPath); - - if (ossPath === 'product.json') { - const distro = JSON.parse(fs.readFileSync(distroPath, 'utf8')) as Product; - const oss = JSON.parse(fs.readFileSync(ossPath, 'utf8')) as OSSProduct; - let builtInExtensions = oss.builtInExtensions; - - if (Array.isArray(distro.builtInExtensions)) { - log('Overwriting built-in extensions:', distro.builtInExtensions.map(e => e.name)); - - builtInExtensions = distro.builtInExtensions; - } else if (distro.builtInExtensions) { - const include = distro.builtInExtensions['include'] ?? []; - const exclude = distro.builtInExtensions['exclude'] ?? 
[]; - - log('OSS built-in extensions:', builtInExtensions.map(e => e.name)); - log('Including built-in extensions:', include.map(e => e.name)); - log('Excluding built-in extensions:', exclude); - - builtInExtensions = builtInExtensions.filter(ext => !include.find(e => e.name === ext.name) && !exclude.find(name => name === ext.name)); - builtInExtensions = [...builtInExtensions, ...include]; - - log('Final built-in extensions:', builtInExtensions.map(e => e.name)); - } else { - log('Inheriting OSS built-in extensions', builtInExtensions.map(e => e.name)); - } - - const result = { webBuiltInExtensions: oss.webBuiltInExtensions, ...distro, builtInExtensions }; - fs.writeFileSync(ossPath, JSON.stringify(result, null, '\t'), 'utf8'); - } else { - fs.cpSync(distroPath, ossPath, { force: true, recursive: true }); - } - - log(distroPath, 'โœ”๏ธŽ'); - } -} - -main(); diff --git a/build/azure-pipelines/linux/.gitignore b/build/azure-pipelines/linux/.gitignore deleted file mode 100644 index 0f46fa70..00000000 --- a/build/azure-pipelines/linux/.gitignore +++ /dev/null @@ -1 +0,0 @@ -pat \ No newline at end of file diff --git a/build/azure-pipelines/linux/apt-retry.sh b/build/azure-pipelines/linux/apt-retry.sh deleted file mode 100755 index 358f34cd..00000000 --- a/build/azure-pipelines/linux/apt-retry.sh +++ /dev/null @@ -1,42 +0,0 @@ -#!/bin/sh -################################################################################ -## Copied from https://github.com/actions/runner-images/blob/ubuntu22/20240825.1/images/ubuntu/scripts/build/configure-apt-mock.sh -################################################################################ - -i=1 -while [ $i -le 30 ];do - err=$(mktemp) - "$@" 2>$err - - # no errors, break the loop and continue normal flow - test -f $err || break - cat $err >&2 - - retry=false - - if grep -q 'Could not get lock' $err;then - # apt db locked needs retry - retry=true - elif grep -q 'Could not open file /var/lib/apt/lists' $err;then - # apt update is 
not completed, needs retry - retry=true - elif grep -q 'IPC connect call failed' $err;then - # the delay should help with gpg-agent not ready - retry=true - elif grep -q 'Temporary failure in name resolution' $err;then - # It looks like DNS is not updated with random generated hostname yet - retry=true - elif grep -q 'dpkg frontend is locked by another process' $err;then - # dpkg process is busy by another process - retry=true - fi - - rm $err - if [ $retry = false ]; then - break - fi - - sleep 5 - echo "...retry $i" - i=$((i + 1)) -done diff --git a/build/azure-pipelines/linux/build-snap.sh b/build/azure-pipelines/linux/build-snap.sh deleted file mode 100755 index 144f41ca..00000000 --- a/build/azure-pipelines/linux/build-snap.sh +++ /dev/null @@ -1,23 +0,0 @@ -#!/usr/bin/env bash -set -e - -# Get snapcraft version -snapcraft --version - -# Make sure we get latest packages -sudo apt-get update -sudo apt-get upgrade -y -sudo apt-get install -y curl apt-transport-https ca-certificates - -# Define variables -SNAP_ROOT="$(pwd)/.build/linux/snap/$VSCODE_ARCH" - -# Create snap package -BUILD_VERSION="$(date +%s)" -SNAP_FILENAME="code-$VSCODE_QUALITY-$VSCODE_ARCH-$BUILD_VERSION.snap" -SNAP_PATH="$SNAP_ROOT/$SNAP_FILENAME" -case $VSCODE_ARCH in - x64) SNAPCRAFT_TARGET_ARGS="" ;; - *) SNAPCRAFT_TARGET_ARGS="--target-arch $VSCODE_ARCH" ;; -esac -(cd $SNAP_ROOT/code-* && sudo --preserve-env snapcraft snap $SNAPCRAFT_TARGET_ARGS --output "$SNAP_PATH") diff --git a/build/azure-pipelines/linux/cli-build-linux.yml b/build/azure-pipelines/linux/cli-build-linux.yml deleted file mode 100644 index dba94939..00000000 --- a/build/azure-pipelines/linux/cli-build-linux.yml +++ /dev/null @@ -1,159 +0,0 @@ -parameters: - - name: VSCODE_BUILD_LINUX - type: boolean - default: false - - name: VSCODE_BUILD_LINUX_ARM64 - type: boolean - default: false - - name: VSCODE_BUILD_LINUX_ARMHF - type: boolean - default: false - - name: VSCODE_CHECK_ONLY - type: boolean - default: false - - name: 
VSCODE_QUALITY - type: string - -steps: - - task: NodeTool@0 - inputs: - versionSource: fromFile - versionFilePath: .nvmrc - nodejsMirror: https://github.com/joaomoreno/node-mirror/releases/download - - - ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}: - - template: ../cli/cli-apply-patches.yml@self - - - task: Npm@1 - displayName: Download openssl prebuilt - inputs: - command: custom - customCommand: pack @vscode-internal/openssl-prebuilt@0.0.11 - customRegistry: useFeed - customFeed: "Monaco/openssl-prebuilt" - workingDir: $(Build.ArtifactStagingDirectory) - - - script: | - set -e - mkdir $(Build.ArtifactStagingDirectory)/openssl - tar -xvzf $(Build.ArtifactStagingDirectory)/vscode-internal-openssl-prebuilt-0.0.11.tgz --strip-components=1 --directory=$(Build.ArtifactStagingDirectory)/openssl - displayName: Extract openssl prebuilt - - - ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}: - - script: node build/setup-npm-registry.js $NPM_REGISTRY build - condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM Registry - - - script: | - set -e - # Set the private NPM registry to the global npmrc file - # so that authentication works for subfolders like build/, remote/, extensions/ etc - # which does not have their own .npmrc file - npm config set registry "$NPM_REGISTRY" - echo "##vso[task.setvariable variable=NPMRC_PATH]$(npm config get userconfig)" - condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM - - - task: npmAuthenticate@0 - inputs: - workingFile: $(NPMRC_PATH) - condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM Authentication - - - script: | - set -e - - for i in {1..5}; do # try 5 times - npm ci && break - if [ $i -eq 5 ]; then - echo "Npm install failed too many times" >&2 - exit 1 - fi - echo "Npm install failed $i, trying again..." 
- done - workingDirectory: build - env: - GITHUB_TOKEN: "$(github-distro-mixin-password)" - displayName: Install build dependencies - - - script: | - set -e - mkdir -p $(Build.SourcesDirectory)/.build - displayName: Create .build folder for misc dependencies - - - template: ../cli/install-rust-posix.yml@self - parameters: - targets: - - ${{ if eq(parameters.VSCODE_BUILD_LINUX_ARM64, true) }}: - - aarch64-unknown-linux-gnu - - ${{ if eq(parameters.VSCODE_BUILD_LINUX, true) }}: - - x86_64-unknown-linux-gnu - - ${{ if eq(parameters.VSCODE_BUILD_LINUX_ARMHF, true) }}: - - armv7-unknown-linux-gnueabihf - - - ${{ if eq(parameters.VSCODE_BUILD_LINUX_ARM64, true) }}: - - template: ../cli/cli-compile.yml@self - parameters: - VSCODE_QUALITY: ${{ parameters.VSCODE_QUALITY }} - VSCODE_CLI_TARGET: aarch64-unknown-linux-gnu - VSCODE_CLI_ARTIFACT: vscode_cli_linux_arm64_cli - VSCODE_CHECK_ONLY: ${{ parameters.VSCODE_CHECK_ONLY }} - VSCODE_CLI_ENV: - OPENSSL_LIB_DIR: $(Build.ArtifactStagingDirectory)/openssl/arm64-linux/lib - OPENSSL_INCLUDE_DIR: $(Build.ArtifactStagingDirectory)/openssl/arm64-linux/include - SYSROOT_ARCH: arm64 - - - ${{ if eq(parameters.VSCODE_BUILD_LINUX, true) }}: - - template: ../cli/cli-compile.yml@self - parameters: - VSCODE_QUALITY: ${{ parameters.VSCODE_QUALITY }} - VSCODE_CLI_TARGET: x86_64-unknown-linux-gnu - VSCODE_CLI_ARTIFACT: vscode_cli_linux_x64_cli - VSCODE_CHECK_ONLY: ${{ parameters.VSCODE_CHECK_ONLY }} - VSCODE_CLI_ENV: - OPENSSL_LIB_DIR: $(Build.ArtifactStagingDirectory)/openssl/x64-linux/lib - OPENSSL_INCLUDE_DIR: $(Build.ArtifactStagingDirectory)/openssl/x64-linux/include - SYSROOT_ARCH: amd64 - - - ${{ if eq(parameters.VSCODE_BUILD_LINUX_ARMHF, true) }}: - - template: ../cli/cli-compile.yml@self - parameters: - VSCODE_QUALITY: ${{ parameters.VSCODE_QUALITY }} - VSCODE_CLI_TARGET: armv7-unknown-linux-gnueabihf - VSCODE_CLI_ARTIFACT: vscode_cli_linux_armhf_cli - VSCODE_CHECK_ONLY: ${{ parameters.VSCODE_CHECK_ONLY }} - VSCODE_CLI_ENV: - 
OPENSSL_LIB_DIR: $(Build.ArtifactStagingDirectory)/openssl/arm-linux/lib - OPENSSL_INCLUDE_DIR: $(Build.ArtifactStagingDirectory)/openssl/arm-linux/include - SYSROOT_ARCH: armhf - - - ${{ if not(parameters.VSCODE_CHECK_ONLY) }}: - - ${{ if eq(parameters.VSCODE_BUILD_LINUX_ARMHF, true) }}: - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: $(Build.ArtifactStagingDirectory)/vscode_cli_linux_armhf_cli.tar.gz - artifactName: vscode_cli_linux_armhf_cli - sbomBuildDropPath: $(Build.ArtifactStagingDirectory)/cli - sbomPackageName: "VS Code Linux armhf CLI" - sbomPackageVersion: $(Build.SourceVersion) - displayName: Publish vscode_cli_linux_armhf_cli artifact - - - ${{ if eq(parameters.VSCODE_BUILD_LINUX, true) }}: - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: $(Build.ArtifactStagingDirectory)/vscode_cli_linux_x64_cli.tar.gz - artifactName: vscode_cli_linux_x64_cli - sbomBuildDropPath: $(Build.ArtifactStagingDirectory)/cli - sbomPackageName: "VS Code Linux x64 CLI" - sbomPackageVersion: $(Build.SourceVersion) - displayName: Publish vscode_cli_linux_x64_cli artifact - - - ${{ if eq(parameters.VSCODE_BUILD_LINUX_ARM64, true) }}: - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: $(Build.ArtifactStagingDirectory)/vscode_cli_linux_arm64_cli.tar.gz - artifactName: vscode_cli_linux_arm64_cli - sbomBuildDropPath: $(Build.ArtifactStagingDirectory)/cli - sbomPackageName: "VS Code Linux arm64 CLI" - sbomPackageVersion: $(Build.SourceVersion) - displayName: Publish vscode_cli_linux_arm64_cli artifact diff --git a/build/azure-pipelines/linux/codesign.js b/build/azure-pipelines/linux/codesign.js deleted file mode 100644 index 98b97db5..00000000 --- a/build/azure-pipelines/linux/codesign.js +++ /dev/null @@ -1,29 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. 
See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -Object.defineProperty(exports, "__esModule", { value: true }); -const codesign_1 = require("../common/codesign"); -const publish_1 = require("../common/publish"); -async function main() { - const esrpCliDLLPath = (0, publish_1.e)('EsrpCliDllPath'); - // Start the code sign processes in parallel - // 1. Codesign deb package - // 2. Codesign rpm package - const codesignTask1 = (0, codesign_1.spawnCodesignProcess)(esrpCliDLLPath, 'sign-pgp', '.build/linux/deb', '*.deb'); - const codesignTask2 = (0, codesign_1.spawnCodesignProcess)(esrpCliDLLPath, 'sign-pgp', '.build/linux/rpm', '*.rpm'); - // Codesign deb package - (0, codesign_1.printBanner)('Codesign deb package'); - await (0, codesign_1.streamProcessOutputAndCheckResult)('Codesign deb package', codesignTask1); - // Codesign rpm package - (0, codesign_1.printBanner)('Codesign rpm package'); - await (0, codesign_1.streamProcessOutputAndCheckResult)('Codesign rpm package', codesignTask2); -} -main().then(() => { - process.exit(0); -}, err => { - console.error(`ERROR: ${err}`); - process.exit(1); -}); -//# sourceMappingURL=codesign.js.map \ No newline at end of file diff --git a/build/azure-pipelines/linux/codesign.ts b/build/azure-pipelines/linux/codesign.ts deleted file mode 100644 index 1f74cc21..00000000 --- a/build/azure-pipelines/linux/codesign.ts +++ /dev/null @@ -1,32 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ - -import { printBanner, spawnCodesignProcess, streamProcessOutputAndCheckResult } from '../common/codesign'; -import { e } from '../common/publish'; - -async function main() { - const esrpCliDLLPath = e('EsrpCliDllPath'); - - // Start the code sign processes in parallel - // 1. Codesign deb package - // 2. Codesign rpm package - const codesignTask1 = spawnCodesignProcess(esrpCliDLLPath, 'sign-pgp', '.build/linux/deb', '*.deb'); - const codesignTask2 = spawnCodesignProcess(esrpCliDLLPath, 'sign-pgp', '.build/linux/rpm', '*.rpm'); - - // Codesign deb package - printBanner('Codesign deb package'); - await streamProcessOutputAndCheckResult('Codesign deb package', codesignTask1); - - // Codesign rpm package - printBanner('Codesign rpm package'); - await streamProcessOutputAndCheckResult('Codesign rpm package', codesignTask2); -} - -main().then(() => { - process.exit(0); -}, err => { - console.error(`ERROR: ${err}`); - process.exit(1); -}); diff --git a/build/azure-pipelines/linux/product-build-linux-test.yml b/build/azure-pipelines/linux/product-build-linux-test.yml deleted file mode 100644 index 7e932535..00000000 --- a/build/azure-pipelines/linux/product-build-linux-test.yml +++ /dev/null @@ -1,254 +0,0 @@ -parameters: - - name: VSCODE_QUALITY - type: string - - name: VSCODE_RUN_ELECTRON_TESTS - type: boolean - - name: VSCODE_RUN_BROWSER_TESTS - type: boolean - - name: VSCODE_RUN_REMOTE_TESTS - type: boolean - - name: VSCODE_TEST_ARTIFACT_NAME - type: string - - name: PUBLISH_TASK_NAME - type: string - default: PublishPipelineArtifact@0 - -steps: - - script: npm exec -- npm-run-all -lp "electron $(VSCODE_ARCH)" "playwright-install" - env: - GITHUB_TOKEN: "$(github-distro-mixin-password)" - displayName: Download Electron and Playwright - retryCountOnTaskFailure: 3 - - - ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}: - - script: | - set -e - 
APP_ROOT=$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH) - ELECTRON_ROOT=.build/electron - sudo chown root $APP_ROOT/chrome-sandbox - sudo chown root $ELECTRON_ROOT/chrome-sandbox - sudo chmod 4755 $APP_ROOT/chrome-sandbox - sudo chmod 4755 $ELECTRON_ROOT/chrome-sandbox - stat $APP_ROOT/chrome-sandbox - stat $ELECTRON_ROOT/chrome-sandbox - displayName: Change setuid helper binary permission - - - ${{ if eq(parameters.VSCODE_QUALITY, 'oss') }}: - - ${{ if eq(parameters.VSCODE_RUN_ELECTRON_TESTS, true) }}: - - script: ./scripts/test.sh --tfs "Unit Tests" - env: - DISPLAY: ":10" - displayName: ๐Ÿงช Run unit tests (Electron) - timeoutInMinutes: 15 - - script: npm run test-node - displayName: ๐Ÿงช Run unit tests (node.js) - timeoutInMinutes: 15 - - - ${{ if eq(parameters.VSCODE_RUN_BROWSER_TESTS, true) }}: - - script: npm run test-browser-no-install -- --browser chromium --tfs "Browser Unit Tests" - env: - DEBUG: "*browser*" - displayName: ๐Ÿงช Run unit tests (Browser, Chromium) - timeoutInMinutes: 15 - - - ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}: - - ${{ if eq(parameters.VSCODE_RUN_ELECTRON_TESTS, true) }}: - - script: ./scripts/test.sh --build --tfs "Unit Tests" - displayName: ๐Ÿงช Run unit tests (Electron) - timeoutInMinutes: 15 - - script: npm run test-node -- --build - displayName: ๐Ÿงช Run unit tests (node.js) - timeoutInMinutes: 15 - - - ${{ if eq(parameters.VSCODE_RUN_BROWSER_TESTS, true) }}: - - script: npm run test-browser-no-install -- --build --browser chromium --tfs "Browser Unit Tests" - env: - DEBUG: "*browser*" - displayName: ๐Ÿงช Run unit tests (Browser, Chromium) - timeoutInMinutes: 15 - - - script: | - set -e - npm run gulp \ - compile-extension:configuration-editing \ - compile-extension:css-language-features-server \ - compile-extension:emmet \ - compile-extension:git \ - compile-extension:github-authentication \ - compile-extension:html-language-features-server \ - compile-extension:ipynb \ - compile-extension:notebook-renderers \ - 
compile-extension:json-language-features-server \ - compile-extension:markdown-language-features \ - compile-extension-media \ - compile-extension:microsoft-authentication \ - compile-extension:typescript-language-features \ - compile-extension:vscode-api-tests \ - compile-extension:vscode-colorize-tests \ - compile-extension:vscode-colorize-perf-tests \ - compile-extension:vscode-test-resolver - displayName: Build integration tests - - - ${{ if eq(parameters.VSCODE_QUALITY, 'oss') }}: - - ${{ if eq(parameters.VSCODE_RUN_ELECTRON_TESTS, true) }}: - - script: ./scripts/test-integration.sh --tfs "Integration Tests" - env: - DISPLAY: ":10" - displayName: ๐Ÿงช Run integration tests (Electron) - timeoutInMinutes: 20 - - - ${{ if eq(parameters.VSCODE_RUN_BROWSER_TESTS, true) }}: - - script: ./scripts/test-web-integration.sh --browser chromium - displayName: ๐Ÿงช Run integration tests (Browser, Chromium) - timeoutInMinutes: 20 - - - ${{ if eq(parameters.VSCODE_RUN_REMOTE_TESTS, true) }}: - - script: ./scripts/test-remote-integration.sh - displayName: ๐Ÿงช Run integration tests (Remote) - timeoutInMinutes: 20 - - - ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}: - - ${{ if eq(parameters.VSCODE_RUN_ELECTRON_TESTS, true) }}: - - script: | - # Figure out the full absolute path of the product we just built - # including the remote server and configure the integration tests - # to run with these builds instead of running out of sources. 
- set -e - APP_ROOT=$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH) - APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName") - INTEGRATION_TEST_APP_NAME="$APP_NAME" \ - INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \ - ./scripts/test-integration.sh --build --tfs "Integration Tests" - env: - VSCODE_REMOTE_SERVER_PATH: $(agent.builddirectory)/vscode-server-linux-$(VSCODE_ARCH) - displayName: ๐Ÿงช Run integration tests (Electron) - timeoutInMinutes: 20 - - - ${{ if eq(parameters.VSCODE_RUN_BROWSER_TESTS, true) }}: - - script: ./scripts/test-web-integration.sh --browser chromium - env: - VSCODE_REMOTE_SERVER_PATH: $(agent.builddirectory)/vscode-server-linux-$(VSCODE_ARCH)-web - displayName: ๐Ÿงช Run integration tests (Browser, Chromium) - timeoutInMinutes: 20 - - - ${{ if eq(parameters.VSCODE_RUN_REMOTE_TESTS, true) }}: - - script: | - set -e - APP_ROOT=$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH) - APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName") - INTEGRATION_TEST_APP_NAME="$APP_NAME" \ - INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \ - ./scripts/test-remote-integration.sh - env: - VSCODE_REMOTE_SERVER_PATH: $(agent.builddirectory)/vscode-server-linux-$(VSCODE_ARCH) - displayName: ๐Ÿงช Run integration tests (Remote) - timeoutInMinutes: 20 - - - script: | - set -e - ps -ef - cat /proc/sys/fs/inotify/max_user_watches - lsof | wc -l - displayName: Diagnostics before smoke test run (processes, max_user_watches, number of opened file handles) - continueOnError: true - condition: succeededOrFailed() - - - ${{ if eq(parameters.VSCODE_QUALITY, 'oss') }}: - - script: npm run compile - workingDirectory: test/smoke - displayName: Compile smoke tests - - - script: npm run gulp node - displayName: Download node.js for remote smoke tests - retryCountOnTaskFailure: 3 - - - ${{ if eq(parameters.VSCODE_RUN_ELECTRON_TESTS, true) }}: - - script: npm run smoketest-no-compile -- --tracing - 
timeoutInMinutes: 20 - displayName: ๐Ÿงช Run smoke tests (Electron) - - - ${{ if eq(parameters.VSCODE_RUN_BROWSER_TESTS, true) }}: - - script: npm run smoketest-no-compile -- --web --tracing --headless - timeoutInMinutes: 20 - displayName: ๐Ÿงช Run smoke tests (Browser, Chromium) - - - ${{ if eq(parameters.VSCODE_RUN_REMOTE_TESTS, true) }}: - - script: npm run smoketest-no-compile -- --remote --tracing - timeoutInMinutes: 20 - displayName: ๐Ÿงช Run smoke tests (Remote) - - - ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}: - - ${{ if eq(parameters.VSCODE_RUN_ELECTRON_TESTS, true) }}: - - script: npm run smoketest-no-compile -- --tracing --build "$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)" - timeoutInMinutes: 20 - displayName: ๐Ÿงช Run smoke tests (Electron) - - - ${{ if eq(parameters.VSCODE_RUN_BROWSER_TESTS, true) }}: - - script: npm run smoketest-no-compile -- --web --tracing --headless - env: - VSCODE_REMOTE_SERVER_PATH: $(agent.builddirectory)/vscode-server-linux-$(VSCODE_ARCH)-web - timeoutInMinutes: 20 - displayName: ๐Ÿงช Run smoke tests (Browser, Chromium) - - - ${{ if eq(parameters.VSCODE_RUN_REMOTE_TESTS, true) }}: - - script: | - set -e - APP_PATH=$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH) - VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-server-linux-$(VSCODE_ARCH)" \ - npm run smoketest-no-compile -- --tracing --remote --build "$APP_PATH" - timeoutInMinutes: 20 - displayName: ๐Ÿงช Run smoke tests (Remote) - - - script: | - set -e - ps -ef - cat /proc/sys/fs/inotify/max_user_watches - lsof | wc -l - displayName: Diagnostics after smoke test run (processes, max_user_watches, number of opened file handles) - continueOnError: true - condition: succeededOrFailed() - - - task: ${{ parameters.PUBLISH_TASK_NAME }} - inputs: - targetPath: .build/crashes - ${{ if eq(parameters.VSCODE_TEST_ARTIFACT_NAME, '') }}: - artifactName: crash-dump-linux-$(VSCODE_ARCH)-$(System.JobAttempt) - ${{ else }}: - artifactName: 
crash-dump-linux-$(VSCODE_ARCH)-${{ parameters.VSCODE_TEST_ARTIFACT_NAME }}-$(System.JobAttempt) - sbomEnabled: false - displayName: "Publish Crash Reports" - continueOnError: true - condition: failed() - - # In order to properly symbolify above crash reports - # (if any), we need the compiled native modules too - - task: ${{ parameters.PUBLISH_TASK_NAME }} - inputs: - targetPath: node_modules - ${{ if eq(parameters.VSCODE_TEST_ARTIFACT_NAME, '') }}: - artifactName: node-modules-linux-$(VSCODE_ARCH)-$(System.JobAttempt) - ${{ else }}: - artifactName: node-modules-linux-$(VSCODE_ARCH)-${{ parameters.VSCODE_TEST_ARTIFACT_NAME }}-$(System.JobAttempt) - sbomEnabled: false - displayName: "Publish Node Modules" - continueOnError: true - condition: failed() - - - task: ${{ parameters.PUBLISH_TASK_NAME }} - inputs: - targetPath: .build/logs - ${{ if eq(parameters.VSCODE_TEST_ARTIFACT_NAME, '') }}: - artifactName: logs-linux-$(VSCODE_ARCH)-$(System.JobAttempt) - ${{ else }}: - artifactName: logs-linux-$(VSCODE_ARCH)-${{ parameters.VSCODE_TEST_ARTIFACT_NAME }}-$(System.JobAttempt) - sbomEnabled: false - displayName: "Publish Log Files" - continueOnError: true - condition: succeededOrFailed() - - - task: PublishTestResults@2 - displayName: Publish Tests Results - inputs: - testResultsFiles: "*-results.xml" - searchFolder: "$(Build.ArtifactStagingDirectory)/test-results" - condition: succeededOrFailed() diff --git a/build/azure-pipelines/linux/product-build-linux.yml b/build/azure-pipelines/linux/product-build-linux.yml deleted file mode 100644 index dcf3964e..00000000 --- a/build/azure-pipelines/linux/product-build-linux.yml +++ /dev/null @@ -1,446 +0,0 @@ -parameters: - - name: VSCODE_QUALITY - type: string - - name: VSCODE_ARCH - type: string - - name: VSCODE_CIBUILD - type: boolean - - name: VSCODE_RUN_ELECTRON_TESTS - type: boolean - default: false - - name: VSCODE_RUN_BROWSER_TESTS - type: boolean - default: false - - name: VSCODE_RUN_REMOTE_TESTS - type: boolean - 
default: false - - name: VSCODE_TEST_ARTIFACT_NAME - type: string - default: "" - -steps: - - ${{ if eq(parameters.VSCODE_QUALITY, 'oss') }}: - - checkout: self - fetchDepth: 1 - retryCountOnTaskFailure: 3 - - - task: NodeTool@0 - inputs: - versionSource: fromFile - versionFilePath: .nvmrc - nodejsMirror: https://github.com/joaomoreno/node-mirror/releases/download - - - ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}: - - template: ../distro/download-distro.yml@self - - - task: AzureKeyVault@2 - displayName: "Azure Key Vault: Get Secrets" - inputs: - azureSubscription: vscode - KeyVaultName: vscode-build-secrets - SecretsFilter: "github-distro-mixin-password" - - - task: DownloadPipelineArtifact@2 - inputs: - artifact: Compilation - path: $(Build.ArtifactStagingDirectory) - displayName: Download compilation output - - - script: tar -xzf $(Build.ArtifactStagingDirectory)/compilation.tar.gz - displayName: Extract compilation output - - - script: | - set -e - # Start X server - ./build/azure-pipelines/linux/apt-retry.sh sudo apt-get update - ./build/azure-pipelines/linux/apt-retry.sh sudo apt-get install -y pkg-config \ - xvfb \ - libgtk-3-0 \ - libxkbfile-dev \ - libkrb5-dev \ - libgbm1 \ - rpm - sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb - sudo chmod +x /etc/init.d/xvfb - sudo update-rc.d xvfb defaults - sudo service xvfb start - displayName: Setup system services - - - script: node build/setup-npm-registry.js $NPM_REGISTRY - condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM Registry - - - script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js linux $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash - displayName: Prepare node_modules cache key - - - task: Cache@2 - inputs: - key: '"node_modules" | .build/packagelockhash' - path: .build/node_modules_cache - cacheHitVar: NODE_MODULES_RESTORED - displayName: Restore node_modules cache - - - script: tar -xzf 
.build/node_modules_cache/cache.tgz - condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true')) - displayName: Extract node_modules cache - - - script: | - set -e - # Set the private NPM registry to the global npmrc file - # so that authentication works for subfolders like build/, remote/, extensions/ etc - # which does not have their own .npmrc file - npm config set registry "$NPM_REGISTRY" - echo "##vso[task.setvariable variable=NPMRC_PATH]$(npm config get userconfig)" - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM - - - task: npmAuthenticate@0 - inputs: - workingFile: $(NPMRC_PATH) - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM Authentication - - - script: | - set -e - - for i in {1..5}; do # try 5 times - npm ci && break - if [ $i -eq 5 ]; then - echo "Npm install failed too many times" >&2 - exit 1 - fi - echo "Npm install failed $i, trying again..." - done - workingDirectory: build - env: - GITHUB_TOKEN: "$(github-distro-mixin-password)" - displayName: Install build dependencies - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - - # Step will be used by both verify glibcxx version for remote server and building rpm package, - # hence avoid adding it behind NODE_MODULES_RESTORED condition. 
- - script: | - set -e - SYSROOT_ARCH=$VSCODE_ARCH - if [ "$SYSROOT_ARCH" == "x64" ]; then - SYSROOT_ARCH="amd64" - fi - export VSCODE_SYSROOT_DIR=$(Build.SourcesDirectory)/.build/sysroots/glibc-2.28-gcc-8.5.0 - SYSROOT_ARCH="$SYSROOT_ARCH" VSCODE_SYSROOT_PREFIX="-glibc-2.28-gcc-8.5.0" node -e '(async () => { const { getVSCodeSysroot } = require("./build/linux/debian/install-sysroot.js"); await getVSCodeSysroot(process.env["SYSROOT_ARCH"]); })()' - env: - VSCODE_ARCH: $(VSCODE_ARCH) - GITHUB_TOKEN: "$(github-distro-mixin-password)" - displayName: Download vscode sysroots - - - script: | - set -e - - source ./build/azure-pipelines/linux/setup-env.sh - - for i in {1..5}; do # try 5 times - npm ci && break - if [ $i -eq 5 ]; then - echo "Npm install failed too many times" >&2 - exit 1 - fi - echo "Npm install failed $i, trying again..." - done - env: - npm_config_arch: $(NPM_ARCH) - VSCODE_ARCH: $(VSCODE_ARCH) - ELECTRON_SKIP_BINARY_DOWNLOAD: 1 - PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1 - GITHUB_TOKEN: "$(github-distro-mixin-password)" - displayName: Install dependencies - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - - - ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}: - - script: node build/azure-pipelines/distro/mixin-npm - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - displayName: Mixin distro node modules - - - script: | - set -e - node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt - mkdir -p .build/node_modules_cache - tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - displayName: Create node_modules archive - - - ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}: - - script: node build/azure-pipelines/distro/mixin-quality - displayName: Mixin distro quality - - - template: ../common/install-builtin-extensions.yml@self - - - ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}: 
- - script: | - set -e - npm run gulp vscode-linux-$(VSCODE_ARCH)-min-ci - ARCHIVE_PATH=".build/linux/client/code-${{ parameters.VSCODE_QUALITY }}-$(VSCODE_ARCH)-$(date +%s).tar.gz" - mkdir -p $(dirname $ARCHIVE_PATH) - echo "##vso[task.setvariable variable=CLIENT_PATH]$ARCHIVE_PATH" - env: - GITHUB_TOKEN: "$(github-distro-mixin-password)" - displayName: Build client - - - ${{ if ne(parameters.VSCODE_CIBUILD, true) }}: - - task: DownloadPipelineArtifact@2 - inputs: - artifact: $(ARTIFACT_PREFIX)vscode_cli_linux_$(VSCODE_ARCH)_cli - patterns: "**" - path: $(Build.ArtifactStagingDirectory)/cli - displayName: Download VS Code CLI - - - script: | - set -e - tar -xzvf $(Build.ArtifactStagingDirectory)/cli/*.tar.gz -C $(Build.ArtifactStagingDirectory)/cli - CLI_APP_NAME=$(node -p "require(\"$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)/resources/app/product.json\").tunnelApplicationName") - APP_NAME=$(node -p "require(\"$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)/resources/app/product.json\").applicationName") - mv $(Build.ArtifactStagingDirectory)/cli/$APP_NAME $(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)/bin/$CLI_APP_NAME - displayName: Mix in CLI - - - script: | - set -e - tar -czf $CLIENT_PATH -C .. VSCode-linux-$(VSCODE_ARCH) - env: - GITHUB_TOKEN: "$(github-distro-mixin-password)" - displayName: Archive client - - - script: | - set -e - npm run gulp vscode-reh-linux-$(VSCODE_ARCH)-min-ci - mv ../vscode-reh-linux-$(VSCODE_ARCH) ../vscode-server-linux-$(VSCODE_ARCH) # TODO@joaomoreno - ARCHIVE_PATH=".build/linux/server/vscode-server-linux-$(VSCODE_ARCH).tar.gz" - UNARCHIVE_PATH="`pwd`/../vscode-server-linux-$(VSCODE_ARCH)" - mkdir -p $(dirname $ARCHIVE_PATH) - tar --owner=0 --group=0 -czf $ARCHIVE_PATH -C .. 
vscode-server-linux-$(VSCODE_ARCH) - echo "##vso[task.setvariable variable=SERVER_PATH]$ARCHIVE_PATH" - echo "##vso[task.setvariable variable=SERVER_UNARCHIVE_PATH]$UNARCHIVE_PATH" - env: - GITHUB_TOKEN: "$(github-distro-mixin-password)" - displayName: Build server - - - script: | - set -e - npm run gulp vscode-reh-web-linux-$(VSCODE_ARCH)-min-ci - mv ../vscode-reh-web-linux-$(VSCODE_ARCH) ../vscode-server-linux-$(VSCODE_ARCH)-web # TODO@joaomoreno - ARCHIVE_PATH=".build/linux/web/vscode-server-linux-$(VSCODE_ARCH)-web.tar.gz" - mkdir -p $(dirname $ARCHIVE_PATH) - tar --owner=0 --group=0 -czf $ARCHIVE_PATH -C .. vscode-server-linux-$(VSCODE_ARCH)-web - echo "##vso[task.setvariable variable=WEB_PATH]$ARCHIVE_PATH" - env: - GITHUB_TOKEN: "$(github-distro-mixin-password)" - displayName: Build server (web) - - - ${{ if or(eq(parameters.VSCODE_ARCH, 'x64'), eq(parameters.VSCODE_ARCH, 'arm64')) }}: - - script: | - set -e - - EXPECTED_GLIBC_VERSION="2.28" \ - EXPECTED_GLIBCXX_VERSION="3.4.25" \ - VSCODE_SYSROOT_DIR="$(Build.SourcesDirectory)/.build/sysroots/glibc-2.28-gcc-8.5.0" \ - ./build/azure-pipelines/linux/verify-glibc-requirements.sh - env: - SEARCH_PATH: $(SERVER_UNARCHIVE_PATH) - npm_config_arch: $(NPM_ARCH) - VSCODE_ARCH: $(VSCODE_ARCH) - displayName: Check GLIBC and GLIBCXX dependencies in server archive - - - ${{ else }}: - - script: | - set -e - - EXPECTED_GLIBC_VERSION="2.28" \ - EXPECTED_GLIBCXX_VERSION="3.4.26" \ - VSCODE_SYSROOT_DIR="$(Build.SourcesDirectory)/.build/sysroots/glibc-2.28-gcc-8.5.0" \ - ./build/azure-pipelines/linux/verify-glibc-requirements.sh - env: - SEARCH_PATH: $(SERVER_UNARCHIVE_PATH) - npm_config_arch: $(NPM_ARCH) - VSCODE_ARCH: $(VSCODE_ARCH) - displayName: Check GLIBC and GLIBCXX dependencies in server archive - - - ${{ else }}: - - script: npm run gulp "transpile-client-esbuild" "transpile-extensions" - env: - GITHUB_TOKEN: "$(github-distro-mixin-password)" - displayName: Transpile client and extensions - - - ${{ if 
and(ne(parameters.VSCODE_CIBUILD, true), ne(parameters.VSCODE_QUALITY, 'oss')) }}: - - script: | - set -e - npm run gulp "vscode-linux-$(VSCODE_ARCH)-prepare-deb" - env: - GITHUB_TOKEN: "$(github-distro-mixin-password)" - displayName: Prepare deb package - - - script: | - set -e - npm run gulp "vscode-linux-$(VSCODE_ARCH)-build-deb" - file_output=$(file $(ls .build/linux/deb/*/deb/*.deb)) - if [[ "$file_output" != *"data compression xz"* ]]; then - echo "Error: unknown compression. $file_output" - exit 1 - fi - echo "##vso[task.setvariable variable=DEB_PATH]$(ls .build/linux/deb/*/deb/*.deb)" - displayName: Build deb package - - - script: | - set -e - TRIPLE="" - if [ "$VSCODE_ARCH" == "x64" ]; then - TRIPLE="x86_64-linux-gnu" - elif [ "$VSCODE_ARCH" == "arm64" ]; then - TRIPLE="aarch64-linux-gnu" - elif [ "$VSCODE_ARCH" == "armhf" ]; then - TRIPLE="arm-rpi-linux-gnueabihf" - fi - export VSCODE_SYSROOT_DIR=$(Build.SourcesDirectory)/.build/sysroots/glibc-2.28-gcc-10.5.0 - export STRIP="$VSCODE_SYSROOT_DIR/$TRIPLE/$TRIPLE/bin/strip" - npm run gulp "vscode-linux-$(VSCODE_ARCH)-prepare-rpm" - env: - VSCODE_ARCH: $(VSCODE_ARCH) - displayName: Prepare rpm package - - - script: | - set -e - npm run gulp "vscode-linux-$(VSCODE_ARCH)-build-rpm" - echo "##vso[task.setvariable variable=RPM_PATH]$(ls .build/linux/rpm/*/*.rpm)" - displayName: Build rpm package - - - ${{ if eq(parameters.VSCODE_ARCH, 'x64') }}: - - task: Docker@1 - inputs: - azureSubscriptionEndpoint: vscode - azureContainerRegistry: vscodehub.azurecr.io - command: login - displayName: Login to Container Registry - - - script: | - set -e - npm run gulp "vscode-linux-$(VSCODE_ARCH)-prepare-snap" - sudo -E docker run -e VSCODE_ARCH -e VSCODE_QUALITY -v $(pwd):/work -w /work vscodehub.azurecr.io/vscode-linux-build-agent:snapcraft-x64 /bin/bash -c "./build/azure-pipelines/linux/build-snap.sh" - - SNAP_ROOT="$(pwd)/.build/linux/snap/$(VSCODE_ARCH)" - SNAP_EXTRACTED_PATH=$(find $SNAP_ROOT -maxdepth 1 -type d -name 
'code-*') - SNAP_PATH=$(find $SNAP_ROOT -maxdepth 1 -type f -name '*.snap') - - # SBOM tool doesn't like recursive symlinks - sudo find $SNAP_EXTRACTED_PATH -type l -delete - - echo "##vso[task.setvariable variable=SNAP_EXTRACTED_PATH]$SNAP_EXTRACTED_PATH" - echo "##vso[task.setvariable variable=SNAP_PATH]$SNAP_PATH" - env: - VSCODE_ARCH: $(VSCODE_ARCH) - displayName: Build snap package - - - task: UseDotNet@2 - inputs: - version: 6.x - - - task: EsrpCodeSigning@5 - inputs: - UseMSIAuthentication: true - ConnectedServiceName: vscode-esrp - AppRegistrationClientId: $(ESRP_CLIENT_ID) - AppRegistrationTenantId: $(ESRP_TENANT_ID) - AuthAKVName: vscode-esrp - AuthSignCertName: esrp-sign - FolderPath: . - Pattern: noop - displayName: 'Install ESRP Tooling' - - - pwsh: | - . build/azure-pipelines/win32/exec.ps1 - $ErrorActionPreference = "Stop" - $EsrpCodeSigningTool = (gci -directory -filter EsrpCodeSigning_* $(Agent.RootDirectory)/_tasks | Select-Object -last 1).FullName - $Version = (gci -directory $EsrpCodeSigningTool | Select-Object -last 1).FullName - echo "##vso[task.setvariable variable=EsrpCliDllPath]$Version/net6.0/esrpcli.dll" - displayName: Find ESRP CLI - - - script: npx deemon --detach --wait node build/azure-pipelines/linux/codesign.js - env: - EsrpCliDllPath: $(EsrpCliDllPath) - SYSTEM_ACCESSTOKEN: $(System.AccessToken) - displayName: โœ๏ธ Codesign deb & rpm - - - ${{ if or(eq(parameters.VSCODE_RUN_ELECTRON_TESTS, true), eq(parameters.VSCODE_RUN_BROWSER_TESTS, true), eq(parameters.VSCODE_RUN_REMOTE_TESTS, true)) }}: - - template: product-build-linux-test.yml@self - parameters: - VSCODE_QUALITY: ${{ parameters.VSCODE_QUALITY }} - VSCODE_RUN_ELECTRON_TESTS: ${{ parameters.VSCODE_RUN_ELECTRON_TESTS }} - VSCODE_RUN_BROWSER_TESTS: ${{ parameters.VSCODE_RUN_BROWSER_TESTS }} - VSCODE_RUN_REMOTE_TESTS: ${{ parameters.VSCODE_RUN_REMOTE_TESTS }} - VSCODE_TEST_ARTIFACT_NAME: ${{ parameters.VSCODE_TEST_ARTIFACT_NAME }} - ${{ if ne(parameters.VSCODE_QUALITY, 'oss') 
}}: - PUBLISH_TASK_NAME: 1ES.PublishPipelineArtifact@1 - - - ${{ if and(ne(parameters.VSCODE_CIBUILD, true), ne(parameters.VSCODE_QUALITY, 'oss')) }}: - - script: npx deemon --attach node build/azure-pipelines/linux/codesign.js - condition: succeededOrFailed() - displayName: "โœ๏ธ Post-job: Codesign deb & rpm" - - - script: echo "##vso[task.setvariable variable=ARTIFACT_PREFIX]attempt$(System.JobAttempt)_" - condition: and(succeededOrFailed(), notIn(variables['Agent.JobStatus'], 'Succeeded', 'SucceededWithIssues')) - displayName: Generate artifact prefix - - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: $(CLIENT_PATH) - artifactName: $(ARTIFACT_PREFIX)vscode_client_linux_$(VSCODE_ARCH)_archive-unsigned - sbomBuildDropPath: $(Agent.BuildDirectory)/VSCode-linux-$(VSCODE_ARCH) - sbomPackageName: "VS Code Linux $(VSCODE_ARCH) (unsigned)" - sbomPackageVersion: $(Build.SourceVersion) - condition: and(succeededOrFailed(), ne(variables['CLIENT_PATH'], '')) - displayName: Publish client archive - - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: $(SERVER_PATH) - artifactName: $(ARTIFACT_PREFIX)vscode_server_linux_$(VSCODE_ARCH)_archive-unsigned - sbomBuildDropPath: $(Agent.BuildDirectory)/vscode-server-linux-$(VSCODE_ARCH) - sbomPackageName: "VS Code Linux $(VSCODE_ARCH) Server" - sbomPackageVersion: $(Build.SourceVersion) - condition: and(succeededOrFailed(), ne(variables['SERVER_PATH'], '')) - displayName: Publish server archive - - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: $(WEB_PATH) - artifactName: $(ARTIFACT_PREFIX)vscode_web_linux_$(VSCODE_ARCH)_archive-unsigned - sbomBuildDropPath: $(Agent.BuildDirectory)/vscode-server-linux-$(VSCODE_ARCH)-web - sbomPackageName: "VS Code Linux $(VSCODE_ARCH) Web" - sbomPackageVersion: $(Build.SourceVersion) - condition: and(succeededOrFailed(), ne(variables['WEB_PATH'], '')) - displayName: Publish web server archive - - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: 
$(DEB_PATH) - artifactName: $(ARTIFACT_PREFIX)vscode_client_linux_$(VSCODE_ARCH)_deb-package - sbomBuildDropPath: .build/linux/deb - sbomPackageName: "VS Code Linux $(VSCODE_ARCH) DEB" - sbomPackageVersion: $(Build.SourceVersion) - condition: and(succeededOrFailed(), ne(variables['DEB_PATH'], '')) - displayName: Publish deb package - - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: $(RPM_PATH) - artifactName: $(ARTIFACT_PREFIX)vscode_client_linux_$(VSCODE_ARCH)_rpm-package - sbomBuildDropPath: .build/linux/rpm - sbomPackageName: "VS Code Linux $(VSCODE_ARCH) RPM" - sbomPackageVersion: $(Build.SourceVersion) - condition: and(succeededOrFailed(), ne(variables['RPM_PATH'], '')) - displayName: Publish rpm package - - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: $(SNAP_PATH) - artifactName: vscode_client_linux_$(VSCODE_ARCH)_snap - sbomBuildDropPath: $(SNAP_EXTRACTED_PATH) - sbomPackageName: "VS Code Linux $(VSCODE_ARCH) SNAP" - sbomPackageVersion: $(Build.SourceVersion) - condition: and(succeededOrFailed(), ne(variables['SNAP_PATH'], '')) - displayName: Publish snap package diff --git a/build/azure-pipelines/linux/setup-env.sh b/build/azure-pipelines/linux/setup-env.sh deleted file mode 100755 index 3120a3ee..00000000 --- a/build/azure-pipelines/linux/setup-env.sh +++ /dev/null @@ -1,73 +0,0 @@ -#!/usr/bin/env bash - -set -e - -SYSROOT_ARCH=$SDLC_CORE_ARCH -if [ "$SYSROOT_ARCH" == "x64" ]; then - SYSROOT_ARCH="amd64" -fi - -export SDLC_CORE_CLIENT_SYSROOT_DIR=$PWD/.build/sysroots/glibc-2.28-gcc-10.5.0 -export SDLC_CORE_REMOTE_SYSROOT_DIR=$PWD/.build/sysroots/glibc-2.28-gcc-8.5.0 -if [ -d "$SDLC_CORE_CLIENT_SYSROOT_DIR" ]; then - echo "Using cached client sysroot" -else - echo "Downloading client sysroot" - SYSROOT_ARCH="$SYSROOT_ARCH" SDLC_CORE_SYSROOT_DIR="$SDLC_CORE_CLIENT_SYSROOT_DIR" node -e '(async () => { const { getSDLCcoreSysroot } = require("./build/linux/debian/install-sysroot.js"); await 
getSDLCcoreSysroot(process.env["SYSROOT_ARCH"]); })()' -fi - -if [ -d "$SDLC_CORE_REMOTE_SYSROOT_DIR" ]; then - echo "Using cached remote sysroot" -else - echo "Downloading remote sysroot" - SYSROOT_ARCH="$SYSROOT_ARCH" SDLC_CORE_SYSROOT_DIR="$SDLC_CORE_REMOTE_SYSROOT_DIR" SDLC_CORE_SYSROOT_PREFIX="-glibc-2.28-gcc-8.5.0" node -e '(async () => { const { getSDLCcoreSysroot } = require("./build/linux/debian/install-sysroot.js"); await getSDLCcoreSysroot(process.env["SYSROOT_ARCH"]); })()' -fi - -if [ "$npm_config_arch" == "x64" ]; then - # Download clang based on chromium revision used by vscode - curl -s https://raw.githubusercontent.com/chromium/chromium/138.0.7204.100/tools/clang/scripts/update.py | python - --output-dir=$PWD/.build/CR_Clang --host-os=linux - - # Download libcxx headers and objects from upstream electron releases - DEBUG=libcxx-fetcher \ - SDLC_CORE_LIBCXX_OBJECTS_DIR=$PWD/.build/libcxx-objects \ - SDLC_CORE_LIBCXX_HEADERS_DIR=$PWD/.build/libcxx_headers \ - SDLC_CORE_LIBCXXABI_HEADERS_DIR=$PWD/.build/libcxxabi_headers \ - SDLC_CORE_ARCH="$npm_config_arch" \ - node build/linux/libcxx-fetcher.js - - # Set compiler toolchain - export CC="$PWD/.build/CR_Clang/bin/clang --gcc-toolchain=$SDLC_CORE_CLIENT_SYSROOT_DIR/x86_64-linux-gnu" - export CXX="$PWD/.build/CR_Clang/bin/clang++ --gcc-toolchain=$SDLC_CORE_CLIENT_SYSROOT_DIR/x86_64-linux-gnu" - export CXXFLAGS="-nostdinc++ -D__NO_INLINE__ -DSPDLOG_USE_STD_FORMAT -I$PWD/.build/libcxx_headers -isystem$PWD/.build/libcxx_headers/include -isystem$PWD/.build/libcxxabi_headers/include -fPIC -flto=thin -fsplit-lto-unit -D_LIBCPP_ABI_NAMESPACE=Cr -D_LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_EXTENSIVE --sysroot=$SDLC_CORE_CLIENT_SYSROOT_DIR/x86_64-linux-gnu/x86_64-linux-gnu/sysroot" - export LDFLAGS="-stdlib=libc++ --sysroot=$SDLC_CORE_CLIENT_SYSROOT_DIR/x86_64-linux-gnu/x86_64-linux-gnu/sysroot -fuse-ld=lld -flto=thin -L$PWD/.build/libcxx-objects -lc++abi 
-L$SDLC_CORE_CLIENT_SYSROOT_DIR/x86_64-linux-gnu/x86_64-linux-gnu/sysroot/usr/lib/x86_64-linux-gnu -L$SDLC_CORE_CLIENT_SYSROOT_DIR/x86_64-linux-gnu/x86_64-linux-gnu/sysroot/lib/x86_64-linux-gnu -Wl,--lto-O0" - - # Set compiler toolchain for remote server - export SDLC_CORE_REMOTE_CC=$SDLC_CORE_REMOTE_SYSROOT_DIR/x86_64-linux-gnu/bin/x86_64-linux-gnu-gcc - export SDLC_CORE_REMOTE_CXX=$SDLC_CORE_REMOTE_SYSROOT_DIR/x86_64-linux-gnu/bin/x86_64-linux-gnu-g++ - export SDLC_CORE_REMOTE_CXXFLAGS="--sysroot=$SDLC_CORE_REMOTE_SYSROOT_DIR/x86_64-linux-gnu/x86_64-linux-gnu/sysroot" - export SDLC_CORE_REMOTE_LDFLAGS="--sysroot=$SDLC_CORE_REMOTE_SYSROOT_DIR/x86_64-linux-gnu/x86_64-linux-gnu/sysroot -L$SDLC_CORE_REMOTE_SYSROOT_DIR/x86_64-linux-gnu/x86_64-linux-gnu/sysroot/usr/lib/x86_64-linux-gnu -L$SDLC_CORE_REMOTE_SYSROOT_DIR/x86_64-linux-gnu/x86_64-linux-gnu/sysroot/lib/x86_64-linux-gnu" -elif [ "$npm_config_arch" == "arm64" ]; then - # Set compiler toolchain for client native modules - export CC=$VSCODE_CLIENT_SYSROOT_DIR/aarch64-linux-gnu/bin/aarch64-linux-gnu-gcc - export CXX=$VSCODE_CLIENT_SYSROOT_DIR/aarch64-linux-gnu/bin/aarch64-linux-gnu-g++ - export CXXFLAGS="--sysroot=$VSCODE_CLIENT_SYSROOT_DIR/aarch64-linux-gnu/aarch64-linux-gnu/sysroot" - export LDFLAGS="--sysroot=$VSCODE_CLIENT_SYSROOT_DIR/aarch64-linux-gnu/aarch64-linux-gnu/sysroot -L$VSCODE_CLIENT_SYSROOT_DIR/aarch64-linux-gnu/aarch64-linux-gnu/sysroot/usr/lib/aarch64-linux-gnu -L$VSCODE_CLIENT_SYSROOT_DIR/aarch64-linux-gnu/aarch64-linux-gnu/sysroot/lib/aarch64-linux-gnu" - - # Set compiler toolchain for remote server - export VSCODE_REMOTE_CC=$VSCODE_REMOTE_SYSROOT_DIR/aarch64-linux-gnu/bin/aarch64-linux-gnu-gcc - export VSCODE_REMOTE_CXX=$VSCODE_REMOTE_SYSROOT_DIR/aarch64-linux-gnu/bin/aarch64-linux-gnu-g++ - export VSCODE_REMOTE_CXXFLAGS="--sysroot=$VSCODE_REMOTE_SYSROOT_DIR/aarch64-linux-gnu/aarch64-linux-gnu/sysroot" - export 
VSCODE_REMOTE_LDFLAGS="--sysroot=$VSCODE_REMOTE_SYSROOT_DIR/aarch64-linux-gnu/aarch64-linux-gnu/sysroot -L$VSCODE_REMOTE_SYSROOT_DIR/aarch64-linux-gnu/aarch64-linux-gnu/sysroot/usr/lib/aarch64-linux-gnu -L$VSCODE_REMOTE_SYSROOT_DIR/aarch64-linux-gnu/aarch64-linux-gnu/sysroot/lib/aarch64-linux-gnu" -elif [ "$npm_config_arch" == "arm" ]; then - # Set compiler toolchain for client native modules - export CC=$VSCODE_CLIENT_SYSROOT_DIR/arm-rpi-linux-gnueabihf/bin/arm-rpi-linux-gnueabihf-gcc - export CXX=$VSCODE_CLIENT_SYSROOT_DIR/arm-rpi-linux-gnueabihf/bin/arm-rpi-linux-gnueabihf-g++ - export CXXFLAGS="--sysroot=$VSCODE_CLIENT_SYSROOT_DIR/arm-rpi-linux-gnueabihf/arm-rpi-linux-gnueabihf/sysroot" - export LDFLAGS="--sysroot=$VSCODE_CLIENT_SYSROOT_DIR/arm-rpi-linux-gnueabihf/arm-rpi-linux-gnueabihf/sysroot -L$VSCODE_CLIENT_SYSROOT_DIR/arm-rpi-linux-gnueabihf/arm-rpi-linux-gnueabihf/sysroot/usr/lib/arm-linux-gnueabihf -L$VSCODE_CLIENT_SYSROOT_DIR/arm-rpi-linux-gnueabihf/arm-rpi-linux-gnueabihf/sysroot/lib/arm-linux-gnueabihf" - - # Set compiler toolchain for remote server - export VSCODE_REMOTE_CC=$VSCODE_REMOTE_SYSROOT_DIR/arm-rpi-linux-gnueabihf/bin/arm-rpi-linux-gnueabihf-gcc - export VSCODE_REMOTE_CXX=$VSCODE_REMOTE_SYSROOT_DIR/arm-rpi-linux-gnueabihf/bin/arm-rpi-linux-gnueabihf-g++ - export VSCODE_REMOTE_CXXFLAGS="--sysroot=$VSCODE_REMOTE_SYSROOT_DIR/arm-rpi-linux-gnueabihf/arm-rpi-linux-gnueabihf/sysroot" - export VSCODE_REMOTE_LDFLAGS="--sysroot=$VSCODE_REMOTE_SYSROOT_DIR/arm-rpi-linux-gnueabihf/arm-rpi-linux-gnueabihf/sysroot -L$VSCODE_REMOTE_SYSROOT_DIR/arm-rpi-linux-gnueabihf/arm-rpi-linux-gnueabihf/sysroot/usr/lib/arm-linux-gnueabihf -L$VSCODE_REMOTE_SYSROOT_DIR/arm-rpi-linux-gnueabihf/arm-rpi-linux-gnueabihf/sysroot/lib/arm-linux-gnueabihf" -fi diff --git a/build/azure-pipelines/linux/verify-glibc-requirements.sh b/build/azure-pipelines/linux/verify-glibc-requirements.sh deleted file mode 100755 index 52941776..00000000 --- 
a/build/azure-pipelines/linux/verify-glibc-requirements.sh +++ /dev/null @@ -1,43 +0,0 @@ -#!/usr/bin/env bash - -set -e - -TRIPLE="x86_64-linux-gnu" -if [ "$VSCODE_ARCH" == "arm64" ]; then - TRIPLE="aarch64-linux-gnu" -elif [ "$VSCODE_ARCH" == "armhf" ]; then - TRIPLE="arm-rpi-linux-gnueabihf" -fi - -# Get all files with .node extension from server folder -files=$(find $SEARCH_PATH -name "*.node" -not -path "*prebuilds*" -not -path "*extensions/node_modules/@parcel/watcher*" -o -type f -executable -name "node") - -echo "Verifying requirements for files: $files" - -for file in $files; do - glibc_version="$EXPECTED_GLIBC_VERSION" - glibcxx_version="$EXPECTED_GLIBCXX_VERSION" - while IFS= read -r line; do - if [[ $line == *"GLIBC_"* ]]; then - version=$(echo "$line" | awk '{if ($5 ~ /^[0-9a-fA-F]+$/) print $6; else print $5}' | tr -d '()') - version=${version#*_} - if [[ $(printf "%s\n%s" "$version" "$glibc_version" | sort -V | tail -n1) == "$version" ]]; then - glibc_version=$version - fi - elif [[ $line == *"GLIBCXX_"* ]]; then - version=$(echo "$line" | awk '{if ($5 ~ /^[0-9a-fA-F]+$/) print $6; else print $5}' | tr -d '()') - version=${version#*_} - if [[ $(printf "%s\n%s" "$version" "$glibcxx_version" | sort -V | tail -n1) == "$version" ]]; then - glibcxx_version=$version - fi - fi - done < <("$VSCODE_SYSROOT_DIR/$TRIPLE/$TRIPLE/bin/objdump" -T "$file") - - if [[ "$glibc_version" != "$EXPECTED_GLIBC_VERSION" ]]; then - echo "Error: File $file has dependency on GLIBC > $EXPECTED_GLIBC_VERSION, found $glibc_version" - exit 1 - fi - if [[ "$glibcxx_version" != "$EXPECTED_GLIBCXX_VERSION" ]]; then - echo "Error: File $file has dependency on GLIBCXX > $EXPECTED_GLIBCXX_VERSION, found $glibcxx_version" - fi -done diff --git a/build/azure-pipelines/linux/xvfb.init b/build/azure-pipelines/linux/xvfb.init deleted file mode 100644 index 2365c09f..00000000 --- a/build/azure-pipelines/linux/xvfb.init +++ /dev/null @@ -1,53 +0,0 @@ -#!/bin/bash -# -# /etc/rc.d/init.d/xvfbd 
-# -# chkconfig: 345 95 28 -# description: Starts/Stops X Virtual Framebuffer server -# processname: Xvfb -# -### BEGIN INIT INFO -# Provides: xvfb -# Required-Start: $remote_fs $syslog -# Required-Stop: $remote_fs $syslog -# Default-Start: 2 3 4 5 -# Default-Stop: 0 1 6 -# Short-Description: Start xvfb at boot time -# Description: Enable xvfb provided by daemon. -### END INIT INFO - -[ "${NETWORKING}" = "no" ] && exit 0 - -PROG="/usr/bin/Xvfb" -PROG_OPTIONS=":10 -ac -screen 0 1024x768x24" -PROG_OUTPUT="/tmp/Xvfb.out" - -case "$1" in - start) - echo "Starting : X Virtual Frame Buffer " - $PROG $PROG_OPTIONS>>$PROG_OUTPUT 2>&1 & - disown -ar - ;; - stop) - echo "Shutting down : X Virtual Frame Buffer" - killproc $PROG - RETVAL=$? - [ $RETVAL -eq 0 ] && /bin/rm -f /var/lock/subsys/Xvfb - /var/run/Xvfb.pid - echo - ;; - restart|reload) - $0 stop - $0 start - RETVAL=$? - ;; - status) - status Xvfb - RETVAL=$? - ;; - *) - echo $"Usage: $0 (start|stop|restart|reload|status)" - exit 1 -esac - -exit $RETVAL diff --git a/build/azure-pipelines/oss/product-build-pr-cache-darwin.yml b/build/azure-pipelines/oss/product-build-pr-cache-darwin.yml deleted file mode 100644 index d382918a..00000000 --- a/build/azure-pipelines/oss/product-build-pr-cache-darwin.yml +++ /dev/null @@ -1,79 +0,0 @@ -steps: - - checkout: self - fetchDepth: 1 - retryCountOnTaskFailure: 3 - - - task: NodeTool@0 - inputs: - versionSource: fromFile - versionFilePath: .nvmrc - nodejsMirror: https://github.com/joaomoreno/node-mirror/releases/download - - - script: node build/setup-npm-registry.js $NPM_REGISTRY - condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM Registry - - - script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js darwin $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash - displayName: Prepare node_modules cache key - - - task: Cache@2 - inputs: - key: '"node_modules" | .build/packagelockhash' - path: 
.build/node_modules_cache - cacheHitVar: NODE_MODULES_RESTORED - displayName: Restore node_modules cache - - - script: tar -xzf .build/node_modules_cache/cache.tgz - condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true')) - displayName: Extract node_modules cache - - - script: | - set -e - # Set the private NPM registry to the global npmrc file - # so that authentication works for subfolders like build/, remote/, extensions/ etc - # which does not have their own .npmrc file - npm config set registry "$NPM_REGISTRY" - echo "##vso[task.setvariable variable=NPMRC_PATH]$(npm config get userconfig)" - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM - - - task: npmAuthenticate@0 - inputs: - workingFile: $(NPMRC_PATH) - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM Authentication - - - script: | - set -e - c++ --version - xcode-select -print-path - python3 -m pip install setuptools - - for i in {1..5}; do # try 5 times - npm ci && break - if [ $i -eq 5 ]; then - echo "Npm install failed too many times" >&2 - exit 1 - fi - echo "Npm install failed $i, trying again..." - done - env: - npm_config_arch: $(VSCODE_ARCH) - ELECTRON_SKIP_BINARY_DOWNLOAD: 1 - PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1 - GITHUB_TOKEN: "$(github-distro-mixin-password)" - # Avoid using dlopen to load Kerberos on macOS which can cause missing libraries - # https://github.com/mongodb-js/kerberos/commit/04044d2814ad1d01e77f1ce87f26b03d86692cf2 - # flipped the default to support legacy linux distros which shouldn't happen - # on macOS. 
- GYP_DEFINES: "kerberos_use_rtld=false" - displayName: Install dependencies - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - - - script: | - set -e - node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt - mkdir -p .build/node_modules_cache - tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - displayName: Create node_modules archive diff --git a/build/azure-pipelines/oss/product-build-pr-cache-linux.yml b/build/azure-pipelines/oss/product-build-pr-cache-linux.yml deleted file mode 100644 index b4a2cc3a..00000000 --- a/build/azure-pipelines/oss/product-build-pr-cache-linux.yml +++ /dev/null @@ -1,76 +0,0 @@ -steps: - - checkout: self - fetchDepth: 1 - retryCountOnTaskFailure: 3 - - - task: NodeTool@0 - inputs: - versionSource: fromFile - versionFilePath: .nvmrc - nodejsMirror: https://github.com/joaomoreno/node-mirror/releases/download - - - script: node build/setup-npm-registry.js $NPM_REGISTRY - condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM Registry - - - script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js linux $VSCODE_ARCH $(node -p process.arch) > .build/packagelockhash - displayName: Prepare node_modules cache key - - - task: Cache@2 - inputs: - key: '"node_modules" | .build/packagelockhash' - path: .build/node_modules_cache - cacheHitVar: NODE_MODULES_RESTORED - displayName: Restore node_modules cache - - - script: tar -xzf .build/node_modules_cache/cache.tgz - condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true')) - displayName: Extract node_modules cache - - - script: | - set -e - # Set the private NPM registry to the global npmrc file - # so that authentication works for subfolders like build/, remote/, extensions/ etc - # which does not have their own .npmrc file - npm config set 
registry "$NPM_REGISTRY" - echo "##vso[task.setvariable variable=NPMRC_PATH]$(npm config get userconfig)" - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM - - - task: npmAuthenticate@0 - inputs: - workingFile: $(NPMRC_PATH) - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM Authentication - - - script: | - set -e - ./build/azure-pipelines/linux/apt-retry.sh sudo apt-get update - ./build/azure-pipelines/linux/apt-retry.sh sudo apt-get install -y libkrb5-dev - displayName: Setup system services - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - - - script: | - set -e - for i in {1..5}; do # try 5 times - npm ci && break - if [ $i -eq 5 ]; then - echo "Npm install failed too many times" >&2 - exit 1 - fi - echo "Npm install failed $i, trying again..." - done - env: - ELECTRON_SKIP_BINARY_DOWNLOAD: 1 - PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1 - GITHUB_TOKEN: "$(github-distro-mixin-password)" - displayName: Install dependencies - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - - - script: | - set -e - node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt - mkdir -p .build/node_modules_cache - tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - displayName: Create node_modules archive diff --git a/build/azure-pipelines/oss/product-build-pr-cache-win32.yml b/build/azure-pipelines/oss/product-build-pr-cache-win32.yml deleted file mode 100644 index f4a82587..00000000 --- a/build/azure-pipelines/oss/product-build-pr-cache-win32.yml +++ /dev/null @@ -1,71 +0,0 @@ -steps: - - checkout: self - fetchDepth: 1 - retryCountOnTaskFailure: 3 - - - task: NodeTool@0 - inputs: - versionSource: fromFile - versionFilePath: .nvmrc 
- nodejsMirror: https://github.com/joaomoreno/node-mirror/releases/download - - - powershell: node build/setup-npm-registry.js $env:NPM_REGISTRY - condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM Registry - - - pwsh: | - mkdir .build -ea 0 - node build/azure-pipelines/common/computeNodeModulesCacheKey.js win32 $(VSCODE_ARCH) $(node -p process.arch) > .build/packagelockhash - displayName: Prepare node_modules cache key - - - task: Cache@2 - inputs: - key: '"node_modules" | .build/packagelockhash' - path: .build/node_modules_cache - cacheHitVar: NODE_MODULES_RESTORED - displayName: Restore node_modules cache - - - powershell: 7z.exe x .build/node_modules_cache/cache.7z -aoa - condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true')) - displayName: Extract node_modules cache - - - powershell: | - . build/azure-pipelines/win32/exec.ps1 - $ErrorActionPreference = "Stop" - # Set the private NPM registry to the global npmrc file - # so that authentication works for subfolders like build/, remote/, extensions/ etc - # which does not have their own .npmrc file - exec { npm config set registry "$env:NPM_REGISTRY" } - $NpmrcPath = (npm config get userconfig) - echo "##vso[task.setvariable variable=NPMRC_PATH]$NpmrcPath" - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM - - - task: npmAuthenticate@0 - inputs: - workingFile: $(NPMRC_PATH) - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM Authentication - - - powershell: | - . 
build/azure-pipelines/win32/exec.ps1 - $ErrorActionPreference = "Stop" - exec { npm ci } - env: - npm_config_arch: $(VSCODE_ARCH) - npm_config_foreground_scripts: "true" - ELECTRON_SKIP_BINARY_DOWNLOAD: 1 - PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1 - GITHUB_TOKEN: "$(github-distro-mixin-password)" - retryCountOnTaskFailure: 5 - displayName: Install dependencies - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - - - powershell: | - . build/azure-pipelines/win32/exec.ps1 - $ErrorActionPreference = "Stop" - exec { node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt } - exec { mkdir -Force .build/node_modules_cache } - exec { 7z.exe a .build/node_modules_cache/cache.7z -mx3 `@.build/node_modules_list.txt } - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - displayName: Create node_modules archive diff --git a/build/azure-pipelines/product-build-macos.yml b/build/azure-pipelines/product-build-macos.yml deleted file mode 100644 index cc8985c0..00000000 --- a/build/azure-pipelines/product-build-macos.yml +++ /dev/null @@ -1,136 +0,0 @@ -pr: none - -trigger: none - -parameters: - - name: VSCODE_QUALITY - displayName: Quality - type: string - default: insider - - name: NPM_REGISTRY - displayName: "Custom NPM Registry" - type: string - default: 'https://pkgs.dev.azure.com/monacotools/Monaco/_packaging/vscode/npm/registry/' - - name: CARGO_REGISTRY - displayName: "Custom Cargo Registry" - type: string - default: 'sparse+https://pkgs.dev.azure.com/monacotools/Monaco/_packaging/vscode/Cargo/index/' - -variables: - - name: NPM_REGISTRY - ${{ if in(variables['Build.Reason'], 'IndividualCI', 'BatchedCI') }}: # disable terrapin when in VSCODE_CIBUILD - value: none - ${{ else }}: - value: ${{ parameters.NPM_REGISTRY }} - - name: CARGO_REGISTRY - value: ${{ parameters.CARGO_REGISTRY }} - - name: VSCODE_QUALITY - value: ${{ parameters.VSCODE_QUALITY }} - - name: VSCODE_CIBUILD - value: ${{ 
in(variables['Build.Reason'], 'IndividualCI', 'BatchedCI') }} - - name: skipComponentGovernanceDetection - value: true - - name: ComponentDetection.Timeout - value: 600 - - name: Codeql.SkipTaskAutoInjection - value: true - - name: ARTIFACT_PREFIX - value: '' - -name: "$(Date:yyyyMMdd).$(Rev:r) (${{ parameters.VSCODE_QUALITY }})" - -resources: - repositories: - - repository: 1ESPipelines - type: git - name: 1ESPipelineTemplates/1ESPipelineTemplates - ref: refs/tags/release - -extends: - template: v1/1ES.Official.PipelineTemplate.yml@1esPipelines - parameters: - sdl: - tsa: - enabled: true - configFile: $(Build.SourcesDirectory)/build/azure-pipelines/config/tsaoptions.json - binskim: - analyzeTargetGlob: '+:file|$(Agent.BuildDirectory)/VSCode-*/**/*.exe;+:file|$(Agent.BuildDirectory)/VSCode-*/**/*.node;+:file|$(Agent.BuildDirectory)/VSCode-*/**/*.dll;-:file|$(Build.SourcesDirectory)/.build/**/system-setup/VSCodeSetup*.exe;-:file|$(Build.SourcesDirectory)/.build/**/user-setup/VSCodeUserSetup*.exe' - codeql: - runSourceLanguagesInSourceAnalysis: true - compiled: - enabled: false - justificationForDisabling: "CodeQL breaks ESRP CodeSign on macOS (ICM #520035761, githubcustomers/microsoft-codeql-support#198)" - credscan: - suppressionsFile: $(Build.SourcesDirectory)/build/azure-pipelines/config/CredScanSuppressions.json - eslint: - enabled: true - enableExclusions: true - exclusionsFilePath: $(Build.SourcesDirectory)/.eslint-ignore - sourceAnalysisPool: 1es-windows-2022-x64 - createAdoIssuesForJustificationsForDisablement: false - containers: - ubuntu-2004-arm64: - image: onebranch.azurecr.io/linux/ubuntu-2004-arm64:latest - stages: - - stage: Compile - jobs: - - job: Compile - timeoutInMinutes: 90 - pool: - name: ACESLabTest - os: macOS - steps: - - template: build/azure-pipelines/product-compile.yml@self - parameters: - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - - - stage: macOS - dependsOn: - - Compile - pool: - name: ACESLabTest - os: macOS - variables: - 
BUILDSECMON_OPT_IN: true - jobs: - - job: macOSElectronTest - displayName: Electron Tests - timeoutInMinutes: 30 - variables: - VSCODE_ARCH: arm64 - steps: - - template: build/azure-pipelines/darwin/product-build-darwin.yml@self - parameters: - VSCODE_ARCH: arm64 - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_CIBUILD: ${{ variables.VSCODE_CIBUILD }} - VSCODE_TEST_ARTIFACT_NAME: electron - VSCODE_RUN_ELECTRON_TESTS: true - - - job: macOSBrowserTest - displayName: Browser Tests - timeoutInMinutes: 30 - variables: - VSCODE_ARCH: arm64 - steps: - - template: build/azure-pipelines/darwin/product-build-darwin.yml@self - parameters: - VSCODE_ARCH: arm64 - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_CIBUILD: ${{ variables.VSCODE_CIBUILD }} - VSCODE_TEST_ARTIFACT_NAME: browser - VSCODE_RUN_BROWSER_TESTS: true - - - job: macOSRemoteTest - displayName: Remote Tests - timeoutInMinutes: 30 - variables: - VSCODE_ARCH: arm64 - steps: - - template: build/azure-pipelines/darwin/product-build-darwin.yml@self - parameters: - VSCODE_ARCH: arm64 - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_CIBUILD: ${{ variables.VSCODE_CIBUILD }} - VSCODE_TEST_ARTIFACT_NAME: remote - VSCODE_RUN_REMOTE_TESTS: true diff --git a/build/azure-pipelines/product-build-pr.yml b/build/azure-pipelines/product-build-pr.yml deleted file mode 100644 index f7aff453..00000000 --- a/build/azure-pipelines/product-build-pr.yml +++ /dev/null @@ -1,231 +0,0 @@ -trigger: - - release/* - -pr: - branches: - include: ["release/*"] - -variables: - - name: Codeql.SkipTaskAutoInjection - value: true - - name: skipComponentGovernanceDetection - value: true - - name: NPM_REGISTRY - value: "none" - - name: CARGO_REGISTRY - value: "none" - - name: VSCODE_CIBUILD - value: ${{ in(variables['Build.Reason'], 'IndividualCI', 'BatchedCI') }} - - name: VSCODE_QUALITY - value: oss - - name: VSCODE_STEP_ON_IT - value: false - -stages: - - ${{ if ne(variables['VSCODE_CIBUILD'], true) }}: - - stage: 
Compile - displayName: Compile & Hygiene - dependsOn: [] - jobs: - - job: Compile - displayName: Compile & Hygiene - pool: 1es-oss-ubuntu-22.04-x64 - timeoutInMinutes: 30 - variables: - VSCODE_ARCH: x64 - steps: - - template: product-compile.yml@self - parameters: - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - - - stage: Test - displayName: Test - dependsOn: [] - jobs: - - job: Linuxx64ElectronTest - displayName: Linux (Electron) - pool: 1es-oss-ubuntu-22.04-x64 - timeoutInMinutes: 30 - variables: - VSCODE_ARCH: x64 - NPM_ARCH: x64 - DISPLAY: ":10" - steps: - - template: linux/product-build-linux.yml@self - parameters: - VSCODE_ARCH: x64 - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_CIBUILD: ${{ variables.VSCODE_CIBUILD }} - VSCODE_TEST_ARTIFACT_NAME: electron - VSCODE_RUN_ELECTRON_TESTS: true - - - job: Linuxx64BrowserTest - displayName: Linux (Browser) - pool: 1es-oss-ubuntu-22.04-x64 - timeoutInMinutes: 30 - variables: - VSCODE_ARCH: x64 - NPM_ARCH: x64 - DISPLAY: ":10" - steps: - - template: linux/product-build-linux.yml@self - parameters: - VSCODE_ARCH: x64 - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_CIBUILD: ${{ variables.VSCODE_CIBUILD }} - VSCODE_TEST_ARTIFACT_NAME: browser - VSCODE_RUN_BROWSER_TESTS: true - - - job: Linuxx64RemoteTest - displayName: Linux (Remote) - pool: 1es-oss-ubuntu-22.04-x64 - timeoutInMinutes: 30 - variables: - VSCODE_ARCH: x64 - NPM_ARCH: x64 - DISPLAY: ":10" - steps: - - template: linux/product-build-linux.yml@self - parameters: - VSCODE_ARCH: x64 - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_CIBUILD: ${{ variables.VSCODE_CIBUILD }} - VSCODE_TEST_ARTIFACT_NAME: remote - VSCODE_RUN_REMOTE_TESTS: true - - - job: LinuxCLI - displayName: Linux (CLI) - pool: 1es-oss-ubuntu-22.04-x64 - timeoutInMinutes: 30 - steps: - - template: cli/test.yml@self - - - job: Windowsx64ElectronTests - displayName: Windows (Electron) - pool: 1es-oss-windows-2022-x64 - timeoutInMinutes: 30 - variables: - 
VSCODE_ARCH: x64 - NPM_ARCH: x64 - steps: - - template: win32/product-build-win32.yml@self - parameters: - VSCODE_ARCH: x64 - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_CIBUILD: ${{ variables.VSCODE_CIBUILD }} - VSCODE_TEST_ARTIFACT_NAME: electron - VSCODE_RUN_ELECTRON_TESTS: true - - - job: Windowsx64BrowserTests - displayName: Windows (Browser) - pool: 1es-oss-windows-2022-x64 - timeoutInMinutes: 60 - variables: - VSCODE_ARCH: x64 - NPM_ARCH: x64 - steps: - - template: win32/product-build-win32.yml@self - parameters: - VSCODE_ARCH: x64 - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_CIBUILD: ${{ variables.VSCODE_CIBUILD }} - VSCODE_TEST_ARTIFACT_NAME: browser - VSCODE_RUN_BROWSER_TESTS: true - - - job: Windowsx64RemoteTests - displayName: Windows (Remote) - pool: 1es-oss-windows-2022-x64 - timeoutInMinutes: 60 - variables: - VSCODE_ARCH: x64 - NPM_ARCH: x64 - steps: - - template: win32/product-build-win32.yml@self - parameters: - VSCODE_ARCH: x64 - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_CIBUILD: ${{ variables.VSCODE_CIBUILD }} - VSCODE_TEST_ARTIFACT_NAME: remote - VSCODE_RUN_REMOTE_TESTS: true - - - job: macOSx64ElectronTests - displayName: macOS (Electron) - pool: - vmImage: macOS-14 - timeoutInMinutes: 30 - variables: - VSCODE_ARCH: x64 - NPM_ARCH: x64 - steps: - - template: darwin/product-build-darwin.yml@self - parameters: - VSCODE_ARCH: x64 - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_CIBUILD: ${{ variables.VSCODE_CIBUILD }} - VSCODE_TEST_ARTIFACT_NAME: electron - VSCODE_RUN_ELECTRON_TESTS: true - - - job: macOSx64BrowserTests - displayName: macOS (Browser) - pool: - vmImage: macOS-14 - timeoutInMinutes: 30 - variables: - VSCODE_ARCH: x64 - NPM_ARCH: x64 - steps: - - template: darwin/product-build-darwin.yml@self - parameters: - VSCODE_ARCH: x64 - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_CIBUILD: ${{ variables.VSCODE_CIBUILD }} - VSCODE_TEST_ARTIFACT_NAME: browser - 
VSCODE_RUN_BROWSER_TESTS: true - - - job: macOSx64RemoteTests - displayName: macOS (Remote) - pool: - vmImage: macOS-14 - timeoutInMinutes: 30 - variables: - VSCODE_ARCH: x64 - NPM_ARCH: x64 - steps: - - template: darwin/product-build-darwin.yml@self - parameters: - VSCODE_ARCH: x64 - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_CIBUILD: ${{ variables.VSCODE_CIBUILD }} - VSCODE_TEST_ARTIFACT_NAME: remote - VSCODE_RUN_REMOTE_TESTS: true - - - ${{ if eq(variables['VSCODE_CIBUILD'], true) }}: - - stage: NodeModuleCache - jobs: - - job: Linuxx64MaintainNodeModulesCache - displayName: Linux (Maintain node_modules cache) - pool: 1es-oss-ubuntu-22.04-x64 - timeoutInMinutes: 30 - variables: - VSCODE_ARCH: x64 - steps: - - template: oss/product-build-pr-cache-linux.yml@self - - - job: Windowsx64MaintainNodeModulesCache - displayName: Windows (Maintain node_modules cache) - pool: 1es-oss-windows-2022-x64 - timeoutInMinutes: 30 - variables: - VSCODE_ARCH: x64 - steps: - - template: oss/product-build-pr-cache-win32.yml@self - - - job: macOSx64MaintainNodeModulesCache - displayName: macOS (Maintain node_modules cache) - pool: - vmImage: macOS-14 - timeoutInMinutes: 30 - variables: - VSCODE_ARCH: x64 - steps: - - template: oss/product-build-pr-cache-darwin.yml@self diff --git a/build/azure-pipelines/product-build.yml b/build/azure-pipelines/product-build.yml deleted file mode 100644 index 12e7b05d..00000000 --- a/build/azure-pipelines/product-build.yml +++ /dev/null @@ -1,713 +0,0 @@ -pr: none - -schedules: - - cron: "0 5 * * Mon-Fri" - displayName: Mon-Fri at 7:00 - branches: - include: - - main - -trigger: - batch: true - branches: - include: ["main", "release/*"] - -parameters: - - name: VSCODE_QUALITY - displayName: Quality - type: string - default: insider - values: - - exploration - - insider - - stable - - name: NPM_REGISTRY - displayName: "Custom NPM Registry" - type: string - default: 
'https://pkgs.dev.azure.com/monacotools/Monaco/_packaging/vscode/npm/registry/' - - name: CARGO_REGISTRY - displayName: "Custom Cargo Registry" - type: string - default: 'sparse+https://pkgs.dev.azure.com/monacotools/Monaco/_packaging/vscode/Cargo/index/' - - name: VSCODE_BUILD_WIN32 - displayName: "๐ŸŽฏ Windows x64" - type: boolean - default: true - - name: VSCODE_BUILD_WIN32_ARM64 - displayName: "๐ŸŽฏ Windows arm64" - type: boolean - default: true - - name: VSCODE_BUILD_LINUX - displayName: "๐ŸŽฏ Linux x64" - type: boolean - default: true - - name: VSCODE_BUILD_LINUX_ARM64 - displayName: "๐ŸŽฏ Linux arm64" - type: boolean - default: true - - name: VSCODE_BUILD_LINUX_ARMHF - displayName: "๐ŸŽฏ Linux armhf" - type: boolean - default: true - - name: VSCODE_BUILD_ALPINE - displayName: "๐ŸŽฏ Alpine x64" - type: boolean - default: true - - name: VSCODE_BUILD_ALPINE_ARM64 - displayName: "๐ŸŽฏ Alpine arm64" - type: boolean - default: true - - name: VSCODE_BUILD_MACOS - displayName: "๐ŸŽฏ macOS x64" - type: boolean - default: true - - name: VSCODE_BUILD_MACOS_ARM64 - displayName: "๐ŸŽฏ macOS arm64" - type: boolean - default: true - - name: VSCODE_BUILD_MACOS_UNIVERSAL - displayName: "๐ŸŽฏ macOS universal" - type: boolean - default: true - - name: VSCODE_BUILD_WEB - displayName: "๐ŸŽฏ Web" - type: boolean - default: true - - name: VSCODE_PUBLISH - displayName: "Publish to builds.code.visualstudio.com" - type: boolean - default: true - - name: VSCODE_RELEASE - displayName: "Release build if successful" - type: boolean - default: false - - name: VSCODE_COMPILE_ONLY - displayName: "Run Compile stage exclusively" - type: boolean - default: false - - name: VSCODE_STEP_ON_IT - displayName: "Skip tests" - type: boolean - default: false - -variables: - - name: VSCODE_PRIVATE_BUILD - value: ${{ ne(variables['Build.Repository.Uri'], 'https://github.com/microsoft/vscode.git') }} - - name: NPM_REGISTRY - ${{ if in(variables['Build.Reason'], 'IndividualCI', 'BatchedCI') }}: # disable 
terrapin when in VSCODE_CIBUILD - value: none - ${{ else }}: - value: ${{ parameters.NPM_REGISTRY }} - - name: CARGO_REGISTRY - value: ${{ parameters.CARGO_REGISTRY }} - - name: VSCODE_QUALITY - value: ${{ parameters.VSCODE_QUALITY }} - - name: VSCODE_BUILD_STAGE_WINDOWS - value: ${{ or(eq(parameters.VSCODE_BUILD_WIN32, true), eq(parameters.VSCODE_BUILD_WIN32_ARM64, true)) }} - - name: VSCODE_BUILD_STAGE_LINUX - value: ${{ or(eq(parameters.VSCODE_BUILD_LINUX, true), eq(parameters.VSCODE_BUILD_LINUX_ARMHF, true), eq(parameters.VSCODE_BUILD_LINUX_ARM64, true)) }} - - name: VSCODE_BUILD_STAGE_ALPINE - value: ${{ or(eq(parameters.VSCODE_BUILD_ALPINE, true), eq(parameters.VSCODE_BUILD_ALPINE_ARM64, true)) }} - - name: VSCODE_BUILD_STAGE_MACOS - value: ${{ or(eq(parameters.VSCODE_BUILD_MACOS, true), eq(parameters.VSCODE_BUILD_MACOS_ARM64, true)) }} - - name: VSCODE_BUILD_STAGE_WEB - value: ${{ eq(parameters.VSCODE_BUILD_WEB, true) }} - - name: VSCODE_CIBUILD - value: ${{ in(variables['Build.Reason'], 'IndividualCI', 'BatchedCI') }} - - name: VSCODE_PUBLISH - value: ${{ and(eq(parameters.VSCODE_PUBLISH, true), eq(variables.VSCODE_CIBUILD, false), eq(parameters.VSCODE_COMPILE_ONLY, false)) }} - - name: VSCODE_SCHEDULEDBUILD - value: ${{ eq(variables['Build.Reason'], 'Schedule') }} - - name: VSCODE_STEP_ON_IT - value: ${{ eq(parameters.VSCODE_STEP_ON_IT, true) }} - - name: VSCODE_BUILD_MACOS_UNIVERSAL - value: ${{ and(eq(parameters.VSCODE_BUILD_MACOS, true), eq(parameters.VSCODE_BUILD_MACOS_ARM64, true), eq(parameters.VSCODE_BUILD_MACOS_UNIVERSAL, true)) }} - - name: VSCODE_STAGING_BLOB_STORAGE_ACCOUNT_NAME - value: vscodeesrp - - name: PRSS_CDN_URL - value: https://vscode.download.prss.microsoft.com/dbazure/download - - name: VSCODE_ESRP_SERVICE_CONNECTION_ID - value: fe07e6ce-6ffb-4df9-8d27-d129523a3f3e - - name: VSCODE_ESRP_TENANT_ID - value: 975f013f-7f24-47e8-a7d3-abc4752bf346 - - name: VSCODE_ESRP_CLIENT_ID - value: 4ac7ed59-b5e9-4f66-9c30-8d1afa72d32d - - name: 
ESRP_TENANT_ID - value: 975f013f-7f24-47e8-a7d3-abc4752bf346 - - name: ESRP_CLIENT_ID - value: c24324f7-e65f-4c45-8702-ed2d4c35df99 - - name: AZURE_DOCUMENTDB_ENDPOINT - value: https://vscode.documents.azure.com/ - - name: VSCODE_MIXIN_REPO - value: microsoft/vscode-distro - - name: skipComponentGovernanceDetection - value: true - - name: ComponentDetection.Timeout - value: 600 - - name: Codeql.SkipTaskAutoInjection - value: true - - name: ARTIFACT_PREFIX - value: '' - -name: "$(Date:yyyyMMdd).$(Rev:r) (${{ parameters.VSCODE_QUALITY }})" - -resources: - pipelines: - - pipeline: vscode-7pm-kick-off - source: 'VS Code 7PM Kick-Off' - trigger: true - repositories: - - repository: 1ESPipelines - type: git - name: 1ESPipelineTemplates/1ESPipelineTemplates - ref: refs/tags/release - -extends: - template: v1/1ES.Official.PipelineTemplate.yml@1esPipelines - parameters: - sdl: - tsa: - enabled: true - configFile: $(Build.SourcesDirectory)/build/azure-pipelines/config/tsaoptions.json - binskim: - analyzeTargetGlob: '+:file|$(Agent.BuildDirectory)/VSCode-*/**/*.exe;+:file|$(Agent.BuildDirectory)/VSCode-*/**/*.node;+:file|$(Agent.BuildDirectory)/VSCode-*/**/*.dll;-:file|$(Build.SourcesDirectory)/.build/**/system-setup/VSCodeSetup*.exe;-:file|$(Build.SourcesDirectory)/.build/**/user-setup/VSCodeUserSetup*.exe' - codeql: - runSourceLanguagesInSourceAnalysis: true - compiled: - enabled: false - justificationForDisabling: "CodeQL breaks ESRP CodeSign on macOS (ICM #520035761, githubcustomers/microsoft-codeql-support#198)" - credscan: - suppressionsFile: $(Build.SourcesDirectory)/build/azure-pipelines/config/CredScanSuppressions.json - eslint: - enabled: true - enableExclusions: true - exclusionsFilePath: $(Build.SourcesDirectory)/.eslint-ignore - sourceAnalysisPool: 1es-windows-2022-x64 - createAdoIssuesForJustificationsForDisablement: false - containers: - ubuntu-2004-arm64: - image: onebranch.azurecr.io/linux/ubuntu-2004-arm64:latest - stages: - - stage: Compile - jobs: - - job: 
Compile - timeoutInMinutes: 90 - pool: - name: AcesShared - os: macOS - steps: - - template: build/azure-pipelines/product-compile.yml@self - parameters: - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - - - ${{ if or(eq(parameters.VSCODE_BUILD_LINUX, true),eq(parameters.VSCODE_BUILD_LINUX_ARMHF, true),eq(parameters.VSCODE_BUILD_LINUX_ARM64, true),eq(parameters.VSCODE_BUILD_ALPINE, true),eq(parameters.VSCODE_BUILD_ALPINE_ARM64, true),eq(parameters.VSCODE_BUILD_MACOS, true),eq(parameters.VSCODE_BUILD_MACOS_ARM64, true),eq(parameters.VSCODE_BUILD_WIN32, true),eq(parameters.VSCODE_BUILD_WIN32_ARM64, true)) }}: - - stage: CompileCLI - dependsOn: [] - jobs: - - ${{ if eq(parameters.VSCODE_BUILD_LINUX, true) }}: - - job: CLILinuxX64 - pool: - name: 1es-ubuntu-22.04-x64 - os: linux - steps: - - template: build/azure-pipelines/linux/cli-build-linux.yml@self - parameters: - VSCODE_CHECK_ONLY: ${{ variables.VSCODE_CIBUILD }} - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_BUILD_LINUX: ${{ parameters.VSCODE_BUILD_LINUX }} - - - ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_LINUX_ARMHF, true)) }}: - - job: CLILinuxGnuARM - pool: - name: 1es-ubuntu-22.04-x64 - os: linux - steps: - - template: build/azure-pipelines/linux/cli-build-linux.yml@self - parameters: - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_BUILD_LINUX_ARMHF: ${{ parameters.VSCODE_BUILD_LINUX_ARMHF }} - - - ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_LINUX_ARM64, true)) }}: - - job: CLILinuxGnuAarch64 - pool: - name: 1es-ubuntu-22.04-x64 - os: linux - steps: - - template: build/azure-pipelines/linux/cli-build-linux.yml@self - parameters: - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_BUILD_LINUX_ARM64: ${{ parameters.VSCODE_BUILD_LINUX_ARM64 }} - - - ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_ALPINE, true)) }}: - - job: CLIAlpineX64 - pool: - name: 1es-ubuntu-22.04-x64 - os: 
linux - steps: - - template: build/azure-pipelines/alpine/cli-build-alpine.yml@self - parameters: - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_BUILD_ALPINE: ${{ parameters.VSCODE_BUILD_ALPINE }} - - - ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_ALPINE_ARM64, true)) }}: - - job: CLIAlpineARM64 - pool: - name: 1es-ubuntu-22.04-x64 - os: linux - steps: - - template: build/azure-pipelines/alpine/cli-build-alpine.yml@self - parameters: - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_BUILD_ALPINE_ARM64: ${{ parameters.VSCODE_BUILD_ALPINE_ARM64 }} - - - ${{ if eq(parameters.VSCODE_BUILD_MACOS, true) }}: - - job: CLIMacOSX64 - pool: - name: Azure Pipelines - image: macOS-13 - os: macOS - variables: - # todo@connor4312 to diagnose build flakes - - name: MSRUSTUP_LOG - value: debug - steps: - - template: build/azure-pipelines/darwin/cli-build-darwin.yml@self - parameters: - VSCODE_CHECK_ONLY: ${{ variables.VSCODE_CIBUILD }} - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_BUILD_MACOS: ${{ parameters.VSCODE_BUILD_MACOS }} - - - ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_MACOS_ARM64, true)) }}: - - job: CLIMacOSARM64 - pool: - name: Azure Pipelines - image: macOS-13 - os: macOS - variables: - # todo@connor4312 to diagnose build flakes - - name: MSRUSTUP_LOG - value: debug - steps: - - template: build/azure-pipelines/darwin/cli-build-darwin.yml@self - parameters: - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_BUILD_MACOS_ARM64: ${{ parameters.VSCODE_BUILD_MACOS_ARM64 }} - - - ${{ if eq(parameters.VSCODE_BUILD_WIN32, true) }}: - - job: CLIWindowsX64 - pool: - name: 1es-windows-2022-x64 - os: windows - steps: - - template: build/azure-pipelines/win32/cli-build-win32.yml@self - parameters: - VSCODE_CHECK_ONLY: ${{ variables.VSCODE_CIBUILD }} - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_BUILD_WIN32: ${{ parameters.VSCODE_BUILD_WIN32 }} - - - ${{ if 
and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_WIN32_ARM64, true)) }}: - - job: CLIWindowsARM64 - pool: - name: 1es-windows-2022-x64 - os: windows - steps: - - template: build/azure-pipelines/win32/cli-build-win32.yml@self - parameters: - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_BUILD_WIN32_ARM64: ${{ parameters.VSCODE_BUILD_WIN32_ARM64 }} - - - ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_COMPILE_ONLY, false)) }}: - - stage: APIScan - dependsOn: [] - pool: - name: 1es-windows-2022-x64 - os: windows - jobs: - - job: WindowsAPIScan - steps: - - template: build/azure-pipelines/win32/sdl-scan-win32.yml@self - parameters: - VSCODE_ARCH: x64 - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - - - ${{ if and(eq(parameters.VSCODE_COMPILE_ONLY, false), eq(variables['VSCODE_BUILD_STAGE_WINDOWS'], true)) }}: - - stage: Windows - dependsOn: - - Compile - - ${{ if or(eq(parameters.VSCODE_BUILD_LINUX, true),eq(parameters.VSCODE_BUILD_LINUX_ARMHF, true),eq(parameters.VSCODE_BUILD_LINUX_ARM64, true),eq(parameters.VSCODE_BUILD_ALPINE, true),eq(parameters.VSCODE_BUILD_ALPINE_ARM64, true),eq(parameters.VSCODE_BUILD_MACOS, true),eq(parameters.VSCODE_BUILD_MACOS_ARM64, true),eq(parameters.VSCODE_BUILD_WIN32, true),eq(parameters.VSCODE_BUILD_WIN32_ARM64, true)) }}: - - CompileCLI - pool: - name: 1es-windows-2022-x64 - os: windows - jobs: - - ${{ if eq(variables['VSCODE_CIBUILD'], true) }}: - - job: WindowsElectronTests - displayName: Electron Tests - timeoutInMinutes: 30 - variables: - VSCODE_ARCH: x64 - steps: - - template: build/azure-pipelines/win32/product-build-win32.yml@self - parameters: - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_ARCH: x64 - VSCODE_CIBUILD: ${{ variables.VSCODE_CIBUILD }} - VSCODE_TEST_ARTIFACT_NAME: electron - VSCODE_RUN_ELECTRON_TESTS: true - - job: WindowsBrowserTests - displayName: Browser Tests - timeoutInMinutes: 30 - variables: - VSCODE_ARCH: x64 - steps: - - template: 
build/azure-pipelines/win32/product-build-win32.yml@self - parameters: - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_ARCH: x64 - VSCODE_CIBUILD: ${{ variables.VSCODE_CIBUILD }} - VSCODE_TEST_ARTIFACT_NAME: browser - VSCODE_RUN_BROWSER_TESTS: true - - job: WindowsRemoteTests - displayName: Remote Tests - timeoutInMinutes: 30 - variables: - VSCODE_ARCH: x64 - steps: - - template: build/azure-pipelines/win32/product-build-win32.yml@self - parameters: - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_ARCH: x64 - VSCODE_CIBUILD: ${{ variables.VSCODE_CIBUILD }} - VSCODE_TEST_ARTIFACT_NAME: remote - VSCODE_RUN_REMOTE_TESTS: true - - - ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_WIN32, true)) }}: - - job: Windows - timeoutInMinutes: 120 - variables: - VSCODE_ARCH: x64 - templateContext: - sdl: - suppression: - suppressionFile: $(Build.SourcesDirectory)\.config\guardian\.gdnsuppress - steps: - - template: build/azure-pipelines/win32/product-build-win32.yml@self - parameters: - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_ARCH: x64 - VSCODE_CIBUILD: ${{ variables.VSCODE_CIBUILD }} - VSCODE_RUN_ELECTRON_TESTS: ${{ eq(parameters.VSCODE_STEP_ON_IT, false) }} - VSCODE_RUN_BROWSER_TESTS: ${{ eq(parameters.VSCODE_STEP_ON_IT, false) }} - VSCODE_RUN_REMOTE_TESTS: ${{ eq(parameters.VSCODE_STEP_ON_IT, false) }} - - - job: WindowsCLISign - timeoutInMinutes: 90 - steps: - - template: build/azure-pipelines/win32/product-build-win32-cli-sign.yml@self - parameters: - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_BUILD_WIN32: ${{ parameters.VSCODE_BUILD_WIN32 }} - VSCODE_BUILD_WIN32_ARM64: ${{ parameters.VSCODE_BUILD_WIN32_ARM64 }} - - - ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_WIN32_ARM64, true)) }}: - - job: WindowsARM64 - timeoutInMinutes: 90 - variables: - VSCODE_ARCH: arm64 - templateContext: - sdl: - suppression: - suppressionFile: 
$(Build.SourcesDirectory)\.config\guardian\.gdnsuppress - steps: - - template: build/azure-pipelines/win32/product-build-win32.yml@self - parameters: - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_ARCH: arm64 - VSCODE_CIBUILD: ${{ variables.VSCODE_CIBUILD }} - - - ${{ if and(eq(parameters.VSCODE_COMPILE_ONLY, false), eq(variables['VSCODE_BUILD_STAGE_LINUX'], true)) }}: - - stage: Linux - dependsOn: - - Compile - - ${{ if or(eq(parameters.VSCODE_BUILD_LINUX, true),eq(parameters.VSCODE_BUILD_LINUX_ARMHF, true),eq(parameters.VSCODE_BUILD_LINUX_ARM64, true),eq(parameters.VSCODE_BUILD_ALPINE, true),eq(parameters.VSCODE_BUILD_ALPINE_ARM64, true),eq(parameters.VSCODE_BUILD_MACOS, true),eq(parameters.VSCODE_BUILD_MACOS_ARM64, true),eq(parameters.VSCODE_BUILD_WIN32, true),eq(parameters.VSCODE_BUILD_WIN32_ARM64, true)) }}: - - CompileCLI - pool: - name: 1es-ubuntu-22.04-x64 - os: linux - jobs: - - ${{ if eq(variables['VSCODE_CIBUILD'], true) }}: - - job: Linuxx64ElectronTest - displayName: Electron Tests - timeoutInMinutes: 30 - variables: - VSCODE_ARCH: x64 - NPM_ARCH: x64 - DISPLAY: ":10" - steps: - - template: build/azure-pipelines/linux/product-build-linux.yml@self - parameters: - VSCODE_ARCH: x64 - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_CIBUILD: ${{ variables.VSCODE_CIBUILD }} - VSCODE_TEST_ARTIFACT_NAME: electron - VSCODE_RUN_ELECTRON_TESTS: true - - job: Linuxx64BrowserTest - displayName: Browser Tests - timeoutInMinutes: 30 - variables: - VSCODE_ARCH: x64 - NPM_ARCH: x64 - DISPLAY: ":10" - steps: - - template: build/azure-pipelines/linux/product-build-linux.yml@self - parameters: - VSCODE_ARCH: x64 - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_CIBUILD: ${{ variables.VSCODE_CIBUILD }} - VSCODE_TEST_ARTIFACT_NAME: browser - VSCODE_RUN_BROWSER_TESTS: true - - job: Linuxx64RemoteTest - displayName: Remote Tests - timeoutInMinutes: 30 - variables: - VSCODE_ARCH: x64 - NPM_ARCH: x64 - DISPLAY: ":10" - steps: - - template: 
build/azure-pipelines/linux/product-build-linux.yml@self - parameters: - VSCODE_ARCH: x64 - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_CIBUILD: ${{ variables.VSCODE_CIBUILD }} - VSCODE_TEST_ARTIFACT_NAME: remote - VSCODE_RUN_REMOTE_TESTS: true - - - ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_LINUX, true)) }}: - - job: Linuxx64 - timeoutInMinutes: 90 - variables: - VSCODE_ARCH: x64 - NPM_ARCH: x64 - DISPLAY: ":10" - steps: - - template: build/azure-pipelines/linux/product-build-linux.yml@self - parameters: - VSCODE_ARCH: x64 - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_CIBUILD: ${{ variables.VSCODE_CIBUILD }} - VSCODE_RUN_ELECTRON_TESTS: ${{ eq(parameters.VSCODE_STEP_ON_IT, false) }} - VSCODE_RUN_BROWSER_TESTS: ${{ eq(parameters.VSCODE_STEP_ON_IT, false) }} - VSCODE_RUN_REMOTE_TESTS: ${{ eq(parameters.VSCODE_STEP_ON_IT, false) }} - - - ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_LINUX_ARMHF, true)) }}: - - job: LinuxArmhf - variables: - VSCODE_ARCH: armhf - NPM_ARCH: arm - steps: - - template: build/azure-pipelines/linux/product-build-linux.yml@self - parameters: - VSCODE_ARCH: armhf - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_CIBUILD: ${{ variables.VSCODE_CIBUILD }} - - - ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_LINUX_ARM64, true)) }}: - - job: LinuxArm64 - variables: - VSCODE_ARCH: arm64 - NPM_ARCH: arm64 - steps: - - template: build/azure-pipelines/linux/product-build-linux.yml@self - parameters: - VSCODE_ARCH: arm64 - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_CIBUILD: ${{ variables.VSCODE_CIBUILD }} - - - ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_COMPILE_ONLY, false), eq(variables['VSCODE_BUILD_STAGE_ALPINE'], true)) }}: - - stage: Alpine - dependsOn: - - Compile - - ${{ if or(eq(parameters.VSCODE_BUILD_LINUX, true),eq(parameters.VSCODE_BUILD_LINUX_ARMHF, 
true),eq(parameters.VSCODE_BUILD_LINUX_ARM64, true),eq(parameters.VSCODE_BUILD_ALPINE, true),eq(parameters.VSCODE_BUILD_ALPINE_ARM64, true),eq(parameters.VSCODE_BUILD_MACOS, true),eq(parameters.VSCODE_BUILD_MACOS_ARM64, true),eq(parameters.VSCODE_BUILD_WIN32, true),eq(parameters.VSCODE_BUILD_WIN32_ARM64, true)) }}: - - CompileCLI - pool: - name: 1es-ubuntu-22.04-x64 - os: linux - jobs: - - ${{ if eq(parameters.VSCODE_BUILD_ALPINE, true) }}: - - job: LinuxAlpine - variables: - VSCODE_ARCH: x64 - NPM_ARCH: x64 - steps: - - template: build/azure-pipelines/alpine/product-build-alpine.yml@self - - - ${{ if eq(parameters.VSCODE_BUILD_ALPINE_ARM64, true) }}: - - job: LinuxAlpineArm64 - timeoutInMinutes: 120 - variables: - VSCODE_ARCH: arm64 - NPM_ARCH: arm64 - steps: - - template: build/azure-pipelines/alpine/product-build-alpine.yml@self - - - ${{ if and(eq(parameters.VSCODE_COMPILE_ONLY, false), eq(variables['VSCODE_BUILD_STAGE_MACOS'], true)) }}: - - stage: macOS - dependsOn: - - Compile - - ${{ if or(eq(parameters.VSCODE_BUILD_LINUX, true),eq(parameters.VSCODE_BUILD_LINUX_ARMHF, true),eq(parameters.VSCODE_BUILD_LINUX_ARM64, true),eq(parameters.VSCODE_BUILD_ALPINE, true),eq(parameters.VSCODE_BUILD_ALPINE_ARM64, true),eq(parameters.VSCODE_BUILD_MACOS, true),eq(parameters.VSCODE_BUILD_MACOS_ARM64, true),eq(parameters.VSCODE_BUILD_WIN32, true),eq(parameters.VSCODE_BUILD_WIN32_ARM64, true)) }}: - - CompileCLI - pool: - name: AcesShared - os: macOS - variables: - BUILDSECMON_OPT_IN: true - jobs: - - ${{ if eq(variables['VSCODE_CIBUILD'], true) }}: - - job: macOSElectronTest - displayName: Electron Tests - timeoutInMinutes: 30 - variables: - VSCODE_ARCH: arm64 - steps: - - template: build/azure-pipelines/darwin/product-build-darwin.yml@self - parameters: - VSCODE_ARCH: arm64 - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_CIBUILD: ${{ variables.VSCODE_CIBUILD }} - VSCODE_TEST_ARTIFACT_NAME: electron - VSCODE_RUN_ELECTRON_TESTS: true - - job: macOSBrowserTest - 
displayName: Browser Tests - timeoutInMinutes: 30 - variables: - VSCODE_ARCH: arm64 - steps: - - template: build/azure-pipelines/darwin/product-build-darwin.yml@self - parameters: - VSCODE_ARCH: arm64 - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_CIBUILD: ${{ variables.VSCODE_CIBUILD }} - VSCODE_TEST_ARTIFACT_NAME: browser - VSCODE_RUN_BROWSER_TESTS: true - - job: macOSRemoteTest - displayName: Remote Tests - timeoutInMinutes: 30 - variables: - VSCODE_ARCH: arm64 - steps: - - template: build/azure-pipelines/darwin/product-build-darwin.yml@self - parameters: - VSCODE_ARCH: arm64 - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_CIBUILD: ${{ variables.VSCODE_CIBUILD }} - VSCODE_TEST_ARTIFACT_NAME: remote - VSCODE_RUN_REMOTE_TESTS: true - - - ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_MACOS, true)) }}: - - job: macOS - timeoutInMinutes: 90 - variables: - VSCODE_ARCH: x64 - BUILDS_API_URL: $(System.CollectionUri)$(System.TeamProject)/_apis/build/builds/$(Build.BuildId)/ - steps: - - template: build/azure-pipelines/darwin/product-build-darwin.yml@self - parameters: - VSCODE_ARCH: x64 - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_CIBUILD: ${{ variables.VSCODE_CIBUILD }} - - - job: macOSCLI - timeoutInMinutes: 90 - steps: - - template: build/azure-pipelines/darwin/product-build-darwin-cli-sign.yml@self - parameters: - VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_BUILD_MACOS: ${{ parameters.VSCODE_BUILD_MACOS }} - VSCODE_BUILD_MACOS_ARM64: ${{ parameters.VSCODE_BUILD_MACOS_ARM64 }} - - - ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_MACOS_ARM64, true)) }}: - - job: macOSARM64 - timeoutInMinutes: 90 - variables: - VSCODE_ARCH: arm64 - BUILDS_API_URL: $(System.CollectionUri)$(System.TeamProject)/_apis/build/builds/$(Build.BuildId)/ - steps: - - template: build/azure-pipelines/darwin/product-build-darwin.yml@self - parameters: - VSCODE_ARCH: arm64 - 
VSCODE_QUALITY: ${{ variables.VSCODE_QUALITY }} - VSCODE_CIBUILD: ${{ variables.VSCODE_CIBUILD }} - VSCODE_RUN_ELECTRON_TESTS: ${{ eq(parameters.VSCODE_STEP_ON_IT, false) }} - VSCODE_RUN_BROWSER_TESTS: ${{ eq(parameters.VSCODE_STEP_ON_IT, false) }} - VSCODE_RUN_REMOTE_TESTS: ${{ eq(parameters.VSCODE_STEP_ON_IT, false) }} - - - ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(variables['VSCODE_BUILD_MACOS_UNIVERSAL'], true)) }}: - - job: macOSUniversal - timeoutInMinutes: 90 - variables: - VSCODE_ARCH: universal - BUILDS_API_URL: $(System.CollectionUri)$(System.TeamProject)/_apis/build/builds/$(Build.BuildId)/ - steps: - - template: build/azure-pipelines/darwin/product-build-darwin-universal.yml@self - - - ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_COMPILE_ONLY, false), eq(variables['VSCODE_BUILD_STAGE_WEB'], true)) }}: - - stage: Web - dependsOn: - - Compile - pool: - name: 1es-ubuntu-22.04-x64 - os: linux - jobs: - - ${{ if eq(parameters.VSCODE_BUILD_WEB, true) }}: - - job: Web - variables: - VSCODE_ARCH: x64 - steps: - - template: build/azure-pipelines/web/product-build-web.yml@self - - - ${{ if eq(variables['VSCODE_PUBLISH'], 'true') }}: - - stage: Publish - dependsOn: [] - pool: - name: 1es-windows-2022-x64 - os: windows - variables: - - name: BUILDS_API_URL - value: $(System.CollectionUri)$(System.TeamProject)/_apis/build/builds/$(Build.BuildId)/ - jobs: - - job: PublishBuild - timeoutInMinutes: 180 - displayName: Publish Build - steps: - - template: build/azure-pipelines/product-publish.yml@self - - - ${{ if and(parameters.VSCODE_RELEASE, eq(variables['VSCODE_PRIVATE_BUILD'], false)) }}: - - stage: ApproveRelease - dependsOn: [] # run in parallel to compile stage - pool: - name: 1es-ubuntu-22.04-x64 - os: linux - jobs: - - job: ApproveRelease - displayName: "Approve Release" - variables: - - group: VSCodePeerApproval - - name: skipComponentGovernanceDetection - value: true - - - ${{ if or(and(parameters.VSCODE_RELEASE, 
eq(variables['VSCODE_PRIVATE_BUILD'], false)), and(in(parameters.VSCODE_QUALITY, 'insider', 'exploration'), eq(variables['VSCODE_SCHEDULEDBUILD'], true))) }}: - - stage: Release - dependsOn: - - Publish - - ${{ if and(parameters.VSCODE_RELEASE, eq(variables['VSCODE_PRIVATE_BUILD'], false)) }}: - - ApproveRelease - pool: - name: 1es-ubuntu-22.04-x64 - os: linux - jobs: - - job: ReleaseBuild - displayName: Release Build - steps: - - template: build/azure-pipelines/product-release.yml@self - parameters: - VSCODE_RELEASE: ${{ parameters.VSCODE_RELEASE }} diff --git a/build/azure-pipelines/product-compile.yml b/build/azure-pipelines/product-compile.yml deleted file mode 100644 index a69942b9..00000000 --- a/build/azure-pipelines/product-compile.yml +++ /dev/null @@ -1,176 +0,0 @@ -parameters: - - name: VSCODE_QUALITY - type: string - -steps: - - task: NodeTool@0 - inputs: - versionSource: fromFile - versionFilePath: .nvmrc - nodejsMirror: https://github.com/joaomoreno/node-mirror/releases/download - - - ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}: - - template: ./distro/download-distro.yml@self - - - task: AzureKeyVault@2 - displayName: "Azure Key Vault: Get Secrets" - inputs: - azureSubscription: vscode - KeyVaultName: vscode-build-secrets - SecretsFilter: "github-distro-mixin-password" - - - script: node build/setup-npm-registry.js $NPM_REGISTRY - condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM Registry - - - script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js compile $(node -p process.arch) > .build/packagelockhash - displayName: Prepare node_modules cache key - - - task: Cache@2 - inputs: - key: '"node_modules" | .build/packagelockhash' - path: .build/node_modules_cache - cacheHitVar: NODE_MODULES_RESTORED - displayName: Restore node_modules cache - - - script: tar -xzf .build/node_modules_cache/cache.tgz - condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true')) - 
displayName: Extract node_modules cache - - - script: | - set -e - # Set the private NPM registry to the global npmrc file - # so that authentication works for subfolders like build/, remote/, extensions/ etc - # which does not have their own .npmrc file - npm config set registry "$NPM_REGISTRY" - echo "##vso[task.setvariable variable=NPMRC_PATH]$(npm config get userconfig)" - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM - - - task: npmAuthenticate@0 - inputs: - workingFile: $(NPMRC_PATH) - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM Authentication - - - ${{ if eq(parameters.VSCODE_QUALITY, 'oss') }}: - - script: sudo apt update -y && sudo apt install -y build-essential pkg-config libx11-dev libx11-xcb-dev libxkbfile-dev libnotify-bin libkrb5-dev - displayName: Install build tools - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - - - script: | - set -e - - for i in {1..5}; do # try 5 times - npm ci && break - if [ $i -eq 5 ]; then - echo "Npm install failed too many times" >&2 - exit 1 - fi - echo "Npm install failed $i, trying again..." 
- done - env: - ELECTRON_SKIP_BINARY_DOWNLOAD: 1 - PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1 - GITHUB_TOKEN: "$(github-distro-mixin-password)" - displayName: Install dependencies - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - - - ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}: - - script: node build/azure-pipelines/distro/mixin-npm - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - displayName: Mixin distro node modules - - - script: | - set -e - node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt - mkdir -p .build/node_modules_cache - tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - displayName: Create node_modules archive - - - ${{ if eq(parameters.VSCODE_QUALITY, 'oss') }}: - - script: npm run compile - workingDirectory: build - displayName: Compile /build/ folder - - - script: .github/workflows/check-clean-git-state.sh - displayName: Check /build/ folder - - - ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}: - - script: node build/azure-pipelines/distro/mixin-quality - displayName: Mixin distro quality - - - template: common/install-builtin-extensions.yml@self - - - ${{ if eq(parameters.VSCODE_QUALITY, 'oss') }}: - - script: npm exec -- npm-run-all -lp core-ci-pr extensions-ci-pr hygiene eslint valid-layers-check define-class-fields-check vscode-dts-compile-check tsec-compile-check - env: - GITHUB_TOKEN: "$(github-distro-mixin-password)" - displayName: Compile & Hygiene (OSS) - - ${{ else }}: - - script: npm exec -- npm-run-all -lp core-ci extensions-ci hygiene eslint valid-layers-check define-class-fields-check vscode-dts-compile-check tsec-compile-check - env: - GITHUB_TOKEN: "$(github-distro-mixin-password)" - displayName: Compile & Hygiene (non-OSS) - - - ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}: - - script: | - set -e - npm run compile - displayName: Compile 
smoke test suites (non-OSS) - workingDirectory: test/smoke - condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false')) - - - script: | - set -e - npm run compile - displayName: Compile integration test suites (non-OSS) - workingDirectory: test/integration/browser - condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false')) - - - task: AzureCLI@2 - displayName: Fetch secrets - inputs: - azureSubscription: vscode - scriptType: pscore - scriptLocation: inlineScript - addSpnToEnvironment: true - inlineScript: | - Write-Host "##vso[task.setvariable variable=AZURE_TENANT_ID]$env:tenantId" - Write-Host "##vso[task.setvariable variable=AZURE_CLIENT_ID]$env:servicePrincipalId" - Write-Host "##vso[task.setvariable variable=AZURE_ID_TOKEN;issecret=true]$env:idToken" - - - script: | - set -e - AZURE_STORAGE_ACCOUNT="vscodeweb" \ - AZURE_TENANT_ID="$(AZURE_TENANT_ID)" \ - AZURE_CLIENT_ID="$(AZURE_CLIENT_ID)" \ - AZURE_ID_TOKEN="$(AZURE_ID_TOKEN)" \ - node build/azure-pipelines/upload-sourcemaps - displayName: Upload sourcemaps to Azure - - - script: ./build/azure-pipelines/common/extract-telemetry.sh - displayName: Generate lists of telemetry events - - - script: tar -cz --exclude='.build/node_modules_cache' --exclude='.build/node_modules_list.txt' --exclude='.build/distro' -f $(Build.ArtifactStagingDirectory)/compilation.tar.gz $(ls -d .build out-* test/integration/browser/out test/smoke/out test/automation/out 2>/dev/null) - displayName: Compress compilation artifact - - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: $(Build.ArtifactStagingDirectory)/compilation.tar.gz - artifactName: Compilation - sbomEnabled: false - displayName: Publish compilation artifact - - - script: npm run download-builtin-extensions-cg - env: - GITHUB_TOKEN: "$(github-distro-mixin-password)" - displayName: Download component details of built-in extensions - - - task: 
ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0 - displayName: "Component Detection" - inputs: - sourceScanPath: $(Build.SourcesDirectory) - alertWarningLevel: Medium - continueOnError: true diff --git a/build/azure-pipelines/product-npm-package-validate.yml b/build/azure-pipelines/product-npm-package-validate.yml deleted file mode 100644 index 05f2cd8e..00000000 --- a/build/azure-pipelines/product-npm-package-validate.yml +++ /dev/null @@ -1,94 +0,0 @@ -trigger: none - -pr: - branches: - include: ["main"] - paths: - include: ["package.json", "package-lock.json"] - -variables: - - name: NPM_REGISTRY - value: "https://pkgs.dev.azure.com/monacotools/Monaco/_packaging/vscode/npm/registry/" - - name: VSCODE_CIBUILD - value: ${{ in(variables['Build.Reason'], 'IndividualCI', 'BatchedCI') }} - - name: VSCODE_QUALITY - value: oss - -jobs: - - ${{ if ne(variables['VSCODE_CIBUILD'], true) }}: - - job: ValidateNpmPackage - displayName: Valiate NPM package against Terrapin - pool: - name: 1es-ubuntu-22.04-x64 - os: linux - timeoutInMinutes: 40000 - continueOnError: true - variables: - VSCODE_ARCH: x64 - steps: - - task: NodeTool@0 - inputs: - versionSource: fromFile - versionFilePath: .nvmrc - nodejsMirror: https://github.com/joaomoreno/node-mirror/releases/download - - - script: node build/setup-npm-registry.js $NPM_REGISTRY - condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM Registry - - - script: | - set -e - # Set the private NPM registry to the global npmrc file - # so that authentication works for subfolders like build/, remote/, extensions/ etc - # which does not have their own .npmrc file - echo "NPMRC Path: $(npm config get userconfig)" - echo "NPM Registry: $(npm config get registry)" - npm config set registry "$NPM_REGISTRY" - echo "##vso[task.setvariable variable=NPMRC_PATH]$(npm config get userconfig)" - condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 
'none')) - displayName: Setup NPM - - - task: npmAuthenticate@0 - inputs: - workingFile: $(NPMRC_PATH) - condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM Authentication - - - script: sudo apt update -y && sudo apt install -y build-essential pkg-config libx11-dev libx11-xcb-dev libxkbfile-dev libnotify-bin libkrb5-dev - displayName: Install build tools - condition: succeeded() - - - script: | - set -e - - for attempt in {1..6}; do - if [ $attempt -gt 1 ]; then - echo "Attempt $attempt: Waiting for 1 hour before retrying..." - sleep 3600 - fi - - echo "Attempt $attempt: Running npm ci" - if npm i --ignore-scripts; then - if node build/npm/postinstall.js; then - echo "npm i succeeded on attempt $attempt" - exit 0 - else - echo "node build/npm/postinstall.js failed on attempt $attempt" - fi - else - echo "npm i failed on attempt $attempt" - fi - done - - echo "npm i failed after 6 attempts" - exit 1 - env: - npm_command: 'install --ignore-scripts' - ELECTRON_SKIP_BINARY_DOWNLOAD: 1 - PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1 - GITHUB_TOKEN: "$(github-distro-mixin-password)" - displayName: Install dependencies with retries - timeoutInMinutes: 400 - - - script: .github/workflows/check-clean-git-state.sh - displayName: Check clean git state diff --git a/build/azure-pipelines/product-publish.yml b/build/azure-pipelines/product-publish.yml deleted file mode 100644 index 27d6c2b3..00000000 --- a/build/azure-pipelines/product-publish.yml +++ /dev/null @@ -1,96 +0,0 @@ -steps: - - task: NodeTool@0 - inputs: - versionSource: fromFile - versionFilePath: .nvmrc - nodejsMirror: https://github.com/joaomoreno/node-mirror/releases/download - - - task: AzureKeyVault@2 - displayName: "Azure Key Vault: Get Secrets" - inputs: - azureSubscription: vscode - KeyVaultName: vscode-build-secrets - SecretsFilter: "github-distro-mixin-password" - - - task: AzureKeyVault@2 - displayName: "Azure Key Vault: Get ESRP Secrets" - inputs: - azureSubscription: 
vscode-esrp - KeyVaultName: vscode-esrp - SecretsFilter: esrp-auth,esrp-sign - - # allow-any-unicode-next-line - - pwsh: Write-Host "##vso[build.addbuildtag]๐Ÿš€" - displayName: Add build tag - - - pwsh: | - npm ci - workingDirectory: build - env: - GITHUB_TOKEN: "$(github-distro-mixin-password)" - displayName: Install build dependencies - - - download: current - patterns: "**/artifacts_processed_*.txt" - displayName: Download all artifacts_processed text files - - - task: AzureCLI@2 - displayName: Fetch secrets - inputs: - azureSubscription: vscode - scriptType: pscore - scriptLocation: inlineScript - addSpnToEnvironment: true - inlineScript: | - Write-Host "##vso[task.setvariable variable=AZURE_TENANT_ID]$env:tenantId" - Write-Host "##vso[task.setvariable variable=AZURE_CLIENT_ID]$env:servicePrincipalId" - Write-Host "##vso[task.setvariable variable=AZURE_ID_TOKEN;issecret=true]$env:idToken" - - - pwsh: | - . build/azure-pipelines/win32/exec.ps1 - - if (Test-Path "$(Pipeline.Workspace)/artifacts_processed_*/artifacts_processed_*.txt") { - Write-Host "Artifacts already processed so a build must have already been created." 
- return - } - - $VERSION = node -p "require('./package.json').version" - Write-Host "Creating build with version: $VERSION" - exec { node build/azure-pipelines/common/createBuild.js $VERSION } - env: - AZURE_TENANT_ID: "$(AZURE_TENANT_ID)" - AZURE_CLIENT_ID: "$(AZURE_CLIENT_ID)" - AZURE_ID_TOKEN: "$(AZURE_ID_TOKEN)" - displayName: Create build if it hasn't been created before - - - pwsh: | - $publishAuthTokens = (node build/azure-pipelines/common/getPublishAuthTokens) - Write-Host "##vso[task.setvariable variable=PUBLISH_AUTH_TOKENS;issecret=true]$publishAuthTokens" - env: - AZURE_TENANT_ID: "$(AZURE_TENANT_ID)" - AZURE_CLIENT_ID: "$(AZURE_CLIENT_ID)" - AZURE_ID_TOKEN: "$(AZURE_ID_TOKEN)" - displayName: Get publish auth tokens - - - pwsh: node build/azure-pipelines/common/publish.js - env: - GITHUB_TOKEN: "$(github-distro-mixin-password)" - AZURE_TENANT_ID: "$(AZURE_TENANT_ID)" - AZURE_CLIENT_ID: "$(AZURE_CLIENT_ID)" - AZURE_ID_TOKEN: "$(AZURE_ID_TOKEN)" - SYSTEM_ACCESSTOKEN: $(System.AccessToken) - PUBLISH_AUTH_TOKENS: "$(PUBLISH_AUTH_TOKENS)" - RELEASE_TENANT_ID: "$(ESRP_TENANT_ID)" - RELEASE_CLIENT_ID: "$(ESRP_CLIENT_ID)" - RELEASE_AUTH_CERT: "$(esrp-auth)" - RELEASE_REQUEST_SIGNING_CERT: "$(esrp-sign)" - displayName: Process artifacts - retryCountOnTaskFailure: 3 - - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: $(Pipeline.Workspace)/artifacts_processed_$(System.StageAttempt)/artifacts_processed_$(System.StageAttempt).txt - artifactName: artifacts_processed_$(System.StageAttempt) - sbomEnabled: false - displayName: Publish the artifacts processed for this stage attempt - condition: always() diff --git a/build/azure-pipelines/product-release.yml b/build/azure-pipelines/product-release.yml deleted file mode 100644 index 87896f93..00000000 --- a/build/azure-pipelines/product-release.yml +++ /dev/null @@ -1,34 +0,0 @@ -parameters: - - name: VSCODE_RELEASE - type: boolean - -steps: - - task: NodeTool@0 - inputs: - versionSource: fromFile - 
versionFilePath: .nvmrc - nodejsMirror: https://github.com/joaomoreno/node-mirror/releases/download - - - task: AzureCLI@2 - displayName: Fetch secrets - inputs: - azureSubscription: vscode - scriptType: pscore - scriptLocation: inlineScript - addSpnToEnvironment: true - inlineScript: | - Write-Host "##vso[task.setvariable variable=AZURE_TENANT_ID]$env:tenantId" - Write-Host "##vso[task.setvariable variable=AZURE_CLIENT_ID]$env:servicePrincipalId" - Write-Host "##vso[task.setvariable variable=AZURE_ID_TOKEN;issecret=true]$env:idToken" - - - script: npm ci - workingDirectory: build - displayName: Install /build dependencies - - - script: | - set -e - AZURE_TENANT_ID="$(AZURE_TENANT_ID)" \ - AZURE_CLIENT_ID="$(AZURE_CLIENT_ID)" \ - AZURE_ID_TOKEN="$(AZURE_ID_TOKEN)" \ - node build/azure-pipelines/common/releaseBuild.js ${{ parameters.VSCODE_RELEASE }} - displayName: Release build diff --git a/build/azure-pipelines/publish-types/check-version.js b/build/azure-pipelines/publish-types/check-version.js deleted file mode 100644 index 5bd80a69..00000000 --- a/build/azure-pipelines/publish-types/check-version.js +++ /dev/null @@ -1,40 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const child_process_1 = __importDefault(require("child_process")); -let tag = ''; -try { - tag = child_process_1.default - .execSync('git describe --tags `git rev-list --tags --max-count=1`') - .toString() - .trim(); - if (!isValidTag(tag)) { - throw Error(`Invalid tag ${tag}`); - } -} -catch (err) { - console.error(err); - console.error('Failed to update types'); - process.exit(1); -} -function isValidTag(t) { - if (t.split('.').length !== 3) { - return false; - } - const [major, minor, bug] = t.split('.'); - // Only release for tags like 1.34.0 - if (bug !== '0') { - return false; - } - if (isNaN(parseInt(major, 10)) || isNaN(parseInt(minor, 10))) { - return false; - } - return true; -} -//# sourceMappingURL=check-version.js.map \ No newline at end of file diff --git a/build/azure-pipelines/publish-types/check-version.ts b/build/azure-pipelines/publish-types/check-version.ts deleted file mode 100644 index 4496ed93..00000000 --- a/build/azure-pipelines/publish-types/check-version.ts +++ /dev/null @@ -1,41 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ - -import cp from 'child_process'; - -let tag = ''; -try { - tag = cp - .execSync('git describe --tags `git rev-list --tags --max-count=1`') - .toString() - .trim(); - - if (!isValidTag(tag)) { - throw Error(`Invalid tag ${tag}`); - } -} catch (err) { - console.error(err); - console.error('Failed to update types'); - process.exit(1); -} - -function isValidTag(t: string) { - if (t.split('.').length !== 3) { - return false; - } - - const [major, minor, bug] = t.split('.'); - - // Only release for tags like 1.34.0 - if (bug !== '0') { - return false; - } - - if (isNaN(parseInt(major, 10)) || isNaN(parseInt(minor, 10))) { - return false; - } - - return true; -} diff --git a/build/azure-pipelines/publish-types/publish-types.yml b/build/azure-pipelines/publish-types/publish-types.yml deleted file mode 100644 index 5f60ae5a..00000000 --- a/build/azure-pipelines/publish-types/publish-types.yml +++ /dev/null @@ -1,84 +0,0 @@ -# Publish @types/vscode for each release - -trigger: - branches: - include: ["refs/tags/*"] - -pr: none - -pool: - vmImage: ubuntu-latest - -steps: - - task: NodeTool@0 - inputs: - versionSource: fromFile - versionFilePath: .nvmrc - nodejsMirror: https://github.com/joaomoreno/node-mirror/releases/download - - - bash: | - TAG_VERSION=$(git describe --tags `git rev-list --tags --max-count=1`) - CHANNEL="C1C14HJ2F" - - if [ "$TAG_VERSION" == "1.999.0" ]; then - MESSAGE=". Someone pushed 1.999.0 tag. Please delete it ASAP from remote and local." 
- - curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \ - -H 'Content-type: application/json; charset=utf-8' \ - --data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$MESSAGE"'"}' \ - https://slack.com/api/chat.postMessage - - exit 1 - fi - displayName: Check 1.999.0 tag - - - bash: | - # Install build dependencies - (cd build && npm ci) - node build/azure-pipelines/publish-types/check-version.js - displayName: Check version - - - bash: | - git config --global user.email "vscode@microsoft.com" - git config --global user.name "VSCode" - - git clone https://$(GITHUB_TOKEN)@github.com/DefinitelyTyped/DefinitelyTyped.git --depth=1 - node build/azure-pipelines/publish-types/update-types.js - - TAG_VERSION=$(git describe --tags `git rev-list --tags --max-count=1`) - - cd DefinitelyTyped - - git diff --color | cat - git add -A - git status - git checkout -b "vscode-types-$TAG_VERSION" - git commit -m "VS Code $TAG_VERSION Extension API" - git push origin "vscode-types-$TAG_VERSION" - - displayName: Push update to DefinitelyTyped - - - bash: | - TAG_VERSION=$(git describe --tags `git rev-list --tags --max-count=1`) - CHANNEL="C1C14HJ2F" - - MESSAGE="DefinitelyTyped/DefinitelyTyped#vscode-types-$TAG_VERSION created. Endgame champion, please open this link, examine changes and create a PR:" - LINK="https://github.com/DefinitelyTyped/DefinitelyTyped/compare/vscode-types-$TAG_VERSION?quick_pull=1&body=Updating%20VS%20Code%20Extension%20API.%20See%20https%3A%2F%2Fgithub.com%2Fmicrosoft%2Fvscode%2Fissues%2F70175%20for%20details." - MESSAGE2="[@jrieken, @kmaetzel, @egamma]. Please review and merge PR to publish @types/vscode." 
- - curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \ - -H 'Content-type: application/json; charset=utf-8' \ - --data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$MESSAGE"'"}' \ - https://slack.com/api/chat.postMessage - - curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \ - -H 'Content-type: application/json; charset=utf-8' \ - --data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$LINK"'"}' \ - https://slack.com/api/chat.postMessage - - curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \ - -H 'Content-type: application/json; charset=utf-8' \ - --data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$MESSAGE2"'"}' \ - https://slack.com/api/chat.postMessage - - displayName: Send message linking to changes on Slack diff --git a/build/azure-pipelines/publish-types/update-types.js b/build/azure-pipelines/publish-types/update-types.js deleted file mode 100644 index 29f9bfcf..00000000 --- a/build/azure-pipelines/publish-types/update-types.js +++ /dev/null @@ -1,76 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const fs_1 = __importDefault(require("fs")); -const child_process_1 = __importDefault(require("child_process")); -const path_1 = __importDefault(require("path")); -let tag = ''; -try { - tag = child_process_1.default - .execSync('git describe --tags `git rev-list --tags --max-count=1`') - .toString() - .trim(); - const dtsUri = `https://raw.githubusercontent.com/microsoft/vscode/${tag}/src/vscode-dts/vscode.d.ts`; - const outPath = path_1.default.resolve(process.cwd(), 'DefinitelyTyped/types/vscode/index.d.ts'); - child_process_1.default.execSync(`curl ${dtsUri} --output ${outPath}`); - updateDTSFile(outPath, tag); - console.log(`Done updating vscode.d.ts at ${outPath}`); -} -catch (err) { - console.error(err); - console.error('Failed to update types'); - process.exit(1); -} -function updateDTSFile(outPath, tag) { - const oldContent = fs_1.default.readFileSync(outPath, 'utf-8'); - const newContent = getNewFileContent(oldContent, tag); - fs_1.default.writeFileSync(outPath, newContent); -} -function repeat(str, times) { - const result = new Array(times); - for (let i = 0; i < times; i++) { - result[i] = str; - } - return result.join(''); -} -function convertTabsToSpaces(str) { - return str.replace(/\t/gm, value => repeat(' ', value.length)); -} -function getNewFileContent(content, tag) { - const oldheader = [ - `/*---------------------------------------------------------------------------------------------`, - ` * Copyright (c) Microsoft Corporation. All rights reserved.`, - ` * Licensed under the MIT License. 
See License.txt in the project root for license information.`, - ` *--------------------------------------------------------------------------------------------*/` - ].join('\n'); - return convertTabsToSpaces(getNewFileHeader(tag) + content.slice(oldheader.length)); -} -function getNewFileHeader(tag) { - const [major, minor] = tag.split('.'); - const shorttag = `${major}.${minor}`; - const header = [ - `// Type definitions for Visual Studio Code ${shorttag}`, - `// Project: https://github.com/microsoft/vscode`, - `// Definitions by: Visual Studio Code Team, Microsoft `, - `// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped`, - ``, - `/*---------------------------------------------------------------------------------------------`, - ` * Copyright (c) Microsoft Corporation. All rights reserved.`, - ` * Licensed under the MIT License.`, - ` * See https://github.com/microsoft/vscode/blob/main/LICENSE.txt for license information.`, - ` *--------------------------------------------------------------------------------------------*/`, - ``, - `/**`, - ` * Type Definition for Visual Studio Code ${shorttag} Extension API`, - ` * See https://code.visualstudio.com/api for more information`, - ` */` - ].join('\n'); - return header; -} -//# sourceMappingURL=update-types.js.map \ No newline at end of file diff --git a/build/azure-pipelines/publish-types/update-types.ts b/build/azure-pipelines/publish-types/update-types.ts deleted file mode 100644 index 3bb02b7a..00000000 --- a/build/azure-pipelines/publish-types/update-types.ts +++ /dev/null @@ -1,83 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. 
- *--------------------------------------------------------------------------------------------*/ - -import fs from 'fs'; -import cp from 'child_process'; -import path from 'path'; - -let tag = ''; -try { - tag = cp - .execSync('git describe --tags `git rev-list --tags --max-count=1`') - .toString() - .trim(); - - const dtsUri = `https://raw.githubusercontent.com/microsoft/vscode/${tag}/src/vscode-dts/vscode.d.ts`; - const outPath = path.resolve(process.cwd(), 'DefinitelyTyped/types/vscode/index.d.ts'); - cp.execFileSync('curl', [dtsUri, '--output', outPath]); - - updateDTSFile(outPath, tag); - - console.log(`Done updating vscode.d.ts at ${outPath}`); -} catch (err) { - console.error(err); - console.error('Failed to update types'); - process.exit(1); -} - -function updateDTSFile(outPath: string, tag: string) { - const oldContent = fs.readFileSync(outPath, 'utf-8'); - const newContent = getNewFileContent(oldContent, tag); - - fs.writeFileSync(outPath, newContent); -} - -function repeat(str: string, times: number): string { - const result = new Array(times); - for (let i = 0; i < times; i++) { - result[i] = str; - } - return result.join(''); -} - -function convertTabsToSpaces(str: string): string { - return str.replace(/\t/gm, value => repeat(' ', value.length)); -} - -function getNewFileContent(content: string, tag: string) { - const oldheader = [ - `/*---------------------------------------------------------------------------------------------`, - ` * Copyright (c) Microsoft Corporation. All rights reserved.`, - ` * Licensed under the MIT License. 
See License.txt in the project root for license information.`, - ` *--------------------------------------------------------------------------------------------*/` - ].join('\n'); - - return convertTabsToSpaces(getNewFileHeader(tag) + content.slice(oldheader.length)); -} - -function getNewFileHeader(tag: string) { - const [major, minor] = tag.split('.'); - const shorttag = `${major}.${minor}`; - - const header = [ - `// Type definitions for Visual Studio Code ${shorttag}`, - `// Project: https://github.com/microsoft/vscode`, - `// Definitions by: Visual Studio Code Team, Microsoft `, - `// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped`, - ``, - `/*---------------------------------------------------------------------------------------------`, - ` * Copyright (c) Microsoft Corporation. All rights reserved.`, - ` * Licensed under the MIT License.`, - ` * See https://github.com/microsoft/vscode/blob/main/LICENSE.txt for license information.`, - ` *--------------------------------------------------------------------------------------------*/`, - ``, - `/**`, - ` * Type Definition for Visual Studio Code ${shorttag} Extension API`, - ` * See https://code.visualstudio.com/api for more information`, - ` */` - ].join('\n'); - - return header; -} diff --git a/build/azure-pipelines/web/product-build-web.yml b/build/azure-pipelines/web/product-build-web.yml deleted file mode 100644 index 3f94460d..00000000 --- a/build/azure-pipelines/web/product-build-web.yml +++ /dev/null @@ -1,173 +0,0 @@ -steps: - - task: NodeTool@0 - inputs: - versionSource: fromFile - versionFilePath: .nvmrc - nodejsMirror: https://github.com/joaomoreno/node-mirror/releases/download - - - template: ../distro/download-distro.yml@self - - - task: AzureKeyVault@2 - displayName: "Azure Key Vault: Get Secrets" - inputs: - azureSubscription: vscode - KeyVaultName: vscode-build-secrets - SecretsFilter: "github-distro-mixin-password" - - - task: DownloadPipelineArtifact@2 - inputs: - artifact: 
Compilation - path: $(Build.ArtifactStagingDirectory) - displayName: Download compilation output - - - script: tar -xzf $(Build.ArtifactStagingDirectory)/compilation.tar.gz - displayName: Extract compilation output - - - script: node build/setup-npm-registry.js $NPM_REGISTRY - condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM Registry - - - script: mkdir -p .build && node build/azure-pipelines/common/computeNodeModulesCacheKey.js web $(node -p process.arch) > .build/packagelockhash - displayName: Prepare node_modules cache key - - - task: Cache@2 - inputs: - key: '"node_modules" | .build/packagelockhash' - path: .build/node_modules_cache - cacheHitVar: NODE_MODULES_RESTORED - displayName: Restore node_modules cache - - - script: tar -xzf .build/node_modules_cache/cache.tgz - condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true')) - displayName: Extract node_modules cache - - - script: | - set -e - # Set the private NPM registry to the global npmrc file - # so that authentication works for subfolders like build/, remote/, extensions/ etc - # which does not have their own .npmrc file - npm config set registry "$NPM_REGISTRY" - echo "##vso[task.setvariable variable=NPMRC_PATH]$(npm config get userconfig)" - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM - - - task: npmAuthenticate@0 - inputs: - workingFile: $(NPMRC_PATH) - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM Authentication - - - script: | - set -e - ./build/azure-pipelines/linux/apt-retry.sh sudo apt-get update - ./build/azure-pipelines/linux/apt-retry.sh sudo apt-get install -y libkrb5-dev - displayName: Setup system services - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - - - script: | - set -e - - for i in {1..5}; do # try 5 times - npm ci 
&& break - if [ $i -eq 5 ]; then - echo "Npm install failed too many times" >&2 - exit 1 - fi - echo "Npm install failed $i, trying again..." - done - env: - ELECTRON_SKIP_BINARY_DOWNLOAD: 1 - PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1 - GITHUB_TOKEN: "$(github-distro-mixin-password)" - displayName: Install dependencies - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - - - script: node build/azure-pipelines/distro/mixin-npm - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - displayName: Mixin distro node modules - - - script: | - set -e - node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt - mkdir -p .build/node_modules_cache - tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - displayName: Create node_modules archive - - - script: node build/azure-pipelines/distro/mixin-quality - displayName: Mixin distro quality - - - template: ../common/install-builtin-extensions.yml@self - - - script: | - set -e - npm run gulp vscode-web-min-ci - ARCHIVE_PATH=".build/web/vscode-web.tar.gz" - mkdir -p $(dirname $ARCHIVE_PATH) - tar --owner=0 --group=0 -czf $ARCHIVE_PATH -C .. 
vscode-web - echo "##vso[task.setvariable variable=WEB_PATH]$ARCHIVE_PATH" - env: - GITHUB_TOKEN: "$(github-distro-mixin-password)" - displayName: Build - - - task: AzureCLI@2 - displayName: Fetch secrets from Azure - inputs: - azureSubscription: vscode - scriptType: pscore - scriptLocation: inlineScript - addSpnToEnvironment: true - inlineScript: | - Write-Host "##vso[task.setvariable variable=AZURE_TENANT_ID]$env:tenantId" - Write-Host "##vso[task.setvariable variable=AZURE_CLIENT_ID]$env:servicePrincipalId" - Write-Host "##vso[task.setvariable variable=AZURE_ID_TOKEN;issecret=true]$env:idToken" - - - script: | - set -e - AZURE_STORAGE_ACCOUNT="vscodeweb" \ - AZURE_TENANT_ID="$(AZURE_TENANT_ID)" \ - AZURE_CLIENT_ID="$(AZURE_CLIENT_ID)" \ - AZURE_ID_TOKEN="$(AZURE_ID_TOKEN)" \ - node build/azure-pipelines/upload-cdn - displayName: Upload to CDN - - - script: | - set -e - AZURE_STORAGE_ACCOUNT="vscodeweb" \ - AZURE_TENANT_ID="$(AZURE_TENANT_ID)" \ - AZURE_CLIENT_ID="$(AZURE_CLIENT_ID)" \ - AZURE_ID_TOKEN="$(AZURE_ID_TOKEN)" \ - node build/azure-pipelines/upload-sourcemaps out-vscode-web-min out-vscode-web-min/vs/workbench/workbench.web.main.js.map - displayName: Upload sourcemaps (Web Main) - - - script: | - set -e - AZURE_STORAGE_ACCOUNT="vscodeweb" \ - AZURE_TENANT_ID="$(AZURE_TENANT_ID)" \ - AZURE_CLIENT_ID="$(AZURE_CLIENT_ID)" \ - AZURE_ID_TOKEN="$(AZURE_ID_TOKEN)" \ - node build/azure-pipelines/upload-sourcemaps out-vscode-web-min out-vscode-web-min/vs/workbench/workbench.web.main.internal.js.map - displayName: Upload sourcemaps (Web Internal) - - - script: | - set -e - AZURE_STORAGE_ACCOUNT="vscodeweb" \ - AZURE_TENANT_ID="$(AZURE_TENANT_ID)" \ - AZURE_CLIENT_ID="$(AZURE_CLIENT_ID)" \ - AZURE_ID_TOKEN="$(AZURE_ID_TOKEN)" \ - node build/azure-pipelines/upload-nlsmetadata - displayName: Upload NLS Metadata - - - script: echo "##vso[task.setvariable variable=ARTIFACT_PREFIX]attempt$(System.JobAttempt)_" - condition: and(succeededOrFailed(), 
notIn(variables['Agent.JobStatus'], 'Succeeded', 'SucceededWithIssues')) - displayName: Generate artifact prefix - - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: $(WEB_PATH) - artifactName: $(ARTIFACT_PREFIX)vscode_web_linux_standalone_archive-unsigned - sbomBuildDropPath: $(Agent.BuildDirectory)/vscode-web - sbomPackageName: "VS Code Web" - sbomPackageVersion: $(Build.SourceVersion) - condition: and(succeededOrFailed(), ne(variables['WEB_PATH'], '')) - displayName: Publish web archive diff --git a/build/azure-pipelines/win32/cli-build-win32.yml b/build/azure-pipelines/win32/cli-build-win32.yml deleted file mode 100644 index d61f0e72..00000000 --- a/build/azure-pipelines/win32/cli-build-win32.yml +++ /dev/null @@ -1,91 +0,0 @@ -parameters: - - name: VSCODE_BUILD_WIN32 - type: boolean - default: false - - name: VSCODE_BUILD_WIN32_ARM64 - type: boolean - default: false - - name: VSCODE_CHECK_ONLY - type: boolean - default: false - - name: VSCODE_QUALITY - type: string - -steps: - - task: NodeTool@0 - inputs: - versionSource: fromFile - versionFilePath: .nvmrc - nodejsMirror: https://github.com/joaomoreno/node-mirror/releases/download - - - ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}: - - template: ../cli/cli-apply-patches.yml@self - - - task: Npm@1 - displayName: Download openssl prebuilt - inputs: - command: custom - customCommand: pack @vscode-internal/openssl-prebuilt@0.0.11 - customRegistry: useFeed - customFeed: "Monaco/openssl-prebuilt" - workingDir: $(Build.ArtifactStagingDirectory) - - - powershell: | - mkdir $(Build.ArtifactStagingDirectory)/openssl - tar -xvzf $(Build.ArtifactStagingDirectory)/vscode-internal-openssl-prebuilt-0.0.11.tgz --strip-components=1 --directory=$(Build.ArtifactStagingDirectory)/openssl - displayName: Extract openssl prebuilt - - - template: ../cli/install-rust-win32.yml@self - parameters: - targets: - - ${{ if eq(parameters.VSCODE_BUILD_WIN32, true) }}: - - x86_64-pc-windows-msvc - - ${{ if 
eq(parameters.VSCODE_BUILD_WIN32_ARM64, true) }}: - - aarch64-pc-windows-msvc - - - ${{ if eq(parameters.VSCODE_BUILD_WIN32, true) }}: - - template: ../cli/cli-compile.yml@self - parameters: - VSCODE_QUALITY: ${{ parameters.VSCODE_QUALITY }} - VSCODE_CLI_TARGET: x86_64-pc-windows-msvc - VSCODE_CLI_ARTIFACT: unsigned_vscode_cli_win32_x64_cli - VSCODE_CHECK_ONLY: ${{ parameters.VSCODE_CHECK_ONLY }} - VSCODE_CLI_ENV: - OPENSSL_LIB_DIR: $(Build.ArtifactStagingDirectory)/openssl/x64-windows-static/lib - OPENSSL_INCLUDE_DIR: $(Build.ArtifactStagingDirectory)/openssl/x64-windows-static/include - RUSTFLAGS: "-Ctarget-feature=+crt-static -Clink-args=/guard:cf -Clink-args=/CETCOMPAT" - CFLAGS: "/guard:cf /Qspectre" - - - ${{ if eq(parameters.VSCODE_BUILD_WIN32_ARM64, true) }}: - - template: ../cli/cli-compile.yml@self - parameters: - VSCODE_QUALITY: ${{ parameters.VSCODE_QUALITY }} - VSCODE_CLI_TARGET: aarch64-pc-windows-msvc - VSCODE_CLI_ARTIFACT: unsigned_vscode_cli_win32_arm64_cli - VSCODE_CHECK_ONLY: ${{ parameters.VSCODE_CHECK_ONLY }} - VSCODE_CLI_ENV: - OPENSSL_LIB_DIR: $(Build.ArtifactStagingDirectory)/openssl/arm64-windows-static/lib - OPENSSL_INCLUDE_DIR: $(Build.ArtifactStagingDirectory)/openssl/arm64-windows-static/include - RUSTFLAGS: "-C target-feature=+crt-static -Clink-args=/guard:cf -Clink-args=/CETCOMPAT:NO" - CFLAGS: "/guard:cf /Qspectre" - - - ${{ if not(parameters.VSCODE_CHECK_ONLY) }}: - - ${{ if eq(parameters.VSCODE_BUILD_WIN32_ARM64, true) }}: - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: $(Build.ArtifactStagingDirectory)/unsigned_vscode_cli_win32_arm64_cli.zip - artifactName: unsigned_vscode_cli_win32_arm64_cli - sbomBuildDropPath: $(Build.ArtifactStagingDirectory)/cli - sbomPackageName: "VS Code Windows arm64 CLI (unsigned)" - sbomPackageVersion: $(Build.SourceVersion) - displayName: Publish unsigned_vscode_cli_win32_arm64_cli artifact - - - ${{ if eq(parameters.VSCODE_BUILD_WIN32, true) }}: - - task: 1ES.PublishPipelineArtifact@1 - 
inputs: - targetPath: $(Build.ArtifactStagingDirectory)/unsigned_vscode_cli_win32_x64_cli.zip - artifactName: unsigned_vscode_cli_win32_x64_cli - sbomBuildDropPath: $(Build.ArtifactStagingDirectory)/cli - sbomPackageName: "VS Code Windows x64 CLI (unsigned)" - sbomPackageVersion: $(Build.SourceVersion) - displayName: Publish unsigned_vscode_cli_win32_x64_cli artifact diff --git a/build/azure-pipelines/win32/codesign.js b/build/azure-pipelines/win32/codesign.js deleted file mode 100644 index 630f9a64..00000000 --- a/build/azure-pipelines/win32/codesign.js +++ /dev/null @@ -1,73 +0,0 @@ -"use strict"; -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ -Object.defineProperty(exports, "__esModule", { value: true }); -const zx_1 = require("zx"); -const codesign_1 = require("../common/codesign"); -const publish_1 = require("../common/publish"); -async function main() { - (0, zx_1.usePwsh)(); - const arch = (0, publish_1.e)('VSCODE_ARCH'); - const esrpCliDLLPath = (0, publish_1.e)('EsrpCliDllPath'); - const codeSigningFolderPath = (0, publish_1.e)('CodeSigningFolderPath'); - // Start the code sign processes in parallel - // 1. Codesign executables and shared libraries - // 2. Codesign Powershell scripts - // 3. Codesign context menu appx package (insiders only) - const codesignTask1 = (0, codesign_1.spawnCodesignProcess)(esrpCliDLLPath, 'sign-windows', codeSigningFolderPath, '*.dll,*.exe,*.node'); - const codesignTask2 = (0, codesign_1.spawnCodesignProcess)(esrpCliDLLPath, 'sign-windows-appx', codeSigningFolderPath, '*.ps1'); - const codesignTask3 = process.env['VSCODE_QUALITY'] === 'insider' - ? 
(0, codesign_1.spawnCodesignProcess)(esrpCliDLLPath, 'sign-windows-appx', codeSigningFolderPath, '*.appx') - : undefined; - // Codesign executables and shared libraries - (0, codesign_1.printBanner)('Codesign executables and shared libraries'); - await (0, codesign_1.streamProcessOutputAndCheckResult)('Codesign executables and shared libraries', codesignTask1); - // Codesign Powershell scripts - (0, codesign_1.printBanner)('Codesign Powershell scripts'); - await (0, codesign_1.streamProcessOutputAndCheckResult)('Codesign Powershell scripts', codesignTask2); - if (codesignTask3) { - // Codesign context menu appx package - (0, codesign_1.printBanner)('Codesign context menu appx package'); - await (0, codesign_1.streamProcessOutputAndCheckResult)('Codesign context menu appx package', codesignTask3); - } - // Create build artifact directory - await (0, zx_1.$) `New-Item -ItemType Directory -Path .build/win32-${arch} -Force`; - // Package client - if (process.env['BUILT_CLIENT']) { - // Product version - const version = await (0, zx_1.$) `node -p "require('../VSCode-win32-${arch}/resources/app/package.json').version"`; - (0, codesign_1.printBanner)('Package client'); - const clientArchivePath = `.build/win32-${arch}/VSCode-win32-${arch}-${version}.zip`; - await (0, zx_1.$) `7z.exe a -tzip ${clientArchivePath} ../VSCode-win32-${arch}/* "-xr!CodeSignSummary*.md"`.pipe(process.stdout); - await (0, zx_1.$) `7z.exe l ${clientArchivePath}`.pipe(process.stdout); - } - // Package server - if (process.env['BUILT_SERVER']) { - (0, codesign_1.printBanner)('Package server'); - const serverArchivePath = `.build/win32-${arch}/vscode-server-win32-${arch}.zip`; - await (0, zx_1.$) `7z.exe a -tzip ${serverArchivePath} ../vscode-server-win32-${arch}`.pipe(process.stdout); - await (0, zx_1.$) `7z.exe l ${serverArchivePath}`.pipe(process.stdout); - } - // Package server (web) - if (process.env['BUILT_WEB']) { - (0, codesign_1.printBanner)('Package server (web)'); - const webArchivePath = 
`.build/win32-${arch}/vscode-server-win32-${arch}-web.zip`; - await (0, zx_1.$) `7z.exe a -tzip ${webArchivePath} ../vscode-server-win32-${arch}-web`.pipe(process.stdout); - await (0, zx_1.$) `7z.exe l ${webArchivePath}`.pipe(process.stdout); - } - // Sign setup - if (process.env['BUILT_CLIENT']) { - (0, codesign_1.printBanner)('Sign setup packages (system, user)'); - const task = (0, zx_1.$) `npm exec -- npm-run-all -lp "gulp vscode-win32-${arch}-system-setup -- --sign" "gulp vscode-win32-${arch}-user-setup -- --sign"`; - await (0, codesign_1.streamProcessOutputAndCheckResult)('Sign setup packages (system, user)', task); - } -} -main().then(() => { - process.exit(0); -}, err => { - console.error(`ERROR: ${err}`); - process.exit(1); -}); -//# sourceMappingURL=codesign.js.map \ No newline at end of file diff --git a/build/azure-pipelines/win32/codesign.ts b/build/azure-pipelines/win32/codesign.ts deleted file mode 100644 index 7e717070..00000000 --- a/build/azure-pipelines/win32/codesign.ts +++ /dev/null @@ -1,84 +0,0 @@ -/*--------------------------------------------------------------------------------------------- - * Copyright (c) Microsoft Corporation. All rights reserved. - * Licensed under the MIT License. See License.txt in the project root for license information. - *--------------------------------------------------------------------------------------------*/ - -import { $, usePwsh } from 'zx'; -import { printBanner, spawnCodesignProcess, streamProcessOutputAndCheckResult } from '../common/codesign'; -import { e } from '../common/publish'; - -async function main() { - usePwsh(); - - const arch = e('VSCODE_ARCH'); - const esrpCliDLLPath = e('EsrpCliDllPath'); - const codeSigningFolderPath = e('CodeSigningFolderPath'); - - // Start the code sign processes in parallel - // 1. Codesign executables and shared libraries - // 2. Codesign Powershell scripts - // 3. 
Codesign context menu appx package (insiders only) - const codesignTask1 = spawnCodesignProcess(esrpCliDLLPath, 'sign-windows', codeSigningFolderPath, '*.dll,*.exe,*.node'); - const codesignTask2 = spawnCodesignProcess(esrpCliDLLPath, 'sign-windows-appx', codeSigningFolderPath, '*.ps1'); - const codesignTask3 = process.env['VSCODE_QUALITY'] === 'insider' - ? spawnCodesignProcess(esrpCliDLLPath, 'sign-windows-appx', codeSigningFolderPath, '*.appx') - : undefined; - - // Codesign executables and shared libraries - printBanner('Codesign executables and shared libraries'); - await streamProcessOutputAndCheckResult('Codesign executables and shared libraries', codesignTask1); - - // Codesign Powershell scripts - printBanner('Codesign Powershell scripts'); - await streamProcessOutputAndCheckResult('Codesign Powershell scripts', codesignTask2); - - if (codesignTask3) { - // Codesign context menu appx package - printBanner('Codesign context menu appx package'); - await streamProcessOutputAndCheckResult('Codesign context menu appx package', codesignTask3); - } - - // Create build artifact directory - await $`New-Item -ItemType Directory -Path .build/win32-${arch} -Force`; - - // Package client - if (process.env['BUILT_CLIENT']) { - // Product version - const version = await $`node -p "require('../VSCode-win32-${arch}/resources/app/package.json').version"`; - - printBanner('Package client'); - const clientArchivePath = `.build/win32-${arch}/VSCode-win32-${arch}-${version}.zip`; - await $`7z.exe a -tzip ${clientArchivePath} ../VSCode-win32-${arch}/* "-xr!CodeSignSummary*.md"`.pipe(process.stdout); - await $`7z.exe l ${clientArchivePath}`.pipe(process.stdout); - } - - // Package server - if (process.env['BUILT_SERVER']) { - printBanner('Package server'); - const serverArchivePath = `.build/win32-${arch}/vscode-server-win32-${arch}.zip`; - await $`7z.exe a -tzip ${serverArchivePath} ../vscode-server-win32-${arch}`.pipe(process.stdout); - await $`7z.exe l 
${serverArchivePath}`.pipe(process.stdout); - } - - // Package server (web) - if (process.env['BUILT_WEB']) { - printBanner('Package server (web)'); - const webArchivePath = `.build/win32-${arch}/vscode-server-win32-${arch}-web.zip`; - await $`7z.exe a -tzip ${webArchivePath} ../vscode-server-win32-${arch}-web`.pipe(process.stdout); - await $`7z.exe l ${webArchivePath}`.pipe(process.stdout); - } - - // Sign setup - if (process.env['BUILT_CLIENT']) { - printBanner('Sign setup packages (system, user)'); - const task = $`npm exec -- npm-run-all -lp "gulp vscode-win32-${arch}-system-setup -- --sign" "gulp vscode-win32-${arch}-user-setup -- --sign"`; - await streamProcessOutputAndCheckResult('Sign setup packages (system, user)', task); - } -} - -main().then(() => { - process.exit(0); -}, err => { - console.error(`ERROR: ${err}`); - process.exit(1); -}); diff --git a/build/azure-pipelines/win32/exec.ps1 b/build/azure-pipelines/win32/exec.ps1 deleted file mode 100644 index 826cefdf..00000000 --- a/build/azure-pipelines/win32/exec.ps1 +++ /dev/null @@ -1,24 +0,0 @@ -# Taken from psake https://github.com/psake/psake - -<# -.SYNOPSIS - This is a helper function that runs a scriptblock and checks the PS variable $lastexitcode - to see if an error occcured. If an error is detected then an exception is thrown. - This function allows you to run command-line programs without having to - explicitly check the $lastexitcode variable. - -.EXAMPLE - exec { svn info $repository_trunk } "Error executing SVN. 
Please verify SVN command-line client is installed" -#> -function Exec -{ - [CmdletBinding()] - param( - [Parameter(Position=0,Mandatory=1)][scriptblock]$cmd, - [Parameter(Position=1,Mandatory=0)][string]$errorMessage = ($msgs.error_bad_command -f $cmd) - ) - & $cmd - if ($lastexitcode -ne 0) { - throw ("Exec: " + $errorMessage) - } -} \ No newline at end of file diff --git a/build/azure-pipelines/win32/import-esrp-auth-cert.ps1 b/build/azure-pipelines/win32/import-esrp-auth-cert.ps1 deleted file mode 100644 index e9a1d5a8..00000000 --- a/build/azure-pipelines/win32/import-esrp-auth-cert.ps1 +++ /dev/null @@ -1,14 +0,0 @@ -param ($CertBase64) -$ErrorActionPreference = "Stop" - -$CertBytes = [System.Convert]::FromBase64String($CertBase64) -$CertCollection = New-Object System.Security.Cryptography.X509Certificates.X509Certificate2Collection -$CertCollection.Import($CertBytes, $null, [System.Security.Cryptography.X509Certificates.X509KeyStorageFlags]::Exportable -bxor [System.Security.Cryptography.X509Certificates.X509KeyStorageFlags]::PersistKeySet) - -$CertStore = New-Object System.Security.Cryptography.X509Certificates.X509Store("My","LocalMachine") -$CertStore.Open("ReadWrite") -$CertStore.AddRange($CertCollection) -$CertStore.Close() - -$ESRPAuthCertificateSubjectName = $CertCollection[0].Subject -Write-Output ("##vso[task.setvariable variable=ESRPAuthCertificateSubjectName;]$ESRPAuthCertificateSubjectName") diff --git a/build/azure-pipelines/win32/listprocesses.bat b/build/azure-pipelines/win32/listprocesses.bat deleted file mode 100644 index f17ec239..00000000 --- a/build/azure-pipelines/win32/listprocesses.bat +++ /dev/null @@ -1,3 +0,0 @@ -echo "------------------------------------" -tasklist /V -echo "------------------------------------" diff --git a/build/azure-pipelines/win32/product-build-win32-cli-sign.yml b/build/azure-pipelines/win32/product-build-win32-cli-sign.yml deleted file mode 100644 index c7f4b0a0..00000000 --- 
a/build/azure-pipelines/win32/product-build-win32-cli-sign.yml +++ /dev/null @@ -1,63 +0,0 @@ -parameters: - - name: VSCODE_BUILD_WIN32 - type: boolean - - name: VSCODE_BUILD_WIN32_ARM64 - type: boolean - - name: VSCODE_QUALITY - type: string - -steps: - - task: NodeTool@0 - displayName: "Use Node.js" - inputs: - versionSource: fromFile - versionFilePath: .nvmrc - nodejsMirror: https://github.com/joaomoreno/node-mirror/releases/download - - - ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}: - - task: AzureKeyVault@2 - displayName: "Azure Key Vault: Get Secrets" - inputs: - azureSubscription: vscode - KeyVaultName: vscode-build-secrets - SecretsFilter: "github-distro-mixin-password" - - - powershell: node build/setup-npm-registry.js $env:NPM_REGISTRY build - condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM Registry - - - powershell: | - . build/azure-pipelines/win32/exec.ps1 - $ErrorActionPreference = "Stop" - # Set the private NPM registry to the global npmrc file - # so that authentication works for subfolders like build/, remote/, extensions/ etc - # which does not have their own .npmrc file - exec { npm config set registry "$env:NPM_REGISTRY" } - $NpmrcPath = (npm config get userconfig) - echo "##vso[task.setvariable variable=NPMRC_PATH]$NpmrcPath" - condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM - - - task: npmAuthenticate@0 - inputs: - workingFile: $(NPMRC_PATH) - condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM Authentication - - - powershell: | - . 
azure-pipelines/win32/exec.ps1 - $ErrorActionPreference = "Stop" - exec { npm ci } - workingDirectory: build - env: - GITHUB_TOKEN: "$(github-distro-mixin-password)" - retryCountOnTaskFailure: 5 - displayName: Install build dependencies - - - template: ../cli/cli-win32-sign.yml@self - parameters: - VSCODE_CLI_ARTIFACTS: - - ${{ if eq(parameters.VSCODE_BUILD_WIN32, true) }}: - - unsigned_vscode_cli_win32_x64_cli - - ${{ if eq(parameters.VSCODE_BUILD_WIN32_ARM64, true) }}: - - unsigned_vscode_cli_win32_arm64_cli diff --git a/build/azure-pipelines/win32/product-build-win32-test.yml b/build/azure-pipelines/win32/product-build-win32-test.yml deleted file mode 100644 index 73f2b8f9..00000000 --- a/build/azure-pipelines/win32/product-build-win32-test.yml +++ /dev/null @@ -1,246 +0,0 @@ -parameters: - - name: VSCODE_QUALITY - type: string - - name: VSCODE_ARCH - type: string - - name: VSCODE_RUN_ELECTRON_TESTS - type: boolean - - name: VSCODE_RUN_BROWSER_TESTS - type: boolean - - name: VSCODE_RUN_REMOTE_TESTS - type: boolean - - name: VSCODE_TEST_ARTIFACT_NAME - type: string - - name: PUBLISH_TASK_NAME - type: string - default: PublishPipelineArtifact@0 - -steps: - # Additional "--" needed to workaround https://github.com/npm/cli/issues/7375 - - powershell: npm exec -- -- npm-run-all -lp "electron $(VSCODE_ARCH)" "playwright-install" - env: - GITHUB_TOKEN: "$(github-distro-mixin-password)" - displayName: Download Electron and Playwright - retryCountOnTaskFailure: 3 - - - ${{ if eq(parameters.VSCODE_QUALITY, 'oss') }}: - - ${{ if eq(parameters.VSCODE_RUN_ELECTRON_TESTS, true) }}: - - powershell: .\scripts\test.bat --tfs "Unit Tests" - displayName: ๐Ÿงช Run unit tests (Electron) - timeoutInMinutes: 15 - - powershell: npm run test-node - displayName: ๐Ÿงช Run unit tests (node.js) - timeoutInMinutes: 15 - - - ${{ if eq(parameters.VSCODE_RUN_BROWSER_TESTS, true) }}: - - powershell: node test/unit/browser/index.js --browser chromium --tfs "Browser Unit Tests" - displayName: ๐Ÿงช 
Run unit tests (Browser, Chromium) - timeoutInMinutes: 20 - - - ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}: - - ${{ if eq(parameters.VSCODE_RUN_ELECTRON_TESTS, true) }}: - - powershell: .\scripts\test.bat --build --tfs "Unit Tests" - displayName: ๐Ÿงช Run unit tests (Electron) - timeoutInMinutes: 15 - # Additional "--" needed to workaround https://github.com/npm/cli/issues/7375 - - powershell: npm run test-node -- -- --build - displayName: ๐Ÿงช Run unit tests (node.js) - timeoutInMinutes: 15 - - - ${{ if eq(parameters.VSCODE_RUN_BROWSER_TESTS, true) }}: - # Additional "--" needed to workaround https://github.com/npm/cli/issues/7375 - - powershell: npm run test-browser-no-install -- -- --build --browser chromium --tfs "Browser Unit Tests" - displayName: ๐Ÿงช Run unit tests (Browser, Chromium) - timeoutInMinutes: 20 - - - powershell: | - . build/azure-pipelines/win32/exec.ps1 - $ErrorActionPreference = "Stop" - exec { npm run gulp ` - compile-extension:configuration-editing ` - compile-extension:css-language-features-server ` - compile-extension:emmet ` - compile-extension:git ` - compile-extension:github-authentication ` - compile-extension:html-language-features-server ` - compile-extension:ipynb ` - compile-extension:notebook-renderers ` - compile-extension:json-language-features-server ` - compile-extension:markdown-language-features ` - compile-extension-media ` - compile-extension:microsoft-authentication ` - compile-extension:typescript-language-features ` - compile-extension:vscode-api-tests ` - compile-extension:vscode-colorize-tests ` - compile-extension:vscode-colorize-perf-tests ` - compile-extension:vscode-test-resolver ` - } - displayName: Build integration tests - - - powershell: .\build\azure-pipelines\win32\listprocesses.bat - displayName: Diagnostics before integration test runs - continueOnError: true - condition: succeededOrFailed() - - - ${{ if eq(parameters.VSCODE_QUALITY, 'oss') }}: - - ${{ if eq(parameters.VSCODE_RUN_ELECTRON_TESTS, true) 
}}: - - powershell: .\scripts\test-integration.bat --tfs "Integration Tests" - displayName: ๐Ÿงช Run integration tests (Electron) - timeoutInMinutes: 20 - - - ${{ if eq(parameters.VSCODE_RUN_BROWSER_TESTS, true) }}: - - powershell: .\scripts\test-web-integration.bat --browser chromium - displayName: ๐Ÿงช Run integration tests (Browser, Chromium) - timeoutInMinutes: 20 - - - ${{ if eq(parameters.VSCODE_RUN_REMOTE_TESTS, true) }}: - - powershell: .\scripts\test-remote-integration.bat - displayName: ๐Ÿงช Run integration tests (Remote) - timeoutInMinutes: 20 - - - ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}: - - powershell: | - # Copy client, server and web builds to a separate test directory, to avoid Access Denied errors in codesign - . build/azure-pipelines/win32/exec.ps1 - $ErrorActionPreference = "Stop" - $TestDir = "$(agent.builddirectory)\test" - New-Item -ItemType Directory -Path $TestDir -Force - Copy-Item -Path "$(agent.builddirectory)\VSCode-win32-$(VSCODE_ARCH)" -Destination "$TestDir\VSCode-win32-$(VSCODE_ARCH)" -Recurse -Force - Copy-Item -Path "$(agent.builddirectory)\vscode-server-win32-$(VSCODE_ARCH)" -Destination "$TestDir\vscode-server-win32-$(VSCODE_ARCH)" -Recurse -Force - Copy-Item -Path "$(agent.builddirectory)\vscode-server-win32-$(VSCODE_ARCH)-web" -Destination "$TestDir\vscode-server-win32-$(VSCODE_ARCH)-web" -Recurse -Force - displayName: Copy builds to test directory - - - ${{ if eq(parameters.VSCODE_RUN_ELECTRON_TESTS, true) }}: - - powershell: | - # Figure out the full absolute path of the product we just built - # including the remote server and configure the integration tests - # to run with these builds instead of running out of sources. - . 
build/azure-pipelines/win32/exec.ps1 - $ErrorActionPreference = "Stop" - $AppRoot = "$(agent.builddirectory)\test\VSCode-win32-$(VSCODE_ARCH)" - $AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json - $AppNameShort = $AppProductJson.nameShort - $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe" - $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\test\vscode-server-win32-$(VSCODE_ARCH)" - exec { .\scripts\test-integration.bat --build --tfs "Integration Tests" } - displayName: ๐Ÿงช Run integration tests (Electron) - timeoutInMinutes: 20 - - - ${{ if eq(parameters.VSCODE_RUN_BROWSER_TESTS, true) }}: - - powershell: | - . build/azure-pipelines/win32/exec.ps1 - $ErrorActionPreference = "Stop" - $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\test\vscode-server-win32-$(VSCODE_ARCH)-web" - exec { .\scripts\test-web-integration.bat --browser firefox } - displayName: ๐Ÿงช Run integration tests (Browser, Firefox) - timeoutInMinutes: 20 - - - ${{ if eq(parameters.VSCODE_RUN_REMOTE_TESTS, true) }}: - - powershell: | - . 
build/azure-pipelines/win32/exec.ps1 - $ErrorActionPreference = "Stop" - $AppRoot = "$(agent.builddirectory)\test\VSCode-win32-$(VSCODE_ARCH)" - $AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json - $AppNameShort = $AppProductJson.nameShort - $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe" - $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\test\vscode-server-win32-$(VSCODE_ARCH)" - exec { .\scripts\test-remote-integration.bat } - displayName: ๐Ÿงช Run integration tests (Remote) - timeoutInMinutes: 20 - - - powershell: .\build\azure-pipelines\win32\listprocesses.bat - displayName: Diagnostics after integration test runs - continueOnError: true - condition: succeededOrFailed() - - - powershell: .\build\azure-pipelines\win32\listprocesses.bat - displayName: Diagnostics before smoke test run - continueOnError: true - condition: succeededOrFailed() - - # - ${{ if eq(parameters.VSCODE_QUALITY, 'oss') }}: - # - powershell: npm run compile - # workingDirectory: test/smoke - # displayName: Compile smoke tests - - # - powershell: npm run gulp compile-extension-media - # displayName: Build extensions for smoke tests - - # - ${{ if eq(parameters.VSCODE_RUN_ELECTRON_TESTS, true) }}: - # # Additional "--" needed to workaround https://github.com/npm/cli/issues/7375 - # - powershell: npm run smoketest-no-compile -- -- --tracing - # displayName: ๐Ÿงช Run smoke tests (Electron) - # timeoutInMinutes: 20 - - - ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}: - - ${{ if eq(parameters.VSCODE_RUN_ELECTRON_TESTS, true) }}: - # Additional "--" needed to workaround https://github.com/npm/cli/issues/7375 - - powershell: npm run smoketest-no-compile -- -- --verbose --tracing --build "$(agent.builddirectory)\test\VSCode-win32-$(VSCODE_ARCH)" - displayName: ๐Ÿงช Run smoke tests (Electron) - timeoutInMinutes: 20 - - - ${{ if eq(parameters.VSCODE_RUN_BROWSER_TESTS, true) }}: - # Additional "--" needed to workaround 
https://github.com/npm/cli/issues/7375 - - powershell: npm run smoketest-no-compile -- -- --web --tracing --headless - env: - VSCODE_REMOTE_SERVER_PATH: $(agent.builddirectory)\test\vscode-server-win32-$(VSCODE_ARCH)-web - displayName: ๐Ÿงช Run smoke tests (Browser, Chromium) - timeoutInMinutes: 20 - - - ${{ if eq(parameters.VSCODE_RUN_REMOTE_TESTS, true) }}: - # Additional "--" needed to workaround https://github.com/npm/cli/issues/7375 - - powershell: npm run smoketest-no-compile -- -- --tracing --remote --build "$(agent.builddirectory)\test\VSCode-win32-$(VSCODE_ARCH)" - env: - VSCODE_REMOTE_SERVER_PATH: $(agent.builddirectory)\test\vscode-server-win32-$(VSCODE_ARCH) - displayName: ๐Ÿงช Run smoke tests (Remote) - timeoutInMinutes: 20 - - - powershell: .\build\azure-pipelines\win32\listprocesses.bat - displayName: Diagnostics after smoke test run - continueOnError: true - condition: succeededOrFailed() - - - task: ${{ parameters.PUBLISH_TASK_NAME }} - inputs: - targetPath: .build\crashes - ${{ if eq(parameters.VSCODE_TEST_ARTIFACT_NAME, '') }}: - artifactName: crash-dump-windows-$(VSCODE_ARCH)-$(System.JobAttempt) - ${{ else }}: - artifactName: crash-dump-windows-$(VSCODE_ARCH)-${{ parameters.VSCODE_TEST_ARTIFACT_NAME }}-$(System.JobAttempt) - sbomEnabled: false - displayName: "Publish Crash Reports" - continueOnError: true - condition: failed() - - # In order to properly symbolify above crash reports - # (if any), we need the compiled native modules too - - task: ${{ parameters.PUBLISH_TASK_NAME }} - inputs: - targetPath: node_modules - ${{ if eq(parameters.VSCODE_TEST_ARTIFACT_NAME, '') }}: - artifactName: node-modules-windows-$(VSCODE_ARCH)-$(System.JobAttempt) - ${{ else }}: - artifactName: node-modules-windows-$(VSCODE_ARCH)-${{ parameters.VSCODE_TEST_ARTIFACT_NAME }}-$(System.JobAttempt) - sbomEnabled: false - displayName: "Publish Node Modules" - continueOnError: true - condition: failed() - - - task: ${{ parameters.PUBLISH_TASK_NAME }} - inputs: - 
targetPath: .build\logs - ${{ if eq(parameters.VSCODE_TEST_ARTIFACT_NAME, '') }}: - artifactName: logs-windows-$(VSCODE_ARCH)-$(System.JobAttempt) - ${{ else }}: - artifactName: logs-windows-$(VSCODE_ARCH)-${{ parameters.VSCODE_TEST_ARTIFACT_NAME }}-$(System.JobAttempt) - sbomEnabled: false - displayName: "Publish Log Files" - continueOnError: true - condition: succeededOrFailed() - - - task: PublishTestResults@2 - displayName: Publish Tests Results - inputs: - testResultsFiles: "*-results.xml" - searchFolder: "$(Build.ArtifactStagingDirectory)/test-results" - condition: succeededOrFailed() diff --git a/build/azure-pipelines/win32/product-build-win32.yml b/build/azure-pipelines/win32/product-build-win32.yml deleted file mode 100644 index 0acc0b73..00000000 --- a/build/azure-pipelines/win32/product-build-win32.yml +++ /dev/null @@ -1,386 +0,0 @@ -parameters: - - name: VSCODE_QUALITY - type: string - - name: VSCODE_ARCH - type: string - - name: VSCODE_CIBUILD - type: boolean - - name: VSCODE_RUN_ELECTRON_TESTS - type: boolean - default: false - - name: VSCODE_RUN_BROWSER_TESTS - type: boolean - default: false - - name: VSCODE_RUN_REMOTE_TESTS - type: boolean - default: false - - name: VSCODE_TEST_ARTIFACT_NAME - type: string - default: "" - -steps: - - ${{ if eq(parameters.VSCODE_QUALITY, 'oss') }}: - - checkout: self - fetchDepth: 1 - retryCountOnTaskFailure: 3 - - - task: NodeTool@0 - inputs: - versionSource: fromFile - versionFilePath: .nvmrc - nodejsMirror: https://github.com/joaomoreno/node-mirror/releases/download - - - task: UsePythonVersion@0 - inputs: - versionSpec: "3.x" - addToPath: true - - - ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}: - - template: ../distro/download-distro.yml@self - - - task: AzureKeyVault@2 - displayName: "Azure Key Vault: Get Secrets" - inputs: - azureSubscription: vscode - KeyVaultName: vscode-build-secrets - SecretsFilter: "github-distro-mixin-password" - - - task: DownloadPipelineArtifact@2 - inputs: - artifact: Compilation - 
path: $(Build.ArtifactStagingDirectory) - displayName: Download compilation output - - - task: ExtractFiles@1 - displayName: Extract compilation output - inputs: - archiveFilePatterns: "$(Build.ArtifactStagingDirectory)/compilation.tar.gz" - cleanDestinationFolder: false - - - powershell: node build/setup-npm-registry.js $env:NPM_REGISTRY - condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM Registry - - - pwsh: | - mkdir .build -ea 0 - node build/azure-pipelines/common/computeNodeModulesCacheKey.js win32 $(VSCODE_ARCH) $(node -p process.arch) > .build/packagelockhash - displayName: Prepare node_modules cache key - - - task: Cache@2 - inputs: - key: '"node_modules" | .build/packagelockhash' - path: .build/node_modules_cache - cacheHitVar: NODE_MODULES_RESTORED - displayName: Restore node_modules cache - - - powershell: 7z.exe x .build/node_modules_cache/cache.7z -aoa - condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true')) - displayName: Extract node_modules cache - - - powershell: | - . build/azure-pipelines/win32/exec.ps1 - $ErrorActionPreference = "Stop" - # Set the private NPM registry to the global npmrc file - # so that authentication works for subfolders like build/, remote/, extensions/ etc - # which does not have their own .npmrc file - exec { npm config set registry "$env:NPM_REGISTRY" } - $NpmrcPath = (npm config get userconfig) - echo "##vso[task.setvariable variable=NPMRC_PATH]$NpmrcPath" - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM - - - task: npmAuthenticate@0 - inputs: - workingFile: $(NPMRC_PATH) - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM Authentication - - # Remove once https://github.com/parcel-bundler/watcher/pull/202 is merged. 
- - pwsh: | - $includes = @' - { - 'target_defaults': { - 'conditions': [ - ['OS=="win"', { - "msvs_settings": { - "VCCLCompilerTool": { - "AdditionalOptions": [ - "/guard:cf", - "/w34244", - "/w34267", - ] - }, - "VCLinkerTool": { - "AdditionalOptions": [ - "/guard:cf", - ] - } - } - }] - ] - } - } - '@ - - if (!(Test-Path "~/.gyp")) { - mkdir "~/.gyp" - } - echo $includes > "~/.gyp/include.gypi" - displayName: Create include.gypi - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - - - powershell: | - . build/azure-pipelines/win32/exec.ps1 - $ErrorActionPreference = "Stop" - exec { npm ci } - env: - npm_config_arch: $(VSCODE_ARCH) - npm_config_foreground_scripts: "true" - ELECTRON_SKIP_BINARY_DOWNLOAD: 1 - PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1 - GITHUB_TOKEN: "$(github-distro-mixin-password)" - retryCountOnTaskFailure: 5 - displayName: Install dependencies - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - - - ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}: - - powershell: node build/azure-pipelines/distro/mixin-npm - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - displayName: Mixin distro node modules - - - powershell: | - . 
build/azure-pipelines/win32/exec.ps1 - $ErrorActionPreference = "Stop" - exec { node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt } - exec { mkdir -Force .build/node_modules_cache } - exec { 7z.exe a .build/node_modules_cache/cache.7z -mx3 `@.build/node_modules_list.txt } - condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true')) - displayName: Create node_modules archive - - - ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}: - - powershell: node build/azure-pipelines/distro/mixin-quality - displayName: Mixin distro quality - - - template: ../common/install-builtin-extensions.yml@self - - - ${{ if and(ne(parameters.VSCODE_CIBUILD, true), ne(parameters.VSCODE_QUALITY, 'oss')) }}: - - powershell: node build\lib\policies win32 - displayName: Generate Group Policy definitions - retryCountOnTaskFailure: 3 - - - ${{ if eq(parameters.VSCODE_QUALITY, 'oss') }}: - - powershell: npm run gulp "transpile-client-esbuild" "transpile-extensions" - env: - GITHUB_TOKEN: "$(github-distro-mixin-password)" - displayName: Transpile client and extensions - - - ${{ else }}: - - ${{ if and(ne(parameters.VSCODE_CIBUILD, true), ne(parameters.VSCODE_QUALITY, 'exploration')) }}: - - powershell: node build/win32/explorer-dll-fetcher .build/win32/appx - displayName: Download Explorer dll - - - powershell: | - . 
build/azure-pipelines/win32/exec.ps1 - $ErrorActionPreference = "Stop" - exec { npm run gulp "vscode-win32-$(VSCODE_ARCH)-min-ci" } - exec { npm run gulp "vscode-win32-$(VSCODE_ARCH)-inno-updater" } - echo "##vso[task.setvariable variable=BUILT_CLIENT]true" - echo "##vso[task.setvariable variable=CodeSigningFolderPath]$(Agent.BuildDirectory)/VSCode-win32-$(VSCODE_ARCH)" - env: - GITHUB_TOKEN: "$(github-distro-mixin-password)" - displayName: Build client - - # Note: the appx prepare step has to follow Build client step since build step replaces the template - # strings in the raw manifest file at resources/win32/appx/AppxManifest.xml and places it under - # /appx/manifest, we need a separate step to prepare the appx package with the - # final contents. In our case only the manifest file is bundled into the appx package. - - ${{ if and(ne(parameters.VSCODE_CIBUILD, true), ne(parameters.VSCODE_QUALITY, 'exploration')) }}: - - powershell: | - . build/azure-pipelines/win32/exec.ps1 - $ErrorActionPreference = "Stop" - # Add Windows SDK to path - $sdk = "C:\Program Files (x86)\Windows Kits\10\bin\10.0.26100.0\x64" - $env:PATH = "$sdk;$env:PATH" - $AppxName = if ('$(VSCODE_QUALITY)' -eq 'stable') { 'code' } else { 'code_insider' } - makeappx pack /d "$(Agent.BuildDirectory)/VSCode-win32-$(VSCODE_ARCH)/appx/manifest" /p "$(Agent.BuildDirectory)/VSCode-win32-$(VSCODE_ARCH)/appx/${AppxName}_$(VSCODE_ARCH).appx" /nv - # Remove the raw manifest folder - Remove-Item -Path "$(Agent.BuildDirectory)/VSCode-win32-$(VSCODE_ARCH)/appx/manifest" -Recurse -Force - displayName: Prepare appx package - - - powershell: | - . 
build/azure-pipelines/win32/exec.ps1 - $ErrorActionPreference = "Stop" - exec { npm run gulp "vscode-reh-win32-$(VSCODE_ARCH)-min-ci" } - mv ..\vscode-reh-win32-$(VSCODE_ARCH) ..\vscode-server-win32-$(VSCODE_ARCH) # TODO@joaomoreno - echo "##vso[task.setvariable variable=BUILT_SERVER]true" - echo "##vso[task.setvariable variable=CodeSigningFolderPath]$(CodeSigningFolderPath),$(Agent.BuildDirectory)/vscode-server-win32-$(VSCODE_ARCH)" - env: - GITHUB_TOKEN: "$(github-distro-mixin-password)" - displayName: Build server - - - powershell: | - . build/azure-pipelines/win32/exec.ps1 - $ErrorActionPreference = "Stop" - exec { npm run gulp "vscode-reh-web-win32-$(VSCODE_ARCH)-min-ci" } - mv ..\vscode-reh-web-win32-$(VSCODE_ARCH) ..\vscode-server-win32-$(VSCODE_ARCH)-web # TODO@joaomoreno - echo "##vso[task.setvariable variable=BUILT_WEB]true" - env: - GITHUB_TOKEN: "$(github-distro-mixin-password)" - displayName: Build server (web) - - - ${{ if ne(parameters.VSCODE_CIBUILD, true) }}: - - ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}: - - task: DownloadPipelineArtifact@2 - inputs: - artifact: unsigned_vscode_cli_win32_$(VSCODE_ARCH)_cli - patterns: "**" - path: $(Build.ArtifactStagingDirectory)/cli - displayName: Download VS Code CLI - - - powershell: | - . 
build/azure-pipelines/win32/exec.ps1 - $ErrorActionPreference = "Stop" - $ArtifactName = (gci -Path "$(Build.ArtifactStagingDirectory)/cli" | Select-Object -last 1).FullName - Expand-Archive -Path $ArtifactName -DestinationPath "$(Build.ArtifactStagingDirectory)/cli" - $AppProductJson = Get-Content -Raw -Path "$(Agent.BuildDirectory)\VSCode-win32-$(VSCODE_ARCH)\resources\app\product.json" | ConvertFrom-Json - $CliAppName = $AppProductJson.tunnelApplicationName - $AppName = $AppProductJson.applicationName - Move-Item -Path "$(Build.ArtifactStagingDirectory)/cli/$AppName.exe" -Destination "$(Agent.BuildDirectory)/VSCode-win32-$(VSCODE_ARCH)/bin/$CliAppName.exe" - displayName: Move VS Code CLI - - - task: UseDotNet@2 - inputs: - version: 6.x - - - task: EsrpCodeSigning@5 - inputs: - UseMSIAuthentication: true - ConnectedServiceName: vscode-esrp - AppRegistrationClientId: $(ESRP_CLIENT_ID) - AppRegistrationTenantId: $(ESRP_TENANT_ID) - AuthAKVName: vscode-esrp - AuthSignCertName: esrp-sign - FolderPath: . - Pattern: noop - displayName: 'Install ESRP Tooling' - - - powershell: | - . build/azure-pipelines/win32/exec.ps1 - $ErrorActionPreference = "Stop" - $EsrpCodeSigningTool = (gci -directory -filter EsrpCodeSigning_* $(Agent.RootDirectory)\_tasks | Select-Object -last 1).FullName - $Version = (gci -directory $EsrpCodeSigningTool | Select-Object -last 1).FullName - echo "##vso[task.setvariable variable=EsrpCliDllPath]$Version\net6.0\esrpcli.dll" - displayName: Find ESRP CLI - - # Additional "--" needed to workaround https://github.com/npm/cli/issues/7375 - - powershell: | - . 
build/azure-pipelines/win32/exec.ps1 - $ErrorActionPreference = "Stop" - exec { npx deemon --detach --wait -- -- npx zx build/azure-pipelines/win32/codesign.js } - env: - SYSTEM_ACCESSTOKEN: $(System.AccessToken) - displayName: โœ๏ธ Codesign - - - ${{ if or(eq(parameters.VSCODE_RUN_ELECTRON_TESTS, true), eq(parameters.VSCODE_RUN_BROWSER_TESTS, true), eq(parameters.VSCODE_RUN_REMOTE_TESTS, true)) }}: - - template: product-build-win32-test.yml@self - parameters: - VSCODE_QUALITY: ${{ parameters.VSCODE_QUALITY }} - VSCODE_ARCH: ${{ parameters.VSCODE_ARCH }} - VSCODE_RUN_ELECTRON_TESTS: ${{ parameters.VSCODE_RUN_ELECTRON_TESTS }} - VSCODE_RUN_BROWSER_TESTS: ${{ parameters.VSCODE_RUN_BROWSER_TESTS }} - VSCODE_RUN_REMOTE_TESTS: ${{ parameters.VSCODE_RUN_REMOTE_TESTS }} - VSCODE_TEST_ARTIFACT_NAME: ${{ parameters.VSCODE_TEST_ARTIFACT_NAME }} - ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}: - PUBLISH_TASK_NAME: 1ES.PublishPipelineArtifact@1 - - - ${{ if ne(parameters.VSCODE_CIBUILD, true) }}: - - ${{ if ne(parameters.VSCODE_QUALITY, 'oss') }}: - # Additional "--" needed to workaround https://github.com/npm/cli/issues/7375 - - powershell: | - . 
build/azure-pipelines/win32/exec.ps1 - $ErrorActionPreference = "Stop" - exec { npx deemon --attach -- -- npx zx build/azure-pipelines/win32/codesign.js } - condition: succeededOrFailed() - env: - NODE_DEBUG: "net,child_process" - NODE_OPTIONS: "--report-filename=stdout,--report-uncaught-exception,--report-on-fatalerror" - displayName: "โœ๏ธ Post-job: Codesign" - - - powershell: | - $ErrorActionPreference = "Stop" - - $PackageJson = Get-Content -Raw -Path ..\VSCode-win32-$(VSCODE_ARCH)\resources\app\package.json | ConvertFrom-Json - $Version = $PackageJson.version - - $ClientArchivePath = ".build\win32-$(VSCODE_ARCH)\VSCode-win32-$(VSCODE_ARCH)-$Version.zip" - $ServerArchivePath = ".build\win32-$(VSCODE_ARCH)\vscode-server-win32-$(VSCODE_ARCH).zip" - $WebArchivePath = ".build\win32-$(VSCODE_ARCH)\vscode-server-win32-$(VSCODE_ARCH)-web.zip" - - $SystemSetupPath = ".build\win32-$(VSCODE_ARCH)\system-setup\VSCodeSetup-$(VSCODE_ARCH)-$Version.exe" - $UserSetupPath = ".build\win32-$(VSCODE_ARCH)\user-setup\VSCodeUserSetup-$(VSCODE_ARCH)-$Version.exe" - - mv .build\win32-$(VSCODE_ARCH)\system-setup\VSCodeSetup.exe $SystemSetupPath - mv .build\win32-$(VSCODE_ARCH)\user-setup\VSCodeSetup.exe $UserSetupPath - - echo "##vso[task.setvariable variable=CLIENT_PATH]$ClientArchivePath" - echo "##vso[task.setvariable variable=SERVER_PATH]$ServerArchivePath" - echo "##vso[task.setvariable variable=WEB_PATH]$WebArchivePath" - - echo "##vso[task.setvariable variable=SYSTEM_SETUP_PATH]$SystemSetupPath" - echo "##vso[task.setvariable variable=USER_SETUP_PATH]$UserSetupPath" - condition: succeededOrFailed() - displayName: Move setup packages - - - powershell: echo "##vso[task.setvariable variable=ARTIFACT_PREFIX]attempt$(System.JobAttempt)_" - condition: and(succeededOrFailed(), notIn(variables['Agent.JobStatus'], 'Succeeded', 'SucceededWithIssues')) - displayName: Generate artifact prefix - - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: $(CLIENT_PATH) - artifactName: 
$(ARTIFACT_PREFIX)vscode_client_win32_$(VSCODE_ARCH)_archive - sbomBuildDropPath: $(Agent.BuildDirectory)/VSCode-win32-$(VSCODE_ARCH) - sbomPackageName: "VS Code Windows $(VSCODE_ARCH)" - sbomPackageVersion: $(Build.SourceVersion) - condition: and(succeededOrFailed(), ne(variables['CLIENT_PATH'], '')) - displayName: Publish archive - - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: $(SERVER_PATH) - artifactName: $(ARTIFACT_PREFIX)vscode_server_win32_$(VSCODE_ARCH)_archive - sbomBuildDropPath: $(Agent.BuildDirectory)/vscode-server-win32-$(VSCODE_ARCH) - sbomPackageName: "VS Code Windows $(VSCODE_ARCH) Server" - sbomPackageVersion: $(Build.SourceVersion) - condition: and(succeededOrFailed(), ne(variables['SERVER_PATH'], '')) - displayName: Publish server archive - - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: $(WEB_PATH) - artifactName: $(ARTIFACT_PREFIX)vscode_web_win32_$(VSCODE_ARCH)_archive - sbomBuildDropPath: $(Agent.BuildDirectory)/vscode-server-win32-$(VSCODE_ARCH)-web - sbomPackageName: "VS Code Windows $(VSCODE_ARCH) Web" - sbomPackageVersion: $(Build.SourceVersion) - condition: and(succeededOrFailed(), ne(variables['WEB_PATH'], '')) - displayName: Publish web server archive - - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: $(SYSTEM_SETUP_PATH) - artifactName: $(ARTIFACT_PREFIX)vscode_client_win32_$(VSCODE_ARCH)_setup - sbomBuildDropPath: $(Agent.BuildDirectory)/VSCode-win32-$(VSCODE_ARCH) - sbomPackageName: "VS Code Windows $(VSCODE_ARCH) System Setup" - sbomPackageVersion: $(Build.SourceVersion) - condition: and(succeededOrFailed(), ne(variables['SYSTEM_SETUP_PATH'], '')) - displayName: Publish system setup - - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: $(USER_SETUP_PATH) - artifactName: $(ARTIFACT_PREFIX)vscode_client_win32_$(VSCODE_ARCH)_user-setup - sbomBuildDropPath: $(Agent.BuildDirectory)/VSCode-win32-$(VSCODE_ARCH) - sbomPackageName: "VS Code Windows $(VSCODE_ARCH) User Setup" - 
sbomPackageVersion: $(Build.SourceVersion) - condition: and(succeededOrFailed(), ne(variables['USER_SETUP_PATH'], '')) - displayName: Publish user setup diff --git a/build/azure-pipelines/win32/retry.ps1 b/build/azure-pipelines/win32/retry.ps1 deleted file mode 100644 index 0cc67f58..00000000 --- a/build/azure-pipelines/win32/retry.ps1 +++ /dev/null @@ -1,19 +0,0 @@ -function Retry -{ - [CmdletBinding()] - param( - [Parameter(Position=0,Mandatory=1)][scriptblock]$cmd - ) - $retry = 0 - - while ($retry++ -lt 5) { - try { - & $cmd - return - } catch { - # noop - } - } - - throw "Max retries reached" -} diff --git a/build/azure-pipelines/win32/sdl-scan-win32.yml b/build/azure-pipelines/win32/sdl-scan-win32.yml deleted file mode 100644 index bf6819a4..00000000 --- a/build/azure-pipelines/win32/sdl-scan-win32.yml +++ /dev/null @@ -1,154 +0,0 @@ -parameters: - - name: VSCODE_ARCH - type: string - - name: VSCODE_QUALITY - type: string - -steps: - - task: NodeTool@0 - inputs: - versionSource: fromFile - versionFilePath: .nvmrc - nodejsMirror: https://github.com/joaomoreno/node-mirror/releases/download - - - task: UsePythonVersion@0 - inputs: - versionSpec: "3.x" - addToPath: true - - - template: ../distro/download-distro.yml@self - - - task: AzureKeyVault@2 - displayName: "Azure Key Vault: Get Secrets" - inputs: - azureSubscription: vscode - KeyVaultName: vscode-build-secrets - SecretsFilter: "github-distro-mixin-password" - - - powershell: node build/setup-npm-registry.js $env:NPM_REGISTRY - condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM Registry - - - powershell: | - . 
build/azure-pipelines/win32/exec.ps1 - $ErrorActionPreference = "Stop" - # Set the private NPM registry to the global npmrc file - # so that authentication works for subfolders like build/, remote/, extensions/ etc - # which does not have their own .npmrc file - exec { npm config set registry "$env:NPM_REGISTRY" } - $NpmrcPath = (npm config get userconfig) - echo "##vso[task.setvariable variable=NPMRC_PATH]$NpmrcPath" - condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM - - - task: npmAuthenticate@0 - inputs: - workingFile: $(NPMRC_PATH) - condition: and(succeeded(), ne(variables['NPM_REGISTRY'], 'none')) - displayName: Setup NPM Authentication - - - pwsh: | - $includes = @' - { - 'target_defaults': { - 'conditions': [ - ['OS=="win"', { - 'msvs_settings': { - 'VCCLCompilerTool': { - 'AdditionalOptions': [ - '/Zi', - '/FS' - ], - }, - 'VCLinkerTool': { - 'AdditionalOptions': [ - '/profile' - ] - } - } - }] - ] - } - } - '@ - - if (!(Test-Path "~/.gyp")) { - mkdir "~/.gyp" - } - echo $includes > "~/.gyp/include.gypi" - displayName: Create include.gypi - - - powershell: | - . 
build/azure-pipelines/win32/exec.ps1 - $ErrorActionPreference = "Stop" - exec { npm ci } - env: - npm_config_arch: ${{ parameters.VSCODE_ARCH }} - npm_config_foreground_scripts: "true" - ELECTRON_SKIP_BINARY_DOWNLOAD: 1 - PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1 - GITHUB_TOKEN: "$(github-distro-mixin-password)" - retryCountOnTaskFailure: 5 - displayName: Install dependencies - - - script: node build/azure-pipelines/distro/mixin-npm - displayName: Mixin distro node modules - - - script: node build/azure-pipelines/distro/mixin-quality - displayName: Mixin distro quality - env: - VSCODE_QUALITY: ${{ parameters.VSCODE_QUALITY }} - - - powershell: npm run compile - displayName: Compile - - - powershell: npm run gulp "vscode-symbols-win32-${{ parameters.VSCODE_ARCH }}" - env: - GITHUB_TOKEN: "$(github-distro-mixin-password)" - displayName: Download Symbols - - - powershell: | - Get-ChildItem '$(Agent.BuildDirectory)\scanbin' -Recurse -Filter "*.exe" - Get-ChildItem '$(Agent.BuildDirectory)\scanbin' -Recurse -Filter "*.dll" - Get-ChildItem '$(Agent.BuildDirectory)\scanbin' -Recurse -Filter "*.node" - Get-ChildItem '$(Agent.BuildDirectory)\scanbin' -Recurse -Filter "*.pdb" - displayName: List files - - - task: CopyFiles@2 - displayName: 'Collect Symbols for API Scan' - inputs: - SourceFolder: $(Agent.BuildDirectory) - Contents: 'scanbin\**\*.pdb' - TargetFolder: '$(Agent.BuildDirectory)\symbols' - flattenFolders: true - condition: succeeded() - - - task: APIScan@2 - inputs: - softwareFolder: $(Agent.BuildDirectory)\scanbin - softwareName: 'vscode-client' - softwareVersionNum: '1' - symbolsFolder: 'srv*https://symweb.azurefd.net;$(Agent.BuildDirectory)\symbols' - isLargeApp: false - toolVersion: 'Latest' - azureSubscription: 'vscode-apiscan' - displayName: Run ApiScan - condition: succeeded() - env: - AzureServicesAuthConnectionString: 
RunAs=App;AppId=c0940da5-8bd3-4dd3-8af1-40774b50edbd;TenantId=72f988bf-86f1-41af-91ab-2d7cd011db47;ServiceConnectionId=3e55d992-b60d-414d-9071-e4fad359c748; - SYSTEM_ACCESSTOKEN: $(System.AccessToken) - - - task: PublishSecurityAnalysisLogs@3 - inputs: - ArtifactName: CodeAnalysisLogs - ArtifactType: Container - PublishProcessedResults: false - AllTools: true - - # TSA Upload - - task: securedevelopmentteam.vss-secure-development-tools.build-task-uploadtotsa.TSAUpload@2 - displayName: TSA Upload - continueOnError: true - inputs: - GdnPublishTsaOnboard: true - GdnPublishTsaConfigFile: '$(Build.SourcesDirectory)/build/azure-pipelines/config/tsaoptions.json' diff --git a/config/model_config.yaml b/config/model_config.yaml index e69de29b..2d84b58b 100644 --- a/config/model_config.yaml +++ b/config/model_config.yaml @@ -0,0 +1,17 @@ +default_provider: gemini +default_model: chat-bison-001 + +providers: + gemini: + default_model: chat-bison-001 + openai: + default_model: gpt-3.5-turbo + ollama: + default_model: llama2 + +agent: + type: ZERO_SHOT_REACT_DESCRIPTION + verbose: true + memory: + enabled: true + type: ConversationBufferMemory diff --git a/doc/deepagent.md b/doc/deepagent.md index 9a17ad3e..8482e004 100644 --- a/doc/deepagent.md +++ b/doc/deepagent.md @@ -1,56 +1,96 @@ -# DeepAgent usage +# DeepAgent Usage -This document describes `src/agents/deepagent.py` usage, required environment variables, and the safe dry-run mode. +This document describes the usage of `src/agents/deepagent.py`, its configuration, and how to run and test it. -Required environment variables (depending on provider): +## Configuration -- For Gemini/Google: - - `LLM_PROVIDER=gemini` or `LLM_PROVIDER=google` - - `GOOGLE_GEMINI_API_KEY` (must be provided via GitHub Secrets or local environment) - - Optional: `LLM_MODEL` (defaults to `chat-bison-001`) +The `SDLCFlexibleAgent` is configured via the `config/model_config.yaml` file. 
This file allows you to set the default LLM provider, model, and agent settings. -- For OpenAI: - - `LLM_PROVIDER=openai` - - `OPENAI_API_KEY` (must be provided via GitHub Secrets or local environment) - - Optional: `LLM_MODEL` (defaults to `gpt-3.5-turbo`) +Here is an example of the configuration file: +```yaml +default_provider: gemini +default_model: chat-bison-001 - - For Ollama (local server): - - `LLM_PROVIDER=ollama` - - Ensure a local Ollama server is running (default: `http://localhost:11434`) +providers: + gemini: + default_model: chat-bison-001 + openai: + default_model: gpt-3.5-turbo + ollama: + default_model: llama2 -Dry-run / CI-safe mode +agent: + type: ZERO_SHOT_REACT_DESCRIPTION + verbose: true + memory: + enabled: true + type: ConversationBufferMemory +``` + +You can override the default provider and model at runtime by passing the `--provider` and `--model` command-line arguments when running the agent. + +## Memory and Sessions + +The agent supports conversational memory. Each conversation is tracked by a session ID. You can provide a session ID when running the agent to maintain context across multiple turns. -The module supports a `DRY_RUN=true` environment variable that prevents network calls and forces the agent to use the built-in `EchoTool` only. This makes unit tests and CI runs deterministic and offline-friendly. +## Running the Agent -Example (dry run): +You can run the agent from the command line using the `deepagent.py` script. +**Command-line arguments:** +- `--dry-run`: Run the agent in dry-run mode (no network calls). +- `--provider`: The LLM provider to use (e.g., `gemini`, `openai`, `ollama`). +- `--model`: The model name to use. +- `--prompt`: The prompt to run. +- `--session-id`: The session ID for the conversation. + +**Example:** ```bash -DRY_RUN=true LLM_PROVIDER=gemini python -c "import runpy; runpy.run_module('src.agents.deepagent', run_name='__main__')" +python src/agents/deepagent.py --prompt "Hello, how are you?" 
--session-id "my-session" ``` -Tests +## Testing the DeepAgent + +### Unit Tests -Unit tests mock `initialize_agent` to avoid any external requests. Run tests with: +Unit tests for the `deepagent` are located in `test/unit/test_deepagent.py` and `test/unit/test_deepagent_providers.py`. These tests use `pytest` and `monkeypatch` to test the agent's functionality in isolation, without making any network calls. +To run the unit tests, use the following command: ```bash -PYTHONPATH=. python -m pytest -q +PYTHONPATH=. pytest test/unit/ ``` -## Migration and CI notes +### Integration Tests + +Integration tests for the LLM providers are located in `test/integration/llm/test_llm_api.py`. These tests are designed to be run against real LLM providers and require valid API keys. -- Ollama can be used as a local LLM provider (good for on-prem or offline runs). However, for primary CI testing we prefer cloud providers (Gemini and OpenAI) because they are easier to run in CI and have stable HTTP APIs. -- If you want to run the provider matrix in CI (which exercises real provider adapters), use the workflow dispatch input `run_providers=true` when triggering the `Python Tests (consolidated)` workflow. -- For provider matrix runs you must provide credentials/secrets in the repository (for example `GOOGLE_GEMINI_API_KEY` and `OPENAI_API_KEY`). For private repos or to upload coverage, set `CODECOV_TOKEN` in the repository secrets. -- Note: API keys were intentionally removed from the repository `.env` file. Provide keys via one of the following: +To run the integration tests, you will need to set up your API keys as described in the next section. - - GitHub Secrets (recommended for CI): add `GOOGLE_GEMINI_API_KEY` and/or `OPENAI_API_KEY` in repository Settings โ†’ Secrets โ†’ Actions. +## API Keys - - Local environment (developer): export them in your shell: +To use providers like Gemini and OpenAI, you need to provide API keys. 
There are several ways to do this: +- **GitHub Secrets (recommended for CI):** Add `GOOGLE_GEMINI_API_KEY` and/or `OPENAI_API_KEY` in your repository's `Settings` โ†’ `Secrets` โ†’ `Actions`. +- **Local environment variables:** Export the keys in your shell: ```bash export GOOGLE_GEMINI_API_KEY="your-gemini-key" export OPENAI_API_KEY="your-openai-key" ``` +- **`.env` file:** Create a `.env` file in the root of the repository and add your API keys there. You can copy the `.env.template` file to get started. +- **`.env.local` file (development only):** For local development, you can create an untracked `.env.local` file and set your keys there. Do NOT commit this file to version control. + +## Ollama Provider + +The `deepagent` supports the Ollama provider for running LLMs locally. To use it, you need to have an Ollama server running on your local machine (default: `http://localhost:11434`). + +You can run the agent with the Ollama provider like this: +```bash +python src/agents/deepagent.py --provider ollama --model llama2 --prompt "Why is the sky blue?" +``` + +## Migration and CI Notes - - Local dotenv (development only): create an untracked `.env.local` and set keys there; do NOT commit it. -- Run the providers matrix manually from the Actions tab: select `Python Tests (consolidated)` โ†’ `Run workflow` โ†’ set `run_providers` to `true`. +- Ollama can be used as a local LLM provider, which is great for on-premise or offline development. However, for CI testing, we prefer cloud providers like Gemini and OpenAI because they are easier to run in a CI environment and have stable HTTP APIs. +- If you want to run the provider matrix in CI, which exercises the real provider adapters, you can manually trigger the `Python Tests (consolidated)` workflow from the GitHub Actions tab and set the `run_providers` input to `true`. +- For provider matrix runs, you must provide API keys as GitHub Secrets. 
For private repos or to upload code coverage, you can also set the `CODECOV_TOKEN` secret. +- Run the providers matrix manually from the Actions tab: select `Python Tests (consolidated)` โ†’ `Run workflow` โ†’ set `run_providers` to `true`. diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 00000000..4a31112a --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,25 @@ +version: '3.8' + +services: + app: + build: . + ports: + - "8000:8000" + volumes: + - .:/app + environment: + - DATABASE_URL=postgresql://user:password@db:5432/mydatabase + depends_on: + - db + + db: + image: postgres:13 + volumes: + - postgres_data:/var/lib/postgresql/data/ + environment: + - POSTGRES_DB=mydatabase + - POSTGRES_USER=user + - POSTGRES_PASSWORD=password + +volumes: + postgres_data: diff --git a/examples/deepagent_demo.py b/examples/deepagent_demo.py new file mode 100644 index 00000000..e4f28456 --- /dev/null +++ b/examples/deepagent_demo.py @@ -0,0 +1,71 @@ +""" +Example script demonstrating the SDLCFlexibleAgent. + +This script shows how to: +- Initialize the SDLCFlexibleAgent. +- Run the agent with a simple prompt. +- Run the agent with a session ID to demonstrate conversational memory. +""" + +import os +import sys +from pathlib import Path + +# Add src to path for imports +sys.path.insert(0, str(Path(__file__).parent.parent / "src")) + +from agents.deepagent import SDLCFlexibleAgent +from dotenv import load_dotenv + +def main(): + """Main demonstration function.""" + print("๐Ÿค– SDLCFlexibleAgent Demo") + print("=" * 50) + + # Load environment variables from .env file + load_dotenv() + + # --- Basic Usage --- + print("\n--- Basic Usage ---") + try: + # Initialize the agent (provider and model are loaded from config/model_config.yaml) + agent = SDLCFlexibleAgent() + + prompt = "What is the capital of France?" 
+ print(f"Running prompt: '{prompt}'") + result = agent.run(prompt) + print("Agent result:", result) + + except Exception as e: + print(f" โŒ Error: {e}") + print(" Please ensure you have a valid API key in your .env file.") + + + # --- Conversational Memory with Session ID --- + print("\n--- Conversational Memory with Session ID ---") + try: + # Initialize the agent again for a new conversation + agent = SDLCFlexibleAgent() + session_id = "my-test-session" + + # First turn + prompt1 = "My name is Bob." + print(f"Running prompt 1: '{prompt1}' (session: {session_id})") + result1 = agent.run(prompt1, session_id=session_id) + print("Agent result 1:", result1) + + # Second turn + prompt2 = "What is my name?" + print(f"\nRunning prompt 2: '{prompt2}' (session: {session_id})") + result2 = agent.run(prompt2, session_id=session_id) + print("Agent result 2:", result2) + + print("\nNote how the agent remembers the name 'Bob' in the second turn.") + + except Exception as e: + print(f" โŒ Error: {e}") + print(" Please ensure you have a valid API key in your .env file.") + + +if __name__ == "__main__": + main() diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 00000000..26dee12e --- /dev/null +++ b/requirements-dev.txt @@ -0,0 +1,8 @@ +pylint==3.2.2 +flake8==7.0.0 +pytest==8.4.1 +pytest-cov==5.0.0 +mypy==1.9.0 +ruff==0.4.2 +python-dotenv==1.0.1 +PyYAML==6.0.1 diff --git a/requirements-docs.txt b/requirements-docs.txt new file mode 100644 index 00000000..3968f1c8 --- /dev/null +++ b/requirements-docs.txt @@ -0,0 +1,2 @@ +sphinx==7.3.7 +sphinx-autodoc-typehints==2.2.0 diff --git a/requirements.txt b/requirements.txt index a7110654..c18b17ca 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,18 +1,8 @@ - langchain==0.3.27 -# Note: `langchain-deepagent` is not published on PyPI at the pinned version and -# caused CI install failures. 
It's intentionally omitted here; install any -# deepagent/local adapters manually or in provider-specific CI jobs. - -# Keep provider adapters optional; install them per-job if needed -# Provider adapters (optional) - pinned to validated versions from the dev environment langchain-google-genai==2.1.9 langchain-community==0.3.27 -# Ollama adapter left unpinned (install per-job if needed) langchain-ollama==0.3.6 - -# python-dotenv used by the module when running locally -python-dotenv==1.1.1 - -# Test/runtime helpers -pytest==8.4.1 +fastapi==0.111.0 +uvicorn==0.29.0 +psycopg2-binary==2.9.9 +PyYAML==6.0.1 diff --git a/scripts/evaluate-prompt.py b/scripts/evaluate-prompt.py new file mode 100644 index 00000000..b60a2a39 --- /dev/null +++ b/scripts/evaluate-prompt.py @@ -0,0 +1,40 @@ +import os +import argparse +from src.agents import deepagent +from dotenv import load_dotenv + +def main(): + # Load environment variables from .env file in the repo root + dotenv_path = os.path.join(os.path.dirname(os.path.dirname(__file__)), ".env") + if os.path.exists(dotenv_path): + load_dotenv(dotenv_path) + + parser = argparse.ArgumentParser() + parser.add_argument("--prompt-file", required=True) + parser.add_argument("--provider", required=True) + parser.add_argument("--model", default=None) + parser.add_argument("--output-file", required=True) + args = parser.parse_args() + + with open(args.prompt_file, 'r') as f: + prompt = f.read() + + api_key = None + if args.provider.lower() == "gemini" or args.provider.lower() == "google": + api_key = os.getenv("GOOGLE_GEMINI_API_KEY") + elif args.provider.lower() == "openai": + api_key = os.getenv("OPENAI_API_KEY") + + agent = deepagent.SDLCFlexibleAgent( + provider=args.provider, + api_key=api_key, + model=args.model, + dry_run=False + ) + response = agent.run(prompt) + + with open(args.output_file, 'w') as f: + f.write(response.get('output', '')) + +if __name__ == "__main__": + main() diff --git a/src/agents/deepagent.py 
b/src/agents/deepagent.py index c68f809c..985ce6e4 100644 --- a/src/agents/deepagent.py +++ b/src/agents/deepagent.py @@ -1,8 +1,13 @@ """LangChain agent integration using OpenAI LLM and standard tools.""" +import os +import yaml from typing import Any, Optional, List -from langchain.agents import initialize_agent, AgentType +from langchain.agents import AgentExecutor, create_tool_calling_agent +from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder +from langchain_community.chat_message_histories import ChatMessageHistory +from langchain_core.runnables.history import RunnableWithMessageHistory # LLM names declared as Any so mypy accepts fallback to None if imports fail GoogleGenerativeAI: Any @@ -45,11 +50,12 @@ class SDLCFlexibleAgent: """ def __init__( self, - provider: str, + provider: Optional[str] = None, api_key: Optional[str] = None, model: Optional[str] = None, tools: Optional[List[Any]] = None, dry_run: bool = False, + config_path: str = "config/model_config.yaml", **kwargs, ) -> None: """ @@ -57,55 +63,81 @@ def __init__( api_key: API key for the provider (if required) model: Model name (if required) tools: Optional list of tools + dry_run: If True, use a mock agent for testing. + config_path: Path to the model configuration file. 
kwargs: Additional LLM-specific arguments """ + with open(config_path, 'r', encoding='utf-8') as f: + config = yaml.safe_load(f) + + provider = provider or config.get('default_provider') provider = provider.lower() + self.dry_run = bool(dry_run) - # typed instance attributes for mypy self.llm: Any = None self.agent: Any = None - # In dry-run mode, avoid creating real LLMs or network calls + self.store = {} # In-memory session store + if self.dry_run: - # Use a no-network mock LLM and a MockAgent below - self.llm = None - if tools is None: - self.tools = [EchoTool()] - else: - self.tools = tools + self.tools = tools or [EchoTool()] self.agent = MockAgent() return - if provider == "gemini" or provider == "google": - # Use GoogleGenerativeAI from langchain-google-genai - gemini_model = model or "chat-bison-001" - self.llm = GoogleGenerativeAI( - google_api_key=api_key, - model=gemini_model, - **kwargs, - ) - elif provider == "openai": - self.llm = OpenAI(openai_api_key=api_key, model=model or "gpt-3.5-turbo", **kwargs) - elif provider == "ollama" and Ollama is not None: - self.llm = Ollama(model=model or "llama2", **kwargs) - else: - raise ValueError(f"Unsupported or unavailable provider: {provider}") - if tools is None: - self.tools = [EchoTool()] - else: - self.tools = tools - # initialize_agent returns an executor; keep as Any for flexibility in dry-run tests - self.agent = initialize_agent( - self.tools, - self.llm, - agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION, - verbose=not self.dry_run, + + # Configure agent from YAML + agent_config = config.get('agent', {}) + verbose = agent_config.get('verbose', True) + + # Configure provider + provider_config = config.get('providers', {}).get(provider, {}) + model = model or provider_config.get('default_model') + + try: + if provider == "gemini" or provider == "google": + self.llm = GoogleGenerativeAI(google_api_key=api_key, model=model, **kwargs) + elif provider == "openai": + self.llm = OpenAI(openai_api_key=api_key, 
model=model, **kwargs) + elif provider == "ollama" and Ollama is not None: + self.llm = Ollama(model=model, **kwargs) + else: + raise ValueError(f"Unsupported or unavailable provider: {provider}") + except Exception as e: + raise RuntimeError(f"Failed to initialize LLM provider '{provider}': {e}") from e + + self.tools = tools or [EchoTool()] + + prompt = ChatPromptTemplate.from_messages( + [ + ("system", "You are a helpful assistant."), + MessagesPlaceholder(variable_name="chat_history"), + ("human", "{input}"), + MessagesPlaceholder(variable_name="agent_scratchpad"), + ] ) - def run(self, input_data: str): + agent = create_tool_calling_agent(self.llm, self.tools, prompt) + agent_executor = AgentExecutor(agent=agent, tools=self.tools, verbose=verbose) + + def get_session_history(session_id: str) -> ChatMessageHistory: + if session_id not in self.store: + self.store[session_id] = ChatMessageHistory() + return self.store[session_id] + + self.agent = RunnableWithMessageHistory( + agent_executor, + get_session_history, + input_messages_key="input", + history_messages_key="chat_history", + ) + + + def run(self, input_data: str, session_id: str = "default"): """ Run the agent on the provided input data (prompt). 
""" - # If dry-run, MockAgent implements run; otherwise call the agent - return self.agent.run(input_data) + return self.agent.invoke( + {"input": input_data}, + config={"configurable": {"session_id": session_id}}, + ) class MockAgent: @@ -113,48 +145,45 @@ class MockAgent: def __init__(self): self.last_input = None - def run(self, input_data: str): - self.last_input = input_data - return f"dry-run-echo:{input_data}" - + def invoke(self, input_dict: dict, config: dict): + def invoke(self, input: dict, config: dict): + self.last_input = input["input"] + return {"output": f"dry-run-echo:{self.last_input}"} -if __name__ == "__main__": - import os - import argparse - from dotenv import load_dotenv - - # Load environment variables from .env file in the repo root - load_dotenv(os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), ".env")) +def main(): + """Main function to run the agent from the command line.""" parser = argparse.ArgumentParser() - parser.add_argument("--dry-run", dest="dry_run", action="store_true", help="Run agent in dry-run (no network) mode") - parser.add_argument("--provider", dest="provider", default=None, help="LLM provider to use (overrides LLM_PROVIDER env var)") - parser.add_argument("--model", dest="model", default=None, help="Model name to use (overrides LLM_MODEL env var)") + parser.add_argument("--dry-run", action="store_true", help="Run agent in dry-run mode") + parser.add_argument("--provider", help="LLM provider to use") + parser.add_argument("--model", help="Model name to use") + parser.add_argument("--prompt", default="What is the capital of France?", help="The prompt to run") + parser.add_argument("--session-id", default="default", help="The session ID for the conversation") args = parser.parse_args() - provider = args.provider or os.getenv("LLM_PROVIDER", "gemini") - model = args.model or os.getenv("LLM_MODEL", None) - api_key = None - - # DRY_RUN environment or flag - dry_run_env = os.getenv("DRY_RUN", 
"false").lower() in ("1", "true", "yes") - dry_run = dry_run_env or bool(args.dry_run) + # Load environment variables from .env file + load_dotenv(os.path.join(os.path.dirname(__file__), ".env")) - if not dry_run: - if provider.lower() == "gemini" or provider.lower() == "google": + api_key = None + if not args.dry_run: + if args.provider and (args.provider.lower() == "gemini" or args.provider.lower() == "google"): api_key = os.getenv("GOOGLE_GEMINI_API_KEY") - if not api_key: - raise ValueError("GOOGLE_GEMINI_API_KEY not found in environment or .env file.") - elif provider.lower() == "openai": + elif args.provider and args.provider.lower() == "openai": api_key = os.getenv("OPENAI_API_KEY") - if not api_key: - raise ValueError("OPENAI_API_KEY not found in environment or .env file.") - elif provider.lower() == "ollama": - api_key = None # Ollama may not require an API key - else: - raise ValueError(f"Unsupported provider: {provider}") - - agent = SDLCFlexibleAgent(provider=provider, api_key=api_key, model=model, dry_run=dry_run) - prompt = "What is the capital of France?" 
- result = agent.run(prompt) - print("Agent result:", result) + + try: + agent = SDLCFlexibleAgent( + provider=args.provider, + api_key=api_key, + model=args.model, + dry_run=args.dry_run, + ) + result = agent.run(args.prompt, session_id=args.session_id) + print("Agent result:", result) + except (ValueError, RuntimeError) as e: + print(f"Error: {e}") + +if __name__ == "__main__": + import argparse + from dotenv import load_dotenv + main() diff --git a/src/app.py b/src/app.py new file mode 100644 index 00000000..f5961e68 --- /dev/null +++ b/src/app.py @@ -0,0 +1,7 @@ +from fastapi import FastAPI + +app = FastAPI() + +@app.get("/") +def read_root(): + return {"Hello": "World"} diff --git a/test/unit/test_deepagent.py b/test/unit/test_deepagent.py index ec462222..fd02e7cf 100644 --- a/test/unit/test_deepagent.py +++ b/test/unit/test_deepagent.py @@ -1,46 +1,16 @@ import pytest -from types import SimpleNamespace - -# Import the module under test from src.agents import deepagent - -class DummyLLM: - """A minimal LLM-like object that provides a generate or call-like interface used by LangChain.""" - def __init__(self, response_text="Dummy response"): - self.response_text = response_text - - def __call__(self, *args, **kwargs): - return self.response_text - - async def agenerate(self, *args, **kwargs): - class R: - generations = [[SimpleNamespace(text=self.response_text)]] - return R() - - -class DummyAgent: - def __init__(self): - self.last_input = None - - def run(self, input_data: str): - self.last_input = input_data - return f"mock-run:{input_data}" - - -def test_sdlcflexibleagent_with_mocked_llm(monkeypatch): - # Arrange: replace initialize_agent with a factory that returns our DummyAgent - monkeypatch.setattr(deepagent, "initialize_agent", lambda tools, llm, agent, verbose: DummyAgent()) - - # Also bypass provider selection by directly constructing the agent and injecting a dummy LLM - agent = deepagent.SDLCFlexibleAgent.__new__(deepagent.SDLCFlexibleAgent) - 
agent.llm = DummyLLM() - agent.tools = [deepagent.EchoTool()] - agent.agent = deepagent.initialize_agent(agent.tools, agent.llm, agent=None, verbose=False) +def test_sdlcflexibleagent_dry_run(): + """ + Tests that the SDLCFlexibleAgent in dry-run mode uses the MockAgent and returns the expected echo response. + """ + # Arrange + agent = deepagent.SDLCFlexibleAgent(dry_run=True) # Act - resp = deepagent.SDLCFlexibleAgent.run(agent, "hello") + resp = agent.run("hello") # Assert - assert resp == "mock-run:hello" - assert agent.agent.last_input == "hello" + assert isinstance(agent.agent, deepagent.MockAgent) + assert resp == {"output": "dry-run-echo:hello"} diff --git a/test/unit/test_deepagent_providers.py b/test/unit/test_deepagent_providers.py index 5e0bdd11..2a808d3b 100644 --- a/test/unit/test_deepagent_providers.py +++ b/test/unit/test_deepagent_providers.py @@ -1,45 +1,40 @@ -from types import SimpleNamespace import pytest - from src.agents import deepagent - class FakeLLM: def __init__(self, **kwargs): self.kwargs = kwargs - def test_provider_selection_gemini(monkeypatch): - # Patch GoogleGenerativeAI to our fake monkeypatch.setattr(deepagent, 'GoogleGenerativeAI', lambda **kw: FakeLLM(**kw)) - # Prevent LangChain from validating the LLM by returning a MockAgent - monkeypatch.setattr(deepagent, 'initialize_agent', lambda tools, llm, agent, verbose: deepagent.MockAgent()) - agent = deepagent.SDLCFlexibleAgent(provider='gemini', api_key='x', model='chat-bison-001', tools=[deepagent.EchoTool()]) + monkeypatch.setattr(deepagent, 'create_tool_calling_agent', lambda llm, tools, prompt: None) + monkeypatch.setattr(deepagent, 'AgentExecutor', lambda agent, tools, verbose: None) + monkeypatch.setattr(deepagent, 'RunnableWithMessageHistory', lambda executor, get_session_history, input_messages_key, history_messages_key: None) + agent = deepagent.SDLCFlexibleAgent(provider='gemini', api_key='x') assert isinstance(agent.llm, FakeLLM) - def 
test_provider_selection_openai(monkeypatch): monkeypatch.setattr(deepagent, 'OpenAI', lambda **kw: FakeLLM(**kw)) - monkeypatch.setattr(deepagent, 'initialize_agent', lambda tools, llm, agent, verbose: deepagent.MockAgent()) - agent = deepagent.SDLCFlexibleAgent(provider='openai', api_key='openai-key', model='gpt-3.5', tools=[deepagent.EchoTool()]) + monkeypatch.setattr(deepagent, 'create_tool_calling_agent', lambda llm, tools, prompt: None) + monkeypatch.setattr(deepagent, 'AgentExecutor', lambda agent, tools, verbose: None) + monkeypatch.setattr(deepagent, 'RunnableWithMessageHistory', lambda executor, get_session_history, input_messages_key, history_messages_key: None) + agent = deepagent.SDLCFlexibleAgent(provider='openai', api_key='openai-key') assert isinstance(agent.llm, FakeLLM) - def test_provider_selection_ollama(monkeypatch): - # If Ollama is present, patch it; else ensure constructor raises for missing provider if deepagent.Ollama is not None: monkeypatch.setattr(deepagent, 'Ollama', lambda **kw: FakeLLM(**kw)) - monkeypatch.setattr(deepagent, 'initialize_agent', lambda tools, llm, agent, verbose: deepagent.MockAgent()) - agent = deepagent.SDLCFlexibleAgent(provider='ollama', model='llama2', tools=[deepagent.EchoTool()]) + monkeypatch.setattr(deepagent, 'create_tool_calling_agent', lambda llm, tools, prompt: None) + monkeypatch.setattr(deepagent, 'AgentExecutor', lambda agent, tools, verbose: None) + monkeypatch.setattr(deepagent, 'RunnableWithMessageHistory', lambda executor, get_session_history, input_messages_key, history_messages_key: None) + agent = deepagent.SDLCFlexibleAgent(provider='ollama') assert isinstance(agent.llm, FakeLLM) else: with pytest.raises(ValueError): deepagent.SDLCFlexibleAgent(provider='ollama') - def test_dry_run_flag(): - agent = deepagent.SDLCFlexibleAgent(provider='gemini', dry_run=True) - # In dry-run mode llm should be None and agent is a MockAgent + agent = deepagent.SDLCFlexibleAgent(dry_run=True) assert agent.llm is 
None - assert hasattr(agent, 'agent') - assert agent.agent.run('x') == 'dry-run-echo:x' + assert isinstance(agent.agent, deepagent.MockAgent) + assert agent.run("test") == {"output": "dry-run-echo:test"}