From c09e5cdfa70a6c0226e10d93595717bf3c0feedb Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Sebasti=C3=A1n=20Ram=C3=ADrez?=
Date: Sat, 24 Jun 2023 02:00:12 +0200
Subject: [PATCH] =?UTF-8?q?=F0=9F=91=B7=20Refactor=20Docs=20CI,=20run=20in?=
 =?UTF-8?q?=20multiple=20workers=20with=20a=20dynamic=20matrix=20to=20opti?=
 =?UTF-8?q?mize=20speed=20(#9732)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .github/workflows/build-docs.yml              |  73 ++++++---
 .../{preview-docs.yml => deploy-docs.yml}     |  18 +--
 .gitignore                                    |   1 +
 scripts/docs.py                               | 152 +++++++++---------
 scripts/zip-docs.sh                           |  11 --
 5 files changed, 142 insertions(+), 113 deletions(-)
 rename .github/workflows/{preview-docs.yml => deploy-docs.yml} (79%)
 delete mode 100644 scripts/zip-docs.sh

diff --git a/.github/workflows/build-docs.yml b/.github/workflows/build-docs.yml
index fb1fa6f09..c2880ef71 100644
--- a/.github/workflows/build-docs.yml
+++ b/.github/workflows/build-docs.yml
@@ -23,15 +23,45 @@ jobs:
         id: filter
         with:
           filters: |
-          docs:
-            - README.md
-            - docs/**
-            - docs_src/**
-            - requirements-docs.txt
+            docs:
+              - README.md
+              - docs/**
+              - docs_src/**
+              - requirements-docs.txt
+  langs:
+    needs:
+      - changes
+    runs-on: ubuntu-latest
+    outputs:
+      langs: ${{ steps.show-langs.outputs.langs }}
+    steps:
+      - uses: actions/checkout@v3
+      - name: Set up Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: "3.11"
+      - uses: actions/cache@v3
+        id: cache
+        with:
+          path: ${{ env.pythonLocation }}
+          key: ${{ runner.os }}-python-docs-${{ env.pythonLocation }}-${{ hashFiles('pyproject.toml', 'requirements-docs.txt') }}-v03
+      - name: Install docs extras
+        if: steps.cache.outputs.cache-hit != 'true'
+        run: pip install -r requirements-docs.txt
+      - name: Export Language Codes
+        id: show-langs
+        run: |
+          echo "langs=$(python ./scripts/docs.py langs-json)" >> $GITHUB_OUTPUT
   build-docs:
-    needs: changes
+    needs:
+      - changes
+      - langs
     if: ${{ needs.changes.outputs.docs == 'true' }}
     runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        lang: ${{ fromJson(needs.langs.outputs.langs) }}
     steps:
       - name: Dump GitHub context
         env:
@@ -53,21 +83,24 @@
       - name: Install Material for MkDocs Insiders
         if: ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false ) && steps.cache.outputs.cache-hit != 'true'
         run: pip install git+https://${{ secrets.ACTIONS_TOKEN }}@github.com/squidfunk/mkdocs-material-insiders.git
+      - name: Update Languages
+        run: python ./scripts/docs.py update-languages
       - name: Build Docs
-        run: python ./scripts/docs.py build-all
-      - name: Zip docs
-        run: bash ./scripts/zip-docs.sh
+        run: python ./scripts/docs.py build-lang ${{ matrix.lang }}
       - uses: actions/upload-artifact@v3
         with:
-          name: docs-zip
-          path: ./site/docs.zip
-      - name: Deploy to Netlify
-        uses: nwtgck/actions-netlify@v2.0.0
+          name: docs-site
+          path: ./site/**
+
+  # https://github.com/marketplace/actions/alls-green#why
+  docs-all-green:  # This job does nothing and is only used for the branch protection
+    if: always()
+    needs:
+      - build-docs
+    runs-on: ubuntu-latest
+    steps:
+      - name: Decide whether the needed jobs succeeded or failed
+        uses: re-actors/alls-green@release/v1
         with:
-          publish-dir: './site'
-          production-branch: master
-          github-token: ${{ secrets.FASTAPI_BUILD_DOCS_NETLIFY }}
-          enable-commit-comment: false
-        env:
-          NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
-          NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }}
+          jobs: ${{ toJSON(needs) }}
+          allowed-skips: build-docs
diff --git a/.github/workflows/preview-docs.yml b/.github/workflows/deploy-docs.yml
similarity index 79%
rename from .github/workflows/preview-docs.yml
rename to .github/workflows/deploy-docs.yml
index da98f5d2b..312d835af 100644
--- a/.github/workflows/preview-docs.yml
+++ b/.github/workflows/deploy-docs.yml
@@ -1,4 +1,4 @@
-name: Preview Docs
+name: Deploy Docs
 on:
   workflow_run:
     workflows:
@@ -7,9 +7,13 @@ on:
       - completed
 
 jobs:
-  preview-docs:
+  deploy-docs:
     runs-on: ubuntu-latest
     steps:
+      - name: Dump GitHub context
+        env:
+          GITHUB_CONTEXT: ${{ toJson(github) }}
+        run: echo "$GITHUB_CONTEXT"
       - uses: actions/checkout@v3
       - name: Clean site
         run: |
@@ -23,21 +27,15 @@ jobs:
           github_token: ${{ secrets.FASTAPI_PREVIEW_DOCS_DOWNLOAD_ARTIFACTS }}
           workflow: build-docs.yml
           run_id: ${{ github.event.workflow_run.id }}
-          name: docs-zip
+          name: docs-site
           path: ./site/
-      - name: Unzip docs
-        if: steps.download.outputs.found_artifact == 'true'
-        run: |
-          cd ./site
-          unzip docs.zip
-          rm -f docs.zip
       - name: Deploy to Netlify
         if: steps.download.outputs.found_artifact == 'true'
         id: netlify
         uses: nwtgck/actions-netlify@v2.0.0
         with:
           publish-dir: './site'
-          production-deploy: false
+          production-deploy: ${{ github.event.workflow_run.head_repository.full_name == github.repository && github.event.workflow_run.head_branch == 'master' }}
           github-token: ${{ secrets.FASTAPI_PREVIEW_DOCS_NETLIFY }}
           enable-commit-comment: false
         env:
diff --git a/.gitignore b/.gitignore
index a26bb5cd6..3cb64c047 100644
--- a/.gitignore
+++ b/.gitignore
@@ -16,6 +16,7 @@ Pipfile.lock
 env3.*
 env
 docs_build
+site_build
 venv
 docs.zip
 archive.zip
diff --git a/scripts/docs.py b/scripts/docs.py
index e0953b8ed..c464f8dbe 100644
--- a/scripts/docs.py
+++ b/scripts/docs.py
@@ -1,3 +1,4 @@
+import json
 import os
 import re
 import shutil
@@ -133,75 +134,83 @@ def build_lang(
     build_lang_path = build_dir_path / lang
     en_lang_path = Path("docs/en")
     site_path = Path("site").absolute()
+    build_site_path = Path("site_build").absolute()
+    build_site_dist_path = build_site_path / lang
     if lang == "en":
         dist_path = site_path
     else:
         dist_path: Path = site_path / lang
     shutil.rmtree(build_lang_path, ignore_errors=True)
     shutil.copytree(lang_path, build_lang_path)
-    shutil.copytree(en_docs_path / "data", build_lang_path / "data")
-    overrides_src = en_docs_path / "overrides"
-    overrides_dest = build_lang_path / "overrides"
-    for path in overrides_src.iterdir():
-        dest_path = overrides_dest / path.name
-        if not dest_path.exists():
-            shutil.copy(path, dest_path)
-    en_config_path: Path = en_lang_path / mkdocs_name
-    en_config: dict = mkdocs.utils.yaml_load(en_config_path.read_text(encoding="utf-8"))
-    nav = en_config["nav"]
-    lang_config_path: Path = lang_path / mkdocs_name
-    lang_config: dict = mkdocs.utils.yaml_load(
-        lang_config_path.read_text(encoding="utf-8")
-    )
-    lang_nav = lang_config["nav"]
-    # Exclude first 2 entries FastAPI and Languages, for custom handling
-    use_nav = nav[2:]
-    lang_use_nav = lang_nav[2:]
-    file_to_nav = get_file_to_nav_map(use_nav)
-    sections = get_sections(use_nav)
-    lang_file_to_nav = get_file_to_nav_map(lang_use_nav)
-    use_lang_file_to_nav = get_file_to_nav_map(lang_use_nav)
-    for file in file_to_nav:
-        file_path = Path(file)
-        lang_file_path: Path = build_lang_path / "docs" / file_path
-        en_file_path: Path = en_lang_path / "docs" / file_path
-        lang_file_path.parent.mkdir(parents=True, exist_ok=True)
-        if not lang_file_path.is_file():
-            en_text = en_file_path.read_text(encoding="utf-8")
-            lang_text = get_text_with_translate_missing(en_text)
-            lang_file_path.write_text(lang_text, encoding="utf-8")
-        file_key = file_to_nav[file]
-        use_lang_file_to_nav[file] = file_key
-        if file_key:
-            composite_key = ()
-            new_key = ()
-            for key_part in file_key:
-                composite_key += (key_part,)
-                key_first_file = sections[composite_key]
-                if key_first_file in lang_file_to_nav:
-                    new_key = lang_file_to_nav[key_first_file]
-                else:
-                    new_key += (key_part,)
-            use_lang_file_to_nav[file] = new_key
-    key_to_section = {(): []}
-    for file, orig_file_key in file_to_nav.items():
-        if file in use_lang_file_to_nav:
-            file_key = use_lang_file_to_nav[file]
-        else:
-            file_key = orig_file_key
-        section = get_key_section(key_to_section=key_to_section, key=file_key)
-        section.append(file)
-    new_nav = key_to_section[()]
-    export_lang_nav = [lang_nav[0], nav[1]] + new_nav
-    lang_config["nav"] = export_lang_nav
-    build_lang_config_path: Path = build_lang_path / mkdocs_name
-    build_lang_config_path.write_text(
-        yaml.dump(lang_config, sort_keys=False, width=200, allow_unicode=True),
-        encoding="utf-8",
-    )
+    if not lang == "en":
+        shutil.copytree(en_docs_path / "data", build_lang_path / "data")
+        overrides_src = en_docs_path / "overrides"
+        overrides_dest = build_lang_path / "overrides"
+        for path in overrides_src.iterdir():
+            dest_path = overrides_dest / path.name
+            if not dest_path.exists():
+                shutil.copy(path, dest_path)
+        en_config_path: Path = en_lang_path / mkdocs_name
+        en_config: dict = mkdocs.utils.yaml_load(
+            en_config_path.read_text(encoding="utf-8")
+        )
+        nav = en_config["nav"]
+        lang_config_path: Path = lang_path / mkdocs_name
+        lang_config: dict = mkdocs.utils.yaml_load(
+            lang_config_path.read_text(encoding="utf-8")
+        )
+        lang_nav = lang_config["nav"]
+        # Exclude first 2 entries FastAPI and Languages, for custom handling
+        use_nav = nav[2:]
+        lang_use_nav = lang_nav[2:]
+        file_to_nav = get_file_to_nav_map(use_nav)
+        sections = get_sections(use_nav)
+        lang_file_to_nav = get_file_to_nav_map(lang_use_nav)
+        use_lang_file_to_nav = get_file_to_nav_map(lang_use_nav)
+        for file in file_to_nav:
+            file_path = Path(file)
+            lang_file_path: Path = build_lang_path / "docs" / file_path
+            en_file_path: Path = en_lang_path / "docs" / file_path
+            lang_file_path.parent.mkdir(parents=True, exist_ok=True)
+            if not lang_file_path.is_file():
+                en_text = en_file_path.read_text(encoding="utf-8")
+                lang_text = get_text_with_translate_missing(en_text)
+                lang_file_path.write_text(lang_text, encoding="utf-8")
+            file_key = file_to_nav[file]
+            use_lang_file_to_nav[file] = file_key
+            if file_key:
+                composite_key = ()
+                new_key = ()
+                for key_part in file_key:
+                    composite_key += (key_part,)
+                    key_first_file = sections[composite_key]
+                    if key_first_file in lang_file_to_nav:
+                        new_key = lang_file_to_nav[key_first_file]
+                    else:
+                        new_key += (key_part,)
+                use_lang_file_to_nav[file] = new_key
+        key_to_section = {(): []}
+        for file, orig_file_key in file_to_nav.items():
+            if file in use_lang_file_to_nav:
+                file_key = use_lang_file_to_nav[file]
+            else:
+                file_key = orig_file_key
+            section = get_key_section(key_to_section=key_to_section, key=file_key)
+            section.append(file)
+        new_nav = key_to_section[()]
+        export_lang_nav = [lang_nav[0], nav[1]] + new_nav
+        lang_config["nav"] = export_lang_nav
+        build_lang_config_path: Path = build_lang_path / mkdocs_name
+        build_lang_config_path.write_text(
+            yaml.dump(lang_config, sort_keys=False, width=200, allow_unicode=True),
+            encoding="utf-8",
+        )
     current_dir = os.getcwd()
     os.chdir(build_lang_path)
-    subprocess.run(["mkdocs", "build", "--site-dir", dist_path], check=True)
+    shutil.rmtree(build_site_dist_path, ignore_errors=True)
+    shutil.rmtree(dist_path, ignore_errors=True)
+    subprocess.run(["mkdocs", "build", "--site-dir", build_site_dist_path], check=True)
+    shutil.copytree(build_site_dist_path, dist_path, dirs_exist_ok=True)
     os.chdir(current_dir)
     typer.secho(f"Successfully built docs for: {lang}", color=typer.colors.GREEN)
@@ -271,18 +280,8 @@ def build_all():
     Build mkdocs site for en, and then build each language inside, end result is located
     at directory ./site/ with each language inside.
     """
-    site_path = Path("site").absolute()
     update_languages(lang=None)
-    current_dir = os.getcwd()
-    os.chdir(en_docs_path)
-    typer.echo("Building docs for: en")
-    subprocess.run(["mkdocs", "build", "--site-dir", site_path], check=True)
-    os.chdir(current_dir)
-    langs = []
-    for lang in get_lang_paths():
-        if lang == en_docs_path or not lang.is_dir():
-            continue
-        langs.append(lang.name)
+    langs = [lang.name for lang in get_lang_paths() if lang.is_dir()]
     cpu_count = os.cpu_count() or 1
     process_pool_size = cpu_count * 4
     typer.echo(f"Using process pool size: {process_pool_size}")
@@ -397,6 +396,15 @@ def update_config(lang: str):
     )
 
 
+@app.command()
+def langs_json():
+    langs = []
+    for lang_path in get_lang_paths():
+        if lang_path.is_dir():
+            langs.append(lang_path.name)
+    print(json.dumps(langs))
+
+
 def get_key_section(
     *, key_to_section: Dict[Tuple[str, ...], list], key: Tuple[str, ...]
 ) -> list:
diff --git a/scripts/zip-docs.sh b/scripts/zip-docs.sh
deleted file mode 100644
index 47c3b0977..000000000
--- a/scripts/zip-docs.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env bash
-
-set -x
-set -e
-
-cd ./site
-
-if [ -f docs.zip ]; then
-    rm -rf docs.zip
-fi
-zip -r docs.zip ./