
👷 Refactor Docs CI, run in multiple workers with a dynamic matrix to optimize speed (#9732)

Branch: pull/9734/head
Author: Sebastián Ramírez, 2 years ago (committed by GitHub)
Commit: c09e5cdfa7 (parent not listed)
Signature: No known key found for this signature in database. GPG Key ID: 4AEE18F83AFDEB23
Changed files (5):
   73  .github/workflows/build-docs.yml
   18  .github/workflows/deploy-docs.yml
    1  .gitignore
  152  scripts/docs.py
   11  scripts/zip-docs.sh

.github/workflows/build-docs.yml  (73 changed lines)

@@ -23,15 +23,45 @@ jobs:
       id: filter
       with:
         filters: |
           docs:
             - README.md
             - docs/**
             - docs_src/**
             - requirements-docs.txt
+  langs:
+    needs:
+      - changes
+    runs-on: ubuntu-latest
+    outputs:
+      langs: ${{ steps.show-langs.outputs.langs }}
+    steps:
+      - uses: actions/checkout@v3
+      - name: Set up Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: "3.11"
+      - uses: actions/cache@v3
+        id: cache
+        with:
+          path: ${{ env.pythonLocation }}
+          key: ${{ runner.os }}-python-docs-${{ env.pythonLocation }}-${{ hashFiles('pyproject.toml', 'requirements-docs.txt') }}-v03
+      - name: Install docs extras
+        if: steps.cache.outputs.cache-hit != 'true'
+        run: pip install -r requirements-docs.txt
+      - name: Export Language Codes
+        id: show-langs
+        run: |
+          echo "langs=$(python ./scripts/docs.py langs-json)" >> $GITHUB_OUTPUT
   build-docs:
-    needs: changes
+    needs:
+      - changes
+      - langs
     if: ${{ needs.changes.outputs.docs == 'true' }}
     runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        lang: ${{ fromJson(needs.langs.outputs.langs) }}
     steps:
       - name: Dump GitHub context
         env:
@@ -53,21 +83,24 @@ jobs:
       - name: Install Material for MkDocs Insiders
         if: ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false ) && steps.cache.outputs.cache-hit != 'true'
         run: pip install git+https://${{ secrets.ACTIONS_TOKEN }}@github.com/squidfunk/mkdocs-material-insiders.git
+      - name: Update Languages
+        run: python ./scripts/docs.py update-languages
       - name: Build Docs
-        run: python ./scripts/docs.py build-all
-      - name: Zip docs
-        run: bash ./scripts/zip-docs.sh
+        run: python ./scripts/docs.py build-lang ${{ matrix.lang }}
       - uses: actions/upload-artifact@v3
         with:
-          name: docs-zip
-          path: ./site/docs.zip
-      - name: Deploy to Netlify
-        uses: nwtgck/[email protected]
-        with:
-          publish-dir: './site'
-          production-branch: master
-          github-token: ${{ secrets.FASTAPI_BUILD_DOCS_NETLIFY }}
-          enable-commit-comment: false
-        env:
-          NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
-          NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }}
+          name: docs-site
+          path: ./site/**
+  # https://github.com/marketplace/actions/alls-green#why
+  docs-all-green:  # This job does nothing and is only used for the branch protection
+    if: always()
+    needs:
+      - build-docs
+    runs-on: ubuntu-latest
+    steps:
+      - name: Decide whether the needed jobs succeeded or failed
+        uses: re-actors/alls-green@release/v1
+        with:
+          jobs: ${{ toJSON(needs) }}
+          allowed-skips: build-docs
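How the dynamic matrix is wired: the new langs job runs python ./scripts/docs.py langs-json, writes the resulting JSON array of language codes to $GITHUB_OUTPUT, and build-docs expands it with fromJson so each language gets its own runner. The sketch below is a rough local approximation of that fan-out in Python; the process-pool wrapper and worker count are illustrative, not part of the commit.

import json
import subprocess
from concurrent.futures import ProcessPoolExecutor


def build_one_lang(lang: str) -> str:
    # What a single matrix job does in CI: build one language.
    subprocess.run(["python", "./scripts/docs.py", "build-lang", lang], check=True)
    return lang


if __name__ == "__main__":
    # Same JSON list that the langs job writes to $GITHUB_OUTPUT.
    result = subprocess.run(
        ["python", "./scripts/docs.py", "langs-json"],
        check=True,
        capture_output=True,
        text=True,
    )
    langs = json.loads(result.stdout)
    # strategy.matrix + fromJson gives one CI job per language;
    # a small process pool approximates that locally.
    with ProcessPoolExecutor(max_workers=4) as pool:
        for lang in pool.map(build_one_lang, langs):
            print(f"Built docs for: {lang}")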

.github/workflows/preview-docs.yml → .github/workflows/deploy-docs.yml  (18 changed lines)

@@ -1,4 +1,4 @@
-name: Preview Docs
+name: Deploy Docs
 on:
   workflow_run:
     workflows:
@@ -7,9 +7,13 @@ on:
       - completed
 jobs:
-  preview-docs:
+  deploy-docs:
     runs-on: ubuntu-latest
     steps:
+      - name: Dump GitHub context
+        env:
+          GITHUB_CONTEXT: ${{ toJson(github) }}
+        run: echo "$GITHUB_CONTEXT"
       - uses: actions/checkout@v3
       - name: Clean site
         run: |
@@ -23,21 +27,15 @@ jobs:
           github_token: ${{ secrets.FASTAPI_PREVIEW_DOCS_DOWNLOAD_ARTIFACTS }}
           workflow: build-docs.yml
           run_id: ${{ github.event.workflow_run.id }}
-          name: docs-zip
+          name: docs-site
           path: ./site/
-      - name: Unzip docs
-        if: steps.download.outputs.found_artifact == 'true'
-        run: |
-          cd ./site
-          unzip docs.zip
-          rm -f docs.zip
       - name: Deploy to Netlify
         if: steps.download.outputs.found_artifact == 'true'
         id: netlify
         uses: nwtgck/[email protected]
         with:
           publish-dir: './site'
-          production-deploy: false
+          production-deploy: ${{ github.event.workflow_run.head_repository.full_name == github.repository && github.event.workflow_run.head_branch == 'master' }}
           github-token: ${{ secrets.FASTAPI_PREVIEW_DOCS_NETLIFY }}
           enable-commit-comment: false
         env:
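With the rename from Preview Docs to Deploy Docs, one workflow handles both cases: it always downloads the docs-site artifact, and the production-deploy expression decides whether Netlify publishes to production or only creates a preview. The Python sketch below just spells out that expression; the event dict shape is abbreviated and hypothetical.

def is_production_deploy(event: dict, repository: str) -> bool:
    # Mirrors the workflow expression: the run must come from the main
    # repository (not a fork) and the built branch must be master.
    run = event["workflow_run"]
    return (
        run["head_repository"]["full_name"] == repository
        and run["head_branch"] == "master"
    )


# A run triggered from a fork's pull request stays a preview deploy.
fork_event = {
    "workflow_run": {
        "head_repository": {"full_name": "someuser/fastapi"},
        "head_branch": "some-feature",
    }
}
print(is_production_deploy(fork_event, "tiangolo/fastapi"))  # False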

.gitignore  (1 changed line)

@@ -16,6 +16,7 @@ Pipfile.lock
 env3.*
 env
 docs_build
+site_build
 venv
 docs.zip
 archive.zip

scripts/docs.py  (152 changed lines)

@@ -1,3 +1,4 @@
+import json
 import os
 import re
 import shutil
@@ -133,75 +134,83 @@ def build_lang(
     build_lang_path = build_dir_path / lang
     en_lang_path = Path("docs/en")
     site_path = Path("site").absolute()
+    build_site_path = Path("site_build").absolute()
+    build_site_dist_path = build_site_path / lang
     if lang == "en":
         dist_path = site_path
     else:
         dist_path: Path = site_path / lang
     shutil.rmtree(build_lang_path, ignore_errors=True)
     shutil.copytree(lang_path, build_lang_path)
-    shutil.copytree(en_docs_path / "data", build_lang_path / "data")
-    overrides_src = en_docs_path / "overrides"
-    overrides_dest = build_lang_path / "overrides"
-    for path in overrides_src.iterdir():
-        dest_path = overrides_dest / path.name
-        if not dest_path.exists():
-            shutil.copy(path, dest_path)
-    en_config_path: Path = en_lang_path / mkdocs_name
-    en_config: dict = mkdocs.utils.yaml_load(en_config_path.read_text(encoding="utf-8"))
-    nav = en_config["nav"]
-    lang_config_path: Path = lang_path / mkdocs_name
-    lang_config: dict = mkdocs.utils.yaml_load(
-        lang_config_path.read_text(encoding="utf-8")
-    )
-    lang_nav = lang_config["nav"]
-    # Exclude first 2 entries FastAPI and Languages, for custom handling
-    use_nav = nav[2:]
-    lang_use_nav = lang_nav[2:]
-    file_to_nav = get_file_to_nav_map(use_nav)
-    sections = get_sections(use_nav)
-    lang_file_to_nav = get_file_to_nav_map(lang_use_nav)
-    use_lang_file_to_nav = get_file_to_nav_map(lang_use_nav)
-    for file in file_to_nav:
-        file_path = Path(file)
-        lang_file_path: Path = build_lang_path / "docs" / file_path
-        en_file_path: Path = en_lang_path / "docs" / file_path
-        lang_file_path.parent.mkdir(parents=True, exist_ok=True)
-        if not lang_file_path.is_file():
-            en_text = en_file_path.read_text(encoding="utf-8")
-            lang_text = get_text_with_translate_missing(en_text)
-            lang_file_path.write_text(lang_text, encoding="utf-8")
-        file_key = file_to_nav[file]
-        use_lang_file_to_nav[file] = file_key
-        if file_key:
-            composite_key = ()
-            new_key = ()
-            for key_part in file_key:
-                composite_key += (key_part,)
-                key_first_file = sections[composite_key]
-                if key_first_file in lang_file_to_nav:
-                    new_key = lang_file_to_nav[key_first_file]
-                else:
-                    new_key += (key_part,)
-            use_lang_file_to_nav[file] = new_key
-    key_to_section = {(): []}
-    for file, orig_file_key in file_to_nav.items():
-        if file in use_lang_file_to_nav:
-            file_key = use_lang_file_to_nav[file]
-        else:
-            file_key = orig_file_key
-        section = get_key_section(key_to_section=key_to_section, key=file_key)
-        section.append(file)
-    new_nav = key_to_section[()]
-    export_lang_nav = [lang_nav[0], nav[1]] + new_nav
-    lang_config["nav"] = export_lang_nav
-    build_lang_config_path: Path = build_lang_path / mkdocs_name
-    build_lang_config_path.write_text(
-        yaml.dump(lang_config, sort_keys=False, width=200, allow_unicode=True),
-        encoding="utf-8",
-    )
+    if not lang == "en":
+        shutil.copytree(en_docs_path / "data", build_lang_path / "data")
+        overrides_src = en_docs_path / "overrides"
+        overrides_dest = build_lang_path / "overrides"
+        for path in overrides_src.iterdir():
+            dest_path = overrides_dest / path.name
+            if not dest_path.exists():
+                shutil.copy(path, dest_path)
+        en_config_path: Path = en_lang_path / mkdocs_name
+        en_config: dict = mkdocs.utils.yaml_load(
+            en_config_path.read_text(encoding="utf-8")
+        )
+        nav = en_config["nav"]
+        lang_config_path: Path = lang_path / mkdocs_name
+        lang_config: dict = mkdocs.utils.yaml_load(
+            lang_config_path.read_text(encoding="utf-8")
+        )
+        lang_nav = lang_config["nav"]
+        # Exclude first 2 entries FastAPI and Languages, for custom handling
+        use_nav = nav[2:]
+        lang_use_nav = lang_nav[2:]
+        file_to_nav = get_file_to_nav_map(use_nav)
+        sections = get_sections(use_nav)
+        lang_file_to_nav = get_file_to_nav_map(lang_use_nav)
+        use_lang_file_to_nav = get_file_to_nav_map(lang_use_nav)
+        for file in file_to_nav:
+            file_path = Path(file)
+            lang_file_path: Path = build_lang_path / "docs" / file_path
+            en_file_path: Path = en_lang_path / "docs" / file_path
+            lang_file_path.parent.mkdir(parents=True, exist_ok=True)
+            if not lang_file_path.is_file():
+                en_text = en_file_path.read_text(encoding="utf-8")
+                lang_text = get_text_with_translate_missing(en_text)
+                lang_file_path.write_text(lang_text, encoding="utf-8")
+            file_key = file_to_nav[file]
+            use_lang_file_to_nav[file] = file_key
+            if file_key:
+                composite_key = ()
+                new_key = ()
+                for key_part in file_key:
+                    composite_key += (key_part,)
+                    key_first_file = sections[composite_key]
+                    if key_first_file in lang_file_to_nav:
+                        new_key = lang_file_to_nav[key_first_file]
+                    else:
+                        new_key += (key_part,)
+                use_lang_file_to_nav[file] = new_key
+        key_to_section = {(): []}
+        for file, orig_file_key in file_to_nav.items():
+            if file in use_lang_file_to_nav:
+                file_key = use_lang_file_to_nav[file]
+            else:
+                file_key = orig_file_key
+            section = get_key_section(key_to_section=key_to_section, key=file_key)
+            section.append(file)
+        new_nav = key_to_section[()]
+        export_lang_nav = [lang_nav[0], nav[1]] + new_nav
+        lang_config["nav"] = export_lang_nav
+        build_lang_config_path: Path = build_lang_path / mkdocs_name
+        build_lang_config_path.write_text(
+            yaml.dump(lang_config, sort_keys=False, width=200, allow_unicode=True),
+            encoding="utf-8",
+        )
     current_dir = os.getcwd()
     os.chdir(build_lang_path)
-    subprocess.run(["mkdocs", "build", "--site-dir", dist_path], check=True)
+    shutil.rmtree(build_site_dist_path, ignore_errors=True)
+    shutil.rmtree(dist_path, ignore_errors=True)
+    subprocess.run(["mkdocs", "build", "--site-dir", build_site_dist_path], check=True)
+    shutil.copytree(build_site_dist_path, dist_path, dirs_exist_ok=True)
     os.chdir(current_dir)
     typer.secho(f"Successfully built docs for: {lang}", color=typer.colors.GREEN)
@@ -271,18 +280,8 @@ def build_all():
     Build mkdocs site for en, and then build each language inside, end result is located
     at directory ./site/ with each language inside.
     """
-    site_path = Path("site").absolute()
     update_languages(lang=None)
-    current_dir = os.getcwd()
-    os.chdir(en_docs_path)
-    typer.echo("Building docs for: en")
-    subprocess.run(["mkdocs", "build", "--site-dir", site_path], check=True)
-    os.chdir(current_dir)
-    langs = []
-    for lang in get_lang_paths():
-        if lang == en_docs_path or not lang.is_dir():
-            continue
-        langs.append(lang.name)
+    langs = [lang.name for lang in get_lang_paths() if lang.is_dir()]
     cpu_count = os.cpu_count() or 1
     process_pool_size = cpu_count * 4
     typer.echo(f"Using process pool size: {process_pool_size}")
@@ -397,6 +396,15 @@ def update_config(lang: str):
     )


+@app.command()
+def langs_json():
+    langs = []
+    for lang_path in get_lang_paths():
+        if lang_path.is_dir():
+            langs.append(lang_path.name)
+    print(json.dumps(langs))
+
+
 def get_key_section(
     *, key_to_section: Dict[Tuple[str, ...], list], key: Tuple[str, ...]
 ) -> list:
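The build_lang change above is what lets each language build independently: mkdocs writes to site_build/<lang>, and the result is then copied into the final layout, en at the root of site/ and every other language under site/<lang>/. The following is a minimal sketch of just that copy step under the same path conventions; the helper name and hardcoded paths are for illustration only.

import shutil
from pathlib import Path


def copy_built_lang(lang: str) -> Path:
    # mkdocs has already built into site_build/<lang>; copy the output to
    # its final location: site/ for en, site/<lang> for everything else.
    build_site_dist_path = Path("site_build").absolute() / lang
    site_path = Path("site").absolute()
    dist_path = site_path if lang == "en" else site_path / lang
    shutil.rmtree(dist_path, ignore_errors=True)
    shutil.copytree(build_site_dist_path, dist_path, dirs_exist_ok=True)
    return dist_path


# copy_built_lang("en") -> ./site, copy_built_lang("es") -> ./site/es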

scripts/zip-docs.sh  (11 changed lines, file deleted)

@ -1,11 +0,0 @@
#!/usr/bin/env bash
set -x
set -e
cd ./site
if [ -f docs.zip ]; then
rm -rf docs.zip
fi
zip -r docs.zip ./