Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 23 additions & 0 deletions .github/workflows/local_lint.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -41,3 +41,26 @@ jobs:
- name: Test
run: pnpm run actions:test

# Lint and type-check the repository's Python scripts.
# Uses uv to install the dev dependency group (ruff + ty) declared in pyproject.toml.
python_checks:
  runs-on: ubuntu-24.04
  steps:
    - name: Checkout repo
      uses: actions/checkout@v6

    - name: Set up uv package manager
      uses: astral-sh/setup-uv@v8.1.0
      with:
        python-version: "3.14"
        enable-cache: true

    - name: Install Python dev dependencies
      run: uv sync --group dev

    # Formatting check only (--check fails on diffs; it does not rewrite files in CI).
    - name: Ruff format
      run: uv run ruff format --check

    - name: Ruff lint
      run: uv run ruff check

    - name: Type check
      run: uv run ty check
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -5,3 +5,6 @@
.mise.toml
node_modules/
*.tsbuildinfo
# Python tool caches (bytecode, ruff, ty).
__pycache__/
.ruff_cache/
.ty_cache/
67 changes: 31 additions & 36 deletions git-cliff-release/enhance_context.py
Original file line number Diff line number Diff line change
@@ -1,17 +1,17 @@
from __future__ import annotations

from argparse import ArgumentParser, BooleanOptionalAction
import json
import subprocess
import sys
from argparse import ArgumentParser, BooleanOptionalAction
from pathlib import Path
from typing import Any


def load_pr_issues(owner: str, repo: str) -> dict[int, list[int]]:
output = subprocess.check_output(
[
str(Path(__file__).parent / "fetch_pr_issues.sh"),
str(Path(__file__).parent / 'fetch_pr_issues.sh'),
owner,
repo,
]
Expand All @@ -20,7 +20,7 @@ def load_pr_issues(owner: str, repo: str) -> dict[int, list[int]]:
try:
pr_issues = json.loads(output)
except ValueError:
print(f"fetch_pr_issues.sh output: {output}")
print(f'fetch_pr_issues.sh output: {output}')
raise

if pr_issues is None:
Expand All @@ -29,65 +29,60 @@ def load_pr_issues(owner: str, repo: str) -> dict[int, list[int]]:
return {int(key): value for key, value in pr_issues.items()}


def enhance_release(release: dict[str, Any], *, is_release_notes: bool, unreleased_version: str | None) -> None:
    """Add template-facing metadata to one git-cliff release entry, mutating it in place.

    Always sets ``extra.is_release_notes``. For a tagged release (truthy
    ``release['version']``) also sets ``extra.release_link``; otherwise, when
    ``unreleased_version`` is provided, records it as ``extra.unreleased_version``.

    NOTE(review): reads the module-level global ``repo_url``, which is assigned in
    the ``__main__`` block — this function is only safe to call after that runs.
    """
    # 'extra' may arrive as None from git-cliff; normalize to a dict once.
    extra = release['extra'] or {}
    release['extra'] = extra
    extra['is_release_notes'] = is_release_notes

    version = release['version']
    if version:
        extra['release_link'] = f'{repo_url}/releases/tag/{version}'
    elif unreleased_version:
        extra['unreleased_version'] = unreleased_version


def enhance_commit(commit: dict[str, Any], pr_issues: dict[int, list[int]]) -> None:
    """Add template-facing metadata (links, author, closed issues) to a commit entry, in place.

    ``pr_issues`` maps a PR number to the issue numbers that PR closes.
    Commits without a remote PR number only get ``extra.commit_link`` (and
    ``extra.username`` when the remote provides one).

    NOTE(review): reads the module-level global ``repo_url``, assigned in the
    ``__main__`` block.
    """
    remote = commit.get('remote', {})
    pr_number = remote.get('pr_number')
    username = remote.get('username')

    # 'extra' may arrive as None from git-cliff; normalize to a dict once.
    extra = commit['extra'] or {}
    commit['extra'] = extra
    extra['commit_link'] = f'{repo_url}/commit/{commit["id"]}'

    if username:
        extra['username'] = username

    if pr_number:
        closed_issues = pr_issues.get(pr_number, [])
        extra['closed_issues'] = closed_issues

        pr_link = f'{repo_url}/pull/{pr_number}'
        extra['pr_link'] = f'([#{pr_number}]({pr_link}))'
        extra['raw_pr_link'] = f'(#{pr_number})'

        extra['closed_issue_links'] = [f'[#{issue}]({repo_url}/issues/{issue})' for issue in closed_issues]


parser = ArgumentParser()
parser.add_argument('--repo', type=str, required=True)  # 'owner/name', e.g. 'apify/workflows'
parser.add_argument('--unreleased-version', nargs='?', default=None, type=str)
parser.add_argument('--release-notes', action=BooleanOptionalAction)
parser.add_argument('--no-github', default=False, action='store_true')


if __name__ == '__main__':
    # Reads the git-cliff context JSON on stdin, enhances it, writes JSON to stdout.
    args = parser.parse_args()
    repo_url = f'https://github.com/{args.repo}'
    owner, repo = args.repo.split('/')

    # NOTE(review): this shells out to fetch_pr_issues.sh even when --no-github is
    # passed, although the result is only used inside the guard below — confirm
    # whether --no-github should also skip this call.
    pr_issues = load_pr_issues(owner, repo)
    context = json.load(sys.stdin)

    if not args.no_github:
        for release in context:
            enhance_release(release, is_release_notes=args.release_notes, unreleased_version=args.unreleased_version)

            for commit in release['commits']:
                enhance_commit(commit, pr_issues)

    json.dump(context, sys.stdout)
53 changes: 53 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
[project]
name = "apify-workflows"
version = "0"
description = "Lint and type-check configuration for Python scripts in apify/workflows."
requires-python = ">=3.14"

[dependency-groups]
# Dev-only tooling; installed in CI via `uv sync --group dev`.
dev = [
    "ruff~=0.15.0",
    "ty~=0.0.0",
]

[tool.uv]
# This project is configuration-only; nothing here is built or installed as a package.
package = false

[tool.ruff]
line-length = 120
include = ["**/*.py"]

[tool.ruff.lint]
# Start from every rule, then opt out of the ones below.
select = ["ALL"]
ignore = [
    "COM812", # May conflict with the formatter.
    "D", # pydocstyle - docstring presence/formatting not enforced for this repo's small scripts.
    "EM", # flake8-errmsg.
    "G004", # Logging statement uses f-string.
    "ISC001", # May conflict with the formatter.
    "PLR0913", # Too many arguments in function definition.
    "TD002", # Missing author in TODO.
    "TRY003", # Avoid specifying long messages outside the exception class.
]

[tool.ruff.format]
quote-style = "single"
indent-style = "space"

[tool.ruff.lint.flake8-quotes]
# Single quotes in code, double quotes for docstrings (matches the formatter config above).
docstring-quotes = "double"
inline-quotes = "single"

[tool.ruff.lint.per-file-ignores]
"{python-package-check,git-cliff-release}/**/*.py" = [
    "INP001", # Implicit namespace package - these are script directories, not packages.
    "S603", # `subprocess` call: arguments are trusted (CI-provided or constants).
    "S607", # Starting a process with a partial executable path - tools resolved from PATH by design.
    "T201", # `print` is the script's UX.
]

[tool.ty.environment]
python-version = "3.14"

[tool.ty.src]
include = ["python-package-check", "git-cliff-release"]
54 changes: 54 additions & 0 deletions python-package-check/action.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
name: Python package check
description: >
  Verify built sdist + wheel artifacts in `dist/` install and import correctly.
  Run after building the package (e.g. via `prepare-pypi-distribution`).

inputs:
  package_name:
    description: Importable Python package name (e.g. `crawlee`, `apify`, `apify_client`).
    required: true
  src_package_dir:
    description: Path to the package source directory, relative to the repository root.
    required: true
  dist_dir:
    description: Directory containing the built sdist + wheel.
    required: true
  python_version:
    description: Python version to use for the verification venvs.
    required: true
  extras:
    description: Optional extras to install (e.g. `all`). Empty for no extras.
    required: false
    default: ""
  smoke_code:
    description: >
      Optional extra Python code to run inside the install smoke test, after `import <package>`.
      Useful for asserting that specific symbols import and construct cleanly.
    required: false
    default: ""

runs:
  using: composite
  steps:
    - name: Set up uv package manager
      uses: astral-sh/setup-uv@v8.1.0
      with:
        python-version: ${{ inputs.python_version }}

    - name: Verify built package
      shell: bash
      # Inputs are passed through env vars (not interpolated into the script) so
      # shell metacharacters in caller-supplied values cannot break or inject
      # into the command. `github.action_path` is runner-controlled and safe to
      # interpolate directly.
      env:
        PACKAGE_NAME: ${{ inputs.package_name }}
        DIST_DIR: ${{ inputs.dist_dir }}
        PYTHON_VERSION: ${{ inputs.python_version }}
        EXTRAS: ${{ inputs.extras }}
        SRC_PACKAGE_DIR: ${{ inputs.src_package_dir }}
        SMOKE_CODE: ${{ inputs.smoke_code }}
      run: |
        uv run --no-project python "${{ github.action_path }}/verify_built_package.py" \
          --package "$PACKAGE_NAME" \
          --dist-dir "$DIST_DIR" \
          --python-version "$PYTHON_VERSION" \
          --extras "$EXTRAS" \
          --src-package-dir "$SRC_PACKAGE_DIR" \
          --smoke-code "$SMOKE_CODE"
Loading
Loading