test: internal contributor workflow test

- Add test documentation file
- Update docs.json navigation
- Testing two-workflow pattern for internal PRs
This commit is contained in:
Gu
2025-08-23 10:24:37 +08:00
parent 6f06791d1c
commit 33455cd05e
10 changed files with 1900 additions and 123 deletions

125
.github/workflow-config.yml vendored Normal file
View File

@@ -0,0 +1,125 @@
# GitHub Actions Workflow Configuration
# Configuration for documentation synchronization workflows

# Security settings
security:
  # Require manual approval for external PRs
  require_approval_for_forks: true
  # Maximum files allowed per PR
  max_files_per_pr: 50
  # Maximum file size in MB
  max_file_size_mb: 10
  # Allowed file extensions
  allowed_extensions:
    - .md
    - .mdx
    - .json
  # Trusted contributors (GitHub usernames)
  # NOTE(review): the entry below is an email address, not a GitHub
  # username — confirm which identifier the consuming workflows compare
  # against (PR author login vs. commit email).
  trusted_contributors:
    - guchenhe@gmail.com
    # Add more trusted contributors here

# Rate limiting
rate_limits:
  # Maximum sync operations per hour per PR author
  max_syncs_per_hour: 5
  # Maximum API calls per sync operation
  max_api_calls_per_sync: 100

# Translation settings
translation:
  # Target languages
  target_languages:
    - zh-hans
    - ja-jp
  # Maximum files to translate in a single operation
  max_files_per_batch: 10
  # Timeout for translation operations (seconds)
  translation_timeout: 300

# Branch settings
branches:
  # Branches that trigger automatic sync
  auto_sync_branches:
    - main
    - revamp
  # Branch protection for external PRs
  require_branch_protection: true
  # Prefix for sync branches
  sync_branch_prefix: "docs-sync-pr-"

# Notification settings
notifications:
  # Comment on PRs with sync status
  comment_on_pr: true
  # Include translation preview links
  include_preview_links: true
  # Notify on sync failures
  notify_on_failure: true

# Artifact settings
artifacts:
  # Retention period for analysis artifacts (days)
  retention_days: 1
  # Maximum artifact size (MB)
  max_artifact_size_mb: 50

# Approval workflow
approval:
  # Required approver associations for external PRs
  required_approver_associations:
    - OWNER
    - MEMBER
    - COLLABORATOR
  # Require review from code owners
  require_code_owner_review: false
  # Auto-approve for trusted contributors
  auto_approve_trusted: true

# Dry run mode (for testing)
dry_run:
  # Enable dry run mode (no actual changes made)
  enabled: false
  # Show what would be changed
  show_diff: true

# Monitoring and logging
monitoring:
  # Log all operations
  enable_logging: true
  # Include security events in logs
  log_security_events: true
  # Monitor API usage
  monitor_api_usage: true

# Emergency settings
emergency:
  # Disable all workflows
  disable_workflows: false
  # Disable external PR processing only
  disable_external_prs: false
  # Emergency contact (GitHub username)
  # NOTE(review): value is an email address — confirm intended identifier.
  emergency_contact: "guchenhe@gmail.com"

# Version info
version: "1.0.0"
updated: "2024-08-22"

View File

@@ -1,121 +0,0 @@
# Pushes auto-generated translation/sync commits back to the branch that
# triggered the run. Deleted in this commit in favor of the two-workflow
# analyze/execute pattern.
name: Sync Documentation Structure

on:
  push:
    branches:
      - main
      - revamp
    paths:
      - 'docs.json'
      - 'en/**/*.md'
      - 'en/**/*.mdx'
  workflow_dispatch:
    inputs:
      since_commit:
        description: 'Git commit to compare against (default: HEAD~1)'
        required: false
        default: 'HEAD~1'

jobs:
  sync-docs:
    runs-on: ubuntu-latest
    permissions:
      contents: write  # needed to push the sync commit back to the branch
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0  # Fetch all history for git diff
          token: ${{ secrets.GITHUB_TOKEN }}

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.9'

      - name: Install dependencies
        run: |
          cd tools/translate
          pip install httpx aiofiles python-dotenv

      - name: Check for documentation changes
        id: check-changes
        env:
          # SECURITY: route the user-supplied dispatch input through an env
          # var instead of interpolating ${{ }} directly into the script
          # body, so a crafted input cannot inject shell commands.
          SINCE_COMMIT_INPUT: ${{ github.event.inputs.since_commit }}
        run: |
          # Determine the commit to compare against
          if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
            SINCE_COMMIT="$SINCE_COMMIT_INPUT"
          else
            SINCE_COMMIT="HEAD~1"
          fi
          echo "Checking for changes since: $SINCE_COMMIT"

          # Check if there are any English doc changes
          if git diff --name-only "$SINCE_COMMIT" HEAD | grep -E '^(docs\.json|en/.*\.(md|mdx))$'; then
            echo "has_changes=true" >> $GITHUB_OUTPUT
            echo "since_commit=$SINCE_COMMIT" >> $GITHUB_OUTPUT
          else
            echo "has_changes=false" >> $GITHUB_OUTPUT
            echo "No documentation changes detected"
          fi

      - name: Run documentation synchronization
        if: steps.check-changes.outputs.has_changes == 'true'
        env:
          DIFY_API_KEY: ${{ secrets.DIFY_API_KEY }}
        run: |
          cd tools/translate
          echo "Starting documentation synchronization..."
          echo "Since commit: ${{ steps.check-changes.outputs.since_commit }}"
          python sync_and_translate.py "$DIFY_API_KEY" "${{ steps.check-changes.outputs.since_commit }}"

      - name: Check for sync results
        if: steps.check-changes.outputs.has_changes == 'true'
        id: check-sync-results
        run: |
          # Check if there are any changes to commit
          if [[ -n $(git status --porcelain) ]]; then
            echo "has_sync_changes=true" >> $GITHUB_OUTPUT
            echo "Sync created changes to commit"
          else
            echo "has_sync_changes=false" >> $GITHUB_OUTPUT
            echo "No changes from sync"
          fi

      - name: Commit and push synchronized changes
        if: steps.check-sync-results.outputs.has_sync_changes == 'true'
        run: |
          git config --global user.name 'github-actions[bot]'
          git config --global user.email 'github-actions[bot]@users.noreply.github.com'

          # Add all changes
          git add .

          # Create commit message
          COMMIT_MSG="docs: auto-sync documentation structure and translations
          🤖 Generated with [Claude Code](https://claude.ai/code)
          Co-Authored-By: Claude <noreply@anthropic.com>"
          git commit -m "$COMMIT_MSG"

          # Push to the current branch
          echo "Pushing to branch: ${{ github.ref_name }}"
          git push origin HEAD:${{ github.ref_name }}
          echo "✓ Documentation synchronization completed and pushed"

      - name: Summary
        if: always()
        run: |
          if [[ "${{ steps.check-changes.outputs.has_changes }}" == "true" ]]; then
            if [[ "${{ steps.check-sync-results.outputs.has_sync_changes }}" == "true" ]]; then
              echo "✅ Documentation synchronization completed successfully"
            else
              echo " Documentation synchronization ran but no changes were needed"
            fi
          else
            echo " No documentation changes detected, synchronization skipped"
          fi

289
.github/workflows/sync_docs_analyze.yml vendored Normal file
View File

@@ -0,0 +1,289 @@
# Read-only analysis half of the two-workflow pattern: inspects PR doc
# changes, enforces security limits, and uploads a sync plan as an artifact
# for the privileged execute workflow. No code from the PR is executed.
name: Analyze Documentation Changes

on:
  pull_request:
    types: [opened, synchronize, reopened]
    paths:
      - 'docs.json'
      - 'en/**/*.md'
      - 'en/**/*.mdx'

# Read-only token: this workflow checks out untrusted PR content, so it
# must not be able to write to the repository.
permissions:
  contents: read
  pull-requests: read

jobs:
  analyze:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout PR
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.9'

      - name: Analyze documentation changes
        id: analyze
        env:
          # SECURITY: PR title and author login are attacker-controlled.
          # Pass them through env vars and build JSON with jq rather than
          # interpolating ${{ }} into the script — direct interpolation
          # allowed shell injection and broke the JSON on quotes.
          PR_NUMBER: ${{ github.event.pull_request.number }}
          PR_TITLE: ${{ github.event.pull_request.title }}
          PR_AUTHOR: ${{ github.event.pull_request.user.login }}
          BASE_SHA: ${{ github.event.pull_request.base.sha }}
          HEAD_SHA: ${{ github.event.pull_request.head.sha }}
        run: |
          echo "Analyzing documentation changes..."
          echo "Base SHA: $BASE_SHA"
          echo "Head SHA: $HEAD_SHA"

          # Detect changed files
          CHANGED_FILES=$(git diff --name-only "$BASE_SHA" "$HEAD_SHA" | grep -E '^(docs\.json|en/.*\.(md|mdx))$' || true)
          if [ -z "$CHANGED_FILES" ]; then
            echo "No documentation changes detected"
            echo "has_changes=false" >> $GITHUB_OUTPUT
            exit 0
          fi
          echo "has_changes=true" >> $GITHUB_OUTPUT

          # Count changes for security limits
          FILE_COUNT=$(echo "$CHANGED_FILES" | wc -l)
          echo "Changed files count: $FILE_COUNT"

          # Security check: Limit number of files
          MAX_FILES=50
          if [ "$FILE_COUNT" -gt "$MAX_FILES" ]; then
            echo "Error: Too many files changed ($FILE_COUNT > $MAX_FILES)"
            echo "error=too_many_files" >> $GITHUB_OUTPUT
            exit 1
          fi

          # Create analysis report — jq escapes every value safely
          jq -n \
            --argjson pr_number "$PR_NUMBER" \
            --arg pr_title "$PR_TITLE" \
            --arg pr_author "$PR_AUTHOR" \
            --arg base_sha "$BASE_SHA" \
            --arg head_sha "$HEAD_SHA" \
            --argjson file_count "$FILE_COUNT" \
            --arg timestamp "$(date -u +%Y-%m-%dT%H:%M:%SZ)" \
            --arg repository "$GITHUB_REPOSITORY" \
            --arg ref "$GITHUB_REF" \
            '{pr_number: $pr_number, pr_title: $pr_title,
              pr_author: $pr_author, base_sha: $base_sha,
              head_sha: $head_sha, file_count: $file_count,
              timestamp: $timestamp, repository: $repository,
              ref: $ref}' > /tmp/analysis.json

          # Save changed files list
          echo "$CHANGED_FILES" > /tmp/changed_files.txt

          # Analyze file types and sizes
          > /tmp/file_analysis.txt
          while IFS= read -r file; do
            if [ -f "$file" ]; then
              SIZE=$(stat -f%z "$file" 2>/dev/null || stat -c%s "$file" 2>/dev/null || echo "0")
              echo "$file|$SIZE" >> /tmp/file_analysis.txt

              # Security check: File size limit (10MB)
              MAX_SIZE=$((10 * 1024 * 1024))
              if [ "$SIZE" -gt "$MAX_SIZE" ]; then
                echo "Error: File $file exceeds size limit ($SIZE > $MAX_SIZE)"
                echo "error=file_too_large" >> $GITHUB_OUTPUT
                exit 1
              fi
            fi
          done <<< "$CHANGED_FILES"
          echo "Analysis complete"

      - name: Check for docs.json structure changes
        if: steps.analyze.outputs.has_changes == 'true'
        env:
          # SHAs are GitHub-supplied hex strings; exported so the quoted
          # heredoc below can read them via os.environ without ${{ }}
          # interpolation inside the Python source.
          BASE_SHA: ${{ github.event.pull_request.base.sha }}
          HEAD_SHA: ${{ github.event.pull_request.head.sha }}
        run: |
          # Check if docs.json was modified
          if git diff --name-only "$BASE_SHA" "$HEAD_SHA" | grep -q '^docs\.json$'; then
            echo "docs.json structure changes detected"
            echo "true" > /tmp/docs_json_changed.txt

            # Extract English documentation structure changes
            python3 - <<'EOF'
          import json
          import os
          import subprocess

          def get_docs_structure(sha):
              """Load docs.json as it existed at the given commit, or None."""
              try:
                  result = subprocess.run(
                      ["git", "show", f"{sha}:docs.json"],
                      capture_output=True,
                      text=True,
                      check=True,
                  )
                  return json.loads(result.stdout)
              except Exception:  # narrow from bare except: don't mask SystemExit
                  return None

          base_sha = os.environ["BASE_SHA"]
          head_sha = os.environ["HEAD_SHA"]
          base_docs = get_docs_structure(base_sha)
          head_docs = get_docs_structure(head_sha)

          changes = {
              "structure_changed": base_docs != head_docs if base_docs and head_docs else False,
              "navigation_modified": False,
              "languages_affected": [],
          }

          if base_docs and head_docs:
              # Check navigation changes
              base_nav = base_docs.get("navigation", {})
              head_nav = head_docs.get("navigation", {})
              if base_nav != head_nav:
                  changes["navigation_modified"] = True
                  # Identify affected languages — codes aligned with the
                  # target_languages list used by the sync plan and the
                  # execute workflow (was the inconsistent ["zh-Hans", "jp"]).
                  for lang_data in head_nav.get("languages", []):
                      if lang_data.get("language") == "en":
                          changes["languages_affected"] = ["zh-hans", "ja-jp"]
                          break

          with open("/tmp/structure_changes.json", "w") as f:
              json.dump(changes, f, indent=2)
          EOF
          else
            echo "No docs.json changes"
            echo "false" > /tmp/docs_json_changed.txt
          fi

      - name: Validate file paths
        if: steps.analyze.outputs.has_changes == 'true'
        run: |
          # Security: Validate all file paths
          while IFS= read -r file; do
            # Check for directory traversal attempts
            if echo "$file" | grep -q '\.\./'; then
              echo "Error: Invalid file path detected: $file"
              exit 1
            fi
            # Check file extension
            if ! echo "$file" | grep -qE '\.(md|mdx|json)$'; then
              echo "Error: Invalid file type: $file"
              exit 1
            fi
            # Check path starts with allowed directories
            if ! echo "$file" | grep -qE '^(en/|docs\.json$)'; then
              echo "Error: File outside allowed directories: $file"
              exit 1
            fi
          done < /tmp/changed_files.txt
          echo "All file paths validated"

      - name: Create analysis summary
        if: steps.analyze.outputs.has_changes == 'true'
        run: |
          # Create a comprehensive analysis summary
          python3 - <<'EOF'
          import json
          import os

          # Load analysis data
          with open("/tmp/analysis.json") as f:
              analysis = json.load(f)

          # Load file analysis
          files_to_sync = []
          with open("/tmp/file_analysis.txt") as f:
              for line in f:
                  if line.strip():
                      file_path, size = line.strip().split("|")
                      files_to_sync.append({
                          "path": file_path,
                          "size": int(size),
                          "type": "mdx" if file_path.endswith(".mdx") else "md" if file_path.endswith(".md") else "json",
                      })

          # Load structure changes if exists
          structure_changes = {}
          if os.path.exists("/tmp/structure_changes.json"):
              with open("/tmp/structure_changes.json") as f:
                  structure_changes = json.load(f)

          # Create sync plan
          sync_plan = {
              "metadata": analysis,
              "files_to_sync": files_to_sync,
              "structure_changes": structure_changes,
              "target_languages": ["zh-hans", "ja-jp"],
              "sync_required": len(files_to_sync) > 0 or structure_changes.get("structure_changed", False),
          }

          # Save sync plan
          with open("/tmp/sync_plan.json", "w") as f:
              json.dump(sync_plan, f, indent=2)

          print(f"Sync plan created: {len(files_to_sync)} files to sync")
          if structure_changes.get("structure_changed"):
              print("Documentation structure changes detected")
          EOF

      - name: Upload analysis artifacts
        if: steps.analyze.outputs.has_changes == 'true'
        uses: actions/upload-artifact@v4
        with:
          name: docs-sync-analysis-${{ github.event.pull_request.number }}
          path: |
            /tmp/analysis.json
            /tmp/changed_files.txt
            /tmp/file_analysis.txt
            /tmp/sync_plan.json
            /tmp/docs_json_changed.txt
            /tmp/structure_changes.json
          retention-days: 1

      - name: Comment on PR with analysis
        if: steps.analyze.outputs.has_changes == 'true'
        uses: actions/github-script@v7
        with:
          script: |
            const fs = require('fs');
            const syncPlan = JSON.parse(fs.readFileSync('/tmp/sync_plan.json', 'utf8'));
            const fileCount = syncPlan.files_to_sync.length;
            const structureChanged = syncPlan.structure_changes.structure_changed || false;

            let comment = '## 📋 Documentation Sync Analysis\n\n';
            comment += `Found **${fileCount}** documentation file(s) that need synchronization.\n\n`;

            if (fileCount > 0) {
              comment += '### Files to Sync:\n';
              syncPlan.files_to_sync.forEach(file => {
                const sizeKB = (file.size / 1024).toFixed(2);
                comment += `- \`${file.path}\` (${sizeKB} KB)\n`;
              });
              comment += '\n';
            }

            if (structureChanged) {
              comment += '### Structure Changes:\n';
              comment += '- Documentation navigation structure will be updated\n';
              comment += '- Target languages: Chinese (zh-hans), Japanese (ja-jp)\n\n';
            }

            comment += '### Next Steps:\n';
            comment += '1. A maintainer will review and approve the synchronization\n';
            comment += '2. Once approved, translations will be generated automatically\n';
            comment += '3. Synchronized files will be added to a new branch for review\n\n';
            comment += '_This analysis was performed automatically. No code from your PR was executed._';

            await github.rest.issues.createComment({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number,
              body: comment
            });

434
.github/workflows/sync_docs_execute.yml vendored Normal file
View File

@@ -0,0 +1,434 @@
name: Execute Documentation Sync
on:
workflow_run:
workflows: ["Analyze Documentation Changes"]
types:
- completed
permissions:
contents: write
pull-requests: write
actions: read
jobs:
execute-sync:
runs-on: ubuntu-latest
if: github.event.workflow_run.conclusion == 'success'
steps:
- name: Check workflow source
id: check-source
run: |
echo "Checking workflow source..."
echo "Event: ${{ github.event.workflow_run.event }}"
echo "Repository: ${{ github.event.workflow_run.repository.full_name }}"
echo "Head Repository: ${{ github.event.workflow_run.head_repository.full_name }}"
echo "Head Branch: ${{ github.event.workflow_run.head_branch }}"
# Security check: Only process PRs from the same repository or trusted forks
if [[ "${{ github.event.workflow_run.event }}" != "pull_request" ]]; then
echo "Not a pull request event, skipping"
echo "should_process=false" >> $GITHUB_OUTPUT
exit 0
fi
# Check if this is from a fork
IS_FORK="false"
if [[ "${{ github.event.workflow_run.repository.full_name }}" != "${{ github.event.workflow_run.head_repository.full_name }}" ]]; then
IS_FORK="true"
fi
echo "is_fork=$IS_FORK" >> $GITHUB_OUTPUT
echo "should_process=true" >> $GITHUB_OUTPUT
- name: Download analysis artifacts
if: steps.check-source.outputs.should_process == 'true'
uses: actions/github-script@v7
id: download-artifacts
with:
script: |
const artifacts = await github.rest.actions.listWorkflowRunArtifacts({
owner: context.repo.owner,
repo: context.repo.repo,
run_id: ${{ github.event.workflow_run.id }}
});
const matchArtifact = artifacts.data.artifacts.find(artifact => {
return artifact.name.startsWith('docs-sync-analysis-');
});
if (!matchArtifact) {
console.log('No analysis artifacts found');
return false;
}
const download = await github.rest.actions.downloadArtifact({
owner: context.repo.owner,
repo: context.repo.repo,
artifact_id: matchArtifact.id,
archive_format: 'zip'
});
const fs = require('fs');
fs.writeFileSync('/tmp/artifacts.zip', Buffer.from(download.data));
// Extract PR number from artifact name
const prNumber = matchArtifact.name.split('-').pop();
core.setOutput('pr_number', prNumber);
core.setOutput('artifact_found', 'true');
return true;
- name: Extract and validate artifacts
if: steps.download-artifacts.outputs.artifact_found == 'true'
id: extract-artifacts
run: |
echo "Extracting artifacts..."
# Create secure temporary directory
WORK_DIR=$(mktemp -d /tmp/sync-XXXXXX)
echo "work_dir=$WORK_DIR" >> $GITHUB_OUTPUT
# Extract to temporary directory
cd "$WORK_DIR"
unzip /tmp/artifacts.zip
# Validate extracted files
REQUIRED_FILES="analysis.json sync_plan.json changed_files.txt"
for file in $REQUIRED_FILES; do
if [ ! -f "$file" ]; then
echo "Error: Required file $file not found"
exit 1
fi
done
# Validate JSON structure
python3 -c "
import json
import sys
try:
with open('analysis.json') as f:
analysis = json.load(f)
with open('sync_plan.json') as f:
sync_plan = json.load(f)
# Validate required fields
assert 'pr_number' in analysis
assert 'files_to_sync' in sync_plan
assert 'target_languages' in sync_plan
print('Artifacts validated successfully')
except Exception as e:
print(f'Validation error: {e}')
sys.exit(1)
"
# Extract PR number and other metadata
PR_NUMBER=$(python3 -c "import json; print(json.load(open('analysis.json'))['pr_number'])")
echo "pr_number=$PR_NUMBER" >> $GITHUB_OUTPUT
# Check if sync is required
SYNC_REQUIRED=$(python3 -c "import json; print(str(json.load(open('sync_plan.json'))['sync_required']).lower())")
echo "sync_required=$SYNC_REQUIRED" >> $GITHUB_OUTPUT
- name: Checkout base repository
if: steps.extract-artifacts.outputs.sync_required == 'true'
uses: actions/checkout@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
fetch-depth: 0
- name: Set up Python
if: steps.extract-artifacts.outputs.sync_required == 'true'
uses: actions/setup-python@v4
with:
python-version: '3.9'
- name: Install dependencies
if: steps.extract-artifacts.outputs.sync_required == 'true'
run: |
cd tools/translate
pip install httpx aiofiles python-dotenv
- name: Check for manual approval requirement
if: steps.extract-artifacts.outputs.sync_required == 'true' && steps.check-source.outputs.is_fork == 'true'
id: check-approval
uses: actions/github-script@v7
with:
script: |
const prNumber = ${{ steps.extract-artifacts.outputs.pr_number }};
// Get PR details
const pr = await github.rest.pulls.get({
owner: context.repo.owner,
repo: context.repo.repo,
pull_number: prNumber
});
const author = pr.data.user.login;
const authorAssociation = pr.data.author_association;
// Check if author is trusted
const trustedAssociations = ['OWNER', 'MEMBER', 'COLLABORATOR'];
const trustedContributors = process.env.TRUSTED_CONTRIBUTORS?.split(',') || [];
const isTrusted = trustedAssociations.includes(authorAssociation) ||
trustedContributors.includes(author);
if (!isTrusted) {
// Check for approval from maintainer
const reviews = await github.rest.pulls.listReviews({
owner: context.repo.owner,
repo: context.repo.repo,
pull_number: prNumber
});
const hasApproval = reviews.data.some(review =>
review.state === 'APPROVED' &&
trustedAssociations.includes(review.author_association)
);
if (!hasApproval) {
console.log('PR requires manual approval from a maintainer');
core.setOutput('needs_approval', 'true');
// Comment on PR
await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: prNumber,
body: '⏸️ **Documentation sync is pending approval**\n\n' +
'This PR requires approval from a maintainer before automatic synchronization can proceed.\n\n' +
'Once approved, the documentation will be automatically translated and synchronized.'
});
return;
}
}
core.setOutput('needs_approval', 'false');
- name: Execute safe synchronization
if: steps.extract-artifacts.outputs.sync_required == 'true' && steps.check-approval.outputs.needs_approval != 'true'
id: sync
env:
DIFY_API_KEY: ${{ secrets.DIFY_API_KEY }}
run: |
echo "Executing documentation synchronization..."
WORK_DIR="${{ steps.extract-artifacts.outputs.work_dir }}"
PR_NUMBER="${{ steps.extract-artifacts.outputs.pr_number }}"
# Create a new branch for the sync results
SYNC_BRANCH="docs-sync-pr-${PR_NUMBER}"
git checkout -b "$SYNC_BRANCH"
# Run synchronization with security constraints
cd tools/translate
# Create a secure sync script
cat > secure_sync.py <<'EOF'
import json
import sys
import os
import asyncio
from pathlib import Path
# Add parent directory to path
sys.path.append(os.path.dirname(__file__))
from sync_and_translate import DocsSynchronizer
async def secure_sync():
work_dir = sys.argv[1]
# Load sync plan
with open(f"{work_dir}/sync_plan.json") as f:
sync_plan = json.load(f)
# Security: Only sync files from the approved list
files_to_sync = sync_plan.get("files_to_sync", [])
# Validate file paths again
for file_info in files_to_sync:
file_path = file_info["path"]
# Security checks
if ".." in file_path or file_path.startswith("/"):
print(f"Security error: Invalid path {file_path}")
return False
if not file_path.startswith("en/"):
print(f"Security error: File outside en/ directory: {file_path}")
return False
# Initialize synchronizer
api_key = os.environ.get("DIFY_API_KEY")
if not api_key:
print("Error: DIFY_API_KEY not set")
return False
synchronizer = DocsSynchronizer(api_key)
# Perform limited sync
results = {
"translated": [],
"failed": [],
"skipped": []
}
for file_info in files_to_sync[:10]: # Limit to 10 files
file_path = file_info["path"]
print(f"Processing: {file_path}")
try:
# Only translate if file exists and is safe
if os.path.exists(f"../../{file_path}"):
for target_lang in ["zh-hans", "ja-jp"]:
target_path = file_path.replace("en/", f"{target_lang}/")
success = await synchronizer.translate_file_with_notice(
file_path,
target_path,
target_lang
)
if success:
results["translated"].append(target_path)
else:
results["failed"].append(target_path)
else:
results["skipped"].append(file_path)
except Exception as e:
print(f"Error processing {file_path}: {e}")
results["failed"].append(file_path)
# Handle docs.json structure sync if needed
if sync_plan.get("structure_changes", {}).get("structure_changed"):
print("Syncing docs.json structure...")
try:
sync_log = synchronizer.sync_docs_json_structure()
print("\n".join(sync_log))
except Exception as e:
print(f"Error syncing structure: {e}")
# Save results
with open("/tmp/sync_results.json", "w") as f:
json.dump(results, f, indent=2)
return len(results["failed"]) == 0
if __name__ == "__main__":
success = asyncio.run(secure_sync())
sys.exit(0 if success else 1)
EOF
# Run the secure sync
python secure_sync.py "$WORK_DIR"
SYNC_EXIT_CODE=$?
echo "sync_exit_code=$SYNC_EXIT_CODE" >> $GITHUB_OUTPUT
# Check for changes
if [[ -n $(git status --porcelain) ]]; then
echo "has_changes=true" >> $GITHUB_OUTPUT
else
echo "has_changes=false" >> $GITHUB_OUTPUT
fi
- name: Commit sync results
if: steps.sync.outputs.has_changes == 'true'
id: commit
run: |
PR_NUMBER="${{ steps.extract-artifacts.outputs.pr_number }}"
SYNC_BRANCH="docs-sync-pr-${PR_NUMBER}"
git config user.name 'github-actions[bot]'
git config user.email 'github-actions[bot]@users.noreply.github.com'
git add .
git commit -m "docs: sync translations for PR #${PR_NUMBER}
Auto-generated translations for documentation changes.
Review these changes carefully before merging.
🤖 Generated with GitHub Actions"
# Push the branch
git push origin "$SYNC_BRANCH" --force
echo "branch_name=$SYNC_BRANCH" >> $GITHUB_OUTPUT
- name: Comment on PR with results
if: steps.extract-artifacts.outputs.sync_required == 'true' && steps.check-approval.outputs.needs_approval != 'true'
uses: actions/github-script@v7
with:
script: |
const fs = require('fs');
const prNumber = ${{ steps.extract-artifacts.outputs.pr_number }};
const hasChanges = '${{ steps.sync.outputs.has_changes }}' === 'true';
const branchName = '${{ steps.commit.outputs.branch_name }}';
let comment = '## ✅ Documentation Synchronization Complete\n\n';
if (hasChanges) {
// Load sync results if available
let results = { translated: [], failed: [], skipped: [] };
try {
results = JSON.parse(fs.readFileSync('/tmp/sync_results.json', 'utf8'));
} catch (e) {
console.log('Could not load sync results');
}
comment += `Translations have been generated and pushed to branch: \`${branchName}\`\n\n`;
if (results.translated.length > 0) {
comment += `### ✅ Successfully Translated (${results.translated.length}):\n`;
results.translated.slice(0, 10).forEach(file => {
comment += `- \`${file}\`\n`;
});
if (results.translated.length > 10) {
comment += `- ... and ${results.translated.length - 10} more\n`;
}
comment += '\n';
}
if (results.failed.length > 0) {
comment += `### ⚠️ Failed Translations (${results.failed.length}):\n`;
results.failed.forEach(file => {
comment += `- \`${file}\`\n`;
});
comment += '\n';
}
comment += '### Next Steps:\n';
comment += '1. Review the generated translations in the sync branch\n';
comment += '2. Make any necessary adjustments\n';
comment += '3. Merge the sync branch into your PR branch if satisfied\n\n';
comment += `[View changes](https://github.com/${{ github.repository }}/compare/${{ github.event.workflow_run.head_branch }}...${branchName})`;
} else {
comment += 'No changes were needed. All documentation is already in sync.';
}
await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: prNumber,
body: comment
});
handle-failure:
runs-on: ubuntu-latest
if: github.event.workflow_run.conclusion == 'failure'
steps:
- name: Report analysis failure
uses: actions/github-script@v7
with:
script: |
// Try to extract PR number from workflow run
const workflowRun = context.payload.workflow_run;
console.log('Analysis workflow failed');
console.log('Attempting to notify PR if possible...');
// This is a best-effort attempt to notify
// In practice, you might want to store PR number differently