remove bundle workflow

This commit is contained in:
Gu
2025-10-28 20:25:00 -07:00
parent de587fb570
commit 9639e9727c
5 changed files with 1473 additions and 90 deletions

125
.github/workflow-config.yml vendored Normal file
View File

@@ -0,0 +1,125 @@
# GitHub Actions Workflow Configuration
# Configuration for documentation synchronization workflows
---
# Security settings
security:
  # Require manual approval for external PRs
  require_approval_for_forks: true
  # Maximum files allowed per PR
  max_files_per_pr: 50
  # Maximum file size in MB
  max_file_size_mb: 10
  # Allowed file extensions
  allowed_extensions:
    - .md
    - .mdx
    - .json
  # Trusted contributors
  # NOTE(review): the original comment said "GitHub usernames" but the value
  # is an email address; downstream checks compare against PR author logins —
  # confirm which format is intended.
  trusted_contributors:
    - guchenhe@gmail.com
    # Add more trusted contributors here

# Rate limiting
rate_limits:
  # Maximum sync operations per hour per PR author
  max_syncs_per_hour: 5
  # Maximum API calls per sync operation
  max_api_calls_per_sync: 100

# Translation settings
translation:
  # Target languages
  target_languages:
    - zh-hans
    - ja-jp
  # Maximum files to translate in a single operation
  max_files_per_batch: 10
  # Timeout for translation operations (seconds)
  translation_timeout: 300

# Branch settings
branches:
  # Branches that trigger automatic sync
  auto_sync_branches:
    - main
    - revamp
  # Branch protection for external PRs
  require_branch_protection: true
  # Prefix for sync branches (matches the branch name built by the execute workflow)
  sync_branch_prefix: "docs-sync-pr-"

# Notification settings
notifications:
  # Comment on PRs with sync status
  comment_on_pr: true
  # Include translation preview links
  include_preview_links: true
  # Notify on sync failures
  notify_on_failure: true

# Artifact settings
artifacts:
  # Retention period for analysis artifacts (days)
  retention_days: 1
  # Maximum artifact size (MB)
  max_artifact_size_mb: 50

# Approval workflow
approval:
  # Required approver associations for external PRs
  required_approver_associations:
    - OWNER
    - MEMBER
    - COLLABORATOR
  # Require review from code owners
  require_code_owner_review: false
  # Auto-approve for trusted contributors
  auto_approve_trusted: true

# Dry run mode (for testing)
dry_run:
  # Enable dry run mode (no actual changes made)
  enabled: false
  # Show what would be changed
  show_diff: true

# Monitoring and logging
monitoring:
  # Log all operations
  enable_logging: true
  # Include security events in logs
  log_security_events: true
  # Monitor API usage
  monitor_api_usage: true

# Emergency settings
emergency:
  # Disable all workflows
  disable_workflows: false
  # Disable external PR processing only
  disable_external_prs: false
  # Emergency contact
  # NOTE(review): original comment said "GitHub username" but the value is an
  # email address — confirm intended format.
  emergency_contact: "guchenhe@gmail.com"

# Version info
version: "1.0.0"
updated: "2024-08-22"  # NOTE(review): predates the commit date — confirm whether this should be bumped

View File

@@ -1,90 +0,0 @@
# Runs the docs bundling script against a PR and pushes any resulting
# formatting changes back to the PR branch. Same-repository PRs only:
# external forks are reported on but never pushed to.
name: Process Documentation

on:
  pull_request:
    types: [opened, synchronize]
    branches: [main]
  workflow_dispatch:

jobs:
  process-docs:
    runs-on: ubuntu-latest
    permissions:
      contents: write
    steps:
      # Check out the PR head so the tools run against the proposed changes.
      - name: Checkout repository
        uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }}

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.10'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install pyyaml

      # The script publishes success_count / error_count / detailed_message /
      # commit_message as step outputs, consumed by the steps below.
      - name: Run documentation tools
        id: doc-tools
        run: python tools/main_docs_bundle.py

      # NOTE(review): step outputs are expanded directly into the shell here;
      # an output containing quotes or shell metacharacters would break (or
      # inject into) these commands — consider passing them via env instead.
      - name: Display results
        run: |
          echo "Execution results:"
          echo "Successful operations: ${{ steps.doc-tools.outputs.success_count }}"
          echo "Failed operations: ${{ steps.doc-tools.outputs.error_count }}"
          if [ "${{ steps.doc-tools.outputs.detailed_message }}" != "" ]; then
            echo "Details:"
            echo "${{ steps.doc-tools.outputs.detailed_message }}"
          fi

      - name: Commit and Push changes (if applicable)
        run: |
          # Push only if the PR is coming from a branch within the same repository
          # where the workflow is running (i.e., not a PR from an external fork targeting this repo).
          # This covers:
          # 1. PRs within the main repository.
          # 2. PRs within a fork of the main repository (when this workflow runs in that fork).
          if [[ "${{ github.event.pull_request.head.repo.full_name }}" == "${{ github.repository }}" ]]; then
            if [[ -n "$(git status --porcelain)" ]]; then
              FINAL_COMMIT_SUBJECT=""
              if [[ -n "${{ steps.doc-tools.outputs.commit_message }}" ]]; then
                FINAL_COMMIT_SUBJECT="${{ steps.doc-tools.outputs.commit_message }}"
              else
                FINAL_COMMIT_SUBJECT="Docs: Apply automated formatting by CI"
              fi
              FINAL_COMMIT_BODY=""
              if [[ -n "${{ steps.doc-tools.outputs.detailed_message }}" ]]; then
                FINAL_COMMIT_BODY="${{ steps.doc-tools.outputs.detailed_message }}"
              else
                FINAL_COMMIT_BODY="Automated changes by CI. This commit was made to the PR branch."
              fi
              git config --local user.email "88554920+alterxyz@users.noreply.github.com"
              git config --local user.name "alterxyz"
              git add .
              git commit -m "$FINAL_COMMIT_SUBJECT" -m "$FINAL_COMMIT_BODY"
              # Push to the PR's head ref (the source branch of the PR)
              git push origin HEAD:${{ github.head_ref }}
              echo "Formatting changes automatically committed and pushed to PR branch: ${{ github.head_ref }}"
              echo "Commit Subject: $FINAL_COMMIT_SUBJECT"
              echo "Description:"
              echo "$FINAL_COMMIT_BODY"
            else
              echo "No file changes detected by script in this PR (source repo: ${{ github.event.pull_request.head.repo.full_name }}, target repo: ${{ github.repository }}). Nothing to commit or push."
            fi
          else
            # This case covers PRs from truly external forks targeting this repository.
            echo "PR is from an external fork ('${{ github.event.pull_request.head.repo.full_name }}' to '${{ github.repository }}'). Formatting changes will not be pushed to the PR branch."
            if [[ -n "$(git status --porcelain)" ]]; then
              echo "Note: The script identified formatting changes. These will be handled by other processes post-merge if necessary."
            else
              echo "No formatting changes were identified by the script in this externally forked PR."
            fi
          fi

291
.github/workflows/sync_docs_analyze.yml vendored Normal file
View File

@@ -0,0 +1,291 @@
# -----------------------------------------------------------------------------
# Workflow: Analyze Documentation Changes
# Categorizes a documentation PR (english / translation / mixed), enforces
# file-count and file-size limits, builds a translation sync plan for English
# changes, and uploads it as an artifact for the execute workflow to consume.
# NOTE(review): the YAML indentation in this extract appears to have been
# stripped; the nesting must be confirmed against the committed file.
# -----------------------------------------------------------------------------
name: Analyze Documentation Changes
on:
pull_request:
types: [opened, synchronize, reopened]
paths:
- 'docs.json'
- 'en/**/*.md'
- 'en/**/*.mdx'
- 'cn/**/*.md'
- 'cn/**/*.mdx'
- 'jp/**/*.md'
- 'jp/**/*.mdx'
- 'versions/**/*.md'
- 'versions/**/*.mdx'
# Read-only token: this workflow only analyzes the diff and uploads artifacts.
permissions:
contents: read
pull-requests: read
jobs:
analyze:
runs-on: ubuntu-latest
steps:
- name: Checkout PR
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: '3.9'
# Runs tools/translate/pr_analyzer.py over base..head and turns its
# key=value output into the step outputs pr_type / should_skip.
- name: Categorize and validate PR changes
id: categorize
run: |
echo "Categorizing PR changes..."
# Get base and head commits
BASE_SHA="${{ github.event.pull_request.base.sha }}"
HEAD_SHA="${{ github.event.pull_request.head.sha }}"
echo "Base SHA: $BASE_SHA"
echo "Head SHA: $HEAD_SHA"
# Run PR analyzer
cd tools/translate
python pr_analyzer.py "$BASE_SHA" "$HEAD_SHA" > /tmp/pr_analysis_output.txt 2>&1
# Parse analyzer output
# (the line above is only a comment, so $? still holds the pr_analyzer.py exit status)
if [ $? -eq 0 ]; then
# Successful analysis
# NOTE(review): `source` executes the captured stdout+stderr as shell code;
# any unexpected analyzer output line would run as a command — confirm
# pr_analyzer.py emits only key=value lines on success.
source /tmp/pr_analysis_output.txt
echo "PR categorization successful"
echo "PR Type: $pr_type"
echo "Should Skip: $should_skip"
# Set GitHub outputs
echo "pr_type=$pr_type" >> $GITHUB_OUTPUT
echo "should_skip=$should_skip" >> $GITHUB_OUTPUT
if [ "$should_skip" = "true" ]; then
if [ "$pr_type" = "translation" ]; then
echo "✅ Translation-only PR detected. Skipping automation (direct review process)."
elif [ "$pr_type" = "none" ]; then
echo "✅ No relevant documentation changes detected. Skipping workflow."
fi
exit 0
fi
else
# Analysis failed - likely mixed PR
echo "PR categorization failed - likely mixed content PR"
ERROR_MESSAGE=$(cat /tmp/pr_analysis_output.txt | grep "error_message=" | cut -d'=' -f2- || echo "Mixed content PR detected")
echo "error=mixed_pr" >> $GITHUB_OUTPUT
echo "error_message<<EOF" >> $GITHUB_OUTPUT
echo "$ERROR_MESSAGE" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
exit 1
fi
# Only English-content PRs proceed to sync-plan generation.
- name: Analyze English changes for translation
if: steps.categorize.outputs.pr_type == 'english'
id: analyze
run: |
echo "Analyzing English changes for automatic translation..."
BASE_SHA="${{ github.event.pull_request.base.sha }}"
HEAD_SHA="${{ github.event.pull_request.head.sha }}"
# Get all changed files (not just English ones for file analysis)
CHANGED_FILES=$(git diff --name-only $BASE_SHA $HEAD_SHA)
# Count changes for security limits
# NOTE(review): `wc -l` reports 1 even when $CHANGED_FILES is empty,
# because echo always emits a trailing newline.
FILE_COUNT=$(echo "$CHANGED_FILES" | wc -l)
echo "Changed files count: $FILE_COUNT"
# Security check: Limit number of files
MAX_FILES=50
if [ "$FILE_COUNT" -gt "$MAX_FILES" ]; then
echo "Error: Too many files changed ($FILE_COUNT > $MAX_FILES)"
echo "error=too_many_files" >> $GITHUB_OUTPUT
exit 1
fi
# Create analysis report
# NOTE(review): the PR title/author are expanded by Actions directly into this
# unquoted heredoc — a title containing quotes breaks the JSON, and PR titles
# are attacker-controlled (expression-injection vector). Prefer env vars.
cat > /tmp/analysis.json <<EOF
{
"pr_number": ${{ github.event.pull_request.number }},
"pr_title": "${{ github.event.pull_request.title }}",
"pr_author": "${{ github.event.pull_request.user.login }}",
"base_sha": "$BASE_SHA",
"head_sha": "$HEAD_SHA",
"file_count": $FILE_COUNT,
"timestamp": "$(date -u +%Y-%m-%dT%H:%M:%SZ)",
"repository": "${{ github.repository }}",
"ref": "${{ github.ref }}",
"pr_type": "english"
}
EOF
# Save changed files list
echo "$CHANGED_FILES" > /tmp/changed_files.txt
# Analyze file types and sizes for English files that need translation
> /tmp/file_analysis.txt
while IFS= read -r file; do
if [[ "$file" =~ ^en/.*\.(md|mdx)$ ]] && [ -f "$file" ]; then
# stat -f%z is the BSD form, stat -c%s the GNU form; "0" is the last-resort fallback.
SIZE=$(stat -f%z "$file" 2>/dev/null || stat -c%s "$file" 2>/dev/null || echo "0")
echo "$file|$SIZE" >> /tmp/file_analysis.txt
# Security check: File size limit (10MB)
MAX_SIZE=$((10 * 1024 * 1024))
if [ "$SIZE" -gt "$MAX_SIZE" ]; then
echo "Error: File $file exceeds size limit ($SIZE > $MAX_SIZE)"
echo "error=file_too_large" >> $GITHUB_OUTPUT
exit 1
fi
fi
done <<< "$CHANGED_FILES"
# Check for docs.json changes
if echo "$CHANGED_FILES" | grep -q '^docs\.json$'; then
echo "true" > /tmp/docs_json_changed.txt
# Use PR analyzer's docs.json analysis
cd tools/translate
# (unquoted heredoc delimiter: $BASE_SHA / $HEAD_SHA below are expanded by the
# shell before Python runs)
python3 - <<EOF
import sys
sys.path.append('.')
from pr_analyzer import PRAnalyzer
analyzer = PRAnalyzer("$BASE_SHA", "$HEAD_SHA")
docs_changes = analyzer.analyze_docs_json_changes()
structure_changes = {
"structure_changed": docs_changes["any_docs_json_changes"],
"navigation_modified": docs_changes["english_section"],
"languages_affected": ["cn", "jp"] if docs_changes["english_section"] else []
}
import json
with open("/tmp/structure_changes.json", "w") as f:
json.dump(structure_changes, f, indent=2)
EOF
else
echo "false" > /tmp/docs_json_changed.txt
echo '{"structure_changed": false, "navigation_modified": false, "languages_affected": []}' > /tmp/structure_changes.json
fi
echo "has_changes=true" >> $GITHUB_OUTPUT
echo "Analysis complete"
# Defense in depth: re-validate paths already filtered by the regex above.
- name: Validate file paths
if: steps.analyze.outputs.has_changes == 'true'
run: |
echo "Validating English file paths for translation..."
# Security: Validate English files that will be translated
while IFS='|' read -r file size; do
if [ -n "$file" ]; then
# Check for directory traversal attempts
if echo "$file" | grep -q '\.\./'; then
echo "Error: Invalid file path detected: $file"
exit 1
fi
# Check file extension for English files
if ! echo "$file" | grep -qE '\.(md|mdx)$'; then
echo "Error: Invalid file type for translation: $file"
exit 1
fi
# Check path starts with en/ (only English files need translation)
if ! echo "$file" | grep -qE '^en/'; then
echo "Error: Non-English file in translation list: $file"
exit 1
fi
fi
done < /tmp/file_analysis.txt
echo "All English file paths validated for translation"
# Merges analysis.json, file_analysis.txt and structure_changes.json into the
# sync_plan.json consumed by the execute workflow.
- name: Create analysis summary
if: steps.analyze.outputs.has_changes == 'true'
run: |
echo "Creating analysis summary for English changes..."
# Create a comprehensive analysis summary
# (quoted heredoc delimiter: the Python below is passed through without shell expansion)
python3 - <<'EOF'
import json
import os
# Load analysis data
with open("/tmp/analysis.json") as f:
analysis = json.load(f)
# Load file analysis (English files to translate)
files_to_sync = []
with open("/tmp/file_analysis.txt") as f:
for line in f:
if line.strip():
file_path, size = line.strip().split("|")
files_to_sync.append({
"path": file_path,
"size": int(size),
"type": "mdx" if file_path.endswith(".mdx") else "md"
})
# Note: docs.json is NOT added to files_to_sync
# It's handled separately via structure_changes and sync_docs_json_structure()
# because it needs structure syncing, not translation
# Load structure changes
with open("/tmp/structure_changes.json") as f:
structure_changes = json.load(f)
# Create sync plan
sync_plan = {
"metadata": analysis,
"files_to_sync": files_to_sync,
"structure_changes": structure_changes,
"target_languages": ["cn", "jp"],
"sync_required": len(files_to_sync) > 0 or structure_changes.get("structure_changed", False)
}
# Save sync plan
with open("/tmp/sync_plan.json", "w") as f:
json.dump(sync_plan, f, indent=2)
print(f"English sync plan created: {len(files_to_sync)} files to translate")
if structure_changes.get("structure_changed"):
print("Documentation structure changes detected")
EOF
# Artifact name encodes the PR number; the execute workflow matches on the
# "docs-sync-analysis-" prefix.
- name: Upload analysis artifacts
if: steps.analyze.outputs.has_changes == 'true'
uses: actions/upload-artifact@v4
with:
name: docs-sync-analysis-${{ github.event.pull_request.number }}
path: |
/tmp/analysis.json
/tmp/changed_files.txt
/tmp/file_analysis.txt
/tmp/sync_plan.json
/tmp/docs_json_changed.txt
/tmp/structure_changes.json
retention-days: 1
# Posts the analyzer's mixed-PR error message back to the PR.
# NOTE(review): the step output is interpolated into a JS template literal
# below; a message containing backticks or ${ } would break or inject into
# the script — prefer passing it via env.
- name: Report mixed PR error
if: failure() && steps.categorize.outputs.error == 'mixed_pr'
uses: actions/github-script@v7
continue-on-error: true
with:
script: |
const errorMessage = `${{ steps.categorize.outputs.error_message }}`;
try {
await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number,
body: errorMessage
});
console.log('Posted mixed PR error message to PR');
} catch (error) {
console.log('Could not comment on PR:', error.message);
console.log('Error message would have been:');
console.log(errorMessage);
}
644
.github/workflows/sync_docs_execute.yml vendored Normal file
View File

@@ -0,0 +1,644 @@
name: Execute Documentation Sync
on:
workflow_run:
workflows: ["Analyze Documentation Changes"]
types:
- completed
workflow_dispatch:
inputs:
pr_number:
description: 'PR number to process'
required: true
type: string
permissions:
contents: write
pull-requests: write
actions: read
jobs:
execute-sync:
runs-on: ubuntu-latest
if: github.event_name == 'workflow_dispatch' || github.event.workflow_run.conclusion == 'success'
steps:
- name: Check workflow source
id: check-source
run: |
echo "Checking workflow source..."
echo "Event: ${{ github.event.workflow_run.event }}"
echo "Repository: ${{ github.event.workflow_run.repository.full_name }}"
echo "Head Repository: ${{ github.event.workflow_run.head_repository.full_name }}"
echo "Head Branch: ${{ github.event.workflow_run.head_branch }}"
# Security check: Only process PRs from the same repository or trusted forks
if [[ "${{ github.event.workflow_run.event }}" != "pull_request" ]]; then
echo "Not a pull request event, skipping"
echo "should_process=false" >> $GITHUB_OUTPUT
exit 0
fi
# Check if this is from a fork
IS_FORK="false"
if [[ "${{ github.event.workflow_run.repository.full_name }}" != "${{ github.event.workflow_run.head_repository.full_name }}" ]]; then
IS_FORK="true"
fi
echo "is_fork=$IS_FORK" >> $GITHUB_OUTPUT
echo "should_process=true" >> $GITHUB_OUTPUT
- name: Download analysis artifacts
if: steps.check-source.outputs.should_process == 'true' || github.event_name == 'workflow_dispatch'
uses: actions/github-script@v7
id: download-artifacts
with:
script: |
const fs = require('fs');
// Determine which workflow run to get artifacts from
let runId;
let prNumber;
if (context.eventName === 'workflow_dispatch') {
// Manual trigger - use the pr_number input
prNumber = '${{ github.event.inputs.pr_number }}';
console.log(`Manual trigger for PR #${prNumber}`);
// Find the most recent analyze workflow run for this specific PR
const runs = await github.rest.actions.listWorkflowRuns({
owner: context.repo.owner,
repo: context.repo.repo,
workflow_id: 'sync_docs_analyze.yml',
per_page: 100
});
// Find run that matches our specific PR number
let matchingRun = null;
for (const run of runs.data.workflow_runs) {
if (run.conclusion === 'success' && run.event === 'pull_request' && run.pull_requests.length > 0) {
const pullRequest = run.pull_requests[0];
if (pullRequest.number.toString() === prNumber) {
matchingRun = run;
break;
}
}
}
if (!matchingRun) {
console.log(`No successful analyze workflow run found for PR #${prNumber}`);
return false;
}
runId = matchingRun.id;
console.log(`Found analyze workflow run: ${runId} for PR #${prNumber}`);
} else {
// Triggered by workflow_run
runId = context.payload.workflow_run.id;
console.log(`Workflow run trigger, run ID: ${runId}`);
}
// List artifacts from the analyze workflow run
const artifacts = await github.rest.actions.listWorkflowRunArtifacts({
owner: context.repo.owner,
repo: context.repo.repo,
run_id: runId
});
console.log(`Found ${artifacts.data.artifacts.length} artifacts`);
artifacts.data.artifacts.forEach(a => console.log(` - ${a.name}`));
const matchArtifact = artifacts.data.artifacts.find(artifact => {
if (context.eventName === 'workflow_dispatch') {
return artifact.name === `docs-sync-analysis-${prNumber}`;
} else {
return artifact.name.startsWith('docs-sync-analysis-');
}
});
if (!matchArtifact) {
console.log('No matching analysis artifact found');
return false;
}
console.log(`Downloading artifact: ${matchArtifact.name}`);
const download = await github.rest.actions.downloadArtifact({
owner: context.repo.owner,
repo: context.repo.repo,
artifact_id: matchArtifact.id,
archive_format: 'zip'
});
fs.writeFileSync('/tmp/artifacts.zip', Buffer.from(download.data));
console.log('Artifact downloaded successfully');
// Extract PR number from artifact name
if (!prNumber) {
prNumber = matchArtifact.name.split('-').pop();
}
core.setOutput('pr_number', prNumber);
core.setOutput('artifact_found', 'true');
return true;
- name: Extract and validate artifacts
if: steps.download-artifacts.outputs.artifact_found == 'true'
id: extract-artifacts
run: |
echo "Extracting artifacts..."
# Create secure temporary directory
WORK_DIR=$(mktemp -d /tmp/sync-XXXXXX)
echo "work_dir=$WORK_DIR" >> $GITHUB_OUTPUT
# Extract to temporary directory
cd "$WORK_DIR"
unzip /tmp/artifacts.zip
# Validate extracted files
REQUIRED_FILES="analysis.json sync_plan.json changed_files.txt"
for file in $REQUIRED_FILES; do
if [ ! -f "$file" ]; then
echo "Error: Required file $file not found"
exit 1
fi
done
# Validate JSON structure
python3 -c "
import json
import sys
try:
with open('analysis.json') as f:
analysis = json.load(f)
with open('sync_plan.json') as f:
sync_plan = json.load(f)
# Validate required fields
assert 'pr_number' in analysis
assert 'files_to_sync' in sync_plan
assert 'target_languages' in sync_plan
print('Artifacts validated successfully')
except Exception as e:
print(f'Validation error: {e}')
sys.exit(1)
"
# Extract PR number and other metadata
PR_NUMBER=$(python3 -c "import json; print(json.load(open('analysis.json'))['pr_number'])")
echo "pr_number=$PR_NUMBER" >> $GITHUB_OUTPUT
# Extract head SHA to checkout the PR branch (needed for new files)
HEAD_SHA=$(python3 -c "import json; print(json.load(open('analysis.json'))['head_sha'])")
echo "head_sha=$HEAD_SHA" >> $GITHUB_OUTPUT
# Check if sync is required
SYNC_REQUIRED=$(python3 -c "import json; print(str(json.load(open('sync_plan.json'))['sync_required']).lower())")
echo "sync_required=$SYNC_REQUIRED" >> $GITHUB_OUTPUT
- name: Checkout PR branch
if: steps.extract-artifacts.outputs.sync_required == 'true'
uses: actions/checkout@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
fetch-depth: 0
ref: ${{ steps.extract-artifacts.outputs.head_sha }} # Checkout PR's head commit to access new files
- name: Set up Python
if: steps.extract-artifacts.outputs.sync_required == 'true'
uses: actions/setup-python@v4
with:
python-version: '3.9'
- name: Install dependencies
if: steps.extract-artifacts.outputs.sync_required == 'true'
run: |
cd tools/translate
pip install httpx aiofiles python-dotenv
- name: Check for manual approval requirement
if: steps.extract-artifacts.outputs.sync_required == 'true' && steps.check-source.outputs.is_fork == 'true'
id: check-approval
uses: actions/github-script@v7
with:
script: |
const prNumber = ${{ steps.extract-artifacts.outputs.pr_number }};
// Get PR details
const pr = await github.rest.pulls.get({
owner: context.repo.owner,
repo: context.repo.repo,
pull_number: prNumber
});
const author = pr.data.user.login;
const authorAssociation = pr.data.author_association;
// Check if author is trusted
const trustedAssociations = ['OWNER', 'MEMBER', 'COLLABORATOR'];
const trustedContributors = process.env.TRUSTED_CONTRIBUTORS?.split(',') || [];
const isTrusted = trustedAssociations.includes(authorAssociation) ||
trustedContributors.includes(author);
if (!isTrusted) {
// Check for approval from maintainer
const reviews = await github.rest.pulls.listReviews({
owner: context.repo.owner,
repo: context.repo.repo,
pull_number: prNumber
});
const hasApproval = reviews.data.some(review =>
review.state === 'APPROVED' &&
trustedAssociations.includes(review.author_association)
);
if (!hasApproval) {
console.log('PR requires manual approval from a maintainer');
core.setOutput('needs_approval', 'true');
// Comment on PR
await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: prNumber,
body: '⏸️ **Documentation sync is pending approval**\n\n' +
'This PR requires approval from a maintainer before automatic synchronization can proceed.\n\n' +
'Once approved, the documentation will be automatically translated and synchronized.'
});
return;
}
}
core.setOutput('needs_approval', 'false');
- name: Execute safe synchronization
if: steps.extract-artifacts.outputs.sync_required == 'true' && steps.check-approval.outputs.needs_approval != 'true'
id: sync
env:
DIFY_API_KEY: ${{ secrets.DIFY_API_KEY }}
run: |
echo "Executing documentation synchronization..."
WORK_DIR="${{ steps.extract-artifacts.outputs.work_dir }}"
PR_NUMBER="${{ steps.extract-artifacts.outputs.pr_number }}"
# Create a new branch for the sync results
SYNC_BRANCH="docs-sync-pr-${PR_NUMBER}"
git checkout -b "$SYNC_BRANCH"
# Reset branch to main to avoid including English file changes from PR
# Use --soft to keep working directory with PR files (needed for translation)
git reset --soft origin/main
# Unstage everything
git reset
# Run synchronization with security constraints
cd tools/translate
# Create a secure sync script
cat > secure_sync.py <<'EOF'
import json
import sys
import os
import asyncio
from pathlib import Path
# Add parent directory to path
sys.path.append(os.path.dirname(__file__))
from sync_and_translate import DocsSynchronizer
async def secure_sync():
work_dir = sys.argv[1]
# Load sync plan
with open(f"{work_dir}/sync_plan.json") as f:
sync_plan = json.load(f)
# Security: Only sync files from the approved list
files_to_sync = sync_plan.get("files_to_sync", [])
# Validate file paths again
for file_info in files_to_sync:
file_path = file_info["path"]
# Security checks
if ".." in file_path or file_path.startswith("/"):
print(f"Security error: Invalid path {file_path}")
return False
# Allow en/ files and docs.json
if not (file_path.startswith("en/") or file_path == "docs.json"):
print(f"Security error: File outside en/ directory: {file_path}")
return False
# Initialize synchronizer
api_key = os.environ.get("DIFY_API_KEY")
if not api_key:
print("Error: DIFY_API_KEY not set")
return False
synchronizer = DocsSynchronizer(api_key)
# Perform limited sync
results = {
"translated": [],
"failed": [],
"skipped": []
}
for file_info in files_to_sync[:10]: # Limit to 10 files
file_path = file_info["path"]
print(f"Processing: {file_path}")
# Skip docs.json - it's handled separately in structure sync
if file_path == "docs.json":
results["skipped"].append(f"{file_path} (structure file - handled separately)")
continue
# Skip versioned directories (frozen/archived docs)
if file_path.startswith("versions/"):
results["skipped"].append(f"{file_path} (versioned - not auto-translated)")
continue
try:
# Only translate if file exists and is safe
if os.path.exists(f"../../{file_path}"):
for target_lang in ["cn", "jp"]:
target_path = file_path.replace("en/", f"{target_lang}/")
success = await synchronizer.translate_file_with_notice(
file_path,
target_path,
target_lang
)
if success:
results["translated"].append(target_path)
else:
results["failed"].append(target_path)
else:
results["skipped"].append(file_path)
except Exception as e:
print(f"Error processing {file_path}: {e}")
results["failed"].append(file_path)
# Handle docs.json structure sync if needed
if sync_plan.get("structure_changes", {}).get("structure_changed"):
print("Syncing docs.json structure...")
try:
sync_log = synchronizer.sync_docs_json_structure()
print("\n".join(sync_log))
except Exception as e:
print(f"Error syncing structure: {e}")
# Save results
with open("/tmp/sync_results.json", "w") as f:
json.dump(results, f, indent=2)
return len(results["failed"]) == 0
if __name__ == "__main__":
success = asyncio.run(secure_sync())
sys.exit(0 if success else 1)
EOF
# Run the secure sync
python secure_sync.py "$WORK_DIR"
SYNC_EXIT_CODE=$?
echo "sync_exit_code=$SYNC_EXIT_CODE" >> $GITHUB_OUTPUT
# Check for changes
if [[ -n $(git status --porcelain) ]]; then
echo "has_changes=true" >> $GITHUB_OUTPUT
else
echo "has_changes=false" >> $GITHUB_OUTPUT
fi
- name: Commit and create translation PR
if: steps.sync.outputs.has_changes == 'true'
id: create-translation-pr
env:
GH_TOKEN: ${{ github.token }}
run: |
PR_NUMBER="${{ steps.extract-artifacts.outputs.pr_number }}"
SYNC_BRANCH="docs-sync-pr-${PR_NUMBER}"
git config user.name 'github-actions[bot]'
git config user.email 'github-actions[bot]@users.noreply.github.com'
# Commit translation changes only (not English files from PR)
git add cn/ jp/ docs.json
git commit -m "🌐 Auto-translate documentation for PR #${PR_NUMBER}
Auto-generated translations for documentation changes in PR #${PR_NUMBER}.
Original PR: #${PR_NUMBER}
Languages: Chinese (cn), Japanese (jp)
🤖 Generated with GitHub Actions"
# Push the translation branch to main repo
git push origin "$SYNC_BRANCH" --force
# Get original PR details for translation PR
ORIGINAL_PR_TITLE=$(gh pr view ${PR_NUMBER} --json title --jq '.title' 2>/dev/null || echo "Documentation changes")
# Create translation PR body
cat > /tmp/translation_pr_body.md <<EOF
# 🌐 Auto-generated Translations
This PR contains automatically generated translations for the documentation changes in PR #${PR_NUMBER}.
## Original PR
**Title:** ${ORIGINAL_PR_TITLE}
**Link:** #${PR_NUMBER}
## What's included
- 🇨🇳 Chinese (cn) translations
- 🇯🇵 Japanese (jp) translations
- 📋 Updated navigation structure in docs.json
## Review Process
1. Review the generated translations for accuracy
2. Make any necessary adjustments
3. Merge this PR to apply the translations
## Links
- **Original English PR:** #${PR_NUMBER}
- **Translation branch:** \`${SYNC_BRANCH}\`
---
🤖 This PR was created automatically by the documentation translation workflow.
EOF
# Create the translation PR
TRANSLATION_PR_URL=$(gh pr create \
--base main \
--head "$SYNC_BRANCH" \
--title "🌐 Auto-translations for PR #${PR_NUMBER}: ${ORIGINAL_PR_TITLE}" \
--body-file /tmp/translation_pr_body.md 2>&1 || echo "")
if [ -z "$TRANSLATION_PR_URL" ]; then
echo "❌ Failed to create translation PR"
fi
if [ -n "$TRANSLATION_PR_URL" ]; then
# Extract PR number from URL
TRANSLATION_PR_NUMBER=$(echo "$TRANSLATION_PR_URL" | grep -o '[0-9]\+$')
echo "translation_pr_number=$TRANSLATION_PR_NUMBER" >> $GITHUB_OUTPUT
echo "translation_pr_url=$TRANSLATION_PR_URL" >> $GITHUB_OUTPUT
echo "branch_name=$SYNC_BRANCH" >> $GITHUB_OUTPUT
echo "creation_successful=true" >> $GITHUB_OUTPUT
echo "✅ Translation PR created successfully: #${TRANSLATION_PR_NUMBER}"
else
echo "❌ Failed to create translation PR"
echo "creation_successful=false" >> $GITHUB_OUTPUT
fi
- name: Comment on original PR with translation PR link
if: steps.extract-artifacts.outputs.sync_required == 'true' && steps.check-approval.outputs.needs_approval != 'true'
uses: actions/github-script@v7
with:
script: |
const fs = require('fs');
const prNumber = ${{ steps.extract-artifacts.outputs.pr_number }};
const hasChanges = '${{ steps.sync.outputs.has_changes }}' === 'true';
const translationPrNumber = '${{ steps.create-translation-pr.outputs.translation_pr_number }}';
const translationPrUrl = '${{ steps.create-translation-pr.outputs.translation_pr_url }}';
const creationSuccessful = '${{ steps.create-translation-pr.outputs.creation_successful }}' === 'true';
let comment = '## 🤖 Automatic Translation Status\n\n';
if (hasChanges && creationSuccessful && translationPrNumber) {
// Load sync results if available
let results = { translated: [], failed: [], skipped: [] };
try {
results = JSON.parse(fs.readFileSync('/tmp/sync_results.json', 'utf8'));
} catch (e) {
console.log('Could not load sync results');
results = { translated: [], failed: [], skipped: [] };
}
comment += `🎉 **Translation PR Created Successfully!**\n\n`;
comment += `Your English documentation changes have been automatically translated and a separate PR has been created.\n\n`;
comment += `### 🔗 Translation PR: [#${translationPrNumber}](${translationPrUrl})\n\n`;
if (results.translated && results.translated.length > 0) {
comment += `### ✅ Successfully Translated (${results.translated.length} files):\n`;
results.translated.slice(0, 8).forEach(file => {
comment += `- \`${file}\`\n`;
});
if (results.translated.length > 8) {
comment += `- ... and ${results.translated.length - 8} more files\n`;
}
comment += '\n';
}
if (results.failed && results.failed.length > 0) {
comment += `### ⚠️ Translation Issues (${results.failed.length}):\n`;
results.failed.slice(0, 5).forEach(file => {
comment += `- \`${file}\`\n`;
});
if (results.failed.length > 5) {
comment += `- ... and ${results.failed.length - 5} more\n`;
}
comment += '\n';
}
comment += '### 🔄 What Happens Next:\n';
comment += `1. **Review**: The translation PR [#${translationPrNumber}](${translationPrUrl}) is ready for review\n`;
comment += '2. **Independent**: Both PRs can be reviewed and merged independently\n';
comment += '3. **Automatic**: Future updates to this PR will automatically update the translation PR\n\n';
comment += '### 📋 Languages Included:\n';
comment += '- 🇨🇳 **Chinese (cn)**: Simplified Chinese translations\n';
comment += '- 🇯🇵 **Japanese (jp)**: Japanese translations\n';
comment += '- 📋 **Navigation**: Updated docs.json structure for both languages\n\n';
comment += '---\n';
comment += '_🤖 This is an automated translation workflow. The translation PR was created automatically and is ready for review._';
} else if (hasChanges && !creationSuccessful) {
comment += '⚠️ **Translation PR Creation Failed**\n\n';
comment += 'The automatic translation process completed, but there was an issue creating the translation PR.\n\n';
comment += '**What you can do:**\n';
comment += '1. Check the workflow logs for detailed error information\n';
comment += '2. Contact a maintainer if the issue persists\n';
comment += '3. The translations may have been generated but need manual PR creation\n\n';
comment += '_🤖 This is an automated notification from the translation workflow._';
} else {
comment += '✅ **No Translation Changes Needed**\n\n';
comment += 'Your changes did not require new translations, or all translations are already up to date.\n\n';
comment += '_🤖 This is an automated check from the translation workflow._';
}
await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: prNumber,
body: comment
});
# Back-link the freshly created translation PR to the PR that triggered it,
# so reviewers can navigate between the pair. Best-effort: continue-on-error
# keeps a failed comment from failing the workflow.
- name: Comment on translation PR with original PR link
  if: steps.create-translation-pr.outputs.creation_successful == 'true' && steps.create-translation-pr.outputs.translation_pr_number
  uses: actions/github-script@v7
  continue-on-error: true
  with:
    script: |
      // Both values are substituted by Actions expression interpolation
      // before the script runs; they arrive here as numeric literals.
      const prNumber = ${{ steps.extract-artifacts.outputs.pr_number }};
      const translationPrNumber = ${{ steps.create-translation-pr.outputs.translation_pr_number }};
      // join('\n\n') double-spaces every entry, including the explicit '' spacers.
      const backLinkComment = [
        '## 🔗 Linked to Original PR',
        '',
        `This translation PR was automatically created for the English documentation changes in **PR #${prNumber}**.`,
        '',
        '### 📝 Original Changes',
        `- **Original PR**: #${prNumber}`,
        '- **Type**: English documentation updates',
        '- **Auto-translation**: This PR contains the corresponding translations',
        '',
        '### 🔄 Synchronization',
        '- **Automatic Updates**: This PR will be automatically updated if the original PR changes',
        '- **Independent Review**: This translation PR can be reviewed and merged independently',
        '- **Quality Check**: Please review translations for accuracy and cultural appropriateness',
        '',
        '---',
        `🤖 _This PR is part of the automated translation workflow. Any updates to PR #${prNumber} will automatically update this translation PR._`
      ].join('\n\n');
      // Swallow comment failures (e.g. permissions on forked repos) and log them.
      try {
        await github.rest.issues.createComment({
          owner: context.repo.owner,
          repo: context.repo.repo,
          issue_number: translationPrNumber,
          body: backLinkComment
        });
        console.log(`Successfully linked translation PR #${translationPrNumber} to original PR #${prNumber}`);
      } catch (error) {
        console.log(`Could not comment on translation PR #${translationPrNumber}:`, error.message);
      }
# Fallback job: runs only when the triggering workflow_run concluded in
# failure. It currently just logs - no PR comment is posted, because the PR
# number is not reliably recoverable from the workflow_run payload here.
handle-failure:
  runs-on: ubuntu-latest
  if: github.event.workflow_run.conclusion == 'failure'
  steps:
    - name: Report analysis failure
      uses: actions/github-script@v7
      with:
        script: |
          // Try to extract PR number from workflow run
          // NOTE(review): workflowRun is captured but not used yet - kept as a
          // hook for a future notification implementation.
          const workflowRun = context.payload.workflow_run;
          console.log('Analysis workflow failed');
          console.log('Attempting to notify PR if possible...');
          // This is a best-effort attempt to notify
          // In practice, you might want to store PR number differently

413
.github/workflows/sync_docs_update.yml vendored Normal file
View File

@@ -0,0 +1,413 @@
# Keeps the translation PR (branch docs-sync-pr-<n>) in sync when the original
# English-only PR receives new commits.
name: Update Translation PR

on:
  pull_request:
    # `synchronize` only: initial translation-PR creation is handled by a
    # separate workflow; this one reacts to subsequent pushes to an open PR.
    types: [synchronize]
    paths:
      - 'docs.json'
      - 'en/**/*.md'
      - 'en/**/*.mdx'

permissions:
  contents: write        # push regenerated translations to the sync branch
  pull-requests: write   # comment on the original and translation PRs
  actions: read

jobs:
  update-translation:
    runs-on: ubuntu-latest
    # Only run if this is an English-only PR update, not a translation PR
    if: |
      github.event.pull_request.draft == false &&
      !startsWith(github.event.pull_request.head.ref, 'docs-sync-pr-')
    # Fix: the step list that follows had no `steps:` parent key, which is
    # invalid workflow syntax.
    steps:
steps:
- name: Checkout PR
uses: actions/checkout@v4
with:
fetch-depth: 0
token: ${{ secrets.GITHUB_TOKEN }}
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: '3.9'
# Classify the PR via tools/translate/pr_analyzer.py and expose:
#   pr_type       - analyzer's category (or "unknown" on failure)
#   should_update - "true" only for English-only PRs
- name: Check if PR is English-only
  id: check-pr-type
  run: |
    echo "Checking if this PR contains only English changes..."
    BASE_SHA="${{ github.event.pull_request.base.sha }}"
    HEAD_SHA="${{ github.event.pull_request.head.sha }}"
    # Use PR analyzer to check PR type
    cd tools/translate
    # Fix 1: the default Actions shell runs `bash -e`, so a bare
    # `python …; if [ $? -eq 0 ]` aborts the step on a non-zero exit before
    # the failure branch can run. Running the command inside `if` makes both
    # branches reachable.
    # Fix 2: keep stderr out of the output file - it is `source`d below, and
    # merged error text would otherwise be executed as shell code.
    if python pr_analyzer.py "$BASE_SHA" "$HEAD_SHA" > /tmp/pr_analysis_output.txt 2> /tmp/pr_analysis_errors.txt; then
      # Parse analyzer output (expected to be key=value shell assignments)
      source /tmp/pr_analysis_output.txt
      echo "PR Type: $pr_type"
      echo "pr_type=$pr_type" >> $GITHUB_OUTPUT
      if [ "$pr_type" = "english" ]; then
        echo "✅ English-only PR detected - proceeding with translation update"
        echo "should_update=true" >> $GITHUB_OUTPUT
      else
        echo " Not an English-only PR (type: $pr_type) - skipping translation update"
        echo "should_update=false" >> $GITHUB_OUTPUT
      fi
    else
      echo "❌ PR analysis failed - likely mixed content, skipping translation update"
      # Surface the analyzer's stderr in the log for debugging (best-effort).
      cat /tmp/pr_analysis_errors.txt || true
      echo "should_update=false" >> $GITHUB_OUTPUT
      echo "pr_type=unknown" >> $GITHUB_OUTPUT
    fi
# Locate the open translation PR whose head branch is docs-sync-pr-<PR number>.
# Outputs: found_translation_pr, and (when open) translation_pr_number / _url.
- name: Find associated translation PR
  if: steps.check-pr-type.outputs.should_update == 'true'
  id: find-translation-pr
  env:
    # Fix: `gh` on Actions runners is not authenticated by default; without
    # GH_TOKEN every `gh pr list` call fails and the step always reported
    # "no translation PR found".
    GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  run: |
    PR_NUMBER=${{ github.event.pull_request.number }}
    echo "Looking for translation PR associated with PR #${PR_NUMBER}..."
    # Search for translation PR by branch name pattern; `|| echo ""` keeps a
    # search failure from aborting the step under `set -e`.
    TRANSLATION_PR_DATA=$(gh pr list \
      --search "head:docs-sync-pr-${PR_NUMBER}" \
      --json number,title,url,state \
      --jq '.[0] // empty' 2>/dev/null || echo "")
    if [ -n "$TRANSLATION_PR_DATA" ] && [ "$TRANSLATION_PR_DATA" != "null" ]; then
      TRANSLATION_PR_NUMBER=$(echo "$TRANSLATION_PR_DATA" | jq -r '.number')
      TRANSLATION_PR_STATE=$(echo "$TRANSLATION_PR_DATA" | jq -r '.state')
      TRANSLATION_PR_URL=$(echo "$TRANSLATION_PR_DATA" | jq -r '.url')
      if [ "$TRANSLATION_PR_STATE" = "OPEN" ]; then
        echo "✅ Found active translation PR #${TRANSLATION_PR_NUMBER}"
        echo "translation_pr_number=$TRANSLATION_PR_NUMBER" >> $GITHUB_OUTPUT
        echo "translation_pr_url=$TRANSLATION_PR_URL" >> $GITHUB_OUTPUT
        echo "found_translation_pr=true" >> $GITHUB_OUTPUT
      else
        echo " Found translation PR #${TRANSLATION_PR_NUMBER} but it's ${TRANSLATION_PR_STATE} - skipping update"
        echo "found_translation_pr=false" >> $GITHUB_OUTPUT
      fi
    else
      echo " No translation PR found for PR #${PR_NUMBER} - this might be the first update"
      echo "found_translation_pr=false" >> $GITHUB_OUTPUT
    fi
- name: Install dependencies
  if: steps.find-translation-pr.outputs.found_translation_pr == 'true'
  run: |
    # Runtime dependencies for the translation scripts in tools/translate.
    cd tools/translate
    pip install httpx aiofiles python-dotenv
# Regenerate translations on the existing sync branch. Outputs:
#   update_exit_code - exit status of the translation script
#   has_changes      - "true" when the working tree differs after the run
- name: Update translations
  if: steps.find-translation-pr.outputs.found_translation_pr == 'true'
  id: update-translations
  env:
    DIFY_API_KEY: ${{ secrets.DIFY_API_KEY }}
    # Fix: pass the head ref through the environment instead of interpolating
    # `${{ github.event.pull_request.head.ref }}` directly into the script -
    # branch names are attacker-controlled on PRs and direct expansion inside
    # `run:` allows shell injection.
    PR_BRANCH: ${{ github.event.pull_request.head.ref }}
  run: |
    echo "Updating translations for PR #${{ github.event.pull_request.number }}..."
    PR_NUMBER=${{ github.event.pull_request.number }}
    SYNC_BRANCH="docs-sync-pr-${PR_NUMBER}"
    BASE_SHA="${{ github.event.pull_request.base.sha }}"
    HEAD_SHA="${{ github.event.pull_request.head.sha }}"
    # Switch to translation branch; a missing branch is not an error for this
    # workflow (exit 0 so downstream `if:` guards simply skip).
    git fetch origin "$SYNC_BRANCH:$SYNC_BRANCH" || {
      echo "❌ Could not fetch translation branch $SYNC_BRANCH"
      echo "update_successful=false" >> $GITHUB_OUTPUT
      exit 0
    }
    git checkout "$SYNC_BRANCH"
    # Reset translation branch to main (keep working directory)
    git reset --soft origin/main
    git reset
    # Checkout English files from PR to translate
    git fetch origin "$PR_BRANCH:refs/remotes/origin/$PR_BRANCH"
    git checkout "origin/$PR_BRANCH" -- en/ || echo "No English files to checkout"
    # Re-run translation analysis and generation
    cd tools/translate
    # Create updated sync script (quoted heredoc: no shell expansion inside)
    cat > update_translations.py <<'EOF'
    import json
    import sys
    import os
    import asyncio
    from pathlib import Path

    # Add parent directory to path
    sys.path.append(os.path.dirname(__file__))

    from sync_and_translate import DocsSynchronizer
    from pr_analyzer import PRAnalyzer

    async def update_translations():
        """Re-translate changed English docs; returns True when nothing failed."""
        base_sha = sys.argv[1]
        head_sha = sys.argv[2]
        # Analyze changes
        analyzer = PRAnalyzer(base_sha, head_sha)
        result = analyzer.categorize_pr()
        if result['type'] != 'english':
            print(f"PR type is {result['type']}, not english - skipping")
            return False
        # Initialize synchronizer
        api_key = os.environ.get("DIFY_API_KEY")
        if not api_key:
            print("Error: DIFY_API_KEY not set")
            return False
        synchronizer = DocsSynchronizer(api_key)
        # Get English files that need translation
        file_categories = result['files']
        english_files = file_categories['english']
        results = {
            "translated": [],
            "failed": [],
            "skipped": [],
            "updated": True
        }
        # Translate English files
        for file_path in english_files[:10]:  # Limit to 10 files for safety
            print(f"Updating translations for: {file_path}")
            try:
                for target_lang in ["cn", "jp"]:
                    # NOTE(review): replaces the first "en/" anywhere in the
                    # path - assumes paths start with "en/"; verify upstream.
                    target_path = file_path.replace("en/", f"{target_lang}/")
                    success = await synchronizer.translate_file_with_notice(
                        file_path,
                        target_path,
                        target_lang
                    )
                    if success:
                        results["translated"].append(target_path)
                    else:
                        results["failed"].append(target_path)
            except Exception as e:
                print(f"Error processing {file_path}: {e}")
                results["failed"].append(file_path)
        # Handle docs.json structure sync if needed
        docs_changes = result['docs_json_changes']
        if docs_changes['any_docs_json_changes']:
            print("Updating docs.json structure...")
            try:
                sync_log = synchronizer.sync_docs_json_structure()
                print("\n".join(sync_log))
            except Exception as e:
                print(f"Error syncing docs.json structure: {e}")
        # Save results
        with open("/tmp/update_results.json", "w") as f:
            json.dump(results, f, indent=2)
        return len(results["failed"]) == 0

    if __name__ == "__main__":
        success = asyncio.run(update_translations())
        sys.exit(0 if success else 1)
    EOF
    # Run the update.
    # Fix: the default Actions shell runs `bash -e`, so a non-zero exit used
    # to abort the step before `UPDATE_EXIT_CODE=$?` and the has_changes
    # check could run. Suspend errexit just around the script.
    set +e
    python update_translations.py "$BASE_SHA" "$HEAD_SHA"
    UPDATE_EXIT_CODE=$?
    set -e
    echo "update_exit_code=$UPDATE_EXIT_CODE" >> $GITHUB_OUTPUT
    # Check for changes
    if [[ -n $(git status --porcelain) ]]; then
      echo "has_changes=true" >> $GITHUB_OUTPUT
      echo "✅ Translation updates detected"
    else
      echo "has_changes=false" >> $GITHUB_OUTPUT
      echo " No translation updates needed"
    fi
# Commit the regenerated cn/ jp/ files (and docs.json) to the sync branch.
# Output: commit_successful - "true" only when a commit was made and pushed.
- name: Commit and push translation updates
  if: steps.update-translations.outputs.has_changes == 'true'
  id: commit-updates
  run: |
    PR_NUMBER=${{ github.event.pull_request.number }}
    SYNC_BRANCH="docs-sync-pr-${PR_NUMBER}"
    git config user.name 'github-actions[bot]'
    git config user.email 'github-actions[bot]@users.noreply.github.com'
    # Commit only translation changes (not English files)
    git add cn/ jp/ docs.json
    # Fix: has_changes is computed from the whole working tree, so it can be
    # true when only en/ files differ; `git commit` with nothing staged exits
    # non-zero and would fail the step under `set -e`. Guard explicitly.
    if git diff --cached --quiet; then
      echo " No staged translation changes - nothing to commit"
      echo "commit_successful=false" >> $GITHUB_OUTPUT
      exit 0
    fi
    git commit -m "🔄 Update translations for PR #${PR_NUMBER}
    Updated translations following changes in PR #${PR_NUMBER}.
    Auto-updated by translation workflow.
    🤖 Generated with GitHub Actions"
    # Push updates to translation branch (force: branch was soft-reset to main)
    git push origin "$SYNC_BRANCH" --force
    echo "commit_successful=true" >> $GITHUB_OUTPUT
    echo "✅ Translation updates committed and pushed"
# Tell the original PR's author that the linked translation PR was refreshed.
- name: Comment on original PR about update
  if: steps.update-translations.outputs.has_changes == 'true' && steps.commit-updates.outputs.commit_successful == 'true'
  uses: actions/github-script@v7
  continue-on-error: true
  with:
    script: |
      const fs = require('fs');
      const prNumber = ${{ github.event.pull_request.number }};
      // NOTE(review): the two constants below are currently unused in this
      // script - kept for parity with the sibling steps; confirm intent.
      const translationPrNumber = '${{ steps.find-translation-pr.outputs.translation_pr_number }}';
      const translationPrUrl = '${{ steps.find-translation-pr.outputs.translation_pr_url }}';
      // Load update results written by update_translations.py; fall back to
      // empty lists when the file is missing or unparsable.
      let results = { translated: [], failed: [], skipped: [] };
      try {
        results = JSON.parse(fs.readFileSync('/tmp/update_results.json', 'utf8'));
      } catch (e) {
        console.log('Could not load update results');
      }
      let comment = `## 🔄 Translation PR Updated\n\n`
      comment += `Your English documentation changes have been automatically translated and the translation PR has been updated.\n\n`
      comment += `### 📝 Original Changes\n\n`;
      comment += `- **Original PR**: #${prNumber}\n`;
      comment += `- **Type**: English documentation updates\n`;
      comment += `### 🔄 Synchronization\n\n`;
      comment += `- **Automatic Updates**: This PR will be automatically updated if the original PR changes\n`;
      comment += `- **Independent Review**: This translation PR can be reviewed and merged independently\n`;
      // List at most 6 updated files, then summarize the remainder.
      if (results.translated && results.translated.length > 0) {
        comment += `### ✅ Updated Translations (${results.translated.length} files):\n`;
        results.translated.slice(0, 6).forEach(file => {
          comment += `- \`${file}\`\n`;
        });
        if (results.translated.length > 6) {
          comment += `- ... and ${results.translated.length - 6} more files\n`;
        }
        comment += '\n';
      }
      // Show at most 3 failures so the comment stays readable.
      if (results.failed && results.failed.length > 0) {
        comment += `### ⚠️ Update Issues (${results.failed.length}):\n`;
        results.failed.slice(0, 3).forEach(file => {
          comment += `- \`${file}\`\n`;
        });
        comment += '\n';
      }
      comment += `### 🔄 What's Updated:
      - **Translation Files**: All corresponding cn and jp files
      - **Navigation Structure**: Updated docs.json if needed
      - **Automatic**: This update happened automatically when you updated your PR
      ---
      🤖 _Automatic update from the translation workflow._`;
      try {
        await github.rest.issues.createComment({
          owner: context.repo.owner,
          repo: context.repo.repo,
          issue_number: prNumber,
          body: comment
        });
      } catch (error) {
        console.log('Could not comment on original PR:', error.message);
      }
# Mirror notice on the translation PR itself, summarizing what was refreshed.
- name: Comment on translation PR about update
  if: steps.update-translations.outputs.has_changes == 'true' && steps.commit-updates.outputs.commit_successful == 'true'
  uses: actions/github-script@v7
  continue-on-error: true
  with:
    script: |
      const fs = require('fs');
      const prNumber = ${{ github.event.pull_request.number }};
      const translationPrNumber = '${{ steps.find-translation-pr.outputs.translation_pr_number }}';
      // Load update results; default to empty lists on any read/parse failure.
      let results = { translated: [], failed: [], skipped: [] };
      try {
        results = JSON.parse(fs.readFileSync('/tmp/update_results.json', 'utf8'));
      } catch (e) {
        console.log('Could not load update results');
      }
      // One template literal; the ${...} below are evaluated by JavaScript
      // (the ${{ }} Actions interpolation already happened above).
      const updateComment = `## 🔄 Automatic Translation Update
      This translation PR has been automatically updated following changes in the original PR #${prNumber}.
      ### 📝 What Was Updated:
      - **Source**: Changes from PR #${prNumber}
      - **Updated Files**: ${results.translated ? results.translated.length : 0} translation files
      - **Languages**: Chinese (cn) and Japanese (jp)
      ### ✅ Translation Status:
      ${results.translated && results.translated.length > 0 ?
      `**Successfully Updated (${results.translated.length} files):**\n` +
      results.translated.slice(0, 5).map(f => `- \`${f}\``).join('\n') +
      (results.translated.length > 5 ? `\n- ... and ${results.translated.length - 5} more` : '') :
      '- All translations are up to date'}
      ${results.failed && results.failed.length > 0 ?
      `\n### ⚠️ Update Issues:\n${results.failed.slice(0, 3).map(f => `- \`${f}\``).join('\n')}` : ''}
      ### 🔄 Review Process:
      1. **Automatic Update**: This PR was updated automatically
      2. **Review Needed**: Please review the updated translations
      3. **Independent Merge**: This PR can still be merged independently
      ---
      🤖 _This update was triggered automatically by changes to PR #${prNumber}._`;
      try {
        await github.rest.issues.createComment({
          owner: context.repo.owner,
          repo: context.repo.repo,
          issue_number: translationPrNumber,
          body: updateComment
        });
      } catch (error) {
        console.log('Could not comment on translation PR:', error.message);
      }
# Informational note on the original PR when a translation PR exists but this
# push produced no translation changes.
- name: Handle no updates needed
  if: steps.find-translation-pr.outputs.found_translation_pr == 'true' && steps.update-translations.outputs.has_changes != 'true'
  uses: actions/github-script@v7
  continue-on-error: true
  with:
    script: |
      const prNumber = ${{ github.event.pull_request.number }};
      const translationPrNumber = '${{ steps.find-translation-pr.outputs.translation_pr_number }}';
      // ${{ github.repository }} is interpolated by Actions; ${...} by JS.
      const comment = `## ✅ Translation PR Already Up to Date
      Your changes to PR #${prNumber} did not require translation updates.
      The translation PR [#${translationPrNumber}](https://github.com/${{ github.repository }}/pull/${translationPrNumber}) remains current.
      🤖 _Automatic check from the translation workflow._`;
      // Best-effort: log and continue if commenting fails.
      try {
        await github.rest.issues.createComment({
          owner: context.repo.owner,
          repo: context.repo.repo,
          issue_number: prNumber,
          body: comment
        });
      } catch (error) {
        console.log('Could not comment on original PR:', error.message);
      }