Fix workflow failure when PR is force-pushed

When a PR is force-pushed, github.event.before points to an orphaned
commit that isn't fetched by actions/checkout. This caused the workflow
to fail with "Error: command not found" (exit code 127) because:

1. pr_analyzer.py's git diff failed on the inaccessible commit
2. Error messages went to stdout, breaking the source command

This fix:
- Validates COMPARE_BASE accessibility before use via git cat-file
- Falls back to merge-base strategy if commit is orphaned
- Redirects pr_analyzer.py error messages to stderr so they no longer
  mix into the stdout captured for sourcing (see point 2 above)

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
Gu
2025-12-16 18:19:41 -08:00
parent 5d7556d5f2
commit ed65f2c21c
2 changed files with 19 additions and 6 deletions

View File

@@ -105,6 +105,19 @@ jobs:
IS_INCREMENTAL="false"
fi
# Validate COMPARE_BASE is accessible (handles force-push orphaned commits)
if ! git cat-file -e "${COMPARE_BASE}^{commit}" 2>/dev/null; then
echo "⚠️ Commit $COMPARE_BASE not accessible (likely force-pushed away)"
echo "Falling back to merge-base strategy"
COMPARE_BASE=$(git merge-base "$PR_BASE" "$PR_HEAD")
if ! git cat-file -e "${COMPARE_BASE}^{commit}" 2>/dev/null; then
echo "❌ Error: Could not determine valid comparison base"
exit 1
fi
IS_INCREMENTAL="false"
echo "Using merge-base: $COMPARE_BASE"
fi
echo "compare_base=$COMPARE_BASE" >> $GITHUB_OUTPUT
echo "compare_head=$COMPARE_HEAD" >> $GITHUB_OUTPUT
echo "is_incremental=$IS_INCREMENTAL" >> $GITHUB_OUTPUT
@@ -123,9 +136,9 @@ jobs:
echo "Base SHA: $BASE_SHA"
echo "Head SHA: $HEAD_SHA"
# Run PR analyzer
# Run PR analyzer (stderr goes to console, stdout to file for sourcing)
cd tools/translate
python pr_analyzer.py "$BASE_SHA" "$HEAD_SHA" > /tmp/pr_analysis_output.txt 2>&1
python pr_analyzer.py "$BASE_SHA" "$HEAD_SHA" > /tmp/pr_analysis_output.txt
# Parse analyzer output
if [ $? -eq 0 ]; then

View File

@@ -51,7 +51,7 @@ class PRAnalyzer:
files = [f.strip() for f in result.stdout.strip().split('\n') if f.strip()]
return files
except subprocess.CalledProcessError as e:
print(f"Error getting changed files: {e}")
print(f"Error getting changed files: {e}", file=sys.stderr)
return []
def get_docs_json_at_sha(self, sha: str) -> Optional[Dict]:
@@ -63,7 +63,7 @@ class PRAnalyzer:
return json.loads(result.stdout)
except (subprocess.CalledProcessError, json.JSONDecodeError) as e:
print(f"Error loading docs.json at {sha}: {e}")
print(f"Error loading docs.json at {sha}: {e}", file=sys.stderr)
return None
def extract_language_navigation(self, docs_data: Dict, language: str) -> Optional[Dict]:
@@ -389,11 +389,11 @@ class SyncPlanGenerator:
if self._file_exists_at_commit(filepath, self.head_sha):
files_with_status.append((status, filepath))
else:
print(f"Skipping {filepath}: added then deleted in same PR")
print(f"Skipping {filepath}: added then deleted in same PR", file=sys.stderr)
return files_with_status
except subprocess.CalledProcessError as e:
print(f"Error getting changed files with status: {e}")
print(f"Error getting changed files with status: {e}", file=sys.stderr)
return []
def _file_exists_at_commit(self, filepath: str, commit_sha: str) -> bool: