diff --git a/.github/workflows/quality-check.yml b/.github/workflows/quality-check.yml
deleted file mode 100644
index 49fa04a7..00000000
--- a/.github/workflows/quality-check.yml
+++ /dev/null
@@ -1,288 +0,0 @@
-name: Documentation Quality Check
-
-on:
- pull_request:
- branches: [main, revamp]
- paths:
- - '**/*.md'
- - '**/*.mdx'
-
-permissions:
- contents: read
- pull-requests: write
-
-jobs:
- quality-check:
- runs-on: ubuntu-latest
- steps:
- - name: Checkout repository
- uses: actions/checkout@v4
- with:
- fetch-depth: 0
-
- - name: Setup Python
- uses: actions/setup-python@v4
- with:
- python-version: '3.9'
-
- - name: Install dependencies
- run: |
- pip install pyyaml
-
- - name: Get changed MDX files
- id: get-files
- run: |
- CHANGED_FILES=$(git diff --name-only origin/${{ github.base_ref }} HEAD | grep -E '\.(md|mdx)$' || echo "")
-
- if [ -z "$CHANGED_FILES" ]; then
- echo "No MDX files changed"
- echo "has_changes=false" >> $GITHUB_OUTPUT
- exit 0
- fi
-
- echo "$CHANGED_FILES" > /tmp/changed_files.txt
- echo "has_changes=true" >> $GITHUB_OUTPUT
- echo "Changed files:"
- echo "$CHANGED_FILES"
-
- - name: Create quality check script
- if: steps.get-files.outputs.has_changes == 'true'
- run: |
- cat > /tmp/quality_check.py << 'EOFPYTHON'
- import re
- import sys
- from pathlib import Path
- from typing import List, Dict, Tuple
- import yaml
-
- class DocumentationQualityChecker:
- def __init__(self):
- self.errors = []
- self.warnings = []
-
- def extract_frontmatter(self, content: str) -> Tuple[Dict, str]:
- frontmatter_pattern = r'^---\s*\n(.*?)\n---\s*\n(.*)$'
- match = re.match(frontmatter_pattern, content, re.DOTALL)
-
- if not match:
- return {}, content
-
- try:
- frontmatter = yaml.safe_load(match.group(1))
- body = match.group(2)
- return frontmatter or {}, body
- except yaml.YAMLError as e:
- return {}, content
-
- def check_frontmatter(self, file_path: str, content: str):
- frontmatter, _ = self.extract_frontmatter(content)
-
- if not frontmatter:
- self.errors.append(f"{file_path}: Missing frontmatter")
- return
-
- if 'title' not in frontmatter or not frontmatter['title']:
- self.errors.append(f"{file_path}: Missing or empty 'title' in frontmatter")
-
- if 'description' not in frontmatter or not frontmatter['description']:
- self.errors.append(f"{file_path}: Missing or empty 'description' in frontmatter")
-
- def check_internal_links(self, file_path: str, content: str):
- _, body = self.extract_frontmatter(content)
-
- markdown_link_pattern = r'\[([^\]]+)\]\(([^\)]+)\)'
- links = re.findall(markdown_link_pattern, body)
-
- for link_text, link_url in links:
- if link_url.startswith(('http://', 'https://', 'mailto:', '#')):
- continue
-
- if link_url.startswith('/'):
- self.warnings.append(
- f"{file_path}: Absolute internal link detected: {link_url}. "
- "Consider using relative paths for internal links."
- )
-
- def check_image_paths(self, file_path: str, content: str):
- _, body = self.extract_frontmatter(content)
-
- markdown_img_pattern = r'!\[([^\]]*)\]\(([^\)]+)\)'
- markdown_matches = re.findall(markdown_img_pattern, body)
- for alt_text, img_path in markdown_matches:
- if img_path.startswith(('http://', 'https://', 'data:')):
- continue
-
- resolved_path = Path(file_path).parent / img_path
-
- if not resolved_path.exists() and not img_path.startswith('/'):
- self.warnings.append(
- f"{file_path}: Image path may not exist: {img_path}"
- )
-
- html_img_pattern = r'<img[^>]+src=["\']([^"\']+)["\']'
- html_matches = re.findall(html_img_pattern, body)
- for img_path in html_matches:
- if img_path.startswith(('http://', 'https://', 'data:')):
- continue
-
- resolved_path = Path(file_path).parent / img_path
-
- if not resolved_path.exists() and not img_path.startswith('/'):
- self.warnings.append(
- f"{file_path}: Image path may not exist: {img_path}"
- )
-
- def check_mintlify_components(self, file_path: str, content: str):
- _, body = self.extract_frontmatter(content)
-
- component_pattern = r'<(\w+)([^>]*)>(.*?)</\1>|<(\w+)([^>]*)/>'
- matches = re.finditer(component_pattern, body, re.DOTALL)
-
- known_components = {
- 'Note', 'Info', 'Warning', 'Tip', 'Check', 'CodeGroup',
- 'Code', 'Accordion', 'AccordionGroup', 'Card', 'CardGroup',
- 'Steps', 'Step', 'Tabs', 'Tab', 'Frame', 'Icon'
- }
-
- for match in matches:
- component_name = match.group(1) or match.group(4)
-
- if component_name and component_name[0].isupper():
- if component_name not in known_components:
- self.warnings.append(
- f"{file_path}: Unknown Mintlify component: <{component_name}>"
- )
-
- def check_code_blocks(self, file_path: str, content: str):
- _, body = self.extract_frontmatter(content)
-
- code_block_pattern = r'```([^\n]*)\n(.*?)```'
- code_blocks = re.findall(code_block_pattern, body, re.DOTALL)
-
- for i, (language_tag, code_content) in enumerate(code_blocks):
- if not language_tag.strip():
- self.warnings.append(
- f"{file_path}: Code block #{i+1} missing language tag"
- )
-
- def check_file(self, file_path: str):
- try:
- with open(file_path, 'r', encoding='utf-8') as f:
- content = f.read()
- except Exception as e:
- self.errors.append(f"{file_path}: Could not read file: {e}")
- return
-
- self.check_frontmatter(file_path, content)
- self.check_internal_links(file_path, content)
- self.check_image_paths(file_path, content)
- self.check_mintlify_components(file_path, content)
- self.check_code_blocks(file_path, content)
-
- def run_checks(self, file_list: List[str]) -> bool:
- for file_path in file_list:
- if Path(file_path).exists():
- self.check_file(file_path)
-
- return len(self.errors) == 0
-
- if __name__ == "__main__":
- with open('/tmp/changed_files.txt', 'r') as f:
- files = [line.strip() for line in f if line.strip()]
-
- checker = DocumentationQualityChecker()
- success = checker.run_checks(files)
-
- if checker.errors:
- print("ERRORS:")
- for error in checker.errors:
- print(f" ❌ {error}")
-
- if checker.warnings:
- print("\nWARNINGS:")
- for warning in checker.warnings:
- print(f" ⚠️ {warning}")
-
- if success:
- if checker.warnings:
- print(f"\n✅ Quality check passed with {len(checker.warnings)} warning(s)")
- else:
- print("\n✅ All quality checks passed")
- sys.exit(0)
- else:
- print(f"\n❌ Quality check failed with {len(checker.errors)} error(s)")
- sys.exit(1)
- EOFPYTHON
-
- - name: Run quality checks
- if: steps.get-files.outputs.has_changes == 'true'
- id: check
- run: |
- python /tmp/quality_check.py 2>&1 | tee /tmp/quality_output.log
- CHECK_EXIT_CODE=${PIPESTATUS[0]}
-
- if [ $CHECK_EXIT_CODE -ne 0 ]; then
- echo "check_failed=true" >> $GITHUB_OUTPUT
- exit 1
- else
- echo "check_failed=false" >> $GITHUB_OUTPUT
- fi
-
- - name: Comment quality check results on PR
- if: always() && steps.get-files.outputs.has_changes == 'true'
- uses: actions/github-script@v7
- with:
- script: |
- const fs = require('fs');
- let qualityLog = '';
-
- try {
- qualityLog = fs.readFileSync('/tmp/quality_output.log', 'utf8');
- } catch (e) {
- qualityLog = 'Could not read quality check log';
- }
-
- const checkFailed = '${{ steps.check.outputs.check_failed }}' === 'true';
- const hasErrors = qualityLog.includes('ERRORS:');
- const hasWarnings = qualityLog.includes('WARNINGS:');
-
- if (!hasErrors && !hasWarnings) {
- const comment = `## ✅ Documentation Quality Check Passed
-
- All documentation quality checks passed successfully!`;
-
- await github.rest.issues.createComment({
- owner: context.repo.owner,
- repo: context.repo.repo,
- issue_number: context.issue.number,
- body: comment
- });
- return;
- }
-
- let statusEmoji = checkFailed ? '❌' : '⚠️';
- let statusText = checkFailed ? 'Failed' : 'Passed with Warnings';
-
- const comment = `## ${statusEmoji} Documentation Quality Check ${statusText}
-
- ${qualityLog}
-
- ${checkFailed ? `
- **Action Required**: Please fix the errors listed above before merging.
- ` : `
- **Warnings**: Consider addressing these warnings to improve documentation quality.
- `}
-
- Common fixes:
- - Add \`title\` and \`description\` to frontmatter in all MDX files
- - Use relative paths for internal links
- - Add language tags to code blocks (e.g., \`\`\`python, \`\`\`javascript)
- - Verify image paths are correct
- - Check Mintlify component syntax`;
-
- await github.rest.issues.createComment({
- owner: context.repo.owner,
- repo: context.repo.repo,
- issue_number: context.issue.number,
- body: comment
- });
diff --git a/.github/workflows/validate-docs-json.yml b/.github/workflows/validate-docs-json.yml
deleted file mode 100644
index 99d0a6aa..00000000
--- a/.github/workflows/validate-docs-json.yml
+++ /dev/null
@@ -1,275 +0,0 @@
-name: Validate docs.json
-
-on:
- pull_request:
- branches: [main, revamp]
- paths:
- - 'docs.json'
-
-permissions:
- contents: read
- pull-requests: write
-
-jobs:
- validate-structure:
- runs-on: ubuntu-latest
- steps:
- - name: Checkout repository
- uses: actions/checkout@v4
- with:
- fetch-depth: 0
-
- - name: Setup Python
- uses: actions/setup-python@v4
- with:
- python-version: '3.9'
-
- - name: Create validation script
- run: |
- cat > /tmp/validate_docs_json.py << 'EOFPYTHON'
- import json
- import sys
- from pathlib import Path
- from typing import Set, List, Dict, Any
-
- class DocsJsonValidator:
- def __init__(self, repo_root: Path):
- self.repo_root = repo_root
- self.docs_json_path = repo_root / "docs.json"
- self.errors = []
- self.warnings = []
-
- with open("tools/translate/config.json", "r") as f:
- config = json.load(f)
- self.source_lang = config.get("source_language", "en")
- self.target_langs = config.get("target_languages", ["zh", "ja"])
- self.all_langs = [self.source_lang] + self.target_langs
-
- self.lang_dirs = {}
- for lang_code in self.all_langs:
- if lang_code in config.get("languages", {}):
- self.lang_dirs[lang_code] = config["languages"][lang_code]["directory"]
- else:
- self.lang_dirs[lang_code] = lang_code
-
- def load_docs_json(self) -> Dict:
- try:
- with open(self.docs_json_path, 'r', encoding='utf-8') as f:
- return json.load(f)
- except json.JSONDecodeError as e:
- self.errors.append(f"Invalid JSON syntax in docs.json: {e}")
- return {}
- except FileNotFoundError:
- self.errors.append("docs.json file not found")
- return {}
-
- def validate_json_schema(self, docs_data: Dict) -> bool:
- required_fields = ['name', 'navigation']
- for field in required_fields:
- if field not in docs_data:
- self.errors.append(f"Missing required field: {field}")
- return False
- return True
-
- def extract_file_paths(self, pages: Any, collected: Set[str]):
- if isinstance(pages, str):
- collected.add(pages)
- elif isinstance(pages, dict):
- if 'pages' in pages:
- self.extract_file_paths(pages['pages'], collected)
- elif 'page' in pages:
- collected.add(pages['page'])
- elif isinstance(pages, list):
- for item in pages:
- self.extract_file_paths(item, collected)
-
- def get_language_section(self, docs_data: Dict, lang: str) -> Dict:
- nav = docs_data.get('navigation', {})
-
- if 'versions' in nav and len(nav['versions']) > 0:
- languages = nav['versions'][0].get('languages', [])
- elif 'languages' in nav:
- languages = nav['languages']
- else:
- return {}
-
- for lang_data in languages:
- if lang_data.get('language') == lang:
- return lang_data
- return {}
-
- def validate_file_existence(self, docs_data: Dict):
- for lang in self.all_langs:
- lang_section = self.get_language_section(docs_data, lang)
- if not lang_section:
- continue
-
- file_paths = set()
- if 'dropdowns' in lang_section:
- for dropdown in lang_section['dropdowns']:
- if 'pages' in dropdown:
- self.extract_file_paths(dropdown['pages'], file_paths)
-
- for file_path in file_paths:
- mdx_path = self.repo_root / f"{file_path}.mdx"
- md_path = self.repo_root / f"{file_path}.md"
-
- if not mdx_path.exists() and not md_path.exists():
- self.errors.append(f"Referenced file not found: {file_path} (.md or .mdx)")
-
- def validate_language_consistency(self, docs_data: Dict):
- sections = {}
- for lang in self.all_langs:
- lang_section = self.get_language_section(docs_data, lang)
- if lang_section:
- sections[lang] = lang_section
-
- if len(sections) < len(self.all_langs):
- missing = set(self.all_langs) - set(sections.keys())
- self.warnings.append(f"Missing language sections: {', '.join(missing)}")
-
- if len(sections) < 2:
- return
-
- source_section = sections.get(self.source_lang)
- if not source_section:
- return
-
- source_dropdowns = source_section.get('dropdowns', [])
- source_dropdown_count = len(source_dropdowns)
-
- for lang in self.target_langs:
- if lang not in sections:
- continue
-
- target_dropdowns = sections[lang].get('dropdowns', [])
- target_dropdown_count = len(target_dropdowns)
-
- if source_dropdown_count != target_dropdown_count:
- self.warnings.append(
- f"Dropdown count mismatch between {self.source_lang} ({source_dropdown_count}) "
- f"and {lang} ({target_dropdown_count})"
- )
-
- def validate_no_duplicate_paths(self, docs_data: Dict):
- for lang in self.all_langs:
- lang_section = self.get_language_section(docs_data, lang)
- if not lang_section:
- continue
-
- file_paths = set()
- duplicates = []
-
- def check_duplicates(pages: Any):
- if isinstance(pages, str):
- if pages in file_paths:
- duplicates.append(pages)
- file_paths.add(pages)
- elif isinstance(pages, dict):
- if 'pages' in pages:
- check_duplicates(pages['pages'])
- elif 'page' in pages:
- path = pages['page']
- if path in file_paths:
- duplicates.append(path)
- file_paths.add(path)
- elif isinstance(pages, list):
- for item in pages:
- check_duplicates(item)
-
- if 'dropdowns' in lang_section:
- for dropdown in lang_section['dropdowns']:
- if 'pages' in dropdown:
- check_duplicates(dropdown['pages'])
-
- if duplicates:
- self.errors.append(f"Duplicate file paths in {lang} section: {', '.join(duplicates)}")
-
- def run_validation(self) -> bool:
- docs_data = self.load_docs_json()
- if not docs_data:
- return False
-
- if not self.validate_json_schema(docs_data):
- return False
-
- self.validate_file_existence(docs_data)
- self.validate_language_consistency(docs_data)
- self.validate_no_duplicate_paths(docs_data)
-
- return len(self.errors) == 0
-
- if __name__ == "__main__":
- repo_root = Path.cwd()
- validator = DocsJsonValidator(repo_root)
-
- success = validator.run_validation()
-
- if validator.errors:
- print("ERRORS:")
- for error in validator.errors:
- print(f" ❌ {error}")
-
- if validator.warnings:
- print("\nWARNINGS:")
- for warning in validator.warnings:
- print(f" ⚠️ {warning}")
-
- if success:
- print("\n✅ docs.json validation passed")
- sys.exit(0)
- else:
- print(f"\n❌ docs.json validation failed with {len(validator.errors)} error(s)")
- sys.exit(1)
- EOFPYTHON
-
- - name: Run validation
- id: validate
- run: |
- python /tmp/validate_docs_json.py 2>&1 | tee /tmp/validation_output.log
- VALIDATION_EXIT_CODE=${PIPESTATUS[0]}
-
- if [ $VALIDATION_EXIT_CODE -ne 0 ]; then
- echo "validation_failed=true" >> $GITHUB_OUTPUT
- exit 1
- else
- echo "validation_failed=false" >> $GITHUB_OUTPUT
- fi
-
- - name: Comment validation results on PR
- if: failure() && steps.validate.outputs.validation_failed == 'true'
- uses: actions/github-script@v7
- with:
- script: |
- const fs = require('fs');
- let validationLog = '';
-
- try {
- validationLog = fs.readFileSync('/tmp/validation_output.log', 'utf8');
- } catch (e) {
- validationLog = 'Could not read validation log';
- }
-
- const comment = `## ❌ docs.json Validation Failed
-
- The docs.json file has structural issues that need to be fixed:
-
- \`\`\`
- ${validationLog}
- \`\`\`
-
- Please ensure:
- - All referenced files exist in the repository
- - Language sections (en/zh/ja) are consistent
- - No duplicate file paths within a language section
- - Required fields are present (name, navigation)
- - Valid JSON syntax
-
- Refer to the [Mintlify docs.json schema](https://mintlify.com/docs.json) for more details.`;
-
- await github.rest.issues.createComment({
- owner: context.repo.owner,
- repo: context.repo.repo,
- issue_number: context.issue.number,
- body: comment
- });
diff --git a/AGENTS.md b/AGENTS.md
index 3cf00e69..a6c97c37 100644
--- a/AGENTS.md
+++ b/AGENTS.md
@@ -1,11 +1,5 @@
# Mintlify documentation
-This file provides guidance when working with the Dify documentation repository.
-
-## Project Overview
-
-Dify documentation repository built with Mintlify, supporting multi-language documentation (English, Chinese, Japanese) with AI-powered automatic translation via Dify workflows.
-
## Working relationship
- You can push back on ideas-this can lead to better documentation. Cite sources and explain your reasoning when you do so
- ALWAYS ask for clarification rather than making assumptions
@@ -16,12 +10,6 @@ Dify documentation repository built with Mintlify, supporting multi-language doc
- Config: docs.json for navigation, theme, settings
- Components: Mintlify components
-## Documentation Structure
-
-- **Format**: MDX files with YAML frontmatter (`title` and `description` required)
-- **Languages**: `en/`, `zh/`, `ja/` (source: en)
-- **Configuration**: `docs.json` (navigation structure) - [Mintlify schema](https://mintlify.com/docs.json)
-
## Content strategy
- Document just enough for user success - not too much, not too little
- Prioritize accuracy and usability
@@ -30,6 +18,10 @@ Dify documentation repository built with Mintlify, supporting multi-language doc
- Check existing patterns for consistency
- Start by making the smallest reasonable changes
+## docs.json
+
+- Refer to the [docs.json schema](https://mintlify.com/docs.json) when building the docs.json file and site navigation
+
## Frontmatter requirements for pages
- title: Clear, descriptive page title
- description: Concise summary for SEO/navigation
@@ -44,158 +36,15 @@ Dify documentation repository built with Mintlify, supporting multi-language doc
- Alt text on all images
- Relative paths for internal links
-## Translation System
-
-### Language Configuration
-
-All language settings in `tools/translate/config.json` (single source of truth):
-
-```json
-{
- "source_language": "en",
- "target_languages": ["zh", "ja"],
- "languages": {
- "en": {"code": "en", "name": "English", "directory": "en"},
- "zh": {
- "code": "zh",
- "name": "Chinese",
- "directory": "zh",
- "translation_notice": "⚠️ AI translation..."
- },
- "ja": {
- "code": "ja",
- "name": "Japanese",
- "directory": "ja",
- "translation_notice": "⚠️ AI translation..."
- }
- }
-}
-```
-
-**Adding new language**: Edit config.json only - add to `target_languages` and `languages` object with required fields (`code`, `name`, `directory`, `translation_notice`).
-
-### Workflow
-
-- **Trigger**: Push to non-main branches with `.md/.mdx` changes in `en/`
-- **Process**: Dify API streaming mode with terminology database (`termbase_i18n.md`)
-- **Timing**: New files ~30-60s/lang | Modified files ~2-3min/lang (context-aware with git diff)
-- **Auto-operations**: Translation notices, incremental docs.json sync
-
-### Surgical Reconciliation (Move & Rename)
-
-**Purpose**: Detect and apply structural changes (moves, renames) from English section to zh/ja automatically.
-
-**How it works**:
-1. Compares English section between base commit and HEAD
-2. Detects **moves** (same file, different `group_path`) and **renames** (deleted+added in same location)
-3. Applies identical operations to zh/ja using **index-based navigation**
-
-**Index-based navigation**:
-- Groups matched by position index, not name (works across translations: "Nodes" ≠ "节点")
-- Location tracked as `group_indices: [0, 1]` (parent group index 0, child index 1)
-- Navigates nested structures regardless of translated group names
-
-**Rename specifics**:
-- Detects file extension (.md, .mdx) from physical file
-- Preserves extension when renaming zh/ja files
-- Updates docs.json entries (stored without extensions)
-
-### Navigation Sync Behavior
-
-**Manual editing**: Only edit English (`en`) section in docs.json - workflow syncs to zh/ja automatically.
-
-**Auto-sync operations**:
-- **Added files**: Fresh translation, inserted at same index position as English
-- **Modified files**: Context-aware update using existing translation + git diff
-- **Deleted files**: Removed from all language sections + physical files
-- **Moved files**: Detected via `group_path` changes, zh/ja relocated using index-based navigation
-- **Renamed files**: Detected when deleted+added in same location, physical files renamed with extension preserved
-
-## Development Commands
-
-```bash
-# Local preview
-npm i -g mintlify
-mintlify dev
-
-# Local translation testing
-pip install -r tools/translate/requirements.txt
-echo "DIFY_API_KEY=your_key" > tools/translate/.env
-python tools/translate/main.py
-```
-
-**Configuration**:
-- Terminology: `tools/translate/termbase_i18n.md`
-- Languages: `tools/translate/config.json`
-- Model: Configure in Dify Studio
-
## Git workflow
-- NEVER use `--no-verify` or skip hooks
+- NEVER use --no-verify when committing
- Ask how to handle uncommitted changes before starting
-- Create new branch for each feature/fix
+- Create a new branch when no clear branch exists for changes
- Commit frequently throughout development
- NEVER skip or disable pre-commit hooks
-## Testing & Debugging
-
-### Test Translation Workflow
-
-Create test PR with branch name `test/{operation}-{scope}`:
-
-- **Add**: New file + docs.json entry
-- **Delete**: Remove file + docs.json entry
-- **Update**: Modify existing file content
-- **Move**: Move file between groups in docs.json (e.g., Getting Started → Nodes)
-- **Rename**: Rename file + update docs.json entry (tests extension preservation)
-
-### Common Issues
-
-**Translation failures**:
-- **HTTP 504**: Verify `response_mode: "streaming"` in `main.py` (NOT `"blocking"`)
-- **Missing output**: Check Dify workflow has output variable `output1`
-- **Failed workflow**: Review Dify workflow logs for node errors
-
-**Move/Rename issues**:
-- **Not detected**: Check logs for "INFO: Detected X moves, Y renames" - if 0 when expecting changes, verify `group_path` actually changed between commits
-- **Wrong location**: Structure mismatch between languages - verify group indices align (same nested structure)
-- **File not found**: Extension detection failed - ensure file has .md or .mdx extension
-
-**Success log pattern**:
-```
-INFO: Detected 1 moves, 0 renames, 0 adds, 0 deletes
-INFO: Moving en/test-file from 'Dropdown > GroupA' to 'Dropdown > GroupB'
-SUCCESS: Moved zh/test-file to new location
-SUCCESS: Moved ja/test-file to new location
-```
-
-## Translation A/B Testing
-
-For comparing translation quality between models or prompt variations:
-
-```bash
-cd tools/translate-test-dify
-./setup.sh
-source venv/bin/activate
-python run_test.py
-python compare.py results/<timestamp>/
-```
-
-**Important**:
-- Never commit `results/`, `mock_docs/`, or real API keys
-- Always redact keys with `app-***` before committing
-- See `tools/translate-test-dify/README.md` for details
-
-## Key Paths
-
-- `docs.json` - Navigation structure
-- `tools/translate/config.json` - Language configuration (single source of truth)
-- `tools/translate/termbase_i18n.md` - Translation terminology database
-- `tools/translate/sync_and_translate.py` - Core translation + surgical reconciliation logic
-- `tools/translate-test-dify/` - Translation A/B testing framework
-- `.github/workflows/sync_docs_*.yml` - Auto-translation workflow triggers
-
## Do not
- Skip frontmatter on any MDX file
- Use absolute URLs for internal links
- Include untested code examples
-- Make assumptions - always ask for clarification
+- Make assumptions - always ask for clarification
\ No newline at end of file
diff --git a/CLAUDE.md b/CLAUDE.md
deleted file mode 120000
index 47dc3e3d..00000000
--- a/CLAUDE.md
+++ /dev/null
@@ -1 +0,0 @@
-AGENTS.md
\ No newline at end of file
diff --git a/CLAUDE.md b/CLAUDE.md
new file mode 100644
index 00000000..782106a8
--- /dev/null
+++ b/CLAUDE.md
@@ -0,0 +1,155 @@
+# CLAUDE.md
+
+This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
+
+## Project Overview
+
+Dify documentation repository built with Mintlify, supporting multi-language documentation (English, Chinese, Japanese) with AI-powered automatic translation via Dify workflows.
+
+## Documentation Structure
+
+- **Format**: MDX files with YAML frontmatter (`title` and `description` required)
+- **Languages**: `en/`, `cn/`, `jp/` (source: en)
+- **Configuration**: `docs.json` (navigation structure) - [Mintlify schema](https://mintlify.com/docs.json)
+
+## Translation System
+
+### Language Configuration
+
+All language settings in `tools/translate/config.json` (single source of truth):
+
+```json
+{
+ "source_language": "en",
+ "target_languages": ["cn", "jp"],
+ "languages": {
+ "en": {"code": "en", "name": "English", "directory": "en"},
+ "cn": {
+ "code": "cn",
+ "name": "Chinese",
+ "directory": "cn",
+ "translation_notice": "⚠️ AI translation..."
+ }
+ }
+}
+```
+
+**Adding new language**: Edit config.json only - add to `target_languages` and `languages` object with required fields.
+
+### Workflow
+
+- **Trigger**: Push to non-main branches with `.md/.mdx` changes in `en/`
+- **Process**: Dify API streaming mode with terminology database (`termbase_i18n.md`)
+- **Timing**: New files ~30-60s/lang | Modified files ~2-3min/lang (context-aware with git diff)
+- **Auto-operations**: Translation notices, incremental docs.json sync
+
+### Surgical Reconciliation (Move & Rename)
+
+**Purpose**: Detect and apply structural changes (moves, renames) from English section to cn/jp automatically.
+
+**How it works**:
+1. Compares English section between base commit and HEAD
+2. Detects **moves** (same file, different `group_path`) and **renames** (deleted+added in same location)
+3. Applies identical operations to cn/jp using **index-based navigation**
+
+**Index-based navigation**:
+- Groups matched by position index, not name (works across translations: "Nodes" ≠ "节点")
+- Location tracked as `group_indices: [0, 1]` (parent group index 0, child index 1)
+- Navigates nested structures regardless of translated group names
+
+**Rename specifics**:
+- Detects file extension (.md, .mdx) from physical file
+- Preserves extension when renaming cn/jp files
+- Updates docs.json entries (stored without extensions)
+
+### Navigation Sync Behavior
+
+**Manual editing**: Only edit English (`en`) section in docs.json - workflow syncs to cn/jp automatically.
+
+**Auto-sync operations**:
+- **Added files**: Fresh translation, inserted at same index position as English
+- **Modified files**: Context-aware update using existing translation + git diff
+- **Deleted files**: Removed from all language sections + physical files
+- **Moved files**: Detected via `group_path` changes, cn/jp relocated using index-based navigation
+- **Renamed files**: Detected when deleted+added in same location, physical files renamed with extension preserved
+
+## Development Commands
+
+```bash
+# Local preview
+npm i -g mintlify
+mintlify dev
+
+# Local translation testing
+pip install -r tools/translate/requirements.txt
+echo "DIFY_API_KEY=your_key" > tools/translate/.env
+python tools/translate/main.py
+```
+
+**Configuration**:
+- Terminology: `tools/translate/termbase_i18n.md`
+- Languages: `tools/translate/config.json`
+- Model: Configure in Dify Studio
+
+**Git Rules**:
+- NEVER use `--no-verify` or skip hooks
+- Create new branch for each feature/fix
+- Commit frequently with descriptive messages
+
+## Testing & Debugging
+
+### Test Translation Workflow
+
+Create test PR with branch name `test/{operation}-{scope}`:
+
+- **Add**: New file + docs.json entry
+- **Delete**: Remove file + docs.json entry
+- **Update**: Modify existing file content
+- **Move**: Move file between groups in docs.json (e.g., Getting Started → Nodes)
+- **Rename**: Rename file + update docs.json entry (tests extension preservation)
+
+### Common Issues
+
+**Translation failures**:
+- **HTTP 504**: Verify `response_mode: "streaming"` in `main.py` (NOT `"blocking"`)
+- **Missing output**: Check Dify workflow has output variable `output1`
+- **Failed workflow**: Review Dify workflow logs for node errors
+
+**Move/Rename issues**:
+- **Not detected**: Check logs for "INFO: Detected X moves, Y renames" - if 0 when expecting changes, verify `group_path` actually changed between commits
+- **Wrong location**: Structure mismatch between languages - verify group indices align (same nested structure)
+- **File not found**: Extension detection failed - ensure file has .md or .mdx extension
+
+**Success log pattern**:
+```
+INFO: Detected 1 moves, 0 renames, 0 adds, 0 deletes
+INFO: Moving en/test-file from 'Dropdown > GroupA' to 'Dropdown > GroupB'
+SUCCESS: Moved cn/test-file to new location
+SUCCESS: Moved jp/test-file to new location
+```
+
+## Translation A/B Testing
+
+For comparing translation quality between models or prompt variations:
+
+```bash
+cd tools/translate-test-dify
+./setup.sh
+source venv/bin/activate
+python run_test.py
+python compare.py results/<timestamp>/
+```
+
+**Important**:
+- Never commit `results/`, `mock_docs/`, or real API keys
+- Always redact keys with `app-***` before committing
+- See `tools/translate-test-dify/README.md` for details
+
+## Key Paths
+
+- `docs.json` - Navigation structure
+- `tools/translate/config.json` - Language configuration (single source of truth)
+- `tools/translate/termbase_i18n.md` - Translation terminology database
+- `tools/translate/sync_and_translate.py` - Core translation + surgical reconciliation logic
+- `tools/translate-test-dify/` - Translation A/B testing framework
+- `.github/workflows/sync_docs_*.yml` - Auto-translation workflow triggers
diff --git a/docs.json b/docs.json
index 415ce74c..776f2392 100644
--- a/docs.json
+++ b/docs.json
@@ -5035,10 +5035,6 @@
"source": "/en/guides/model-configuration/load-balancing",
"destination": "/en/use-dify/workspace/model-providers#configure-model-load-balancing"
},
- {
- "source": "/guides/model-configuration/load-balancing",
- "destination": "/en/use-dify/workspace/model-providers#configure-model-load-balancing"
- },
{
"source": "/zh-hans/guides/model-configuration/load-balancing",
"destination": "/zh/use-dify/workspace/model-providers#配置模型负载均衡"
diff --git a/en/use-dify/getting-started/key-concepts.mdx b/en/use-dify/getting-started/key-concepts.mdx
index 7365b2dd..293cf533 100644
--- a/en/use-dify/getting-started/key-concepts.mdx
+++ b/en/use-dify/getting-started/key-concepts.mdx
@@ -83,7 +83,7 @@ Additionally, the User Input node comes with a set of input variables that you c
| `sys.app_id` | String | App ID: A unique identifier automatically assigned by the system to each App. This parameter is used to record the basic information of the current application. | This parameter is used to differentiate and locate distinct Workflow applications for users with development capabilities. |
| `sys.workflow_id` | String | Workflow ID: This parameter records information about all nodes information in the current Workflow application. | This parameter can be used by users with development capabilities to track and record information about the nodes contained within a Workflow. |
| `sys.workflow_run_id` | String | Workflow Run ID: Used to record the runtime status and execution logs of a Workflow application. | This parameter can be used by users with development capabilities to track the application's historical execution records. |
- | `sys.timestamp` | Number | The start time of each workflow execution. | |
+ | `sys.timestamp` | String | The start time of each workflow execution. | |
diff --git a/plugin-dev-en/0321-plugin-auto-publish-pr.mdx b/plugin-dev-en/0321-plugin-auto-publish-pr.mdx
index 294d67cf..5c26f920 100644
--- a/plugin-dev-en/0321-plugin-auto-publish-pr.mdx
+++ b/plugin-dev-en/0321-plugin-auto-publish-pr.mdx
@@ -136,7 +136,7 @@ Once set up, the workflow automatically handles these parameters:
cd $RUNNER_TEMP/bin
# Download CLI tool
- wget https://github.com/langgenius/dify-plugin-daemon/releases/latest/download/dify-plugin-linux-amd64
+ wget https://github.com/langgenius/dify-plugin-daemon/releases/download/0.0.6/dify-plugin-linux-amd64
chmod +x dify-plugin-linux-amd64
# Show download location and file
diff --git a/plugin-dev-ja/0321-plugin-auto-publish-pr.mdx b/plugin-dev-ja/0321-plugin-auto-publish-pr.mdx
index 0bd749b2..94bbcc46 100644
--- a/plugin-dev-ja/0321-plugin-auto-publish-pr.mdx
+++ b/plugin-dev-ja/0321-plugin-auto-publish-pr.mdx
@@ -135,7 +135,7 @@ dify-plugins/
cd $RUNNER_TEMP/bin
# Download CLI tool
- wget https://github.com/langgenius/dify-plugin-daemon/releases/latest/download/dify-plugin-linux-amd64
+ wget https://github.com/langgenius/dify-plugin-daemon/releases/download/0.0.6/dify-plugin-linux-amd64
chmod +x dify-plugin-linux-amd64
# Show download location and file
diff --git a/plugin-dev-zh/0321-plugin-auto-publish-pr.mdx b/plugin-dev-zh/0321-plugin-auto-publish-pr.mdx
index 25d4eb6a..ee398c7e 100644
--- a/plugin-dev-zh/0321-plugin-auto-publish-pr.mdx
+++ b/plugin-dev-zh/0321-plugin-auto-publish-pr.mdx
@@ -136,7 +136,7 @@ dify-plugins/
cd $RUNNER_TEMP/bin
# Download CLI tool
- wget https://github.com/langgenius/dify-plugin-daemon/releases/latest/download/dify-plugin-linux-amd64
+ wget https://github.com/langgenius/dify-plugin-daemon/releases/download/0.0.6/dify-plugin-linux-amd64
chmod +x dify-plugin-linux-amd64
# Show download location and file
diff --git a/versions/3-0-x/en/user-guide/introduction 2.mdx b/versions/3-0-x/en/user-guide/introduction 2.mdx
new file mode 100644
index 00000000..2c0dc3e5
--- /dev/null
+++ b/versions/3-0-x/en/user-guide/introduction 2.mdx
@@ -0,0 +1,33 @@
+---
+title: "Introduction"
+mode: "wide"
+---
+
+Dify is an open-source platform for building agentic workflows. It lets you define processes visually, connect your existing tools and data sources, and deploy AI applications that solve real problems.
+
+
+
+
+
+ Start shipping powerful apps in minutes
+
+
+ Core Dify building blocks explained
+
+
+ Deploy Dify on your own laptop / server
+
+
+ Trade notes with the community
+
+
+ What's changed over past releases
+
+
+ Example Dify use case walkthroughs
+
+
+
+
+ The name Dify comes from **D**o **I**t **F**or **Y**ou.
+
diff --git a/versions/3-0-x/ja/user-guide/introduction 2.mdx b/versions/3-0-x/ja/user-guide/introduction 2.mdx
new file mode 100644
index 00000000..1e2f693f
--- /dev/null
+++ b/versions/3-0-x/ja/user-guide/introduction 2.mdx
@@ -0,0 +1,35 @@
+---
+title: "はじめに"
+mode: "wide"
+---
+
+ ⚠️ このドキュメントはAIによって自動翻訳されています。不正確な部分がある場合は、[英語版](/en/use-dify/getting-started/introduction)を参照してください。
+
+Difyは、AIワークフローを構築するためのオープンソースプラットフォームです。ビジュアルキャンバス上でAIモデルを編成し、データソースを接続し、処理フローを定義することで、ドメイン知識を直接動作するソフトウェアに変換できます。
+
+
+
+
+
+ 数分で強力なアプリの構築を開始
+
+
+ Difyの核となる構成要素の説明
+
+
+ 自分のノートパソコン/サーバーにDifyをインストールするガイド
+
+
+ コミュニティとの情報交換
+
+
+ 過去のリリースでの変更内容
+
+
+ Difyのユースケース例のウォークスルー
+
+
+
+
+ Difyという名前は、**D**o **I**t **F**or **Y**ou(あなたのためにそれを行う)に由来しています。
+
diff --git a/versions/3-0-x/ja/user-guide/knowledge-base/api-documentation/external-knowledge-api 2.json b/versions/3-0-x/ja/user-guide/knowledge-base/api-documentation/external-knowledge-api 2.json
new file mode 100644
index 00000000..24430268
--- /dev/null
+++ b/versions/3-0-x/ja/user-guide/knowledge-base/api-documentation/external-knowledge-api 2.json
@@ -0,0 +1,192 @@
+{
+ "openapi": "3.0.1",
+ "info": { "title": "Dify-test", "description": "", "version": "1.0.0" },
+ "tags": [],
+ "paths": {
+ "/retrieval": {
+ "post": {
+ "summary": "知识召回 API",
+ "deprecated": false,
+ "description": "该 API 用于连接团队内独立维护的知识库,如需了解更多操作指引,请参考阅读[连接外部知识库](/zh-hans/user-guide/knowledge-base/knowledge-base-creation/connect-external-knowledge-base)。你可以在 Authorization HTTP 头部中使用 API-Key 来验证权限,认证逻辑由开发者在检索 API 中定义,如下所示:\n\n```text\nAuthorization: Bearer {API_KEY}\n```",
+ "tags": [],
+ "requestBody": {
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "properties": {
+ "knowledge_id": {
+ "type": "string",
+ "description": "你的知识库唯一 ID"
+ },
+ "query": { "type": "string", "description": "用户的提问" },
+ "retrieval_setting": {
+ "type": "object",
+ "properties": {
+ "top_k": {
+ "type": "integer",
+ "description": "检索结果的最大数量"
+ },
+ "score_threshold": {
+ "type": "number",
+ "description": "结果与查询相关性的分数限制,范围 0~1",
+ "format": "float",
+ "minimum": 0,
+ "maximum": 1
+ }
+ },
+ "description": "知识库的检索参数",
+ "required": ["top_k", "score_threshold"]
+ }
+ },
+ "required": ["knowledge_id", "query", "retrieval_setting"]
+ },
+ "example": {
+ "knowledge_id": "your-knowledge-id",
+ "query": "your question",
+ "retrieval_setting": { "top_k": 2, "score_threshold": 0.5 }
+ }
+ }
+ }
+ },
+ "responses": {
+ "200": {
+ "description": "如果操作成功,服务将发回 HTTP 200 响应。",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "object",
+ "properties": {
+ "records": {
+ "type": "object",
+ "properties": {
+ "content": {
+ "type": "string",
+ "description": "包含知识库中数据源的一段文本。"
+ },
+ "score": {
+ "type": "number",
+ "format": "float",
+ "description": "结果与查询相关性的分数,范围: 0~1"
+ },
+ "title": {
+ "type": "string",
+ "description": "文档标题"
+ },
+ "metadata": {
+ "type": "string",
+ "description": "包含数据源中文档的元数据属性及其值。"
+ }
+ },
+ "title": "从知识库查询的记录列表",
+ "required": ["content", "score", "title"]
+ }
+ },
+ "required": ["records"]
+ },
+ "examples": {
+ "1": {
+ "summary": "Success",
+ "value": {
+ "records": [
+ {
+ "metadata": {
+ "path": "s3://dify/knowledge.txt",
+ "description": "dify 知识文档"
+ },
+ "score": 0.98,
+ "title": "knowledge.txt",
+ "content": "外部知识的文件"
+ },
+ {
+ "metadata": {
+ "path": "s3://dify/introduce.txt",
+ "description": "Dify 介绍"
+ },
+ "score": 0.66,
+ "title": "introduce.txt",
+ "content": "The Innovation Engine for GenAI Applications"
+ }
+ ]
+ }
+ }
+ }
+ }
+ },
+ "headers": {}
+ },
+ "403": {
+ "description": "请求被拒绝,因为缺少访问权限。请检查你的权限并再次发起请求。",
+ "content": {
+ "application/json": {
+ "schema": {
+ "title": "",
+ "type": "object",
+ "properties": {
+ "error_code": {
+ "type": "integer",
+ "description": "错误码"
+ },
+ "error_msg": {
+ "type": "string",
+ "description": "API 异常描述"
+ }
+ },
+ "required": ["error_code", "error_msg"]
+ },
+ "examples": {
+ "1": {
+ "summary": "Errors",
+ "value": {
+ "error_code": 1001,
+ "error_msg": "无效的鉴权头格式,预期应为 'Bearer ' 格式。"
+ }
+ }
+ }
+ }
+ },
+ "headers": {}
+ },
+ "500": {
+ "description": "发生了内部服务器错误,请稍后重试。",
+ "content": {
+ "application/json": {
+ "schema": {
+ "title": "",
+ "type": "object",
+ "properties": {
+ "error_code": {
+ "type": "integer",
+ "description": "错误码"
+ },
+ "error_msg": {
+ "type": "string",
+ "description": "API 异常描述"
+ }
+ },
+ "required": ["error_code", "error_msg"]
+ },
+ "examples": {
+ "1": {
+ "summary": "Errors",
+ "value": {
+ "error_code": 1001,
+ "error_msg": "Invalid Authorization header format. Expected 'Bearer ' format."
+ }
+ }
+ }
+ }
+ },
+ "headers": {}
+ }
+ },
+ "security": [{ "bearer": [] }]
+ }
+ }
+ },
+ "components": {
+ "schemas": {},
+ "securitySchemes": { "bearer": { "type": "http", "scheme": "bearer" } }
+ },
+ "servers": [{ "url": "your-endpoint", "description": "test-environment" }]
+}
diff --git a/versions/3-0-x/ja/user-guide/workflow/debug-and-preview/history 2.mdx b/versions/3-0-x/ja/user-guide/workflow/debug-and-preview/history 2.mdx
new file mode 100644
index 00000000..ea2749b7
--- /dev/null
+++ b/versions/3-0-x/ja/user-guide/workflow/debug-and-preview/history 2.mdx
@@ -0,0 +1,8 @@
+---
+title: 実行履歴
+---
+
+
+「実行履歴」では、現在のワークフローのデバッグ履歴の実行結果およびログ情報を確認できます。
+
+
diff --git a/versions/3-0-x/ja/user-guide/workflow/debug-and-preview/log 2.mdx b/versions/3-0-x/ja/user-guide/workflow/debug-and-preview/log 2.mdx
new file mode 100644
index 00000000..7879cf26
--- /dev/null
+++ b/versions/3-0-x/ja/user-guide/workflow/debug-and-preview/log 2.mdx
@@ -0,0 +1,12 @@
+---
+title: 対話/実行ログ
+---
+
+
+「ログを表示—詳細」をクリックすると、詳細情報、入力/出力、メタデータ情報などの実行概要を見ることができます。
+
+
+
+「ログを表示-追跡」をクリックすると、ワークフローの各ノードの入力/出力、トークン消費、実行時間などの完全な実行過程を見ることができます。
+
+
diff --git a/versions/3-0-x/ja/user-guide/workflow/debug-and-preview/preview-and-run 2.mdx b/versions/3-0-x/ja/user-guide/workflow/debug-and-preview/preview-and-run 2.mdx
new file mode 100644
index 00000000..312367ef
--- /dev/null
+++ b/versions/3-0-x/ja/user-guide/workflow/debug-and-preview/preview-and-run 2.mdx
@@ -0,0 +1,68 @@
+---
+title: 機能紹介
+---
+
+ワークフローを作成した後、開発者はアプリケーションを公開する前に、各ノードが正常に動作しているかどうかを確認し、変数内のデータが正しく流れているかを検証するためにデバッグを行うことができます。
+
+Difyは完全なデバッグツールを提供し、問題の迅速な特定、変数の受け渡しの確認、詳細なログの表示をサポートします。
+
+本記事では、ワークフローアプリケーションのデバッグの全プロセスを紹介します。これには、単一ノードのデバッグ、変数チェック、実行履歴の確認、および全体テスト方法が含まれます。
+
+
+
+## ワークフローアプリケーションのデバッグ
+
+### ノードデバッグ
+
+ワークフロー内の特定のノードに問題が発生し、ワークフローアプリケーションが正常に実行できない場合、そのノードを単独でデバッグして問題を迅速に特定します。
+
+ノードの右上にあるデバッグボタンをクリックし、テストデータを入力して出力結果を確認します。デバッグの過程では、シンプルなテストデータから始めて、徐々にデータの複雑さを増やし、さまざまな入力状況でノードが正常に動作することを確認することをお勧めします。
+
+単一ノードのデバッグ方法の詳細については、[ノードのデバッグ](/ja-jp/guides/workflow/debug-and-preview/step-run)を参照してください。
+
+
+
+### ステップデバッグ
+
+ステップデバッグでは、上流ノードの変数に基づいて、下流ノードが変数内のデータを正しく処理し、期待される結果を出力しているかを確認します。
+
+詳しい使用方法については、変数チェックを参照してください。
+
+
+
+### ノードの実行記録の確認
+
+ノードを実行すると、詳細な実行記録が生成され、開発者の問題解決をサポートします。ノードの詳細ページの「前回の実行」をクリックすると、入力パラメータ、出力結果、実行時間、エラー情報などの実行ログを確認できます。詳細については[デバッグ履歴 - ノード実行記録](/ja-jp/guides/workflow/debug-and-preview/history-and-logs#アプリケーション実行履歴)を参照してください。
+
+
+
+### 全体フローのデバッグ
+
+すべてのノードのデバッグが完了したら、次にワークフロー全体を実行してエンドツーエンドの実行効果を検証します。
+
+プレビュー機能を使用してアプリケーションの最終ユーザーインターフェースを体験できます。このモードでは、入力フィールド、出力表示、インタラクションプロセスなどが完全に表示され、主にユーザー体験が期待通りかを検証するために使用されます。システムはプレビュー時に自動チェックを行い、すべてのノードが正しく設定され、接続が完全であることを確認します。
+
+
+
+#### チェックリスト
+
+システムはワークフロー設定を自動的にチェックし、ノード接続エラーや必須パラメータの欠落などの問題が見つかった場合、画面右上に通知を表示します。すべての問題を解決した後にのみアプリケーションを実行できます。
+
+
+
+ワークフローを実行する際には、右側のパネルで各ノードの実行状態(入力パラメータ、出力結果、実行ログを含む)を確認できます。ノード上のステータスインジケーター(緑色は成功を示す)を通じて、フローがどのノードで異常が発生して中断したかを素早く識別できます。
+
+正式リリース前に複数回のテストを行い、異なる種類や複雑さのデータを使用してワークフローの安定性を検証することをお勧めします。
+
+
+
+## アプリケーションの実行ログの確認
+
+アプリケーションを公開した後、開発者はログとモニタリング機能を通じて、ユーザーがアプリケーションを使用した際の対話記録やパフォーマンスデータを確認できます。この履歴記録は、ユーザーの行動パスの確認、アプリケーションのパフォーマンス変化の分析、問題の原因の振り返りに役立ちます。
+
+
+
+詳細については以下のドキュメントを参照してください:
+
+- [実行履歴とログ](/ja-jp/guides/workflow/debug-and-preview/history-and-logs)
+- [モニタリング](/ja-jp/guides/monitoring/README)
diff --git a/versions/3-0-x/ja/user-guide/workflow/debug-and-preview/step-run 2.mdx b/versions/3-0-x/ja/user-guide/workflow/debug-and-preview/step-run 2.mdx
new file mode 100644
index 00000000..41d2d6c6
--- /dev/null
+++ b/versions/3-0-x/ja/user-guide/workflow/debug-and-preview/step-run 2.mdx
@@ -0,0 +1,12 @@
+---
+title: ステップ実行
+---
+
+
+ワークフローはノードのステップの実行をサポートしており、ステップを実行中に現在のノードの実行が期待通りかどうかを繰り返しテストすることができます。
+
+
+
+ステップテスト実行後、実行ステータス、入力/出力、メタデータ情報を確認することができます。
+
+
diff --git a/versions/3-0-x/zh/user-guide/introduction 2.mdx b/versions/3-0-x/zh/user-guide/introduction 2.mdx
new file mode 100644
index 00000000..6838ca75
--- /dev/null
+++ b/versions/3-0-x/zh/user-guide/introduction 2.mdx
@@ -0,0 +1,35 @@
+---
+title: "介绍"
+mode: "wide"
+---
+
+ ⚠️ 本文档由 AI 自动翻译。如有任何不准确之处,请参考[英文原版](/en/use-dify/getting-started/introduction)。
+
+Dify 是一个用于构建 AI 工作流的开源平台。通过在可视化画布上编排 AI 模型、连接数据源、定义处理流程,直接将你的领域知识转化为可运行的软件。
+
+
+
+
+
+ 数分钟内开始构建强大的应用
+
+
+ 核心 Dify 构建模块解释
+
+
+ 在你的笔记本电脑/服务器上安装 Dify
+
+
+ 与社区交流心得
+
+
+ 查看过往版本的更新内容
+
+
+ Dify 用例示例演练
+
+
+
+
+ Dify 这个名字来自 **D**o **I**t **F**or **Y**ou。
+
diff --git a/versions/3-2-x/en/user-guide/introduction 2.mdx b/versions/3-2-x/en/user-guide/introduction 2.mdx
new file mode 100644
index 00000000..2c0dc3e5
--- /dev/null
+++ b/versions/3-2-x/en/user-guide/introduction 2.mdx
@@ -0,0 +1,33 @@
+---
+title: "Introduction"
+mode: "wide"
+---
+
+Dify is an open-source platform for building agentic workflows. It lets you define processes visually, connect your existing tools and data sources, and deploy AI applications that solve real problems.
+
+
+
+
+
+ Start shipping powerful apps in minutes
+
+
+ Core Dify building blocks explained
+
+
+ Deploy Dify on your own laptop / server
+
+
+ Trade notes with the community
+
+
+ What's changed over past releases
+
+
+ Example Dify use case walkthroughs
+
+
+
+
+ The name Dify comes from **D**o **I**t **F**or **Y**ou.
+
diff --git a/versions/3-2-x/ja/user-guide/introduction 2.mdx b/versions/3-2-x/ja/user-guide/introduction 2.mdx
new file mode 100644
index 00000000..1e2f693f
--- /dev/null
+++ b/versions/3-2-x/ja/user-guide/introduction 2.mdx
@@ -0,0 +1,35 @@
+---
+title: "はじめに"
+mode: "wide"
+---
+
+ ⚠️ このドキュメントはAIによって自動翻訳されています。不正確な部分がある場合は、[英語版](/en/use-dify/getting-started/introduction)を参照してください。
+
+Difyは、AIワークフローを構築するためのオープンソースプラットフォームです。ビジュアルキャンバス上でAIモデルを編成し、データソースを接続し、処理フローを定義することで、ドメイン知識を直接動作するソフトウェアに変換できます。
+
+
+
+
+
+ 数分で強力なアプリの構築を開始
+
+
+ Difyの核となる構成要素の説明
+
+
+ 自分のノートパソコン/サーバーにDifyをインストールするガイド
+
+
+ コミュニティとの情報交換
+
+
+ 過去のリリースでの変更内容
+
+
+ Difyのユースケース例のウォークスルー
+
+
+
+
+ Difyという名前は、**D**o **I**t **F**or **Y**ou(あなたのためにそれを行う)に由来しています。
+
diff --git a/versions/3-2-x/ja/user-guide/workflow/debug-and-preview/variable-inspect 2.mdx b/versions/3-2-x/ja/user-guide/workflow/debug-and-preview/variable-inspect 2.mdx
new file mode 100644
index 00000000..6a662422
--- /dev/null
+++ b/versions/3-2-x/ja/user-guide/workflow/debug-and-preview/variable-inspect 2.mdx
@@ -0,0 +1,69 @@
+---
+title: 変数インスペクター
+---
+
+**変数インスペクター**は、ワークフローエディターの下部に配置されているパネルです。このパネルは、実行後の各ノードの入力データと出力データをキャッチし、開発者がワークフロー全体のデータフローにおける問題を素早く特定し、検証するのに役立ちます。
+
+## なぜ重要か
+
+[変数](/versions/3-0-x/ja-jp/user-guide/workflow/variables)はノード間の主要な接続点として機能し、動的なフロー制御を可能にします。ワークフローアプリケーションでは、変数はユーザーからの入力だけでなく、各ノードからの出力(LLMの応答、ツールの結果、会話のコンテキスト、環境パラメータなど)を保存します。
+
+変数インスペクターは、実行後に各ノードの入力パラメータと出力結果を自動的にキャッシュします。以下の主な機能を提供しています:
+
+- **リアルタイムの変数監視**:ノードが実行された直後にワークフローの入出力を含むすべての変数を捕捉して表示します。
+- **キャッシュされた変数の編集**:上流のノードを再実行せずに、ほとんどのタイプの変数を手動で変更することができます — 異なるシナリオをテストするのに最適です。
+- **異常データの追跡**:各変数の実際の値とタイプを表示し、フォーマットの問題や欠落フィールド、型の不一致を特定しやすくします。
+- **グローバル状態管理**:システム変数、環境変数、カスタム定義されたノード変数を含むすべての変数を1つのパネルに表示します。
+
+変数インスペクターを使用することで、任意のノードから最新の出力変数を簡単に確認・編集でき、変更が下流のノードにどのように影響するかを確認できます。
+
+## 使用方法
+
+変数インスペクターはキャンバスの下部に位置し、ワークフローが実行されるとリアルタイムで更新されます。
+
+ノードが実行されると、その出力変数がパネルに表示され、迅速に検査または編集が可能です。
+
+
+
+以下は、"外部データベースクエリ"アプリケーションにおける変数インスペクターの使用例です。このワークフローでは、LLMノードが自然言語入力を受け取り、それをSQLに変換し、リモートデータベースにクエリを送信してデータを取得します。
+
+> [こちら](https://assets-docs.dify.ai/2025/06/ac4da5ed5506f8a34aae7f0b88323f37.yml)をクリックしてDSLワークフローファイルをダウンロードできます。
+
+### 表示
+
+ノードが実行されると、その出力変数は自動的に変数インスペクターに表示されます。変数をクリックすると、詳細な内容を確認できます。
+
+
+
+### 編集
+
+変数の値をクリックして編集します。更新された値は、次回の実行時に下流のノードで使用されます。
+
+
+ ここで変数の値を編集しても、ノードの「最終実行」記録内のキャッシュデータには影響しません。
+
+
+**例:**
+
+
+
+ ユーザーが「すべてのユーザーデータを表示」と入力すると、LLMノードが`SELECT * FROM users;`を生成します。データベースはすべてのデータを正しく返します。
+
+
+
+ ユーザーが変数インスペクターでSQLを手動で`SELECT username FROM users;`に変更し、データベースノードのみを再実行します。システムはユーザー名のリストを返します。
+
+ これにより、ワークフロー全体を再起動することなく、下流のデータベースノードだけを再実行して新しいデータをテストすることができます。
+
+
+
+
+### 元に戻す
+
+変数編集フィールドの右上にある元に戻すアイコンをクリックすると、変数を元の値に復元できます。
+
+
+
+### すべての変数をリセット
+
+左上の**すべてリセット**をクリックすると、キャッシュされたすべての変数を一度にクリアできます。
diff --git a/versions/3-3-x/en/user-guide/introduction 2.mdx b/versions/3-3-x/en/user-guide/introduction 2.mdx
new file mode 100644
index 00000000..2c0dc3e5
--- /dev/null
+++ b/versions/3-3-x/en/user-guide/introduction 2.mdx
@@ -0,0 +1,33 @@
+---
+title: "Introduction"
+mode: "wide"
+---
+
+Dify is an open-source platform for building agentic workflows. It lets you define processes visually, connect your existing tools and data sources, and deploy AI applications that solve real problems.
+
+
+
+
+
+ Start shipping powerful apps in minutes
+
+
+ Core Dify building blocks explained
+
+
+ Deploy Dify on your own laptop / server
+
+
+ Trade notes with the community
+
+
+ What's changed over past releases
+
+
+ Example Dify use case walkthroughs
+
+
+
+
+ The name Dify comes from **D**o **I**t **F**or **Y**ou.
+