docs-i18n: chunk raw doc translation (#62969)

Merged via squash.

Prepared head SHA: 6a16d66486
Co-authored-by: hxy91819 <8814856+hxy91819@users.noreply.github.com>
Co-authored-by: hxy91819 <8814856+hxy91819@users.noreply.github.com>
Reviewed-by: @hxy91819
This commit is contained in:
Mason
2026-04-09 23:22:16 +08:00
committed by GitHub
parent 635bb35b68
commit 06dea262c4
12 changed files with 2319 additions and 149 deletions

View File

@@ -0,0 +1,823 @@
package main
import (
"context"
"fmt"
"log"
"os"
"regexp"
"slices"
"strconv"
"strings"
)
// defaultDocChunkMaxBytes caps the byte size of one translation chunk when
// the OPENCLAW_DOCS_I18N_DOC_CHUNK_MAX_BYTES override is unset or invalid.
const defaultDocChunkMaxBytes = 12000

// defaultDocChunkPromptBudget caps the estimated prompt cost of one chunk
// when the OPENCLAW_DOCS_I18N_DOC_CHUNK_PROMPT_BUDGET override is unset or
// invalid.
const defaultDocChunkPromptBudget = 15000

var (
	// docsFenceRE matches a line that opens a Markdown code fence (``` or ~~~).
	// NOTE(review): appears unused in this file — fence detection below goes
	// through leadingFenceDelimiter; confirm other files use it before removing.
	docsFenceRE = regexp.MustCompile(`^\s*(` + "```" + `|~~~)`)
	// docsComponentTagRE matches MDX-style component tags whose name begins
	// with an uppercase letter, capturing the optional "/" (group 1) and the
	// tag name (group 2).
	docsComponentTagRE = regexp.MustCompile(`<(/?)([A-Z][A-Za-z0-9]*)\b[^>]*?/?>`)
)

// docsProtocolTokens lists wrapper/marker tokens of the tagged-document
// translation protocol (the tag constants are declared elsewhere in the
// package). A token that appears in translated output without appearing in
// the source is treated as a protocol leak by validateDocChunkTranslation.
var docsProtocolTokens = []string{
	frontmatterTagStart,
	frontmatterTagEnd,
	bodyTagStart,
	bodyTagEnd,
	"[[[FM_",
}
// docChunkStructure summarizes the structural features of a chunk of
// Markdown: how many code-fence toggles it contains and how many component
// tags of each kind, keyed "Name:open" / "Name:close" / "Name:self".
type docChunkStructure struct {
	fenceCount int
	tagCounts  map[string]int
}

// docChunkSplitPlan describes how an oversized or failing chunk should be
// re-split: the block groups to translate independently, plus a short
// reason string used only for logging.
type docChunkSplitPlan struct {
	groups [][]string
	reason string
}
// translateDocBodyChunked translates a Markdown document body by splitting
// it into blank-line-delimited blocks, grouping the blocks under a byte
// budget, and translating each group independently. Translated groups are
// concatenated in order; blocks keep their trailing newlines, so the output
// preserves the original layout. A whitespace-only body is returned as-is.
func translateDocBodyChunked(ctx context.Context, translator docsTranslator, relPath, body, srcLang, tgtLang string) (string, error) {
	if strings.TrimSpace(body) == "" {
		return body, nil
	}
	blocks := splitDocBodyIntoBlocks(body)
	groups := groupDocBlocks(blocks, docsI18nDocChunkMaxBytes())
	logDocChunkPlan(relPath, blocks, groups)
	out := strings.Builder{}
	for index, group := range groups {
		// Chunk IDs are 1-based for readability in logs.
		chunkID := fmt.Sprintf("%s.chunk-%03d", relPath, index+1)
		translated, err := translateDocBlockGroup(ctx, translator, chunkID, group, srcLang, tgtLang)
		if err != nil {
			return "", err
		}
		out.WriteString(translated)
	}
	return out.String(), nil
}
// translateDocBlockGroup translates one group of blocks and validates that
// the output preserves the source's structure (fences, component tags,
// protocol tokens). On translation or validation failure it retries by
// splitting: a single block falls back to a masked leaf translation or a
// forced singleton split; a multi-block group is re-planned or halved, each
// recursing through translatePlannedDocChunkGroups.
func translateDocBlockGroup(ctx context.Context, translator docsTranslator, chunkID string, blocks []string, srcLang, tgtLang string) (string, error) {
	source := strings.Join(blocks, "")
	if strings.TrimSpace(source) == "" {
		return source, nil
	}
	// Pre-split before attempting translation when the group already
	// exceeds the configured budgets.
	if plan, ok := planDocChunkSplit(blocks, docsI18nDocChunkMaxBytes(), docsI18nDocChunkPromptBudget()); ok {
		logDocChunkPlanSplit(chunkID, plan, source)
		return translatePlannedDocChunkGroups(ctx, translator, chunkID, plan.groups, srcLang, tgtLang)
	}
	normalizedSource, commonIndent := stripCommonIndent(source)
	log.Printf("docs-i18n: chunk start %s blocks=%d bytes=%d", chunkID, len(blocks), len(source))
	translated, err := translator.TranslateRaw(ctx, normalizedSource, srcLang, tgtLang)
	if err == nil {
		translated = sanitizeDocChunkProtocolWrappers(source, translated)
		translated = reapplyCommonIndent(translated, commonIndent)
		if validationErr := validateDocChunkTranslation(source, translated); validationErr == nil {
			log.Printf("docs-i18n: chunk done %s out_bytes=%d", chunkID, len(translated))
			return translated, nil
		} else {
			err = validationErr
		}
	}
	if len(blocks) <= 1 {
		if fallback, fallbackErr := translateDocLeafBlock(ctx, translator, chunkID, source, srcLang, tgtLang); fallbackErr == nil {
			return fallback, nil
		}
		if plan, ok := planSingletonDocChunkRetry(source, docsI18nDocChunkMaxBytes(), docsI18nDocChunkPromptBudget()); ok {
			logDocChunkPlanSplit(chunkID, plan, source)
			return translatePlannedDocChunkGroups(ctx, translator, chunkID, plan.groups, srcLang, tgtLang)
		}
		return "", fmt.Errorf("%s: %w", chunkID, err)
	}
	// NOTE(review): this re-plan repeats the pre-split call above with the
	// same inputs; unless the env-derived budgets changed mid-run it cannot
	// newly succeed here, so the midpoint split below is the effective retry.
	if plan, ok := planDocChunkSplit(blocks, docsI18nDocChunkMaxBytes(), docsI18nDocChunkPromptBudget()); ok {
		logDocChunkSplit(chunkID, len(blocks), err)
		return translatePlannedDocChunkGroups(ctx, translator, chunkID, plan.groups, srcLang, tgtLang)
	}
	if plan, ok := splitDocChunkBlocksMidpointSimple(blocks); ok {
		logDocChunkSplit(chunkID, len(blocks), err)
		return translatePlannedDocChunkGroups(ctx, translator, chunkID, plan.groups, srcLang, tgtLang)
	}
	return "", fmt.Errorf("%s: %w", chunkID, err)
}
// translateDocLeafBlock is the fallback for a single block whose raw
// translation failed validation. It masks component tags behind opaque
// placeholders, translates through the plain Translate API, restores the
// tags, and re-validates. Blocks containing code fences are rejected up
// front because tag masking cannot protect fenced content.
func translateDocLeafBlock(ctx context.Context, translator docsTranslator, chunkID, source, srcLang, tgtLang string) (string, error) {
	sourceStructure := summarizeDocChunkStructure(source)
	if sourceStructure.fenceCount != 0 {
		return "", fmt.Errorf("%s: raw leaf fallback not applicable", chunkID)
	}
	normalizedSource, commonIndent := stripCommonIndent(source)
	maskedSource, placeholders := maskDocComponentTags(normalizedSource)
	translated, err := translator.Translate(ctx, maskedSource, srcLang, tgtLang)
	if err != nil {
		return "", err
	}
	translated, err = restoreDocComponentTags(translated, placeholders)
	if err != nil {
		return "", err
	}
	translated = sanitizeDocChunkProtocolWrappers(source, translated)
	translated = reapplyCommonIndent(translated, commonIndent)
	if validationErr := validateDocChunkTranslation(source, translated); validationErr != nil {
		return "", validationErr
	}
	log.Printf("docs-i18n: chunk leaf-fallback done %s out_bytes=%d", chunkID, len(translated))
	return translated, nil
}
// splitDocBodyIntoBlocks partitions body into blocks, each ending at a blank
// line that lies outside any code fence. Blocks keep their trailing
// newlines, so concatenating them reproduces body exactly. An empty body
// yields nil; a body with no split points comes back as a single block.
func splitDocBodyIntoBlocks(body string) []string {
	if body == "" {
		return nil
	}
	var (
		blocks []string
		buf    strings.Builder
		fence  string
	)
	for _, line := range strings.SplitAfter(body, "\n") {
		buf.WriteString(line)
		fence, _ = updateFenceDelimiter(fence, line)
		// A blank line only terminates a block when we are not inside a fence.
		if fence == "" && strings.TrimSpace(line) == "" {
			blocks = append(blocks, buf.String())
			buf.Reset()
		}
	}
	if buf.Len() > 0 {
		blocks = append(blocks, buf.String())
	}
	if len(blocks) == 0 {
		return []string{body}
	}
	return blocks
}
// groupDocBlocks packs consecutive blocks into groups whose combined size
// stays at or under maxBytes (defaulted when non-positive). A block that is
// itself larger than maxBytes always forms a group of its own. Block order
// is preserved; an empty input yields nil.
func groupDocBlocks(blocks []string, maxBytes int) [][]string {
	if len(blocks) == 0 {
		return nil
	}
	if maxBytes <= 0 {
		maxBytes = defaultDocChunkMaxBytes
	}
	var (
		groups  [][]string
		pending []string
		size    int
	)
	flushPending := func() {
		if len(pending) > 0 {
			groups = append(groups, pending)
			pending = nil
			size = 0
		}
	}
	for _, block := range blocks {
		n := len(block)
		if len(pending) > 0 && size+n > maxBytes {
			flushPending()
		}
		if n > maxBytes {
			// An oversized block always travels alone.
			groups = append(groups, []string{block})
			continue
		}
		pending = append(pending, block)
		size += n
	}
	flushPending()
	return groups
}
// validateDocChunkTranslation checks that a translated chunk is structurally
// faithful to its source. It rejects output that gained a top-level protocol
// wrapper, leaked a protocol token absent from the source (compared
// case-insensitively), changed the number of code-fence toggles, or changed
// the set or per-kind counts of component tags. Returns nil on success.
func validateDocChunkTranslation(source, translated string) error {
	if hasUnexpectedTopLevelProtocolWrapper(source, translated) {
		return fmt.Errorf("protocol token leaked: top-level wrapper")
	}
	sourceLower := strings.ToLower(source)
	translatedLower := strings.ToLower(translated)
	for _, token := range docsProtocolTokens {
		tokenLower := strings.ToLower(token)
		// Tokens already present in the source are allowed to survive.
		if strings.Contains(sourceLower, tokenLower) {
			continue
		}
		if strings.Contains(translatedLower, tokenLower) {
			return fmt.Errorf("protocol token leaked: %s", token)
		}
	}
	sourceStructure := summarizeDocChunkStructure(source)
	translatedStructure := summarizeDocChunkStructure(translated)
	if sourceStructure.fenceCount != translatedStructure.fenceCount {
		return fmt.Errorf("code fence mismatch: source=%d translated=%d", sourceStructure.fenceCount, translatedStructure.fenceCount)
	}
	if !slices.Equal(sortedKeys(sourceStructure.tagCounts), sortedKeys(translatedStructure.tagCounts)) {
		return fmt.Errorf("component tag set mismatch")
	}
	for _, key := range sortedKeys(sourceStructure.tagCounts) {
		if sourceStructure.tagCounts[key] != translatedStructure.tagCounts[key] {
			return fmt.Errorf("component tag mismatch for %s: source=%d translated=%d", key, sourceStructure.tagCounts[key], translatedStructure.tagCounts[key])
		}
	}
	return nil
}
// sanitizeDocChunkProtocolWrappers strips protocol wrapper tags that a model
// added around a chunk translation even though the source had none. It
// prefers unwrapping a full tagged document, then a bare <body>…</body>
// wrapper, and returns the translation unchanged whenever unwrapping is
// ambiguous or would leave only whitespace.
func sanitizeDocChunkProtocolWrappers(source, translated string) string {
	if !containsProtocolWrapperToken(translated) {
		return translated
	}
	trimmedTranslated := strings.TrimSpace(translated)
	if !hasUnexpectedTopLevelProtocolWrapper(source, trimmedTranslated) {
		return translated
	}
	if !hasAmbiguousTaggedBodyClose(source, trimmedTranslated) {
		_, body, err := parseTaggedDocument(trimmedTranslated)
		if err == nil {
			if strings.TrimSpace(body) == "" {
				return translated
			}
			return body
		}
	}
	body, ok := stripBodyOnlyWrapper(trimmedTranslated)
	if !ok || strings.TrimSpace(body) == "" {
		return translated
	}
	return body
}

// stripBodyOnlyWrapper removes a single case-insensitive <body>…</body>
// wrapper enclosing the whole text. It reports false when the wrapper is
// absent or when the inner text itself contains body tags — nested or
// repeated wrappers are left for the caller to treat as ambiguous.
func stripBodyOnlyWrapper(text string) (string, bool) {
	lower := strings.ToLower(text)
	bodyStartLower := strings.ToLower(bodyTagStart)
	bodyEndLower := strings.ToLower(bodyTagEnd)
	if !strings.HasPrefix(lower, bodyStartLower) || !strings.HasSuffix(lower, bodyEndLower) {
		return "", false
	}
	// Slice by tag length; prefix/suffix were verified case-insensitively
	// above, so the lengths line up with the actual text.
	body := text[len(bodyTagStart) : len(text)-len(bodyTagEnd)]
	bodyLower := lower[len(bodyTagStart) : len(lower)-len(bodyTagEnd)]
	if strings.Contains(bodyLower, bodyStartLower) || strings.Contains(bodyLower, bodyEndLower) {
		return "", false
	}
	return trimTagNewlines(body), true
}

// hasAmbiguousTaggedBodyClose reports whether the translation contains
// exactly one body-close tag while the source itself already used body tags.
// In that case the lone close tag may belong to the source content rather
// than a model-added wrapper, so unwrapping via parseTaggedDocument is unsafe.
func hasAmbiguousTaggedBodyClose(source, translated string) bool {
	sourceLower := strings.ToLower(source)
	if !strings.Contains(sourceLower, strings.ToLower(bodyTagStart)) && !strings.Contains(sourceLower, strings.ToLower(bodyTagEnd)) {
		return false
	}
	translatedLower := strings.ToLower(translated)
	if !strings.Contains(translatedLower, strings.ToLower(frontmatterTagStart)) {
		return false
	}
	return strings.Count(translatedLower, strings.ToLower(bodyTagEnd)) == 1
}
// maskDocComponentTags replaces every MDX component tag in text with an
// opaque, numbered placeholder so a plain-text translator cannot mangle the
// markup. It returns the masked text together with the original tags,
// indexed by placeholder number, for restoreDocComponentTags.
func maskDocComponentTags(text string) (string, []string) {
	originals := make([]string, 0, 4)
	masked := docsComponentTagRE.ReplaceAllStringFunc(text, func(tag string) string {
		token := fmt.Sprintf("__OC_DOC_TAG_%03d__", len(originals))
		originals = append(originals, tag)
		return token
	})
	return masked, originals
}
// restoreDocComponentTags substitutes each numbered placeholder back with its
// original component tag. It fails if any expected placeholder is missing
// from text — a sign the translator dropped or rewrote a masked tag.
func restoreDocComponentTags(text string, placeholders []string) (string, error) {
	for index, original := range placeholders {
		token := fmt.Sprintf("__OC_DOC_TAG_%03d__", index)
		if !strings.Contains(text, token) {
			return "", fmt.Errorf("component tag placeholder missing: %s", token)
		}
		text = strings.ReplaceAll(text, token, original)
	}
	return text, nil
}
// logDocChunkSplit records a retry split. It is throttled to verbose mode
// unless the group is large (>= 16 blocks) and therefore always noteworthy.
func logDocChunkSplit(chunkID string, blockCount int, err error) {
	if docsI18nVerboseLogs() || blockCount >= 16 {
		log.Printf("docs-i18n: chunk split %s blocks=%d err=%v", chunkID, blockCount, err)
	}
}

// logDocChunkPlanSplit unconditionally records a planned (pre-translation)
// split. An empty plan reason is reported as "unknown"; plan is received by
// value, so the caller's copy is not mutated.
func logDocChunkPlanSplit(chunkID string, plan docChunkSplitPlan, source string) {
	if plan.reason == "" {
		plan.reason = "unknown"
	}
	log.Printf("docs-i18n: chunk pre-split %s reason=%s groups=%d bytes=%d", chunkID, plan.reason, len(plan.groups), len(source))
}
// summarizeDocChunkStructure scans text line by line and tallies the
// structural features used for translation validation: the number of
// code-fence open/close toggles and, per component tag, counts keyed
// "Name:open", "Name:close", or "Name:self". Tags inside fenced code are
// still counted; since source and translation are summarized identically,
// the comparison remains consistent.
func summarizeDocChunkStructure(text string) docChunkStructure {
	counts := map[string]int{}
	lines := strings.Split(text, "\n")
	fenceDelimiter := ""
	for _, line := range lines {
		var toggled bool
		fenceDelimiter, toggled = updateFenceDelimiter(fenceDelimiter, line)
		if toggled {
			counts["__fence_toggle__"]++
		}
		for _, match := range docsComponentTagRE.FindAllStringSubmatch(line, -1) {
			if len(match) < 3 {
				continue
			}
			fullToken := match[0]
			tagName := match[2]
			direction := "open"
			if match[1] == "/" {
				direction = "close"
			}
			// A trailing "/>" wins over the open/close distinction.
			if strings.HasSuffix(fullToken, "/>") {
				direction = "self"
			}
			counts[tagName+":"+direction]++
		}
	}
	return docChunkStructure{
		fenceCount: counts["__fence_toggle__"],
		tagCounts:  countsWithoutFence(counts),
	}
}
// countsWithoutFence returns a copy of counts with the internal fence-toggle
// tally removed, leaving only component-tag counts.
func countsWithoutFence(counts map[string]int) map[string]int {
	filtered := make(map[string]int, len(counts))
	for key, value := range counts {
		if key != "__fence_toggle__" {
			filtered[key] = value
		}
	}
	return filtered
}

// sortedKeys returns the keys of counts in ascending order, giving callers a
// deterministic iteration order over the map.
func sortedKeys(counts map[string]int) []string {
	out := make([]string, 0, len(counts))
	for key := range counts {
		out = append(out, key)
	}
	slices.Sort(out)
	return out
}
// updateFenceDelimiter advances a line-by-line fence state machine. current
// is the delimiter of the fence we are inside ("" when outside). It returns
// the new state and whether this line toggled the fence: a fence line opens
// a fence when outside, and closes it only when it uses the same marker
// character with at least the opening delimiter's length and nothing else
// on the line. A non-matching fence line inside a fence is plain content.
func updateFenceDelimiter(current, line string) (string, bool) {
	delimiter := leadingFenceDelimiter(line)
	if delimiter == "" {
		return current, false
	}
	if current == "" {
		return delimiter, true
	}
	if delimiter[0] == current[0] && len(delimiter) >= len(current) && isClosingFenceLine(line, delimiter) {
		return "", true
	}
	return current, false
}
// leadingFenceDelimiter returns the run of fence characters (three or more
// backticks or tildes, after optional indentation) that opens line, or ""
// when the line is not a fence line.
func leadingFenceDelimiter(line string) string {
	trimmed := strings.TrimLeft(line, " \t")
	if len(trimmed) < 3 {
		return ""
	}
	marker := trimmed[0]
	if marker != '`' && marker != '~' {
		return ""
	}
	run := 0
	for run < len(trimmed) && trimmed[run] == marker {
		run++
	}
	if run < 3 {
		return ""
	}
	return trimmed[:run]
}

// isClosingFenceLine reports whether line consists of delimiter (after
// optional indentation) followed only by whitespace.
func isClosingFenceLine(line, delimiter string) bool {
	rest, ok := strings.CutPrefix(strings.TrimLeft(line, " \t"), delimiter)
	return ok && strings.TrimSpace(rest) == ""
}
// hasUnexpectedTopLevelProtocolWrapper reports whether the translated text
// starts or ends with a protocol wrapper tag (frontmatter/body, compared
// case-insensitively after trimming) that the source did not also start or
// end with — i.e. a wrapper the model invented.
func hasUnexpectedTopLevelProtocolWrapper(source, translated string) bool {
	sourceTrimmed := strings.ToLower(strings.TrimSpace(source))
	translatedTrimmed := strings.ToLower(strings.TrimSpace(translated))
	checks := []struct {
		token string
		match func(string) bool
	}{
		{token: frontmatterTagStart, match: func(text string) bool { return strings.HasPrefix(text, strings.ToLower(frontmatterTagStart)) }},
		{token: bodyTagStart, match: func(text string) bool { return strings.HasPrefix(text, strings.ToLower(bodyTagStart)) }},
		{token: frontmatterTagEnd, match: func(text string) bool { return strings.HasSuffix(text, strings.ToLower(frontmatterTagEnd)) }},
		{token: bodyTagEnd, match: func(text string) bool { return strings.HasSuffix(text, strings.ToLower(bodyTagEnd)) }},
	}
	for _, check := range checks {
		if check.match(translatedTrimmed) && !check.match(sourceTrimmed) {
			return true
		}
	}
	return false
}

// containsProtocolWrapperToken reports whether text contains a body or
// frontmatter opening tag anywhere, case-insensitively; used as a cheap
// pre-filter before the more careful wrapper checks.
func containsProtocolWrapperToken(text string) bool {
	lower := strings.ToLower(text)
	return strings.Contains(lower, strings.ToLower(bodyTagStart)) || strings.Contains(lower, strings.ToLower(frontmatterTagStart))
}
// translatePlannedDocChunkGroups translates each planned group through
// translateDocBlockGroup (which may split further, recursively) and
// concatenates the results in order. Sub-chunk IDs append a 1-based ".NN"
// suffix to the parent chunk ID for log traceability.
func translatePlannedDocChunkGroups(ctx context.Context, translator docsTranslator, chunkID string, groups [][]string, srcLang, tgtLang string) (string, error) {
	var out strings.Builder
	for index, group := range groups {
		translated, err := translateDocBlockGroup(ctx, translator, fmt.Sprintf("%s.%02d", chunkID, index+1), group, srcLang, tgtLang)
		if err != nil {
			return "", err
		}
		out.WriteString(translated)
	}
	return out.String(), nil
}
// planDocChunkSplit decides, before any translation attempt, whether a block
// group should be split: a multi-block group is halved when its estimated
// prompt cost exceeds promptBudget, and a single block is delegated to the
// structural singleton planner. Returns false when no pre-split is needed
// (including for empty or whitespace-only input).
func planDocChunkSplit(blocks []string, maxBytes, promptBudget int) (docChunkSplitPlan, bool) {
	if len(blocks) == 0 {
		return docChunkSplitPlan{}, false
	}
	source := strings.Join(blocks, "")
	if strings.TrimSpace(source) == "" {
		return docChunkSplitPlan{}, false
	}
	// Cost is estimated on the dedented text, matching what is sent to the
	// translator in translateDocBlockGroup.
	normalizedSource, _ := stripCommonIndent(source)
	estimatedPromptCost := estimateDocPromptCost(normalizedSource)
	if len(blocks) > 1 && promptBudget > 0 && estimatedPromptCost > promptBudget {
		return splitDocChunkBlocksMidpoint(blocks, estimatedPromptCost, promptBudget)
	}
	if len(blocks) == 1 {
		return planSingletonDocChunk(blocks[0], maxBytes, promptBudget)
	}
	return docChunkSplitPlan{}, false
}
// splitDocChunkBlocksMidpoint halves a multi-block group into two planned
// groups, recording the prompt-budget overrun that triggered the split.
// Groups of fewer than two blocks cannot be halved and report false.
func splitDocChunkBlocksMidpoint(blocks []string, estimatedPromptCost, promptBudget int) (docChunkSplitPlan, bool) {
	mid := len(blocks) / 2
	if mid == 0 || mid == len(blocks) {
		// Zero or one block: nothing to halve.
		return docChunkSplitPlan{}, false
	}
	plan := docChunkSplitPlan{
		groups: [][]string{blocks[:mid], blocks[mid:]},
		reason: fmt.Sprintf("prompt-budget:%d>%d", estimatedPromptCost, promptBudget),
	}
	return plan, true
}

// splitDocChunkBlocksMidpointSimple performs the same halving as a
// last-resort retry after a failed translation, with a fixed reason string.
func splitDocChunkBlocksMidpointSimple(blocks []string) (docChunkSplitPlan, bool) {
	mid := len(blocks) / 2
	if mid == 0 || mid == len(blocks) {
		return docChunkSplitPlan{}, false
	}
	plan := docChunkSplitPlan{
		groups: [][]string{blocks[:mid], blocks[mid:]},
		reason: "retry-midpoint",
	}
	return plan, true
}
// planSingletonDocChunk plans a split for a lone block, but only when the
// block actually exceeds the byte ceiling or the prompt budget (measured on
// the dedented text); small blocks are left alone.
func planSingletonDocChunk(block string, maxBytes, promptBudget int) (docChunkSplitPlan, bool) {
	normalizedBlock, _ := stripCommonIndent(block)
	estimatedPromptCost := estimateDocPromptCost(normalizedBlock)
	overBytes := maxBytes > 0 && len(block) > maxBytes
	overPrompt := promptBudget > 0 && estimatedPromptCost > promptBudget
	if !overBytes && !overPrompt {
		return docChunkSplitPlan{}, false
	}
	return planSingletonDocChunkWithMode(block, maxBytes, promptBudget, false)
}

// planSingletonDocChunkRetry plans a split for a lone block unconditionally
// (force mode), used after a translation attempt has already failed.
func planSingletonDocChunkRetry(block string, maxBytes, promptBudget int) (docChunkSplitPlan, bool) {
	return planSingletonDocChunkWithMode(block, maxBytes, promptBudget, true)
}
// planSingletonDocChunkWithMode tries, in order of preference, three ways to
// split a single block: at structural boundaries (fences/blank lines), by
// splitting the interior of a pure fenced section, and finally line-wise.
// The plan reason carries a "singleton-" prefix, or "singleton-retry-" when
// force mode is on.
func planSingletonDocChunkWithMode(block string, maxBytes, promptBudget int, force bool) (docChunkSplitPlan, bool) {
	label := func(kind string) string {
		if force {
			return "singleton-retry-" + kind
		}
		return "singleton-" + kind
	}
	if sections := splitDocBlockSections(block); len(sections) > 1 {
		if groups := wrapDocChunkSections(sections); len(groups) > 1 {
			return docChunkSplitPlan{groups: groups, reason: label("structural")}, true
		}
	}
	if groups, ok := splitPureFencedDocSectionWithMode(block, maxBytes, promptBudget, force); ok {
		return docChunkSplitPlan{groups: groups, reason: label("fence")}, true
	}
	if groups, ok := splitPlainDocSectionWithMode(block, maxBytes, promptBudget, force); ok {
		return docChunkSplitPlan{groups: groups, reason: label("lines")}, true
	}
	return docChunkSplitPlan{}, false
}
// wrapDocChunkSections wraps each non-blank section into its own
// single-element group, dropping whitespace-only sections entirely.
func wrapDocChunkSections(sections []string) [][]string {
	groups := make([][]string, 0, len(sections))
	for _, section := range sections {
		if strings.TrimSpace(section) != "" {
			groups = append(groups, []string{section})
		}
	}
	return groups
}
// splitDocBlockSections splits a block into sections at fenced-code
// boundaries: each fenced region (opening fence through its matching closing
// line) becomes one section, and the prose between fences is further split
// at blank lines. Trailing newlines are preserved, so the sections
// concatenate back to the block. Returns nil unless the split produces more
// than one section.
func splitDocBlockSections(block string) []string {
	lines := strings.SplitAfter(block, "\n")
	if len(lines) == 0 {
		return nil
	}
	sections := make([]string, 0, len(lines))
	var current strings.Builder
	fenceDelimiter := ""
	for _, line := range lines {
		lineDelimiter := leadingFenceDelimiter(line)
		if fenceDelimiter == "" && lineDelimiter != "" {
			// A fence opens: flush accumulated prose, start a fenced section.
			if current.Len() > 0 {
				sections = append(sections, current.String())
				current.Reset()
			}
			current.WriteString(line)
			fenceDelimiter = lineDelimiter
			continue
		}
		current.WriteString(line)
		if fenceDelimiter != "" {
			// Inside a fence: only a matching, bare closing line ends it.
			if lineDelimiter != "" && lineDelimiter[0] == fenceDelimiter[0] && len(lineDelimiter) >= len(fenceDelimiter) && isClosingFenceLine(line, fenceDelimiter) {
				sections = append(sections, current.String())
				current.Reset()
				fenceDelimiter = ""
			}
			continue
		}
		if strings.TrimSpace(line) == "" {
			sections = append(sections, current.String())
			current.Reset()
		}
	}
	if current.Len() > 0 {
		sections = append(sections, current.String())
	}
	if len(sections) <= 1 {
		return nil
	}
	return sections
}

// splitPureFencedDocSection is the non-forced wrapper around
// splitPureFencedDocSectionWithMode.
// NOTE(review): no caller is visible in this file; callers may exist elsewhere.
func splitPureFencedDocSection(block string, maxBytes, promptBudget int) ([][]string, bool) {
	return splitPureFencedDocSectionWithMode(block, maxBytes, promptBudget, false)
}
// splitPureFencedDocSectionWithMode splits a block that consists solely of
// one fenced code block (plus optional surrounding blank lines) by splitting
// the fence's interior line-wise and re-wrapping each piece in the original
// opening and closing fence lines. The inner byte budget is reduced by the
// fence lines' length so re-wrapped pieces stay under maxBytes.
// NOTE(review): maxBytes-len(opening)-len(closing) can go non-positive for
// tiny budgets, which makes splitPlainDocSectionWithMode fall back to
// "whole text" sizing — confirm that is the intended behavior.
func splitPureFencedDocSectionWithMode(block string, maxBytes, promptBudget int, force bool) ([][]string, bool) {
	lines := strings.SplitAfter(block, "\n")
	if len(lines) < 2 {
		return nil, false
	}
	openingIndex := firstNonEmptyLineIndex(lines)
	closingIndex := lastNonEmptyLineIndex(lines)
	if openingIndex == -1 || closingIndex <= openingIndex {
		return nil, false
	}
	opening := lines[openingIndex]
	delimiter := leadingFenceDelimiter(opening)
	if delimiter == "" || !isClosingFenceLine(lines[closingIndex], delimiter) {
		return nil, false
	}
	// Anything besides whitespace outside the fence disqualifies the block.
	prefix := strings.Join(lines[:openingIndex], "")
	suffix := strings.Join(lines[closingIndex+1:], "")
	if strings.TrimSpace(prefix) != "" || strings.TrimSpace(suffix) != "" {
		return nil, false
	}
	closing := lines[closingIndex]
	inner := strings.Join(lines[openingIndex+1:closingIndex], "")
	groups, ok := splitPlainDocSectionWithMode(inner, maxBytes-len(opening)-len(closing), promptBudget, force)
	if !ok {
		return nil, false
	}
	for index, group := range groups {
		joined := strings.Join(group, "")
		groups[index] = []string{opening + joined + closing}
	}
	return groups, true
}

// splitPlainDocSection is the non-forced wrapper around
// splitPlainDocSectionWithMode.
func splitPlainDocSection(text string, maxBytes, promptBudget int) ([][]string, bool) {
	return splitPlainDocSectionWithMode(text, maxBytes, promptBudget, false)
}
// splitPlainDocSectionWithMode packs the lines of text into groups that each
// stay under both maxBytes and promptBudget. Non-positive limits fall back
// to len(text) and the default prompt budget respectively. It fails when any
// single line alone exceeds a limit. When everything fits in one group the
// split is pointless: non-forced mode reports failure, forced mode falls
// back to a midpoint split of the lines.
func splitPlainDocSectionWithMode(text string, maxBytes, promptBudget int, force bool) ([][]string, bool) {
	if maxBytes <= 0 {
		maxBytes = len(text)
	}
	if promptBudget <= 0 {
		promptBudget = defaultDocChunkPromptBudget
	}
	lines := strings.SplitAfter(text, "\n")
	if len(lines) <= 1 {
		return nil, false
	}
	groups := make([][]string, 0, len(lines))
	var current strings.Builder
	currentBytes := 0
	currentPrompt := 0
	for _, line := range lines {
		linePrompt := estimateDocPromptCost(line)
		if len(line) > maxBytes || linePrompt > promptBudget {
			// A single unsplittable line is over budget; give up.
			return nil, false
		}
		if currentBytes > 0 && (currentBytes+len(line) > maxBytes || currentPrompt+linePrompt > promptBudget) {
			groups = append(groups, []string{current.String()})
			current.Reset()
			currentBytes = 0
			currentPrompt = 0
		}
		current.WriteString(line)
		currentBytes += len(line)
		currentPrompt += linePrompt
	}
	if current.Len() > 0 {
		groups = append(groups, []string{current.String()})
	}
	if len(groups) <= 1 {
		if !force {
			return nil, false
		}
		return splitPlainDocSectionMidpoint(lines)
	}
	return groups, true
}
// splitPlainDocSectionMidpoint halves a slice of lines into two one-element
// groups. It fails when there are fewer than two lines or when either half
// would be whitespace-only.
func splitPlainDocSectionMidpoint(lines []string) ([][]string, bool) {
	mid := len(lines) / 2
	if mid == 0 || mid == len(lines) {
		return nil, false
	}
	left, right := strings.Join(lines[:mid], ""), strings.Join(lines[mid:], "")
	if strings.TrimSpace(left) == "" || strings.TrimSpace(right) == "" {
		return nil, false
	}
	return [][]string{{left}, {right}}, true
}
// firstNonEmptyLineIndex returns the index of the first line containing
// non-whitespace, or -1 when every line is blank.
func firstNonEmptyLineIndex(lines []string) int {
	return slices.IndexFunc(lines, func(line string) bool {
		return strings.TrimSpace(line) != ""
	})
}

// lastNonEmptyLineIndex returns the index of the last line containing
// non-whitespace, or -1 when every line is blank.
func lastNonEmptyLineIndex(lines []string) int {
	last := -1
	for index, line := range lines {
		if strings.TrimSpace(line) != "" {
			last = index
		}
	}
	return last
}
// docsI18nPositiveIntEnv reads the named environment variable and returns
// its value as a positive integer, or fallback when the variable is unset,
// not an integer, or not positive.
func docsI18nPositiveIntEnv(name string, fallback int) int {
	value := strings.TrimSpace(os.Getenv(name))
	if value == "" {
		return fallback
	}
	parsed, err := strconv.Atoi(value)
	if err != nil || parsed <= 0 {
		return fallback
	}
	return parsed
}

// docsI18nDocChunkMaxBytes returns the configured per-chunk byte ceiling,
// overridable via OPENCLAW_DOCS_I18N_DOC_CHUNK_MAX_BYTES.
func docsI18nDocChunkMaxBytes() int {
	return docsI18nPositiveIntEnv("OPENCLAW_DOCS_I18N_DOC_CHUNK_MAX_BYTES", defaultDocChunkMaxBytes)
}

// docsI18nDocChunkPromptBudget returns the configured per-chunk prompt-cost
// ceiling, overridable via OPENCLAW_DOCS_I18N_DOC_CHUNK_PROMPT_BUDGET.
func docsI18nDocChunkPromptBudget() int {
	return docsI18nPositiveIntEnv("OPENCLAW_DOCS_I18N_DOC_CHUNK_PROMPT_BUDGET", defaultDocChunkPromptBudget)
}
// estimateDocPromptCost approximates how expensive text is to translate in
// one prompt: the base cost is the byte length, plus weighted surcharges for
// markup-heavy characters (inline code, table pipes, braces/brackets, angle
// brackets, colons). A single pass replaces the original nine separate
// strings.Count scans; the per-character weights are unchanged.
func estimateDocPromptCost(text string) int {
	cost := len(text)
	for i := 0; i < len(text); i++ {
		switch text[i] {
		case '`':
			cost += 6
		case '|', '{', '}', '[', ']', '<', '>':
			cost += 4
		case ':':
			cost += 2
		}
	}
	return cost
}
// stripCommonIndent removes the longest whitespace prefix shared by every
// non-blank line of text, returning the dedented text and the removed prefix
// (for reapplyCommonIndent). Blank lines are ignored when computing the
// prefix and are emitted unchanged. When the shared prefix is empty, the
// original text is returned with an empty indent.
//
// Fix: the previous version used common == "" both as "not yet seeded" and
// as "zero shared indent", so a zero-indent first line let a later indented
// line re-seed the prefix; the subsequent strip/reapply round trip then
// changed the first line's indentation. A separate seeded flag removes the
// ambiguity: once any non-blank line has zero indent, nothing is stripped.
func stripCommonIndent(text string) (string, string) {
	lines := strings.SplitAfter(text, "\n")
	common := ""
	seeded := false
	for _, line := range lines {
		trimmed := strings.TrimRight(line, "\r\n")
		if strings.TrimSpace(trimmed) == "" {
			continue
		}
		indent := leadingIndent(trimmed)
		if !seeded {
			// The first non-blank line seeds the candidate prefix.
			common = indent
			seeded = true
		} else {
			common = commonIndentPrefix(common, indent)
		}
		if common == "" {
			// Shared prefix collapsed: nothing to strip.
			return text, ""
		}
	}
	if common == "" {
		return text, ""
	}
	var out strings.Builder
	for _, line := range lines {
		trimmed := strings.TrimRight(line, "\r\n")
		if strings.TrimSpace(trimmed) == "" {
			out.WriteString(line)
			continue
		}
		out.WriteString(strings.TrimPrefix(line, common))
	}
	return out.String(), common
}
// reapplyCommonIndent prefixes every non-blank line of text with indent,
// undoing stripCommonIndent. Blank lines and an empty indent pass through
// unchanged.
func reapplyCommonIndent(text, indent string) string {
	if indent == "" || text == "" {
		return text
	}
	var b strings.Builder
	for _, line := range strings.SplitAfter(text, "\n") {
		if strings.TrimSpace(strings.TrimRight(line, "\r\n")) == "" {
			b.WriteString(line)
			continue
		}
		b.WriteString(indent)
		b.WriteString(line)
	}
	return b.String()
}

// leadingIndent returns the run of spaces and tabs at the start of line.
func leadingIndent(line string) string {
	end := strings.IndexFunc(line, func(r rune) bool {
		return r != ' ' && r != '\t'
	})
	if end == -1 {
		return line
	}
	return line[:end]
}

// commonIndentPrefix returns the longest byte-wise common prefix of a and b.
func commonIndentPrefix(a, b string) string {
	limit := min(len(a), len(b))
	i := 0
	for i < limit && a[i] == b[i] {
		i++
	}
	return a[:i]
}

View File

@@ -3,6 +3,7 @@ package main
import (
"context"
"fmt"
"log"
"os"
"path/filepath"
"strings"
@@ -47,28 +48,18 @@ func processFileDoc(ctx context.Context, translator docsTranslator, docsRoot, fi
return false, "", fmt.Errorf("frontmatter parse failed for %s: %w", relPath, err)
}
}
frontTemplate, markers := buildFrontmatterTemplate(frontData)
taggedInput := formatTaggedDocument(frontTemplate, sourceBody)
translatedDoc, err := translator.TranslateRaw(ctx, taggedInput, srcLang, tgtLang)
if err != nil {
return false, "", fmt.Errorf("translate failed (%s): %w", relPath, err)
}
translatedFront, translatedBody, err := parseTaggedDocument(translatedDoc)
if err != nil {
return false, "", fmt.Errorf("tagged output invalid for %s: %w", relPath, err)
}
if sourceFront != "" && strings.TrimSpace(translatedFront) == "" {
return false, "", fmt.Errorf("translation removed frontmatter for %s", relPath)
}
if err := applyFrontmatterTranslations(frontData, markers, translatedFront); err != nil {
docTM := &TranslationMemory{entries: map[string]TMEntry{}}
if err := translateFrontMatter(ctx, translator, docTM, frontData, relPath, srcLang, tgtLang); err != nil {
return false, "", fmt.Errorf("frontmatter translation failed for %s: %w", relPath, err)
}
updatedFront, err := encodeFrontMatter(frontData, relPath, content)
if err != nil {
return false, "", err
}
translatedBody, err := translateDocBodyChunked(ctx, translator, relPath, sourceBody, srcLang, tgtLang)
if err != nil {
return false, "", fmt.Errorf("body translate failed for %s: %w", relPath, err)
}
if err := os.MkdirAll(filepath.Dir(outputPath), 0o755); err != nil {
return false, "", err
@@ -100,18 +91,12 @@ func parseTaggedDocument(text string) (string, string, error) {
}
bodyStart += frontEnd + len(bodyTagStart)
body := ""
suffix := ""
if bodyEnd := strings.Index(text[bodyStart:], bodyTagEnd); bodyEnd != -1 {
bodyEnd += bodyStart
body = trimTagNewlines(text[bodyStart:bodyEnd])
suffix = strings.TrimSpace(text[bodyEnd+len(bodyTagEnd):])
} else {
// Some model replies omit the final closing tag but otherwise return a
// valid document. Treat EOF as the end of <body> so doc retries do not
// burn through the whole workflow on a recoverable formatting slip.
body = trimTagNewlines(text[bodyStart:])
bodyEnd := findTaggedBodyEnd(text, bodyStart)
if bodyEnd == -1 {
return "", "", fmt.Errorf("missing %s", bodyTagEnd)
}
body := trimTagNewlines(text[bodyStart:bodyEnd])
suffix := strings.TrimSpace(text[bodyEnd+len(bodyTagEnd):])
prefix := strings.TrimSpace(text[:frontStart-len(frontmatterTagStart)])
if prefix != "" || suffix != "" {
@@ -122,107 +107,37 @@ func parseTaggedDocument(text string) (string, string, error) {
return frontMatter, body, nil
}
// findTaggedBodyEnd locates the body-close tag that terminates the tagged
// document beginning at bodyStart. Models sometimes emit a close tag inside
// the translated content, so every occurrence is scanned and only one
// followed exclusively by whitespace qualifies; the last such occurrence
// wins. Returns -1 when bodyStart is out of range or no acceptable close
// tag exists.
func findTaggedBodyEnd(text string, bodyStart int) int {
	if bodyStart < 0 || bodyStart > len(text) {
		return -1
	}
	search := text[bodyStart:]
	candidate := -1
	offset := 0
	for {
		index := strings.Index(search[offset:], bodyTagEnd)
		if index == -1 {
			return candidate
		}
		index += offset
		absolute := bodyStart + index
		// Only a close tag with nothing but whitespace after it can end
		// the document.
		suffix := strings.TrimSpace(text[absolute+len(bodyTagEnd):])
		if suffix == "" {
			candidate = absolute
		}
		offset = index + len(bodyTagEnd)
		if offset >= len(search) {
			return candidate
		}
	}
}
// trimTagNewlines drops at most one leading and one trailing newline — the
// cosmetic newlines that surround tag-wrapped content.
func trimTagNewlines(value string) string {
	if rest, ok := strings.CutPrefix(value, "\n"); ok {
		value = rest
	}
	if rest, ok := strings.CutSuffix(value, "\n"); ok {
		value = rest
	}
	return value
}
// frontmatterMarker records one marker pair injected into the frontmatter
// template so the translated value for Field (at Index, for list fields)
// can be extracted back out of the model's reply.
type frontmatterMarker struct {
	Field string
	Index int
	Start string
	End   string
}

// buildFrontmatterTemplate renders the translatable frontmatter fields
// (summary, title, read_when) into a text template whose values are wrapped
// in unique [[[FM_…]]] markers, returning the template and the marker list.
// Non-string read_when entries are emitted verbatim without markers; a
// nil or empty map yields an empty template.
func buildFrontmatterTemplate(data map[string]any) (string, []frontmatterMarker) {
	if len(data) == 0 {
		return "", nil
	}
	markers := []frontmatterMarker{}
	lines := []string{}
	if summary, ok := data["summary"].(string); ok {
		start, end := markerPair("SUMMARY", 0)
		markers = append(markers, frontmatterMarker{Field: "summary", Index: 0, Start: start, End: end})
		lines = append(lines, fmt.Sprintf("summary: %s%s%s", start, summary, end))
	}
	if title, ok := data["title"].(string); ok {
		start, end := markerPair("TITLE", 0)
		markers = append(markers, frontmatterMarker{Field: "title", Index: 0, Start: start, End: end})
		lines = append(lines, fmt.Sprintf("title: %s%s%s", start, title, end))
	}
	if readWhen, ok := data["read_when"].([]any); ok {
		lines = append(lines, "read_when:")
		for idx, item := range readWhen {
			textValue, ok := item.(string)
			if !ok {
				// Non-string entries cannot be translated; pass them through.
				lines = append(lines, fmt.Sprintf(" - %v", item))
				continue
			}
			start, end := markerPair("READ_WHEN", idx)
			markers = append(markers, frontmatterMarker{Field: "read_when", Index: idx, Start: start, End: end})
			lines = append(lines, fmt.Sprintf(" - %s%s%s", start, textValue, end))
		}
	}
	return strings.Join(lines, "\n"), markers
}

// markerPair builds the matching start/end marker strings for a field/index.
func markerPair(field string, index int) (string, string) {
	return fmt.Sprintf("[[[FM_%s_%d_START]]]", field, index), fmt.Sprintf("[[[FM_%s_%d_END]]]", field, index)
}
// applyFrontmatterTranslations copies each marker-delimited value from the
// translated frontmatter text back into data, keyed by the marker's Field
// (and Index for read_when entries). A missing marker aborts with an error;
// extracted values are whitespace-trimmed before being stored.
func applyFrontmatterTranslations(data map[string]any, markers []frontmatterMarker, translatedFront string) error {
	if len(markers) == 0 {
		return nil
	}
	for _, marker := range markers {
		value, err := extractMarkerValue(translatedFront, marker.Start, marker.End)
		if err != nil {
			return err
		}
		value = strings.TrimSpace(value)
		switch marker.Field {
		case "summary":
			data["summary"] = value
		case "title":
			data["title"] = value
		case "read_when":
			data["read_when"] = setReadWhenValue(data["read_when"], marker.Index, value)
		}
	}
	return nil
}

// extractMarkerValue returns the text between the first occurrence of start
// and the following occurrence of end, or an error naming the marker that
// is missing.
func extractMarkerValue(text, start, end string) (string, error) {
	startIndex := strings.Index(text, start)
	if startIndex == -1 {
		return "", fmt.Errorf("missing marker %s", start)
	}
	startIndex += len(start)
	endIndex := strings.Index(text[startIndex:], end)
	if endIndex == -1 {
		return "", fmt.Errorf("missing marker %s", end)
	}
	endIndex += startIndex
	return text[startIndex:endIndex], nil
}

// setReadWhenValue stores value at index in the read_when list, growing the
// list with empty strings as needed; a non-list existing value is replaced
// by a fresh list.
func setReadWhenValue(existing any, index int, value string) []any {
	readWhen, ok := existing.([]any)
	if !ok {
		readWhen = []any{}
	}
	for len(readWhen) <= index {
		readWhen = append(readWhen, "")
	}
	readWhen[index] = value
	return readWhen
}
func shouldSkipDoc(outputPath string, sourceHash string) (bool, error) {
data, err := os.ReadFile(outputPath)
if err != nil {
@@ -258,6 +173,14 @@ func extractSourceHash(frontData map[string]any) string {
return strings.TrimSpace(value)
}
// logDocChunkPlan logs how a document body was partitioned for translation:
// block count, group count, and total body bytes.
func logDocChunkPlan(relPath string, blocks []string, groups [][]string) {
	totalBytes := 0
	for _, block := range blocks {
		totalBytes += len(block)
	}
	log.Printf("docs-i18n: body-chunks %s blocks=%d groups=%d bytes=%d", relPath, len(blocks), len(groups), totalBytes)
}
func resolveDocsPath(docsRoot, filePath string) (string, string, error) {
absPath, err := filepath.Abs(filePath)
if err != nil {

View File

@@ -1,21 +1,243 @@
package main
import "testing"
import (
"context"
"os"
"path/filepath"
"strconv"
"strings"
"testing"
)
func TestParseTaggedDocumentAcceptsMissingBodyCloseAtEOF(t *testing.T) {
// docChunkTranslator is a test double: its raw path drops the closing
// </Accordion> tag whenever both sample blocks arrive in one chunk (forcing
// the chunker's structure validation to split), and otherwise translates a
// fixed set of phrases.
type docChunkTranslator struct{}

// Translate echoes text unchanged.
func (docChunkTranslator) Translate(_ context.Context, text, _, _ string) (string, error) {
	return text, nil
}

// TranslateRaw corrupts combined chunks and translates split ones.
func (docChunkTranslator) TranslateRaw(_ context.Context, text, _, _ string) (string, error) {
	switch {
	case strings.Contains(text, "Alpha block") && strings.Contains(text, "Beta block"):
		return strings.ReplaceAll(text, "</Accordion>", ""), nil
	default:
		replacer := strings.NewReplacer(
			"Alpha block", "阿尔法段",
			"Beta block", "贝塔段",
			"Code sample", "代码示例",
		)
		return replacer.Replace(text), nil
	}
}

func (docChunkTranslator) Close() {}
// docLeafFallbackTranslator is a test double: its raw path injects an extra
// <Tip> component tag (tripping structure validation) while its plain path
// translates cleanly, exercising the masked leaf-fallback route.
type docLeafFallbackTranslator struct{}

// Translate performs the clean, fixed-phrase translation.
func (docLeafFallbackTranslator) Translate(_ context.Context, text, _, _ string) (string, error) {
	replacer := strings.NewReplacer(
		"Gateway refuses to start unless `local`.", "Gateway 只有在 `local` 时才会启动。",
		"`gateway.auth.mode: \"trusted-proxy\"`", "`gateway.auth.mode: \"trusted-proxy\"`",
	)
	return replacer.Replace(text), nil
}

// TranslateRaw injects a component tag absent from the source, so chunk
// validation must reject the result.
func (docLeafFallbackTranslator) TranslateRaw(_ context.Context, text, _, _ string) (string, error) {
	if strings.Contains(text, "Gateway refuses to start unless `local`.") {
		return strings.Replace(text, "Gateway refuses to start unless `local`.", "<Tip>Gateway only starts in local mode.</Tip>", 1), nil
	}
	return text, nil
}

func (docLeafFallbackTranslator) Close() {}
// docFrontmatterTranslator is a test double: the plain path translates known
// frontmatter strings, while the raw path always returns junk outside any
// tagged sections.
type docFrontmatterTranslator struct{}

// Translate maps the known frontmatter strings to fixed translations.
func (docFrontmatterTranslator) Translate(_ context.Context, text, _, _ string) (string, error) {
	replacer := strings.NewReplacer(
		"Step-by-step Fly.io deployment for OpenClaw with persistent storage and HTTPS", "在 Fly.io 上逐步部署 OpenClaw包含持久化存储和 HTTPS",
		"Deploying OpenClaw on Fly.io", "在 Fly.io 上部署 OpenClaw",
		"Setting up Fly volumes, secrets, and first-run config", "设置 Fly volume、密钥和首次运行配置",
	)
	return replacer.Replace(text), nil
}

// TranslateRaw returns protocol-noncompliant output on purpose.
func (docFrontmatterTranslator) TranslateRaw(_ context.Context, text, _, _ string) (string, error) {
	return "extra text outside tagged sections", nil
}

func (docFrontmatterTranslator) Close() {}
// docFrontmatterFallbackTranslator expands one frontmatter scalar (the
// summary) into an entire tagged document — a suspicious expansion that the
// caller is expected to reject — while translating the other known snippets
// normally.
type docFrontmatterFallbackTranslator struct{}
// Translate returns a multi-line tagged document for the summary scalar,
// plain translations for the read_when entries, and echoes everything else.
func (docFrontmatterFallbackTranslator) Translate(_ context.Context, text, _, _ string) (string, error) {
	switch text {
	case "Step-by-step Fly.io deployment for OpenClaw with persistent storage and HTTPS":
		// Deliberately malformed output: a whole <frontmatter>/<body> document
		// where a single scalar value was expected.
		return strings.Join([]string{
			"<frontmatter>",
			"title: Fly.io",
			"summary: \"在 Fly.io 上部署 OpenClaw 的逐步指南,包含持久化存储和 HTTPS 设置\"",
			"read_when:",
			"  - 在 Fly.io 上部署 OpenClaw",
			"  - 设置 Fly 卷、机密和初始运行配置",
			"</frontmatter>",
			"",
			"<body>",
			"# Fly.io 部署",
			"</body>",
		}, "\n"), nil
	case "Deploying OpenClaw on Fly.io":
		return "在 Fly.io 上部署 OpenClaw", nil
	case "Setting up Fly volumes, secrets, and first-run config":
		return "设置 Fly 卷、机密和初始运行配置", nil
	default:
		return text, nil
	}
}
// TranslateRaw echoes the input unchanged.
func (docFrontmatterFallbackTranslator) TranslateRaw(_ context.Context, text, _, _ string) (string, error) {
	return text, nil
}
// Close is a no-op; the fake holds no resources.
func (docFrontmatterFallbackTranslator) Close() {}
// docProtocolLeakTranslator leaks the <frontmatter>/<body> protocol wrappers
// when both fixture paragraphs arrive in a single raw chunk, but translates
// split chunks cleanly — driving the protocol-leak split/retry path.
type docProtocolLeakTranslator struct{}
// Translate echoes the input unchanged.
func (docProtocolLeakTranslator) Translate(_ context.Context, text, _, _ string) (string, error) {
	return text, nil
}
// TranslateRaw wraps combined chunks in protocol tags (a leak the caller
// must detect); split chunks are translated without any wrapper.
func (docProtocolLeakTranslator) TranslateRaw(_ context.Context, text, _, _ string) (string, error) {
	switch {
	case strings.Contains(text, "First chunk") && strings.Contains(text, "Second chunk"):
		return strings.Join([]string{
			"<frontmatter>",
			"title: leaked",
			"</frontmatter>",
			"",
			"<body>",
			"First translated",
			"",
			"Second translated",
			"</body>",
		}, "\n"), nil
	default:
		replacer := strings.NewReplacer(
			"First chunk", "First translated",
			"Second chunk", "Second translated",
		)
		return replacer.Replace(text), nil
	}
}
// Close is a no-op; the fake holds no resources.
func (docProtocolLeakTranslator) Close() {}
type docWrappedLeafTranslator struct{}
func (docWrappedLeafTranslator) Translate(_ context.Context, text, _, _ string) (string, error) {
return text, nil
}
func (docWrappedLeafTranslator) TranslateRaw(_ context.Context, text, _, _ string) (string, error) {
return strings.Join([]string{
"<frontmatter>",
"title: leaked",
"</frontmatter>",
"",
"<body>",
"# Fly.io 部署",
"</body>",
}, "\n"), nil
}
func (docWrappedLeafTranslator) Close() {}
// docComponentLeafFallbackTranslator makes the raw path hallucinate a closing
// </Accordion> tag that is absent from the source (failing component
// validation), while the masked Translate path yields a clean translation
// for the fallback.
type docComponentLeafFallbackTranslator struct{}
// Translate swaps the fixture answer text for its Chinese translation.
func (docComponentLeafFallbackTranslator) Translate(_ context.Context, text, _, _ string) (string, error) {
	return strings.ReplaceAll(text, "Yes.", "是的。"), nil
}
// TranslateRaw injects a closing Accordion tag after every "Yes." line in
// the fixture chunk; other inputs pass through unchanged.
func (docComponentLeafFallbackTranslator) TranslateRaw(_ context.Context, text, _, _ string) (string, error) {
	if strings.Contains(text, "Can I use Claude Max subscription without an API key?") {
		return strings.ReplaceAll(text, "Yes.\n", "Yes.\n</Accordion>\n"), nil
	}
	return text, nil
}
// Close is a no-op; the fake holds no resources.
func (docComponentLeafFallbackTranslator) Close() {}
// docPromptBudgetTranslator records every raw input it receives so tests can
// assert how the prompt-budget guard split the work before translation.
type docPromptBudgetTranslator struct {
	// rawInputs collects each TranslateRaw argument in call order.
	rawInputs []string
}
// Translate echoes the input unchanged.
func (t *docPromptBudgetTranslator) Translate(_ context.Context, text, _, _ string) (string, error) {
	return text, nil
}
// TranslateRaw captures the chunk, then maps the two fixture paragraphs to
// their Chinese translations.
func (t *docPromptBudgetTranslator) TranslateRaw(_ context.Context, text, _, _ string) (string, error) {
	t.rawInputs = append(t.rawInputs, text)
	replacer := strings.NewReplacer(
		"First chunk with `json5` and { braces }", "第一块,含 `json5` 和 { braces }",
		"Second chunk with | table | pipes |", "第二块,含 | table | pipes |",
	)
	return replacer.Replace(text), nil
}
// Close is a no-op; the fake holds no resources.
func (t *docPromptBudgetTranslator) Close() {}
type uppercaseWrapperTranslator struct{}
func (uppercaseWrapperTranslator) Translate(_ context.Context, text, _, _ string) (string, error) {
return text, nil
}
func (uppercaseWrapperTranslator) TranslateRaw(_ context.Context, text, _, _ string) (string, error) {
return "<BODY>\n" + strings.ReplaceAll(text, "Regular paragraph.", "Translated paragraph.") + "\n</BODY>\n", nil
}
func (uppercaseWrapperTranslator) Close() {}
// oversizedBlockTranslator records every raw input and "translates" by
// rewriting "Line " prefixes, letting tests verify that oversized blocks are
// split into multiple smaller inputs before translation.
type oversizedBlockTranslator struct {
	// rawInputs collects each TranslateRaw argument in call order.
	rawInputs []string
}
// Translate echoes the input unchanged.
func (t *oversizedBlockTranslator) Translate(_ context.Context, text, _, _ string) (string, error) {
	return text, nil
}
// TranslateRaw captures the chunk and rewrites "Line " to "Translated line ".
func (t *oversizedBlockTranslator) TranslateRaw(_ context.Context, text, _, _ string) (string, error) {
	t.rawInputs = append(t.rawInputs, text)
	return strings.ReplaceAll(text, "Line ", "Translated line "), nil
}
// Close is a no-op; the fake holds no resources.
func (t *oversizedBlockTranslator) Close() {}
// singletonFenceRetryTranslator fails validation on the first whole-fence
// chunk by deleting the closing ``` line, then translates the split retries
// cleanly; rawInputs records every attempt so tests can verify the retry.
type singletonFenceRetryTranslator struct {
	// rawInputs collects each TranslateRaw argument in call order.
	rawInputs []string
}
// Translate echoes the input unchanged.
func (t *singletonFenceRetryTranslator) Translate(_ context.Context, text, _, _ string) (string, error) {
	return text, nil
}
// TranslateRaw drops the closing fence when the chunk still contains both
// Line 01 and Line 04 (i.e. the un-split fence); otherwise it translates.
func (t *singletonFenceRetryTranslator) TranslateRaw(_ context.Context, text, _, _ string) (string, error) {
	t.rawInputs = append(t.rawInputs, text)
	if strings.Contains(text, "Line 01") && strings.Contains(text, "Line 04") {
		return strings.Replace(text, "\n```\n", "\n", 1), nil
	}
	return strings.ReplaceAll(text, "Line ", "Translated line "), nil
}
// Close is a no-op; the fake holds no resources.
func (t *singletonFenceRetryTranslator) Close() {}
func TestParseTaggedDocumentRejectsMissingBodyCloseAtEOF(t *testing.T) {
t.Parallel()
input := "<frontmatter>\ntitle: Test\n</frontmatter>\n<body>\nTranslated body\n"
front, body, err := parseTaggedDocument(input)
if err != nil {
t.Fatalf("parseTaggedDocument returned error: %v", err)
}
if front != "title: Test" {
t.Fatalf("unexpected frontmatter %q", front)
}
if body != "Translated body" {
t.Fatalf("unexpected body %q", body)
_, _, err := parseTaggedDocument(input)
if err == nil {
t.Fatal("expected error for missing </body>")
}
}
@@ -29,3 +251,642 @@ func TestParseTaggedDocumentRejectsTrailingTextOutsideTags(t *testing.T) {
t.Fatal("expected error for trailing text")
}
}
func TestFindTaggedBodyEndSearchesFromBodyStart(t *testing.T) {
t.Parallel()
text := strings.Join([]string{
"<frontmatter>",
"summary: literal </body> token in frontmatter",
"</frontmatter>",
"<body>",
"Translated body",
"</body>",
}, "\n")
bodyStart := strings.Index(text, bodyTagStart)
if bodyStart == -1 {
t.Fatal("expected body tag in test input")
}
bodyStart += len(bodyTagStart)
bodyEnd := findTaggedBodyEnd(text, bodyStart)
if bodyEnd == -1 {
t.Fatal("expected closing body tag to be found")
}
body := trimTagNewlines(text[bodyStart:bodyEnd])
if body != "Translated body" {
t.Fatalf("expected body slice to ignore pre-body literal token, got %q", body)
}
}
func TestSplitDocBodyIntoBlocksKeepsFenceTogether(t *testing.T) {
t.Parallel()
body := strings.Join([]string{
"<Accordion title=\"Alpha block\">",
"",
"Code sample:",
"```ts",
"console.log('hello')",
"```",
"",
"Beta block",
"",
"</Accordion>",
"",
}, "\n")
blocks := splitDocBodyIntoBlocks(body)
if len(blocks) != 4 {
t.Fatalf("expected 4 blocks, got %d", len(blocks))
}
if !strings.Contains(blocks[1], "```ts") || !strings.Contains(blocks[1], "```") {
t.Fatalf("expected code fence to stay in a single block:\n%s", blocks[1])
}
if !strings.Contains(blocks[2], "Beta block") {
t.Fatalf("expected Beta paragraph in its own block:\n%s", blocks[2])
}
}
func TestSplitDocBodyIntoBlocksKeepsNestedTripleBackticksInsideFourBacktickFence(t *testing.T) {
t.Parallel()
body := strings.Join([]string{
"````md",
"```ts",
"console.log('nested example')",
"```",
"````",
"",
"Outside paragraph",
"",
}, "\n")
blocks := splitDocBodyIntoBlocks(body)
if len(blocks) != 2 {
t.Fatalf("expected 2 blocks, got %d", len(blocks))
}
if !strings.Contains(blocks[0], "console.log('nested example')") || !strings.Contains(blocks[0], "````") {
t.Fatalf("expected the full fenced example to stay in one block:\n%s", blocks[0])
}
if !strings.Contains(blocks[1], "Outside paragraph") {
t.Fatalf("expected trailing paragraph in second block:\n%s", blocks[1])
}
}
func TestSanitizeDocChunkProtocolWrappersStripsOuterWrapperAroundBodyExamples(t *testing.T) {
t.Parallel()
source := strings.Join([]string{
"Paragraph mentioning literal tokens `<body>` and `</body>`.",
"",
"<html>",
" <body>",
" literal example",
" </body>",
"</html>",
}, "\n")
translated := strings.Join([]string{
"<frontmatter>",
"title: leaked",
"</frontmatter>",
"",
"<body>",
"提到字面量 `<body>` 和 `</body>` 的段落。",
"",
"<html>",
" <body>",
" literal example",
" </body>",
"</html>",
"</body>",
}, "\n")
sanitized := sanitizeDocChunkProtocolWrappers(source, translated)
if strings.Contains(sanitized, frontmatterTagStart) || strings.HasPrefix(strings.TrimSpace(sanitized), bodyTagStart) {
t.Fatalf("expected outer wrapper stripped, got:\n%s", sanitized)
}
if !strings.Contains(sanitized, "<html>") || !strings.Contains(sanitized, "<body>") || !strings.Contains(sanitized, "</body>") {
t.Fatalf("expected inner HTML example preserved, got:\n%s", sanitized)
}
}
func TestTranslateDocBodyChunkedFallsBackToSmallerChunks(t *testing.T) {
body := strings.Join([]string{
"<Accordion title=\"Alpha block\">",
"Alpha block",
"</Accordion>",
"",
"Beta block",
"",
}, "\n")
t.Setenv("OPENCLAW_DOCS_I18N_DOC_CHUNK_MAX_BYTES", "4096")
translated, err := translateDocBodyChunked(context.Background(), docChunkTranslator{}, "help/faq.md", body, "en", "zh-CN")
if err != nil {
t.Fatalf("translateDocBodyChunked returned error: %v", err)
}
if !strings.Contains(translated, "阿尔法段") || !strings.Contains(translated, "贝塔段") {
t.Fatalf("expected translated text after chunk split, got:\n%s", translated)
}
if strings.Count(translated, "</Accordion>") != 1 {
t.Fatalf("expected closing Accordion tag to be preserved after fallback split:\n%s", translated)
}
}
func TestStripAndReapplyCommonIndent(t *testing.T) {
t.Parallel()
source := strings.Join([]string{
" <Step title=\"Example\">",
" - item one",
" - item two",
" </Step>",
"",
}, "\n")
normalized, indent := stripCommonIndent(source)
if indent != " " {
t.Fatalf("expected common indent of four spaces, got %q", indent)
}
if strings.HasPrefix(normalized, " ") {
t.Fatalf("expected normalized text without common indent:\n%s", normalized)
}
roundTrip := reapplyCommonIndent(normalized, indent)
if roundTrip != source {
t.Fatalf("expected indent round-trip to preserve source\nwant:\n%s\ngot:\n%s", source, roundTrip)
}
}
func TestTranslateDocBodyChunkedFallsBackToMaskedTranslateForLeafValidationFailure(t *testing.T) {
body := strings.Join([]string{
"- `mode`: `local` or `remote`. Gateway refuses to start unless `local`.",
"- `gateway.auth.mode: \"trusted-proxy\"`: delegate auth to a reverse proxy.",
"",
}, "\n")
t.Setenv("OPENCLAW_DOCS_I18N_DOC_CHUNK_MAX_BYTES", "4096")
translated, err := translateDocBodyChunked(
context.Background(),
docLeafFallbackTranslator{},
"gateway/configuration-reference.md",
body,
"en",
"zh-CN",
)
if err != nil {
t.Fatalf("translateDocBodyChunked returned error: %v", err)
}
if strings.Contains(translated, "<Tip>") {
t.Fatalf("expected masked fallback to remove hallucinated component tags:\n%s", translated)
}
if !strings.Contains(translated, "Gateway 只有在 `local` 时才会启动。") {
t.Fatalf("expected fallback translation to be applied:\n%s", translated)
}
}
// TestValidateDocChunkTranslationRejectsProtocolTokenLeakage verifies that a
// chunk translation wrapped in <frontmatter>/<body> protocol tags fails
// validation with a "protocol token leaked" error.
func TestValidateDocChunkTranslationRejectsProtocolTokenLeakage(t *testing.T) {
	t.Parallel()
	source := "Regular paragraph.\n\n"
	translated := "<frontmatter>\ntitle: leaked\n</frontmatter>\n<body>\nRegular paragraph.\n</body>\n"
	err := validateDocChunkTranslation(source, translated)
	if err == nil {
		t.Fatal("expected protocol token leakage to be rejected")
	}
	if !strings.Contains(err.Error(), "protocol token leaked") {
		t.Fatalf("expected protocol token leakage error, got %v", err)
	}
}
// TestValidateDocChunkTranslationRejectsTopLevelBodyWrapperLeakEvenWhenSourceMentionsBodyTag
// verifies that a top-level <body> wrapper in the translation is rejected even
// when the source legitimately mentions the `<body>` token inside inline code.
func TestValidateDocChunkTranslationRejectsTopLevelBodyWrapperLeakEvenWhenSourceMentionsBodyTag(t *testing.T) {
	t.Parallel()
	source := "Use `<body>` in examples, but keep prose outside wrappers.\n"
	translated := "<body>\nTranslated paragraph.\n"
	err := validateDocChunkTranslation(source, translated)
	if err == nil {
		t.Fatal("expected top-level wrapper leakage to be rejected")
	}
	if !strings.Contains(err.Error(), "protocol token leaked") {
		t.Fatalf("expected protocol token leakage error, got %v", err)
	}
}
func TestTranslateDocBodyChunkedSplitsOnProtocolTokenLeakage(t *testing.T) {
body := strings.Join([]string{
"First chunk",
"",
"Second chunk",
"",
}, "\n")
t.Setenv("OPENCLAW_DOCS_I18N_DOC_CHUNK_MAX_BYTES", "4096")
translated, err := translateDocBodyChunked(context.Background(), docProtocolLeakTranslator{}, "gateway/configuration-reference.md", body, "en", "zh-CN")
if err != nil {
t.Fatalf("translateDocBodyChunked returned error: %v", err)
}
if strings.Contains(translated, "<frontmatter>") || strings.Contains(translated, "<body>") || strings.Contains(translated, "[[[FM_") {
t.Fatalf("expected protocol wrapper leakage to be removed after split:\n%s", translated)
}
if !strings.Contains(translated, "First translated") || !strings.Contains(translated, "Second translated") {
t.Fatalf("expected split chunks to translate successfully:\n%s", translated)
}
}
func TestTranslateDocBodyChunkedStripsUppercaseBodyWrapper(t *testing.T) {
body := "Regular paragraph.\n"
t.Setenv("OPENCLAW_DOCS_I18N_DOC_CHUNK_MAX_BYTES", "4096")
translated, err := translateDocBodyChunked(context.Background(), uppercaseWrapperTranslator{}, "gateway/configuration-reference.md", body, "en", "zh-CN")
if err != nil {
t.Fatalf("translateDocBodyChunked returned error: %v", err)
}
if strings.Contains(strings.ToLower(translated), "<body>") {
t.Fatalf("expected uppercase wrapper to be stripped:\n%s", translated)
}
if !strings.Contains(translated, "Translated paragraph.") {
t.Fatalf("expected translated body content to survive unwrap:\n%s", translated)
}
}
func TestSanitizeDocChunkProtocolWrappersStripsTopLevelWrapperEvenWhenSourceMentionsBodyTag(t *testing.T) {
t.Parallel()
source := "Use `<body>` and `</body>` in examples, but keep the paragraph text plain.\n"
translated := "<body>\nTranslated paragraph.\n</body>\n"
got := sanitizeDocChunkProtocolWrappers(source, translated)
if strings.Contains(got, "<body>") || strings.Contains(got, "</body>") {
t.Fatalf("expected top-level wrapper to be stripped, got %q", got)
}
if strings.TrimSpace(got) != "Translated paragraph." {
t.Fatalf("unexpected sanitized body %q", got)
}
}
func TestSanitizeDocChunkProtocolWrappersKeepsLegitimateTopLevelBodyBlock(t *testing.T) {
t.Parallel()
source := "<body>\nLiteral HTML block.\n</body>\n"
translated := "<body>\nLiteral HTML block.\n</body>\n"
got := sanitizeDocChunkProtocolWrappers(source, translated)
if got != translated {
t.Fatalf("expected legitimate top-level body block to remain unchanged\nwant:\n%s\ngot:\n%s", translated, got)
}
}
func TestSanitizeDocChunkProtocolWrappersKeepsAmbiguousTaggedWrapperForRetry(t *testing.T) {
t.Parallel()
source := strings.Join([]string{
"Paragraph mentioning literal tokens `<body>` and `</body>`.",
"",
"Closing example:",
"</body>",
}, "\n")
translated := strings.Join([]string{
"<frontmatter>",
"title: leaked",
"</frontmatter>",
"",
"<body>",
"提到字面量 `<body>` 和 `</body>` 的段落。",
}, "\n")
got := sanitizeDocChunkProtocolWrappers(source, translated)
if got != translated {
t.Fatalf("expected ambiguous tagged wrapper to remain unchanged for retry\nwant:\n%s\ngot:\n%s", translated, got)
}
}
func TestSplitDocBodyIntoBlocksKeepsInfoStringExampleInsideFence(t *testing.T) {
t.Parallel()
body := strings.Join([]string{
"```md",
"```ts",
"console.log('inside example')",
"```",
"",
"Outside paragraph",
"",
}, "\n")
blocks := splitDocBodyIntoBlocks(body)
if len(blocks) != 2 {
t.Fatalf("expected 2 blocks, got %d", len(blocks))
}
if !strings.Contains(blocks[0], "console.log('inside example')") || !strings.Contains(blocks[0], "```ts") {
t.Fatalf("expected fenced example to stay together:\n%s", blocks[0])
}
if !strings.Contains(blocks[1], "Outside paragraph") {
t.Fatalf("expected trailing paragraph in second block:\n%s", blocks[1])
}
}
func TestTranslateDocBodyChunkedPreSplitsOversizedPromptBudget(t *testing.T) {
body := strings.Join([]string{
"First chunk with `json5` and { braces }",
"",
"Second chunk with | table | pipes |",
"",
}, "\n")
t.Setenv("OPENCLAW_DOCS_I18N_DOC_CHUNK_MAX_BYTES", "4096")
t.Setenv("OPENCLAW_DOCS_I18N_DOC_CHUNK_PROMPT_BUDGET", "60")
translator := &docPromptBudgetTranslator{}
translated, err := translateDocBodyChunked(
context.Background(),
translator,
"gateway/configuration-reference.md",
body,
"en",
"zh-CN",
)
if err != nil {
t.Fatalf("translateDocBodyChunked returned error: %v", err)
}
for _, input := range translator.rawInputs {
if strings.Contains(input, "First chunk with `json5` and { braces }") && strings.Contains(input, "Second chunk with | table | pipes |") {
t.Fatalf("expected prompt budget guard to split before raw translation, saw combined input:\n%s", input)
}
}
if !strings.Contains(translated, "第一块") || !strings.Contains(translated, "第二块") {
t.Fatalf("expected split chunks to translate successfully:\n%s", translated)
}
}
func TestTranslateDocBodyChunkedSplitsOversizedSingletonBlock(t *testing.T) {
body := strings.Join([]string{
"Line 01",
"Line 02",
"Line 03",
"Line 04",
"Line 05",
"Line 06",
"",
}, "\n")
t.Setenv("OPENCLAW_DOCS_I18N_DOC_CHUNK_MAX_BYTES", "24")
translator := &oversizedBlockTranslator{}
translated, err := translateDocBodyChunked(context.Background(), translator, "gateway/configuration-reference.md", body, "en", "zh-CN")
if err != nil {
t.Fatalf("translateDocBodyChunked returned error: %v", err)
}
if len(translator.rawInputs) < 2 {
t.Fatalf("expected oversized singleton block to be split before translation, saw %d input(s)", len(translator.rawInputs))
}
for _, input := range translator.rawInputs {
if len(input) > 24 {
t.Fatalf("expected split chunk under byte budget, got %d bytes:\n%s", len(input), input)
}
}
if !strings.Contains(translated, "Translated line 01") || !strings.Contains(translated, "Translated line 06") {
t.Fatalf("expected translated singleton parts to be reassembled:\n%s", translated)
}
}
func TestTranslateDocBodyChunkedSplitsSingletonBlockWhenPromptBudgetExceeded(t *testing.T) {
lineA := "Alpha chunk with { braces }\n"
lineB := "Beta chunk with | pipes |\n"
body := lineA + lineB + "\n"
budget := max(estimateDocPromptCost(lineA), estimateDocPromptCost(lineB)) + 1
if estimateDocPromptCost(body) <= budget {
t.Fatalf("test setup expected combined singleton prompt cost to exceed budget; cost=%d budget=%d", estimateDocPromptCost(body), budget)
}
t.Setenv("OPENCLAW_DOCS_I18N_DOC_CHUNK_MAX_BYTES", "4096")
t.Setenv("OPENCLAW_DOCS_I18N_DOC_CHUNK_PROMPT_BUDGET", strconv.Itoa(budget))
translator := &oversizedBlockTranslator{}
translated, err := translateDocBodyChunked(context.Background(), translator, "gateway/configuration-reference.md", body, "en", "zh-CN")
if err != nil {
t.Fatalf("translateDocBodyChunked returned error: %v", err)
}
if len(translator.rawInputs) < 2 {
t.Fatalf("expected prompt-budget singleton split before translation, saw %d input(s)", len(translator.rawInputs))
}
for _, input := range translator.rawInputs {
if estimateDocPromptCost(input) > budget {
t.Fatalf("expected split chunk under prompt budget, got cost=%d budget=%d:\n%s", estimateDocPromptCost(input), budget, input)
}
}
if !strings.Contains(translated, "Alpha chunk") || !strings.Contains(translated, "Beta chunk") {
t.Fatalf("expected translated singleton parts to be reassembled:\n%s", translated)
}
}
func TestTranslateDocBodyChunkedSplitsOversizedFenceBeforeTrailingProse(t *testing.T) {
body := strings.Join([]string{
"```md",
"Line 01",
"Line 02",
"Line 03",
"Line 04",
"```",
"Trailing paragraph after the fence.",
"",
}, "\n")
t.Setenv("OPENCLAW_DOCS_I18N_DOC_CHUNK_MAX_BYTES", "24")
translator := &oversizedBlockTranslator{}
translated, err := translateDocBodyChunked(context.Background(), translator, "gateway/configuration-reference.md", body, "en", "zh-CN")
if err != nil {
t.Fatalf("translateDocBodyChunked returned error: %v", err)
}
if len(translator.rawInputs) < 3 {
t.Fatalf("expected oversized fenced block with trailing prose to split, saw %d input(s)", len(translator.rawInputs))
}
for _, input := range translator.rawInputs {
if strings.Contains(input, "Line 01") || strings.Contains(input, "Line 02") || strings.Contains(input, "Line 03") || strings.Contains(input, "Line 04") {
if !strings.Contains(input, "```md") || !strings.Contains(input, "```") {
t.Fatalf("expected fenced split input to keep matched fence wrappers:\n%s", input)
}
}
}
if !strings.Contains(translated, "Translated line 01") || !strings.Contains(translated, "Trailing paragraph after the fence.") {
t.Fatalf("expected fence content and trailing prose to survive split:\n%s", translated)
}
}
func TestTranslateDocBodyChunkedRetriesSingletonFenceAfterValidationFailure(t *testing.T) {
body := strings.Join([]string{
"```md",
"Line 01",
"Line 02",
"Line 03",
"Line 04",
"```",
"",
}, "\n")
t.Setenv("OPENCLAW_DOCS_I18N_DOC_CHUNK_MAX_BYTES", "4096")
t.Setenv("OPENCLAW_DOCS_I18N_DOC_CHUNK_PROMPT_BUDGET", "4096")
translator := &singletonFenceRetryTranslator{}
translated, err := translateDocBodyChunked(context.Background(), translator, "gateway/configuration-reference.md", body, "en", "zh-CN")
if err != nil {
t.Fatalf("translateDocBodyChunked returned error: %v", err)
}
if len(translator.rawInputs) < 3 {
t.Fatalf("expected singleton fence retry to split after validation failure, saw %d input(s)", len(translator.rawInputs))
}
if !strings.Contains(translator.rawInputs[0], "Line 01") || !strings.Contains(translator.rawInputs[0], "Line 04") {
t.Fatalf("expected first raw attempt to include the original fenced block:\n%s", translator.rawInputs[0])
}
for _, input := range translator.rawInputs[1:] {
if strings.Contains(input, "Line 01") || strings.Contains(input, "Line 02") || strings.Contains(input, "Line 03") || strings.Contains(input, "Line 04") {
if !strings.Contains(input, "```md") || !strings.Contains(input, "```") {
t.Fatalf("expected split retry inputs to preserve fence wrappers:\n%s", input)
}
}
}
if !strings.Contains(translated, "Translated line 01") || !strings.Contains(translated, "Translated line 04") {
t.Fatalf("expected singleton fence retry to reassemble translated output:\n%s", translated)
}
}
func TestTranslateDocBodyChunkedUnwrapsTaggedLeafProtocolLeakage(t *testing.T) {
body := "# Fly.io Deployment\n\n"
t.Setenv("OPENCLAW_DOCS_I18N_DOC_CHUNK_MAX_BYTES", "4096")
translated, err := translateDocBodyChunked(
context.Background(),
docWrappedLeafTranslator{},
"install/fly.md",
body,
"en",
"zh-CN",
)
if err != nil {
t.Fatalf("translateDocBodyChunked returned error: %v", err)
}
if strings.Contains(translated, "<frontmatter>") || strings.Contains(translated, "<body>") {
t.Fatalf("expected wrapped leaf translation to unwrap protocol tags:\n%s", translated)
}
if !strings.Contains(translated, "# Fly.io 部署") {
t.Fatalf("expected unwrapped body translation:\n%s", translated)
}
}
func TestTranslateDocBodyChunkedFallsBackForComponentLeafValidationFailure(t *testing.T) {
body := " <Accordion title=\"Can I use Claude Max subscription without an API key?\">\n Yes.\n\n"
t.Setenv("OPENCLAW_DOCS_I18N_DOC_CHUNK_MAX_BYTES", "4096")
translated, err := translateDocBodyChunked(
context.Background(),
docComponentLeafFallbackTranslator{},
"help/faq.md",
body,
"en",
"zh-CN",
)
if err != nil {
t.Fatalf("translateDocBodyChunked returned error: %v", err)
}
if strings.Contains(translated, "</Accordion>") {
t.Fatalf("expected component leaf fallback to avoid hallucinated closing tag:\n%s", translated)
}
if !strings.Contains(translated, "是的。") {
t.Fatalf("expected body text to be translated after component leaf fallback:\n%s", translated)
}
if !strings.Contains(translated, "<Accordion title=\"Can I use Claude Max subscription without an API key?\">") {
t.Fatalf("expected Accordion opening tag to be preserved:\n%s", translated)
}
}
func TestProcessFileDocUsesFieldLevelFrontmatterTranslation(t *testing.T) {
t.Parallel()
docsRoot := t.TempDir()
sourcePath := filepath.Join(docsRoot, "install")
if err := os.MkdirAll(sourcePath, 0o755); err != nil {
t.Fatalf("mkdir failed: %v", err)
}
sourceFile := filepath.Join(sourcePath, "fly.md")
source := strings.Join([]string{
"---",
"title: Fly.io",
"summary: \"Step-by-step Fly.io deployment for OpenClaw with persistent storage and HTTPS\"",
"read_when:",
" - Deploying OpenClaw on Fly.io",
" - Setting up Fly volumes, secrets, and first-run config",
"---",
"",
}, "\n")
if err := os.WriteFile(sourceFile, []byte(source), 0o644); err != nil {
t.Fatalf("write failed: %v", err)
}
skipped, outputPath, err := processFileDoc(context.Background(), docFrontmatterTranslator{}, docsRoot, sourceFile, "en", "zh-CN", true)
if err != nil {
t.Fatalf("processFileDoc returned error: %v", err)
}
if skipped {
t.Fatal("expected file to be processed")
}
if outputPath == "" {
t.Fatal("expected output path")
}
output, err := os.ReadFile(outputPath)
if err != nil {
t.Fatalf("read output failed: %v", err)
}
text := string(output)
if !strings.Contains(text, "在 Fly.io 上逐步部署 OpenClaw包含持久化存储和 HTTPS") {
t.Fatalf("expected translated summary in output:\n%s", text)
}
if !strings.Contains(text, "在 Fly.io 上部署 OpenClaw") {
t.Fatalf("expected translated read_when entry in output:\n%s", text)
}
}
func TestProcessFileDocRejectsSuspiciousFrontmatterScalarExpansion(t *testing.T) {
t.Parallel()
docsRoot := t.TempDir()
sourcePath := filepath.Join(docsRoot, "install")
if err := os.MkdirAll(sourcePath, 0o755); err != nil {
t.Fatalf("mkdir failed: %v", err)
}
sourceFile := filepath.Join(sourcePath, "fly.md")
source := strings.Join([]string{
"---",
"title: Fly.io",
"summary: \"Step-by-step Fly.io deployment for OpenClaw with persistent storage and HTTPS\"",
"read_when:",
" - Deploying OpenClaw on Fly.io",
" - Setting up Fly volumes, secrets, and first-run config",
"---",
"",
}, "\n")
if err := os.WriteFile(sourceFile, []byte(source), 0o644); err != nil {
t.Fatalf("write failed: %v", err)
}
skipped, outputPath, err := processFileDoc(context.Background(), docFrontmatterFallbackTranslator{}, docsRoot, sourceFile, "en", "zh-CN", true)
if err != nil {
t.Fatalf("processFileDoc returned error: %v", err)
}
if skipped {
t.Fatal("expected file to be processed")
}
output, err := os.ReadFile(outputPath)
if err != nil {
t.Fatalf("read output failed: %v", err)
}
text := string(output)
if strings.Contains(text, "<frontmatter>") || strings.Contains(text, "<body>") {
t.Fatalf("expected suspicious frontmatter expansion to be rejected:\n%s", text)
}
if !strings.Contains(text, "summary: Step-by-step Fly.io deployment for OpenClaw with persistent storage and HTTPS") {
t.Fatalf("expected original summary to be preserved after fallback:\n%s", text)
}
if !strings.Contains(text, "在 Fly.io 上部署 OpenClaw") {
t.Fatalf("expected read_when translation to survive fallback:\n%s", text)
}
}

View File

@@ -25,6 +25,18 @@ func (fakeDocsTranslator) TranslateRaw(_ context.Context, text, _, _ string) (st
func (fakeDocsTranslator) Close() {}
type invalidFrontmatterTranslator struct{}
func (invalidFrontmatterTranslator) Translate(_ context.Context, text, _, _ string) (string, error) {
return "<body>\n" + text + "\n</body>\n", nil
}
func (invalidFrontmatterTranslator) TranslateRaw(_ context.Context, text, _, _ string) (string, error) {
return text, nil
}
func (invalidFrontmatterTranslator) Close() {}
func TestRunDocsI18NRewritesFinalLocalizedPageLinks(t *testing.T) {
t.Parallel()
@@ -74,3 +86,23 @@ func TestRunDocsI18NRewritesFinalLocalizedPageLinks(t *testing.T) {
}
}
}
// TestTranslateSnippetDoesNotCacheFallbackToSource ensures that when the
// translator returns an invalid (protocol-wrapped) result, translateSnippet
// falls back to the source text and does NOT store that fallback in the
// translation memory.
func TestTranslateSnippetDoesNotCacheFallbackToSource(t *testing.T) {
	t.Parallel()
	tm := &TranslationMemory{entries: map[string]TMEntry{}}
	source := "Gateway"
	translated, err := translateSnippet(context.Background(), invalidFrontmatterTranslator{}, tm, "gateway/index.md:frontmatter:title", source, "en", "zh-CN")
	if err != nil {
		t.Fatalf("translateSnippet returned error: %v", err)
	}
	if translated != source {
		t.Fatalf("expected fallback to source text, got %q", translated)
	}
	// NOTE(review): the local cacheKey shadows the cacheKey function in the
	// statement below; legal Go, but a different variable name would be clearer.
	cacheKey := cacheKey(cacheNamespace(), "en", "zh-CN", "gateway/index.md:frontmatter:title", hashText(source))
	if _, ok := tm.Get(cacheKey); ok {
		t.Fatalf("expected fallback translation not to be cached")
	}
}

View File

@@ -16,6 +16,7 @@ const (
envDocsPiExecutable = "OPENCLAW_DOCS_I18N_PI_EXECUTABLE"
envDocsPiArgs = "OPENCLAW_DOCS_I18N_PI_ARGS"
envDocsPiPackageVersion = "OPENCLAW_DOCS_I18N_PI_PACKAGE_VERSION"
envDocsPiOmitProvider = "OPENCLAW_DOCS_I18N_PI_OMIT_PROVIDER"
defaultPiPackageVersion = "0.58.3"
)
@@ -118,3 +119,12 @@ func getMaterializedPiPackageVersion() string {
}
return defaultPiPackageVersion
}
// docsPiOmitProvider reports whether the --provider flag should be omitted
// from the pi invocation, based on a truthy
// OPENCLAW_DOCS_I18N_PI_OMIT_PROVIDER environment value.
func docsPiOmitProvider() bool {
	value := strings.ToLower(strings.TrimSpace(os.Getenv(envDocsPiOmitProvider)))
	for _, truthy := range []string{"1", "true", "yes", "on"} {
		if value == truthy {
			return true
		}
	}
	return false
}

View File

@@ -71,10 +71,12 @@ func startDocsPiClient(ctx context.Context, options docsPiClientOptions) (*docsP
}
args := append([]string{}, command.Args...)
args = append(args, "--mode", "rpc")
if provider := docsPiProviderArg(); provider != "" && !docsPiOmitProvider() {
args = append(args, "--provider", provider)
}
args = append(args,
"--mode", "rpc",
"--provider", docsPiProvider(),
"--model", docsPiModel(),
"--model", docsPiModelRef(),
"--thinking", options.Thinking,
"--no-session",
)
@@ -83,7 +85,7 @@ func startDocsPiClient(ctx context.Context, options docsPiClientOptions) (*docsP
}
process := exec.Command(command.Executable, args...)
agentDir, err := getDocsPiAgentDir()
agentDir, err := resolveDocsPiAgentDir()
if err != nil {
return nil, err
}
@@ -238,12 +240,9 @@ func extractTranslationResult(raw json.RawMessage) (string, error) {
if message.Role != "assistant" {
continue
}
if message.ErrorMessage != "" || strings.EqualFold(message.StopReason, "error") {
msg := strings.TrimSpace(message.ErrorMessage)
if msg == "" {
msg = "unknown error"
}
return "", fmt.Errorf("pi error: %s", msg)
if message.ErrorMessage != "" || isTerminalPiStopReason(message.StopReason) {
text, _ := extractContentText(message.Content)
return "", formatPiAgentError(message, text)
}
text, err := extractContentText(message.Content)
if err != nil {
@@ -254,6 +253,46 @@ func extractTranslationResult(raw json.RawMessage) (string, error) {
return "", errors.New("assistant message not found")
}
// isTerminalPiStopReason reports whether a pi agent stop reason indicates the
// run ended abnormally (errored, terminated, cancelled, or aborted) rather
// than completing normally. Matching is case-insensitive and whitespace-tolerant.
func isTerminalPiStopReason(stopReason string) bool {
	normalized := strings.ToLower(strings.TrimSpace(stopReason))
	// Both British and American spellings of "cancelled" are accepted.
	for _, terminal := range []string{"error", "terminated", "cancelled", "canceled", "aborted"} {
		if normalized == terminal {
			return true
		}
	}
	return false
}
// formatPiAgentError builds a single "pi error: ..." error from whatever
// diagnostics the terminal assistant message carried: its error message, its
// stop reason, and a flattened preview of the assistant text. Empty fields
// are skipped; when nothing is available the error reads "unknown error".
func formatPiAgentError(message agentMessage, assistantText string) error {
	var details []string
	if errMsg := strings.TrimSpace(message.ErrorMessage); errMsg != "" {
		details = append(details, errMsg)
	}
	if reason := strings.TrimSpace(message.StopReason); reason != "" {
		details = append(details, "stopReason="+reason)
	}
	if snippet := previewPiAssistantText(assistantText); snippet != "" {
		details = append(details, "assistant="+snippet)
	}
	if len(details) == 0 {
		details = []string{"unknown error"}
	}
	return fmt.Errorf("pi error: %s", strings.Join(details, "; "))
}
func previewPiAssistantText(text string) string {
trimmed := strings.TrimSpace(text)
if trimmed == "" {
return ""
}
trimmed = strings.ReplaceAll(trimmed, "\n", " ")
trimmed = strings.Join(strings.Fields(trimmed), " ")
const limit = 160
if len(trimmed) <= limit {
return trimmed
}
return trimmed[:limit] + "..."
}
func extractContentText(content json.RawMessage) (string, error) {
trimmed := strings.TrimSpace(string(content))
if trimmed == "" {
@@ -300,3 +339,13 @@ func getDocsPiAgentDir() (string, error) {
}
return dir, nil
}
// resolveDocsPiAgentDir picks the pi agent directory: an explicit
// PI_CODING_AGENT_DIR override wins and is created (owner-only permissions)
// if missing; otherwise the default agent directory is used.
func resolveDocsPiAgentDir() (string, error) {
	override := strings.TrimSpace(os.Getenv("PI_CODING_AGENT_DIR"))
	if override == "" {
		return getDocsPiAgentDir()
	}
	if err := os.MkdirAll(override, 0o700); err != nil {
		return "", err
	}
	return override, nil
}

View File

@@ -0,0 +1,84 @@
package main
import (
"strings"
"testing"
)
// TestExtractTranslationResultIncludesStopReasonAndPreview verifies that an
// assistant message with a terminal stop reason is reported as a normalized
// "pi error" carrying both the stop reason and a flattened preview of the
// assistant's partial text content.
func TestExtractTranslationResultIncludesStopReasonAndPreview(t *testing.T) {
	t.Parallel()
	// agent_end payload whose only assistant message terminated mid-stream
	// but still delivered partial text content.
	raw := []byte(`{
"type":"agent_end",
"messages":[
{
"role":"assistant",
"stopReason":"terminated",
"content":[
{"type":"text","text":"provider disconnected while streaming the translation chunk"}
]
}
]
}`)
	_, err := extractTranslationResult(raw)
	if err == nil {
		t.Fatal("expected error")
	}
	message := err.Error()
	// The error text must surface all three normalized components: the
	// "pi error:" prefix, the stop reason, and the assistant preview.
	for _, want := range []string{
		"pi error:",
		"stopReason=terminated",
		"assistant=provider disconnected while streaming the translation chunk",
	} {
		if !strings.Contains(message, want) {
			t.Fatalf("expected %q in error, got %q", want, message)
		}
	}
}
// TestPreviewPiAssistantTextTruncatesAndFlattensWhitespace checks that the
// preview collapses whitespace runs (newlines/tabs) to single spaces and
// marks over-long output with a "..." suffix.
func TestPreviewPiAssistantTextTruncatesAndFlattensWhitespace(t *testing.T) {
	t.Parallel()
	source := "line one\n\nline two\tline three " + strings.Repeat("x", 200)
	got := previewPiAssistantText(source)
	switch {
	case strings.Contains(got, "\n"):
		t.Fatalf("expected flattened whitespace, got %q", got)
	case !strings.HasPrefix(got, "line one line two line three "):
		t.Fatalf("unexpected preview prefix: %q", got)
	case !strings.HasSuffix(got, "..."):
		t.Fatalf("expected truncation suffix, got %q", got)
	}
}
// TestExtractTranslationResultReturnsPiErrorBeforeDecodingStructuredErrorContent
// verifies that a terminal stop reason short-circuits into a normalized
// "pi error" even when the message content is a structured error object that
// would otherwise fail to decode as a content-part list.
func TestExtractTranslationResultReturnsPiErrorBeforeDecodingStructuredErrorContent(t *testing.T) {
	t.Parallel()
	// "content" is a JSON object, not an array, so decoding it as content
	// parts would fail with an unmarshal error if attempted first.
	raw := []byte(`{
"type":"agent_end",
"messages":[
{
"role":"assistant",
"stopReason":"terminated",
"content":{"type":"error","message":"provider disconnected"}
}
]
}`)
	_, err := extractTranslationResult(raw)
	if err == nil {
		t.Fatal("expected error")
	}
	message := err.Error()
	if !strings.Contains(message, "pi error:") {
		t.Fatalf("expected normalized pi error, got %q", message)
	}
	if !strings.Contains(message, "stopReason=terminated") {
		t.Fatalf("expected stopReason in error, got %q", message)
	}
	// The raw JSON decode failure must not leak into the surfaced error.
	if strings.Contains(message, "cannot unmarshal") {
		t.Fatalf("expected terminal pi error before decode failure, got %q", message)
	}
}

View File

@@ -3,6 +3,7 @@ package main
import (
"context"
"fmt"
"log"
"os"
"path/filepath"
"strings"
@@ -138,17 +139,29 @@ func translateFrontMatter(ctx context.Context, translator docsTranslator, tm *Tr
return nil
}
if summary, ok := data["summary"].(string); ok {
if docsI18nVerboseLogs() {
log.Printf("docs-i18n: frontmatter start %s field=summary bytes=%d", relPath, len(summary))
}
translated, err := translateSnippet(ctx, translator, tm, relPath+":frontmatter:summary", summary, srcLang, tgtLang)
if err != nil {
return err
}
if docsI18nVerboseLogs() {
log.Printf("docs-i18n: frontmatter done %s field=summary out_bytes=%d", relPath, len(translated))
}
data["summary"] = translated
}
if title, ok := data["title"].(string); ok {
if docsI18nVerboseLogs() {
log.Printf("docs-i18n: frontmatter start %s field=title bytes=%d", relPath, len(title))
}
translated, err := translateSnippet(ctx, translator, tm, relPath+":frontmatter:title", title, srcLang, tgtLang)
if err != nil {
return err
}
if docsI18nVerboseLogs() {
log.Printf("docs-i18n: frontmatter done %s field=title out_bytes=%d", relPath, len(translated))
}
data["title"] = translated
}
if readWhen, ok := data["read_when"].([]any); ok {
@@ -159,10 +172,16 @@ func translateFrontMatter(ctx context.Context, translator docsTranslator, tm *Tr
translated = append(translated, item)
continue
}
if docsI18nVerboseLogs() {
log.Printf("docs-i18n: frontmatter start %s field=read_when[%d] bytes=%d", relPath, idx, len(textValue))
}
value, err := translateSnippet(ctx, translator, tm, fmt.Sprintf("%s:frontmatter:read_when:%d", relPath, idx), textValue, srcLang, tgtLang)
if err != nil {
return err
}
if docsI18nVerboseLogs() {
log.Printf("docs-i18n: frontmatter done %s field=read_when[%d] out_bytes=%d", relPath, idx, len(value))
}
translated = append(translated, value)
}
data["read_when"] = translated
@@ -170,6 +189,19 @@ func translateFrontMatter(ctx context.Context, translator docsTranslator, tm *Tr
return nil
}
// docsI18nVerboseLogs reports whether OPENCLAW_DOCS_I18N_VERBOSE_LOGS is set
// to a truthy value; anything else (including unset/blank) stays quiet.
func docsI18nVerboseLogs() bool {
	switch strings.ToLower(strings.TrimSpace(os.Getenv("OPENCLAW_DOCS_I18N_VERBOSE_LOGS"))) {
	case "1", "true", "yes", "on", "debug", "verbose":
		return true
	}
	return false
}
func translateSnippet(ctx context.Context, translator docsTranslator, tm *TranslationMemory, segmentID, textValue, srcLang, tgtLang string) (string, error) {
if strings.TrimSpace(textValue) == "" {
return textValue, nil
@@ -184,6 +216,12 @@ func translateSnippet(ctx context.Context, translator docsTranslator, tm *Transl
if err != nil {
return "", err
}
shouldCache := true
if validationErr := validateFrontmatterScalarTranslation(textValue, translated); validationErr != nil {
log.Printf("docs-i18n: frontmatter fallback %s reason=%v", segmentID, validationErr)
translated = textValue
shouldCache = false
}
entry := TMEntry{
CacheKey: ck,
SegmentID: segmentID,
@@ -197,6 +235,45 @@ func translateSnippet(ctx context.Context, translator docsTranslator, tm *Transl
TgtLang: tgtLang,
UpdatedAt: time.Now().UTC().Format(time.RFC3339),
}
tm.Put(entry)
if shouldCache {
tm.Put(entry)
}
return translated, nil
}
func validateFrontmatterScalarTranslation(source, translated string) error {
trimmed := strings.TrimSpace(translated)
if trimmed == "" {
return fmt.Errorf("empty translation")
}
lower := strings.ToLower(trimmed)
if strings.Contains(lower, "<frontmatter>") || strings.Contains(lower, "</frontmatter>") || strings.Contains(lower, "<body>") || strings.Contains(lower, "</body>") {
return fmt.Errorf("tagged document wrapper detected")
}
if strings.Contains(trimmed, "[[[FM_") {
return fmt.Errorf("frontmatter marker leaked into scalar translation")
}
if strings.Contains(trimmed, "\n---\n") || strings.HasPrefix(trimmed, "---\n") {
return fmt.Errorf("yaml document boundary detected")
}
if !strings.Contains(source, "\n") && strings.Count(trimmed, "\n") >= 3 {
return fmt.Errorf("unexpected multiline expansion")
}
sourceLen := len(strings.TrimSpace(source))
translatedLen := len(trimmed)
if sourceLen > 0 {
limit := sourceLen*8 + 256
if limit < 512 {
limit = 512
}
if translatedLen > limit {
return fmt.Errorf("unexpected size expansion source=%d translated=%d", sourceLen, translatedLen)
}
}
for _, key := range []string{"title:", "summary:", "read_when:"} {
if strings.Contains(lower, "\n"+key) || strings.HasPrefix(lower, key) {
return fmt.Errorf("frontmatter key leaked into scalar translation")
}
}
return nil
}

View File

@@ -19,7 +19,8 @@ const (
var errEmptyTranslation = errors.New("empty translation")
type PiTranslator struct {
client *docsPiClient
client docsPiPromptClient
clientFactory docsPiClientFactory
}
type docsTranslator interface {
@@ -30,15 +31,26 @@ type docsTranslator interface {
type docsTranslatorFactory func(string, string, []GlossaryEntry, string) (docsTranslator, error)
// docsPiPromptClient is the subset of the pi client PiTranslator depends on:
// prompt execution plus the ability to shut the client down so it can be
// replaced after a terminal pi error.
type docsPiPromptClient interface {
	promptRunner
	Close() error
}

// docsPiClientFactory builds a fresh docsPiPromptClient; PiTranslator keeps
// one so it can restart the underlying pi client between retry attempts.
type docsPiClientFactory func(context.Context) (docsPiPromptClient, error)
func NewPiTranslator(srcLang, tgtLang string, glossary []GlossaryEntry, thinking string) (*PiTranslator, error) {
client, err := startDocsPiClient(context.Background(), docsPiClientOptions{
options := docsPiClientOptions{
SystemPrompt: translationPrompt(srcLang, tgtLang, glossary),
Thinking: normalizeThinking(thinking),
})
}
clientFactory := func(ctx context.Context) (docsPiPromptClient, error) {
return startDocsPiClient(ctx, options)
}
client, err := clientFactory(context.Background())
if err != nil {
return nil, err
}
return &PiTranslator{client: client}, nil
return &PiTranslator{client: client, clientFactory: clientFactory}, nil
}
func (t *PiTranslator) Translate(ctx context.Context, text, srcLang, tgtLang string) (string, error) {
@@ -78,6 +90,12 @@ func (t *PiTranslator) translateWithRetry(ctx context.Context, run func(context.
}
lastErr = err
if attempt+1 < translateMaxAttempts {
if shouldRestartPiClientForError(err) {
if err := t.restartClient(ctx); err != nil {
return "", fmt.Errorf("%w (pi client restart failed: %v)", lastErr, err)
}
continue
}
delay := translateBaseDelay * time.Duration(attempt+1)
if err := sleepWithContext(ctx, delay); err != nil {
return "", err
@@ -132,7 +150,41 @@ func isRetryableTranslateError(err error) bool {
if strings.Contains(message, "authentication failed") {
return false
}
return strings.Contains(message, "placeholder missing") || strings.Contains(message, "rate limit") || strings.Contains(message, "429")
return strings.Contains(message, "placeholder missing") ||
strings.Contains(message, "rate limit") ||
strings.Contains(message, "429") ||
shouldRestartPiClientForError(err)
}
func shouldRestartPiClientForError(err error) bool {
if err == nil {
return false
}
message := strings.ToLower(err.Error())
return strings.Contains(message, "pi error: terminated") ||
strings.Contains(message, "stopreason=cancelled") ||
strings.Contains(message, "stopreason=canceled") ||
strings.Contains(message, "stopreason=aborted") ||
strings.Contains(message, "stopreason=terminated") ||
strings.Contains(message, "stopreason=error") ||
strings.Contains(message, "pi process closed") ||
strings.Contains(message, "pi event stream closed")
}
// restartClient tears down the current pi client (if any) and builds a fresh
// one via the stored factory, so a dead pi session can be replaced between
// retry attempts.
func (t *PiTranslator) restartClient(ctx context.Context) error {
	if t.clientFactory == nil {
		return errors.New("pi client restart unavailable")
	}
	if old := t.client; old != nil {
		t.client = nil
		_ = old.Close() // best effort: the underlying process may already be gone
	}
	replacement, err := t.clientFactory(ctx)
	if err != nil {
		return err
	}
	t.client = replacement
	return nil
}
func sleepWithContext(ctx context.Context, delay time.Duration) error {

View File

@@ -23,6 +23,25 @@ func (runner fakePromptRunner) Stderr() string {
return runner.stderr
}
type fakePiPromptClient struct {
prompt func(context.Context, string) (string, error)
stderr string
closed bool
}
func (client *fakePiPromptClient) Prompt(ctx context.Context, message string) (string, error) {
return client.prompt(ctx, message)
}
func (client *fakePiPromptClient) Stderr() string {
return client.stderr
}
func (client *fakePiPromptClient) Close() error {
client.closed = true
return nil
}
func TestRunPromptAddsTimeout(t *testing.T) {
t.Parallel()
@@ -79,6 +98,36 @@ func TestIsRetryableTranslateErrorRejectsAuthenticationFailures(t *testing.T) {
}
}
// A "pi error: terminated" failure must be treated as retryable so the
// translator can restart the session and try again.
func TestIsRetryableTranslateErrorRetriesPiTermination(t *testing.T) {
	t.Parallel()
	terminated := errors.New("pi error: terminated; stopReason=error; assistant=partial output")
	if !isRetryableTranslateError(terminated) {
		t.Fatal("terminated pi session should retry")
	}
}
// A terminated stopReason on its own (without the "pi error: terminated"
// prefix) must also be retryable.
func TestIsRetryableTranslateErrorRetriesTerminatedStopReason(t *testing.T) {
	t.Parallel()
	terminated := errors.New("pi error: stopReason=terminated; assistant=partial output")
	if !isRetryableTranslateError(terminated) {
		t.Fatal("terminated stopReason should retry")
	}
}
// Every cancellation-flavored stopReason spelling must be retryable.
func TestIsRetryableTranslateErrorRetriesCanceledStopReasons(t *testing.T) {
	t.Parallel()
	cases := []string{
		"pi error: stopReason=cancelled; assistant=partial output",
		"pi error: stopReason=canceled; assistant=partial output",
		"pi error: stopReason=aborted; assistant=partial output",
	}
	for _, text := range cases {
		if !isRetryableTranslateError(errors.New(text)) {
			t.Fatalf("expected retryable stop reason for %q", text)
		}
	}
}
func TestRunPromptIncludesStderr(t *testing.T) {
t.Parallel()
@@ -132,6 +181,19 @@ func TestResolveDocsPiCommandUsesOverrideEnv(t *testing.T) {
}
}
// When provider omission is forced via env, the provider must move out of the
// CLI flag and into the model ref as a "provider/model" prefix instead.
func TestDocsPiModelRefUsesProviderPrefixWhenProviderFlagIsOmitted(t *testing.T) {
	t.Setenv(envDocsI18nProvider, "openai")
	t.Setenv(envDocsI18nModel, "gpt-5.4")
	t.Setenv(envDocsPiOmitProvider, "1")
	if arg := docsPiProviderArg(); arg != "" {
		t.Fatalf("expected empty provider arg when omit-provider is enabled, got %q", arg)
	}
	if ref := docsPiModelRef(); ref != "openai/gpt-5.4" {
		t.Fatalf("expected provider-qualified model ref, got %q", ref)
	}
}
func TestShouldMaterializePiRuntimeForPiMonoWrapper(t *testing.T) {
t.Parallel()
@@ -158,3 +220,141 @@ func TestShouldMaterializePiRuntimeForPiMonoWrapper(t *testing.T) {
t.Fatal("expected pi-mono wrapper to materialize runtime")
}
}
// A terminal "pi error" from the first client must tear that client down and
// transparently retry on a freshly built replacement.
func TestPiTranslatorRestartsClientAfterPiTermination(t *testing.T) {
	t.Parallel()
	spawned := 0
	var created []*fakePiPromptClient
	factory := func(context.Context) (docsPiPromptClient, error) {
		spawned++
		// Only the first client fails; every replacement succeeds.
		failing := spawned == 1
		stub := &fakePiPromptClient{
			prompt: func(context.Context, string) (string, error) {
				if failing {
					return "", errors.New("pi error: terminated; stopReason=error; assistant=partial output")
				}
				return "translated", nil
			},
		}
		created = append(created, stub)
		return stub, nil
	}
	initial, err := factory(context.Background())
	if err != nil {
		t.Fatalf("factory failed: %v", err)
	}
	translator := &PiTranslator{client: initial, clientFactory: factory}
	got, err := translator.TranslateRaw(context.Background(), "Translate me", "en", "zh-CN")
	if err != nil {
		t.Fatalf("TranslateRaw returned error: %v", err)
	}
	if got != "translated" {
		t.Fatalf("unexpected translation %q", got)
	}
	if spawned != 2 {
		t.Fatalf("expected factory to run twice, got %d", spawned)
	}
	if len(created) != 2 {
		t.Fatalf("expected 2 clients, got %d", len(created))
	}
	if !created[0].closed {
		t.Fatal("expected first client to close before retry")
	}
	if created[1].closed {
		t.Fatal("expected replacement client to remain open")
	}
}
// A bare terminated stopReason (no "pi error: terminated" prefix) must also
// trigger a client restart followed by a successful retry.
func TestPiTranslatorRestartsClientAfterTerminatedStopReason(t *testing.T) {
	t.Parallel()
	spawned := 0
	var created []*fakePiPromptClient
	factory := func(context.Context) (docsPiPromptClient, error) {
		spawned++
		// Only the first client fails; every replacement succeeds.
		failing := spawned == 1
		stub := &fakePiPromptClient{
			prompt: func(context.Context, string) (string, error) {
				if failing {
					return "", errors.New("pi error: stopReason=terminated; assistant=partial output")
				}
				return "translated", nil
			},
		}
		created = append(created, stub)
		return stub, nil
	}
	initial, err := factory(context.Background())
	if err != nil {
		t.Fatalf("factory failed: %v", err)
	}
	translator := &PiTranslator{client: initial, clientFactory: factory}
	got, err := translator.TranslateRaw(context.Background(), "Translate me", "en", "zh-CN")
	if err != nil {
		t.Fatalf("TranslateRaw returned error: %v", err)
	}
	if got != "translated" {
		t.Fatalf("unexpected translation %q", got)
	}
	if spawned != 2 {
		t.Fatalf("expected factory to run twice, got %d", spawned)
	}
	if len(created) != 2 {
		t.Fatalf("expected 2 clients, got %d", len(created))
	}
	if !created[0].closed {
		t.Fatal("expected first client to close before retry")
	}
	if created[1].closed {
		t.Fatal("expected replacement client to remain open")
	}
}
// An aborted/canceled stopReason must likewise restart the client and retry.
func TestPiTranslatorRestartsClientAfterCanceledStopReason(t *testing.T) {
	t.Parallel()
	spawned := 0
	var created []*fakePiPromptClient
	factory := func(context.Context) (docsPiPromptClient, error) {
		spawned++
		// Only the first client fails; every replacement succeeds.
		failing := spawned == 1
		stub := &fakePiPromptClient{
			prompt: func(context.Context, string) (string, error) {
				if failing {
					return "", errors.New("pi error: stopReason=aborted; assistant=partial output")
				}
				return "translated", nil
			},
		}
		created = append(created, stub)
		return stub, nil
	}
	initial, err := factory(context.Background())
	if err != nil {
		t.Fatalf("factory failed: %v", err)
	}
	translator := &PiTranslator{client: initial, clientFactory: factory}
	got, err := translator.TranslateRaw(context.Background(), "Translate me", "en", "zh-CN")
	if err != nil {
		t.Fatalf("TranslateRaw returned error: %v", err)
	}
	if got != "translated" {
		t.Fatalf("unexpected translation %q", got)
	}
	if spawned != 2 {
		t.Fatalf("expected factory to run twice, got %d", spawned)
	}
	if !created[0].closed {
		t.Fatal("expected first client to close before retry")
	}
	if created[1].closed {
		t.Fatal("expected replacement client to remain open")
	}
}

View File

@@ -78,6 +78,64 @@ func docsPiModel() string {
}
}
// docsPiProviderArg returns the provider to pass as an explicit CLI argument,
// or "" when the provider flag should be omitted: no provider configured,
// omission forced via env, an already provider-qualified model ref, a custom
// agent dir override, or a provider that pi does not ship built-in.
func docsPiProviderArg() string {
	provider := docsPiProvider()
	switch {
	case provider == "",
		docsPiOmitProvider(),
		strings.Contains(docsPiModel(), "/"),
		hasDocsPiAgentDirOverride(),
		!isBuiltInPiProvider(provider):
		return ""
	}
	return provider
}
// docsPiModelRef returns the model reference to hand to pi: empty when no
// model is configured, pass-through when already provider-qualified, and
// "provider/model" whenever the provider does not travel as its own CLI flag.
func docsPiModelRef() string {
	model := docsPiModel()
	if model == "" {
		return ""
	}
	// Already provider-qualified; use as-is.
	if strings.Contains(model, "/") {
		return model
	}
	qualified := func() string {
		provider := docsPiProvider()
		if provider == "" {
			return model
		}
		return provider + "/" + model
	}
	if docsPiOmitProvider() {
		return qualified()
	}
	// When the provider is passed as its own CLI flag, the model stays bare.
	if docsPiProviderArg() != "" {
		return model
	}
	return qualified()
}
// isBuiltInPiProvider reports whether pi ships native support for the given
// provider name; comparison is case-insensitive and ignores surrounding
// whitespace.
func isBuiltInPiProvider(provider string) bool {
	normalized := strings.ToLower(strings.TrimSpace(provider))
	return normalized == "anthropic" || normalized == "openai"
}
// hasDocsPiAgentDirOverride reports whether PI_CODING_AGENT_DIR is set to a
// non-blank value, i.e. the caller pinned a custom pi agent directory.
func hasDocsPiAgentDirOverride() bool {
	override := os.Getenv("PI_CODING_AGENT_DIR")
	return strings.TrimSpace(override) != ""
}
func segmentID(relPath, textHash string) string {
shortHash := textHash
if len(shortHash) > 16 {