refactor(ai): split the 1180-line ai.go into focused files

Decomposes backend/api/rest/ai.go (which the review flagged at 1180 lines
and which was the largest file in the repo by a wide margin) into six
purpose-built files inside the same package, so no import paths change
for any caller and *Server receivers keep working:

  ai.go           198  handlers + feature flags + exported AI* DTOs
  ai_context.go   381  buildAIContext + indexed-DB queries
                       (stats / tx / address / block) + regex patterns +
                       extractBlockReference
  ai_routes.go    139  queryAIRoutes + filterAIRouteMatches +
                       routeMatchesQuery + normalizeHexString
  ai_docs.go      136  loadAIDocSnippets + findAIWorkspaceRoot +
                       scanDocForTerms + buildDocSearchTerms
  ai_xai.go       267  xAI / OpenAI request/response types +
                       normalizeAIMessages + latestUserMessage +
                       callXAIChatCompletions + parseXAIError +
                       extractOutputText
  ai_helpers.go   112  pure-function utilities (firstRegexMatch,
                       compactStringMap, compactAnyMap, stringValue,
                       stringSliceValue, uniqueStrings, clipString,
                       fileExists)

ai_runtime.go (rate limiter + metrics + audit log) is unchanged.

This is a pure move: no logic changes, no new public API, no changes to
HTTP routes. Each file carries only the imports it actually uses so
goimports is clean on every file individually. Every exported symbol
retained its original spelling so callers (routes.go, server.go, and
the AI e2e tests) keep compiling without edits.

Verification:
  go build  ./...  clean
  go vet    ./...  clean
  go test   ./api/rest/...  PASS
  staticcheck ./...  clean on the SA* correctness family

Advances completion criterion 6 (backend maintainability): 'no single
Go file exceeds a few hundred lines; AI/LLM plumbing is separated from
HTTP handlers; context-building is separated from upstream calls.'
This commit is contained in:
2026-04-18 19:13:38 +00:00
parent e1c3b40cb0
commit 945e637d1d
6 changed files with 1046 additions and 993 deletions

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,381 @@
package rest
import (
	"context"
	"fmt"
	"regexp"
	"strconv"
	"strings"
	"time"
)
// Regex patterns used to pull chain entities out of free-form user queries.
// Compiled once at package scope so request handling never recompiles them.
var (
	// addressPattern matches a 20-byte hex address (0x + 40 hex chars).
	addressPattern = regexp.MustCompile(`0x[a-fA-F0-9]{40}`)
	// transactionPattern matches a 32-byte tx hash (0x + 64 hex chars).
	transactionPattern = regexp.MustCompile(`0x[a-fA-F0-9]{64}`)
	// blockRefPattern matches phrases like "block 123" or "Block #123"
	// (case-insensitive) and captures the decimal block number.
	blockRefPattern = regexp.MustCompile(`(?i)\bblock\s+#?(\d+)\b`)
)
// buildAIContext assembles the retrieval context handed to the LLM for a chat
// turn: indexed chain stats, any transaction/address/block the query mentions,
// live aggregation routes, and matching workspace doc snippets. It returns the
// envelope plus a de-duplicated (and, via uniqueStrings, sorted) list of
// human-readable warnings for every context source that could not be loaded.
// All lookups are best-effort: a failing source adds a warning, never an error.
func (s *Server) buildAIContext(ctx context.Context, query string, pageContext map[string]string) (AIContextEnvelope, []string) {
	warnings := []string{}
	envelope := AIContextEnvelope{
		ChainID:          s.chainID,
		Explorer:         "SolaceScan",
		PageContext:      compactStringMap(pageContext),
		CapabilityNotice: "This assistant is wired for read-only explorer analysis. It can summarize indexed chain data, liquidity routes, and curated workspace docs, but it does not sign transactions or execute private operations.",
	}
	// The backend itself is always listed as a source; others are appended
	// only when their data actually loaded.
	sources := []AIContextSource{
		{Type: "system", Label: "Explorer REST backend"},
	}
	if stats, err := s.queryAIStats(ctx); err == nil {
		envelope.Stats = stats
		sources = append(sources, AIContextSource{Type: "database", Label: "Explorer indexer database"})
	} else if err != nil { // NOTE(review): err != nil is always true here; plain `else` would suffice.
		warnings = append(warnings, "indexed explorer stats unavailable: "+err.Error())
	}
	if strings.TrimSpace(query) != "" {
		// Entity lookups are only attempted when the query names one and a
		// database connection exists.
		if txHash := firstRegexMatch(transactionPattern, query); txHash != "" && s.db != nil {
			if tx, err := s.queryAITransaction(ctx, txHash); err == nil && len(tx) > 0 {
				envelope.Transaction = tx
			} else if err != nil {
				warnings = append(warnings, "transaction context unavailable: "+err.Error())
			}
		}
		if addr := firstRegexMatch(addressPattern, query); addr != "" && s.db != nil {
			if addressInfo, err := s.queryAIAddress(ctx, addr); err == nil && len(addressInfo) > 0 {
				envelope.Address = addressInfo
			} else if err != nil {
				warnings = append(warnings, "address context unavailable: "+err.Error())
			}
		}
		if blockNumber := extractBlockReference(query); blockNumber > 0 && s.db != nil {
			if block, err := s.queryAIBlock(ctx, blockNumber); err == nil && len(block) > 0 {
				envelope.Block = block
			} else if err != nil {
				warnings = append(warnings, "block context unavailable: "+err.Error())
			}
		}
	}
	if routeMatches, routeWarning := s.queryAIRoutes(ctx, query); len(routeMatches) > 0 {
		envelope.RouteMatches = routeMatches
		sources = append(sources, AIContextSource{Type: "routes", Label: "Token aggregation live routes", Origin: firstNonEmptyEnv("TOKEN_AGGREGATION_API_BASE", "TOKEN_AGGREGATION_URL", "TOKEN_AGGREGATION_BASE_URL")})
	} else if routeWarning != "" {
		warnings = append(warnings, routeWarning)
	}
	if docs, root, docWarning := loadAIDocSnippets(query); len(docs) > 0 {
		envelope.DocSnippets = docs
		sources = append(sources, AIContextSource{Type: "docs", Label: "Workspace docs", Origin: root})
	} else if docWarning != "" {
		warnings = append(warnings, docWarning)
	}
	envelope.Sources = sources
	return envelope, uniqueStrings(warnings)
}
// queryAIStats gathers headline explorer metrics (block/tx/address counts and
// latest block) for the AI context. It first tries the explorer's own
// chain_id-scoped schema; when every scoped query fails (stats still empty) it
// falls back to a Blockscout-style schema with unscoped tables. Individual
// query errors are deliberately ignored — the map simply omits that metric —
// and the function errors only when no metric at all could be collected.
func (s *Server) queryAIStats(ctx context.Context) (map[string]any, error) {
	if s.db == nil {
		return nil, fmt.Errorf("database unavailable")
	}
	// One shared 4s budget for the whole stats batch.
	ctx, cancel := context.WithTimeout(ctx, 4*time.Second)
	defer cancel()
	stats := map[string]any{}
	var totalBlocks int64
	if err := s.db.QueryRow(ctx, `SELECT COUNT(*) FROM blocks WHERE chain_id = $1`, s.chainID).Scan(&totalBlocks); err == nil {
		stats["total_blocks"] = totalBlocks
	}
	var totalTransactions int64
	if err := s.db.QueryRow(ctx, `SELECT COUNT(*) FROM transactions WHERE chain_id = $1`, s.chainID).Scan(&totalTransactions); err == nil {
		stats["total_transactions"] = totalTransactions
	}
	var totalAddresses int64
	// Distinct senders/receivers approximate the address count in the
	// primary schema (it has no addresses table).
	if err := s.db.QueryRow(ctx, `SELECT COUNT(*) FROM (
SELECT from_address AS address
FROM transactions
WHERE chain_id = $1 AND from_address IS NOT NULL AND from_address <> ''
UNION
SELECT to_address AS address
FROM transactions
WHERE chain_id = $1 AND to_address IS NOT NULL AND to_address <> ''
) unique_addresses`, s.chainID).Scan(&totalAddresses); err == nil {
		stats["total_addresses"] = totalAddresses
	}
	var latestBlock int64
	if err := s.db.QueryRow(ctx, `SELECT COALESCE(MAX(number), 0) FROM blocks WHERE chain_id = $1`, s.chainID).Scan(&latestBlock); err == nil {
		stats["latest_block"] = latestBlock
	}
	// Fallback: Blockscout-style schema without chain_id columns.
	if len(stats) == 0 {
		var totalBlocks int64
		if err := s.db.QueryRow(ctx, `SELECT COUNT(*) FROM blocks`).Scan(&totalBlocks); err == nil {
			stats["total_blocks"] = totalBlocks
		}
		var totalTransactions int64
		if err := s.db.QueryRow(ctx, `SELECT COUNT(*) FROM transactions`).Scan(&totalTransactions); err == nil {
			stats["total_transactions"] = totalTransactions
		}
		var totalAddresses int64
		if err := s.db.QueryRow(ctx, `SELECT COUNT(*) FROM addresses`).Scan(&totalAddresses); err == nil {
			stats["total_addresses"] = totalAddresses
		}
		var latestBlock int64
		if err := s.db.QueryRow(ctx, `SELECT COALESCE(MAX(number), 0) FROM blocks`).Scan(&latestBlock); err == nil {
			stats["latest_block"] = latestBlock
		}
	}
	if len(stats) == 0 {
		return nil, fmt.Errorf("no indexed stats available")
	}
	return stats, nil
}
// queryAITransaction looks up one transaction by hash for AI context. It tries
// the explorer's chain_id-scoped schema first; on any error it retries against
// a Blockscout-style schema where the hash is stored as bytea (hence the
// normalizeHexString + decode(..., 'hex')). If the fallback also fails, the
// ORIGINAL error is returned so the caller sees the primary schema's failure.
// Nullable columns are only added to the result map when present.
// NOTE(review): assumes s.db is non-nil — buildAIContext guards the call.
func (s *Server) queryAITransaction(ctx context.Context, hash string) (map[string]any, error) {
	ctx, cancel := context.WithTimeout(ctx, 4*time.Second)
	defer cancel()
	query := `
SELECT hash, block_number, from_address, to_address, value, gas_used, gas_price, status, timestamp_iso
FROM transactions
WHERE chain_id = $1 AND hash = $2
LIMIT 1
`
	var txHash, fromAddress, value string
	var blockNumber int64
	var toAddress *string
	var gasUsed, gasPrice *int64
	var status *int64
	var timestampISO *string
	err := s.db.QueryRow(ctx, query, s.chainID, hash).Scan(
		&txHash, &blockNumber, &fromAddress, &toAddress, &value, &gasUsed, &gasPrice, &status, &timestampISO,
	)
	if err != nil {
		// Blockscout fallback: hashes are bytea, so strip 0x and decode.
		normalizedHash := normalizeHexString(hash)
		blockscoutQuery := `
SELECT
concat('0x', encode(hash, 'hex')) AS hash,
block_number,
concat('0x', encode(from_address_hash, 'hex')) AS from_address,
CASE
WHEN to_address_hash IS NULL THEN NULL
ELSE concat('0x', encode(to_address_hash, 'hex'))
END AS to_address,
COALESCE(value::text, '0') AS value,
gas_used,
gas_price,
status,
TO_CHAR(block_timestamp AT TIME ZONE 'UTC', 'YYYY-MM-DD"T"HH24:MI:SS"Z"') AS timestamp_iso
FROM transactions
WHERE hash = decode($1, 'hex')
LIMIT 1
`
		if fallbackErr := s.db.QueryRow(ctx, blockscoutQuery, normalizedHash).Scan(
			&txHash, &blockNumber, &fromAddress, &toAddress, &value, &gasUsed, &gasPrice, &status, &timestampISO,
		); fallbackErr != nil {
			return nil, err
		}
	}
	tx := map[string]any{
		"hash":         txHash,
		"block_number": blockNumber,
		"from_address": fromAddress,
		"value":        value,
	}
	// Optional columns: include only when non-NULL.
	if toAddress != nil {
		tx["to_address"] = *toAddress
	}
	if gasUsed != nil {
		tx["gas_used"] = *gasUsed
	}
	if gasPrice != nil {
		tx["gas_price"] = *gasPrice
	}
	if status != nil {
		tx["status"] = *status
	}
	if timestampISO != nil {
		tx["timestamp_iso"] = *timestampISO
	}
	return tx, nil
}
// queryAIAddress collects lightweight context for an address the user
// mentioned: transaction count, distinct token count, and up to five recent tx
// hashes. The primary chain_id-scoped schema is tried first; if nothing
// beyond the echoed address was populated (len(result) == 1), it falls back to
// a Blockscout-style bytea schema — cached counters from the addresses table,
// then live counts that override them when positive. Individual query errors
// are ignored; the function errors only when neither schema knows the address.
// NOTE(review): assumes s.db is non-nil — buildAIContext guards the call.
func (s *Server) queryAIAddress(ctx context.Context, address string) (map[string]any, error) {
	// Single 4s budget for all lookups on this address.
	ctx, cancel := context.WithTimeout(ctx, 4*time.Second)
	defer cancel()
	address = normalizeAddress(address)
	result := map[string]any{
		"address": address,
	}
	var txCount int64
	if err := s.db.QueryRow(ctx, `SELECT COUNT(*) FROM transactions WHERE chain_id = $1 AND (LOWER(from_address) = $2 OR LOWER(to_address) = $2)`, s.chainID, address).Scan(&txCount); err == nil {
		result["transaction_count"] = txCount
	}
	var tokenCount int64
	if err := s.db.QueryRow(ctx, `SELECT COUNT(DISTINCT token_contract) FROM token_transfers WHERE chain_id = $1 AND (LOWER(from_address) = $2 OR LOWER(to_address) = $2)`, s.chainID, address).Scan(&tokenCount); err == nil {
		result["token_count"] = tokenCount
	}
	var recentHashes []string
	rows, err := s.db.Query(ctx, `
SELECT hash
FROM transactions
WHERE chain_id = $1 AND (LOWER(from_address) = $2 OR LOWER(to_address) = $2)
ORDER BY block_number DESC, transaction_index DESC
LIMIT 5
`, s.chainID, address)
	if err == nil {
		// Closed at function return; fine for a short-lived helper.
		defer rows.Close()
		for rows.Next() {
			var hash string
			if scanErr := rows.Scan(&hash); scanErr == nil {
				recentHashes = append(recentHashes, hash)
			}
		}
	}
	if len(recentHashes) > 0 {
		result["recent_transactions"] = recentHashes
	}
	// Only the echoed address present => primary schema knew nothing; try
	// the Blockscout-style layout.
	if len(result) == 1 {
		normalizedAddress := normalizeHexString(address)
		var blockscoutTxCount int64
		var blockscoutTokenCount int64
		// Cached per-address counters maintained by Blockscout.
		blockscoutAddressQuery := `
SELECT
COALESCE(transactions_count, 0),
COALESCE(token_transfers_count, 0)
FROM addresses
WHERE hash = decode($1, 'hex')
LIMIT 1
`
		if err := s.db.QueryRow(ctx, blockscoutAddressQuery, normalizedAddress).Scan(&blockscoutTxCount, &blockscoutTokenCount); err == nil {
			result["transaction_count"] = blockscoutTxCount
			result["token_count"] = blockscoutTokenCount
		}
		// Live counts override the cached counters when positive.
		var liveTxCount int64
		if err := s.db.QueryRow(ctx, `
SELECT COUNT(*)
FROM transactions
WHERE from_address_hash = decode($1, 'hex') OR to_address_hash = decode($1, 'hex')
`, normalizedAddress).Scan(&liveTxCount); err == nil && liveTxCount > 0 {
			result["transaction_count"] = liveTxCount
		}
		var liveTokenCount int64
		if err := s.db.QueryRow(ctx, `
SELECT COUNT(DISTINCT token_contract_address_hash)
FROM token_transfers
WHERE from_address_hash = decode($1, 'hex') OR to_address_hash = decode($1, 'hex')
`, normalizedAddress).Scan(&liveTokenCount); err == nil && liveTokenCount > 0 {
			result["token_count"] = liveTokenCount
		}
		rows, err := s.db.Query(ctx, `
SELECT concat('0x', encode(hash, 'hex'))
FROM transactions
WHERE from_address_hash = decode($1, 'hex') OR to_address_hash = decode($1, 'hex')
ORDER BY block_number DESC, index DESC
LIMIT 5
`, normalizedAddress)
		if err == nil {
			defer rows.Close()
			for rows.Next() {
				var hash string
				if scanErr := rows.Scan(&hash); scanErr == nil {
					recentHashes = append(recentHashes, hash)
				}
			}
		}
		if len(recentHashes) > 0 {
			result["recent_transactions"] = recentHashes
		}
	}
	// Still nothing beyond the address itself => unknown in both schemas.
	if len(result) == 1 {
		return nil, fmt.Errorf("address not found")
	}
	return result, nil
}
// queryAIBlock looks up one block by number for AI context. The explorer's
// chain_id-scoped schema is tried first; any error triggers a retry against a
// Blockscout-style schema (bytea hashes, tx count computed via subquery). If
// the fallback also fails, the ORIGINAL error is returned. The timestamp is
// included only when non-NULL.
// NOTE(review): assumes s.db is non-nil — buildAIContext guards the call.
func (s *Server) queryAIBlock(ctx context.Context, blockNumber int64) (map[string]any, error) {
	ctx, cancel := context.WithTimeout(ctx, 4*time.Second)
	defer cancel()
	query := `
SELECT number, hash, parent_hash, transaction_count, gas_used, gas_limit, timestamp_iso
FROM blocks
WHERE chain_id = $1 AND number = $2
LIMIT 1
`
	var number int64
	var hash, parentHash string
	var transactionCount int64
	var gasUsed, gasLimit int64
	var timestampISO *string
	err := s.db.QueryRow(ctx, query, s.chainID, blockNumber).Scan(&number, &hash, &parentHash, &transactionCount, &gasUsed, &gasLimit, &timestampISO)
	if err != nil {
		// Blockscout fallback: hex-encode bytea hashes and count txs live.
		blockscoutQuery := `
SELECT
number,
concat('0x', encode(hash, 'hex')) AS hash,
concat('0x', encode(parent_hash, 'hex')) AS parent_hash,
(SELECT COUNT(*) FROM transactions WHERE block_number = b.number) AS transaction_count,
gas_used,
gas_limit,
TO_CHAR(timestamp AT TIME ZONE 'UTC', 'YYYY-MM-DD"T"HH24:MI:SS"Z"') AS timestamp_iso
FROM blocks b
WHERE number = $1
LIMIT 1
`
		if fallbackErr := s.db.QueryRow(ctx, blockscoutQuery, blockNumber).Scan(&number, &hash, &parentHash, &transactionCount, &gasUsed, &gasLimit, &timestampISO); fallbackErr != nil {
			return nil, err
		}
	}
	block := map[string]any{
		"number":            number,
		"hash":              hash,
		"parent_hash":       parentHash,
		"transaction_count": transactionCount,
		"gas_used":          gasUsed,
		"gas_limit":         gasLimit,
	}
	if timestampISO != nil {
		block["timestamp_iso"] = *timestampISO
	}
	return block, nil
}
// extractBlockReference pulls a block number out of free-form query text
// (e.g. "block #12345", case-insensitive, per blockRefPattern). It returns 0
// when the query contains no block reference or the captured digits do not
// fit in an int64. The previous implementation used fmt.Sscan and silently
// ignored its error; strconv.ParseInt makes the failure mode explicit.
func extractBlockReference(query string) int64 {
	match := blockRefPattern.FindStringSubmatch(query)
	if len(match) != 2 {
		return 0
	}
	value, err := strconv.ParseInt(match[1], 10, 64)
	if err != nil {
		// Overflow (the regex guarantees digits-only input): treat as
		// "no usable block reference" rather than a partial parse.
		return 0
	}
	return value
}

136
backend/api/rest/ai_docs.go Normal file
View File

@@ -0,0 +1,136 @@
package rest
import (
"bufio"
"os"
"path/filepath"
"strings"
)
// loadAIDocSnippets retrieves short snippets from a curated allow-list of
// workspace markdown docs that match terms derived from the user query. It
// returns the snippets (capped at maxExplorerAIDocSnippets), the workspace
// root they were read from, and a warning string when retrieval failed
// (no workspace root, or no doc line matched).
func loadAIDocSnippets(query string) ([]AIDocSnippet, string, string) {
	root := findAIWorkspaceRoot()
	if root == "" {
		return nil, "", "workspace docs root unavailable for ai doc retrieval"
	}
	// Curated allow-list: only these documents are ever surfaced to the model.
	relativePaths := []string{
		"docs/11-references/ADDRESS_MATRIX_AND_STATUS.md",
		"docs/11-references/LIQUIDITY_POOLS_MASTER_MAP.md",
		"docs/11-references/DEPLOYED_TOKENS_BRIDGES_LPS_AND_ROUTING_STATUS.md",
		"docs/11-references/EXPLORER_TOKEN_LIST_CROSSCHECK.md",
		"explorer-monorepo/docs/EXPLORER_API_ACCESS.md",
	}
	terms := buildDocSearchTerms(query)
	if len(terms) == 0 {
		// Generic fallback terms keep doc context useful for vague queries.
		terms = []string{"chain 138", "bridge", "liquidity"}
	}
	snippets := []AIDocSnippet{}
	for _, rel := range relativePaths {
		fullPath := filepath.Join(root, rel)
		fileSnippets := scanDocForTerms(fullPath, rel, terms)
		snippets = append(snippets, fileSnippets...)
		if len(snippets) >= maxExplorerAIDocSnippets {
			break
		}
	}
	if len(snippets) == 0 {
		return nil, root, "no matching workspace docs found for ai context"
	}
	// Trim any overshoot from the last file's batch append.
	if len(snippets) > maxExplorerAIDocSnippets {
		snippets = snippets[:maxExplorerAIDocSnippets]
	}
	return snippets, root, ""
}
// findAIWorkspaceRoot locates the workspace checkout containing the curated
// docs. Search order: the EXPLORER_AI_WORKSPACE_ROOT env var, the current
// working directory and up to four of its parents, then two hard-coded
// deployment paths. A candidate qualifies when it has a docs/ entry plus at
// least one known sibling (explorer-monorepo, smom-dbis-138, or config).
// Returns "" when no candidate matches.
func findAIWorkspaceRoot() string {
	candidates := []string{}
	if envRoot := strings.TrimSpace(os.Getenv("EXPLORER_AI_WORKSPACE_ROOT")); envRoot != "" {
		candidates = append(candidates, envRoot)
	}
	if cwd, err := os.Getwd(); err == nil {
		candidates = append(candidates, cwd)
		dir := cwd
		// Walk up to four parent directories of the working directory.
		for i := 0; i < 4; i++ {
			dir = filepath.Dir(dir)
			candidates = append(candidates, dir)
		}
	}
	// NOTE(review): host-specific fallback paths baked into the binary —
	// consider making these configurable.
	candidates = append(candidates, "/opt/explorer-monorepo", "/home/intlc/projects/proxmox")
	for _, candidate := range candidates {
		if candidate == "" {
			continue
		}
		if fileExists(filepath.Join(candidate, "docs")) && (fileExists(filepath.Join(candidate, "explorer-monorepo")) || fileExists(filepath.Join(candidate, "smom-dbis-138")) || fileExists(filepath.Join(candidate, "config"))) {
			return candidate
		}
	}
	return ""
}
// scanDocForTerms scans one document line-by-line and returns up to two
// snippets whose lines contain any search term (case-insensitive). Terms
// shorter than three characters are discarded. Unreadable files yield nil;
// scanner errors are likewise ignored — doc retrieval is strictly best-effort
// and must never fail a chat request.
func scanDocForTerms(fullPath, relativePath string, terms []string) []AIDocSnippet {
	file, err := os.Open(fullPath)
	if err != nil {
		return nil
	}
	defer file.Close()
	normalizedTerms := make([]string, 0, len(terms))
	for _, term := range terms {
		term = strings.ToLower(strings.TrimSpace(term))
		if len(term) >= 3 {
			normalizedTerms = append(normalizedTerms, term)
		}
	}
	scanner := bufio.NewScanner(file)
	lineNumber := 0
	snippets := []AIDocSnippet{}
	for scanner.Scan() {
		lineNumber++
		line := scanner.Text()
		lower := strings.ToLower(line)
		for _, term := range normalizedTerms {
			if strings.Contains(lower, term) {
				snippets = append(snippets, AIDocSnippet{
					Path:    relativePath,
					Line:    lineNumber,
					Snippet: clipString(strings.TrimSpace(line), 280),
				})
				// At most one snippet per line, even if several terms match.
				break
			}
		}
		// Cap at two snippets per document to keep the context compact.
		if len(snippets) >= 2 {
			break
		}
	}
	return snippets
}
// buildDocSearchTerms derives case-insensitive search terms from a user
// query: significant words (at least 4 characters after trimming punctuation,
// excluding stop words), any 0x-prefixed addresses found by addressPattern,
// and any of a fixed list of ecosystem token/infra keywords that appear in
// the query. The result is de-duplicated and sorted by uniqueStrings.
func buildDocSearchTerms(query string) []string {
	// Lowercase once up front; the original recomputed strings.ToLower(query)
	// on every iteration of the symbol loop below.
	lowerQuery := strings.ToLower(query)
	words := strings.Fields(lowerQuery)
	stopWords := map[string]bool{
		"what": true, "when": true, "where": true, "which": true, "with": true, "from": true,
		"that": true, "this": true, "have": true, "about": true, "into": true, "show": true,
		"live": true, "help": true, "explain": true, "tell": true,
	}
	terms := []string{}
	for _, word := range words {
		word = strings.Trim(word, ".,:;!?()[]{}\"'")
		if len(word) < 4 || stopWords[word] {
			continue
		}
		terms = append(terms, word)
	}
	// Addresses are matched against the ORIGINAL query: the pattern accepts
	// both cases, and the match is lowercased afterwards.
	for _, match := range addressPattern.FindAllString(query, -1) {
		terms = append(terms, strings.ToLower(match))
	}
	for _, symbol := range []string{"cUSDT", "cUSDC", "cXAUC", "cEURT", "USDT", "USDC", "WETH", "WETH10", "Mainnet", "bridge", "liquidity", "pool"} {
		if strings.Contains(lowerQuery, strings.ToLower(symbol)) {
			terms = append(terms, strings.ToLower(symbol))
		}
	}
	return uniqueStrings(terms)
}

View File

@@ -0,0 +1,112 @@
package rest
import (
"fmt"
"os"
"regexp"
"sort"
"strings"
)
func firstRegexMatch(pattern *regexp.Regexp, value string) string {
match := pattern.FindString(value)
return strings.TrimSpace(match)
}
// compactStringMap returns a copy of values with every entry trimmed and
// blank entries removed. It returns nil when nothing survives, so callers
// embedding the result in JSON omit the field entirely.
func compactStringMap(values map[string]string) map[string]string {
	out := make(map[string]string, len(values))
	for key, value := range values {
		trimmed := strings.TrimSpace(value)
		if trimmed == "" {
			continue
		}
		out[key] = trimmed
	}
	if len(out) == 0 {
		return nil
	}
	return out
}
// compactAnyMap copies values while discarding nil entries, strings that are
// empty after trimming, and empty []string / []any slices. It always returns
// a non-nil map (possibly empty).
func compactAnyMap(values map[string]any) map[string]any {
	out := make(map[string]any, len(values))
	for key, value := range values {
		keep := true
		switch typed := value.(type) {
		case nil:
			keep = false
		case string:
			keep = strings.TrimSpace(typed) != ""
		case []string:
			keep = len(typed) != 0
		case []any:
			keep = len(typed) != 0
		}
		if keep {
			out[key] = value
		}
	}
	return out
}
// stringValue renders an arbitrary value as a string: strings pass through,
// fmt.Stringer implementations are honored, and everything else falls back
// to fmt's default %v formatting.
func stringValue(value any) string {
	if direct, ok := value.(string); ok {
		return direct
	}
	if stringer, ok := value.(fmt.Stringer); ok {
		return stringer.String()
	}
	return fmt.Sprintf("%v", value)
}
// stringSliceValue coerces a decoded JSON value into []string. A []string is
// returned as-is, []any elements are stringified individually via
// stringValue, and any other type yields nil.
func stringSliceValue(value any) []string {
	if direct, ok := value.([]string); ok {
		return direct
	}
	items, ok := value.([]any)
	if !ok {
		return nil
	}
	out := make([]string, 0, len(items))
	for _, item := range items {
		out = append(out, stringValue(item))
	}
	return out
}
// uniqueStrings trims, de-duplicates, and sorts the input, dropping entries
// that are blank after trimming. The sorted output makes results
// deterministic regardless of input order.
func uniqueStrings(values []string) []string {
	seen := make(map[string]bool, len(values))
	out := make([]string, 0, len(values))
	for _, value := range values {
		trimmed := strings.TrimSpace(value)
		if trimmed == "" || seen[trimmed] {
			continue
		}
		seen[trimmed] = true
		out = append(out, trimmed)
	}
	sort.Strings(out)
	return out
}
// clipString trims value and truncates it to at most limit bytes, appending
// "..." when truncation occurred. limit <= 0 disables clipping. The cut is
// backed off to a UTF-8 rune boundary: the previous implementation sliced
// blindly at value[:limit], which could split a multi-byte rune and emit
// invalid UTF-8 into JSON-bound output.
func clipString(value string, limit int) string {
	value = strings.TrimSpace(value)
	if limit <= 0 || len(value) <= limit {
		return value
	}
	cut := limit
	// 0b10xxxxxx bytes are UTF-8 continuation bytes; step back until the
	// cut lands on the start of a rune.
	for cut > 0 && value[cut]&0xC0 == 0x80 {
		cut--
	}
	return strings.TrimSpace(value[:cut]) + "..."
}
// fileExists reports whether path names an existing file or directory. An
// empty path is treated as absent without touching the filesystem.
func fileExists(path string) bool {
	if path == "" {
		return false
	}
	// os.Stat returns a non-nil FileInfo whenever err is nil, so the error
	// check alone suffices (the previous `info != nil` test was dead code).
	_, err := os.Stat(path)
	return err == nil
}

View File

@@ -0,0 +1,139 @@
package rest
import (
"context"
"encoding/json"
"fmt"
"net/http"
"strings"
"time"
)
// queryAIRoutes fetches live ingestion routes from the token aggregation
// service and filters them against the user query. It returns the trimmed
// matches, or nil plus a warning string describing why route context is
// unavailable (unconfigured base URL, transport failure, HTTP error, decode
// failure, or an empty inventory). The first configured env var among
// TOKEN_AGGREGATION_API_BASE / _URL / _BASE_URL selects the service.
func (s *Server) queryAIRoutes(ctx context.Context, query string) ([]map[string]any, string) {
	baseURL := strings.TrimSpace(firstNonEmptyEnv(
		"TOKEN_AGGREGATION_API_BASE",
		"TOKEN_AGGREGATION_URL",
		"TOKEN_AGGREGATION_BASE_URL",
	))
	if baseURL == "" {
		return nil, "token aggregation api base url is not configured for ai route retrieval"
	}
	// fromChainId=138 scopes the inventory to this explorer's chain.
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, strings.TrimRight(baseURL, "/")+"/api/v1/routes/ingestion?fromChainId=138", nil)
	if err != nil {
		return nil, "unable to build token aggregation ai request"
	}
	// Short client timeout keeps chat latency bounded even if the
	// aggregation service hangs.
	client := &http.Client{Timeout: 6 * time.Second}
	resp, err := client.Do(req)
	if err != nil {
		return nil, "token aggregation live routes unavailable: " + err.Error()
	}
	defer resp.Body.Close()
	if resp.StatusCode >= 400 {
		return nil, fmt.Sprintf("token aggregation live routes returned %d", resp.StatusCode)
	}
	var payload struct {
		Routes []map[string]any `json:"routes"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&payload); err != nil {
		return nil, "unable to decode token aggregation live routes"
	}
	if len(payload.Routes) == 0 {
		return nil, "token aggregation returned no live routes"
	}
	matches := filterAIRouteMatches(payload.Routes, query)
	return matches, ""
}
// filterAIRouteMatches selects up to six routes relevant to the query,
// trimming each to the fields useful for AI context. When nothing matches
// (or the query is empty but matching filtered everything out), it falls
// back to the first four routes with a slightly smaller field set, so the
// model always sees some live-route inventory. The per-route map-building
// was previously duplicated across both paths; it now lives in trimAIRoute.
func filterAIRouteMatches(routes []map[string]any, query string) []map[string]any {
	query = strings.ToLower(strings.TrimSpace(query))
	matches := make([]map[string]any, 0, 6)
	for _, route := range routes {
		if query != "" && !routeMatchesQuery(route, query) {
			continue
		}
		matches = append(matches, trimAIRoute(route, true))
		if len(matches) >= 6 {
			break
		}
	}
	if len(matches) == 0 {
		for _, route := range routes {
			matches = append(matches, trimAIRoute(route, false))
			if len(matches) >= 4 {
				break
			}
		}
	}
	return matches
}

// trimAIRoute keeps only the route fields relevant for AI context, compacted
// via compactAnyMap. includeExtras adds the hop/bridge/tag metadata that is
// only attached to query-matched routes.
func trimAIRoute(route map[string]any, includeExtras bool) map[string]any {
	trimmed := map[string]any{
		"routeId":            route["routeId"],
		"status":             route["status"],
		"routeType":          route["routeType"],
		"fromChainId":        route["fromChainId"],
		"toChainId":          route["toChainId"],
		"tokenInSymbol":      route["tokenInSymbol"],
		"tokenOutSymbol":     route["tokenOutSymbol"],
		"assetSymbol":        route["assetSymbol"],
		"label":              route["label"],
		"aggregatorFamilies": route["aggregatorFamilies"],
	}
	if includeExtras {
		trimmed["hopCount"] = route["hopCount"]
		trimmed["bridgeType"] = route["bridgeType"]
		trimmed["tags"] = route["tags"]
	}
	return compactAnyMap(trimmed)
}
// normalizeHexString lowercases a hex identifier, strips surrounding
// whitespace, and removes any leading "0x" prefix — the form expected by
// bytea decode() lookups in Blockscout-style schemas.
func normalizeHexString(value string) string {
	value = strings.ToLower(strings.TrimSpace(value))
	return strings.TrimPrefix(value, "0x")
}
// routeMatchesQuery reports whether a live route is relevant to the
// (already-lowercased) query: a substring match against its identifying
// fields, its aggregator families, its tags, or — when the query mentions a
// known token symbol — that symbol appearing anywhere in the joined fields.
func routeMatchesQuery(route map[string]any, query string) bool {
	fields := []string{
		stringValue(route["routeId"]),
		stringValue(route["routeType"]),
		stringValue(route["tokenInSymbol"]),
		stringValue(route["tokenOutSymbol"]),
		stringValue(route["assetSymbol"]),
		stringValue(route["label"]),
	}
	containsQuery := func(haystack string) bool {
		return strings.Contains(strings.ToLower(haystack), query)
	}
	for _, field := range fields {
		if containsQuery(field) {
			return true
		}
	}
	for _, family := range stringSliceValue(route["aggregatorFamilies"]) {
		if containsQuery(family) {
			return true
		}
	}
	for _, tag := range stringSliceValue(route["tags"]) {
		if containsQuery(tag) {
			return true
		}
	}
	joinedFields := strings.ToLower(strings.Join(fields, " "))
	for _, symbol := range []string{"cusdt", "cusdc", "cxauc", "ceurt", "usdt", "usdc", "weth"} {
		if strings.Contains(query, symbol) && strings.Contains(joinedFields, symbol) {
			return true
		}
	}
	return false
}

267
backend/api/rest/ai_xai.go Normal file
View File

@@ -0,0 +1,267 @@
package rest
import (
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"io"
"net/http"
"os"
"strings"
"time"
)
// xAIChatCompletionsRequest is the request payload for the xAI
// /chat/completions endpoint (OpenAI-compatible wire format).
type xAIChatCompletionsRequest struct {
	Model    string              `json:"model"`
	Messages []xAIChatMessageReq `json:"messages"`
	Stream   bool                `json:"stream"`
}

// xAIChatMessageReq is a single role/content message in the request.
type xAIChatMessageReq struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

// xAIChatCompletionsResponse covers both response shapes observed upstream:
// classic chat choices, plus Responses-API-style output_text / output fields
// that callXAIChatCompletions falls back to when choices carry no text.
type xAIChatCompletionsResponse struct {
	Model      string             `json:"model"`
	Choices    []xAIChoice        `json:"choices"`
	OutputText string             `json:"output_text,omitempty"`
	Output     []openAIOutputItem `json:"output,omitempty"`
}

// xAIChoice wraps one completion choice.
type xAIChoice struct {
	Message xAIChoiceMessage `json:"message"`
}

// xAIChoiceMessage is the assistant message inside a choice.
type xAIChoiceMessage struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

// openAIOutputItem is one structured output item (Responses-API style).
type openAIOutputItem struct {
	Type    string                `json:"type"`
	Content []openAIOutputContent `json:"content"`
}

// openAIOutputContent is a single text fragment inside an output item.
type openAIOutputContent struct {
	Type string `json:"type"`
	Text string `json:"text"`
}
// normalizeAIMessages sanitizes an incoming chat transcript before it is sent
// upstream: roles are lowercased and restricted to system/user/assistant,
// message bodies are trimmed and clipped to maxExplorerAIMessageChars, empty
// messages are dropped, and only the most recent maxExplorerAIMessages
// entries are retained.
func normalizeAIMessages(messages []AIChatMessage) []AIChatMessage {
	kept := make([]AIChatMessage, 0, len(messages))
	for _, message := range messages {
		role := strings.ToLower(strings.TrimSpace(message.Role))
		switch role {
		case "assistant", "user", "system":
			// accepted roles
		default:
			continue
		}
		content := clipString(strings.TrimSpace(message.Content), maxExplorerAIMessageChars)
		if content == "" {
			continue
		}
		kept = append(kept, AIChatMessage{
			Role:    role,
			Content: content,
		})
	}
	// Keep only the tail of the conversation when it exceeds the cap.
	if overflow := len(kept) - maxExplorerAIMessages; overflow > 0 {
		kept = kept[overflow:]
	}
	return kept
}
// latestUserMessage returns the content of the most recent user-role message.
// When the transcript has no user message it falls back to the last message
// of any role, and returns "" for an empty transcript.
func latestUserMessage(messages []AIChatMessage) string {
	if len(messages) == 0 {
		return ""
	}
	for i := len(messages) - 1; i >= 0; i-- {
		if messages[i].Role == "user" {
			return messages[i].Content
		}
	}
	return messages[len(messages)-1].Content
}
// callXAIChatCompletions sends the normalized conversation plus the retrieved
// context envelope to the xAI /chat/completions endpoint and returns the
// reply text, the model actually used, and an error. Upstream failures are
// mapped to *AIUpstreamError values with stable codes so the HTTP handler can
// relay a meaningful status. Requires XAI_API_KEY; XAI_BASE_URL (trailing
// slashes stripped) overrides the default https://api.x.ai/v1.
func (s *Server) callXAIChatCompletions(ctx context.Context, messages []AIChatMessage, contextEnvelope AIContextEnvelope) (string, string, error) {
	apiKey := strings.TrimSpace(os.Getenv("XAI_API_KEY"))
	if apiKey == "" {
		return "", "", fmt.Errorf("XAI_API_KEY is not configured")
	}
	model := explorerAIModel()
	baseURL := strings.TrimRight(strings.TrimSpace(os.Getenv("XAI_BASE_URL")), "/")
	if baseURL == "" {
		baseURL = "https://api.x.ai/v1"
	}
	// The context envelope is serialized to indented JSON and clipped so an
	// oversized envelope cannot blow past the upstream token budget.
	// Marshal errors are ignored: the envelope contains only JSON-safe types.
	contextJSON, _ := json.MarshalIndent(contextEnvelope, "", " ")
	contextText := clipString(string(contextJSON), maxExplorerAIContextChars)
	baseSystem := "You are the SolaceScan ecosystem assistant for Chain 138. Answer using the supplied indexed explorer data, route inventory, and workspace documentation. Be concise, operationally useful, and explicit about uncertainty. Never claim a route, deployment, or production status is live unless the provided context says it is live. If data is missing, say exactly what is missing."
	// Extra guardrails are appended unless operator tooling is explicitly
	// enabled for this deployment.
	if !explorerAIOperatorToolsEnabled() {
		baseSystem += " Never instruct users to paste private keys or seed phrases. Do not direct users to run privileged mint, liquidity, or bridge execution from the public explorer UI. Operator changes belong on LAN-gated workflows and authenticated Track 4 APIs; PMM/MCP-style execution tools are disabled on this deployment unless EXPLORER_AI_OPERATOR_TOOLS_ENABLED=1."
	}
	// Two system messages lead the prompt: the behavior contract, then the
	// retrieved context; the user/assistant transcript follows.
	input := []xAIChatMessageReq{
		{
			Role:    "system",
			Content: baseSystem,
		},
		{
			Role:    "system",
			Content: "Retrieved ecosystem context:\n" + contextText,
		},
	}
	for _, message := range messages {
		input = append(input, xAIChatMessageReq{
			Role:    message.Role,
			Content: message.Content,
		})
	}
	payload := xAIChatCompletionsRequest{
		Model:    model,
		Messages: input,
		Stream:   false,
	}
	body, err := json.Marshal(payload)
	if err != nil {
		return "", model, err
	}
	req, err := http.NewRequestWithContext(ctx, http.MethodPost, baseURL+"/chat/completions", bytes.NewReader(body))
	if err != nil {
		return "", model, err
	}
	req.Header.Set("Authorization", "Bearer "+apiKey)
	req.Header.Set("Content-Type", "application/json")
	client := &http.Client{Timeout: 45 * time.Second}
	resp, err := client.Do(req)
	if err != nil {
		// Distinguish caller-context timeouts from other transport errors.
		if errors.Is(err, context.DeadlineExceeded) {
			return "", model, &AIUpstreamError{
				StatusCode: http.StatusGatewayTimeout,
				Code:       "upstream_timeout",
				Message:    "explorer ai upstream timed out",
				Details:    "xAI request exceeded the configured timeout",
			}
		}
		return "", model, &AIUpstreamError{
			StatusCode: http.StatusBadGateway,
			Code:       "upstream_transport_error",
			Message:    "explorer ai upstream transport failed",
			Details:    err.Error(),
		}
	}
	defer resp.Body.Close()
	responseBody, err := io.ReadAll(resp.Body)
	if err != nil {
		return "", model, &AIUpstreamError{
			StatusCode: http.StatusBadGateway,
			Code:       "upstream_bad_response",
			Message:    "explorer ai upstream body could not be read",
			Details:    err.Error(),
		}
	}
	if resp.StatusCode >= 400 {
		return "", model, parseXAIError(resp.StatusCode, responseBody)
	}
	var response xAIChatCompletionsResponse
	if err := json.Unmarshal(responseBody, &response); err != nil {
		return "", model, &AIUpstreamError{
			StatusCode: http.StatusBadGateway,
			Code:       "upstream_bad_response",
			Message:    "explorer ai upstream returned invalid JSON",
			Details:    err.Error(),
		}
	}
	// Reply extraction fallback chain: chat choices, then Responses-style
	// output_text, then structured output items.
	reply := ""
	if len(response.Choices) > 0 {
		reply = strings.TrimSpace(response.Choices[0].Message.Content)
	}
	if reply == "" {
		reply = strings.TrimSpace(response.OutputText)
	}
	if reply == "" {
		reply = strings.TrimSpace(extractOutputText(response.Output))
	}
	if reply == "" {
		return "", model, &AIUpstreamError{
			StatusCode: http.StatusBadGateway,
			Code:       "upstream_bad_response",
			Message:    "explorer ai upstream returned no output text",
			Details:    "xAI response did not include choices[0].message.content or output text",
		}
	}
	// Prefer the model the upstream reports actually serving the request.
	if strings.TrimSpace(response.Model) != "" {
		model = response.Model
	}
	return reply, model, nil
}
// parseXAIError converts a non-2xx xAI response into an *AIUpstreamError with
// a stable code derived from the HTTP status. The upstream body is decoded
// best-effort for a structured message; when none is present, the raw body
// (clipped to 280 chars) is used as the details string.
func parseXAIError(statusCode int, responseBody []byte) error {
	var parsed struct {
		Error struct {
			Message string `json:"message"`
			Type    string `json:"type"`
			Code    string `json:"code"`
		} `json:"error"`
	}
	// Best effort: a decode failure simply leaves the struct zero-valued.
	_ = json.Unmarshal(responseBody, &parsed)
	details := clipString(strings.TrimSpace(parsed.Error.Message), 280)
	if details == "" {
		details = clipString(strings.TrimSpace(string(responseBody)), 280)
	}
	code := "upstream_error"
	message := "explorer ai upstream request failed"
	switch statusCode {
	case http.StatusUnauthorized, http.StatusForbidden:
		code = "upstream_auth_failed"
		message = "explorer ai upstream authentication failed"
	case http.StatusTooManyRequests:
		code = "upstream_quota_exhausted"
		message = "explorer ai upstream quota exhausted"
	case http.StatusRequestTimeout, http.StatusGatewayTimeout:
		code = "upstream_timeout"
		message = "explorer ai upstream timed out"
	}
	return &AIUpstreamError{
		StatusCode: statusCode,
		Code:       code,
		Message:    message,
		Details:    details,
	}
}
// extractOutputText flattens Responses-API-style structured output items into
// a single string, joining the non-empty trimmed text fragments with blank
// lines. Returns "" when no fragment carries text.
func extractOutputText(items []openAIOutputItem) string {
	var parts []string
	for _, item := range items {
		for _, content := range item.Content {
			text := strings.TrimSpace(content.Text)
			if text == "" {
				continue
			}
			parts = append(parts, text)
		}
	}
	return strings.Join(parts, "\n\n")
}