fix(nordagpt): smart router complexity for long queries + show routing info in badge
Some checks are pending
NordaBiz Tests / Unit & Integration Tests (push) Waiting to run
NordaBiz Tests / E2E Tests (Playwright) (push) Blocked by required conditions
NordaBiz Tests / Smoke Tests (Production) (push) Blocked by required conditions
NordaBiz Tests / Send Failure Notification (push) Blocked by required conditions

- Long messages (>150 chars) with 2+ categories → complex (was medium)
- Badge shows actual model used, complexity icon, thinking level
- Done chunk includes complexity, thinking, routed_by metadata

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Maciej Pienczyn 2026-03-28 06:08:43 +01:00
parent 86c7e83886
commit cc78711e17
3 changed files with 22 additions and 8 deletions

View File

@@ -1689,7 +1689,10 @@ W dyskusji [Artur Wiertel](link) pytał o moderację. Pełna treść: [moje uwag
'latency_ms': latency_ms,
'model': actual_model,
'cost_usd': round(cost_usd, 6),
'full_text': full_response_text
'full_text': full_response_text,
'complexity': route_decision.get('complexity', '?'),
'thinking': route_decision.get('thinking', '?'),
'routed_by': route_decision.get('routed_by', '?'),
}
except Exception as e:

View File

@@ -141,13 +141,16 @@ def route_query_fast(message: str, user_context: dict) -> Optional[dict]:
logger.debug("fast_router: no confident match, deferring to AI router")
return None
# Determine complexity by number of matched categories
if len(matched_categories) == 1:
complexity = 'medium'
elif len(matched_categories) <= 3:
# Determine complexity by number of matched categories + message length
is_long = len(message) > 150
multi_question = any(w in msg for w in ['jak ', 'jakie ', 'w jaki sposób', 'kto mógł'])
if len(matched_categories) >= 3 or (len(matched_categories) >= 2 and is_long):
complexity = 'complex'
elif len(matched_categories) >= 2 or is_long or multi_question:
complexity = 'medium'
else:
complexity = 'complex'
complexity = 'simple' if len(message) < 80 else 'medium'
logger.debug(
"fast_router: matched categories=%s complexity=%s",

View File

@@ -2514,12 +2514,20 @@ async function sendMessage() {
const modelLabels = {
'flash': '⚡ Flash', 'pro': '🧠 Pro',
'gemini-3-flash-preview': '⚡ Flash',
'gemini-3.1-flash-lite-preview': '⚡ Lite',
'gemini-3.1-pro-preview': '🧠 Pro'
};
const modelLabel = modelLabels[currentModel] || currentModel;
const actualModel = chunk.model || currentModel;
const modelLabel = modelLabels[actualModel] || modelLabels[currentModel] || actualModel;
const latencySec = ((chunk.latency_ms || 0) / 1000).toFixed(1);
const costStr = (chunk.cost_usd || 0) > 0 ? `$${(chunk.cost_usd).toFixed(4)}` : '$0.00';
let badgeHTML = `<span class="thinking-badge-level">${modelLabel}</span> · <span class="thinking-badge-time">${latencySec}s</span> · <span class="thinking-badge-cost">${costStr}</span>`;
const complexityIcons = {'simple': '💬', 'medium': '🔍', 'complex': '🧠'};
const thinkingLabels = {'minimal': 'szybki', 'low': 'analiza', 'high': 'głęboka analiza'};
const cIcon = complexityIcons[chunk.complexity] || '';
const tLabel = thinkingLabels[chunk.thinking] || '';
let badgeHTML = `<span class="thinking-badge-level">${modelLabel}</span>`;
if (cIcon && tLabel) badgeHTML += ` · ${cIcon} ${tLabel}`;
badgeHTML += ` · <span class="thinking-badge-time">${latencySec}s</span> · <span class="thinking-badge-cost">${costStr}</span>`;
if (currentModel === 'flash') {
badgeHTML += ` · <a href="#" class="pro-upgrade-hint" onclick="event.preventDefault(); setModel('pro');" title="Przełącz na Gemini 3 Pro dla lepszych odpowiedzi">Lepsze odpowiedzi? <strong>Spróbuj Pro</strong> 🧠</a>`;
}