modularizado de prompts

This commit is contained in:
Lucas Tettamanti
2026-01-25 20:51:33 -03:00
parent b91ece867b
commit a489ec66a2
43 changed files with 5408 additions and 89 deletions

View File

@@ -0,0 +1,170 @@
/**
 * Browse Specialist - Catalog queries, prices, and recommendations
 */
import OpenAI from "openai";
import { loadPrompt } from "../promptLoader.js";
import { validateBrowse, getValidationErrors, createEmptyNlu } from "../schemas.js";
/** Lazily created OpenAI client, shared across calls to this module. */
let _client = null;

/**
 * Returns the shared OpenAI client, creating it on first use.
 *
 * @returns {OpenAI} configured client instance
 * @throws {Error} when no API key is present in the environment
 */
function getClient() {
  // `||` (not `??`) so an empty-string primary key falls back to the alias.
  const apiKey = process.env.OPENAI_API_KEY || process.env.OPENAI_APIKEY;
  if (!apiKey) {
    throw new Error("OPENAI_API_KEY is not set");
  }
  // Reuse the cached instance; build it only once.
  _client ??= new OpenAI({ apiKey });
  return _client;
}
/**
 * Best-effort extraction of a JSON object embedded in free text.
 * Takes the span from the first "{" to the last "}" and tries to parse it.
 *
 * @param {string} text - raw LLM output that may contain a JSON object
 * @returns {Object|null} parsed object, or null when none can be recovered
 */
function extractJson(text) {
  const source = String(text || "");
  const start = source.indexOf("{");
  const end = source.lastIndexOf("}");
  // No brace pair found — nothing to parse.
  if (start < 0 || end <= start) {
    return null;
  }
  try {
    return JSON.parse(source.slice(start, end + 1));
  } catch {
    // Malformed JSON inside the braces — treat as absent.
    return null;
  }
}
/**
 * Classifies a user message into a browse sub-intent using simple
 * Spanish-language patterns.
 *
 * Note: JS `\b` is ASCII-only, so a word boundary placed directly after
 * an accented letter ("está", "recomendá") never matches when the next
 * character is a space — the patterns below therefore avoid a trailing
 * `\b` after accented alternatives.
 *
 * @param {string} text - user message
 * @returns {"price_query"|"recommend"|"browse"} detected query type
 */
function detectBrowseType(text) {
  const t = String(text || "").toLowerCase();
  // Price query: "cuánto sale/cuesta/está ..." or any mention of "precio(s)"
  if (/cu[aá]nto\s+(?:sale|cuesta|est[aá])|\bprecios?\b/i.test(t)) {
    return "price_query";
  }
  // Recommendation: "recomendá...", "qué llevo", "para N personas", "para un asado"
  if (/\b(?:recomend[aá]|qu[eé]\s+llevo|para\s+\d+\s+personas?|para\s+un\s+asado)/i.test(t)) {
    return "recommend";
  }
  // Availability: "tenés/tienen/hay/vendés ..."
  if (/\b(?:ten[eé]s|tienen|hay|vend[eé]s)\b/i.test(t)) {
    return "browse";
  }
  // Default when nothing matches.
  return "browse";
}
/**
 * Pulls a party-size number out of a Spanish message, if one is present.
 * Tries, in order: "para N [personas]", "somos N", "N personas".
 *
 * @param {string} text - user message
 * @returns {number|null} number of people, or null when none is mentioned
 */
function extractPeopleCount(text) {
  const input = String(text || "");
  // Ordered by specificity; the first pattern that matches wins.
  const patterns = [
    /para\s+(\d+)\s*(personas?|comensales?|invitados?)?/i, // "para 4 personas"
    /somos\s+(\d+)/i,                                      // "somos 4"
    /(\d+)\s*(personas?|comensales?)/i,                    // "4 personas"
  ];
  for (const re of patterns) {
    const hit = re.exec(input);
    if (hit) {
      return parseInt(hit[1], 10);
    }
  }
  return null;
}
/**
 * Naive extraction of the product a user is asking about.
 * Matches common Spanish question shapes ("tenés X?", "precio de X",
 * "cuánto sale el X").
 *
 * @param {string} text - user message
 * @returns {string|null} mentioned product text, or null when not found
 */
function extractProductMention(text) {
  const lowered = String(text || "").toLowerCase();
  // Question shapes, checked in order; the capture group is the product.
  const shapes = [
    /(?:ten[eé]s|hay|vend[eé]s|precio de|cu[aá]nto (?:sale|cuesta) (?:el|la|los|las)?)\s*(.+?)(?:\?|$)/i,
    /(.+?)\s*(?:tienen|hay|venden)\?/i,
  ];
  for (const shape of shapes) {
    const hit = shape.exec(lowered);
    const candidate = hit?.[1];
    if (candidate) {
      return candidate.trim();
    }
  }
  return null;
}
/**
 * Processes a catalog query (availability, price, recommendation).
 *
 * Loads the tenant's "browse" prompt, asks the LLM for a structured parse,
 * and falls back to regex heuristics when the LLM output is missing or
 * fails schema validation.
 *
 * @param {Object} params
 * @param {number} params.tenantId - tenant ID
 * @param {string} params.text - user message
 * @param {Object} params.storeConfig - store configuration
 * @returns {Object} unified NLU result: { nlu, raw_text, model, usage, validation }
 */
export async function browseNlu({ tenantId, text, storeConfig = {} }) {
  const openai = getClient();
  // Load the tenant-specific "browse" prompt (content + model choice).
  const { content: systemPrompt, model } = await loadPrompt({
    tenantId,
    promptKey: "browse",
    variables: {
      bot_name: storeConfig.botName || "Piaf",
      store_name: storeConfig.name || "la carnicería",
      ...storeConfig,
    },
  });
  // Single LLM call; low temperature + JSON mode for stable structured output.
  const response = await openai.chat.completions.create({
    model: model || "gpt-4-turbo",
    temperature: 0.2,
    max_tokens: 200,
    response_format: { type: "json_object" },
    messages: [
      { role: "system", content: systemPrompt },
      { role: "user", content: text },
    ],
  });
  const rawText = response?.choices?.[0]?.message?.content || "";
  let parsed = extractJson(rawText);
  // Track whether the LLM produced a schema-valid parse, so the returned
  // `validation` reflects reality instead of a hard-coded { ok: true }.
  const llmOk = Boolean(parsed) && validateBrowse(parsed);
  if (!llmOk) {
    // LLM output missing or invalid: fall back to regex-based heuristics.
    parsed = {
      intent: detectBrowseType(text),
      product_query: extractProductMention(text),
      people_count: extractPeopleCount(text),
      event_type: /asado/i.test(text) ? "asado" : null,
    };
  }
  // Map onto the unified NLU shape.
  const nlu = createEmptyNlu();
  nlu.intent = parsed.intent || "browse";
  // A heuristic fallback is less reliable than a validated LLM parse.
  nlu.confidence = llmOk ? 0.85 : 0.6;
  nlu.entities.product_query = parsed.product_query || null;
  nlu.entities.people_count = parsed.people_count || null;
  nlu.entities.event_type = parsed.event_type || null;
  nlu.needs.catalog_lookup = true;
  nlu.needs.knowledge_lookup = nlu.intent === "recommend";
  return {
    nlu,
    raw_text: rawText,
    model,
    usage: response?.usage || null,
    // Was unconditionally { ok: true }; now reports the actual outcome
    // and flags when the regex fallback produced the result.
    validation: llmOk ? { ok: true } : { ok: false, fallback: true },
  };
}