modularizado de prompts

This commit is contained in:
Lucas Tettamanti
2026-01-25 20:51:33 -03:00
parent b91ece867b
commit a489ec66a2
43 changed files with 5408 additions and 89 deletions

View File

@@ -0,0 +1,170 @@
/**
* Browse Specialist - Consultas de catálogo, precios y recomendaciones
*/
import OpenAI from "openai";
import { loadPrompt } from "../promptLoader.js";
import { validateBrowse, getValidationErrors, createEmptyNlu } from "../schemas.js";
let _client = null;
/**
 * Returns a memoized OpenAI client, creating it on first use.
 * Throws when no API key is present in the environment.
 * NOTE(review): this helper is duplicated in every specialist module;
 * consider extracting it to a shared file.
 */
function getClient() {
  const apiKey = process.env.OPENAI_API_KEY || process.env.OPENAI_APIKEY;
  if (!apiKey) {
    throw new Error("OPENAI_API_KEY is not set");
  }
  _client = _client || new OpenAI({ apiKey });
  return _client;
}
/**
 * Best-effort extraction of a JSON object embedded in free-form text.
 * Returns the parsed object, or null when no parseable {...} span exists.
 */
function extractJson(text) {
  const raw = String(text || "");
  const start = raw.indexOf("{");
  const end = raw.lastIndexOf("}");
  if (start < 0 || end <= start) {
    return null;
  }
  try {
    return JSON.parse(raw.slice(start, end + 1));
  } catch {
    return null;
  }
}
/**
 * Heuristic classification of a catalog query: price question,
 * recommendation request, or plain availability browsing.
 * Falls back to "browse" when nothing matches.
 */
function detectBrowseType(text) {
  const t = String(text || "").toLowerCase();
  const rules = [
    ["price_query", /\b(cu[aá]nto (sale|cuesta|est[aá])|precio|precios)\b/i],
    ["recommend", /\b(recomend[aá]|qu[eé] llevo|para \d+ personas?|para un asado)\b/i],
    ["browse", /\b(ten[eé]s|tienen|hay|vend[eé]s)\b/i],
  ];
  for (const [type, pattern] of rules) {
    if (pattern.test(t)) {
      return type;
    }
  }
  return "browse";
}
/**
 * Pulls a party-size number out of the message, trying the most specific
 * phrasing first ("para X personas", then "somos X", then bare "X personas").
 * Returns the integer, or null when no phrasing matches.
 */
function extractPeopleCount(text) {
  const t = String(text || "");
  const phrasings = [
    /para\s+(\d+)\s*(personas?|comensales?|invitados?)?/i,
    /somos\s+(\d+)/i,
    /(\d+)\s*(personas?|comensales?)/i,
  ];
  for (const re of phrasings) {
    const m = re.exec(t);
    if (m) {
      return parseInt(m[1], 10);
    }
  }
  return null;
}
/**
 * Extracts the product mentioned in a catalog question (simple heuristics).
 *
 * Tries "verb + product" phrasings first ("tenés X?", "precio de X"), then
 * "product + verb" ("X tienen?"). Returns the trimmed product text or null.
 */
function extractProductMention(text) {
  const t = String(text || "").toLowerCase();
  // Common question shapes. The leading \b keeps short verbs like "hay"
  // from matching inside longer words (e.g. "chayote"), which previously
  // made the first pattern capture garbage like "ote tienen".
  const patterns = [
    /\b(?:ten[eé]s|hay|vend[eé]s|precio de|cu[aá]nto (?:sale|cuesta) (?:el|la|los|las)?)\s*(.+?)(?:\?|$)/i,
    /(.+?)\s*(?:tienen|hay|venden)\?/i,
  ];
  for (const pattern of patterns) {
    const match = pattern.exec(t);
    if (match && match[1]) {
      return match[1].trim();
    }
  }
  return null;
}
/**
 * Processes a catalog query (availability, price, recommendation).
 *
 * @param {Object} params
 * @param {number} params.tenantId - Tenant ID
 * @param {string} params.text - User message
 * @param {Object} params.storeConfig - Store configuration
 * @returns {Object} Unified NLU result ({ nlu, raw_text, model, usage, validation })
 */
export async function browseNlu({ tenantId, text, storeConfig = {} }) {
  const openai = getClient();
  // Load the tenant-specific "browse" prompt.
  const { content: systemPrompt, model } = await loadPrompt({
    tenantId,
    promptKey: "browse",
    variables: {
      bot_name: storeConfig.botName || "Piaf",
      store_name: storeConfig.name || "la carnicería",
      ...storeConfig,
    },
  });
  // Ask the LLM; low temperature for stable extraction.
  const response = await openai.chat.completions.create({
    model: model || "gpt-4-turbo",
    temperature: 0.2,
    max_tokens: 200,
    response_format: { type: "json_object" },
    messages: [
      { role: "system", content: systemPrompt },
      { role: "user", content: text },
    ],
  });
  const rawText = response?.choices?.[0]?.message?.content || "";
  let parsed = extractJson(rawText);
  // Validate the LLM output against the browse schema.
  let validationOk = Boolean(parsed && validateBrowse(parsed));
  if (!validationOk) {
    // Fallback: pattern-based detection when the LLM output is unusable.
    const browseType = detectBrowseType(text);
    parsed = {
      intent: browseType,
      product_query: extractProductMention(text),
      people_count: extractPeopleCount(text),
      event_type: /asado/i.test(text) ? "asado" : null,
    };
  }
  // Convert to the unified NLU shape.
  const nlu = createEmptyNlu();
  nlu.intent = parsed.intent || "browse";
  nlu.confidence = 0.85;
  nlu.entities.product_query = parsed.product_query || null;
  nlu.entities.people_count = parsed.people_count || null;
  nlu.entities.event_type = parsed.event_type || null;
  nlu.needs.catalog_lookup = true;
  nlu.needs.knowledge_lookup = nlu.intent === "recommend";
  return {
    nlu,
    raw_text: rawText,
    model,
    usage: response?.usage || null,
    // Report the real validation outcome (consistent with ordersNlu);
    // previously hard-coded to ok:true even after the pattern fallback ran.
    validation: validationOk
      ? { ok: true }
      : { ok: false, errors: getValidationErrors(validateBrowse), fallback: true },
  };
}

View File

@@ -0,0 +1,100 @@
/**
* Greeting Specialist - Maneja saludos con personalidad de carnicero argentino
*/
import OpenAI from "openai";
import { loadPrompt } from "../promptLoader.js";
import { validateGreeting, getValidationErrors, createEmptyNlu } from "../schemas.js";
let _client = null;
/**
 * Returns a memoized OpenAI client, creating it on first use.
 * Throws when no API key is present in the environment.
 * NOTE(review): this helper is duplicated in every specialist module;
 * consider extracting it to a shared file.
 */
function getClient() {
  const apiKey = process.env.OPENAI_API_KEY || process.env.OPENAI_APIKEY;
  if (!apiKey) {
    throw new Error("OPENAI_API_KEY is not set");
  }
  _client = _client || new OpenAI({ apiKey });
  return _client;
}
/**
 * Best-effort extraction of a JSON object embedded in free-form text.
 * Returns the parsed object, or null when no parseable {...} span exists.
 */
function extractJson(text) {
  const raw = String(text || "");
  const start = raw.indexOf("{");
  const end = raw.lastIndexOf("}");
  if (start < 0 || end <= start) {
    return null;
  }
  try {
    return JSON.parse(raw.slice(start, end + 1));
  } catch {
    return null;
  }
}
/**
 * Processes a greeting and generates a personality-flavored reply.
 *
 * @param {Object} params
 * @param {number} params.tenantId - Tenant ID
 * @param {string} params.text - User message
 * @param {Object} params.storeConfig - Store configuration
 * @returns {Object} Unified NLU result including the reply text
 */
export async function greetingNlu({ tenantId, text, storeConfig = {} }) {
  const openai = getClient();
  // Load the tenant-specific "greeting" prompt.
  const { content: systemPrompt, model } = await loadPrompt({
    tenantId,
    promptKey: "greeting",
    variables: {
      bot_name: storeConfig.botName || "Piaf",
      store_name: storeConfig.name || "la carnicería",
      store_hours: storeConfig.hours || "",
      store_address: storeConfig.address || "",
      store_phone: storeConfig.phone || "",
    },
  });
  // Slightly higher temperature: greetings benefit from some variety.
  const response = await openai.chat.completions.create({
    model: model || "gpt-4-turbo",
    temperature: 0.7,
    max_tokens: 200,
    response_format: { type: "json_object" },
    messages: [
      { role: "system", content: systemPrompt },
      { role: "user", content: text },
    ],
  });
  const rawText = response?.choices?.[0]?.message?.content || "";
  let parsed = extractJson(rawText);
  // Validate the LLM output against the greeting schema.
  let validationOk = Boolean(parsed && validateGreeting(parsed));
  if (!validationOk) {
    // Fallback: generic canned reply.
    parsed = {
      intent: "greeting",
      reply: "¡Hola! ¿En qué te puedo ayudar?",
    };
  }
  // Convert to the unified NLU shape.
  const nlu = createEmptyNlu();
  nlu.intent = "greeting";
  nlu.confidence = 0.95;
  nlu.reply = parsed.reply;
  nlu.needs.catalog_lookup = false;
  nlu.needs.knowledge_lookup = false;
  return {
    nlu,
    raw_text: rawText,
    model,
    usage: response?.usage || null,
    // Report the real validation outcome (consistent with ordersNlu);
    // previously hard-coded to ok:true even when the fallback reply was used.
    validation: validationOk
      ? { ok: true }
      : { ok: false, errors: getValidationErrors(validateGreeting), fallback: true },
  };
}

View File

@@ -0,0 +1,162 @@
/**
* Orders Specialist - Extracción de productos y cantidades
*
* El specialist más importante: maneja add_to_cart, remove_from_cart,
* view_cart, confirm_order con soporte para multi-items.
*/
import OpenAI from "openai";
import { loadPrompt } from "../promptLoader.js";
import { validateOrders, getValidationErrors, createEmptyNlu } from "../schemas.js";
let _client = null;
/**
 * Returns a memoized OpenAI client, creating it on first use.
 * Throws when no API key is present in the environment.
 * NOTE(review): this helper is duplicated in every specialist module;
 * consider extracting it to a shared file.
 */
function getClient() {
  const apiKey = process.env.OPENAI_API_KEY || process.env.OPENAI_APIKEY;
  if (!apiKey) {
    throw new Error("OPENAI_API_KEY is not set");
  }
  _client = _client || new OpenAI({ apiKey });
  return _client;
}
/**
 * Best-effort extraction of a JSON object embedded in free-form text.
 * Returns the parsed object, or null when no parseable {...} span exists.
 */
function extractJson(text) {
  const raw = String(text || "");
  const start = raw.indexOf("{");
  const end = raw.lastIndexOf("}");
  if (start < 0 || end <= start) {
    return null;
  }
  try {
    return JSON.parse(raw.slice(start, end + 1));
  } catch {
    return null;
  }
}
/**
 * Maps a free-form unit string to one of the canonical units
 * ("kg", "g", "unidad"); returns null for unknown or empty input.
 */
function normalizeUnit(unit) {
  if (!unit) return null;
  const u = String(unit).toLowerCase().trim();
  const aliases = {
    kg: ["kg", "kilo", "kilos", "kilogramo", "kilogramos"],
    g: ["g", "gr", "gramo", "gramos"],
    unidad: ["unidad", "unidades", "u", "un"],
  };
  for (const [canonical, forms] of Object.entries(aliases)) {
    if (forms.includes(u)) return canonical;
  }
  return null;
}
/**
 * Cleans up LLM-extracted items: keeps only entries with a non-empty
 * product_query, trims the query, coerces quantity to a number or null,
 * and canonicalizes the unit. Returns null when the input is not a
 * non-empty array.
 */
function normalizeItems(items) {
  if (!Array.isArray(items) || items.length === 0) return null;
  const cleaned = [];
  for (const item of items) {
    if (!item || !item.product_query) continue;
    const productQuery = String(item.product_query).trim();
    if (productQuery.length === 0) continue;
    cleaned.push({
      product_query: productQuery,
      quantity: typeof item.quantity === "number" ? item.quantity : null,
      unit: normalizeUnit(item.unit),
    });
  }
  return cleaned;
}
/**
 * Processes an order message (add_to_cart, remove_from_cart, view_cart,
 * confirm_order) and extracts the products and quantities involved,
 * always normalizing them into an items[] array.
 *
 * @param {Object} params
 * @param {number} params.tenantId - Tenant ID
 * @param {string} params.text - User message
 * @param {Object} params.storeConfig - Store configuration
 * @returns {Object} Unified NLU result
 */
export async function ordersNlu({ tenantId, text, storeConfig = {} }) {
  const openai = getClient();
  // Load the tenant-specific "orders" prompt.
  const { content: systemPrompt, model } = await loadPrompt({
    tenantId,
    promptKey: "orders",
    variables: storeConfig,
  });
  // Low temperature: we want precise, repeatable extraction.
  const response = await openai.chat.completions.create({
    model: model || "gpt-4-turbo",
    temperature: 0.1,
    max_tokens: 500,
    response_format: { type: "json_object" },
    messages: [
      { role: "system", content: systemPrompt },
      { role: "user", content: text },
    ],
  });
  const rawText = response?.choices?.[0]?.message?.content || "";
  let parsed = extractJson(rawText);
  // Try to validate; degrade gracefully in two steps.
  let validationOk = false;
  if (parsed && validateOrders(parsed)) {
    validationOk = true;
  } else if (parsed) {
    // Partially valid JSON: coerce into the expected shape.
    // ?? (not ||) so explicit 0 values for confidence/quantity are kept.
    parsed = {
      intent: parsed.intent || "add_to_cart",
      confidence: parsed.confidence ?? 0.8,
      items: parsed.items || null,
      product_query: parsed.product_query || null,
      quantity: parsed.quantity ?? null,
      unit: parsed.unit || null,
    };
    validationOk = true;
  } else {
    // Total fallback: no parseable JSON at all.
    parsed = {
      intent: "add_to_cart",
      confidence: 0.5,
      items: null,
      // A short message is probably the product itself ("1kg de asado").
      product_query: text.length < 50 ? text : null,
      quantity: null,
      unit: null,
    };
  }
  // Normalize items — ALWAYS end up with an array.
  let normalizedItems = normalizeItems(parsed.items);
  // Legacy single-product shape at the root: lift it into items[].
  if ((!normalizedItems || normalizedItems.length === 0) && parsed.product_query) {
    normalizedItems = [{
      product_query: String(parsed.product_query).trim(),
      quantity: typeof parsed.quantity === "number" ? parsed.quantity : null,
      unit: normalizeUnit(parsed.unit),
    }];
  }
  // Convert to the unified NLU shape.
  const nlu = createEmptyNlu();
  nlu.intent = parsed.intent || "add_to_cart";
  nlu.confidence = parsed.confidence ?? 0.8;
  // Entities — consumers should read items[] only; the per-field entity
  // slots are deprecated and always null here.
  nlu.entities.items = normalizedItems || [];
  nlu.entities.product_query = null;
  nlu.entities.quantity = null;
  nlu.entities.unit = null;
  // Cart mutations need a catalog lookup to resolve product references.
  nlu.needs.catalog_lookup = ["add_to_cart", "remove_from_cart"].includes(nlu.intent);
  nlu.needs.knowledge_lookup = false;
  return {
    nlu,
    raw_text: rawText,
    model,
    usage: response?.usage || null,
    validation: { ok: validationOk, errors: validationOk ? [] : getValidationErrors(validateOrders) },
  };
}

View File

@@ -0,0 +1,135 @@
/**
* Payment Specialist - Extracción de método de pago
*/
import OpenAI from "openai";
import { loadPrompt } from "../promptLoader.js";
import { validatePayment, getValidationErrors, createEmptyNlu } from "../schemas.js";
let _client = null;
/**
 * Returns a memoized OpenAI client, creating it on first use.
 * Throws when no API key is present in the environment.
 * NOTE(review): this helper is duplicated in every specialist module;
 * consider extracting it to a shared file.
 */
function getClient() {
  const apiKey = process.env.OPENAI_API_KEY || process.env.OPENAI_APIKEY;
  if (!apiKey) {
    throw new Error("OPENAI_API_KEY is not set");
  }
  _client = _client || new OpenAI({ apiKey });
  return _client;
}
/**
 * Best-effort extraction of a JSON object embedded in free-form text.
 * Returns the parsed object, or null when no parseable {...} span exists.
 */
function extractJson(text) {
  const raw = String(text || "");
  const start = raw.indexOf("{");
  const end = raw.lastIndexOf("}");
  if (start < 0 || end <= start) {
    return null;
  }
  try {
    return JSON.parse(raw.slice(start, end + 1));
  } catch {
    return null;
  }
}
/**
 * Fast pattern-based detection of the payment method.
 * Returns "cash", "link", or null when the text is ambiguous.
 */
function detectPaymentMethod(text) {
  const t = String(text || "").toLowerCase().trim();
  // Bare menu numbers (1 = cash, 2 = payment link, per conversation context).
  if (t === "1") return "cash";
  if (t === "2") return "link";
  // Cash wording
  if (/\b(efectivo|cash|plata|billete|cuando (llega|llegue)|en mano)\b/i.test(t)) {
    return "cash";
  }
  // Card / transfer / payment-link wording
  if (/\b(tarjeta|link|transfer|qr|mercadopago|mp|d[eé]bito|cr[eé]dito)\b/i.test(t)) {
    return "link";
  }
  return null;
}
/**
 * Processes a payment-method message.
 *
 * @param {Object} params
 * @param {number} params.tenantId - Tenant ID
 * @param {string} params.text - User message
 * @param {Object} params.storeConfig - Store configuration
 * @returns {Object} Unified NLU result
 */
export async function paymentNlu({ tenantId, text, storeConfig = {} }) {
  // Fast path: obvious short answers ("1", "efectivo") skip the LLM entirely.
  const quickMethod = detectPaymentMethod(text);
  if (quickMethod && text.trim().length < 30) {
    const nlu = createEmptyNlu();
    nlu.intent = "select_payment";
    nlu.confidence = 0.9;
    nlu.entities.payment_method = quickMethod;
    return {
      nlu,
      raw_text: "",
      model: null,
      usage: null,
      validation: { ok: true, skipped_llm: true },
    };
  }
  const openai = getClient();
  // Load the tenant-specific "payment" prompt.
  const { content: systemPrompt, model } = await loadPrompt({
    tenantId,
    promptKey: "payment",
    variables: storeConfig,
  });
  const response = await openai.chat.completions.create({
    model: model || "gpt-4o-mini",
    temperature: 0.1,
    max_tokens: 100,
    response_format: { type: "json_object" },
    messages: [
      { role: "system", content: systemPrompt },
      { role: "user", content: text },
    ],
  });
  const rawText = response?.choices?.[0]?.message?.content || "";
  let parsed = extractJson(rawText);
  // Validate the LLM output against the payment schema.
  let validationOk = Boolean(parsed && validatePayment(parsed));
  if (!validationOk) {
    // Fallback: whatever the pattern detector found (possibly null).
    parsed = {
      intent: "select_payment",
      payment_method: quickMethod,
    };
  }
  // Convert to the unified NLU shape.
  const nlu = createEmptyNlu();
  nlu.intent = "select_payment";
  nlu.confidence = 0.85;
  nlu.entities.payment_method = parsed.payment_method || null;
  nlu.needs.catalog_lookup = false;
  return {
    nlu,
    raw_text: rawText,
    model,
    usage: response?.usage || null,
    // Report the real validation outcome (consistent with ordersNlu);
    // previously hard-coded to ok:true even after the fallback ran.
    validation: validationOk
      ? { ok: true }
      : { ok: false, errors: getValidationErrors(validatePayment), fallback: true },
  };
}

View File

@@ -0,0 +1,157 @@
/**
* Shipping Specialist - Extracción de método de envío y dirección
*/
import OpenAI from "openai";
import { loadPrompt } from "../promptLoader.js";
import { validateShipping, getValidationErrors, createEmptyNlu } from "../schemas.js";
let _client = null;
/**
 * Returns a memoized OpenAI client, creating it on first use.
 * Throws when no API key is present in the environment.
 * NOTE(review): this helper is duplicated in every specialist module;
 * consider extracting it to a shared file.
 */
function getClient() {
  const apiKey = process.env.OPENAI_API_KEY || process.env.OPENAI_APIKEY;
  if (!apiKey) {
    throw new Error("OPENAI_API_KEY is not set");
  }
  _client = _client || new OpenAI({ apiKey });
  return _client;
}
/**
 * Best-effort extraction of a JSON object embedded in free-form text.
 * Returns the parsed object, or null when no parseable {...} span exists.
 */
function extractJson(text) {
  const raw = String(text || "");
  const start = raw.indexOf("{");
  const end = raw.lastIndexOf("}");
  if (start < 0 || end <= start) {
    return null;
  }
  try {
    return JSON.parse(raw.slice(start, end + 1));
  } catch {
    return null;
  }
}
/**
 * Fast pattern-based detection of the shipping method.
 * Returns "delivery", "pickup", or null when the text is ambiguous.
 */
function detectShippingMethod(text) {
  const t = String(text || "").toLowerCase().trim();
  // Bare menu numbers (1 = delivery, 2 = pickup, per conversation context).
  if (t === "1") return "delivery";
  if (t === "2") return "pickup";
  // Delivery patterns. "llev\w*" covers the conjugated verb stem
  // (llevan/llevar/lleven...): the original bare "llev" followed by \b
  // could never match those forms, since \b cannot sit between two word
  // characters.
  if (/\b(delivery|env[ií]o|enviar|traigan|llev\w*|domicilio)\b/i.test(t)) {
    return "delivery";
  }
  // Pickup patterns
  if (/\b(retiro|retirar|buscar|paso|sucursal|local)\b/i.test(t)) {
    return "pickup";
  }
  return null;
}
/**
 * Heuristic: does this text look like a street address?
 * True when it mixes digits and letters with some length, or when it
 * mentions typical address words (calle, avenida, piso, barrio, ...).
 */
function looksLikeAddress(text) {
  const t = String(text || "").trim();
  const hasDigitsAndLetters = t.length > 10 && /\d/.test(t) && /[a-záéíóú]/i.test(t);
  const mentionsStreetWords = /\b(calle|av|avenida|entre|esquina|piso|depto|dto|barrio)\b/i.test(t);
  return hasDigitsAndLetters || mentionsStreetWords;
}
/**
 * Processes a shipping message (method selection and/or address).
 *
 * @param {Object} params
 * @param {number} params.tenantId - Tenant ID
 * @param {string} params.text - User message
 * @param {Object} params.storeConfig - Store configuration
 * @returns {Object} Unified NLU result
 */
export async function shippingNlu({ tenantId, text, storeConfig = {} }) {
  // Fast path first: obvious short answers ("1", "retiro") skip the LLM.
  const quickMethod = detectShippingMethod(text);
  const isAddress = looksLikeAddress(text);
  if (quickMethod && !isAddress && text.trim().length < 20) {
    const nlu = createEmptyNlu();
    nlu.intent = "select_shipping";
    nlu.confidence = 0.9;
    nlu.entities.shipping_method = quickMethod;
    return {
      nlu,
      raw_text: "",
      model: null,
      usage: null,
      validation: { ok: true, skipped_llm: true },
    };
  }
  // Only build the client once we know the LLM is actually needed
  // (consistent with paymentNlu; keeps the fast path free of the
  // API-key requirement).
  const openai = getClient();
  // Load the tenant-specific "shipping" prompt.
  const { content: systemPrompt, model } = await loadPrompt({
    tenantId,
    promptKey: "shipping",
    variables: storeConfig,
  });
  const response = await openai.chat.completions.create({
    model: model || "gpt-4o-mini",
    temperature: 0.1,
    max_tokens: 150,
    response_format: { type: "json_object" },
    messages: [
      { role: "system", content: systemPrompt },
      { role: "user", content: text },
    ],
  });
  const rawText = response?.choices?.[0]?.message?.content || "";
  let parsed = extractJson(rawText);
  // Validate the LLM output against the shipping schema.
  let validationOk = Boolean(parsed && validateShipping(parsed));
  if (!validationOk) {
    // Fallback: pattern-based detection.
    parsed = {
      intent: isAddress ? "provide_address" : "select_shipping",
      shipping_method: quickMethod,
      address: isAddress ? text.trim() : null,
    };
  }
  // Convert to the unified NLU shape.
  const nlu = createEmptyNlu();
  nlu.intent = parsed.intent || "select_shipping";
  nlu.confidence = 0.85;
  nlu.entities.shipping_method = parsed.shipping_method || null;
  nlu.entities.address = parsed.address || null;
  nlu.needs.catalog_lookup = false;
  return {
    nlu,
    raw_text: rawText,
    model,
    usage: response?.usage || null,
    // Report the real validation outcome (consistent with ordersNlu);
    // previously hard-coded to ok:true even after the fallback ran.
    validation: validationOk
      ? { ok: true }
      : { ok: false, errors: getValidationErrors(validateShipping), fallback: true },
  };
}