// botino/src/services/openai.js
import OpenAI from "openai";
import { z } from "zod";
// Lazily-created OpenAI client, memoized so repeated calls reuse one instance.
let _client = null;
// API key the cached client was built with; lets getClient() detect a key
// rotation (env var changed between calls) and rebuild the client.
let _clientKey = null;
/**
 * Reads the OpenAI API key from the environment.
 * Checks OPENAI_API_KEY first, then the OPENAI_APIKEY alias.
 * @returns {string|null} The key, or null when neither variable is set.
 */
function getApiKey() {
  const { OPENAI_API_KEY, OPENAI_APIKEY } = process.env;
  return OPENAI_API_KEY || OPENAI_APIKEY || null;
}
/**
 * Returns a memoized OpenAI client bound to the current API key.
 * Rebuilds the client whenever the key in the environment changes.
 *
 * @returns {OpenAI} Client for the current key.
 * @throws {Error} With `code === "OPENAI_NO_KEY"` when no key is configured.
 */
function getClient() {
  const apiKey = getApiKey();
  if (!apiKey) {
    const err = new Error("OPENAI_API_KEY is not set");
    err.code = "OPENAI_NO_KEY";
    throw err;
  }
  if (_client && _clientKey === apiKey) return _client;
  // Construct BEFORE updating the cache key: the original assigned
  // _clientKey first, so a throwing constructor left _clientKey pointing at
  // the new key while _client still held a client built for the old key —
  // the next call would then return that stale client.
  _client = new OpenAI({ apiKey });
  _clientKey = apiKey;
  return _client;
}
// Conversation states the planner may transition the session into.
const NextStateSchema = z.enum([
"IDLE",
"BROWSING",
"BUILDING_ORDER",
"WAITING_ADDRESS",
"WAITING_PAYMENT",
"COMPLETED",
]);
// User intents the LLM classifies a message into; "other" is the catch-all
// for anything outside the shopping/ordering flow.
const IntentSchema = z.enum([
"ask_recommendation",
"ask_price",
"browse_products",
"create_order",
"add_item",
"remove_item",
"checkout",
"provide_address",
"confirm_payment",
"track_order",
"other",
]);
// Side effect the bot should apply to the order for this turn ("none" = no-op).
const OrderActionSchema = z.enum(["none", "create", "update", "cancel", "checkout"]);
// A basket line already resolved to catalog identifiers.
const BasketItemSchema = z.object({
product_id: z.number().int().nonnegative(),
// NOTE(review): presumably null when the product has no variations — confirm.
variation_id: z.number().int().nonnegative().nullable(),
quantity: z.number().positive(),
unit: z.enum(["kg", "g", "unit"]),
label: z.string().min(1),
});
// Structured "plan" the LLM must return for each conversation turn.
const PlanSchema = z
  .object({
    // The 350-char limit is a soft guideline: try it first, and fall back to
    // accepting any non-empty string so a long reply doesn't fail the plan.
    // (The original `.catch(z.string().min(1))` passed a *schema* as the
    // fallback value — zod's .catch() takes a value, not a schema — so an
    // over-long reply would have parsed to the ZodString object itself.)
    reply: z.string().min(1).max(350).or(z.string().min(1)),
    next_state: NextStateSchema,
    intent: IntentSchema,
    missing_fields: z.array(z.string()).default([]),
    order_action: OrderActionSchema.default("none"),
    basket_resolved: z
      .object({
        items: z.array(BasketItemSchema).default([]),
      })
      .default({ items: [] }),
  })
  .strict();
// An item as mentioned by the user — free-text label, not yet resolved to IDs.
const ExtractItemSchema = z.object({
label: z.string().min(1),
quantity: z.number().positive(),
unit: z.enum(["kg", "g", "unit"]),
});
// Output of the extraction step (llmExtract): intent plus mentioned items.
const ExtractSchema = z
.object({
intent: IntentSchema,
items: z.array(ExtractItemSchema).default([]),
})
.strict();
/**
 * Best-effort extraction of a JSON object embedded in free text.
 * Returns the substring from the first "{" to the last "}", or null when no
 * such bracketed span exists. Does not validate that the span parses.
 *
 * @param {*} text - Anything; coerced to a string (falsy becomes "").
 * @returns {string|null} Candidate JSON text, or null.
 */
function extractJsonObject(text) {
  const source = String(text || "");
  const start = source.indexOf("{");
  const end = source.lastIndexOf("}");
  if (start < 0 || end <= start) return null;
  return source.slice(start, end + 1);
}
/**
 * Runs one chat completion that must yield a JSON object.
 *
 * Sends the system + user messages with `response_format: json_object` and a
 * low temperature, then parses the reply; if the raw text is not valid JSON,
 * falls back to extracting the outermost {...} span before giving up.
 *
 * @param {{system: string, user: string, model?: string}} args
 * @returns {Promise<{parsed: object, raw_text: string, model: string, usage: object|null}>}
 * @throws {Error} "openai_invalid_json" when no JSON object can be recovered.
 */
async function jsonCompletion({ system, user, model }) {
  const client = getClient();
  const resolvedModel = model || process.env.OPENAI_MODEL || "gpt-4o-mini";
  const debugEnabled = String(process.env.LLM_DEBUG || "") === "1";

  if (debugEnabled) console.log("[llm] openai.request", { model: resolvedModel });

  const request = {
    model: resolvedModel,
    temperature: 0.2,
    response_format: { type: "json_object" },
    messages: [
      { role: "system", content: system },
      { role: "user", content: user },
    ],
  };
  const completion = await client.chat.completions.create(request);

  if (debugEnabled) {
    console.log("[llm] openai.response", {
      id: completion?.id || null,
      model: completion?.model || null,
      usage: completion?.usage || null,
    });
  }

  const rawText = completion?.choices?.[0]?.message?.content || "";
  let parsed;
  try {
    parsed = JSON.parse(rawText);
  } catch {
    const embedded = extractJsonObject(rawText);
    if (!embedded) throw new Error("openai_invalid_json");
    parsed = JSON.parse(embedded);
  }

  return {
    parsed,
    raw_text: rawText,
    model: resolvedModel,
    usage: completion?.usage || null,
  };
}
/**
 * Generates a structured conversation "plan" via OpenAI.
 *
 * @param {object} args
 * @param {string} args.promptSystem - Bot instructions (system prompt).
 * @param {object} args.input - { last_user_message, conversation_history,
 *   current_conversation_state, context }.
 * @param {string} [args.model] - Optional model override.
 * @returns {Promise<{plan: object, raw_text: string, model: string, usage: object|null}>}
 * @throws {ZodError} When the model's output does not match PlanSchema.
 */
export async function llmPlan({ promptSystem, input, model } = {}) {
  // Runtime prompt text is intentionally in Spanish — do not translate.
  const jsonOnlyNotice =
    "Respondé SOLO con un JSON válido (sin markdown). Respetá estrictamente el formato requerido.";
  const system = `${promptSystem}\n\n${jsonOnlyNotice}`;

  const completion = await jsonCompletion({
    system,
    user: JSON.stringify(input ?? {}),
    model,
  });

  const plan = PlanSchema.parse(completion.parsed);
  return {
    plan,
    raw_text: completion.raw_text,
    model: completion.model,
    usage: completion.usage,
  };
}
/**
 * Step 1: extracts the user's intent plus any mentioned items, without
 * resolving catalog IDs. Returns ONLY: intent + items[{label, quantity, unit}].
 *
 * @param {object} args
 * @param {object} args.input - Payload serialized as the user message.
 * @param {string} [args.model] - Optional model override.
 * @returns {Promise<{extracted: object, raw_text: string, model: string, usage: object|null}>}
 * @throws {ZodError} When the model's output does not match ExtractSchema.
 */
export async function llmExtract({ input, model } = {}) {
  // Runtime prompt text is intentionally in Spanish — do not translate.
  const system = [
    "Extraé intención e items del mensaje del usuario.",
    "Respondé SOLO JSON válido (sin markdown) con keys EXACTAS:",
    `intent (one of: ${IntentSchema.options.join("|")}), items (array of {label, quantity, unit(kg|g|unit)}).`,
    "Si no hay items claros, devolvé items: [].",
  ].join("\n");

  const { parsed, raw_text, model: usedModel, usage } = await jsonCompletion({
    system,
    user: JSON.stringify(input ?? {}),
    model,
  });

  const extracted = ExtractSchema.parse(parsed);
  return { extracted, raw_text, model: usedModel, usage };
}