Add OpenAI service and basic tables with migrations

This commit is contained in:
Lucas Tettamanti
2026-01-01 23:20:13 -03:00
parent 5c67b27859
commit 5e9cc8fe1a
9 changed files with 172 additions and 4 deletions

View File

@@ -0,0 +1,12 @@
-- migrate:up
-- Maps a WhatsApp chat to a WooCommerce customer, scoped per tenant.
-- One mapping per (tenant, chat): a chat resolves to exactly one customer.
create table wa_identity_map (
tenant_id uuid not null references tenants(id) on delete cascade,
wa_chat_id text not null, -- provider-specific chat identifier
woo_customer_id bigint not null, -- WooCommerce numeric customer id
created_at timestamptz not null default now(),
updated_at timestamptz not null default now(),
primary key (tenant_id, wa_chat_id)
);
-- migrate:down
drop table if exists wa_identity_map;

View File

@@ -0,0 +1,13 @@
-- migrate:up
-- pgcrypto provides gen_random_uuid(), used for uuid primary keys.
CREATE EXTENSION IF NOT EXISTS pgcrypto;
-- Root table: every other table references tenants(id) for multi-tenancy.
CREATE TABLE tenants (
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
key text NOT NULL UNIQUE, -- stable lookup slug for a tenant
name text NOT NULL,
created_at timestamptz NOT NULL DEFAULT now()
);
-- migrate:down
DROP TABLE IF EXISTS tenants;
-- NOTE(review): other migrations also use gen_random_uuid(); dropping the
-- shared extension here will fail (or break siblings) unless their tables
-- were rolled back first — confirm the intended down-migration order.
DROP EXTENSION IF EXISTS pgcrypto;

View File

@@ -0,0 +1,23 @@
-- migrate:up
-- Per-chat conversation state machine, one row per (tenant, chat).
create table wa_conversation_state (
tenant_id uuid not null references tenants(id) on delete cascade,
wa_chat_id text not null,
state text not null, -- IDLE / BUILDING_ORDER / WAITING_PAYMENT
last_intent text null, -- most recent classified intent, if any
last_order_id bigint null, -- order currently being discussed, if any
context jsonb not null default '{}'::jsonb, -- free-form per-chat scratch data
state_updated_at timestamptz not null default now(), -- last state transition
created_at timestamptz not null default now(),
updated_at timestamptz not null default now(),
primary key (tenant_id, wa_chat_id)
);
-- Supports "most recently active chats per tenant" queries.
create index idx_state_tenant_updated
on wa_conversation_state (tenant_id, updated_at desc);
-- migrate:down
drop table if exists wa_conversation_state;

View File

@@ -0,0 +1,24 @@
-- migrate:up
-- Append-only log of inbound/outbound WhatsApp messages.
create table wa_messages (
id bigserial primary key,
tenant_id uuid not null references tenants(id) on delete cascade,
wa_chat_id text not null,
provider text not null, -- sim / evolution / twilio
message_id text not null, -- idempotency key per provider
direction text not null, -- in / out
ts timestamptz not null default now(),
text text null, -- plain-text body; null for non-text payloads
payload jsonb not null default '{}'::jsonb, -- raw provider payload
run_id uuid null, -- links an outbound message to its conversation run
unique (tenant_id, provider, message_id)
);
-- Supports chronological chat history reads per chat.
create index idx_msgs_tenant_chat_ts
on wa_messages (tenant_id, wa_chat_id, ts desc);
-- migrate:down
drop table if exists wa_messages;

View File

@@ -0,0 +1,32 @@
-- migrate:up
-- Audit/trace record for one bot turn: one run per processed inbound message.
create table conversation_runs (
id uuid primary key default gen_random_uuid(),
tenant_id uuid not null references tenants(id) on delete cascade,
wa_chat_id text not null,
message_id text not null, -- the inbound message that triggered the run
ts timestamptz not null default now(),
prev_state text null, -- conversation state before this turn
user_text text null,
llm_output jsonb null, -- raw model plan/output
tools jsonb not null default '[]'::jsonb, -- tool calls executed this turn
invariants jsonb not null default '{}'::jsonb, -- invariant-check results
final_reply text null, -- text actually sent back to the user
order_id bigint null,
payment_link text null,
status text not null default 'ok', -- ok | warn | error
error_code text null,
error_detail text null,
latency_ms int null, -- end-to-end turn latency
unique (tenant_id, message_id) -- at most one run per inbound message
);
-- Supports per-chat run history, newest first.
create index idx_runs_tenant_chat_ts
on conversation_runs (tenant_id, wa_chat_id, ts desc);
-- migrate:down
drop table if exists conversation_runs;

View File

@@ -1,3 +1,5 @@
version: "3.9"
services: services:
app: app:
image: node:20-alpine image: node:20-alpine
@@ -5,7 +7,10 @@ services:
command: sh -c "npm install && npm run dev" command: sh -c "npm install && npm run dev"
ports: ports:
- "3000:3000" - "3000:3000"
env_file:
- .env
environment: environment:
- NODE_ENV=development
- PORT=3000 - PORT=3000
- DATABASE_URL=postgres://${POSTGRES_USER:-botino}:${POSTGRES_PASSWORD:-botino}@db:5432/${POSTGRES_DB:-botino} - DATABASE_URL=postgres://${POSTGRES_USER:-botino}:${POSTGRES_PASSWORD:-botino}@db:5432/${POSTGRES_DB:-botino}
- REDIS_URL=redis://redis:6379 - REDIS_URL=redis://redis:6379
@@ -13,12 +18,16 @@ services:
- .:/usr/src/app - .:/usr/src/app
- /usr/src/app/node_modules - /usr/src/app/node_modules
depends_on: depends_on:
- db db:
- redis condition: service_healthy
redis:
condition: service_healthy
restart: unless-stopped restart: unless-stopped
db: db:
image: postgres:16-alpine image: postgres:16-alpine
env_file:
- .env
environment: environment:
- POSTGRES_DB=${POSTGRES_DB:-botino} - POSTGRES_DB=${POSTGRES_DB:-botino}
- POSTGRES_USER=${POSTGRES_USER:-botino} - POSTGRES_USER=${POSTGRES_USER:-botino}

34
package-lock.json generated
View File

@@ -10,7 +10,9 @@
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"cors": "^2.8.5", "cors": "^2.8.5",
"express": "^4.19.2" "express": "^4.19.2",
"openai": "^6.15.0",
"zod": "^4.3.4"
}, },
"devDependencies": { "devDependencies": {
"dbmate": "^2.0.0", "dbmate": "^2.0.0",
@@ -965,6 +967,27 @@
"node": ">= 0.8" "node": ">= 0.8"
} }
}, },
"node_modules/openai": {
"version": "6.15.0",
"resolved": "https://registry.npmjs.org/openai/-/openai-6.15.0.tgz",
"integrity": "sha512-F1Lvs5BoVvmZtzkUEVyh8mDQPPFolq4F+xdsx/DO8Hee8YF3IGAlZqUIsF+DVGhqf4aU0a3bTghsxB6OIsRy1g==",
"license": "Apache-2.0",
"bin": {
"openai": "bin/cli"
},
"peerDependencies": {
"ws": "^8.18.0",
"zod": "^3.25 || ^4.0"
},
"peerDependenciesMeta": {
"ws": {
"optional": true
},
"zod": {
"optional": true
}
}
},
"node_modules/parseurl": { "node_modules/parseurl": {
"version": "1.3.3", "version": "1.3.3",
"resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz",
@@ -1340,6 +1363,15 @@
"engines": { "engines": {
"node": ">= 0.8" "node": ">= 0.8"
} }
},
"node_modules/zod": {
"version": "4.3.4",
"resolved": "https://registry.npmjs.org/zod/-/zod-4.3.4.tgz",
"integrity": "sha512-Zw/uYiiyF6pUT1qmKbZziChgNPRu+ZRneAsMUDU6IwmXdWt5JwcUfy2bvLOCUtz5UniaN/Zx5aFttZYbYc7O/A==",
"license": "MIT",
"funding": {
"url": "https://github.com/sponsors/colinhacks"
}
} }
} }
} }

View File

@@ -17,7 +17,9 @@
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"cors": "^2.8.5", "cors": "^2.8.5",
"express": "^4.19.2" "express": "^4.19.2",
"openai": "^6.15.0",
"zod": "^4.3.4"
}, },
"devDependencies": { "devDependencies": {
"dbmate": "^2.0.0", "dbmate": "^2.0.0",

21
src/services/openai.js Normal file
View File

@@ -0,0 +1,21 @@
// src/services/openai.js
import OpenAI from "openai";

// Shared OpenAI client. Primary env var is OPENAI_APIKEY (the name this
// project uses); the conventional OPENAI_API_KEY is accepted as a fallback.
export const openai = new OpenAI({
  apiKey: process.env.OPENAI_APIKEY ?? process.env.OPENAI_API_KEY,
});

/**
 * Ask the LLM to plan the bot's next action for one conversation turn.
 *
 * @param {object} args
 * @param {string} args.promptSystem - System prompt with the bot's instructions.
 * @param {object} args.input - Turn context (e.g. last_user_message,
 *   conversation_history, current_conversation_state, ...); sent to the model
 *   as a JSON string.
 * @returns {Promise<string>} The model's aggregated text output
 *   (`output_text`, the SDK's concatenation of text outputs).
 * @throws {Error} On API failure, with the original error attached as `cause`.
 */
export async function llmPlan({ promptSystem, input }) {
  const model = "gpt-5-mini"; // or "gpt-5" (pricier/better), per deployment
  try {
    const resp = await openai.responses.create({
      model,
      input: [
        { role: "system", content: promptSystem },
        { role: "user", content: JSON.stringify(input) },
      ],
      // If native Structured Outputs is enabled, the response schema goes
      // here; otherwise the caller validates the text with zod / JSON.parse.
    });
    return resp.output_text;
  } catch (err) {
    // Surface which model/turn failed instead of leaking a bare SDK error.
    throw new Error(`llmPlan failed (model=${model})`, { cause: err });
  }
}