27 Commits

Author SHA1 Message Date
f57309b06a Preparado despliegue 2026-03-02 15:07:30 +01:00
3be2b8f20d Nuevo nombre del container 2026-03-02 14:57:43 +01:00
4853fec7ff Fix de gestion de orders
Proceso de cancelacion verificado
2026-02-27 13:43:09 +01:00
04a6e50b7a Orders en todas las etapas 2026-02-27 11:16:45 +01:00
8ca3d095e6 Fix suspension && paso a plantilla de caso de uso 2026-02-26 17:47:32 +01:00
ca1144b55c Orders en los consumidores y gestion de los demas casos de uso 2026-02-26 17:30:32 +01:00
18422fbe38 Order para pause, activate y terminate 2026-02-25 17:42:16 +01:00
f221035c8b Visualizacion via api de las operaciones pendientes 2026-02-25 17:23:22 +01:00
02c80cd503 Orders con endpoints para monitorizacion 2026-02-25 12:20:52 +01:00
c416114c50 Arrglos por el cambio de nombre 2026-02-24 12:44:19 +01:00
e329b36933 Orders para test y flujo de migraciones mas simple 2026-02-24 11:27:47 +01:00
5c64c84e2a Todos los test de orders pasan 2026-02-23 13:35:36 +01:00
fc319372be Integracion completa de las migraciones 2026-02-23 12:04:21 +01:00
12dae135b5 Scripts de inicio con migraciones 2026-02-20 10:59:15 +01:00
b208c9c301 Preparando proceso de despliegue local para que se parezca al de
desarrollo, problema de las migraciones
2026-02-20 10:47:28 +01:00
1583ae539e Organizadas las migraciones para el despliegue 2026-02-19 17:24:47 +01:00
b6ec37c339 cambio de nombre por proposito 2026-02-17 17:24:13 +01:00
459523666f Mejora migraciones con tabla de versiones 2026-02-17 17:22:20 +01:00
8427613114 Intento de migraciones con script generador 2026-02-17 13:46:16 +01:00
5d3465fd97 Test para todo el repositorio de orders 2026-02-17 09:33:51 +01:00
39a2622cb1 base de datos de orders con repositorio y test 2026-02-16 17:31:20 +01:00
0a42e4776d Merge branch 'main' into seguimiento-tareas 2026-02-13 10:57:54 +01:00
44fea21a56 Fix de api-key y mejora del control de versiones 2026-02-13 10:55:19 +01:00
46ac54f7ab Seguimiento de ordenes desde la ingesta 2026-02-11 12:19:16 +01:00
2c9bf9dd93 Mejora del commit anterior 2026-02-10 17:28:32 +01:00
19b2958a9c Intento de mejorar el proceso de validacion de los controladores 2026-02-10 17:26:04 +01:00
a39b84e107 Validaciones para los endpints 2026-02-10 15:57:03 +01:00
73 changed files with 2511 additions and 609 deletions

18
.env
View File

@@ -1,11 +1,12 @@
PORT=3000
API_HOSTNAME=0.0.0.0
RABBITMQ_USER=guest
RABBITMQ_PASSWORD=guest
ENVIORMENT=development
#RABBITMQ_HOST=rabbitmq-sim-broker
RABBITMQ_HOST=localhost
RABBITMQ_HOST=rabbitmq-sim-broker
# RABBITMQ_HOST=localhost
RABBITMQ_PORT=5672
RABBITMQ_USER=guest
RABBITMQ_PASSWORD=guest
@@ -13,11 +14,16 @@ RABBITMQ_SECURE=false
RABBITMQ_VHOST=sim-vhost
# Hay cosas que unificar de varios servicios
POSTGRES_HOST=postgresql-sim
# POSTGRES_HOST=localhost
POSTGRES_DB=postgres
POSTGRES_DATABASE=postgres
#POSTGRES_HOST=postgresql-sim
POSTGRES_HOST=localhost
POSTGRES_PORT=5432
DEV_POSTGRES_PORT=5432
POSTGRES_PORT=5433
POSTGRES_USER=postgres
POSTGRES_PASSWORD=1234
# Para el postgres local para generar el script de resultado de migraciones
PGHOST=localhost
PGUSER=alvar
PGPASSWORD=alvar
PGPORT=5433

View File

@@ -1,4 +1,10 @@
#!/bin/bash
# Build the local docker image after regenerating the DB init script.
# init.sql is taken from the consolidated schema dump (esquema_final*),
# not from concatenating every .sql file anymore.
rm -f deployment/database/init.sql
cp deployment/database/esquema_final* deployment/database/init.sql
# Strip \restrict / \unrestrict meta-commands for compatibility with postgresql < 17
sed -i '/\\restrict/d' deployment/database/init.sql
sed -i '/\\unrestrict/d' deployment/database/init.sql
docker compose -f deployment/local/docker/docker-compose.yaml --project-directory ./ build

View File

@@ -4,11 +4,13 @@ CREATE TYPE status_enum AS ENUM ('noRequestID','noMassID','running','finished','
-- Tabla para gestionar las peticiones de cambio de objenious.
-- Para una o mas lineas se pueden lanzar operacione que no sabemos
-- con certeza cuando van a terminar.
-- Estas tablas está fuertemente ligadas al sistema que usa la plataforma
-- de objenioius y no debe unsarse para otra compañia.
CREATE TABLE if not exists objenious_operation (
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
retry_count INT DEFAULT 0,
max_retry INT DEFAULT 5,
max_date_retry TIMESTAMP DEFAULT NULL,
retry_count INT DEFAULT 0, -- No implementado en codigo
max_retry INT DEFAULT 5, -- No implementado en codigo
max_date_retry TIMESTAMP DEFAULT NULL, -- No implementado en codigo
iccids TEXT,
request_id TEXT,
mass_action_id TEXT,
@@ -24,7 +26,7 @@ CREATE TABLE if not exists objenious_operation (
-- operaciones pendientes para revisar
CREATE INDEX IF NOT EXISTS pending_operations
ON objenious_operation(start_date)
WHERE end_date IS NULL;
WHERE end_date IS NULL;
CREATE TABLE if not exists objenious_operation_change (
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,

View File

@@ -0,0 +1,106 @@
#!/bin/bash
# Builds a consolidated schema file (esquema_final_v<version>.sql) by replaying
# the base schema plus every migration whose version is <= the target semantic
# version inside a throwaway database, then dumping the resulting schema.
# --- Always run from the directory containing this script
cd "$(dirname "$0")" || exit 1
# --- Default configuration ---
MIGRATIONS_DIR="./migrations"
OUTPUT_FILE_PREFIX="esquema_final"
DB_NAME="temp_schema_build_$(date +%s)"   # unique throwaway DB name
# --- Help ---
usage() {
    echo "Uso: $0 -v <version> [-e <ruta_env>]"
    echo " -v Versión semántica objetivo (ej: 1.2.0)"
    echo " -e (Opcional) Ruta al archivo .env para cargar variables"
    echo " Los archivos de versiones tienen que tener el formato x.x.x_descripcion.sql (Es importante la _ para separar las partes) "
    exit 1
}
# --- Parse flags ---
# v: mandatory
# e: optional
while getopts "v:e:" opt; do
    case $opt in
    v) TARGET_VERSION="$OPTARG" ;;
    e) ENV_PATH="$OPTARG" ;;
    *) usage ;;
    esac
done
# The target version is required
if [ -z "$TARGET_VERSION" ]; then
    echo "Error: La versión es obligatoria."
    usage
fi
# --- Load environment variables (PGHOST/PGUSER/... used by the pg tools) ---
if [ -n "$ENV_PATH" ]; then
    if [ -f "$ENV_PATH" ]; then
        echo "~> Cargando configuración desde: $ENV_PATH"
        # Auto-export every variable defined in the file
        set -o allexport
        source "$ENV_PATH"
        set +o allexport
    else
        echo "Error: No se encontró el archivo .env en: $ENV_PATH"
        exit 1
    fi
else
    echo "!> No se especificó archivo .env, usando variables del sistema actual"
fi
# --- Safety net: always drop the temporary database on exit ---
cleanup() {
    echo "~> Limpiando: Eliminando base de datos temporal '$DB_NAME'"
    # Uses the connection variables loaded above (if any)
    dropdb "$DB_NAME" --if-exists 2>/dev/null
}
trap cleanup EXIT
# --- Start of the process ---
echo "~> Iniciando build para versión: $TARGET_VERSION"
# 1. Create the temporary DB
# Note: if the .env sets PGHOST the DB is created there, otherwise on localhost.
createdb "$DB_NAME"
# 2. Concatenate and run the base schema (if any base scripts exist).
# -s (non-empty) is the right check: the redirection below creates the
# file even when the glob matches nothing.
rm -f init.sql
cat base/*.sql >init.sql 2>/dev/null
if [ -s "init.sql" ]; then
    echo "~> Ejecutando init.sql..."
    psql -d "$DB_NAME" -f init.sql >/dev/null
fi
# 3. Iterate migrations in version order and filter by target version
echo "~> Aplicando migraciones hasta la versión $TARGET_VERSION..."
while IFS= read -r f; do
    FILENAME=$(basename "$f")
    # Extract the version (accepts V1.0.0_desc.sql or 1.0.0_desc.sql)
    FILE_VER=$(echo "$FILENAME" | sed -E 's/^V//' | awk -F_ '{print $1}')
    echo "comparando $TARGET_VERSION con $FILE_VER"
    # Semantic comparison: apply when FILE_VER <= TARGET_VERSION
    LOWEST=$(printf '%s\n%s\n' "$TARGET_VERSION" "$FILE_VER" | sort -V | head -n1)
    if [ "$LOWEST" == "$FILE_VER" ] || [ "$FILE_VER" == "$TARGET_VERSION" ]; then
        echo "~> Aplicando: $FILENAME ($FILE_VER)"
        psql -d "$DB_NAME" -f "$f" >/dev/null
    else
        echo "~> Saltando: $FILENAME ($FILE_VER) - Mayor que objetivo"
    fi
done < <(printf '%s\n' "$MIGRATIONS_DIR"/*.sql | sort -V)
# 4. Build the output file name
OUTPUT_FILE="${OUTPUT_FILE_PREFIX}_v${TARGET_VERSION}.sql"
# 5. Dump the FINAL schema (structure only, no owners or privileges)
echo "~> Generando $OUTPUT_FILE ---"
pg_dump -d "$DB_NAME" -s --no-owner --no-privileges >"$OUTPUT_FILE"
echo "o> Esquema guardado en $OUTPUT_FILE"

View File

@@ -1,150 +0,0 @@
-- eliminar los drop para prod
drop domain if exists imei_type cascade;
CREATE DOMAIN imei_type as varchar(15);
drop domain if exists iccid_type cascade;
CREATE DOMAIN iccid_type as varchar(22);
drop domain if exists imsi_type cascade;
CREATE DOMAIN imsi_type as varchar(15);
CREATE table if not exists sim_cards (
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
imei imei_type,
iccid iccid_type,
imsi imsi_type,
user_id BIGINT,
subscription_id BIGINT,
created_at TIMESTAMP,
last_update TIMESTAMP,
deleted_at TIMESTAMP
);
CREATE TABLE if not exists sim_envio (
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
codigo_origen TEXT,
codigo_distrito TEXT,
pedido_id BIGINT,
sim_id BIGINT,
fecha_envio TIMESTAMP,
fecha_email TIMESTAMP,
is_preactivado BOOLEAN,
fecha_devolucion TIMESTAMP,
created_at TIMESTAMP,
CONSTRAINT fk_sim_id
FOREIGN KEY(sim_id) REFERENCES sim_cards(id)
);
-- Mock, No es parte de SIMs
CREATE TABLE if not exists sf_subscription (
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY
);
-- No habria que meterle las propiedades del tipo de subscripcion
CREATE TABLE if not exists sim_subscription_types (
id INT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
subscription TEXT NOT NULL,
created_at TIMESTAMP,
updated_at TIMESTAMP,
deleted_at TIMESTAMP
);
CREATE TABLE if not exists sim_company (
id INT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
name TEXT,
created_at TIMESTAMP,
updated_at TIMESTAMP,
deleted_at TIMESTAMP
);
CREATE TABLE sim_subscription (
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
company_id INT,
subscription_type_id INT,
sim_id BIGINT,
order_id BIGINT,
created_at TIMESTAMP,
updated_at TIMESTAMP,
deleted_at TIMESTAMP,
CONSTRAINT fk_sim_id
FOREIGN KEY(sim_id) REFERENCES sim_cards(id),
CONSTRAINT fk_company_id
FOREIGN KEY(company_id) REFERENCES sim_company(id),
CONSTRAINT fk_subscription_type_id
FOREIGN KEY(subscription_type_id) REFERENCES sim_subscription_types(id)
);
CREATE TABLE if not exists sim_subscription_operations (
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
sim_id BIGINT,
operation_type TEXT NOT NULL,
happened_at TIMESTAMP,
CONSTRAINT valid_operations CHECK (
operation_type in ('free','preactivate','activate','pause','cancel')
),
CONSTRAINT fk_subscription_id
FOREIGN KEY(sim_id)
REFERENCES sim_subscription(id)
);
-- Se supone que indica un cambio
CREATE TABLE sim_subscription_historic (
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
subscription_id BIGINT,
iccid iccid_type,
company_id INT
);
CREATE TYPE status_enum AS ENUM ('noRequestID','noMassID','running','finished','error','other');
-- Tabla para gestionar las peticiones de cambio de objenious.
-- Para una o mas lineas se pueden lanzar operacione que no sabemos
-- con certeza cuando van a terminar.
CREATE TABLE if not exists objenious_operation (
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
retry_count INT DEFAULT 0,
max_retry INT DEFAULT 5,
max_date_retry TIMESTAMP DEFAULT NULL,
iccids TEXT,
request_id TEXT,
mass_action_id TEXT,
operation TEXT NOT NULL,
start_date TIMESTAMP NOT NULL DEFAULT now(),
last_change_date TIMESTAMP NOT NULL DEFAULT now(),
end_date TIMESTAMP,
error TEXT,
status status_enum,
objenious_status TEXT
);
-- operaciones pendientes para revisar
CREATE INDEX IF NOT EXISTS pending_operations
ON objenious_operation(start_date)
WHERE end_date IS NULL;
CREATE TABLE if not exists objenious_operation_change (
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
operation_id BIGINT,
creation_date TIMESTAMP NOT NULL DEFAULT now(),
error TEXT,
new_status status_enum,
previous_status status_enum,
new_objenious_status TEXT,
previous_objenious_status TEXT,
new_request_id TEXT,
new_mass_action_id TEXT,
CONSTRAINT fk_operation_id
FOREIGN KEY(operation_id) REFERENCES objenious_operation(id)
);
CREATE INDEX operation_change
ON objenious_operation_change(operation_id);

View File

@@ -0,0 +1,48 @@
-- Lifecycle states of an Objenious bulk-action request.
CREATE TYPE status_enum AS ENUM ('noRequestID','noMassID','running','finished','error','other');
-- Tracks change requests sent to Objenious. An operation can target one or
-- more lines (ICCIDs) and there is no guarantee of when it will finish.
-- These tables are tightly coupled to the Objenious platform integration
-- and must not be reused for any other carrier.
CREATE TABLE if not exists objenious_operation (
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
retry_count INT DEFAULT 0, -- Not implemented in application code yet
max_retry INT DEFAULT 5, -- Not implemented in application code yet
max_date_retry TIMESTAMP DEFAULT NULL, -- Not implemented in application code yet
iccids TEXT,
request_id TEXT,
mass_action_id TEXT,
operation TEXT NOT NULL,
start_date TIMESTAMP NOT NULL DEFAULT now(),
last_change_date TIMESTAMP NOT NULL DEFAULT now(),
end_date TIMESTAMP,
error TEXT,
status status_enum,
objenious_status TEXT
);
-- Partial index: operations still pending review (no end_date yet).
CREATE INDEX IF NOT EXISTS pending_operations
ON objenious_operation(start_date)
WHERE end_date IS NULL;
-- Audit trail: one row per status transition of an operation.
CREATE TABLE if not exists objenious_operation_change (
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
operation_id BIGINT,
creation_date TIMESTAMP NOT NULL DEFAULT now(),
error TEXT,
new_status status_enum,
previous_status status_enum,
new_objenious_status TEXT,
previous_objenious_status TEXT,
new_request_id TEXT,
new_mass_action_id TEXT,
CONSTRAINT fk_operation_id
FOREIGN KEY(operation_id) REFERENCES objenious_operation(id)
);
-- Lookup of change rows by their parent operation.
CREATE INDEX operation_change
ON objenious_operation_change(operation_id);

View File

@@ -0,0 +1,67 @@
-- Tables for tracking SIM operations regardless of the carrier.
DO $$ BEGIN
CREATE TYPE order_types AS ENUM ('activate','preactivate','cancel','pause','reactivate','unknown');
CREATE TYPE order_status AS ENUM (
'pending', -- Message created/published to RabbitMQ
'running', -- A consumer has picked up the message (optional)
'finished', -- Processed successfully
'failed', -- Failed but might be retried (move to delay queue?)
'dlx' -- Failed permanently and sits in the Dead Letter Exchange
);
EXCEPTION
WHEN duplicate_object THEN null; -- types already exist: migration is idempotent
END $$;
CREATE TABLE IF NOT EXISTS order_tracking (
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
correlation_id VARCHAR(255) NOT NULL, -- ID shared with RabbitMQ (message_id)
exchange VARCHAR(100), -- Target exchange (currently one main exchange, not counting delay and dlx)
routing_key VARCHAR(100), -- Routing key of the message
order_type order_types NOT NULL DEFAULT 'unknown',
payload JSONB, -- Storing a copy may not be optimal, but it is useful on failure
-- Retry-related fields? (open question)
status order_status NOT NULL DEFAULT 'pending',
retry_count INT DEFAULT 0,
error_message TEXT, -- Failure reason
error_stacktrace TEXT,
start_date TIMESTAMP NOT NULL DEFAULT (now() at time zone 'utc'),
update_date TIMESTAMP NOT NULL DEFAULT (now() at time zone 'utc'),
finish_date TIMESTAMP
);
-- Lookup by the RabbitMQ correlation id
CREATE INDEX IF NOT EXISTS idx_order_correlation
ON order_tracking(correlation_id);
-- Partial index: orders that have not finished yet
CREATE INDEX IF NOT EXISTS pending_orders
ON order_tracking(start_date)
WHERE order_tracking.finish_date IS NULL;
CREATE TABLE IF NOT EXISTS order_history(
id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
order_id BIGINT NOT NULL,
previous_status order_status NOT NULL, -- There is always a previous state; exceptional cases use "unknown"
new_status order_status NOT NULL,
change_reason TEXT,
change_date TIMESTAMP NOT NULL DEFAULT (now() at time zone 'utc'),
CONSTRAINT fk_order_id
FOREIGN KEY(order_id)
REFERENCES order_tracking(id)
ON DELETE CASCADE
);
-- FK lookup by parent order
CREATE INDEX IF NOT EXISTS idx_order_id
ON order_history(order_id);
-- Date-range searches over the history
CREATE INDEX IF NOT EXISTS idx_order_change_date
ON order_history(change_date);

View File

@@ -0,0 +1,12 @@
/*
 * Align every timestamp default on 'UTC'.
 * */
ALTER TABLE objenious_operation
ALTER COLUMN start_date SET DEFAULT (now() at time zone 'utc'),
ALTER COLUMN last_change_date SET DEFAULT (now() at time zone 'utc');
ALTER TABLE objenious_operation_change
ALTER COLUMN creation_date SET DEFAULT (now() at time zone 'utc');

View File

@@ -0,0 +1,30 @@
/*
 * Convert timestamp columns to TIMESTAMP WITH TIME ZONE so queries can be
 * made against any target time zone:
 *   SELECT col_date at time zone 'cet' -- returns the date in that zone
 *   SELECT col_date -- returns the date in UTC with the zone offset
 *
 * */
ALTER TABLE objenious_operation
ALTER COLUMN start_date SET DATA TYPE TIMESTAMP WITH TIME ZONE,
ALTER COLUMN start_date SET DEFAULT now(),
ALTER COLUMN last_change_date SET DATA TYPE TIMESTAMP WITH TIME ZONE,
ALTER COLUMN last_change_date SET DEFAULT now(),
ALTER COLUMN end_date SET DATA TYPE TIMESTAMP WITH TIME ZONE;
ALTER TABLE objenious_operation_change
ALTER COLUMN creation_date SET DATA TYPE TIMESTAMP WITH TIME ZONE,
ALTER COLUMN creation_date SET DEFAULT now();
ALTER TABLE order_tracking
ALTER COLUMN start_date SET DATA TYPE TIMESTAMP WITH TIME ZONE,
ALTER COLUMN start_date SET DEFAULT now(),
ALTER COLUMN update_date SET DATA TYPE TIMESTAMP WITH TIME ZONE,
ALTER COLUMN update_date SET DEFAULT now(),
ALTER COLUMN finish_date SET DATA TYPE TIMESTAMP WITH TIME ZONE;
ALTER TABLE order_history
ALTER COLUMN change_date SET DATA TYPE TIMESTAMP WITH TIME ZONE,
ALTER COLUMN change_date SET DEFAULT now();

View File

@@ -0,0 +1,10 @@
/**
 * Optional webhook target to which order status updates are sent, when
 * specified. POST is always assumed.
 * Host and endpoint are stored separately so the host can default to the
 * origin of the previous request and so rows can be filtered by host.
 */
ALTER TABLE order_tracking
ADD COLUMN webhook_host TEXT,
ADD COLUMN webhook_endpoint TEXT;

View File

@@ -0,0 +1,7 @@
/**
 * The objenious orders table had no way to tell which message requested
 * each operation; store the originating correlation id here.
 */
ALTER TABLE objenious_operation
ADD COLUMN correlation_id TEXT;

View File

@@ -1,5 +1,6 @@
# --- Release image ---
FROM node:22-alpine AS release
RUN apk --no-cache add git
WORKDIR /home/node/app
RUN corepack enable
@@ -19,4 +20,5 @@ COPY ./entrypoint.sh ./
RUN chmod +x entrypoint.sh
EXPOSE ${PORT:-3000}
ENTRYPOINT ["./entrypoint.sh"]

View File

@@ -0,0 +1,27 @@
# Base stage coordinating the build and run phases
FROM node:22-alpine AS base
# git is required by the migration tool; once it is published this will be
# replaced by its npm package
RUN apk --no-cache add git
WORKDIR /usr/local/app
RUN corepack enable && \
corepack prepare yarn@4.12.0 --activate
COPY ./package.json ./yarn.lock ./
COPY ./packages ./packages
# Copy the rest of the source tree
COPY tsconfig*.json ./
COPY .env* ./
COPY ./.yarnrc.yml ./
COPY ./deployment/local/docker/start.sh ./
# NOTE(review): is the migrations folder actually used at runtime? It does
# not look like anything is launched with it right now — confirm.
COPY ./deployment/database/migrations ./deployment/database/migrations
RUN yarn install && \
yarn cache clean && \
yarn build && \
chmod +x start.sh
EXPOSE ${PORT}
ENTRYPOINT [ "./start.sh" ]

View File

@@ -24,14 +24,15 @@ services:
RABBITMQ_DEFAULT_PASS: ${RABBITMQ_PASSWORD}
volumes:
- ./rabbitmq_plugins/enabled_plugins:/etc/rabbitmq/enabled_plugins:ro
- ./deployment/rabbit/rabbitmq.conf:/etc/rabbitmq/rabbitmq.conf:ro
- ./deployment/rabbit/definitions.json:/etc/rabbitmq/definitions.json:ro
- ./deployment/local/rabbit/rabbitmq.conf:/etc/rabbitmq/rabbitmq.conf:ro
- ./deployment/local/rabbit/definitions.json:/etc/rabbitmq/definitions.json:ro
sim-gateway:
container_name: sim-gateway
sf-sims-api:
container_name: sf-sims-api
image: sf-sims-api
build:
context: ./
dockerfile: deployment/Dockerfile.dev
dockerfile: deployment/local/docker/Dockerfile.dev
args:
PORT: "${PORT:-3000}"
develop:
@@ -46,16 +47,29 @@ services:
env_file:
- .env
restart: unless-stopped
healthcheck:
test:
[
"CMD-SHELL",
'node -e "fetch(''http://localhost:'' + (process.env.PORT || 3000) + ''/health'').then(r => { if (!r.ok) process.exit(1) }).catch(() => process.exit(1))"',
]
interval: 10s
timeout: 5s
retries: 5
start_period: 15s
depends_on:
rabbitmq-sim-broker:
condition: service_healthy
postgresql-sim:
condition: service_healthy
postgresql-sim:
container_name: postgresql-sim
image: postgres:16.1
env_file:
- .env
ports:
- "5432:${DEV_POSTGRES_PORT}"
- "${POSTGRES_PORT}:${POSTGRES_PORT}"
volumes:
- ./sql-data/:/var/lib/postgres/data
- ./deployment/database/init.sql:/docker-entrypoint-initdb.d/init.sql

View File

@@ -1,9 +1,10 @@
#!/bin/bash
# Redeploy the sf-sims-api container: enter the compose directory, remove
# the previous container and image, then rebuild and start detached.
# (Replaces the old sf-shopify-orders deployment of the same script.)
cd /mnt/docker-storage/containers/savefamily/sf-sims-api || exit 1
docker stop sf-sims-api || true
docker rm sf-sims-api || true
docker rmi sf-sims-api || true
docker compose -f docker-compose.yaml up --build -d

View File

@@ -0,0 +1,3 @@
#!/bin/sh
# Container entrypoint: run pending database migrations first; start the
# server only if they succeed.
echo "Lanzando migraciones e iniciando servidor"
yarn migrate && yarn start

View File

@@ -1,6 +1,3 @@
default_user = guest
default_pass = guest
listeners.tcp.default = 5672
management.tcp.port = 15672

View File

@@ -11,7 +11,7 @@ post {
}
body:form-urlencoded {
iccid: 8933201125065160406
iccid: 8933201125065160331
offer: SAVEFAMILY1
}

View File

@@ -0,0 +1,16 @@
meta {
name: Activation Email Health
type: http
seq: 8
}
post {
url: https://sf-sim-activation.savefamily.net/health
body: none
auth: inherit
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,42 @@
meta {
name: Activation Email
type: http
seq: 6
}
post {
url: https://sf-sim-activation.savefamily.net/send-activation-mail
body: json
auth: inherit
}
headers {
x-apikey-sim-activation: 9e48c4ac-1ab0-4397-b3f3-6c239200dfe6
}
body:json {
{
"id": "11",
"retry_count": 0,
"max_retry": null,
"max_date_retry": null,
"iccids": [
"8933201125068886080"
],
"request_id": "14362",
"mass_action_id": "5208468",
"operation": "activate",
"start_date": "2026-02-13T11:08:42.499Z",
"last_change_date": "2026-02-16T09:24:36.073Z",
"end_date": "2026-02-16T09:24:36.073Z",
"error": null,
"status": "finished",
"objenious_status": "Terminé",
"msisdn": "33764399870"
}
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -11,7 +11,7 @@ post {
}
body:form-urlencoded {
iccid: 8933201124059176320
iccid: 8933201125068886692
}
settings {

View File

@@ -0,0 +1,16 @@
meta {
name: Get pending orders
type: http
seq: 11
}
get {
url: {{baseurl}}/orders/pending
body: none
auth: inherit
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,16 @@
meta {
name: Order by id
type: http
seq: 9
}
get {
url: {{baseurl}}/orders/
body: none
auth: inherit
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,20 @@
meta {
name: Orders by message_id
type: http
seq: 12
}
get {
url: {{baseurl}}/orders/message_id/019c93d3-014a-711d-b958-03dd629be78d
body: none
auth: inherit
}
params:query {
~message_id: 019c93d3-014a-711d-b958-03dd629be78d
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,21 @@
meta {
name: Test Order
type: http
seq: 9
}
post {
url: {{baseurl}}/sim/test
body: formUrlEncoded
auth: inherit
}
body:form-urlencoded {
iccid: 8933201125065160999
offer: SAVEFAMILY1
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -0,0 +1,38 @@
meta {
name: Alarmas disponibles
type: http
seq: 20
}
get {
url: https://api-getway.objenious.com/ws/alarms
body: formUrlEncoded
auth: bearer
}
auth:bearer {
token: {{ws-access-token-partenaire}}
}
body:json {
{
"identifier": {
"identifiers": ["8933201124059175967"],
"identifierType": "ICCID"
}
}
}
body:form-urlencoded {
~identifier.identifierType: "ICCID"
~identifier.identifiers: ["8933201124059175967"]
}
vars:pre-request {
~id: 5187320
}
settings {
encodeUrl: true
timeout: 0
}

View File

@@ -37,7 +37,7 @@ body:form-urlencoded {
}
vars:pre-request {
params.id: 14111
params.id: 14557
}
settings {

View File

@@ -1,5 +1,6 @@
{
"name": "sim-eventos",
"version": "1.0.0",
"packageManager": "yarn@4.12.0",
"workspaces": [
"packages/*"
@@ -14,7 +15,8 @@
"lint": "eslint .",
"lint:fix": "eslint --fix .",
"format": "prettier --write .",
"format:check": "prettier --check ."
"format:check": "prettier --check .",
"migrate": "yarn db-migrate -e .env -m deployment/database/migrations -t 99.0.0"
},
"dependencies": {
"@tsconfig/node22": "^22.0.5",
@@ -22,10 +24,12 @@
"amqplib": "^0.10.9",
"axios": "^1.13.3",
"cors": "^2.8.5",
"db-migrate": "https://git.savefamilygps.net/alvarsanmartin/herramienta-migracion.git",
"dotenv": "^17.2.3",
"express": "^5.2.1",
"pg": "^8.18.0",
"typescript": "^5.9.3",
"uuidv7": "^1.1.0",
"vite": "^7.3.1",
"vite-tsconfig-paths": "^6.0.5"
},

View File

@@ -1,3 +1,3 @@
console.log("Template")
console.log(new Date().toISOString())
export default {}

View File

@@ -5,4 +5,6 @@ OBJ_CLI_ASSERTION=XOc7FtwXD8hUX2SFVX94XSty8wkOmChkwDNF09O_aIxPubMDdFUdCDCB4zpzSI
OBJ_CLIENT_ID=savefamily_rest_ws
OBJ_KID=xNfbMiyL1ORXGP8lElhcv8nVaG3EJKye4Lc1YoN3I1E
OBJ_BASE_URL=https://api-getway.objenious.com/ws
OBJ_CUSTOMER_CODE=9.49411.10
//OBJ_BASE_URL=https://api-getway.objenious.com/ws/test

View File

@@ -3,6 +3,7 @@ import { ConsumeMessage } from "amqplib";
import { SimUseCases } from "./Sim.usecases.js";
import { SimEvents } from "sim-shared/domain/SimEvents.js";
import { Result } from "sim-shared/domain/Result.js";
import { env } from "#config/env/index.js";
/**
* La clase usa generadores de funciones para mantener el contexto
@@ -64,6 +65,8 @@ export class SimController {
}
public activate() {
const DUE_DATE_SECONDS = 2 * 60
return async (msg: ConsumeMessage) => {
let msgData;
try {
@@ -80,9 +83,10 @@ export class SimController {
throw new Error("Error activando la sim, no se ha especificado la oferta")
}
this.tryUseCase(msg, this.useCases.activate({
dueDate: this.genDueDate(2 * 60).toISOString(),
customerAccountCode: "9.49411.10", // TODO: Al .env
const resp = await this.tryUseCase(msg, this.useCases.activate({
correlation_id: msgData.headers?.message_id,
dueDate: this.genDueDate(DUE_DATE_SECONDS).toISOString(),
customerAccountCode: env.OBJ_CUSTOMER_CODE,
identifier: {
identifierType: "ICCID",
identifiers: [iccid]
@@ -92,6 +96,11 @@ export class SimController {
services: []
}
}))
// TODO:
// - Crear un registro de operación
// - Si ha salido bien id de operación -> webhook?
// - Si ha salido mal notificar solo cuando se manda a dlx ??
}
}
@@ -109,7 +118,8 @@ export class SimController {
}
const iccid = msgData.payload.iccid
this.tryUseCase(msg, this.useCases.preActivate({
const res = await this.tryUseCase(msg, this.useCases.preActivate({
correlation_id: msgData.headers?.message_id,
dueDate: this.genDueDate(2 * 60).toISOString(),
identifier: {
identifierType: "ICCID",
@@ -135,7 +145,8 @@ export class SimController {
}
const iccid = msgData.payload.iccid
this.tryUseCase(msg, this.useCases.suspend({
const res = await this.tryUseCase(msg, this.useCases.reActivate({
correlation_id: msgData.headers?.message_id,
dueDate: this.genDueDate(2 * 60).toISOString(),
identifier: {
identifierType: "ICCID",
@@ -160,7 +171,8 @@ export class SimController {
}
const iccid = msgData.payload.iccid
this.tryUseCase(msg, this.useCases.suspend({
const res = await this.tryUseCase(msg, this.useCases.suspend({
correlation_id: msgData.headers?.message_id,
dueDate: this.genDueDate(2 * 60).toISOString(),
identifier: {
identifierType: "ICCID",
@@ -184,8 +196,9 @@ export class SimController {
return Promise.reject("Mensaje invalido")
}
const iccid = msgData.payload.iccid
console.log("Mensaje procesado", String(msgData))
this.tryUseCase(msg, this.useCases.terminate({
console.log("Mensaje procesado", msgData)
const res = await this.tryUseCase(msg, this.useCases.terminate({
correlation_id: msgData.headers?.message_id,
dueDate: this.genDueDate(2 * 60).toISOString(),
identifier: {
identifierType: "ICCID",
@@ -213,3 +226,4 @@ export class SimController {
return dueDate
}
}

View File

@@ -18,7 +18,7 @@ export class SimRouter {
this.routes = new Map([
["activate", this.simController.activate()],
["pause", this.simController.suspend()],
["cancel", this.simController.terminate()], // terminate
["cancel", this.simController.terminate()],
["reActivate", this.simController.reActivate()],
["preActivate", this.simController.preActivate()]
]);
@@ -27,6 +27,8 @@ export class SimRouter {
/**
* Enruta el mensaje a la acción correspondiente basándose en la routing key
* TODO: No estoy seguro que deba meter el nack aqui
* - De moemento el ack-nack se gestiona en los controller, por si acaso hay casos
* limite en
*/
public route = async (msg: ConsumeMessage | null): Promise<void> => {
if (!msg) {

View File

@@ -3,6 +3,8 @@ import { HttpClient } from "sim-shared/infrastructure/HTTPClient.js"
import { AxiosError } from "axios"
import { Result } from "sim-shared/domain/Result.js"
import { ObjeniousOperation, IOperationsRepository as OperationsRepositoryPort } from "sim-shared/domain/operationsRepository.port.js"
import assert from "node:assert"
import { OrderRepository } from "sim-shared/infrastructure/OrderRepository.js"
// TODO:
// - Pasar a un archivo de DTOs
@@ -11,12 +13,16 @@ import { ObjeniousOperation, IOperationsRepository as OperationsRepositoryPort }
export class SimUseCases {
private readonly httpClient: HttpClient
private readonly operationRepository: OperationsRepositoryPort
private readonly orderRepository: OrderRepository
constructor(args: {
httpClient: HttpClient,
operationRepository: OperationsRepositoryPort
operationRepository: OperationsRepositoryPort,
orderRepository: OrderRepository
}) {
this.httpClient = args.httpClient
this.operationRepository = args.operationRepository
this.orderRepository = args.orderRepository
}
private async logOperation(data: ObjeniousOperation) {
@@ -25,6 +31,81 @@ export class SimUseCases {
})
}
/**
* Garantiza el flujo de todos los casos de uso de:
* - Petición según la acción
* - Control de errores
* - Siempre devuelve un Result
* - Almacena la operacion en la base de datos
* - Actualiza el estado del order
*
* Necesita:
* - Mas control según el codigo de error
*/
private generateUseCase<
PAYLOAD,
RESPONSETYPE extends { requestId: string }
>(args: {
correlation_id?: string,
url: string,
operation: string,
operationPayload: PAYLOAD,
iccid: string
onError?: (_: any) => void
// on code response??
}): () => Promise<Result<string, boolean>> {
return async () => {
const req = this.httpClient.client.post<RESPONSETYPE>(args.url, {
...args.operationPayload
})
try {
const response = await req;
if (response.status == 200) {
assert(response.data.requestId != undefined)
// Creacion de la operacion inicial, antes de tener los datos
const operation: ObjeniousOperation = {
operation: args.operation,
iccids: String(args.iccid),
status: "noMassID",
request_id: response.data.requestId
}
this.logOperation(operation)
.then().catch(e => console.error(e))
if (args.correlation_id != undefined) {
this.orderRepository.updateOrder({
correlation_id: args.correlation_id!,
new_status: "running", // Siempre es runing la primera vez que se consume
})
.then(e => console.log("Order actualizado: ", e))
.catch(e => console.error("Error actualizando order", args.correlation_id))
}
return <Result<string, boolean>>{
error: undefined,
data: true
}
} else {
return {
error: String(response.status),
data: undefined
}
}
} catch (error) {
console.error(`[Sim.usecase] Error ${args.operation}`, (error as AxiosError).response?.status)
return {
error: "Error general de la peticion",
data: undefined
}
}
}
}
public activate(activationData: ActivationData): () => Promise<Result<string, boolean>> {
const OPERATION_URL = "/actions/activateLine"
return async () => {
@@ -51,8 +132,6 @@ export class SimUseCases {
error: undefined,
data: true
}
} else {
// muy mejorable el control de errores
return {
@@ -82,6 +161,7 @@ export class SimUseCases {
if (resp.status == 200) {
console.log("Sim preactivada con exito", resp.data)
const operation: ObjeniousOperation = {
correlation_id: preActivateData.correlation_id,
operation: "preActivate",
iccids: String(preActivateData.identifier.identifiers),
status: "noMassID",
@@ -96,14 +176,14 @@ export class SimUseCases {
} else {
return <Result<string, boolean>>{
error: String(resp.status),
data: true
data: undefined
}
}
} catch (error) {
console.error("Error preactivacion", preActivateData)
return <Result<string, boolean>>{
error: "Error preactivando la sim" + preActivateData.identifier,
data: true
data: undefined
}
}
}
@@ -117,17 +197,25 @@ export class SimUseCases {
})
try {
const e = await req
console.log("Sim reactivada con exito", e.data)
const response = await req
if (response.status == 200) {
console.log("[o] Sim solicitud de reactivacion ", response.data)
return <Result<string, boolean>>{
error: undefined,
data: true
}
} else {
return {
error: String(response.status),
data: undefined
}
}
} catch (error) {
console.error("Error reactivacion", error)
console.error("[x] Error reactivacion", (error as AxiosError).response?.status)
return <Result<string, boolean>>{
error: "Error reactivando la sim" + pauseData.identifier,
data: true
data: undefined
}
}
}
@@ -135,53 +223,24 @@ export class SimUseCases {
public suspend(suspendData: ActionData): () => Promise<Result<string, boolean>> {
const OPERATION_URL = "/actions/suspendLine"
return async () => {
const req = this.httpClient.client.post(OPERATION_URL, {
...suspendData
return this.generateUseCase({
correlation_id: suspendData.correlation_id,
operationPayload: suspendData,
url: OPERATION_URL,
iccid: suspendData.identifier.identifiers[0], //
operation: "suspend"
})
try {
const e = await req
console.log("Sim pausada/suspendida con exito", e.data)
return <Result<string, boolean>>{
error: undefined,
data: true
}
} catch (error) {
console.error("[Pausa Use case] Error pausa")
return {
error: "Error general pausando/suspendiendo la sim" + suspendData.identifier,
data: undefined
}
}
}
}
public terminate(terminationData: ActionData): () => Promise<Result<string, boolean>> {
const OPERATION_URL = "/actions/terminateLine"
return async () => {
const req = this.httpClient.client.post(OPERATION_URL, {
...terminationData
return this.generateUseCase({
correlation_id: terminationData.correlation_id,
operationPayload: terminationData,
url: OPERATION_URL,
iccid: terminationData.identifier.identifiers[0], //
operation: "terminate"
})
// TODO: para cuando estemos listos.
throw new Error("Peticion no reversible desactivada de momento")
try {
const e = await req
console.log("Sim cancelada con exito", e.data)
return <Result<string, boolean>>{
error: undefined,
data: true
}
} catch (error) {
console.error("Error pausa", error)
return <Result<string, boolean>>{
error: "Error cancelando/terminate la sim" + terminationData.identifier,
data: undefined
}
}
}
}

View File

@@ -28,7 +28,7 @@ export const env = {
OBJ_CLI_ASSERTION: String(process.env.OBJ_CLI_ASSERTION),
OBJ_CLIENT_ID: String(process.env.OBJ_CLIENT_ID),
OBJ_KID: String(process.env.OBJ_KID),
OBJ_BASE_URL: String(process.env.OBJ_BASE_URL)
OBJ_BASE_URL: String(process.env.OBJ_BASE_URL),
OBJ_CUSTOMER_CODE: String(process.env.OBJ_CUSTOMER_CODE)
};

View File

@@ -1,5 +1,6 @@
export type ActionData = {
correlation_id?: string;
dueDate: string, // isodate
filter?: {} // no se si hace falta
identifier: {

View File

@@ -1,5 +1,5 @@
import { OperationsRepository } from "sim-shared/infrastructure/OperationRepository.js"
import { ObjeniousOperationsRepository } from "sim-shared/infrastructure/ObjeniousOperationRepository.js"
import { startRMQClient } from "#config/eventBus.config.js"
import { httpInstance } from "#config/httpClient.config.js"
import { pgPool } from "#config/postgreConfig.js"
@@ -7,6 +7,7 @@ import { PgClient } from "sim-shared/infrastructure/PgClient.js"
import { SimUseCases } from "./aplication/Sim.usecases.js"
import { SimController } from "./aplication/Sim.controller.js"
import { SimRouter } from "./aplication/Sim.router.js"
import { OrderRepository } from "sim-shared/infrastructure/OrderRepository.js"
async function startWorker() {
const rmqClient = await startRMQClient()
@@ -17,13 +18,15 @@ async function startWorker() {
await pgClient.checkDatabaseConnection()
const operationRepository = new OperationsRepository(pgClient)
const operationRepository = new ObjeniousOperationsRepository(pgClient)
const orderRepository = new OrderRepository(pgClient)
const simActivationController = new SimController(
rmqClient,
new SimUseCases({
httpClient: httpClient,
operationRepository: operationRepository
operationRepository: operationRepository,
orderRepository: orderRepository
})
)
const simRouter = new SimRouter(simActivationController, rmqClient)

View File

@@ -68,7 +68,6 @@
"cors": "*",
"dotenv": "*",
"express": "*",
"sim-consumidor-objenious": "sim-consumidor-objenious:*",
"sim-shared": "sim-shared:*",
"typescript": "*"
},

View File

@@ -0,0 +1,127 @@
import { BodyValidator } from "sim-shared/aplication/BodyValidator.js"
import { OrderUsecases } from "./Order.usecases.js"
import { Request, Response } from "express"
import { PaginationArgs } from "#domain/common.js"
import { idValidator, uuidValidator } from "./httpValidators.js"
export class OrderController {
    // Use cases this controller delegates to.
    private orderUseCases: OrderUsecases
    constructor(args: {
        orderUseCases: OrderUsecases
    }) {
        this.orderUseCases = args.orderUseCases
    }
    /** GET handler: fetch a single order by its numeric id (path param). */
    public getById() {
        return this.controllerGenerator<{ id: number }, { id: number }>({
            validator: idValidator,
            // Path params arrive as strings; coerce before the use case.
            // NOTE(review): idValidator uses Number.isInteger, which is false
            // for string params — confirm BodyValidator semantics, or move the
            // coercion before validation if every request 422s.
            mapBody: (body) => ({ id: Number(body.id) }),
            useCase: this.orderUseCases.getById(),
            onError: (data, error) => { console.error(error) },
            onSuccess: (data) => console.log(data)
        })
    }
    /** GET handler: list pending orders with optional pagination. */
    public getPending() {
        return this.controllerGenerator<PaginationArgs, PaginationArgs>({
            validator: undefined,
            useCase: this.orderUseCases.getPending(),
            onError: (data, error) => { console.error(error) },
            onSuccess: (data) => console.log(data)
        })
    }
    /** GET handler: fetch an order by its queue correlation id (uuid). */
    public getByQueueId() {
        return this.controllerGenerator<{ correlation_id: string }, { correlation_id: string }>({
            validator: uuidValidator,
            useCase: this.orderUseCases.getByQueueId(),
            onError: (data, error) => { console.error(error) },
            onSuccess: (data) => console.log(data)
        })
    }
    /**
     * Generic request pipeline:
     *   request -> body validation -> body mapping -> use case -> OK/ERR.
     *
     * <O> is the raw input shape, <P> the shape after mapping.
     *
     * TODO (from original author): still being validated; copied from
     * SimController, consider injecting it; map the response?
     */
    public controllerGenerator<O extends object, P extends object>(args: {
        validator?: BodyValidator<O>,
        mapBody?: (body: O) => P,
        useCase: (args: P) => Promise<any>,
        onError: (args: O | P, error: string) => void,
        onSuccess: (args: P) => void,
    }) {
        return async (req: Request, res: Response) => {
            // Merge body and path params into one input object
            // (sketchy, as the original noted: params shadow body keys).
            const body = { ...req.body, ...req.params }
            // 1. Body validation.
            try {
                if (args.validator != undefined)
                    args.validator.validate(body)
            } catch (e) {
                if (args.onError != undefined) args.onError(body, e as string)
                res.status(422).json({
                    errors: {
                        msg: e
                    }
                })
                // BUGFIX: without this return the pipeline kept running after
                // responding and tried to answer the request a second time.
                return;
            }
            // 2. Body transformation.
            let data: P = body;
            try {
                if (args.mapBody != undefined)
                    data = args.mapBody(body)
            } catch (e) {
                res.status(422).json({
                    errors: {
                        msg: "Error parseando el body: " + e
                    }
                })
                // BUGFIX: same as above — stop after responding.
                return;
            }
            // 3. Use case execution.
            try {
                const usecaseResult = await args.useCase(data)
                // 4.1 Success, but nothing found.
                // BUGFIX: res.json() already sends the response; the original
                // chained .send() afterwards, which raises
                // "Cannot set headers after they are sent".
                if (usecaseResult.data == undefined && usecaseResult.error == undefined) {
                    res.status(404).json(usecaseResult)
                    args.onSuccess(data)
                    return;
                }
                // 4.2 Controlled error reported by the use case.
                if (usecaseResult.error != undefined) {
                    res.status(500).json(usecaseResult)
                    return;
                }
                // 4.3 Success with a result.
                res.status(200).json(usecaseResult)
                args.onSuccess(data)
            } catch (err) {
                // 4.4 Unexpected use-case failure.
                res.status(500).json({
                    errors: {
                        msg: "Error general:" + err
                    }
                })
                return;
            }
        }
    }
}

View File

@@ -0,0 +1,39 @@
import { PaginationArgs } from "#domain/common.js";
import { OrderRepository } from "sim-shared/infrastructure/OrderRepository.js";
export class OrderUsecases {
    // Persistence gateway for orders.
    private orderRepository: OrderRepository;
    constructor(args: {
        orderRepository: OrderRepository
    }
    ) {
        this.orderRepository = args.orderRepository
    }
    /** Returns an async handler that looks an order up by its numeric id. */
    public getById() {
        return async (args: { id: number }) =>
            this.orderRepository.getOrderById(args)
    }
    /** Returns an async handler that looks an order up by its queue correlation id. */
    public getByQueueId() {
        return async (args: { correlation_id: string }) =>
            this.orderRepository.getOrderByQueueId(args)
    }
    /** Returns an async handler that lists pending orders, with pagination. */
    public getPending() {
        return async (args: PaginationArgs) =>
            this.orderRepository.getPendingOrders(args)
    }
}

View File

@@ -1,45 +1,115 @@
import { Request, Response } from "express"
import { SimUsecases } from "./Sim.usecases.js"
import { activationValidator, iccidValidator } from "./httpValidators.js"
import { companyFromIccid } from "#domain/companies.js"
import { BodyValidator } from "sim-shared/aplication/BodyValidator.js"
// Partiendo del caracter 3 2 de pais + 2 de compañia
// Metiendolo a la BDD podria ser mas dinamico pero perderia
// tiempo de query
// Puede que esté bien crear un endpoint para administrarlo
const COMPAÑIASICCID = new Map<string, string>(
[
["3490", "alai"],
["3510", "nos"],
["3320", "objenious"]
])
export class SimController {
private simUseCases: SimUsecases
constructor(args: {
simUseCases: SimUsecases
simUseCases: SimUsecases,
}) {
this.simUseCases = args.simUseCases
this.activation = this.activation.bind(this)
}
public preactivation() {
/**
* TODO:
* En proceso, tiene varios problemas
*
* Abstrae el proceso de
* Peticion -> validacion del body -> map del body -> useCase -> OK/ERR
*
* <O> Representa el dato original
* <P> Representa el dato después del mapeo
*/
public controllerGenerator<O extends Object, P extends Object>(args: {
validator?: BodyValidator<O>,
mapBody?: (body: O) => P,
useCase: (args: P) => Promise<any>,
onError: (args: O | P, error: string) => void,
onSuccess: (args: P) => void,
}) {
return async (req: Request, res: Response) => {
const valido = this.validateBody(req.body, res)
if (valido == false) return;
const body = req.body
const { iccid } = req.body
const compañia = this.compañiaFromIccid(iccid)
if (compañia == undefined) {
res.status(500).json({
// 1. Validacion del body
try {
if (args.validator != undefined)
args.validator.validate(body)
} catch (e) {
if (args.onError != undefined) args.onError(body, e as string)
res.status(422).json({
errors: {
msg: "El iccid no pertenece a una compañia conocida"
msg: e
}
})
}
// 2. Transformacion del body
let data: P = body;
try {
if (args.mapBody != undefined)
data = args.mapBody(body)
} catch (e) {
res.status(422).json({
errors: {
msg: "Error parseando el body: " + e
}
})
}
// 3. Aplicacion del UseCase
try {
const usecaseResult = await args.useCase(data)
// 4. Se devuelve al usuario el caso de exito
res.status(200).json(
usecaseResult
).send()
args.onSuccess(data)
} catch (err) {
// 4.1 Error del caso de uso
res.status(500).json({
errors: {
msg: "Error general:" + err
}
}).send()
return;
}
}
}
public test() {
return this.controllerGenerator<{ iccid: string, offer: string }, { iccid: string }>({
validator: iccidValidator,
useCase: (args) => this.simUseCases.test(args),
onError: (data, error) => console.error(error),
onSuccess: (data) => {
console.log("OK", data)
}
})
}
public preactivation() {
return async (req: Request, res: Response) => {
console.warn("[!] Se deberia de usar la peticion /sim/activate directamente")
try {
iccidValidator.validate(req.body)
} catch (e) {
res.status(422).json({
errors: {
msg: e
}
})
}
const { iccid } = req.body
const compañia = companyFromIccid(iccid)
try {
await this.simUseCases.preActivation({ iccid, compañia })
@@ -62,13 +132,21 @@ export class SimController {
public activation() {
return async (req: Request, res: Response) => {
const valido = this.validateBody(req.body, res)
if (valido == false) return; // Si no es valido ya se ha enviado el error
try {
activationValidator.validate(req.body)
} catch (e) {
res.status(422).json({
errors: {
msg: e
}
})
console.error("[!] Error validando mensaje")
return;
}
const { iccid, offer } = req.body
const compañia = this.compañiaFromIccid(iccid)
const compañia = companyFromIccid(iccid)
if (compañia == undefined) {
res.status(500).json({
@@ -87,6 +165,8 @@ export class SimController {
iccid: iccid,
operation: "activation"
}).send()
return;
} catch (err) {
console.error("Error activando la sim ", req.body)
res.status(500).json({
@@ -101,12 +181,18 @@ export class SimController {
public cancelation() {
return async (req: Request, res: Response) => {
const valido = this.validateBody(req.body, res)
if (valido == false) return; // Si no es valido ya se ha enviado el error
try {
iccidValidator.validate(req.body)
} catch (e) {
res.status(422).json({
errors: {
msg: e
}
})
}
const { iccid } = req.body
const compañia = this.compañiaFromIccid(iccid)
const compañia = companyFromIccid(iccid)
try {
await this.simUseCases.cancelation({ iccid, compañia })
@@ -127,12 +213,18 @@ export class SimController {
public pause() {
return async (req: Request, res: Response) => {
const valido = this.validateBody(req.body, res)
if (valido == false) return; // Si no es valido ya se ha enviado el error
try {
iccidValidator.validate(req.body)
} catch (e) {
res.status(422).json({
errors: {
msg: e
}
})
}
const { iccid } = req.body
const compañia = this.compañiaFromIccid(iccid)
const compañia = companyFromIccid(iccid)
try {
await this.simUseCases.pause({ iccid, compañia })
@@ -153,12 +245,18 @@ export class SimController {
public free() {
return async (req: Request, res: Response) => {
const valido = this.validateBody(req.body, res)
if (valido == false) return; // Si no es valido ya se ha enviado el error
try {
iccidValidator.validate(req.body)
} catch (e) {
res.status(422).json({
errors: {
msg: e
}
})
}
const { iccid } = req.body
const compañia = this.compañiaFromIccid(iccid)
const compañia = companyFromIccid(iccid)
try {
await this.simUseCases.cancelation({ iccid, compañia })
@@ -178,14 +276,19 @@ export class SimController {
}
public save() {
return async (req: Request, res: Response) => {
const valido = this.validateBody(req.body, res)
if (valido == false) return; // Si no es valido ya se ha enviado el error
try {
iccidValidator.validate(req.body)
} catch (e) {
res.status(422).json({
errors: {
msg: e
}
})
}
const { iccid } = req.body
const compañia = this.compañiaFromIccid(iccid)
const compañia = companyFromIccid(iccid)
try {
await this.simUseCases.cancelation({ iccid, compañia })
@@ -203,40 +306,4 @@ export class SimController {
}
}
}
private validateBody(body: any, res: Response) {
const { iccid } = body
let errors = {}
let valid = true
if (iccid == undefined) {
res.status(400)
errors = {
...errors,
iccid: "El iccid es undefined"
}
valid = false
}
if (valid == false) {
res.json({
errors: errors
})
}
return valid;
}
/**
* A partir del iccid completo devuelve la compañia a la que pertenece
* @throws Error si no hay una compañia definida en COMPAÑIASICCID con el codigo
*/
private compañiaFromIccid(iccid: string) {
const caracteresCommpañia = iccid.slice(2, 6)
const compañia = COMPAÑIASICCID.get(caracteresCommpañia)
if (compañia == undefined) throw new Error("El la compañia es desconocida: " + caracteresCommpañia)
return compañia
}
}

View File

@@ -1,23 +1,90 @@
import { OrderRepository } from "sim-shared/infrastructure/OrderRepository.js";
import { Result } from "sim-shared/domain/Result.js";
import assert from "node:assert";
import { EventBus } from "sim-shared/domain/EventBus.port";
import { SimEvents } from "sim-shared/domain/SimEvents";
import { uuidv7 } from "uuidv7";
import { CreateOrderDTO, OrderTracking, OrderType, OrderTypeOptions } from "sim-shared/domain/Order.js";
/**
* TODO:
* - Conexion con la BDD
* - Conexion con RabbitMQ
* - Pasar a clase cuando existan las conexiones
* Casos de uso de tarjetas sim. Garantiza que todos los metodos usan el mismo bus de mensajes
* y repositorio de registro de las ordenes.
*/
export class SimUsecases {
private eventBus: EventBus
private eventBus: EventBus;
private orderRepository: OrderRepository;
constructor(args: {
eventBus: EventBus
eventBus: EventBus,
orderRepository: OrderRepository
}
) {
this.eventBus = args.eventBus
this.orderRepository = args.orderRepository
}
/**
 * Adds a message id (stored as correlation_id in the database) to the
 * event's headers, generated as a fresh uuidv7.
 */
private addMessage_id(event: SimEvents.general): SimEvents.general & { headers: { message_id: string } } {
    const messageId = uuidv7()
    const headers = {
        ...event.headers,
        message_id: messageId
    }
    return { ...event, headers }
}
/**
 * Persists an Order row for an already-published event.
 * T is the type of the Order payload.
 *
 * Requires event.headers.message_id (set by addMessage_id); returns an
 * error Result otherwise. The order type is the third segment of the
 * routing key ("sim.<company>.<type>"), falling back to "unknown".
 *
 * NOTE(review): if "unknown" is not in OrderTypeOptions, events with a
 * short routing key are rejected here — confirm that is intended.
 */
private async saveOrder<T extends any>(event: SimEvents.general): Promise<Result<string, OrderTracking<T>>> {
    // Guard: without a message_id the order cannot be correlated later.
    if (event.headers?.message_id == undefined) {
        return <Result<string, any>>{
            error: "El evento no tiene una cabecera message_id definido"
        }
    }
    const orderType = (event.key.split(".")[2] as OrderType ?? "unknown")
    // (original note) Considering switching this rejection to "unknown".
    if (!OrderTypeOptions.has(orderType)) {
        return <Result<string, any>>{
            error: `El evento no tiene un tipo valido: ${orderType} no existe como tipo valido`
        }
    }
    const order: CreateOrderDTO = {
        correlation_id: event.headers.message_id,
        order_type: orderType,
        routing_key: event.key,
        payload: event
    }
    const result = await this.orderRepository.createOrder<T>(order)
    return result;
}
/**
 * Smoke-test use case: publishes a "sim.test.unknown" event carrying the
 * given iccid, records a tracking order, and returns the event with its
 * generated message_id.
 */
async test(args: { iccid: string }) {
    assert(args.iccid != undefined)
    const event = <SimEvents.general>{
        key: `sim.test.unknown`,
        payload: {
            iccid: args.iccid
        }
    }
    const eventWithId = this.addMessage_id(event)
    // Publish first, then persist the tracking order.
    // NOTE(review): the publish result is unused and saveOrder's error
    // Result is discarded — confirm that is intentional.
    const publish = await this.eventBus.publish([eventWithId])
    await this.saveOrder(eventWithId)
    return eventWithId
}
/**
* WIP
* Crea una nueva sim de la que no se tenia registro anteriormente
* Si ya existia se modifican los campos pero no se hace un cambio
* de estado.
@@ -43,8 +110,11 @@ export class SimUsecases {
offer: args.offer
}
}
console.log("[d] Activation ", activationEvent)
return this.eventBus.publish([activationEvent])
const activationWithId = this.addMessage_id(activationEvent)
console.log("[d] Activation ", activationWithId)
await this.eventBus.publish([activationWithId])
await this.saveOrder(activationWithId)
}
async preActivation(args: { iccid: string, compañia: string }) {
@@ -64,14 +134,18 @@ export class SimUsecases {
*/
async cancelation(args: { iccid: string, compañia: string }) {
const activationEvent = <SimEvents.cancel>{
const cancelationEvent = <SimEvents.cancel>{
key: `sim.${args.compañia}.cancel`,
payload: {
iccid: args.iccid
}
}
console.log("[d] Cancelation ", activationEvent)
return this.eventBus.publish([activationEvent])
const cancelationWithId = this.addMessage_id(cancelationEvent)
console.log("[d] Cancelation ", cancelationWithId)
await this.eventBus.publish([cancelationWithId])
await this.saveOrder(cancelationWithId)
return cancelationWithId
}
// alias por si acaso
public terminate = this.cancelation;
@@ -80,15 +154,19 @@ export class SimUsecases {
* alias de bloquear / suspender en objenious
*/
async pause(args: { iccid: string, compañia: string }) {
const cancelationEvent = <SimEvents.pause>{
const pauseEvent = <SimEvents.pause>{
key: `sim.${args.compañia}.pause`,
payload: {
iccid: args.iccid
}
}
return this.eventBus.publish([cancelationEvent])
const pauseWithId = this.addMessage_id(pauseEvent)
console.log("[d] Cancelation ", pauseWithId)
await this.eventBus.publish([pauseWithId])
await this.saveOrder(pauseWithId)
return pauseWithId
}
async free(args: { iccid: string, compañia: string }) {
const cancelationEvent = <SimEvents.free>{
key: `sim.${args.compañia}.free`,

View File

@@ -0,0 +1,38 @@
import { describe, it } from "node:test";
import { iccidValidator } from "./httpValidators.js";
import assert from "node:assert";
describe("test validators", () => {
    it("should validate 19 char iccid", () => {
        // 19-char iccid with a known company prefix ("3320").
        const validBody = {
            iccid: "8933201125068886692"
        }
        const res = iccidValidator.validate(validBody)
        assert(res == true)
    })
    it("shouldnt validate empty string iccid", () => {
        // FIX: renamed from "validBody" — this fixture is intentionally invalid.
        const invalidBody = {
            iccid: ""
        }
        assert.throws(
            () => iccidValidator.validate(invalidBody),
            { message: "La longitud del iccid es incorrecta debera ser de 19 caracteres" }
        )
    })
    it("shouldnt validate >19 char iccid", () => {
        const invalidBody = {
            iccid: "893320112506888669212345"
        }
        assert.throws(
            () => iccidValidator.validate(invalidBody),
            { message: "La longitud del iccid es incorrecta debera ser de 19 caracteres" }
        )
    })
    it("shouldnt validate <19 char iccid", () => {
        const invalidBody = {
            iccid: "8933201125"
        }
        assert.throws(
            () => iccidValidator.validate(invalidBody),
            { message: "La longitud del iccid es incorrecta debera ser de 19 caracteres" }
        )
    })
})

View File

@@ -0,0 +1,82 @@
import { companyFromIccid } from "#domain/companies.js";
import { BodyValidator, Validator } from "sim-shared/aplication/BodyValidator.js";
// Known offer aliases mapped to their commercial product codes.
const offers = new Map([
    ["mensual", "SAVEFAMILY1"],
    ["anual", "SAVEFAMILY2"]
])
// An iccid must be exactly 19 characters long.
const iccidLongitudValidator = <Validator<{ iccid: string }>>{
    field: "iccid",
    errorMsg: "La longitud del iccid es incorrecta debera ser de 19 caracteres",
    validationFunc: (a: { iccid: string }) => a.iccid.length == 19,
}
// The iccid field must be present.
// BUGFIX: message typo ("estara" -> "estar").
const iccidRequired = <Validator<{ iccid: string }>>{
    field: "iccid",
    errorMsg: "El iccid debe estar definido",
    validationFunc: (a: { iccid: string }) => a.iccid != undefined,
}
// The iccid prefix must map to a registered company.
// NOTE(review): companyFromIccid throws for unknown prefixes instead of
// returning undefined — confirm BodyValidator handles that throw.
const iccidWithValidCompany = <Validator<{ iccid: string }>>{
    field: "iccid",
    errorMsg: "El iccid no corresponde a una compañia registrada",
    validationFunc: (a: { iccid: string }) => companyFromIccid(a.iccid) != undefined,
}
// The offer must be one of the known aliases.
const offerExists = <Validator<{ offer: string }>>{
    field: "offer",
    errorMsg: "La oferta introducida no es valida",
    validationFunc: (a: { offer: string }) => offers.has(a.offer),
}
// Canonical textual form of a uuid version 7 (RFC 9562): 36 chars,
// version nibble "7", variant nibble in [89ab].
const UUIDV7_PATTERN = /^[0-9a-f]{8}-[0-9a-f]{4}-7[0-9a-f]{3}-[89ab][0-9a-f]{4}-[0-9a-f]{12}$/i
// BUGFIX: the original check (`length < 36`) rejected every well-formed
// 36-char uuid and accepted any shorter string; validate the format instead.
const isUuidv7 = <Validator<{ correlation_id?: string }>>{
    field: "correlation_id",
    errorMsg: "El uuid no es un uuidv7 valido",
    validationFunc: (a) => a.correlation_id != undefined && UUIDV7_PATTERN.test(a.correlation_id)
}
// The id must be present.
const definedId = <Validator<{ id?: number }>>{
    field: "id",
    errorMsg: "El id no se ha definido",
    validationFunc: (e) => e.id != undefined
}
// The id must be an integer.
// BUGFIX: error message was copy-pasted from definedId.
// NOTE(review): path params arrive as strings and Number.isInteger("3")
// is false — confirm ids are coerced to number before validation.
const isIntegerId = <Validator<{ id?: number }>>{
    field: "id",
    errorMsg: "El id debe ser un numero entero",
    validationFunc: (e) => Number.isInteger(e.id)
}
// The id must be a non-negative number.
const validNumericId = <Validator<{ id?: number }>>{
    field: "id",
    errorMsg: "El id introducido no es un numero >= 0",
    validationFunc: (e) => e.id! >= 0
}
// Body validator for activation requests: iccid + offer.
export const activationValidator = new BodyValidator<{ iccid: string, offer: string }>(
    [
        iccidRequired,
        iccidLongitudValidator,
        iccidWithValidCompany,
        offerExists,
    ]
)
// Body validator for requests that only carry an iccid.
export const iccidValidator = new BodyValidator<{ iccid: string }>(
    [
        iccidRequired,
        iccidLongitudValidator,
        iccidWithValidCompany,
    ]
)
// Validator for correlation_id path/body params.
export const uuidValidator = new BodyValidator<{ correlation_id?: string }>([
    isUuidv7
])
// Validator for numeric id params.
export const idValidator = new BodyValidator<{ id?: number }>([
    definedId,
    isIntegerId,
    validNumericId
])

View File

@@ -0,0 +1,18 @@
import { Pool } from 'pg';
import { PgClient } from 'sim-shared/infrastructure/PgClient.js'
import { env } from './env/index.js';
// Database connection configuration. It should be the same for every
// service, but until everything is unified each service keeps its own.
export const pgPool = new Pool({
    user: env.POSTGRES_USER,
    host: env.POSTGRES_HOST,
    database: env.POSTGRES_DATABASE,
    password: env.POSTGRES_PASSWORD,
    // Fall back to the default Postgres port when unset or not numeric.
    port: Number(env.POSTGRES_PORT) || 5432,
});
// Project wrapper around the pg Pool, shared by the repositories.
export const postgresClient = new PgClient({
    pool: pgPool
})

View File

@@ -0,0 +1,6 @@
// Common pagination arguments accepted by list endpoints.
export type PaginationArgs = {
    limit?: number,  // maximum number of rows to return
    offset?: number, // number of rows to skip
    start?: number   // NOTE(review): overlap with offset unclear — confirm intended use
}

View File

@@ -0,0 +1,22 @@
// Company lookup keyed on iccid characters 2..5 (2 country digits +
// 2 operator digits). Keeping this in the DB would be more dynamic but
// would cost query time; an admin endpoint may be worth adding.
export const COMPANYICCID = new Map<string, string>(
    [
        ["3490", "alai"],
        ["3510", "nos"],
        ["3320", "objenious"]
    ])
/**
 * Returns the company a full iccid belongs to.
 *
 * NOTE(review): several callers compare the result against undefined,
 * but this function throws instead — confirm which contract is intended.
 * @throws Error when COMPANYICCID has no entry for the iccid's prefix.
 */
export function companyFromIccid(iccid: string) {
    const companyDigits = iccid.slice(2, 6)
    const company = COMPANYICCID.get(companyDigits)
    // BUGFIX: tidied the error message (was "El la compañia es desconocida").
    if (company == undefined) throw new Error("La compañia es desconocida: " + companyDigits)
    return company
}

View File

@@ -3,6 +3,7 @@ import cors from 'cors';
import { simRoutes } from "./infrastructure/simRoutes.http.js"
import { rabbitmqEventBus } from '#config/eventBusConfig.js';
import { env } from "#config/env/index.js"
import { orderRoutes } from "#adapters/orderRoutes.http.js";
const PORT = env.API_PORT
const HOSTNAME = "0.0.0.0"
@@ -24,6 +25,7 @@ app.use(express.json());
app.use(express.urlencoded({ extended: true }));
app.use("/sim", simRoutes)
app.use("/orders", orderRoutes)
app.get("/health", (req, res) => {
res.status(200).json({ status: "ok" })

View File

@@ -0,0 +1,40 @@
/**
* Rutas para consultar el estado de los order
*/
import { OrderRepository } from "sim-shared/infrastructure/OrderRepository.js"
import { Router } from "express"
import { postgresClient } from '#config/postgreConfig.js';
import { OrderController } from "../aplication/Order.controller.js";
import { OrderUsecases } from "../aplication/Order.usecases.js";
const orderRoutes = Router()
// orderRepository is not treated as a singleton (one instance per module).
const orderRepository = new OrderRepository(postgresClient)
const orderUseCases = new OrderUsecases({
    orderRepository: orderRepository
})
const orderController = new OrderController({
    orderUseCases: orderUseCases
})
/**
 * All orders, or a summary; meant to accept filters by:
 * - status
 * - start date
 * - end date
 * - pending
 * NOTE(review): currently a placeholder that just answers "ok".
 * */
orderRoutes.get("/", (req, res) => { res.send("ok") })
/** Order by queue message id (uuid / correlation_id). */
orderRoutes.get("/message_id/:correlation_id", orderController.getByQueueId())
/** Pending operations (registered before "/:id" so it is not shadowed). */
orderRoutes.get("/pending", orderController.getPending())
/** Order by numeric id. (The original comment said "message uuid", but the
 * uuid route is /message_id above — this one takes the DB id.) */
orderRoutes.get("/:id", orderController.getById())
export { orderRoutes }

View File

@@ -2,17 +2,22 @@ import { rabbitmqEventBus } from '#config/eventBusConfig.js';
import { SimUsecases } from '../aplication/Sim.usecases.js';
import { SimController } from '../aplication/Sim.controller.js';
import { Router } from 'express';
import { OrderRepository } from 'sim-shared/infrastructure/OrderRepository.js';
import { postgresClient } from '#config/postgreConfig.js';
const simRoutes = Router()
const orderRepository = new OrderRepository(postgresClient)
const simUseCases = new SimUsecases({
eventBus: rabbitmqEventBus
eventBus: rabbitmqEventBus,
orderRepository: orderRepository
})
const simController = new SimController({
simUseCases: simUseCases
})
// TODO: status de todos los proyectos
simRoutes.get("/status", () => { })
simRoutes.post("/save", simController.save())
@@ -25,6 +30,8 @@ simRoutes.post("/pause", simController.pause())
simRoutes.post("/cancel", simController.cancelation())
simRoutes.post("/test", simController.test())
// Proceso especifico de ALAI para liberar sims canceladas
simRoutes.post("/free", simController.free())

View File

@@ -13,12 +13,6 @@
"types": "./config/*.ts",
"default": "./config/*.js"
},
"#shared/*.js": {
"default": "../sim-shared/*.js"
},
"#shared/*": {
"default": "../sim-shared/*.js"
},
"#adapters/*.js": {
"types": "./infrastructure/*.ts",
"default": "./infrastructure/*.js"
@@ -45,7 +39,7 @@
}
},
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"test": "node --import tsx --test ./**/*.test.ts",
"build": "tsc --build && tsc-alias -p tsconfig.json && cp package.json ../../dist/packages/sim-entrada-eventos/",
"dev": "tsx watch index.ts",
"start": "node ../../dist/packages/sim-entrada-eventos/index.js"
@@ -59,6 +53,7 @@
"cors": "*",
"dotenv": "*",
"express": "*",
"sim-shared": "sim-shared:*",
"typescript": "*"
},
"devDependencies": {

View File

@@ -5,6 +5,8 @@ OBJ_CLI_ASSERTION=XOc7FtwXD8hUX2SFVX94XSty8wkOmChkwDNF09O_aIxPubMDdFUdCDCB4zpzSI
OBJ_CLIENT_ID=savefamily_rest_ws
OBJ_KID=xNfbMiyL1ORXGP8lElhcv8nVaG3EJKye4Lc1YoN3I1E
OBJ_BASE_URL=https://api-getway.objenious.com/ws
//OBJ_BASE_URL=https://api-getway.objenious.com/ws/test
# OBJ_BASE_URL=https://api-getway.objenious.com/ws/test
NOTIFICATION_URL="https://sf-sim-activation.savefamilygps.net/send-activation-mail"
# NOTIFICATION_URL="https://sf-sim-activation.savefamilygps.net/send-activation-mail"
NOTIFICATION_URL="localhost"
SIM_ACTIVATION_API_KEY=9e48c4ac-1ab0-4397-b3f3-6c239200dfe6

View File

@@ -1,5 +1,6 @@
import { loadEnvFile } from "node:process";
import path from "node:path";
import assert from "node:assert";
loadEnvFile(path.join("../../.env")) // Global
loadEnvFile(path.join("./.env")) // base
@@ -12,9 +13,9 @@ export const env = {
POSTGRES_HOST: process.env.POSTGRES_HOST,
POSTGRES_DATABASE: process.env.POSTGRES_DATABASE,
RABBITMQ_HOST: String(process.env.RABBITMQ_HOST ?? "localhost"),
RABBITMQ_USER: String(process.env.RABBITMQ_USER ?? "guest"),
RABBITMQ_PASSWORD: String(process.env.RABBITMQ_PASSWORD ?? "guest"),
RABBITMQ_EXCHANGE: String(process.env.RABBITMQ_EXCHANGE ?? "/"),
RABBITMQ_USER: String(process.env.RABBITMQ_USER),
RABBITMQ_PASSWORD: String(process.env.RABBITMQ_PASSWORD),
RABBITMQ_EXCHANGE: String(process.env.RABBITMQ_EXCHANGE),
RABBITMQ_PORT: parseInt(process.env.RABBITMQ_PORT ?? "5672"),
RABBITMQ_MODULENAME: process.env.MODULENAME,
RABBITMQ_TTL: process.env.RABBITMQ_TTL,
@@ -30,6 +31,20 @@ export const env = {
OBJ_KID: String(process.env.OBJ_KID),
OBJ_BASE_URL: String(process.env.OBJ_BASE_URL),
NOTIFICATION_URL: String(process.env.NOTIFICATION_URL)
NOTIFICATION_URL: String(process.env.NOTIFICATION_URL),
SIM_ACTIVATION_API_KEY: String(process.env.SIM_ACTIVATION_API_KEY)
};
// assert las partes criticas
assert(env.RABBITMQ_PASSWORD != undefined)
assert(env.RABBITMQ_USER != undefined)
assert(env.SIM_ACTIVATION_API_KEY != undefined)
assert(env.NOTIFICATION_URL != undefined)
if (env.ENVIRONMENT == "production") {
assert(env.RABBITMQ_PASSWORD != "guest")
assert(env.RABBITMQ_HOST != "localhost")
}
console.log("CRON: ENV", env)

View File

@@ -1,4 +1,4 @@
import { Pool, QueryResult } from 'pg';
import { Pool } from 'pg';
import { PgClient } from 'sim-shared/infrastructure/PgClient.js'
import { env } from './env/index.js';

View File

@@ -3,7 +3,8 @@ import { pgPool } from "./config/postgreConfig.js"
import { PgClient } from "sim-shared/infrastructure/PgClient.js"
import { httpInstance } from "./config/httpClient.config.js"
import { CheckObjeniousRequests } from "./tasks/check_objenious_request.js"
import { OperationsRepository } from "sim-shared/infrastructure/OperationRepository.js"
import { ObjeniousOperationsRepository } from "sim-shared/infrastructure/ObjeniousOperationRepository.js"
import { OrderRepository } from "sim-shared/infrastructure/OrderRepository.js"
async function startCron() {
const commonSettings = {
@@ -15,19 +16,22 @@ async function startCron() {
const pgClient = new PgClient({ pool: pgPool })
await pgClient.checkDatabaseConnection()
await pgClient.checkDatabaseConnection()
const operationRepository = new OperationsRepository(pgClient)
const operationRepository = new ObjeniousOperationsRepository(pgClient)
const orderRepository = new OrderRepository(pgClient)
const objTask = new CheckObjeniousRequests(
operationRepository,
httpClient
orderRepository,
httpClient,
)
await objTask.getPendingOperations()
const interval = setInterval(async () => {
console.log("Updating...")
await objTask.getPendingOperations()
console.log("Update finished")
}, 60 * 1000)
}, 10 * 60 * 1000)
/*
const task = cron.createTask("* * * * *", async () => {
}

View File

@@ -1 +0,0 @@
export const task = async () => console.log("Background " + new Date().toISOString())

View File

@@ -1,4 +1,5 @@
import { env } from "#config/env/index.js";
import { OrderRepository } from "sim-shared/infrastructure/OrderRepository.js";
import axios from "axios";
import { IOperationsRepository, Objenious, ObjeniousOperation, ObjeniousOperationChange, StatusEnum } from "sim-shared/domain/operationsRepository.port.js";
import { HttpClient } from "sim-shared/infrastructure/HTTPClient.js";
@@ -6,6 +7,7 @@ import { HttpClient } from "sim-shared/infrastructure/HTTPClient.js";
export class CheckObjeniousRequests {
constructor(
private readonly operationsRepository: IOperationsRepository,
private readonly orderRepository: OrderRepository,
private readonly httpClient: HttpClient
) {
}
@@ -14,43 +16,47 @@ export class CheckObjeniousRequests {
* TODO: meter a una funcion a parte task con los 3 pasos
*/
public async getPendingOperations() {
// 1. Se obtienen todas las operaciones pendientes de la BDD
const pendingOperations = await this.operationsRepository.getPendingOperations()
if (pendingOperations.error != undefined) {
throw new Error("Error obteniendo las tareas pendientes " + pendingOperations.error)
}
if (pendingOperations.data == undefined || pendingOperations.data.length == 0) {
//Nada pendiente
console.log("[cron] No hay operaciones pendientes de Objenious")
return;
}
// 2. Clasificación de las tareas pendientes
// Erroneas => no se les ha dado un request_id, no se pueden comprobar
const erroneas = pendingOperations.data
.filter((e) => e.request_id == undefined)
// Todas las validas
const operacionesValidas = pendingOperations.data
.filter((e) => e.request_id != undefined)
// Validas sin MassId
const solicitarMassId = operacionesValidas
.filter((e) => e.mass_action_id == undefined)
// Validas con MassId
const consultarEstado = pendingOperations.data
.filter(e => e.mass_action_id != undefined)
// TODO: Validas sin/con massID que lleven mucho tiempo sin actualizarse
console.log("[cron] Solicitando mass id para", solicitarMassId.map(e => e.id))
const newMassActions = await this.getMassIdFromRequest(solicitarMassId)
const merged = [...newMassActions || [], ...consultarEstado]
console.log("[cron] Solicitando status para", merged.map(e => e.id))
const result = await this.getMassActionsStatus(merged)
}
/**
* Para una lista de operaciones **con mass_action_id** se comprueba si han tenido alguna actualizacion
* Devuelve el numero de operaciones comprobadas.
*/
private async getMassActionsStatus(requestList: ObjeniousOperation[]) {
if (requestList.length == 0) return;
if (requestList.length == 0) return 0;
const operationsList = structuredClone(requestList)
const PATH = "/actions/massActions/"
@@ -91,8 +97,10 @@ export class CheckObjeniousRequests {
// 2. Se comprueba si ha habido un cambio de estado
const { id, status, info } = data
if (status != originalAction.objenious_status) {
const hasStatusChanged = status != originalAction.objenious_status
if (hasStatusChanged) {
console.log("[cron] Actualizando", originalAction.id, originalAction.iccids, status)
/** Status convertido al que se usa en la aplicacion */
const uorStatus = this.mapStatus(status)
const updateData: ObjeniousOperationChange = {
operation_id: originalAction.id!,
@@ -102,42 +110,59 @@ export class CheckObjeniousRequests {
previous_status: originalAction.status
}
originalAction.status = uorStatus;
originalAction.objenious_status = status;
originalAction.last_change_date = new Date().toISOString()
originalAction.end_date = originalAction.last_change_date
console.log(" ----> Status", uorStatus)
if (uorStatus /*== "finished"*/) {
const updatedAction = structuredClone(originalAction)
updatedAction.status = uorStatus;
updatedAction.objenious_status = status;
updatedAction.last_change_date = new Date().toISOString()
updatedAction.end_date = originalAction.last_change_date
if (uorStatus == "finished") {
console.log(" ****> Status", uorStatus)
if (uorStatus != "finished") {
console.error("!!! Notificando estado no finished")
}
const targetIccids = originalAction.iccids
const lineData = await this.getLineData(targetIccids)
console.log("lineData", lineData.content[0])
console.log("[i] lineData", lineData.content[0])
const msisdn = lineData.content[0].identifier.msisdn
if (originalAction.correlation_id != undefined) {
this.orderRepository.finishOrder({ correlation_id: originalAction.correlation_id })
.then(e => console.log("[o] Finalizada order", e))
.catch(e => {
console.error("[x] Error finalizando la order ", e)
console.error(e)
})
}
if (originalAction.operation == "activation") {
this.notifyFinalization({
...originalAction,
msisdn
})
// TODO la accion no siempre es activacion!
.then(e => {
console.log("Notificada la activacion de ", originalAction.iccids)
console.log("[o] Notificada la activacion de ", originalAction.iccids)
})
.catch(e => {
console.error("Error enviando la activacion de ", originalAction)
console.error("[x] Error enviando la activacion de ", originalAction)
console.error(e)
})
}
}
if (info != undefined) {
updateData.info = info
}
try {
console.log("Subiendo un update")
console.log("[i] Subiendo un update")
console.log(updateData)
await this.operationsRepository.updateOperation(updateData)
updated.push(originalAction)
} catch (e) {
console.error("Error actualizando el estado de ", originalAction, e)
console.error("[x] Error actualizando el estado de ", originalAction, e)
return;
}
}
@@ -151,7 +176,8 @@ export class CheckObjeniousRequests {
// ! Importante las claves siempre en minuscula, los valores son cammelCase
const equivalentMap = new Map<string, StatusEnum>([
["en cours", "running"],
["terminé", "finished"]
["terminé", "finished"],
["annulé", "finished"]
])
const res = equivalentMap.get(sanitizedStatus)
if (res == undefined) return "running"
@@ -236,29 +262,15 @@ export class CheckObjeniousRequests {
* al servicio que manda los mails
*/
private async notifyFinalization(operation: ObjeniousOperation & { msisdn: string }) {
console.log("Notificada, ", operation)
const req = axios.post(env.NOTIFICATION_URL, {
...operation,
iccids: [operation.iccids]
}, {
headers: {
"x-apikey-sim-activation": "9e48c4ac-1ab0-4397-b3f3-6c239200dfe6"
"x-apikey-sim-activation": env.SIM_ACTIVATION_API_KEY
}
})
/*
const req = this.httpClient.client.post<any>("",
{ operation: operation },
{
headers: {
"x-apikey-sim-activation": "9e48c4ac-1ab0-4397-b3f3-6c239200dfe6"
},
baseURL: env.NOTIFICATION_URL
}
)*/
await req
}
}

View File

@@ -0,0 +1,25 @@
/** One validation rule: which field it concerns, the predicate to run, and the error to raise. */
export type Validator<T extends Object> = {
    field: keyof T,
    errorMsg: string,
    validationFunc: (obj: T) => boolean
}
/**
 * Runs a list of validators in order; the first one that fails
 * aborts validation by throwing an Error with its message.
 */
export class BodyValidator<T extends Object> {
    validatorList: Validator<T>[] = []
    constructor(
        validators: Validator<T>[]
    ) {
        this.validatorList = validators
    }
    /**
     * Validates `obj` against every registered rule, in registration order.
     * @throws Error with the failing rule's errorMsg on the first failure.
     * @returns true when every rule passes.
     */
    public validate(obj: T) {
        // find() preserves the original short-circuit: rules after the first failure never run
        const failedRule = this.validatorList.find((rule) => rule.validationFunc(obj) == false)
        if (failedRule != undefined) throw new Error(failedRule.errorMsg)
        return true;
    }
}

View File

@@ -0,0 +1,27 @@
/**
 * !Important
 * Configuration used only to launch the tests; this code must never run
 * in production.
 */
import { env, loadEnvFile } from "node:process";
import { Pool } from "pg";
import { PgClient } from "../infrastructure/PgClient.js";
console.warn("[i!] Se está corriendo codigo de test")
loadEnvFile("../../.env") // Global .env shared by every service
// One pool is created per service.
export const pgPool = new Pool({
    user: env.POSTGRES_USER,
    host: env.POSTGRES_HOST,
    database: env.POSTGRES_DATABASE,
    password: env.POSTGRES_PASSWORD,
    port: Number(env.POSTGRES_PORT) || 5432, // falls back to the default Postgres port
});
// Shared client wrapper used by the repository test suites
export const postgresClient = new PgClient({
    pool: pgPool
})
console.warn(`[T] TEST DB : ${env.POSTGRES_DATABASE}@${env.POSTGRES_HOST}`)

View File

@@ -7,9 +7,11 @@ export type DomainEventType = string
export type DomainEvent = {
key: string,
payload: Object,
options: Object,
occurredOn: Date,
payload: object,
headers?: object & {
message_id?: string
},
occurredOn?: Date,
}
export interface DomainEventSubscriber<T extends DomainEvent> {

View File

@@ -2,7 +2,7 @@ import { ConsumeMessage } from "amqplib";
import { DomainEvent, DomainEventSubscriber } from "./DomainEvent.js";
export interface EventBus {
publish(events: Array<DomainEvent>): Promise<void>;
publish(events: Array<DomainEvent>): Promise<{ success: DomainEvent[], error: DomainEvent[] }>;
// Sacado de NEKI, posiblemente no haga falta
addSubscribers(subscribers: Array<DomainEventSubscriber<DomainEvent>>): void;

View File

@@ -0,0 +1,85 @@
// Replaces the old OrderStatus enum
export type OrderStatus =
    | 'pending'
    | 'running'
    | 'finished'
    | 'failed'
    | 'dlx';
// Replaces the old OrderTypes enum
export type OrderType =
    | 'activate'
    | 'preactivate'
    | 'cancel'
    | 'pause'
    | 'reactivate'
    | 'unknown';
// Runtime mirror of OrderType, handy for validating incoming strings
export const OrderTypeOptions = new Set<OrderType>([
    'activate',
    'preactivate',
    'cancel',
    'pause',
    'reactivate',
    'unknown'
])
// Interface for the order_tracking table
export interface OrderTracking<T> {
    id: number;
    correlation_id: string;
    exchange?: string | null;
    routing_key?: string | null;
    order_type: OrderType;
    payload?: Record<string, T> | null; // Loosely typed until the json payload schema is settled
    status: OrderStatus;
    retry_count: number;
    error_message?: string | null;
    error_stacktrace?: string | null;
    /* TODO: Important — decide whether to work with Date objects or keep strings */
    start_date: string | Date;
    update_date: string | Date;
    finish_date?: string | Date | null;
    // since 1.1.0
    webhook_host?: string | null;
    webhook_endpoint?: string | null;
}
// Interface for the order_history table
export interface OrderHistory {
    id: number;
    order_id: number;
    previous_status: OrderStatus;
    new_status: OrderStatus;
    change_reason?: string | null;
    change_date: Date;
}
// Creation DTO (omits the columns generated by the DB)
export type CreateOrderDTO = Pick<
    OrderTracking<any>, // The payload type parameter is irrelevant here
    'correlation_id' | 'exchange' | 'routing_key' | 'order_type' | 'payload' | 'webhook_host' | 'webhook_endpoint'
>;
// XOR selector: callers provide either id or correlation_id, never both
export type UpdateOrderDTO =
    (
        { id: number, correlation_id?: never } |
        { id?: never, correlation_id: string }
    )
    &
    {
        new_status: OrderStatus,
        reason?: string
    }
// Same XOR id/correlation_id selection as UpdateOrderDTO
export type FinishOrderDTO =
    (
        { id: number, correlation_id?: never } |
        { id?: never, correlation_id: string }
    )
    &
    {
        reason?: string
    }

View File

@@ -1,7 +1,14 @@
/**
* Result<Error,Data>
*/
export type Result<E, D> = {
error: E | undefined,
data: D | undefined
}
// Discriminated union: a Result is either a failure (data absent) or a
// success (error absent), so narrowing on one member excludes the other.
export type Result<E, D> =
    {
        error: E,
        data?: undefined
    }
    |
    {
        error?: undefined,
        data: D
    }

View File

@@ -10,6 +10,8 @@ export interface IOperationsRepository {
export type ObjeniousOperation = {
id?: number;
/** Uuid del mensaje asociado a la operacion */
correlation_id?: string;
operation: string;
retry_count?: number;
max_retry?: number;

View File

@@ -2,7 +2,7 @@ import { IOperationsRepository, ObjeniousOperation, ObjeniousOperationChange } f
import { Result } from "sim-shared/domain/Result.js";
import { PgClient } from "sim-shared/infrastructure/PgClient.js";
export class OperationsRepository implements IOperationsRepository {
export class ObjeniousOperationsRepository implements IOperationsRepository {
constructor(
private readonly pgClient: PgClient
@@ -45,8 +45,8 @@ export class OperationsRepository implements IOperationsRepository {
error = COALESCE($3,error),
request_id = COALESCE($4, request_id),
mass_action_id = COALESCE($5, mass_action_id),
last_change_date = now(),
end_date = CASE WHEN $2 IN ('finished') THEN now() ELSE end_date END,
last_change_date = now() at time zone 'utc',
end_date = CASE WHEN $2 IN ('finished') THEN now() at time zone 'utc' ELSE end_date END,
objenious_status = $6
WHERE id = $1`;

View File

@@ -0,0 +1,172 @@
import { before, describe, it } from "node:test";
import { OrderRepository } from "./OrderRepository.js";
import { CreateOrderDTO } from "../domain/Order.js";
import { postgresClient } from "../config/config.test.js";
import assert from "node:assert";
// Fixture: base order reused (and cloned with new correlation_ids) across the suite
const order1 = <CreateOrderDTO>{
    correlation_id: "fakeRMQid-1234",
    exchange: "fake.ex",
    routing_key: "test.order.idk",
    order_type: "activate",
    payload: { iccid: "1234", action: "activate" }
}
// Fixture: second order, only referenced by the commented-out setup below
const order2 = <CreateOrderDTO>{
    correlation_id: "fakeRMQid-5678",
    exchange: "fake.ex",
    routing_key: "test.order.idk",
    order_type: "activate",
    payload: { iccid: "5678", action: "activate" }
}
// Integration suite: runs against the real test database wired up in config.test
describe("Test OrderRepository", {}, (ctx) => {
    const orderRepo = new OrderRepository(postgresClient)
    // Ids of the orders created in `before`, shared by the tests below
    let testIds: number[] = []
    before(async () => {
        // Order1
        const result1 = await orderRepo.createOrder(order1)
        assert(result1.data != undefined)
        testIds.push(result1.data.id)
        // Order2 -> for the createOrder test
        // const result2 = await orderRepo.createOrder(order2)
        // assert(result2.data != undefined)
        // testIds.push(result2.data.id)
    })
    it("Insert new Order", async () => {
        const newOrder = order1
        const result = await orderRepo.createOrder(newOrder)
        assert(result.error == undefined)
        assert(result.data != undefined)
        const order = result.data!
        assert(order.id != undefined)
        assert(order.correlation_id == newOrder.correlation_id)
        // Fresh orders must always start in 'pending'
        assert(order.status == 'pending')
        console.log("[T] Creada Order", typeof (result.data.start_date))
    })
    it("Find by valid id should return the order", async () => {
        const result = await orderRepo.getOrderById({ id: testIds[0]! })
        assert(result.error == undefined)
        assert(result.data != undefined)
        const order = result.data
        assert(order.id == testIds[0])
        assert(order.correlation_id == order1.correlation_id)
    })
    it("Find by correlation id should return a valid order", async () => {
        const result = await orderRepo.getOrderByQueueId({ correlation_id: order1.correlation_id })
        assert(result.error == undefined)
        assert(result.data != undefined)
        const order = result.data
        assert(order.correlation_id == order1.correlation_id)
    })
    it("Get pending orders should return all pending orders in ASC order", async () => {
        // We already have 'testId' from before block
        // Insert two more orders
        const orderA = { ...order1, correlation_id: "pending-A" }
        const orderB = { ...order1, correlation_id: "pending-B" }
        const resA = await orderRepo.createOrder(orderA)
        const resB = await orderRepo.createOrder(orderB)
        assert(resA.data != undefined)
        assert(resB.data != undefined)
        const idA = resA.data.id
        const idB = resB.data.id
        const result = await orderRepo.getPendingOrders()
        assert(result.error == undefined)
        assert(Array.isArray(result.data))
        const ids = result.data.map(o => o.id)
        assert(ids.includes(testIds[0]!))
        assert(ids.includes(idA))
        assert(ids.includes(idB))
        // Verify ordering (ASC by start_date, which maps to ID order in this sequential test)
        const indexTest = result.data.findIndex(o => o.id === testIds[0])
        const indexA = result.data.findIndex(o => o.id === idA)
        const indexB = result.data.findIndex(o => o.id === idB)
        assert(indexTest < indexA)
        assert(indexA < indexB)
    })
    it("Update order status should change status and add history", async () => {
        const newStatus = "running"
        const reason = "Test update"
        const result = await orderRepo.updateOrder({ id: testIds[0]!, new_status: newStatus, reason: reason })
        assert(result.error == undefined)
        assert(result.data != undefined)
        assert(result.data.status === newStatus)
    })
    it("Finish order should set status to finished and set finish_date", async () => {
        const result = await orderRepo.finishOrder({ id: testIds[0]!, reason: "Test finish" })
        assert(result.error == undefined)
        assert(result.data != undefined)
        assert(result.data.status === "finished")
        assert(result.data.finish_date != null)
    })
    it("Error order (failed) should increment retry_count and set status", async () => {
        // Create another order for this test
        const order2 = { ...order1, correlation_id: "fake-error-test" }
        const createResult = await orderRepo.createOrder(order2)
        assert(createResult.data != undefined)
        const errTestId = createResult.data.id
        const result = await orderRepo.errorOrder({
            id: errTestId,
            status: "failed",
            reason: "Test failure",
            error: "Some error",
            stackTrace: "Some stack"
        })
        assert(result.error == undefined)
        assert(result.data != undefined)
        assert(result.data.status === "failed")
        assert(result.data.retry_count > 0)
        // 'failed' is retryable, so finish_date must remain unset
        assert(result.data.finish_date == null)
    })
    it("Error order (dlx) should set finish_date", async () => {
        // Create another order for this test
        const order3 = { ...order1, correlation_id: "fake-dlx-test" }
        const createResult = await orderRepo.createOrder(order3)
        assert(createResult.data != undefined)
        const dlxTestId = createResult.data.id
        const result = await orderRepo.errorOrder({
            id: dlxTestId,
            status: "dlx",
            reason: "Test DLX",
            error: "Fatal error",
            stackTrace: "Fatal stack"
        })
        assert(result.error == undefined)
        assert(result.data != undefined)
        // 'dlx' is terminal, so the order is closed with a finish_date
        assert(result.data.status === "dlx")
        assert(result.data.finish_date != null)
    })
})

View File

@@ -0,0 +1,446 @@
/**
* TODO: Usar
*/
import { PoolClient, QueryResult, QueryResultRow } from "pg";
import { CreateOrderDTO, FinishOrderDTO, OrderTracking, UpdateOrderDTO } from "../domain/Order.js";
import { Result } from "../domain/Result.js";
import { PgClient } from "./PgClient.js";
import assert from "node:assert";
import { error } from "node:console";
/**
* Agrupa todas las operaciones de *Order*.
* Las *Order* son seguimientos de operaciones que han entrado correctamente a cualquier cola
* de mensajes independientemente del pais/empresa objetivo de la tarjeta.
*
* Todas las operaciones devuelven un tipo Result<Error,Data> para gestionar los errores
* de acceso a la BDD, para las operaciones correctas se devuleve Error = undefined, para
* las erroneas Data = undefined.
*/
export class OrderRepository {
    constructor(
        private readonly pgClient: PgClient
    ) {
    }
    /**
     * Awaits the query and returns its FIRST row wrapped in a Result.
     * Guarantees error handling: any DB failure becomes Result.error.
     * NOTE(review): when the query matches no rows, `data` is undefined even
     * though the Result type declares it present — callers must still check.
     */
    private async getFirst<T extends QueryResultRow>(queryPromise: Promise<QueryResult<T>>) {
        try {
            const queryResult = await queryPromise
            return <Result<string, T>>{
                data: queryResult.rows[0]
            }
        } catch (e) {
            return <Result<string, T>>{
                error: e as string
            }
        }
    }
    /**
     * Awaits the query and returns every row (T[]) wrapped in a Result.
     */
    private async getAll<T extends QueryResultRow>(queryPromise: Promise<QueryResult<T>>) {
        try {
            const queryResult = await queryPromise
            return <Result<string, T[]>>{
                data: queryResult.rows
            }
        } catch (e) {
            return <Result<string, T[]>>{
                error: e as string
            }
        }
    }
    /**
     * Rolls back the open transaction and returns the client to the pool.
     * Centralizes cleanup: early-return paths previously skipped ROLLBACK
     * and client.release(), leaking pool connections.
     */
    private async abortTransaction(client: PoolClient) {
        await client.query("ROLLBACK")
        client.release()
    }
    /**
     * Inserts one order_history row recording a status transition.
     * Returns a Result with only the new history row id.
     */
    private async insertHistory(client: PoolClient, orderId: number, previousStatus: string, newStatus: string, reason?: string) {
        const query = `
            INSERT INTO order_history (
                order_id,
                previous_status,
                new_status,
                change_reason
            )
            VALUES (
                $1, -- order id
                $2, -- previous status
                $3::order_status, -- new status
                $4  -- reason (e.g. "Consumer processed successfully" or "RabbitMQ NACK")
            )
            RETURNING id;
        `
        return this.getFirst(client.query<{ id: number }>(query, [orderId, previousStatus, newStatus, reason]))
    }
    /**
     * Fetches a single order by its database id.
     * The type <T> describes the order's message payload content.
     */
    public async getOrderById<T>(data: { id: number }): Promise<Result<string, OrderTracking<T>>> {
        const query = `
            SELECT * FROM order_tracking
            WHERE id = $1
        `
        const values = [data.id]
        const queryPromise = this.pgClient.query<OrderTracking<T>>(query, values)
        return this.getFirst(queryPromise)
    }
    /**
     * Fetches a single order by its RabbitMQ correlation id.
     * NOTE(review): the `pool` parameter is unused; kept for interface compatibility.
     */
    public async getOrderByQueueId<T>(data: { correlation_id: string }, pool?: PoolClient) {
        const query = `
            SELECT * FROM order_tracking
            WHERE correlation_id = $1
        `
        const values = [data.correlation_id]
        const queryPromise = this.pgClient.query<OrderTracking<T>>(query, values)
        return this.getFirst(queryPromise)
    }
    /**
     * Orders that have not finished yet (finish_date IS NULL), oldest first.
     * @param options optional paging: limit, offset, and start (minimum id).
     * All option values are bound as parameters — never interpolated — to keep
     * the query safe and plannable.
     */
    public async getPendingOrders<T>(options?: {
        limit?: number,
        offset?: number,
        start?: number // starting id
    }) {
        const client = await this.pgClient.connect();
        const values: number[] = []
        // Build each optional fragment with a $n placeholder (fix: values were
        // previously string-interpolated into the SQL).
        let startFragment = ""
        if (options?.start != undefined) {
            values.push(options.start)
            startFragment = `AND id >= $${values.length}`
        }
        let offsetFragment = ""
        if (options?.offset != undefined) {
            values.push(options.offset)
            offsetFragment = `OFFSET $${values.length}`
        }
        let limitFragment = ""
        if (options?.limit != undefined) {
            values.push(options.limit)
            limitFragment = `LIMIT $${values.length}`
        }
        const query = `
            SELECT * FROM order_tracking
            WHERE finish_date IS NULL
            ${startFragment}
            ORDER BY start_date ASC
            ${offsetFragment}
            ${limitFragment}
        `
        const queryPromise = client.query<OrderTracking<T>>(query, values)
        const result = await this.getAll(queryPromise)
        client.release()
        return result
    }
    /**
     * Inserts a new order in status 'pending' and returns the created row.
     */
    public async createOrder<T extends any>(data: CreateOrderDTO): Promise<Result<string, OrderTracking<T>>> {
        const client = await this.pgClient.connect();
        await client.query("BEGIN")
        const query = `
            INSERT INTO order_tracking (
                correlation_id,
                exchange,
                routing_key,
                order_type,
                payload,
                status,
                webhook_host,
                webhook_endpoint
            )
            VALUES (
                $1, -- correlation_id
                $2, -- exchange
                $3, -- routing_key
                $4, -- order_type (e.g. 'activate')
                $5, -- payload (json object)
                'pending',
                $6, -- webhook_host,
                $7  -- webhook_endpoint
            )
            RETURNING
                id,
                correlation_id,
                exchange,
                routing_key,
                order_type,
                payload,
                status,
                webhook_host,
                webhook_endpoint
        `
        const values = [data.correlation_id, data.exchange, data.routing_key, data.order_type, data.payload, data.webhook_host, data.webhook_endpoint]
        const queryPromise = client.query<OrderTracking<T>>(query, values)
        // TODO check whether start_date converts to Date by default; add a status enum
        const result = await this.getFirst(queryPromise)
        if (result.error == undefined) {
            await client.query("COMMIT")
        } else {
            await client.query("ROLLBACK")
        }
        client.release()
        return result
    }
    /**
     * "Happy path" status update of an order: sets the new status, bumps
     * update_date, and records the transition in order_history — all in one
     * transaction. Accepts either id or correlation_id (exactly one).
     */
    public async updateOrder(args: UpdateOrderDTO): Promise<Result<string, OrderTracking<any>>> {
        // XOR: exactly one of id / correlation_id must be provided
        assert((args.id != undefined) != (args.correlation_id != undefined))
        const client = await this.pgClient.connect();
        await client.query('BEGIN');
        // idType is one of two hard-coded literals, so interpolating it is safe
        const idType = ('id' in args) ? "id" : "correlation_id"
        const idValue = (args.id != undefined) ? args.id : args.correlation_id
        // 1. Load the current order
        const qCurrentOrder = `
            SELECT * FROM order_tracking
            WHERE ${idType} = $1
        `
        const vCurrentOrder = [idValue]
        const currentOrderResult = await this.getFirst(client.query<OrderTracking<any>>(qCurrentOrder, vCurrentOrder))
        // DB error first, then not-found: both paths must close the transaction
        if (currentOrderResult.error != undefined) {
            await this.abortTransaction(client)
            return currentOrderResult
        }
        const currentOrder = currentOrderResult.data
        if (currentOrder?.id == undefined) {
            // Fix: this early return used to leak the client (no ROLLBACK/release)
            await this.abortTransaction(client)
            return {
                error: "El order a actualizar no existe " + idType + ": " + idValue
            }
        }
        const orderId = currentOrder.id
        // 2. Apply the status change
        const uOrderTracking = `
            UPDATE order_tracking
            SET
                status = $2::order_status,
                update_date = (now() at time zone 'utc')
            WHERE id = $1
            RETURNING id, status, update_date;
        `
        const updatedOrderResult = await this.getFirst(
            client.query<{ id: number, status: string, update_date: string }>(uOrderTracking, [orderId, args.new_status])
        )
        if (updatedOrderResult.error != undefined) {
            await this.abortTransaction(client)
            return updatedOrderResult
        }
        // 3. Record the transition in order_history
        const newOrderHistoryResult = await this.insertHistory(client, orderId, currentOrder.status, args.new_status, args.reason)
        if (newOrderHistoryResult.error != undefined) {
            await this.abortTransaction(client)
            return newOrderHistoryResult
        }
        await client.query("COMMIT")
        // Re-read so the caller receives the fully updated row
        const updatedOrder = await this.getFirst(
            client.query<OrderTracking<any>>(qCurrentOrder, vCurrentOrder)
        )
        client.release()
        return updatedOrder
    }
    /**
     * Marks an order as successfully finished: status 'finished', finish_date
     * set, and a history row recorded. Accepts either id or correlation_id.
     */
    public async finishOrder(args: FinishOrderDTO) {
        // XOR check runs before connecting so an assertion failure cannot leak a client
        assert((args.id != undefined) != (args.correlation_id != undefined))
        const client = await this.pgClient.connect();
        await client.query('BEGIN');
        const idType = ('id' in args) ? "id" : "correlation_id"
        const idValue = (args.id != undefined) ? args.id : args.correlation_id
        // 1. Load the current order
        const qCurrentOrder = `
            SELECT * FROM order_tracking
            WHERE ${idType} = $1
        `
        const vCurrentOrder = [idValue]
        const currentOrderResult = await this.getFirst(client.query<OrderTracking<any>>(qCurrentOrder, vCurrentOrder))
        if (currentOrderResult.error != undefined) {
            await this.abortTransaction(client)
            return currentOrderResult
        }
        const currentOrder = currentOrderResult.data
        if (currentOrder?.id == undefined) {
            // Fix: this early return used to leak the client (no ROLLBACK/release)
            await this.abortTransaction(client)
            return {
                error: "El order a actualizar no existe " + idType + ": " + idValue
            }
        }
        const orderId = currentOrder.id
        // 2. Close the order
        const uOrderTracking = `
            UPDATE order_tracking
            SET
                status = 'finished',
                update_date = (now() at time zone 'utc'),
                finish_date = (now() at time zone 'utc')
            WHERE id = $1
            RETURNING id, status, update_date;
        `
        const updatedOrderResult = await this.getFirst(
            client.query<{ id: number, status: string, update_date: string }>(uOrderTracking, [orderId])
        )
        if (updatedOrderResult.error != undefined) {
            await this.abortTransaction(client)
            return updatedOrderResult
        }
        // 3. History row; default reason matches the original implementation
        const newOrderHistoryResult = await this.insertHistory(client, orderId, currentOrder.status, "finished", args.reason ?? "finished successfully")
        if (newOrderHistoryResult.error != undefined) {
            await this.abortTransaction(client)
            return newOrderHistoryResult
        }
        await client.query("COMMIT")
        const updatedOrder = await this.getFirst(
            client.query<OrderTracking<any>>(qCurrentOrder, vCurrentOrder)
        )
        client.release()
        return updatedOrder
    }
    /**
     * Marks an order as errored and increments retry_count.
     * - 'dlx': terminal — finish_date is set and no retry will happen.
     * - 'failed': the message went to the delay queue and will be retried.
     * TODO: allow selecting by correlation_id as well
     */
    public async errorOrder(args: {
        id: number,
        status: "failed" | "dlx",
        reason: string,
        error?: string,
        stackTrace?: string
    }) {
        const client = await this.pgClient.connect();
        await client.query('BEGIN');
        // 1. Load the current order
        const qCurrentOrder = `
            SELECT * FROM order_tracking
            WHERE id = $1
        `
        const vCurrentOrder = [args.id]
        const currentOrderResult = await this.getFirst(client.query<OrderTracking<any>>(qCurrentOrder, vCurrentOrder))
        if (currentOrderResult.error != undefined) {
            await this.abortTransaction(client)
            return currentOrderResult
        }
        const currentOrder = currentOrderResult.data
        if (currentOrder?.id == undefined) {
            // Fix: the original had no not-found guard and crashed reading
            // `.status` on undefined with the transaction left open
            await this.abortTransaction(client)
            return {
                error: "El order a actualizar no existe id: " + args.id
            }
        }
        // 2. Apply the error status
        const uOrderTracking = `
            UPDATE order_tracking
            SET
                status = $2::order_status,
                update_date = (now() at time zone 'utc'),
                finish_date = CASE WHEN $2::order_status = 'dlx' THEN (now() at time zone 'utc') ELSE null END,
                retry_count = retry_count + 1,
                error_message = $3,
                error_stacktrace = $4
            WHERE id = $1
            RETURNING id, status, update_date;
        `
        const updatedOrderResult = await this.getFirst(
            client.query<{ id: number, status: string, update_date: string }>(uOrderTracking, [args.id, args.status, args.error, args.stackTrace])
        )
        if (updatedOrderResult.error != undefined) {
            await this.abortTransaction(client)
            return updatedOrderResult
        }
        // 3. Record the transition ('dlx' or 'failed') with the full failure reason
        const newOrderHistoryResult = await this.insertHistory(client, args.id, currentOrder.status, args.status, args.reason)
        if (newOrderHistoryResult.error != undefined) {
            await this.abortTransaction(client)
            return newOrderHistoryResult
        }
        await client.query("COMMIT")
        const updatedOrder = await this.getFirst(
            client.query<OrderTracking<any>>(qCurrentOrder, vCurrentOrder)
        )
        client.release()
        return updatedOrder
    }
}

View File

@@ -17,6 +17,11 @@ export class RabbitMQEventBus implements EventBus {
private buildStructure?: (chan: Channel) => Promise<void>
private maxRetry: number = 0
connection?: AmqpConnectionManager
channel?: ChannelWrapper
connected: Boolean = false
private connectionOptions: RMQConnectionParams
constructor(args: {
connectionParams: RMQConnectionParams,
buildStructure?: (chan: Channel) => Promise<void>,
@@ -73,11 +78,6 @@ export class RabbitMQEventBus implements EventBus {
//return this.channel.nack(msg, false, requeue)
}
connection?: AmqpConnectionManager
channel?: ChannelWrapper
connected: Boolean = false
private connectionOptions: RMQConnectionParams
public async connect() {
@@ -96,28 +96,39 @@ export class RabbitMQEventBus implements EventBus {
} catch (e) {
console.error("[RMQ] Error estableciendo la conexion con el servidor", e)
}
}
publish(events: DomainEvent[]): Promise<void> {
return new Promise((res, rej) => {
publish(events: DomainEvent[]): Promise<{ success: DomainEvent[], error: DomainEvent[] }> {
return new Promise(async (res, rej) => {
const successEvents: DomainEvent[] = []
const errorEvents: DomainEvent[] = []
try {
for (const event of events) {
const exchange = "sim.exchange"
const routingKey = event.key
const content = Buffer.from(JSON.stringify(event))
this.channel?.publish(exchange, routingKey, content, {}, (err, ok) => {
const isPublished = await this.channel?.publish(exchange, routingKey, content, {
headers: {
...event.headers
}
}, (err, ok) => {
if (err == undefined) {
console.log("Evento publicado ", event)
} else {
console.error("Error publicando", event)
}
})
// Hay que revisarlo pero en principio la libreria se encarga que el mensaje se publique
// si o si
successEvents.push(event)
}
return res()
return res({
success: successEvents,
error: errorEvents
})
} catch (err) {
return rej(err)
}
@@ -161,7 +172,8 @@ export class RabbitMQEventBus implements EventBus {
if (this.connection == undefined) throw new Error("[RMQ] Intentando crear un canal sin una conexion")
const channel = this.connection.createChannel({
setup: async (channel: Channel) => {
confirm: true,
setup: async (channel: ConfirmChannel) => {
// Exchanges comunes a todos
channel.assertExchange("sim.exchange", "topic", { durable: true })
channel.assertExchange("sim.dlx", "topic", { durable: true })
@@ -195,6 +207,6 @@ export class RabbitMQEventBus implements EventBus {
Promise.reject(error);
});
return channel as ChannelWrapper;
return channel;
}
}

View File

@@ -4,6 +4,14 @@
"version": "1.0.0",
"description": "",
"exports": {
"./aplication/*.js": {
"types": "./aplication/*.ts",
"default": "./aplication/*.js"
},
"./aplication/*": {
"types": "./aplication/*.ts",
"default": "./aplication/*.js"
},
"./infrastructure/*.js": {
"types": "./infrastructure/*.ts",
"default": "./infrastructure/*.js"
@@ -30,7 +38,7 @@
}
},
"scripts": {
"test": "echo \"Error: no test specified\" ",
"test": "node --import tsx --test ./**/*.test.ts",
"dev": "echo \" Shared no es un modulo ejecutable \" ",
"build": "tsc --build && tsc-alias -p tsconfig.json && cp package.json ../../dist/packages/sim-shared/"
},

View File

@@ -1,4 +1,12 @@
#/bin/bash
rm deployment/database/init.sql
cat deployment/database/*.sql >deployment/database/init.sql
# init sql debe juntar todos los scripts de "base" (sin contar migraciones)
cat deployment/database/base/*.sql >deployment/database/init.sql
#cp deployment/database/esquema_final* deployment/database/init.sql
# compatibilidad con postgresql < 17
sed -i '/\\restrict/d' deployment/database/init.sql
sed -i '/\\unrestrict/d' deployment/database/init.sql
docker compose -f deployment/local/docker/docker-compose.yaml --project-directory ./ up --watch

462
yarn.lock
View File

@@ -187,22 +187,6 @@ __metadata:
languageName: node
linkType: hard
"@isaacs/balanced-match@npm:^4.0.1":
version: 4.0.1
resolution: "@isaacs/balanced-match@npm:4.0.1"
checksum: 10/102fbc6d2c0d5edf8f6dbf2b3feb21695a21bc850f11bc47c4f06aa83bd8884fde3fe9d6d797d619901d96865fdcb4569ac2a54c937992c48885c5e3d9967fe8
languageName: node
linkType: hard
"@isaacs/brace-expansion@npm:^5.0.1":
version: 5.0.1
resolution: "@isaacs/brace-expansion@npm:5.0.1"
dependencies:
"@isaacs/balanced-match": "npm:^4.0.1"
checksum: 10/aec226065bc4285436a27379e08cc35bf94ef59f5098ac1c026495c9ba4ab33d851964082d3648d56d63eb90f2642867bd15a3e1b810b98beb1a8c14efce6a94
languageName: node
linkType: hard
"@isaacs/fs-minipass@npm:^4.0.0":
version: 4.0.1
resolution: "@isaacs/fs-minipass@npm:4.0.1"
@@ -284,177 +268,177 @@ __metadata:
languageName: node
linkType: hard
"@rollup/rollup-android-arm-eabi@npm:4.57.1":
version: 4.57.1
resolution: "@rollup/rollup-android-arm-eabi@npm:4.57.1"
"@rollup/rollup-android-arm-eabi@npm:4.59.0":
version: 4.59.0
resolution: "@rollup/rollup-android-arm-eabi@npm:4.59.0"
conditions: os=android & cpu=arm
languageName: node
linkType: hard
"@rollup/rollup-android-arm64@npm:4.57.1":
version: 4.57.1
resolution: "@rollup/rollup-android-arm64@npm:4.57.1"
"@rollup/rollup-android-arm64@npm:4.59.0":
version: 4.59.0
resolution: "@rollup/rollup-android-arm64@npm:4.59.0"
conditions: os=android & cpu=arm64
languageName: node
linkType: hard
"@rollup/rollup-darwin-arm64@npm:4.57.1":
version: 4.57.1
resolution: "@rollup/rollup-darwin-arm64@npm:4.57.1"
"@rollup/rollup-darwin-arm64@npm:4.59.0":
version: 4.59.0
resolution: "@rollup/rollup-darwin-arm64@npm:4.59.0"
conditions: os=darwin & cpu=arm64
languageName: node
linkType: hard
"@rollup/rollup-darwin-x64@npm:4.57.1":
version: 4.57.1
resolution: "@rollup/rollup-darwin-x64@npm:4.57.1"
"@rollup/rollup-darwin-x64@npm:4.59.0":
version: 4.59.0
resolution: "@rollup/rollup-darwin-x64@npm:4.59.0"
conditions: os=darwin & cpu=x64
languageName: node
linkType: hard
"@rollup/rollup-freebsd-arm64@npm:4.57.1":
version: 4.57.1
resolution: "@rollup/rollup-freebsd-arm64@npm:4.57.1"
"@rollup/rollup-freebsd-arm64@npm:4.59.0":
version: 4.59.0
resolution: "@rollup/rollup-freebsd-arm64@npm:4.59.0"
conditions: os=freebsd & cpu=arm64
languageName: node
linkType: hard
"@rollup/rollup-freebsd-x64@npm:4.57.1":
version: 4.57.1
resolution: "@rollup/rollup-freebsd-x64@npm:4.57.1"
"@rollup/rollup-freebsd-x64@npm:4.59.0":
version: 4.59.0
resolution: "@rollup/rollup-freebsd-x64@npm:4.59.0"
conditions: os=freebsd & cpu=x64
languageName: node
linkType: hard
"@rollup/rollup-linux-arm-gnueabihf@npm:4.57.1":
version: 4.57.1
resolution: "@rollup/rollup-linux-arm-gnueabihf@npm:4.57.1"
"@rollup/rollup-linux-arm-gnueabihf@npm:4.59.0":
version: 4.59.0
resolution: "@rollup/rollup-linux-arm-gnueabihf@npm:4.59.0"
conditions: os=linux & cpu=arm & libc=glibc
languageName: node
linkType: hard
"@rollup/rollup-linux-arm-musleabihf@npm:4.57.1":
version: 4.57.1
resolution: "@rollup/rollup-linux-arm-musleabihf@npm:4.57.1"
"@rollup/rollup-linux-arm-musleabihf@npm:4.59.0":
version: 4.59.0
resolution: "@rollup/rollup-linux-arm-musleabihf@npm:4.59.0"
conditions: os=linux & cpu=arm & libc=musl
languageName: node
linkType: hard
"@rollup/rollup-linux-arm64-gnu@npm:4.57.1":
version: 4.57.1
resolution: "@rollup/rollup-linux-arm64-gnu@npm:4.57.1"
"@rollup/rollup-linux-arm64-gnu@npm:4.59.0":
version: 4.59.0
resolution: "@rollup/rollup-linux-arm64-gnu@npm:4.59.0"
conditions: os=linux & cpu=arm64 & libc=glibc
languageName: node
linkType: hard
"@rollup/rollup-linux-arm64-musl@npm:4.57.1":
version: 4.57.1
resolution: "@rollup/rollup-linux-arm64-musl@npm:4.57.1"
"@rollup/rollup-linux-arm64-musl@npm:4.59.0":
version: 4.59.0
resolution: "@rollup/rollup-linux-arm64-musl@npm:4.59.0"
conditions: os=linux & cpu=arm64 & libc=musl
languageName: node
linkType: hard
"@rollup/rollup-linux-loong64-gnu@npm:4.57.1":
version: 4.57.1
resolution: "@rollup/rollup-linux-loong64-gnu@npm:4.57.1"
"@rollup/rollup-linux-loong64-gnu@npm:4.59.0":
version: 4.59.0
resolution: "@rollup/rollup-linux-loong64-gnu@npm:4.59.0"
conditions: os=linux & cpu=loong64 & libc=glibc
languageName: node
linkType: hard
"@rollup/rollup-linux-loong64-musl@npm:4.57.1":
version: 4.57.1
resolution: "@rollup/rollup-linux-loong64-musl@npm:4.57.1"
"@rollup/rollup-linux-loong64-musl@npm:4.59.0":
version: 4.59.0
resolution: "@rollup/rollup-linux-loong64-musl@npm:4.59.0"
conditions: os=linux & cpu=loong64 & libc=musl
languageName: node
linkType: hard
"@rollup/rollup-linux-ppc64-gnu@npm:4.57.1":
version: 4.57.1
resolution: "@rollup/rollup-linux-ppc64-gnu@npm:4.57.1"
"@rollup/rollup-linux-ppc64-gnu@npm:4.59.0":
version: 4.59.0
resolution: "@rollup/rollup-linux-ppc64-gnu@npm:4.59.0"
conditions: os=linux & cpu=ppc64 & libc=glibc
languageName: node
linkType: hard
"@rollup/rollup-linux-ppc64-musl@npm:4.57.1":
version: 4.57.1
resolution: "@rollup/rollup-linux-ppc64-musl@npm:4.57.1"
"@rollup/rollup-linux-ppc64-musl@npm:4.59.0":
version: 4.59.0
resolution: "@rollup/rollup-linux-ppc64-musl@npm:4.59.0"
conditions: os=linux & cpu=ppc64 & libc=musl
languageName: node
linkType: hard
"@rollup/rollup-linux-riscv64-gnu@npm:4.57.1":
version: 4.57.1
resolution: "@rollup/rollup-linux-riscv64-gnu@npm:4.57.1"
"@rollup/rollup-linux-riscv64-gnu@npm:4.59.0":
version: 4.59.0
resolution: "@rollup/rollup-linux-riscv64-gnu@npm:4.59.0"
conditions: os=linux & cpu=riscv64 & libc=glibc
languageName: node
linkType: hard
"@rollup/rollup-linux-riscv64-musl@npm:4.57.1":
version: 4.57.1
resolution: "@rollup/rollup-linux-riscv64-musl@npm:4.57.1"
"@rollup/rollup-linux-riscv64-musl@npm:4.59.0":
version: 4.59.0
resolution: "@rollup/rollup-linux-riscv64-musl@npm:4.59.0"
conditions: os=linux & cpu=riscv64 & libc=musl
languageName: node
linkType: hard
"@rollup/rollup-linux-s390x-gnu@npm:4.57.1":
version: 4.57.1
resolution: "@rollup/rollup-linux-s390x-gnu@npm:4.57.1"
"@rollup/rollup-linux-s390x-gnu@npm:4.59.0":
version: 4.59.0
resolution: "@rollup/rollup-linux-s390x-gnu@npm:4.59.0"
conditions: os=linux & cpu=s390x & libc=glibc
languageName: node
linkType: hard
"@rollup/rollup-linux-x64-gnu@npm:4.57.1":
version: 4.57.1
resolution: "@rollup/rollup-linux-x64-gnu@npm:4.57.1"
"@rollup/rollup-linux-x64-gnu@npm:4.59.0":
version: 4.59.0
resolution: "@rollup/rollup-linux-x64-gnu@npm:4.59.0"
conditions: os=linux & cpu=x64 & libc=glibc
languageName: node
linkType: hard
"@rollup/rollup-linux-x64-musl@npm:4.57.1":
version: 4.57.1
resolution: "@rollup/rollup-linux-x64-musl@npm:4.57.1"
"@rollup/rollup-linux-x64-musl@npm:4.59.0":
version: 4.59.0
resolution: "@rollup/rollup-linux-x64-musl@npm:4.59.0"
conditions: os=linux & cpu=x64 & libc=musl
languageName: node
linkType: hard
"@rollup/rollup-openbsd-x64@npm:4.57.1":
version: 4.57.1
resolution: "@rollup/rollup-openbsd-x64@npm:4.57.1"
"@rollup/rollup-openbsd-x64@npm:4.59.0":
version: 4.59.0
resolution: "@rollup/rollup-openbsd-x64@npm:4.59.0"
conditions: os=openbsd & cpu=x64
languageName: node
linkType: hard
"@rollup/rollup-openharmony-arm64@npm:4.57.1":
version: 4.57.1
resolution: "@rollup/rollup-openharmony-arm64@npm:4.57.1"
"@rollup/rollup-openharmony-arm64@npm:4.59.0":
version: 4.59.0
resolution: "@rollup/rollup-openharmony-arm64@npm:4.59.0"
conditions: os=openharmony & cpu=arm64
languageName: node
linkType: hard
"@rollup/rollup-win32-arm64-msvc@npm:4.57.1":
version: 4.57.1
resolution: "@rollup/rollup-win32-arm64-msvc@npm:4.57.1"
"@rollup/rollup-win32-arm64-msvc@npm:4.59.0":
version: 4.59.0
resolution: "@rollup/rollup-win32-arm64-msvc@npm:4.59.0"
conditions: os=win32 & cpu=arm64
languageName: node
linkType: hard
"@rollup/rollup-win32-ia32-msvc@npm:4.57.1":
version: 4.57.1
resolution: "@rollup/rollup-win32-ia32-msvc@npm:4.57.1"
"@rollup/rollup-win32-ia32-msvc@npm:4.59.0":
version: 4.59.0
resolution: "@rollup/rollup-win32-ia32-msvc@npm:4.59.0"
conditions: os=win32 & cpu=ia32
languageName: node
linkType: hard
"@rollup/rollup-win32-x64-gnu@npm:4.57.1":
version: 4.57.1
resolution: "@rollup/rollup-win32-x64-gnu@npm:4.57.1"
"@rollup/rollup-win32-x64-gnu@npm:4.59.0":
version: 4.59.0
resolution: "@rollup/rollup-win32-x64-gnu@npm:4.59.0"
conditions: os=win32 & cpu=x64
languageName: node
linkType: hard
"@rollup/rollup-win32-x64-msvc@npm:4.57.1":
version: 4.57.1
resolution: "@rollup/rollup-win32-x64-msvc@npm:4.57.1"
"@rollup/rollup-win32-x64-msvc@npm:4.59.0":
version: 4.59.0
resolution: "@rollup/rollup-win32-x64-msvc@npm:4.59.0"
conditions: os=win32 & cpu=x64
languageName: node
linkType: hard
@@ -579,11 +563,11 @@ __metadata:
linkType: hard
"@types/node@npm:*, @types/node@npm:^25.0.3":
version: 25.2.2
resolution: "@types/node@npm:25.2.2"
version: 25.3.0
resolution: "@types/node@npm:25.3.0"
dependencies:
undici-types: "npm:~7.16.0"
checksum: 10/a5d689d69168065da0ef2972c711b0382664fa52ad99e9988d174028444c71ecb6c5b641178c404acb6b526f27cd791341aa3998a66e71b9a9ee193d853db61e
undici-types: "npm:~7.18.0"
checksum: 10/061b00c8de070a606a052afaa4c45dca5f8d6a8e7e39c0c3e196bb650ee37e986bbb161991ea39076a05aada102f36b13c974528448a09efd8d36bdfee75de4b
languageName: node
linkType: hard
@@ -785,6 +769,13 @@ __metadata:
languageName: node
linkType: hard
"ansi-regex@npm:^6.0.1":
version: 6.2.2
resolution: "ansi-regex@npm:6.2.2"
checksum: 10/9b17ce2c6daecc75bcd5966b9ad672c23b184dc3ed9bf3c98a0702f0d2f736c15c10d461913568f2cf527a5e64291c7473358885dd493305c84a1cfed66ba94f
languageName: node
linkType: hard
"ansi-styles@npm:^4.0.0, ansi-styles@npm:^4.1.0":
version: 4.3.0
resolution: "ansi-styles@npm:4.3.0"
@@ -794,6 +785,13 @@ __metadata:
languageName: node
linkType: hard
"ansi-styles@npm:^6.2.1":
version: 6.2.3
resolution: "ansi-styles@npm:6.2.3"
checksum: 10/c49dad7639f3e48859bd51824c93b9eb0db628afc243c51c3dd2410c4a15ede1a83881c6c7341aa2b159c4f90c11befb38f2ba848c07c66c9f9de4bcd7cb9f30
languageName: node
linkType: hard
"anymatch@npm:~3.1.2":
version: 3.1.3
resolution: "anymatch@npm:3.1.3"
@@ -857,6 +855,13 @@ __metadata:
languageName: node
linkType: hard
"balanced-match@npm:^4.0.2":
version: 4.0.4
resolution: "balanced-match@npm:4.0.4"
checksum: 10/fb07bb66a0959c2843fc055838047e2a95ccebb837c519614afb067ebfdf2fa967ca8d712c35ced07f2cd26fc6f07964230b094891315ad74f11eba3d53178a0
languageName: node
linkType: hard
"binary-extensions@npm:^2.0.0":
version: 2.3.0
resolution: "binary-extensions@npm:2.3.0"
@@ -881,6 +886,15 @@ __metadata:
languageName: node
linkType: hard
"brace-expansion@npm:^5.0.2":
version: 5.0.3
resolution: "brace-expansion@npm:5.0.3"
dependencies:
balanced-match: "npm:^4.0.2"
checksum: 10/8ba7deae4ca333d52418d2cde3287ac23f44f7330d92c3ecd96a8941597bea8aab02227bd990944d6711dd549bcc6e550fe70be5d94aa02e2fdc88942f480c9b
languageName: node
linkType: hard
"braces@npm:^3.0.3, braces@npm:~3.0.2":
version: 3.0.3
resolution: "braces@npm:3.0.3"
@@ -997,6 +1011,17 @@ __metadata:
languageName: node
linkType: hard
"cliui@npm:^9.0.1":
version: 9.0.1
resolution: "cliui@npm:9.0.1"
dependencies:
string-width: "npm:^7.2.0"
strip-ansi: "npm:^7.1.0"
wrap-ansi: "npm:^9.0.0"
checksum: 10/df43d8d1c6e3254cbb64b1905310d5f6672c595496a3cbe76946c6d24777136886470686f2772ac9edfe547a74bb70e8017530b3554715aee119efd7752fc0d9
languageName: node
linkType: hard
"color-convert@npm:^2.0.1":
version: 2.0.1
resolution: "color-convert@npm:2.0.1"
@@ -1098,6 +1123,18 @@ __metadata:
languageName: node
linkType: hard
"db-migrate@https://git.savefamilygps.net/alvarsanmartin/herramienta-migracion.git":
version: 1.1.0
resolution: "db-migrate@https://git.savefamilygps.net/alvarsanmartin/herramienta-migracion.git#commit=f84d68ba79161b9b06b747919979db00aac34b49"
dependencies:
pg: "npm:^8.18.0"
yargs: "npm:^18.0.0"
bin:
db-migrate: ./lib/index.js
checksum: 10/2468cfd14a5f218845f5437f530a68993a51b3998cdd9d0c7f28cdb810314200c471debac9ca19c34d4978907b7c4ced5c95e777eebd40c3baa795ad945d8892
languageName: node
linkType: hard
"debug@npm:4, debug@npm:^4.1.1, debug@npm:^4.3.4, debug@npm:^4.3.7, debug@npm:^4.4.0, debug@npm:^4.4.3":
version: 4.4.3
resolution: "debug@npm:4.4.3"
@@ -1144,9 +1181,9 @@ __metadata:
linkType: hard
"dotenv@npm:*, dotenv@npm:^17.2.3":
version: 17.2.4
resolution: "dotenv@npm:17.2.4"
checksum: 10/3b0fdfb40aebfec3a3b4421990b5874cb93c5a259d20ef637ead237a360cbaf753879d3a66e5c044147658824d86ffead6a4090e8ebb727782e5717fcdb636dd
version: 17.3.1
resolution: "dotenv@npm:17.3.1"
checksum: 10/4dde6571dff22c2323a3e33ac25e1e7d51c4b6d60dc884f59e3efe85c8fd3cc8800d6b3925d05c46b19dca08cba821a0a24e22f75a1b9b768c859b98bb927b04
languageName: node
linkType: hard
@@ -1168,6 +1205,13 @@ __metadata:
languageName: node
linkType: hard
"emoji-regex@npm:^10.3.0":
version: 10.6.0
resolution: "emoji-regex@npm:10.6.0"
checksum: 10/98cc0b0e1daed1ed25afbf69dcb921fee00f712f51aab93aa1547e4e4e8171725cc4f0098aaa645b4f611a19da11ec9f4623eb6ff2b72314b39a8f2ae7c12bf2
languageName: node
linkType: hard
"emoji-regex@npm:^8.0.0":
version: 8.0.0
resolution: "emoji-regex@npm:8.0.0"
@@ -1577,6 +1621,13 @@ __metadata:
languageName: node
linkType: hard
"get-east-asian-width@npm:^1.0.0":
version: 1.5.0
resolution: "get-east-asian-width@npm:1.5.0"
checksum: 10/60bc34cd1e975055ab99f0f177e31bed3e516ff7cee9c536474383954a976abaa6b94a51d99ad158ef1e372790fa096cab7d07f166bb0778f6587954c0fbe946
languageName: node
linkType: hard
"get-intrinsic@npm:^1.2.5, get-intrinsic@npm:^1.2.6, get-intrinsic@npm:^1.3.0":
version: 1.3.1
resolution: "get-intrinsic@npm:1.3.1"
@@ -1627,13 +1678,13 @@ __metadata:
linkType: hard
"glob@npm:^13.0.0":
version: 13.0.1
resolution: "glob@npm:13.0.1"
version: 13.0.6
resolution: "glob@npm:13.0.6"
dependencies:
minimatch: "npm:^10.1.2"
minipass: "npm:^7.1.2"
path-scurry: "npm:^2.0.0"
checksum: 10/465e8cc269ab88d7415a3906cdc0f4543a2ae54df99207204af5bc28a944396d8d893822f546a8056a78ec714e608ab4f3502532c4d6b9cc5e113adf0fe5109e
minimatch: "npm:^10.2.2"
minipass: "npm:^7.1.3"
path-scurry: "npm:^2.0.2"
checksum: 10/201ad69e5f0aa74e1d8c00a481581f8b8c804b6a4fbfabeeb8541f5d756932800331daeba99b58fb9e4cd67e12ba5a7eba5b82fb476691588418060b84353214
languageName: node
linkType: hard
@@ -1843,17 +1894,17 @@ __metadata:
languageName: node
linkType: hard
"isexe@npm:^3.1.1":
version: 3.1.4
resolution: "isexe@npm:3.1.4"
checksum: 10/41b2006cb0f545092b73246edccdaaf12243cc14b29dff4c7dc6759eba659fa8c16de7d9c64a9fb4d05be9cc83b12399ea553ef8a9763db02b1433261005b98a
"isexe@npm:^4.0.0":
version: 4.0.0
resolution: "isexe@npm:4.0.0"
checksum: 10/2ead327ef596042ef9c9ec5f236b316acfaedb87f4bb61b3c3d574fb2e9c8a04b67305e04733bde52c24d9622fdebd3270aadb632adfbf9cadef88fe30f479e5
languageName: node
linkType: hard
"lru-cache@npm:^11.0.0, lru-cache@npm:^11.1.0, lru-cache@npm:^11.2.1":
version: 11.2.5
resolution: "lru-cache@npm:11.2.5"
checksum: 10/be50f66c6e23afeaab9c7eefafa06344dd13cde7b3528809c2660c4ad70d93b9ba537366634623cbb2eb411671f526b5a4af2c602507b9258aead0fa8d713f6c
version: 11.2.6
resolution: "lru-cache@npm:11.2.6"
checksum: 10/91222bbd59f793a0a0ad57789388f06b34ac9bb1613433c1d1810457d09db5cd3ec8943227ce2e1f5d6a0a15d6f1a9f129cb2c49ae9b6b10e82d4965fddecbef
languageName: node
linkType: hard
@@ -1971,12 +2022,12 @@ __metadata:
languageName: node
linkType: hard
"minimatch@npm:^10.1.2":
version: 10.1.2
resolution: "minimatch@npm:10.1.2"
"minimatch@npm:^10.2.2":
version: 10.2.2
resolution: "minimatch@npm:10.2.2"
dependencies:
"@isaacs/brace-expansion": "npm:^5.0.1"
checksum: 10/6f0ef975463739207144e411bdd54f7205ce38770b162fa3bc4c9be4987a16cb20d0962a82f26c2372598cfba90faa97b327239d303b529b774f17681c163b46
brace-expansion: "npm:^5.0.2"
checksum: 10/e135be7b502ac97c02bcee42ccc1c55dc26dbac036c0f4acde69e42fe339d7fb53fae711e57b3546cb533426382ea492c73a073c7f78832e0453d120d48dd015
languageName: node
linkType: hard
@@ -2040,10 +2091,10 @@ __metadata:
languageName: node
linkType: hard
"minipass@npm:^7.0.2, minipass@npm:^7.0.3, minipass@npm:^7.0.4, minipass@npm:^7.1.2":
version: 7.1.2
resolution: "minipass@npm:7.1.2"
checksum: 10/c25f0ee8196d8e6036661104bacd743785b2599a21de5c516b32b3fa2b83113ac89a2358465bc04956baab37ffb956ae43be679b2262bf7be15fce467ccd7950
"minipass@npm:^7.0.2, minipass@npm:^7.0.3, minipass@npm:^7.0.4, minipass@npm:^7.1.2, minipass@npm:^7.1.3":
version: 7.1.3
resolution: "minipass@npm:7.1.3"
checksum: 10/175e4d5e20980c3cd316ae82d2c031c42f6c746467d8b1905b51060a0ba4461441a0c25bb67c025fd9617f9a3873e152c7b543c6b5ac83a1846be8ade80dffd6
languageName: node
linkType: hard
@@ -2184,13 +2235,13 @@ __metadata:
languageName: node
linkType: hard
"path-scurry@npm:^2.0.0":
version: 2.0.1
resolution: "path-scurry@npm:2.0.1"
"path-scurry@npm:^2.0.2":
version: 2.0.2
resolution: "path-scurry@npm:2.0.2"
dependencies:
lru-cache: "npm:^11.0.0"
minipass: "npm:^7.1.2"
checksum: 10/1e9c74e9ccf94d7c16056a5cb2dba9fa23eec1bc221ab15c44765486b9b9975b4cd9a4d55da15b96eadf67d5202e9a2f1cec9023fbb35fe7d9ccd0ff1891f88b
checksum: 10/2b4257422bcb870a4c2d205b3acdbb213a72f5e2250f61c80f79c9d014d010f82bdf8584441612c8e1fa4eb098678f5704a66fa8377d72646bad4be38e57a2c3
languageName: node
linkType: hard
@@ -2418,11 +2469,11 @@ __metadata:
linkType: hard
"qs@npm:^6.14.0, qs@npm:^6.14.1":
version: 6.14.1
resolution: "qs@npm:6.14.1"
version: 6.15.0
resolution: "qs@npm:6.15.0"
dependencies:
side-channel: "npm:^1.1.0"
checksum: 10/34b5ab00a910df432d55180ef39c1d1375e550f098b5ec153b41787f1a6a6d7e5f9495593c3b112b77dbc6709d0ae18e55b82847a4c2bbbb0de1e8ccbb1794c5
checksum: 10/a3458f2f389285c3512e0ebc55522ee370ac7cb720ba9f0eff3e30fb2bb07631caf556c08e2a3d4481a371ac14faa9ceb7442a0610c5a7e55b23a5bdee7b701c
languageName: node
linkType: hard
@@ -2511,34 +2562,34 @@ __metadata:
linkType: hard
"rollup@npm:^4.43.0":
version: 4.57.1
resolution: "rollup@npm:4.57.1"
version: 4.59.0
resolution: "rollup@npm:4.59.0"
dependencies:
"@rollup/rollup-android-arm-eabi": "npm:4.57.1"
"@rollup/rollup-android-arm64": "npm:4.57.1"
"@rollup/rollup-darwin-arm64": "npm:4.57.1"
"@rollup/rollup-darwin-x64": "npm:4.57.1"
"@rollup/rollup-freebsd-arm64": "npm:4.57.1"
"@rollup/rollup-freebsd-x64": "npm:4.57.1"
"@rollup/rollup-linux-arm-gnueabihf": "npm:4.57.1"
"@rollup/rollup-linux-arm-musleabihf": "npm:4.57.1"
"@rollup/rollup-linux-arm64-gnu": "npm:4.57.1"
"@rollup/rollup-linux-arm64-musl": "npm:4.57.1"
"@rollup/rollup-linux-loong64-gnu": "npm:4.57.1"
"@rollup/rollup-linux-loong64-musl": "npm:4.57.1"
"@rollup/rollup-linux-ppc64-gnu": "npm:4.57.1"
"@rollup/rollup-linux-ppc64-musl": "npm:4.57.1"
"@rollup/rollup-linux-riscv64-gnu": "npm:4.57.1"
"@rollup/rollup-linux-riscv64-musl": "npm:4.57.1"
"@rollup/rollup-linux-s390x-gnu": "npm:4.57.1"
"@rollup/rollup-linux-x64-gnu": "npm:4.57.1"
"@rollup/rollup-linux-x64-musl": "npm:4.57.1"
"@rollup/rollup-openbsd-x64": "npm:4.57.1"
"@rollup/rollup-openharmony-arm64": "npm:4.57.1"
"@rollup/rollup-win32-arm64-msvc": "npm:4.57.1"
"@rollup/rollup-win32-ia32-msvc": "npm:4.57.1"
"@rollup/rollup-win32-x64-gnu": "npm:4.57.1"
"@rollup/rollup-win32-x64-msvc": "npm:4.57.1"
"@rollup/rollup-android-arm-eabi": "npm:4.59.0"
"@rollup/rollup-android-arm64": "npm:4.59.0"
"@rollup/rollup-darwin-arm64": "npm:4.59.0"
"@rollup/rollup-darwin-x64": "npm:4.59.0"
"@rollup/rollup-freebsd-arm64": "npm:4.59.0"
"@rollup/rollup-freebsd-x64": "npm:4.59.0"
"@rollup/rollup-linux-arm-gnueabihf": "npm:4.59.0"
"@rollup/rollup-linux-arm-musleabihf": "npm:4.59.0"
"@rollup/rollup-linux-arm64-gnu": "npm:4.59.0"
"@rollup/rollup-linux-arm64-musl": "npm:4.59.0"
"@rollup/rollup-linux-loong64-gnu": "npm:4.59.0"
"@rollup/rollup-linux-loong64-musl": "npm:4.59.0"
"@rollup/rollup-linux-ppc64-gnu": "npm:4.59.0"
"@rollup/rollup-linux-ppc64-musl": "npm:4.59.0"
"@rollup/rollup-linux-riscv64-gnu": "npm:4.59.0"
"@rollup/rollup-linux-riscv64-musl": "npm:4.59.0"
"@rollup/rollup-linux-s390x-gnu": "npm:4.59.0"
"@rollup/rollup-linux-x64-gnu": "npm:4.59.0"
"@rollup/rollup-linux-x64-musl": "npm:4.59.0"
"@rollup/rollup-openbsd-x64": "npm:4.59.0"
"@rollup/rollup-openharmony-arm64": "npm:4.59.0"
"@rollup/rollup-win32-arm64-msvc": "npm:4.59.0"
"@rollup/rollup-win32-ia32-msvc": "npm:4.59.0"
"@rollup/rollup-win32-x64-gnu": "npm:4.59.0"
"@rollup/rollup-win32-x64-msvc": "npm:4.59.0"
"@types/estree": "npm:1.0.8"
fsevents: "npm:~2.3.2"
dependenciesMeta:
@@ -2596,7 +2647,7 @@ __metadata:
optional: true
bin:
rollup: dist/bin/rollup
checksum: 10/0451371339e593967c979e498fac4dfd0ba15fadf0dac96875940796307a00d62ab68460366a65f4872ae8edd9339e3d9501e8e5764c1f23e25e0951f75047c6
checksum: 10/728237932aad7022c0640cd126b9fe5285f2578099f22a0542229a17785320a6553b74582fa5977877541c1faf27de65ed2750bc89dbb55b525405244a46d9f1
languageName: node
linkType: hard
@@ -2769,7 +2820,7 @@ __metadata:
languageName: unknown
linkType: soft
"sim-consumidor-objenious@sim-consumidor-objenious:*, sim-consumidor-objenious@workspace:packages/sim-consumidor-objenious":
"sim-consumidor-objenious@workspace:packages/sim-consumidor-objenious":
version: 0.0.0-use.local
resolution: "sim-consumidor-objenious@workspace:packages/sim-consumidor-objenious"
dependencies:
@@ -2784,7 +2835,6 @@ __metadata:
dotenv: "npm:*"
express: "npm:*"
prettier: "npm:*"
sim-consumidor-objenious: "sim-consumidor-objenious:*"
sim-shared: "sim-shared:*"
supertest: "npm:*"
tsc-alias: "npm:^1.8.16"
@@ -2809,6 +2859,7 @@ __metadata:
dotenv: "npm:*"
express: "npm:*"
prettier: "npm:*"
sim-shared: "sim-shared:*"
supertest: "npm:*"
tsc-alias: "npm:^1.8.16"
tsx: "npm:*"
@@ -2833,6 +2884,7 @@ __metadata:
axios: "npm:^1.13.3"
concurrently: "npm:^9.2.1"
cors: "npm:^2.8.5"
db-migrate: "https://git.savefamilygps.net/alvarsanmartin/herramienta-migracion.git"
dotenv: "npm:^17.2.3"
express: "npm:^5.2.1"
pg: "npm:^8.18.0"
@@ -2841,6 +2893,7 @@ __metadata:
tsc-alias: "npm:^1.8.16"
tsx: "npm:^4.21.0"
typescript: "npm:^5.9.3"
uuidv7: "npm:^1.1.0"
vite: "npm:^7.3.1"
vite-tsconfig-paths: "npm:^6.0.5"
vitest: "npm:^4.0.16"
@@ -2965,11 +3018,11 @@ __metadata:
linkType: hard
"ssri@npm:^13.0.0":
version: 13.0.0
resolution: "ssri@npm:13.0.0"
version: 13.0.1
resolution: "ssri@npm:13.0.1"
dependencies:
minipass: "npm:^7.0.3"
checksum: 10/fd59bfedf0659c1b83f6e15459162da021f08ec0f5834dd9163296f8b77ee82f9656aa1d415c3d3848484293e0e6aefdd482e863e52ddb53d520bb73da1eeec1
checksum: 10/ae560d0378d074006a71b06af71bfbe84a3fe1ac6e16c1f07575f69e670d40170507fe52b21bcc23399429bc6a15f4bc3ea8d9bc88e9dfd7e87de564e6da6a72
languageName: node
linkType: hard
@@ -3005,6 +3058,17 @@ __metadata:
languageName: node
linkType: hard
"string-width@npm:^7.0.0, string-width@npm:^7.2.0":
version: 7.2.0
resolution: "string-width@npm:7.2.0"
dependencies:
emoji-regex: "npm:^10.3.0"
get-east-asian-width: "npm:^1.0.0"
strip-ansi: "npm:^7.1.0"
checksum: 10/42f9e82f61314904a81393f6ef75b832c39f39761797250de68c041d8ba4df2ef80db49ab6cd3a292923a6f0f409b8c9980d120f7d32c820b4a8a84a2598a295
languageName: node
linkType: hard
"strip-ansi@npm:^6.0.0, strip-ansi@npm:^6.0.1":
version: 6.0.1
resolution: "strip-ansi@npm:6.0.1"
@@ -3014,6 +3078,15 @@ __metadata:
languageName: node
linkType: hard
"strip-ansi@npm:^7.1.0":
version: 7.1.2
resolution: "strip-ansi@npm:7.1.2"
dependencies:
ansi-regex: "npm:^6.0.1"
checksum: 10/db0e3f9654e519c8a33c50fc9304d07df5649388e7da06d3aabf66d29e5ad65d5e6315d8519d409c15b32fa82c1df7e11ed6f8cd50b0e4404463f0c9d77c8d0b
languageName: node
linkType: hard
"superagent@npm:^10.3.0":
version: 10.3.0
resolution: "superagent@npm:10.3.0"
@@ -3061,15 +3134,15 @@ __metadata:
linkType: hard
"tar@npm:^7.5.4":
version: 7.5.7
resolution: "tar@npm:7.5.7"
version: 7.5.9
resolution: "tar@npm:7.5.9"
dependencies:
"@isaacs/fs-minipass": "npm:^4.0.0"
chownr: "npm:^3.0.0"
minipass: "npm:^7.1.2"
minizlib: "npm:^3.1.0"
yallist: "npm:^5.0.0"
checksum: 10/0d6938dd32fe5c0f17c8098d92bd9889ee0ed9d11f12381b8146b6e8c87bb5aa49feec7abc42463f0597503d8e89e4c4c0b42bff1a5a38444e918b4878b7fd21
checksum: 10/1213cdde9c22d6acf8809ba5d2a025212ce3517bc99c4a4c6981b7dc0489bf3b164db9c826c9517680889194c9ba57448c8ff0da35eca9a60bb7689bf0b3897d
languageName: node
linkType: hard
@@ -3214,10 +3287,10 @@ __metadata:
languageName: node
linkType: hard
"undici-types@npm:~7.16.0":
version: 7.16.0
resolution: "undici-types@npm:7.16.0"
checksum: 10/db43439f69c2d94cc29f75cbfe9de86df87061d6b0c577ebe9bb3255f49b22c50162a7d7eb413b0458b6510b8ca299ac7cff38c3a29fbd31af9f504bcf7fbc0d
"undici-types@npm:~7.18.0":
version: 7.18.2
resolution: "undici-types@npm:7.18.2"
checksum: 10/e61a5918f624d68420c3ca9d301e9f15b61cba6e97be39fe2ce266dd6151e4afe424d679372638826cb506be33952774e0424141200111a9857e464216c009af
languageName: node
linkType: hard
@@ -3256,6 +3329,15 @@ __metadata:
languageName: node
linkType: hard
"uuidv7@npm:^1.1.0":
version: 1.1.0
resolution: "uuidv7@npm:1.1.0"
bin:
uuidv7: cli.js
checksum: 10/a611c4918b1662ec8f4f927b96ccc975ca050d3b221e2943ade6796c956117719a94a9edcc3f26ec8597c2ab270fed074ef5ab9fcc3b46f42c26cb4caafa4ac2
languageName: node
linkType: hard
"vary@npm:^1, vary@npm:^1.1.2":
version: 1.1.2
resolution: "vary@npm:1.1.2"
@@ -3264,15 +3346,15 @@ __metadata:
linkType: hard
"vite-tsconfig-paths@npm:^6.0.5":
version: 6.1.0
resolution: "vite-tsconfig-paths@npm:6.1.0"
version: 6.1.1
resolution: "vite-tsconfig-paths@npm:6.1.1"
dependencies:
debug: "npm:^4.1.1"
globrex: "npm:^0.1.2"
tsconfck: "npm:^3.0.3"
peerDependencies:
vite: "*"
checksum: 10/410104d95aca47679718ae5602d6a05c149e1a85b1777ebd670c53e7cd8c55b0f9f6d2d7740a04f33ecb73c0983e3f6897bfa4ee9d5fc5e719385f203ad27ab5
checksum: 10/f752bce4f3c5707f0df7af8a20294b1f325e26f50578b82c8262d851028616ebb1a3e73ab0789c55cf3c8da8d985e843193c0bec2cb31662c567ccdf137f1fd0
languageName: node
linkType: hard
@@ -3391,13 +3473,13 @@ __metadata:
linkType: hard
"which@npm:^6.0.0":
version: 6.0.0
resolution: "which@npm:6.0.0"
version: 6.0.1
resolution: "which@npm:6.0.1"
dependencies:
isexe: "npm:^3.1.1"
isexe: "npm:^4.0.0"
bin:
node-which: bin/which.js
checksum: 10/df19b2cd8aac94b333fa29b42e8e371a21e634a742a3b156716f7752a5afe1d73fb5d8bce9b89326f453d96879e8fe626eb421e0117eb1a3ce9fd8c97f6b7db9
checksum: 10/dbea77c7d3058bf6c78bf9659d2dce4d2b57d39a15b826b2af6ac2e5a219b99dc8a831b79fdbc453c0598adb4f3f84cf9c2491fd52beb9f5d2dececcad117f68
languageName: node
linkType: hard
@@ -3424,6 +3506,17 @@ __metadata:
languageName: node
linkType: hard
"wrap-ansi@npm:^9.0.0":
version: 9.0.2
resolution: "wrap-ansi@npm:9.0.2"
dependencies:
ansi-styles: "npm:^6.2.1"
string-width: "npm:^7.0.0"
strip-ansi: "npm:^7.1.0"
checksum: 10/f3907e1ea9717404ca53a338fa5a017c2121550c3a5305180e2bc08c03e21aa45068df55b0d7676bf57be1880ba51a84458c17241ebedea485fafa9ef16b4024
languageName: node
linkType: hard
"wrappy@npm:1":
version: 1.0.2
resolution: "wrappy@npm:1.0.2"
@@ -3466,6 +3559,13 @@ __metadata:
languageName: node
linkType: hard
"yargs-parser@npm:^22.0.0":
version: 22.0.0
resolution: "yargs-parser@npm:22.0.0"
checksum: 10/f13c42bad6ebed1a587a72f2db5694f5fa772bcaf409a701691d13cf74eb5adfcf61a2611de08807e319b829d3e5e6e1578b16ebe174cae8e8be3bf7b8e7a19e
languageName: node
linkType: hard
"yargs@npm:17.7.2":
version: 17.7.2
resolution: "yargs@npm:17.7.2"
@@ -3480,3 +3580,17 @@ __metadata:
checksum: 10/abb3e37678d6e38ea85485ed86ebe0d1e3464c640d7d9069805ea0da12f69d5a32df8e5625e370f9c96dd1c2dc088ab2d0a4dd32af18222ef3c4224a19471576
languageName: node
linkType: hard
"yargs@npm:^18.0.0":
version: 18.0.0
resolution: "yargs@npm:18.0.0"
dependencies:
cliui: "npm:^9.0.1"
escalade: "npm:^3.1.1"
get-caller-file: "npm:^2.0.5"
string-width: "npm:^7.2.0"
y18n: "npm:^5.0.5"
yargs-parser: "npm:^22.0.0"
checksum: 10/5af36234871390386b31cac99f00e79fcbc2ead858a61b30a8ca381c5fde5df8af0b407c36b000d3f774bcbe4aec5833f2f1c915f6ddc49ce97b78176b651801
languageName: node
linkType: hard